python_code
stringlengths 0
679k
| repo_name
stringlengths 9
41
| file_path
stringlengths 6
149
|
---|---|---|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Request, Response
from swift.common.middleware import gatekeeper
class FakeApp(object):
    """Minimal WSGI app stub: replies ``200 OK`` with body ``b'FAKE APP'``
    plus any headers given at construction, and records the last swob
    Request it saw in ``self.req`` for later inspection by tests."""

    def __init__(self, headers=None):
        if headers is None:
            headers = {}
        self.headers = headers
        # Last Request passed through __call__ (None until first call).
        self.req = None

    def __call__(self, env, start_response):
        self.req = Request(env)
        return Response(request=self.req, body=b'FAKE APP',
                        headers=self.headers)(env, start_response)
class FakeMiddleware(object):
    """WSGI middleware stub that injects every ``(name, value)`` pair from
    ``header_list`` into the response header list before forwarding the
    ``start_response`` call downstream."""

    def __init__(self, app, conf, header_list=None):
        self.app = app
        self.conf = conf
        self.header_list = header_list

    def __call__(self, env, start_response):
        def wrapped_start_response(status, response_headers, exc_info=None):
            # Mutate the caller's header list in place, then delegate.
            response_headers.extend(self.header_list)
            return start_response(status, response_headers, exc_info)

        return self.app(env, wrapped_start_response)
class TestGatekeeper(unittest.TestCase):
    """Exercise the gatekeeper middleware's header filtering: reserved
    (sysmeta / backend) headers must be stripped from inbound requests and
    outbound responses, while similarly named but non-reserved headers
    pass through untouched."""

    methods = ['PUT', 'POST', 'GET', 'DELETE', 'HEAD', 'COPY', 'OPTIONS']

    # These only *resemble* reserved names (note the 'xx-' prefixes and
    # 'x-timestamp-foo'), so the gatekeeper must let them through.
    allowed_headers = {'xx-account-sysmeta-foo': 'value',
                       'xx-container-sysmeta-foo': 'value',
                       'xx-object-sysmeta-foo': 'value',
                       'x-account-meta-foo': 'value',
                       'x-container-meta-foo': 'value',
                       'x-object-meta-foo': 'value',
                       'x-timestamp-foo': 'value'}

    # Genuine sysmeta headers, in mixed case, which must be removed.
    sysmeta_headers = {'x-account-sysmeta-': 'value',
                       'x-container-sysmeta-': 'value',
                       'x-object-sysmeta-': 'value',
                       'x-account-sysmeta-foo': 'value',
                       'x-container-sysmeta-foo': 'value',
                       'x-object-sysmeta-foo': 'value',
                       'X-Account-Sysmeta-BAR': 'value',
                       'X-Container-Sysmeta-BAR': 'value',
                       'X-Object-Sysmeta-BAR': 'value'}

    x_backend_headers = {'X-Backend-Replication': 'true',
                         'X-Backend-Replication-Headers': 'stuff'}

    object_transient_sysmeta_headers = {
        'x-object-transient-sysmeta-': 'value',
        'x-object-transient-sysmeta-foo': 'value'}

    x_timestamp_headers = {'X-Timestamp': '1455952805.719739'}

    forbidden_headers_out = dict(sysmeta_headers)
    forbidden_headers_out.update(x_backend_headers)
    forbidden_headers_out.update(object_transient_sysmeta_headers)
    forbidden_headers_in = dict(forbidden_headers_out)
    # X-Timestamp is not dropped inbound; it is "shunted" to an
    # X-Backend-Inbound- prefixed header instead.
    shunted_headers_in = dict(x_timestamp_headers)

    def _assertHeadersEqual(self, expected, actual):
        # Presence-only check: every expected key must appear in 'actual'
        # (compared lower-cased); values are not compared.
        for key in expected:
            self.assertIn(key.lower(), actual)

    def _assertHeadersAbsent(self, unexpected, actual):
        for key in unexpected:
            self.assertNotIn(key.lower(), actual)

    def get_app(self, app, global_conf, **local_conf):
        # Wrap 'app' with the gatekeeper middleware under test.
        factory = gatekeeper.filter_factory(global_conf, **local_conf)
        return factory(app)

    def test_ok_header(self):
        # Allowed headers must reach the wrapped app untouched.
        req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': 'PUT'},
                            headers=self.allowed_headers)
        fake_app = FakeApp()
        app = self.get_app(fake_app, {})
        resp = req.get_response(app)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(resp.body, b'FAKE APP')
        self._assertHeadersEqual(self.allowed_headers, fake_app.req.headers)

    def _test_reserved_header_removed_inbound(self, method):
        headers = dict(self.forbidden_headers_in)
        headers.update(self.allowed_headers)
        headers.update(self.shunted_headers_in)
        req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method},
                            headers=headers)
        fake_app = FakeApp()
        app = self.get_app(fake_app, {})
        resp = req.get_response(app)
        self.assertEqual('200 OK', resp.status)
        expected_headers = dict(self.allowed_headers)
        # shunt_inbound_x_timestamp should be enabled by default
        expected_headers.update({'X-Backend-Inbound-' + k: v
                                 for k, v in self.shunted_headers_in.items()})
        self._assertHeadersEqual(expected_headers, fake_app.req.headers)
        unexpected_headers = dict(self.forbidden_headers_in)
        unexpected_headers.update(self.shunted_headers_in)
        self._assertHeadersAbsent(unexpected_headers, fake_app.req.headers)

    def test_reserved_header_removed_inbound(self):
        for method in self.methods:
            self._test_reserved_header_removed_inbound(method)

    def _test_reserved_header_shunted_inbound(self, method):
        # Same as above but with shunting explicitly enabled in config.
        headers = dict(self.shunted_headers_in)
        headers.update(self.allowed_headers)
        req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method},
                            headers=headers)
        fake_app = FakeApp()
        app = self.get_app(fake_app, {}, shunt_inbound_x_timestamp='true')
        resp = req.get_response(app)
        self.assertEqual('200 OK', resp.status)
        expected_headers = dict(self.allowed_headers)
        expected_headers.update({'X-Backend-Inbound-' + k: v
                                 for k, v in self.shunted_headers_in.items()})
        self._assertHeadersEqual(expected_headers, fake_app.req.headers)
        self._assertHeadersAbsent(self.shunted_headers_in,
                                  fake_app.req.headers)

    def test_reserved_header_shunted_inbound(self):
        for method in self.methods:
            self._test_reserved_header_shunted_inbound(method)

    def _test_reserved_header_shunt_bypassed_inbound(self, method):
        # With shunting disabled, X-Timestamp passes straight through.
        headers = dict(self.shunted_headers_in)
        headers.update(self.allowed_headers)
        req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method},
                            headers=headers)
        fake_app = FakeApp()
        app = self.get_app(fake_app, {}, shunt_inbound_x_timestamp='false')
        resp = req.get_response(app)
        self.assertEqual('200 OK', resp.status)
        expected_headers = dict(self.allowed_headers)
        expected_headers.update(self.shunted_headers_in)
        self._assertHeadersEqual(expected_headers, fake_app.req.headers)

    def test_reserved_header_shunt_bypassed_inbound(self):
        for method in self.methods:
            self._test_reserved_header_shunt_bypassed_inbound(method)

    def _test_reserved_header_removed_outbound(self, method):
        # Reserved headers set by the wrapped app must be stripped from
        # the response seen by the client.
        headers = dict(self.forbidden_headers_out)
        headers.update(self.allowed_headers)
        req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method})
        fake_app = FakeApp(headers=headers)
        app = self.get_app(fake_app, {})
        resp = req.get_response(app)
        self.assertEqual('200 OK', resp.status)
        self._assertHeadersEqual(self.allowed_headers, resp.headers)
        self._assertHeadersAbsent(self.forbidden_headers_out, resp.headers)

    def test_reserved_header_removed_outbound(self):
        for method in self.methods:
            self._test_reserved_header_removed_outbound(method)

    def _test_duplicate_headers_not_removed(self, method, app_hdrs):
        # A downstream middleware adds two X-Header values; the gatekeeper
        # must preserve both (no de-duplication of repeated headers).
        def fake_factory(global_conf, **local_conf):
            conf = global_conf.copy()
            conf.update(local_conf)
            headers = [('X-Header', 'xxx'),
                       ('X-Header', 'yyy')]

            def fake_filter(app):
                return FakeMiddleware(app, conf, headers)
            return fake_filter

        def fake_start_response(status, response_headers, exc_info=None):
            hdr_list = []
            for k, v in response_headers:
                if k == 'X-Header':
                    hdr_list.append(v)
            self.assertTrue('xxx' in hdr_list)
            self.assertTrue('yyy' in hdr_list)
            self.assertEqual(len(hdr_list), 2)

        req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method})
        fake_app = FakeApp(headers=app_hdrs)
        factory = gatekeeper.filter_factory({})
        factory_wrap = fake_factory({})
        app = factory(factory_wrap(fake_app))
        app(req.environ, fake_start_response)

    def test_duplicate_headers_not_removed(self):
        for method in self.methods:
            for app_hdrs in ({}, self.forbidden_headers_out):
                self._test_duplicate_headers_not_removed(method, app_hdrs)

    def _test_location_header(self, location_path):
        # With swift.leave_relative_location set, the gatekeeper must
        # restore a relative Location header even if a downstream app
        # rewrote it to an absolute URL.
        headers = {'Location': location_path}
        req = Request.blank(
            '/v/a/c', environ={'REQUEST_METHOD': 'GET',
                               'swift.leave_relative_location': True})

        class SelfishApp(FakeApp):
            def __call__(self, env, start_response):
                self.req = Request(env)
                resp = Response(request=self.req, body=b'FAKE APP',
                                headers=self.headers)
                # like webob, middlewares in the pipeline may rewrite
                # location header from relative to absolute
                resp.location = resp.absolute_location()
                return resp(env, start_response)

        selfish_app = SelfishApp(headers=headers)
        app = self.get_app(selfish_app, {})
        resp = req.get_response(app)
        self.assertEqual('200 OK', resp.status)
        self.assertIn('Location', resp.headers)
        self.assertEqual(resp.headers['Location'], location_path)

    def test_location_header_fixed(self):
        self._test_location_header('/v/a/c/o2')
        self._test_location_header('/v/a/c/o2?query=path&query2=doit')
        self._test_location_header('/v/a/c/o2?query=path#test')
        self._test_location_header('/v/a/c/o2;whatisparam?query=path#test')

    def test_allow_reserved_names(self):
        # X-Allow-Reserved-Names is passed through unless the middleware's
        # allow_reserved_names_header flag is set, in which case it is
        # translated to X-Backend-Allow-Reserved-Names.
        fake_app = FakeApp()
        app = self.get_app(fake_app, {})
        headers = {
            'X-Allow-Reserved-Names': 'some-value'
        }
        req = Request.blank('/v/a/c/o', method='GET', headers=headers)
        resp = req.get_response(app)
        self.assertEqual('200 OK', resp.status)
        self.assertNotIn('X-Backend-Allow-Reserved-Names',
                         fake_app.req.headers)
        self.assertIn('X-Allow-Reserved-Names',
                      fake_app.req.headers)
        self.assertEqual(
            'some-value',
            fake_app.req.headers['X-Allow-Reserved-Names'])

        app.allow_reserved_names_header = True
        req = Request.blank('/v/a/c/o', method='GET', headers=headers)
        resp = req.get_response(app)
        self.assertEqual('200 OK', resp.status)
        self.assertIn('X-Backend-Allow-Reserved-Names',
                      fake_app.req.headers)
        self.assertEqual(
            'some-value',
            fake_app.req.headers['X-Backend-Allow-Reserved-Names'])
        self.assertEqual(
            'some-value',
            req.headers['X-Backend-Allow-Reserved-Names'])
        self.assertNotIn('X-Allow-Reserved-Names', fake_app.req.headers)
        self.assertNotIn('X-Allow-Reserved-Names', req.headers)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_gatekeeper.py |
# Copyright (c) 2010-2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
from io import BytesIO
from logging.handlers import SysLogHandler
import six
from six.moves.urllib.parse import unquote
from swift.common.utils import get_logger, split_path, StatsdClient
from swift.common.middleware import proxy_logging
from swift.common.registry import register_sensitive_header, \
register_sensitive_param, get_sensitive_headers
from swift.common.swob import Request, Response
from swift.common import constraints, registry
from swift.common.storage_policy import StoragePolicy
from test.debug_logger import debug_logger
from test.unit import patch_policies
from test.unit.common.middleware.helpers import FakeAppThatExcepts
class FakeApp(object):
    """Configurable WSGI app stub for proxy_logging tests: fixed body and
    status line, optional X-Backend-Storage-Policy-Index header for
    container/object paths, and optional chunked transfer encoding."""

    def __init__(self, body=None, response_str='200 OK', policy_idx='0',
                 chunked=False):
        if body is None:
            body = [b'FAKE APP']
        elif isinstance(body, six.binary_type):
            # Wrap a bare bytes body into a one-chunk iterable.
            body = [body]
        self.body = body
        self.response_str = response_str
        self.policy_idx = policy_idx
        self.chunked = chunked

    def __call__(self, env, start_response):
        try:
            # /v1/a/c or /v1/a/c/o
            split_path(env['PATH_INFO'], 3, 4, True)
            is_container_or_object_req = True
        except ValueError:
            is_container_or_object_req = False
        headers = [('Content-Type', 'text/plain')]
        if self.chunked:
            headers.append(('Transfer-Encoding', 'chunked'))
        elif not hasattr(self.body, 'close'):
            # Plain iterable body: advertise its exact length.
            content_length = sum(map(len, self.body))
            headers.append(('Content-Length', str(content_length)))
        if is_container_or_object_req and self.policy_idx is not None:
            headers.append(('X-Backend-Storage-Policy-Index',
                            str(self.policy_idx)))
        start_response(self.response_str, headers)
        # Drain the request body in 5-byte chunks before responding.
        while env['wsgi.input'].read(5):
            pass
        return self.body
class FakeAppNoContentLengthNoTransferEncoding(object):
    """WSGI app stub that replies ``200 OK`` with *neither* a
    Content-Length nor a Transfer-Encoding header, after draining the
    request body."""

    def __init__(self, body=None):
        self.body = [b'FAKE APP'] if body is None else body

    def __call__(self, env, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        # Consume the request body in small chunks until exhausted.
        wsgi_input = env['wsgi.input']
        chunk = wsgi_input.read(5)
        while chunk:
            chunk = wsgi_input.read(5)
        return self.body
class FileLikeExceptor(object):
    """File-like stub whose every read operation raises IOError, used to
    simulate a client that goes away mid-request."""

    def __init__(self):
        pass

    def _blow_up(self):
        # Shared failure path for both read entry points.
        raise IOError('of some sort')

    def read(self, len):
        self._blow_up()

    def readline(self, len=1024):
        self._blow_up()
class FakeAppReadline(object):
    """WSGI app stub that consumes exactly one line of the request body
    via readline() and responds 200 with an 8-byte plain-text body."""

    def __call__(self, env, start_response):
        response_headers = [('Content-Type', 'text/plain'),
                            ('Content-Length', '8')]
        start_response('200 OK', response_headers)
        env['wsgi.input'].readline()
        return [b"FAKE APP"]
def start_response(*args):
    """No-op stand-in for a WSGI server's start_response callable;
    accepts and discards any arguments."""
    return None
@patch_policies([StoragePolicy(0, 'zero', False)])
class TestProxyLogging(unittest.TestCase):
def _log_parts(self, app, should_be_empty=False):
    # Return the single recorded access-log line split on spaces, or
    # assert that nothing was logged when should_be_empty is True.
    info_calls = app.access_logger.log_dict['info']
    if should_be_empty:
        self.assertEqual([], info_calls)
    else:
        self.assertEqual(1, len(info_calls))
        return info_calls[0][0][0].split(' ')

def assertTiming(self, exp_metric, app, exp_timing=None):
    # Assert a statsd 'timing' call was made for exp_metric; when
    # exp_timing is given, also check the recorded value (ms).
    timing_calls = app.access_logger.statsd_client.calls['timing']
    found = False
    for timing_call in timing_calls:
        self.assertEqual({}, timing_call[1])
        self.assertEqual(2, len(timing_call[0]))
        if timing_call[0][0] == exp_metric:
            found = True
            if exp_timing is not None:
                self.assertAlmostEqual(exp_timing, timing_call[0][1],
                                       places=4)
    if not found:
        self.fail('assertTiming: %s not found in %r' % (
            exp_metric, timing_calls))

def assertNotTiming(self, not_exp_metric, app):
    # Assert no statsd 'timing' call was made for not_exp_metric.
    timing_calls = app.access_logger.statsd_client.calls['timing']
    for timing_call in timing_calls:
        self.assertNotEqual(not_exp_metric, timing_call[0][0])

def assertUpdateStats(self, exp_metrics_and_values, app):
    # Compare all recorded 'update_stats' calls (sorted) against the
    # expected (metric, value) pairs; kwargs must always be empty.
    update_stats_calls = sorted(
        app.access_logger.statsd_client.calls['update_stats'])
    got_metrics_values_and_kwargs = [(usc[0][0], usc[0][1], usc[1])
                                     for usc in update_stats_calls]
    exp_metrics_values_and_kwargs = [(emv[0], emv[1], {})
                                     for emv in exp_metrics_and_values]
    self.assertEqual(got_metrics_values_and_kwargs,
                     exp_metrics_values_and_kwargs)
def test_logger_statsd_prefix(self):
    # Default statsd prefix is 'proxy-server.'; log_statsd_metric_prefix
    # is prepended while access_log_name/log_name are ignored.
    app = proxy_logging.ProxyLoggingMiddleware(
        FakeApp(), {'log_statsd_host': 'example.com'})
    self.assertIsNotNone(app.access_logger.logger.statsd_client)
    self.assertIsInstance(app.access_logger.logger.statsd_client,
                          StatsdClient)
    self.assertEqual('proxy-server.',
                     app.access_logger.logger.statsd_client._prefix)

    app = proxy_logging.ProxyLoggingMiddleware(
        FakeApp(), {'log_statsd_metric_prefix': 'foo',  # set base prefix
                    'access_log_name': 'bar',  # not used as tail prefix
                    'log_name': 'baz',  # not used as tail prefix
                    'log_statsd_host': 'example.com'})
    self.assertIsNotNone(app.access_logger.logger.statsd_client)
    self.assertIsInstance(app.access_logger.logger.statsd_client,
                          StatsdClient)
    self.assertEqual('foo.proxy-server.',
                     app.access_logger.logger.statsd_client._prefix)

def test_log_request_statsd_invalid_stats_types(self):
    # Non-swift paths must produce no statsd activity at all.
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    for url in ['/', '/foo', '/foo/bar', '/v1']:
        req = Request.blank(url, environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, start_response)
        # get body
        b''.join(resp)
        self.assertEqual([], app.access_logger.log_dict['timing'])
        self.assertEqual([], app.access_logger.log_dict['update_stats'])

def test_log_request_stat_type_bad(self):
    # log_request() on unparseable paths emits no timing/update_stats.
    for bad_path in ['', '/', '/bad', '/baddy/mc_badderson', '/v1',
                     '/v1/']:
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank(bad_path, environ={'REQUEST_METHOD': 'GET'})
        now = 10000.0
        app.log_request(req, 123, 7, 13, now, now + 2.71828182846)
        self.assertEqual([], app.access_logger.log_dict['timing'])
        self.assertEqual([], app.access_logger.log_dict['update_stats'])
def test_log_request_stat_type_good(self):
    """
    log_request() should send timing and byte-count counters for GET
    requests. Also, __call__()'s iter_response() function should
    statsd-log time to first byte (calling the passed-in start_response
    function), but only for GET requests.
    """
    stub_times = []

    def stub_time():
        # Pops pre-seeded timestamps so elapsed times are deterministic.
        return stub_times.pop(0)

    path_types = {
        '/v1/a': 'account',
        '/v1/a/': 'account',
        '/v1/a/c': 'container',
        '/v1/a/c/': 'container',
        '/v1/a/c/o': 'object',
        '/v1/a/c/o/': 'object',
        '/v1/a/c/o/p': 'object',
        '/v1/a/c/o/p/': 'object',
        '/v1/a/c/o/p/p2': 'object',
    }
    with mock.patch("time.time", stub_time):
        for path, exp_type in path_types.items():
            # GET
            app = proxy_logging.ProxyLoggingMiddleware(
                FakeApp(body=b'7654321', response_str='321 Fubar'), {})
            app.access_logger = debug_logger()
            req = Request.blank(path, environ={
                'REQUEST_METHOD': 'GET',
                'wsgi.input': BytesIO(b'4321')})
            # [request start, first byte, request end] timestamps
            stub_times = [18.0, 18.5, 20.71828182846]
            iter_response = app(req.environ, lambda *_: None)
            self.assertEqual(b'7654321', b''.join(iter_response))
            self.assertTiming('%s.GET.321.timing' % exp_type, app,
                              exp_timing=2.71828182846 * 1000)
            if exp_type == 'object':
                # Object operations also return stats by policy
                # In this case, the value needs to match the timing for GET
                self.assertTiming('%s.policy.0.GET.321.timing' % exp_type,
                                  app, exp_timing=2.71828182846 * 1000)
                self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                         4 + 7),
                                        ('object.policy.0.GET.321.xfer',
                                         4 + 7)],
                                       app)
            else:
                self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                         4 + 7)],
                                       app)

            # GET Repeat the test above, but with a non-existent policy
            # Do this only for object types
            if exp_type == 'object':
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body=b'7654321', response_str='321 Fubar',
                            policy_idx='-1'), {})
                app.access_logger = debug_logger()
                req = Request.blank(path, environ={
                    'REQUEST_METHOD': 'GET',
                    'wsgi.input': BytesIO(b'4321')})
                stub_times = [18.0, 18.5, 20.71828182846]
                iter_response = app(req.environ, lambda *_: None)
                self.assertEqual(b'7654321', b''.join(iter_response))
                self.assertTiming('%s.GET.321.timing' % exp_type, app,
                                  exp_timing=2.71828182846 * 1000)
                # No results returned for the non-existent policy
                self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                         4 + 7)],
                                       app)

            # GET with swift.proxy_access_log_made already set
            app = proxy_logging.ProxyLoggingMiddleware(
                FakeApp(body=b'7654321', response_str='321 Fubar'), {})
            app.access_logger = debug_logger()
            req = Request.blank(path, environ={
                'REQUEST_METHOD': 'GET',
                'swift.proxy_access_log_made': True,
                'wsgi.input': BytesIO(b'4321')})
            stub_times = [18.0, 20.71828182846]
            iter_response = app(req.environ, lambda *_: None)
            self.assertEqual(b'7654321', b''.join(iter_response))
            self.assertEqual([], app.access_logger.log_dict['timing'])
            self.assertEqual([],
                             app.access_logger.log_dict['timing_since'])
            self.assertEqual([],
                             app.access_logger.log_dict['update_stats'])

            # PUT (no first-byte timing!)
            app = proxy_logging.ProxyLoggingMiddleware(
                FakeApp(body=b'87654321', response_str='314 PiTown'), {})
            app.access_logger = debug_logger()
            req = Request.blank(path, environ={
                'REQUEST_METHOD': 'PUT',
                'wsgi.input': BytesIO(b'654321')})
            # (it's not a GET, so time() doesn't have a 2nd call)
            stub_times = [58.2, 58.2 + 7.3321]
            iter_response = app(req.environ, lambda *_: None)
            self.assertEqual(b'87654321', b''.join(iter_response))
            self.assertTiming('%s.PUT.314.timing' % exp_type, app,
                              exp_timing=7.3321 * 1000)
            self.assertNotTiming(
                '%s.GET.314.first-byte.timing' % exp_type, app)
            self.assertNotTiming(
                '%s.PUT.314.first-byte.timing' % exp_type, app)
            if exp_type == 'object':
                # Object operations also return stats by policy In this
                # case, the value needs to match the timing for PUT.
                self.assertTiming('%s.policy.0.PUT.314.timing' %
                                  exp_type, app,
                                  exp_timing=7.3321 * 1000)
                self.assertUpdateStats(
                    [('object.PUT.314.xfer', 6 + 8),
                     ('object.policy.0.PUT.314.xfer', 6 + 8)], app)
            else:
                self.assertUpdateStats(
                    [('%s.PUT.314.xfer' % exp_type, 6 + 8)], app)

            # PUT Repeat the test above, but with a non-existent policy
            # Do this only for object types
            if exp_type == 'object':
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body=b'87654321', response_str='314 PiTown',
                            policy_idx='-1'), {})
                app.access_logger = debug_logger()
                req = Request.blank(path, environ={
                    'REQUEST_METHOD': 'PUT',
                    'wsgi.input': BytesIO(b'654321')})
                # (it's not a GET, so time() doesn't have a 2nd call)
                stub_times = [58.2, 58.2 + 7.3321]
                iter_response = app(req.environ, lambda *_: None)
                self.assertEqual(b'87654321', b''.join(iter_response))
                self.assertTiming('%s.PUT.314.timing' % exp_type, app,
                                  exp_timing=7.3321 * 1000)
                self.assertNotTiming(
                    '%s.GET.314.first-byte.timing' % exp_type, app)
                self.assertNotTiming(
                    '%s.PUT.314.first-byte.timing' % exp_type, app)
                # No results returned for the non-existent policy
                self.assertUpdateStats(
                    [('object.PUT.314.xfer', 6 + 8)], app)
def test_log_request_stat_method_filtering_default(self):
    # Unknown HTTP methods are bucketed as BAD_METHOD in metric names.
    method_map = {
        'foo': 'BAD_METHOD',
        '': 'BAD_METHOD',
        'PUTT': 'BAD_METHOD',
        'SPECIAL': 'BAD_METHOD',
        'GET': 'GET',
        'PUT': 'PUT',
        'COPY': 'COPY',
        'HEAD': 'HEAD',
        'POST': 'POST',
        'DELETE': 'DELETE',
        'OPTIONS': 'OPTIONS',
    }
    for method, exp_method in method_map.items():
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/v1/a/', environ={'REQUEST_METHOD': method})
        now = 10000.0
        app.log_request(req, 299, 11, 3, now, now + 1.17)
        self.assertTiming('account.%s.299.timing' % exp_method, app,
                          exp_timing=1.17 * 1000)
        self.assertUpdateStats([('account.%s.299.xfer' % exp_method,
                                 11 + 3)], app)

def test_log_request_stat_method_filtering_custom(self):
    # A configured valid-methods list replaces the default whitelist;
    # anything not listed becomes BAD_METHOD.
    method_map = {
        'foo': 'BAD_METHOD',
        '': 'BAD_METHOD',
        'PUTT': 'BAD_METHOD',
        'SPECIAL': 'SPECIAL',  # will be configured
        'GET': 'GET',
        'PUT': 'PUT',
        'COPY': 'BAD_METHOD',  # prove no one's special
    }
    # this conf var supports optional leading access_
    for conf_key in ['access_log_statsd_valid_http_methods',
                     'log_statsd_valid_http_methods']:
        for method, exp_method in method_map.items():
            app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {
                conf_key: 'SPECIAL, GET,PUT ',  # crazy spaces ok
            })
            app.access_logger = debug_logger()
            req = Request.blank('/v1/a/c',
                                environ={'REQUEST_METHOD': method})
            now = 10000.0
            app.log_request(req, 911, 4, 43, now, now + 1.01)
            self.assertTiming('container.%s.911.timing' % exp_method, app,
                              exp_timing=1.01 * 1000)
            self.assertUpdateStats([('container.%s.911.xfer' % exp_method,
                                     4 + 43)], app)
def test_basic_req(self):
    # Default log line: check method/path/protocol/status/bytes fields.
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
    resp = app(req.environ, start_response)
    resp_body = b''.join(resp)
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[3], 'GET')
    self.assertEqual(log_parts[4], '/')
    self.assertEqual(log_parts[5], 'HTTP/1.0')
    self.assertEqual(log_parts[6], '200')
    self.assertEqual(resp_body, b'FAKE APP')
    self.assertEqual(log_parts[11], str(len(resp_body)))

def test_basic_req_second_time(self):
    # A request already marked as logged must not be logged again.
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={
        'swift.proxy_access_log_made': True,
        'REQUEST_METHOD': 'GET'})
    resp = app(req.environ, start_response)
    resp_body = b''.join(resp)
    self._log_parts(app, should_be_empty=True)
    self.assertEqual(resp_body, b'FAKE APP')

def test_log_msg_template(self):
    # Access logs configuration should override the default one
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {
        'log_anonymization_salt': 'secret_salt',
        'log_msg_template': (
            'template which can be edited in config: '
            '{protocol} {path} {method} '
            '{path.anonymized} {container.anonymized} '
            '{request_time} {start_time.datetime} {end_time} {ttfb} '
            '{domain}')})
    app.access_logger = debug_logger()
    req = Request.blank('/', headers={'Host': 'example.com'})
    with mock.patch('time.time',
                    mock.MagicMock(
                        side_effect=[10000000.0, 10000000.5, 10000001.0])):
        resp = app(req.environ, start_response)
        # exhaust generator
        resp_body = b''.join(resp)
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[0], 'template')
    self.assertEqual(log_parts[7], 'HTTP/1.0')
    self.assertEqual(log_parts[8], '/')
    self.assertEqual(log_parts[9], 'GET')
    self.assertEqual(log_parts[10],
                     '{SMD5}c65475e457fea0951fbb9ec9596b2177')
    self.assertEqual(log_parts[11], '-')
    self.assertEqual(log_parts[13], '26/Apr/1970/17/46/40')
    self.assertEqual(log_parts[14], '10000001.000000000')
    self.assertEqual(log_parts[15], '0.5')
    self.assertEqual(log_parts[16], 'example.com')
    self.assertEqual(resp_body, b'FAKE APP')
def test_log_msg_template_s3api(self):
    # Access logs configuration should override the default one
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {
        'log_msg_template': (
            '{protocol} {path} {method} '
            '{account} {container} {object}')})
    app.access_logger = debug_logger()
    req = Request.blank('/bucket/path/to/key', environ={
        'REQUEST_METHOD': 'GET',
        # This would actually get set in the app, but w/e
        'swift.backend_path': '/v1/AUTH_test/bucket/path/to/key'})
    with mock.patch("time.time", side_effect=[
            18.0, 18.5, 20.71828182846]):
        resp = app(req.environ, start_response)
        # exhaust generator
        resp_body = b''.join(resp)
    log_parts = self._log_parts(app)
    # account/container/object come from the backend path, not the URL.
    self.assertEqual(log_parts, [
        'HTTP/1.0',
        '/bucket/path/to/key',
        'GET',
        'AUTH_test',
        'bucket',
        'path/to/key',
    ])
    self.assertEqual(resp_body, b'FAKE APP')
    self.assertTiming('object.policy.0.GET.200.timing',
                      app, exp_timing=2.71828182846 * 1000)
    self.assertUpdateStats([('object.GET.200.xfer', 8),
                            ('object.policy.0.GET.200.xfer', 8)],
                           app)

def test_invalid_log_config(self):
    # Unknown template fields and hash methods must raise at init time.
    with self.assertRaises(ValueError):
        proxy_logging.ProxyLoggingMiddleware(FakeApp(), {
            'log_anonymization_salt': 'secret_salt',
            'log_msg_template': '{invalid_field}'})

    with self.assertRaises(ValueError):
        proxy_logging.ProxyLoggingMiddleware(FakeApp(), {
            'log_anonymization_method': 'invalid_hash_method',
            'log_anonymization_salt': 'secret_salt',
            'log_msg_template': '{protocol}'})

def test_multi_segment_resp(self):
    # Byte count must cover all app_iter chunks; swift.source replaces
    # the stat type in metric names.
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(
        [b'some', b'chunks', b'of data']), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
                                      'swift.source': 'SOS'})
    resp = app(req.environ, start_response)
    resp_body = b''.join(resp)
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[3], 'GET')
    self.assertEqual(log_parts[4], '/')
    self.assertEqual(log_parts[5], 'HTTP/1.0')
    self.assertEqual(log_parts[6], '200')
    self.assertEqual(resp_body, b'somechunksof data')
    self.assertEqual(log_parts[11], str(len(resp_body)))
    self.assertUpdateStats([('SOS.GET.200.xfer', len(resp_body))],
                           app)
def test_log_headers(self):
    # Both conf spellings enable request-header logging.
    for conf_key in ['access_log_headers', 'log_headers']:
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(),
                                                   {conf_key: 'yes'})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, start_response)
        # exhaust generator
        [x for x in resp]
        log_parts = self._log_parts(app)
        headers = unquote(log_parts[14]).split('\n')
        self.assertTrue('Host: localhost:80' in headers)

def test_access_log_headers_only(self):
    # access_log_headers_only restricts logging to the named headers
    # (case-insensitively); everything else is omitted.
    app = proxy_logging.ProxyLoggingMiddleware(
        FakeApp(), {'log_headers': 'yes',
                    'access_log_headers_only': 'FIRST, seCond'})
    app.access_logger = debug_logger()
    req = Request.blank('/',
                        environ={'REQUEST_METHOD': 'GET'},
                        headers={'First': '1',
                                 'Second': '2',
                                 'Third': '3'})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    headers = unquote(log_parts[14]).split('\n')
    self.assertIn('First: 1', headers)
    self.assertIn('Second: 2', headers)
    self.assertNotIn('Third: 3', headers)
    self.assertNotIn('Host: localhost:80', headers)

def test_upload_size(self):
    # Request and response byte counts are logged and summed into the
    # xfer metrics (with and without per-policy metrics).
    # Using default policy
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(),
                                               {'log_headers': 'yes'})
    app.access_logger = debug_logger()
    req = Request.blank(
        '/v1/a/c/o/foo',
        environ={'REQUEST_METHOD': 'PUT',
                 'wsgi.input': BytesIO(b'some stuff')})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[11], str(len('FAKE APP')))
    self.assertEqual(log_parts[10], str(len('some stuff')))
    self.assertUpdateStats([('object.PUT.200.xfer',
                             len('some stuff') + len('FAKE APP')),
                            ('object.policy.0.PUT.200.xfer',
                             len('some stuff') + len('FAKE APP'))],
                           app)

    # Using a non-existent policy
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(policy_idx='-1'),
                                               {'log_headers': 'yes'})
    app.access_logger = debug_logger()
    req = Request.blank(
        '/v1/a/c/o/foo',
        environ={'REQUEST_METHOD': 'PUT',
                 'wsgi.input': BytesIO(b'some stuff')})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[11], str(len('FAKE APP')))
    self.assertEqual(log_parts[10], str(len('some stuff')))
    self.assertUpdateStats([('object.PUT.200.xfer',
                             len('some stuff') + len('FAKE APP'))],
                           app)
def test_upload_size_no_policy(self):
    # With no policy index header at all, only the plain xfer metric
    # is emitted.
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(policy_idx=None),
                                               {'log_headers': 'yes'})
    app.access_logger = debug_logger()
    req = Request.blank(
        '/v1/a/c/o/foo',
        environ={'REQUEST_METHOD': 'PUT',
                 'wsgi.input': BytesIO(b'some stuff')})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[11], str(len('FAKE APP')))
    self.assertEqual(log_parts[10], str(len('some stuff')))
    self.assertUpdateStats([('object.PUT.200.xfer',
                             len('some stuff') + len('FAKE APP'))],
                           app)

def test_upload_line(self):
    # An app that only readline()s one line: logged request bytes must
    # reflect just the consumed line.
    app = proxy_logging.ProxyLoggingMiddleware(FakeAppReadline(),
                                               {'log_headers': 'yes'})
    app.access_logger = debug_logger()
    req = Request.blank(
        '/v1/a/c',
        environ={'REQUEST_METHOD': 'POST',
                 'wsgi.input': BytesIO(b'some stuff\nsome other stuff\n')})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[11], str(len('FAKE APP')))
    self.assertEqual(log_parts[10], str(len('some stuff\n')))
    self.assertUpdateStats([('container.POST.200.xfer',
                             len('some stuff\n') + len('FAKE APP'))],
                           app)

def test_log_query_string(self):
    # The query string is logged as part of the (quoted) path field.
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
                                      'QUERY_STRING': 'x=3'})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(unquote(log_parts[4]), '/?x=3')

def test_client_logging(self):
    # With no forwarding headers, client ip == REMOTE_ADDR.
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
                                      'REMOTE_ADDR': '1.2.3.4'})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[0], '1.2.3.4')  # client ip
    self.assertEqual(log_parts[1], '1.2.3.4')  # remote addr
def test_iterator_closing(self):
    # The middleware must call close() on a closeable app_iter once the
    # response body is exhausted (per the WSGI spec).
    class CloseableBody(object):
        def __init__(self):
            self.msg = b"CloseableBody"
            self.closed = False

        def close(self):
            self.closed = True

        def __iter__(self):
            return self

        def __next__(self):
            if not self.msg:
                raise StopIteration
            result, self.msg = self.msg, b''
            return result
        next = __next__  # py2

    body = CloseableBody()
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(body), {})
    req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
                                      'REMOTE_ADDR': '1.2.3.4'})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    self.assertTrue(body.closed)

def test_chunked_response(self):
    # Smoke test: a chunked (no Content-Length) response iterates cleanly.
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(chunked=True), {})
    req = Request.blank('/')
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]

def test_proxy_client_logging(self):
    # X-Forwarded-For (first hop) and X-Cluster-Client-Ip override
    # REMOTE_ADDR as the logged client ip.
    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={
        'REQUEST_METHOD': 'GET',
        'REMOTE_ADDR': '1.2.3.4',
        'HTTP_X_FORWARDED_FOR': '4.5.6.7,8.9.10.11'})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[0], '4.5.6.7')  # client ip
    self.assertEqual(log_parts[1], '1.2.3.4')  # remote addr

    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
    app.access_logger = debug_logger()
    req = Request.blank('/', environ={
        'REQUEST_METHOD': 'GET',
        'REMOTE_ADDR': '1.2.3.4',
        'HTTP_X_CLUSTER_CLIENT_IP': '4.5.6.7'})
    resp = app(req.environ, start_response)
    # exhaust generator
    [x for x in resp]
    log_parts = self._log_parts(app)
    self.assertEqual(log_parts[0], '4.5.6.7')  # client ip
    self.assertEqual(log_parts[1], '1.2.3.4')  # remote addr
    def test_facility(self):
        """The access_log_facility option selects the syslog facility."""
        app = proxy_logging.ProxyLoggingMiddleware(
            FakeApp(),
            {'log_headers': 'yes',
             'access_log_facility': 'LOG_LOCAL7'})
        handler = get_logger.handler4logger[app.access_logger.logger]
        self.assertEqual(SysLogHandler.LOG_LOCAL7, handler.facility)
def test_filter(self):
factory = proxy_logging.filter_factory({})
self.assertTrue(callable(factory))
self.assertTrue(callable(factory(FakeApp())))
    def test_sensitive_headers_registered(self):
        """Instantiating the filter registers the auth token headers as
        sensitive so they will be obscured in access logs."""
        with mock.patch.object(registry, '_sensitive_headers', set()):
            self.assertNotIn('x-auth-token', get_sensitive_headers())
            self.assertNotIn('x-storage-token', get_sensitive_headers())
            proxy_logging.filter_factory({})(FakeApp())
            self.assertIn('x-auth-token', get_sensitive_headers())
            self.assertIn('x-storage-token', get_sensitive_headers())
    def test_unread_body(self):
        """Closing the response before reading it all is logged as 499 with
        only the bytes actually written."""
        app = proxy_logging.ProxyLoggingMiddleware(
            FakeApp(['some', 'stuff']), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, start_response)
        # read first chunk
        next(resp)
        resp.close()  # raise a GeneratorExit in middleware app_iter loop
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[6], '499')
        self.assertEqual(log_parts[11], '4')  # write length
    def test_exploding_body(self):
        """An app iterator raising mid-stream logs status_int 500 while the
        wire_status_int stays 200 (headers were already sent)."""
        def exploding_body():
            yield 'some'
            yield 'stuff'
            raise Exception('kaboom!')
        app = proxy_logging.ProxyLoggingMiddleware(
            FakeApp(exploding_body()), {
                'log_msg_template': '{method} {path} '
                '{status_int} {wire_status_int}',
            })
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(app)
        with self.assertRaises(Exception) as ctx:
            resp.body
        self.assertEqual('kaboom!', str(ctx.exception))
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts, ['GET', '/', '500', '200'])
    def test_disconnect_on_readline(self):
        """A client disconnect while the app readline()s the request body is
        logged as 499 with an unknown ('-') read length."""
        app = proxy_logging.ProxyLoggingMiddleware(FakeAppReadline(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
                                          'wsgi.input': FileLikeExceptor()})
        try:
            resp = app(req.environ, start_response)
            # read body
            b''.join(resp)
        except IOError:
            pass
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[6], '499')
        self.assertEqual(log_parts[10], '-')  # read length
    def test_disconnect_on_read(self):
        """A client disconnect while the app read()s the request body is
        logged as 499 with an unknown ('-') read length."""
        app = proxy_logging.ProxyLoggingMiddleware(
            FakeApp(['some', 'stuff']), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
                                          'wsgi.input': FileLikeExceptor()})
        try:
            resp = app(req.environ, start_response)
            # read body
            b''.join(resp)
        except IOError:
            pass
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[6], '499')
        self.assertEqual(log_parts[10], '-')  # read length
    def test_app_exception(self):
        """An exception raised by the wrapped app is logged as a 500."""
        app = proxy_logging.ProxyLoggingMiddleware(
            FakeAppThatExcepts(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        try:
            app(req.environ, start_response)
        except Exception:
            pass
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[6], '500')
        self.assertEqual(log_parts[10], '-')  # read length
    def test_no_content_length_no_transfer_encoding_with_list_body(self):
        """With neither Content-Length nor Transfer-Encoding, the logged
        byte count (field 11) is the total of the chunks actually sent."""
        app = proxy_logging.ProxyLoggingMiddleware(
            FakeAppNoContentLengthNoTransferEncoding(
                # test the "while not chunk: chunk = next(iterator)"
                body=[b'', b'', b'line1\n', b'line2\n'],
            ), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, start_response)
        resp_body = b''.join(resp)
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[3], 'GET')
        self.assertEqual(log_parts[4], '/')
        self.assertEqual(log_parts[5], 'HTTP/1.0')
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(resp_body, b'line1\nline2\n')
        self.assertEqual(log_parts[11], str(len(resp_body)))
    def test_no_content_length_no_transfer_encoding_with_empty_strings(self):
        """A body made only of empty chunks logs '-' for bytes sent."""
        app = proxy_logging.ProxyLoggingMiddleware(
            FakeAppNoContentLengthNoTransferEncoding(
                # test the "while not chunk: chunk = next(iterator)"
                body=[b'', b'', b''],
            ), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, start_response)
        resp_body = b''.join(resp)
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[3], 'GET')
        self.assertEqual(log_parts[4], '/')
        self.assertEqual(log_parts[5], 'HTTP/1.0')
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(resp_body, b'')
        self.assertEqual(log_parts[11], '-')
    def test_no_content_length_no_transfer_encoding_with_generator(self):
        """Byte count is still logged when the body is only iterable."""

        class BodyGen(object):
            # Iterable (non-sized) body wrapper around a single chunk.
            def __init__(self, data):
                self.data = data

            def __iter__(self):
                yield self.data

        app = proxy_logging.ProxyLoggingMiddleware(
            FakeAppNoContentLengthNoTransferEncoding(
                body=BodyGen(b'abc'),
            ), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, start_response)
        resp_body = b''.join(resp)
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[3], 'GET')
        self.assertEqual(log_parts[4], '/')
        self.assertEqual(log_parts[5], 'HTTP/1.0')
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(resp_body, b'abc')
        self.assertEqual(log_parts[11], '3')
    def test_req_path_info_popping(self):
        """The access log records the original request path even after
        path_info_pop() has shifted PATH_INFO."""
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/v1/something', environ={'REQUEST_METHOD': 'GET'})
        req.path_info_pop()
        self.assertEqual(req.environ['PATH_INFO'], '/something')
        resp = app(req.environ, start_response)
        resp_body = b''.join(resp)
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[3], 'GET')
        self.assertEqual(log_parts[4], '/v1/something')
        self.assertEqual(log_parts[5], 'HTTP/1.0')
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(resp_body, b'FAKE APP')
        self.assertEqual(log_parts[11], str(len(resp_body)))
    def test_ipv6(self):
        """An IPv6 remote address is logged verbatim in both IP fields."""
        ipv6addr = '2001:db8:85a3:8d3:1319:8a2e:370:7348'
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        req.remote_addr = ipv6addr
        resp = app(req.environ, start_response)
        resp_body = b''.join(resp)
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[0], ipv6addr)
        self.assertEqual(log_parts[1], ipv6addr)
        self.assertEqual(log_parts[3], 'GET')
        self.assertEqual(log_parts[4], '/')
        self.assertEqual(log_parts[5], 'HTTP/1.0')
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(resp_body, b'FAKE APP')
        self.assertEqual(log_parts[11], str(len(resp_body)))
    def test_log_info_none(self):
        """A missing or empty swift.log_info logs '-' in field 17."""
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        list(app(req.environ, start_response))
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[17], '-')
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        req.environ['swift.log_info'] = []
        list(app(req.environ, start_response))
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[17], '-')
    def test_log_info_single(self):
        """A single swift.log_info entry appears as-is in field 17."""
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        req.environ['swift.log_info'] = ['one']
        list(app(req.environ, start_response))
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[17], 'one')
    def test_log_info_multiple(self):
        """Multiple swift.log_info entries are joined and quoted in
        field 17 (comma and space become %2C and %20)."""
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        req.environ['swift.log_info'] = ['one', 'and two']
        list(app(req.environ, start_response))
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[17], 'one%2Cand%20two')
def test_log_auth_token(self):
auth_token = 'b05bf940-0464-4c0e-8c70-87717d2d73e8'
with mock.patch.object(registry, '_sensitive_headers', set()):
# Default - reveal_sensitive_prefix is 16
# No x-auth-token header
app = proxy_logging.filter_factory({})(FakeApp())
app.access_logger = debug_logger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
resp = app(req.environ, start_response)
resp_body = b''.join(resp)
log_parts = self._log_parts(app)
self.assertEqual(log_parts[9], '-')
# Has x-auth-token header
app = proxy_logging.filter_factory({})(FakeApp())
app.access_logger = debug_logger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'HTTP_X_AUTH_TOKEN': auth_token})
resp = app(req.environ, start_response)
resp_body = b''.join(resp)
log_parts = self._log_parts(app)
self.assertEqual(log_parts[9], 'b05bf940-0464-4c...', log_parts)
# Truncate to first 8 characters
app = proxy_logging.filter_factory(
{'reveal_sensitive_prefix': '8'})(FakeApp())
app.access_logger = debug_logger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
resp = app(req.environ, start_response)
resp_body = b''.join(resp)
log_parts = self._log_parts(app)
self.assertEqual(log_parts[9], '-')
app = proxy_logging.filter_factory(
{'reveal_sensitive_prefix': '8'})(FakeApp())
app.access_logger = debug_logger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'HTTP_X_AUTH_TOKEN': auth_token})
resp = app(req.environ, start_response)
resp_body = b''.join(resp)
log_parts = self._log_parts(app)
self.assertEqual(log_parts[9], 'b05bf940...')
# Token length and reveal_sensitive_prefix are same (no truncate)
app = proxy_logging.filter_factory(
{'reveal_sensitive_prefix': str(len(auth_token))})(FakeApp())
app.access_logger = debug_logger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'HTTP_X_AUTH_TOKEN': auth_token})
resp = app(req.environ, start_response)
resp_body = b''.join(resp)
log_parts = self._log_parts(app)
self.assertEqual(log_parts[9], auth_token)
# No effective limit on auth token
app = proxy_logging.filter_factory(
{'reveal_sensitive_prefix': constraints.MAX_HEADER_SIZE}
)(FakeApp())
app.access_logger = debug_logger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'HTTP_X_AUTH_TOKEN': auth_token})
resp = app(req.environ, start_response)
resp_body = b''.join(resp)
log_parts = self._log_parts(app)
self.assertEqual(log_parts[9], auth_token)
# Don't log x-auth-token
app = proxy_logging.filter_factory(
{'reveal_sensitive_prefix': '0'})(FakeApp())
app.access_logger = debug_logger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
resp = app(req.environ, start_response)
resp_body = b''.join(resp)
log_parts = self._log_parts(app)
self.assertEqual(log_parts[9], '-')
app = proxy_logging.filter_factory(
{'reveal_sensitive_prefix': '0'})(FakeApp())
app.access_logger = debug_logger()
req = Request.blank('/', environ={'REQUEST_METHOD': 'GET',
'HTTP_X_AUTH_TOKEN': auth_token})
resp = app(req.environ, start_response)
resp_body = b''.join(resp)
log_parts = self._log_parts(app)
self.assertEqual(log_parts[9], '...')
# Avoids pyflakes error, "local variable 'resp_body' is assigned to
# but never used
self.assertTrue(resp_body is not None)
    def test_ensure_fields(self):
        """The default access log line always contains exactly 21 fields,
        with '-' placeholders for anything unknown."""
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        # Fixed clock so the timestamp/duration fields are deterministic.
        with mock.patch('time.time',
                        mock.MagicMock(
                            side_effect=[10000000.0, 10000000.5, 10000001.0])):
            resp = app(req.environ, start_response)
            resp_body = b''.join(resp)
        log_parts = self._log_parts(app)
        self.assertEqual(len(log_parts), 21)
        self.assertEqual(log_parts[0], '-')
        self.assertEqual(log_parts[1], '-')
        self.assertEqual(log_parts[2], '26/Apr/1970/17/46/41')
        self.assertEqual(log_parts[3], 'GET')
        self.assertEqual(log_parts[4], '/')
        self.assertEqual(log_parts[5], 'HTTP/1.0')
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(log_parts[7], '-')
        self.assertEqual(log_parts[8], '-')
        self.assertEqual(log_parts[9], '-')
        self.assertEqual(log_parts[10], '-')
        self.assertEqual(resp_body, b'FAKE APP')
        self.assertEqual(log_parts[11], str(len(resp_body)))
        self.assertEqual(log_parts[12], '-')
        self.assertEqual(log_parts[13], '-')
        self.assertEqual(log_parts[14], '-')
        self.assertEqual(log_parts[15], '1.0000')
        self.assertEqual(log_parts[16], '-')
        self.assertEqual(log_parts[17], '-')
        self.assertEqual(log_parts[18], '10000000.000000000')
        self.assertEqual(log_parts[19], '10000001.000000000')
        self.assertEqual(log_parts[20], '-')
    def test_dual_logging_middlewares(self):
        """With two stacked proxy_logging middlewares and no internal
        request, only the outermost middleware logs."""
        # Since no internal request is being made, outer most proxy logging
        # middleware, log1, should have performed the logging.
        app = FakeApp()
        flg0 = debug_logger()
        env = {}
        log0 = proxy_logging.ProxyLoggingMiddleware(app, env, logger=flg0)
        flg1 = debug_logger()
        log1 = proxy_logging.ProxyLoggingMiddleware(log0, env, logger=flg1)
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = log1(req.environ, start_response)
        resp_body = b''.join(resp)
        self._log_parts(log0, should_be_empty=True)
        log_parts = self._log_parts(log1)
        self.assertEqual(log_parts[3], 'GET')
        self.assertEqual(log_parts[4], '/')
        self.assertEqual(log_parts[5], 'HTTP/1.0')
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(resp_body, b'FAKE APP')
        self.assertEqual(log_parts[11], str(len(resp_body)))
    def test_dual_logging_middlewares_w_inner(self):
        """When a middleware makes its own internal request, the inner
        proxy_logging logs the app response and the outer logs the
        middleware's replacement response."""

        class FakeMiddleware(object):
            """
            Fake middleware to make a separate internal request, but construct
            the response with different data.
            """
            def __init__(self, app, conf):
                self.app = app
                self.conf = conf

            def GET(self, req):
                # Make the internal request
                ireq = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
                resp = self.app(ireq.environ, start_response)
                resp_body = b''.join(resp)
                if resp_body != b'FAKE APP':
                    return Response(request=req,
                                    body=b"FAKE APP WAS NOT RETURNED",
                                    content_type="text/plain")
                # But our response is different
                return Response(request=req, body=b"FAKE MIDDLEWARE",
                                content_type="text/plain")

            def __call__(self, env, start_response):
                req = Request(env)
                return self.GET(req)(env, start_response)

        # Since an internal request is being made, inner most proxy logging
        # middleware, log0, should have performed the logging.
        app = FakeApp()
        flg0 = debug_logger()
        env = {}
        log0 = proxy_logging.ProxyLoggingMiddleware(app, env, logger=flg0)
        fake = FakeMiddleware(log0, env)
        flg1 = debug_logger()
        log1 = proxy_logging.ProxyLoggingMiddleware(fake, env, logger=flg1)
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = log1(req.environ, start_response)
        resp_body = b''.join(resp)
        # Inner most logger should have logged the app's response
        log_parts = self._log_parts(log0)
        self.assertEqual(log_parts[3], 'GET')
        self.assertEqual(log_parts[4], '/')
        self.assertEqual(log_parts[5], 'HTTP/1.0')
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(log_parts[11], str(len('FAKE APP')))
        # Outer most logger should have logged the other middleware's response
        log_parts = self._log_parts(log1)
        self.assertEqual(log_parts[3], 'GET')
        self.assertEqual(log_parts[4], '/')
        self.assertEqual(log_parts[5], 'HTTP/1.0')
        self.assertEqual(log_parts[6], '200')
        self.assertEqual(resp_body, b'FAKE MIDDLEWARE')
        self.assertEqual(log_parts[11], str(len(resp_body)))
    def test_policy_index(self):
        """Policy index (field 20) is taken from the request header for the
        object API, or from the response header for the container API."""
        # Policy index can be specified by X-Backend-Storage-Policy-Index
        # in the request header for object API
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(policy_idx='1'), {})
        app.access_logger = debug_logger()
        req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'})
        resp = app(req.environ, start_response)
        b''.join(resp)
        log_parts = self._log_parts(app)
        self.assertEqual(log_parts[20], '1')
        # Policy index can be specified by X-Backend-Storage-Policy-Index
        # in the response header for container API
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'GET'})

        def fake_call(app, env, start_response):
            # Fake container response carrying the policy index header.
            start_response(app.response_str,
                           [('Content-Type', 'text/plain'),
                            ('Content-Length', str(sum(map(len, app.body)))),
                            ('X-Backend-Storage-Policy-Index', '1')])
            while env['wsgi.input'].read(5):
                pass
            return app.body
        with mock.patch.object(FakeApp, '__call__', fake_call):
            resp = app(req.environ, start_response)
            b''.join(resp)
            log_parts = self._log_parts(app)
            self.assertEqual(log_parts[20], '1')
    def test_obscure_req(self):
        """obscure_req truncates only registered sensitive query params and
        headers (headers case-insensitively, params case-sensitively) and
        leaves everything else untouched."""
        app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
        app.access_logger = debug_logger()
        params = [('param_one',
                   'some_long_string_that_might_need_to_be_obscured'),
                  ('param_two',
                   "super_secure_param_that_needs_to_be_obscured")]
        headers = {'X-Auth-Token': 'this_is_my_auth_token',
                   'X-Other-Header': 'another_header_that_we_may_obscure'}
        req = Request.blank('a/c/o', environ={'REQUEST_METHOD': 'GET'},
                            headers=headers)
        req.params = params
        # if nothing is sensitive, nothing will be obscured
        with mock.patch.object(registry, '_sensitive_params', set()):
            with mock.patch.object(registry, '_sensitive_headers', set()):
                app.obscure_req(req)
        # show that nothing changed
        for header, expected_value in headers.items():
            self.assertEqual(req.headers[header], expected_value)
        for param, expected_value in params:
            self.assertEqual(req.params[param], expected_value)
        # If an obscured param or header doesn't exist in a req, that's fine
        with mock.patch.object(registry, '_sensitive_params', set()):
            with mock.patch.object(registry, '_sensitive_headers', set()):
                register_sensitive_header('X-Not-Exist')
                register_sensitive_param('non-existent-param')
                app.obscure_req(req)
        # show that nothing changed
        for header, expected_value in headers.items():
            self.assertEqual(req.headers[header], expected_value)
        for param, expected_value in params:
            self.assertEqual(req.params[param], expected_value)

        def obscured_test(params, headers, params_to_add, headers_to_add,
                          expected_params, expected_headers):
            # Run obscure_req with a fresh registry containing only the
            # given sensitive names, then verify the resulting request.
            with mock.patch.object(registry, '_sensitive_params', set()):
                with mock.patch.object(registry, '_sensitive_headers', set()):
                    app = proxy_logging.ProxyLoggingMiddleware(FakeApp(), {})
                    app.access_logger = debug_logger()
                    req = Request.blank('a/c/o',
                                        environ={'REQUEST_METHOD': 'GET'},
                                        headers=dict(headers))
                    req.params = params
                    for param in params_to_add:
                        register_sensitive_param(param)
                    for header in headers_to_add:
                        register_sensitive_header(header)
                    app.obscure_req(req)
                    for header, expected_value in expected_headers.items():
                        self.assertEqual(req.headers[header], expected_value)
                    for param, expected_value in expected_params:
                        self.assertEqual(req.params[param], expected_value)
        # first just 1 param
        expected_params = list(params)
        expected_params[0] = ('param_one', 'some_long_string...')
        obscured_test(params, headers, ['param_one'], [], expected_params,
                      headers)
        # case sensitive
        expected_params = list(params)
        obscured_test(params, headers, ['Param_one'], [], expected_params,
                      headers)
        # Other param
        expected_params = list(params)
        expected_params[1] = ('param_two', 'super_secure_par...')
        obscured_test(params, headers, ['param_two'], [], expected_params,
                      headers)
        # both
        expected_params[0] = ('param_one', 'some_long_string...')
        obscured_test(params, headers, ['param_two', 'param_one'], [],
                      expected_params, headers)
        # Now the headers
        # first just 1 header
        expected_headers = headers.copy()
        expected_headers["X-Auth-Token"] = 'this_is_my_auth_...'
        obscured_test(params, headers, [], ['X-Auth-Token'], params,
                      expected_headers)
        # case insensitive
        obscured_test(params, headers, [], ['x-auth-token'], params,
                      expected_headers)
        # Other headers
        expected_headers = headers.copy()
        expected_headers["X-Other-Header"] = 'another_header_t...'
        obscured_test(params, headers, [], ['X-Other-Header'], params,
                      expected_headers)
        # both
        expected_headers["X-Auth-Token"] = 'this_is_my_auth_...'
        obscured_test(params, headers, [], ['X-Auth-Token', 'X-Other-Header'],
                      params, expected_headers)
        # all together
        obscured_test(params, headers, ['param_two', 'param_one'],
                      ['X-Auth-Token', 'X-Other-Header'],
                      expected_params, expected_headers)
| swift-master | test/unit/common/middleware/test_proxy_logging.py |
# Copyright (c) 2011-2014 Greg Holt
# Copyright (c) 2012-2013 Peter Portante
# Copyright (c) 2012 Iryoung Jeong
# Copyright (c) 2012 Michael Barton
# Copyright (c) 2013 Alex Gaynor
# Copyright (c) 2013 Chuck Thier
# Copyright (c) 2013 David Goetz
# Copyright (c) 2015 Donagh McCabe
# Copyright (c) 2013 Greg Lange
# Copyright (c) 2013 John Dickinson
# Copyright (c) 2013 Kun Huang
# Copyright (c) 2013 Richard Hawkins
# Copyright (c) 2013 Samuel Merritt
# Copyright (c) 2013 Shri Javadekar
# Copyright (c) 2013 Tong Li
# Copyright (c) 2013 ZhiQiang Fan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import hmac
import itertools
import mock
import unittest
import hashlib
import six
from six.moves.urllib.parse import quote
from time import time, strftime, gmtime
from swift.common.middleware import tempauth, tempurl, proxy_logging
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.swob import Request, Response
from swift.common import utils, registry
from test.debug_logger import debug_logger
class FakeApp(object):
    """Minimal WSGI stub that replays canned (status, headers, body) tuples.

    If no iterator is supplied, every request is answered with a 404 that
    carries a fixed set of test headers. A 'swift.authorize' callback found
    in the environ is honored, mirroring how auth middleware can
    short-circuit a request.
    """

    def __init__(self, status_headers_body_iter=None):
        self.calls = 0
        self.request = None
        if status_headers_body_iter:
            self.status_headers_body_iter = status_headers_body_iter
        else:
            canned = ('404 Not Found',
                      {'x-test-header-one-a': 'value1',
                       'x-test-header-two-a': 'value2',
                       'x-test-header-two-b': 'value3'},
                      '')
            self.status_headers_body_iter = iter(itertools.repeat(canned))

    def __call__(self, env, start_response):
        self.calls += 1
        self.request = Request.blank('', environ=env)
        if 'swift.authorize' in env:
            denial = env['swift.authorize'](self.request)
            if denial:
                return denial(env, start_response)
        status, headers, body = next(self.status_headers_body_iter)
        resp = Response(status=status, headers=headers, body=body)
        return resp(env, start_response)
class TestTempURL(unittest.TestCase):
    def setUp(self):
        """Build a tempauth + tempurl pipeline over a FakeApp backend."""
        self.app = FakeApp()
        self.auth = tempauth.filter_factory({'reseller_prefix': ''})(self.app)
        self.tempurl = tempurl.filter_factory({})(self.auth)
        self.logger = self.tempurl.logger = debug_logger()
    def _make_request(self, path, environ=None, keys=(), container_keys=None,
                      **kwargs):
        """Build a Request for *path* with fake account/container key caches
        injected into the environ."""
        if environ is None:
            environ = {}
        _junk, account, _junk, _junk = utils.split_path(path, 2, 4, True)
        self._fake_cache_environ(environ, account, keys,
                                 container_keys=container_keys)
        req = Request.blank(path, environ=environ, **kwargs)
        return req
    def _fake_cache_environ(self, environ, account, keys, container_keys=None):
        """
        Fake out the caching layer for get_account_info(). Injects account data
        into environ such that keys are the tempurl keys, if set.
        Optionally also injects container metadata keys for container 'c'.
        """
        meta = {'swash': 'buckle'}
        for idx, key in enumerate(keys):
            # First key is 'Temp-URL-key'; later ones get a '-N' suffix.
            meta_name = 'Temp-URL-key' + (("-%d" % (idx + 1) if idx else ""))
            if key:
                meta[meta_name] = key
        ic = environ.setdefault('swift.infocache', {})
        ic['account/' + account] = {
            'status': 204,
            'container_count': '0',
            'total_object_count': '0',
            'bytes': '0',
            'meta': meta}
        meta = {}
        for i, key in enumerate(container_keys or []):
            meta_name = 'Temp-URL-key' + (("-%d" % (i + 1) if i else ""))
            meta[meta_name] = key
        container_cache_key = 'container/' + account + '/c'
        # setdefault: don't clobber an entry a test injected beforehand
        ic.setdefault(container_cache_key, {'meta': meta})
def test_passthrough(self):
resp = self._make_request('/v1/a/c/o').get_response(self.tempurl)
self.assertEqual(resp.status_int, 401)
self.assertNotIn(b'Temp URL invalid', resp.body)
    def test_allow_options(self):
        """OPTIONS requests pass through without signature validation."""
        self.app.status_headers_body_iter = iter([('200 Ok', {}, '')])
        resp = self._make_request(
            '/v1/a/c/o?temp_url_sig=abcde&temp_url_expires=12345',
            environ={'REQUEST_METHOD': 'OPTIONS'}).get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
    def assert_valid_sig(self, expires, path, keys, sig, environ=None,
                         prefix=None):
        """GET *path* with the given sig/expiry and assert a successful
        tempurl response: 200, attachment disposition, matching Expires
        header and the tempurl authorize override set."""
        if not environ:
            environ = {}
        if six.PY3 and isinstance(sig, six.binary_type):
            sig = sig.decode('utf-8')
        # '+' in base64 sigs must be urlencoded to survive query parsing
        environ['QUERY_STRING'] = 'temp_url_sig=%s&temp_url_expires=%s' % (
            sig.replace('+', '%2B'), expires)
        if prefix is not None:
            environ['QUERY_STRING'] += '&temp_url_prefix=%s' % prefix
        req = self._make_request(path, keys=keys, environ=environ)
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['content-disposition'],
                         'attachment; filename="o"; ' + "filename*=UTF-8''o")
        self.assertEqual(resp.headers['expires'],
                         strftime('%a, %d %b %Y %H:%M:%S GMT',
                                  gmtime(expires)))
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_get_valid(self):
        """Valid hex (sha1/sha256) and prefixed-base64 (sha256/sha512) sigs
        all validate, and each digest use is counted in statsd."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha1).hexdigest()
        self.assert_valid_sig(expires, path, [key], sig)
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        self.assert_valid_sig(expires, path, [key], sig)
        sig = base64.b64encode(hmac.new(
            key, hmac_body, hashlib.sha256).digest())
        self.assert_valid_sig(expires, path, [key], b'sha256:' + sig)
        sig = base64.b64encode(hmac.new(
            key, hmac_body, hashlib.sha512).digest())
        self.assert_valid_sig(expires, path, [key], b'sha512:' + sig)
        self.assertEqual(self.logger.statsd_client.get_increment_counts(), {
            'tempurl.digests.sha1': 1,
            'tempurl.digests.sha256': 2,
            'tempurl.digests.sha512': 1
        })
def test_get_valid_key2(self):
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key1 = b'abc123'
key2 = b'def456'
hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
sig1 = hmac.new(key1, hmac_body, hashlib.sha256).hexdigest()
sig2 = hmac.new(key2, hmac_body, hashlib.sha256).hexdigest()
for sig in (sig1, sig2):
self.assert_valid_sig(expires, path, [key1, key2], sig)
    def test_get_valid_container_keys(self):
        """Keys stored as container metadata also validate signatures, even
        with no account-level keys configured."""
        ic = {}
        environ = {'swift.infocache': ic}
        # Add two static container keys
        container_keys = ['me', 'other']
        meta = {}
        for idx, key in enumerate(container_keys):
            meta_name = 'Temp-URL-key' + (("-%d" % (idx + 1) if idx else ""))
            if key:
                meta[meta_name] = key
        ic['container/a/c'] = {'meta': meta}
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key1 = b'me'
        key2 = b'other'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig1 = hmac.new(key1, hmac_body, hashlib.sha256).hexdigest()
        sig2 = hmac.new(key2, hmac_body, hashlib.sha256).hexdigest()
        account_keys = []
        for sig in (sig1, sig2):
            self.assert_valid_sig(expires, path, account_keys, sig, environ)
    def test_signature_trim(self):
        """Only a truncated prefix of the signature appears in proxy logs."""
        # Insert proxy logging into the pipeline
        p_logging = proxy_logging.filter_factory({})(self.app)
        self.auth = tempauth.filter_factory({
            'reseller_prefix': ''})(p_logging)
        self.tempurl = tempurl.filter_factory({
            'allowed_digests': 'sha1'})(self.auth)
        # valid sig should be exactly 40 hex chars
        sig = 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeef'
        expires = int(time() + 1000)
        p_logging.access_logger.logger = debug_logger('fake')
        resp = self._make_request(
            '/v1/a/c/o?temp_url_sig=%s&temp_url_expires=%d' % (sig, expires))
        # Force the middleware to accept our literal sig so the request
        # makes it far enough to be logged.
        with mock.patch('swift.common.middleware.tempurl.TempURL._get_keys',
                        return_value=[('key', tempurl.CONTAINER_SCOPE)]):
            with mock.patch(
                    'swift.common.middleware.tempurl.TempURL._get_hmacs',
                    return_value=[(sig, tempurl.CONTAINER_SCOPE)]):
                resp.get_response(self.tempurl)
        trimmed_sig_qs = '%s...' % sig[:16]
        info_lines = p_logging.access_logger. \
            logger.get_lines_for_level('info')
        self.assertIn(trimmed_sig_qs, info_lines[0])
    @mock.patch('swift.common.middleware.tempurl.time', return_value=0)
    def test_get_valid_with_filename(self, mock_time):
        """The filename= query param drives the Content-Disposition header,
        with percent-encoding preserved in the filename* form."""
        method = 'GET'
        expires = (((24 + 1) * 60 + 1) * 60) + 1
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(path, keys=[key], environ={
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
            'filename=bob%%20%%22killer%%22.txt' % (sig, expires)})
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['content-disposition'],
                         'attachment; filename="bob %22killer%22.txt"; ' +
                         "filename*=UTF-8''bob%20%22killer%22.txt")
        self.assertIn('expires', resp.headers)
        self.assertEqual('Fri, 02 Jan 1970 01:01:01 GMT',
                         resp.headers['expires'])
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_head_valid_with_filename(self):
        """HEAD responses also carry the filename Content-Disposition."""
        method = 'HEAD'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(path, keys=[key], environ={
            'REQUEST_METHOD': 'HEAD',
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
            'filename=bob_killer.txt' % (sig, expires)})
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['content-disposition'],
                         'attachment; filename="bob_killer.txt"; ' +
                         "filename*=UTF-8''bob_killer.txt")
    def test_head_and_get_headers_match(self):
        """HEAD and GET for the same object return identical headers."""
        method = 'HEAD'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(path, keys=[key], environ={
            'REQUEST_METHOD': 'HEAD',
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s'
            % (sig, expires)})
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        resp = req.get_response(self.tempurl)
        get_method = 'GET'
        get_hmac_body = ('%s\n%i\n%s' %
                         (get_method, expires, path)).encode('utf-8')
        get_sig = hmac.new(key, get_hmac_body, hashlib.sha256).hexdigest()
        get_req = self._make_request(path, keys=[key], environ={
            'REQUEST_METHOD': 'GET',
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s'
            % (get_sig, expires)})
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        get_resp = get_req.get_response(self.tempurl)
        self.assertEqual(resp.headers, get_resp.headers)
    @mock.patch('swift.common.middleware.tempurl.time', return_value=0)
    def test_get_valid_with_filename_and_inline(self, mock_time):
        """inline= together with filename= yields an inline disposition
        that still names the download."""
        method = 'GET'
        expires = 1
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(path, keys=[key], environ={
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
            'filename=bob%%20%%22killer%%22.txt&inline=' % (sig, expires)})
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['content-disposition'],
                         'inline; filename="bob %22killer%22.txt"; ' +
                         "filename*=UTF-8''bob%20%22killer%22.txt")
        self.assertIn('expires', resp.headers)
        self.assertEqual('Thu, 01 Jan 1970 00:00:01 GMT',
                         resp.headers['expires'])
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_get_valid_with_inline(self):
        """inline= alone produces a bare 'inline' Content-Disposition."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(path, keys=[key], environ={
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
            'inline=' % (sig, expires)})
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['content-disposition'], 'inline')
        self.assertIn('expires', resp.headers)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_get_valid_with_prefix(self):
        """A prefix-scoped signature ('prefix:<path>') validates any object
        path under that prefix, including deeper sub-paths."""
        method = 'GET'
        expires = int(time() + 86400)
        prefix = 'p1/p2/'
        # The signed path uses the 'prefix:' sentinel; the request path is a
        # concrete object under that prefix.
        sig_path = 'prefix:/v1/a/c/' + prefix
        query_path = '/v1/a/c/' + prefix + 'o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' %
                     (method, expires, sig_path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        self.assert_valid_sig(expires, query_path, [key], sig, prefix=prefix)

        # The same signature also covers a deeper path below the prefix.
        query_path = query_path[:-1] + 'p3/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' %
                     (method, expires, sig_path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        self.assert_valid_sig(expires, query_path, [key], sig, prefix=prefix)
    def test_get_valid_with_prefix_empty(self):
        """An empty prefix signature ('prefix:/v1/a/c/') matches any object
        in the container."""
        method = 'GET'
        expires = int(time() + 86400)
        sig_path = 'prefix:/v1/a/c/'
        query_path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' %
                     (method, expires, sig_path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        self.assert_valid_sig(expires, query_path, [key], sig, prefix='')
    def test_obj_odd_chars(self):
        """Control characters in the object name are percent-escaped in the
        generated Content-Disposition filename."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/a\r\nb'
        key = b'abc'
        # Sign the raw (unquoted) path; the request uses the quoted form.
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(quote(path), keys=[key], environ={
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
        # CR/LF appear as %0D%0A in both filename forms.
        self.assertEqual(resp.headers['content-disposition'],
                         'attachment; filename="a%0D%0Ab"; ' +
                         "filename*=UTF-8''a%0D%0Ab")
        self.assertIn('expires', resp.headers)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_obj_odd_chars_in_content_disposition_metadata(self):
        """Control characters in a backend-supplied Content-Disposition
        header are percent-escaped before being returned to the client."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(path, keys=[key], environ={
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        # The backend response carries a disposition with an embedded newline.
        headers = [('Content-Disposition', 'attachment; filename="fu\nbar"')]
        self.tempurl.app = FakeApp(iter([('200 Ok', headers, '123')]))
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['content-disposition'],
                         'attachment; filename="fu%0Abar"')
        self.assertIn('expires', resp.headers)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_obj_trailing_slash(self):
        """For an object path ending in '/', the derived download filename is
        the last non-empty path segment."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o/'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(path, keys=[key], environ={
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['content-disposition'],
                         'attachment; filename="o"; ' +
                         "filename*=UTF-8''o")
        self.assertIn('expires', resp.headers)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_filename_trailing_slash(self):
        """An explicit filename= parameter is used verbatim, even when it
        contains slashes or ends with one."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(path, keys=[key], environ={
            'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
            'filename=/i/want/this/just/as/it/is/' % (sig, expires)})
        self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(
            resp.headers['content-disposition'],
            'attachment; filename="/i/want/this/just/as/it/is/"; ' +
            "filename*=UTF-8''/i/want/this/just/as/it/is/")
        self.assertIn('expires', resp.headers)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_get_valid_but_404(self):
        """A valid signature still authorizes the request even when the
        backend 404s; disposition/expires headers are not added on error."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        # No FakeApp is installed, so the default app (404) answers.
        req = self._make_request(
            path, keys=[key],
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn('content-disposition', resp.headers)
        self.assertNotIn('expires', resp.headers)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_put_not_allowed_by_get(self):
        """A signature generated for GET must not authorize a PUT request."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'PUT',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_put_valid(self):
        """A PUT-signed tempurl authorizes a PUT request (backend 404 shows
        the middleware let the request through)."""
        method = 'PUT'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'PUT',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_put_response_headers_in_list(self):
        """TempURL must call start_response with headers as a mutable list
        (not a tuple/iterator), as downstream WSGI middleware may expect."""
        class Validator(object):
            # Wraps an app and captures what it passes to start_response so
            # the headers' type can be inspected after the request completes.
            def __init__(self, app):
                self.app = app
                self.status = None
                self.headers = None
                self.exc_info = None

            def start_response(self, status, headers, exc_info=None):
                self.status = status
                self.headers = headers
                self.exc_info = exc_info

            def __call__(self, env, start_response):
                resp_iter = self.app(env, self.start_response)
                # Forward the captured values to the real start_response so
                # the response is still produced normally.
                start_response(self.status, self.headers, self.exc_info)
                return resp_iter

        method = 'PUT'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'PUT',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        validator = Validator(self.tempurl)
        resp = req.get_response(validator)
        self.assertIsInstance(validator.headers, list)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_get_not_allowed_by_put(self):
        """A signature generated for PUT must not authorize a GET request."""
        method = 'PUT'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        # Request method defaults to GET in _make_request.
        req = self._make_request(
            path, keys=[key],
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
def test_missing_sig(self):
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = b'abc'
hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(
path, keys=[key],
environ={'QUERY_STRING': 'temp_url_expires=%s' % expires})
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 401)
self.assertIn(b'Temp URL invalid', resp.body)
self.assertIn('Www-Authenticate', resp.headers)
    def test_missing_expires(self):
        """A request with temp_url_sig but no temp_url_expires is rejected."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'QUERY_STRING': 'temp_url_sig=%s' % sig})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_bad_path(self):
        """A container-level path (no object segment) is rejected even with
        an otherwise valid signature."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_no_key(self):
        """With no tempurl keys configured on the account, every signed
        request is rejected."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        # keys=[] means the account has no keys to validate against.
        req = self._make_request(
            path, keys=[],
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_head_allowed_by_get(self):
        """A GET-signed tempurl also authorizes a HEAD request."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'HEAD',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_head_allowed_by_put(self):
        """A PUT-signed tempurl also authorizes a HEAD request."""
        method = 'PUT'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'HEAD',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_head_allowed_by_post(self):
        """A POST-signed tempurl also authorizes a HEAD request."""
        method = 'POST'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'HEAD',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(req.environ['swift.authorize_override'], True)
        self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
    def test_head_otherwise_not_allowed(self):
        """HEAD requests are not blanket-allowed: with an invalid signature
        they are still rejected."""
        method = 'PUT'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        # Deliberately fudge expires to show HEADs aren't just automatically
        # allowed.
        expires += 1
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'HEAD',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_post_when_forbidden_by_config(self):
        """Removing POST from the configured methods list rejects a validly
        POST-signed request."""
        self.tempurl.conf['methods'].remove('POST')
        method = 'POST'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'POST',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_delete_when_forbidden_by_config(self):
        """Removing DELETE from the configured methods list rejects a validly
        DELETE-signed request."""
        self.tempurl.conf['methods'].remove('DELETE')
        method = 'DELETE'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'DELETE',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_delete_allowed(self):
        """DELETE is allowed by default configuration when validly signed."""
        method = 'DELETE'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'DELETE',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
    def test_unknown_not_allowed(self):
        """An unrecognized HTTP method is rejected even when the signature
        was generated for that method."""
        method = 'UNKNOWN'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'REQUEST_METHOD': 'UNKNOWN',
                     'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                         sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_authorize_limits_scope(self):
        """The swift.authorize callback installed by TempURL limits the
        pre-authorized scope to the account or container level depending on
        which key validated the signature."""
        req_other_object = Request.blank("/v1/a/c/o2")
        req_other_container = Request.blank("/v1/a/c2/o2")
        req_other_account = Request.blank("/v1/a2/c2/o2")

        key_kwargs = {
            'keys': ['account-key', 'shared-key'],
            'container_keys': ['container-key', 'shared-key'],
        }

        # A request with the account key limits the pre-authed scope to the
        # account level.
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(b'account-key', hmac_body, hashlib.sha256).hexdigest()
        qs = '?temp_url_sig=%s&temp_url_expires=%s' % (sig, expires)

        # make request will setup the environ cache for us
        req = self._make_request(path + qs, **key_kwargs)
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)  # sanity check

        authorize = req.environ['swift.authorize']
        # Requests for other objects happen if, for example, you're
        # downloading a large object or creating a large-object manifest.
        oo_resp = authorize(req_other_object)
        self.assertIsNone(oo_resp)
        oc_resp = authorize(req_other_container)
        self.assertIsNone(oc_resp)
        oa_resp = authorize(req_other_account)
        self.assertEqual(oa_resp.status_int, 401)

        # A request with the container key limits the pre-authed scope to
        # the container level; a different container in the same account is
        # out of scope and thus forbidden.
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(b'container-key', hmac_body, hashlib.sha256).hexdigest()
        qs = '?temp_url_sig=%s&temp_url_expires=%s' % (sig, expires)

        req = self._make_request(path + qs, **key_kwargs)
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)  # sanity check

        authorize = req.environ['swift.authorize']
        oo_resp = authorize(req_other_object)
        self.assertIsNone(oo_resp)
        oc_resp = authorize(req_other_container)
        self.assertEqual(oc_resp.status_int, 401)
        oa_resp = authorize(req_other_account)
        self.assertEqual(oa_resp.status_int, 401)

        # If account and container share a key (users set these, so this can
        # happen by accident, stupidity, *or* malice!), limit the scope to
        # account level. This prevents someone from shrinking the scope of
        # account-level tempurls by reusing one of the account's keys on a
        # container.
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(b'shared-key', hmac_body, hashlib.sha256).hexdigest()
        qs = '?temp_url_sig=%s&temp_url_expires=%s' % (sig, expires)

        req = self._make_request(path + qs, **key_kwargs)
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)  # sanity check

        authorize = req.environ['swift.authorize']
        oo_resp = authorize(req_other_object)
        self.assertIsNone(oo_resp)
        oc_resp = authorize(req_other_container)
        self.assertIsNone(oc_resp)
        oa_resp = authorize(req_other_account)
        self.assertEqual(oa_resp.status_int, 401)
    def test_changed_path_invalid(self):
        """Requesting a different path than the one signed is rejected."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        # Request path differs from the signed path by one character.
        req = self._make_request(
            path + '2', keys=[key],
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_changed_sig_invalid(self):
        """Tampering with a single character of the signature is rejected."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        # Flip the final hex digit, guaranteeing a changed (invalid) sig.
        if sig[-1] != '0':
            sig = sig[:-1] + '0'
        else:
            sig = sig[:-1] + '1'
        req = self._make_request(
            path, keys=[key],
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_changed_expires_invalid(self):
        """Sending a different expiry than the one signed is rejected."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        # Query carries expires + 1 while the sig covers expires.
        req = self._make_request(
            path, keys=[key],
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires + 1)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_ip_range_value_error(self):
        """A temp_url_ip_range value that is not a parseable IP/network is
        rejected with 401."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        ip = '127.0.0.1'
        not_an_ip = 'abcd'

        hmac_body = ('ip=%s\n%s\n%i\n%s' %
                     (ip, method, expires, path)).encode('utf-8')
        # NOTE(review): sha1 here, unlike the sha256 used elsewhere in this
        # file; the request is rejected for the bad ip_range regardless, so
        # the digest choice shouldn't matter — confirm against the
        # middleware's allowed_digests config.
        sig = hmac.new(key, hmac_body, hashlib.sha1).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={
                'QUERY_STRING':
                    'temp_url_sig=%s&temp_url_expires=%s&temp_url_ip_range=%s'
                    % (sig, expires, not_an_ip),
                'REMOTE_ADDR': '127.0.0.1'
            },
        )
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_bad_ip_range_invalid(self):
        """A request from an address outside the signed temp_url_ip_range is
        rejected with 401."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        ip = '127.0.0.1'
        bad_ip = '127.0.0.2'

        hmac_body = ('ip=%s\n%s\n%i\n%s' %
                     (ip, method, expires, path)).encode('utf-8')
        # NOTE(review): sha1 digest here vs. sha256 elsewhere in this file;
        # the REMOTE_ADDR mismatch alone forces rejection — confirm.
        sig = hmac.new(key, hmac_body, hashlib.sha1).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={
                'QUERY_STRING':
                    'temp_url_sig=%s&temp_url_expires=%s&temp_url_ip_range=%s'
                    % (sig, expires, ip),
                'REMOTE_ADDR': bad_ip
            },
        )
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
def test_different_key_invalid(self):
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = b'abc'
hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(
path, keys=[key + b'2'],
environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
sig, expires)})
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 401)
self.assertTrue(b'Temp URL invalid' in resp.body)
self.assertTrue('Www-Authenticate' in resp.headers)
def test_no_prefix_match_invalid(self):
method = 'GET'
expires = int(time() + 86400)
sig_path = 'prefix:/v1/a/c/p1/p2/'
query_path = '/v1/a/c/o'
key = b'abc'
hmac_body = ('%s\n%i\n%s' %
(method, expires, sig_path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(
query_path, keys=[key],
environ={'QUERY_STRING':
'temp_url_sig=%s&temp_url_expires=%s&temp_url_prefix=%s' %
(sig, expires, 'p1/p2/')})
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 401)
self.assertTrue(b'Temp URL invalid' in resp.body)
self.assertTrue('Www-Authenticate' in resp.headers)
    def test_object_url_with_prefix_invalid(self):
        """A signature over a plain object path must not validate when the
        request also carries a temp_url_prefix parameter."""
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' %
                     (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'QUERY_STRING':
                     'temp_url_sig=%s&temp_url_expires=%s&temp_url_prefix=o' %
                     (sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b'Temp URL invalid', resp.body)
        self.assertIn('Www-Authenticate', resp.headers)
    def test_disallowed_header_object_manifest(self):
        """PUT/POST via tempurl must reject X-Object-Manifest and
        X-Symlink-Target headers with a 400 naming the offending header."""
        self.tempurl = tempurl.filter_factory({})(self.auth)
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        for method in ('PUT', 'POST'):
            for hdr, value in [('X-Object-Manifest', 'private/secret'),
                               ('X-Symlink-Target', 'cont/symlink')]:
                hmac_body = ('%s\n%i\n%s' %
                             (method, expires, path)).encode('utf-8')
                sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
                req = self._make_request(
                    path, method=method, keys=[key],
                    headers={hdr: value},
                    environ={'QUERY_STRING':
                             'temp_url_sig=%s&temp_url_expires=%s'
                             % (sig, expires)})
                resp = req.get_response(self.tempurl)
                self.assertEqual(resp.status_int, 400)
                self.assertIn(b'header', resp.body)
                self.assertIn(b'not allowed', resp.body)
                # Error message must identify which header was disallowed.
                self.assertIn(hdr.encode('utf-8'), resp.body)
    def test_removed_incoming_header(self):
        """A header listed in incoming_remove_headers is stripped from the
        request before it reaches the backing app."""
        self.tempurl = tempurl.filter_factory({
            'incoming_remove_headers': 'x-remove-this'})(self.auth)
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            headers={'x-remove-this': 'value'},
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn('x-remove-this', self.app.request.headers)
    def test_removed_incoming_headers_match(self):
        """A wildcard incoming_remove_headers pattern strips matching request
        headers except those listed in incoming_allow_headers."""
        self.tempurl = tempurl.filter_factory({
            'incoming_remove_headers': 'x-remove-this-*',
            'incoming_allow_headers': 'x-remove-this-except-this'})(self.auth)
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            headers={'x-remove-this-one': 'value1',
                     'x-remove-this-except-this': 'value2'},
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
        # Matched-and-not-allowed header is gone; allowed header survives.
        self.assertNotIn('x-remove-this-one', self.app.request.headers)
        self.assertEqual(
            self.app.request.headers['x-remove-this-except-this'], 'value2')
def test_allow_trumps_incoming_header_conflict(self):
self.tempurl = tempurl.filter_factory({
'incoming_remove_headers': 'x-conflict-header',
'incoming_allow_headers': 'x-conflict-header'})(self.auth)
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = b'abc'
hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(
path, keys=[key],
headers={'x-conflict-header': 'value'},
environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
sig, expires)})
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 404)
self.assertTrue('x-conflict-header' in self.app.request.headers)
def test_allow_trumps_incoming_header_startswith_conflict(self):
self.tempurl = tempurl.filter_factory({
'incoming_remove_headers': 'x-conflict-header-*',
'incoming_allow_headers': 'x-conflict-header-*'})(self.auth)
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = b'abc'
hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(
path, keys=[key],
headers={'x-conflict-header-test': 'value'},
environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
sig, expires)})
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 404)
self.assertTrue('x-conflict-header-test' in self.app.request.headers)
    def test_removed_outgoing_header(self):
        """A header listed in outgoing_remove_headers is stripped from the
        response; unlisted headers pass through."""
        self.tempurl = tempurl.filter_factory({
            'outgoing_remove_headers': 'x-test-header-one-a'})(self.auth)
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
        # x-test-header-* values come from the default app set up elsewhere
        # in this test case.
        self.assertNotIn('x-test-header-one-a', resp.headers)
        self.assertEqual(resp.headers['x-test-header-two-a'], 'value2')
    def test_removed_outgoing_headers_match(self):
        """A wildcard outgoing_remove_headers pattern strips matching
        response headers except those in outgoing_allow_headers."""
        self.tempurl = tempurl.filter_factory({
            'outgoing_remove_headers': 'x-test-header-two-*',
            'outgoing_allow_headers': 'x-test-header-two-b'})(self.auth)
        method = 'GET'
        expires = int(time() + 86400)
        path = '/v1/a/c/o'
        key = b'abc'
        hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
        sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
        req = self._make_request(
            path, keys=[key],
            environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                sig, expires)})
        resp = req.get_response(self.tempurl)
        self.assertEqual(resp.status_int, 404)
        # Unmatched header passes; matched-not-allowed is removed; matched
        # but explicitly allowed survives.
        self.assertEqual(resp.headers['x-test-header-one-a'], 'value1')
        self.assertNotIn('x-test-header-two-a', resp.headers)
        self.assertEqual(resp.headers['x-test-header-two-b'], 'value3')
def test_allow_trumps_outgoing_header_conflict(self):
self.tempurl = tempurl.filter_factory({
'outgoing_remove_headers': 'x-conflict-header',
'outgoing_allow_headers': 'x-conflict-header'})(self.auth)
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = b'abc'
hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(
path, keys=[key],
headers={},
environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
sig, expires)})
self.tempurl.app = FakeApp(iter([('200 Ok', {
'X-Conflict-Header': 'value'}, '123')]))
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 200)
self.assertTrue('x-conflict-header' in resp.headers)
self.assertEqual(resp.headers['x-conflict-header'], 'value')
def test_allow_trumps_outgoing_header_startswith_conflict(self):
self.tempurl = tempurl.filter_factory({
'outgoing_remove_headers': 'x-conflict-header-*',
'outgoing_allow_headers': 'x-conflict-header-*'})(self.auth)
method = 'GET'
expires = int(time() + 86400)
path = '/v1/a/c/o'
key = b'abc'
hmac_body = ('%s\n%i\n%s' % (method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(
path, keys=[key],
headers={},
environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
sig, expires)})
self.tempurl.app = FakeApp(iter([('200 Ok', {
'X-Conflict-Header-Test': 'value'}, '123')]))
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 200)
self.assertTrue('x-conflict-header-test' in resp.headers)
self.assertEqual(resp.headers['x-conflict-header-test'], 'value')
def test_get_path_parts(self):
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'HEAD', 'PATH_INFO': '/v1/a/c/o'}),
('a', 'c', 'o'))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c/o'}),
('a', 'c', 'o'))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'PUT', 'PATH_INFO': '/v1/a/c/o'}),
('a', 'c', 'o'))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'POST', 'PATH_INFO': '/v1/a/c/o'}),
('a', 'c', 'o'))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'DELETE', 'PATH_INFO': '/v1/a/c/o'}),
('a', 'c', 'o'))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'UNKNOWN', 'PATH_INFO': '/v1/a/c/o'}),
(None, None, None))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c/'}),
(None, None, None))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c//////'}),
(None, None, None))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c///o///'}),
('a', 'c', '//o///'))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c'}),
(None, None, None))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a//o'}),
(None, None, None))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1//c/o'}),
(None, None, None))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'GET', 'PATH_INFO': '//a/c/o'}),
(None, None, None))
self.assertEqual(self.tempurl._get_path_parts({
'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v2/a/c/o'}),
(None, None, None))
    def test_get_temp_url_info(self):
        """TempURL._get_temp_url_info() parses the query string into
        (sig, expires, prefix, filename, inline, ip_range), accepting both
        epoch-seconds and ISO 8601 expiry forms and normalizing bad or past
        expiries to 0."""
        s = 'f5d5051bddf5df7e27c628818738334f'
        e_ts = int(time() + 86400)
        e_8601 = strftime(tempurl.EXPIRES_ISO8601_FORMAT, gmtime(e_ts))
        # Both expiry representations parse to the same epoch value.
        for e in (e_ts, e_8601):
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                        s, e)}),
                (s, e_ts, None, None, None, None))
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING':
                     'temp_url_sig=%s&temp_url_expires=%s&temp_url_prefix=%s'
                     % (s, e, 'prefix')}),
                (s, e_ts, 'prefix', None, None, None))
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
                     'filename=bobisyouruncle' % (s, e)}),
                (s, e_ts, None, 'bobisyouruncle', None, None))
            # Missing pieces come back as None.
            self.assertEqual(
                self.tempurl._get_temp_url_info({}),
                (None, None, None, None, None, None))
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING': 'temp_url_expires=%s' % e}),
                (None, e_ts, None, None, None, None))
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING': 'temp_url_sig=%s' % s}),
                (s, None, None, None, None, None))
            # Unparseable expiry is normalized to 0.
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=bad' % (
                        s)}),
                (s, 0, None, None, None, None))
            # Bare 'inline=' flag parses to True.
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
                     'inline=' % (s, e)}),
                (s, e_ts, None, None, True, None))
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
                     'filename=bobisyouruncle&inline=' % (s, e)}),
                (s, e_ts, None, 'bobisyouruncle', True, None))
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
                     'filename=bobisyouruncle&inline='
                     '&temp_url_ip_range=127.0.0.1' % (s, e)}),
                (s, e_ts, None, 'bobisyouruncle', True, '127.0.0.1'))

        # Already-expired timestamps are normalized to 0 in both forms.
        e_ts = int(time() - 1)
        e_8601 = strftime(tempurl.EXPIRES_ISO8601_FORMAT, gmtime(e_ts))
        for e in (e_ts, e_8601):
            self.assertEqual(
                self.tempurl._get_temp_url_info(
                    {'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                        s, e)}),
                (s, 0, None, None, None, None))
        # Offsets not supported (yet?).
        e_8601 = strftime('%Y-%m-%dT%H:%M:%S+0000', gmtime(e_ts))
        self.assertEqual(
            self.tempurl._get_temp_url_info(
                {'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % (
                    s, e_8601)}),
            (s, 0, None, None, None, None))
def test_get_hmacs(self):
# _get_hmacs signs "<method>\n<expires>\n<path>" (prefixed with
# "ip=<range>\n" when ip_range is given -- see the hmac_body built in
# the ip-range tests below) with each (key, scope) pair using the named
# digest.  The hex strings are fixed vectors for key 'abc', expires 1,
# path '/v1/a/c/o'.
self.assertEqual(
self.tempurl._get_hmacs(
{'REQUEST_METHOD': 'GET'}, 1, '/v1/a/c/o',
[('abc', 'account')], 'sha1'),
[('026d7f7cc25256450423c7ad03fc9f5ffc1dab6d', 'account')])
# request_method overrides the environ's method, so a HEAD request can
# be checked against a GET signature.
self.assertEqual(
self.tempurl._get_hmacs(
{'REQUEST_METHOD': 'HEAD'}, 1, '/v1/a/c/o',
[('abc', 'account')], 'sha512', request_method='GET'),
[('240866478d94bbe683ab1d25fba52c7d0df21a60951'
'4fe6a493dc30f951d2748abc51da0cbc633cd1e0acf'
'6fadd3af3aedff00ee3d3434dc6a4c423e74adfc4a', 'account')])
# Adding ip_range changes the signed message, hence a different digest.
self.assertEqual(
self.tempurl._get_hmacs(
{'REQUEST_METHOD': 'HEAD'}, 1, '/v1/a/c/o',
[('abc', 'account')], 'sha512', request_method='GET',
ip_range='127.0.0.1'
),
[('b713f99a66911cdf41dbcdff16db3efbd1ca89340a20'
'86cc2ed88f0d3a74c7159e7687a312b12345d3721b7b'
'94e36c2753d7cc01e9a91cc318c5081d788f2cfe', 'account')])
def test_invalid(self):
    """_invalid responds 401 with an explanatory body (empty for HEAD)."""

    def _start_response(status, headers, exc_info=None):
        # BUG FIX: the original ``assertTrue(status, '401 Unauthorized')``
        # passed the expected value as the *message* argument, so it only
        # checked that status was truthy.  Compare the actual value.
        self.assertEqual(status, '401 Unauthorized')

    self.assertIn(b'Temp URL invalid', b''.join(
        self.tempurl._invalid({'REQUEST_METHOD': 'GET'},
                              _start_response)))
    # BUG FIX: ``assertIn(b'', ...)`` is vacuously true for any bytes.
    # HEAD responses must carry no body at all, so pin it to empty.
    self.assertEqual(b'', b''.join(
        self.tempurl._invalid({'REQUEST_METHOD': 'HEAD'},
                              _start_response)))
def test_auth_scheme_value(self):
# Passthrough
# With no tempurl query parameters the middleware does not reject the
# request itself: the 401 comes from further down the pipeline, so the
# body must NOT contain TempURL's "Temp URL invalid" text.
environ = {}
resp = self._make_request('/v1/a/c/o', environ=environ).get_response(
self.tempurl)
self.assertEqual(resp.status_int, 401)
self.assertNotIn(b'Temp URL invalid', resp.body)
self.assertIn('Www-Authenticate', resp.headers)
self.assertNotIn('swift.auth_scheme', environ)
# Rejected by TempURL
# A present-but-bogus signature is rejected by TempURL directly: the
# 401 carries the "Temp URL invalid" body and a Www-Authenticate
# challenge.
environ = {'REQUEST_METHOD': 'PUT',
'QUERY_STRING':
'temp_url_sig=dummy&temp_url_expires=1234'}
req = self._make_request('/v1/a/c/o', keys=['abc'],
environ=environ)
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 401)
self.assertIn(b'Temp URL invalid', resp.body)
self.assertIn('Www-Authenticate', resp.headers)
def test_clean_incoming_headers(self):
    """_clean_incoming_headers drops request headers that match an
    incoming_remove_headers pattern unless an incoming_allow_headers
    pattern (a trailing '*' matches any suffix) rescues them.
    """
    cases = [
        # (remove patterns, allow patterns, environ, kept keys, dropped)
        ([], [],
         {'HTTP_TEST_HEADER': 'value'},
         ['HTTP_TEST_HEADER'], []),
        (['test-header'], [],
         {'HTTP_TEST_HEADER': 'value'},
         [], ['HTTP_TEST_HEADER']),
        (['test-header-*'], [],
         {'HTTP_TEST_HEADER_ONE': 'value',
          'HTTP_TEST_HEADER_TWO': 'value'},
         [], ['HTTP_TEST_HEADER_ONE', 'HTTP_TEST_HEADER_TWO']),
        (['test-header-*'], ['test-header-two'],
         {'HTTP_TEST_HEADER_ONE': 'value',
          'HTTP_TEST_HEADER_TWO': 'value'},
         ['HTTP_TEST_HEADER_TWO'], ['HTTP_TEST_HEADER_ONE']),
        (['test-header-*', 'test-other-header'],
         ['test-header-two', 'test-header-yes-*'],
         {'HTTP_TEST_HEADER_ONE': 'value',
          'HTTP_TEST_HEADER_TWO': 'value',
          'HTTP_TEST_OTHER_HEADER': 'value',
          'HTTP_TEST_HEADER_YES': 'value',
          'HTTP_TEST_HEADER_YES_THIS': 'value'},
         ['HTTP_TEST_HEADER_TWO', 'HTTP_TEST_HEADER_YES_THIS'],
         ['HTTP_TEST_HEADER_ONE', 'HTTP_TEST_OTHER_HEADER',
          'HTTP_TEST_HEADER_YES']),
    ]
    for remove, allow, env, kept, dropped in cases:
        tempurl.TempURL(
            None, {'incoming_remove_headers': remove,
                   'incoming_allow_headers': allow}
        )._clean_incoming_headers(env)
        for key in kept:
            self.assertIn(key, env)
        for key in dropped:
            self.assertNotIn(key, env)
def test_clean_outgoing_headers(self):
    """_clean_outgoing_headers filters response headers the same way the
    incoming side does: remove patterns drop headers unless an allow
    pattern (a trailing '*' matches any suffix) rescues them.
    """
    cases = [
        # (remove patterns, allow patterns, headers, kept, dropped)
        ([], [], {'test-header': 'value'},
         ['test-header'], []),
        (['test-header'], [], {'test-header': 'value'},
         [], ['test-header']),
        (['test-header-*'], [],
         {'test-header-one': 'value', 'test-header-two': 'value'},
         [], ['test-header-one', 'test-header-two']),
        (['test-header-*'], ['test-header-two'],
         {'test-header-one': 'value', 'test-header-two': 'value'},
         ['test-header-two'], ['test-header-one']),
        (['test-header-*', 'test-other-header'],
         ['test-header-two', 'test-header-yes-*'],
         {'test-header-one': 'value',
          'test-header-two': 'value',
          'test-other-header': 'value',
          'test-header-yes': 'value',
          'test-header-yes-this': 'value'},
         ['test-header-two', 'test-header-yes-this'],
         ['test-header-one', 'test-other-header', 'test-header-yes']),
    ]
    for remove, allow, headers, kept, dropped in cases:
        cleaned = HeaderKeyDict(tempurl.TempURL(
            None,
            {'outgoing_remove_headers': remove,
             'outgoing_allow_headers': allow}
        )._clean_outgoing_headers(headers.items()))
        for name in kept:
            self.assertIn(name, cleaned)
        for name in dropped:
            self.assertNotIn(name, cleaned)
def test_unicode_metadata_value(self):
    """Keys pulled out of account metadata always come back as str."""
    meta = {"temp-url-key": "test", "temp-url-key-2": u"test2"}
    for key in tempurl.get_tempurl_keys_from_metadata(meta):
        self.assertIsInstance(key, str)
@mock.patch('swift.common.middleware.tempurl.time', return_value=0)
def test_get_valid_with_ip_range(self, mock_time):
# A signature covering "ip=<cidr>" is honored when REMOTE_ADDR falls
# inside the CIDR.  time is pinned to 0, so the expires value of
# ((24+1)*60+1)*60+1 seconds renders as 1970-01-02 01:01:01.
method = 'GET'
expires = (((24 + 1) * 60 + 1) * 60) + 1
path = '/v1/a/c/o'
key = b'abc'
ip_range = '127.0.0.0/29'
hmac_body = ('ip=%s\n%s\n%i\n%s' %
(ip_range, method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(path, keys=[key], environ={
'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
'temp_url_ip_range=%s' % (sig, expires, ip_range),
'REMOTE_ADDR': '127.0.0.1'},
)
self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 200)
self.assertIn('expires', resp.headers)
self.assertEqual('Fri, 02 Jan 1970 01:01:01 GMT',
resp.headers['expires'])
# A validated tempurl bypasses normal auth and tags the request.
self.assertEqual(req.environ['swift.authorize_override'], True)
self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
@mock.patch('swift.common.middleware.tempurl.time', return_value=0)
def test_get_valid_with_ip_from_remote_addr(self, mock_time):
# Same as the CIDR test above but with a single host address: the
# signed ip range and REMOTE_ADDR are identical, so validation passes.
method = 'GET'
expires = (((24 + 1) * 60 + 1) * 60) + 1
path = '/v1/a/c/o'
key = b'abc'
ip = '127.0.0.1'
hmac_body = ('ip=%s\n%s\n%i\n%s' %
(ip, method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(path, keys=[key], environ={
'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
'temp_url_ip_range=%s' % (sig, expires, ip),
'REMOTE_ADDR': ip},
)
self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 200)
self.assertIn('expires', resp.headers)
self.assertEqual('Fri, 02 Jan 1970 01:01:01 GMT',
resp.headers['expires'])
self.assertEqual(req.environ['swift.authorize_override'], True)
self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
def test_get_valid_with_fake_ip_from_x_forwarded_for(self):
# Only REMOTE_ADDR counts for ip-range checks: a client cannot spoof
# its way into the signed range via X-Forwarded-For, so this must 401.
method = 'GET'
expires = (((24 + 1) * 60 + 1) * 60) + 1
path = '/v1/a/c/o'
key = b'abc'
ip = '127.0.0.1'
remote_addr = '127.0.0.2'
hmac_body = ('ip=%s\n%s\n%i\n%s' %
(ip, method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha1).hexdigest()
req = self._make_request(path, keys=[key], environ={
'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
'temp_url_ip_range=%s' % (sig, expires, ip),
'REMOTE_ADDR': remote_addr},
headers={'x-forwarded-for': ip})
self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 401)
self.assertIn(b'Temp URL invalid', resp.body)
self.assertIn('Www-Authenticate', resp.headers)
@mock.patch('swift.common.middleware.tempurl.time', return_value=0)
def test_get_valid_with_single_ipv6(self, mock_time):
# ip-range validation also works for a bare IPv6 address.
method = 'GET'
expires = (((24 + 1) * 60 + 1) * 60) + 1
path = '/v1/a/c/o'
key = b'abc'
ip = '2001:db8::'
hmac_body = ('ip=%s\n%s\n%i\n%s' %
(ip, method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(path, keys=[key], environ={
'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
'temp_url_ip_range=%s' % (sig, expires, ip),
'REMOTE_ADDR': '2001:db8::'},
)
self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 200)
self.assertIn('expires', resp.headers)
self.assertEqual('Fri, 02 Jan 1970 01:01:01 GMT',
resp.headers['expires'])
self.assertEqual(req.environ['swift.authorize_override'], True)
self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
@mock.patch('swift.common.middleware.tempurl.time', return_value=0)
def test_get_valid_with_ipv6_range(self, mock_time):
# ...and for an IPv6 CIDR that contains REMOTE_ADDR.
method = 'GET'
expires = (((24 + 1) * 60 + 1) * 60) + 1
path = '/v1/a/c/o'
key = b'abc'
ip_range = '2001:db8::/127'
hmac_body = ('ip=%s\n%s\n%i\n%s' %
(ip_range, method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha256).hexdigest()
req = self._make_request(path, keys=[key], environ={
'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
'temp_url_ip_range=%s' % (sig, expires, ip_range),
'REMOTE_ADDR': '2001:db8::'},
)
self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 200)
self.assertIn('expires', resp.headers)
self.assertEqual('Fri, 02 Jan 1970 01:01:01 GMT',
resp.headers['expires'])
self.assertEqual(req.environ['swift.authorize_override'], True)
self.assertEqual(req.environ['REMOTE_USER'], '.wsgi.tempurl')
def test_get_valid_with_no_client_address(self):
# Without REMOTE_ADDR in the environ, an ip-restricted tempurl can
# never validate, so the request is rejected.
# NOTE(review): unlike the other ip tests, hmac_body here lacks the
# 'ip=' prefix (and uses sha1); the request is expected to fail either
# way, so the signature's exact form is immaterial -- confirm intent.
method = 'GET'
expires = (((24 + 1) * 60 + 1) * 60) + 1
path = '/v1/a/c/o'
key = b'abc'
ip = '127.0.0.1'
hmac_body = ('%s\n%s\n%i\n%s' %
(ip, method, expires, path)).encode('utf-8')
sig = hmac.new(key, hmac_body, hashlib.sha1).hexdigest()
req = self._make_request(path, keys=[key], environ={
'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s&'
'temp_url_ip_range=%s' % (sig, expires, ip)},
)
self.tempurl.app = FakeApp(iter([('200 Ok', (), '123')]))
resp = req.get_response(self.tempurl)
self.assertEqual(resp.status_int, 401)
self.assertIn(b'Temp URL invalid', resp.body)
self.assertIn('Www-Authenticate', resp.headers)
class TestSwiftInfo(unittest.TestCase):
# Checks what tempurl.filter_factory registers in /info.
def setUp(self):
# The registry is module-global; reset it so tests don't see each
# other's registrations.
registry._swift_info = {}
registry._swift_admin_info = {}
def test_registered_defaults(self):
tempurl.filter_factory({})
swift_info = registry.get_swift_info()
self.assertIn('tempurl', swift_info)
info = swift_info['tempurl']
self.assertEqual(set(info['methods']),
set(('GET', 'HEAD', 'PUT', 'POST', 'DELETE')))
self.assertEqual(set(info['incoming_remove_headers']),
set(('x-timestamp',)))
self.assertEqual(set(info['incoming_allow_headers']), set())
self.assertEqual(set(info['outgoing_remove_headers']),
set(('x-object-meta-*',)))
self.assertEqual(set(info['outgoing_allow_headers']),
set(('x-object-meta-public-*',)))
self.assertEqual(info['allowed_digests'], ['sha1', 'sha256', 'sha512'])
self.assertEqual(info['deprecated_digests'], ['sha1'])
def test_non_default_methods(self):
# Unknown digests (md5, not-a-valid-digest) are silently dropped from
# the registered list; sha1 survives but remains deprecated.
tempurl.filter_factory({
'methods': 'GET HEAD PUT DELETE BREW',
'incoming_remove_headers': '',
'incoming_allow_headers': 'x-timestamp x-versions-location',
'outgoing_remove_headers': 'x-*',
'outgoing_allow_headers': 'x-object-meta-* content-type',
'allowed_digests': 'sha1 sha512 md5 not-a-valid-digest',
})
swift_info = registry.get_swift_info()
self.assertIn('tempurl', swift_info)
info = swift_info['tempurl']
self.assertEqual(set(info['methods']),
set(('GET', 'HEAD', 'PUT', 'DELETE', 'BREW')))
self.assertEqual(set(info['incoming_remove_headers']), set())
self.assertEqual(set(info['incoming_allow_headers']),
set(('x-timestamp', 'x-versions-location')))
self.assertEqual(set(info['outgoing_remove_headers']), set(('x-*', )))
self.assertEqual(set(info['outgoing_allow_headers']),
set(('x-object-meta-*', 'content-type')))
self.assertEqual(info['allowed_digests'], ['sha1', 'sha512'])
self.assertEqual(info['deprecated_digests'], ['sha1'])
def test_no_deprecated_digests(self):
# With only non-deprecated digests configured, the
# 'deprecated_digests' key is omitted entirely.
tempurl.filter_factory({'allowed_digests': 'sha256 sha512'})
swift_info = registry.get_swift_info()
self.assertIn('tempurl', swift_info)
info = swift_info['tempurl']
self.assertEqual(set(info['methods']),
set(('GET', 'HEAD', 'PUT', 'POST', 'DELETE')))
self.assertEqual(set(info['incoming_remove_headers']),
set(('x-timestamp',)))
self.assertEqual(set(info['incoming_allow_headers']), set())
self.assertEqual(set(info['outgoing_remove_headers']),
set(('x-object-meta-*',)))
self.assertEqual(set(info['outgoing_allow_headers']),
set(('x-object-meta-public-*',)))
self.assertEqual(info['allowed_digests'], ['sha256', 'sha512'])
self.assertNotIn('deprecated_digests', info)
def test_bad_config(self):
# A config whose digests are *all* unsupported is a hard error.
with self.assertRaises(ValueError):
tempurl.filter_factory({
'allowed_digests': 'md4',
})
class TestTempurlWarning(unittest.TestCase):
    """Loading the filter with a deprecated digest must log a warning."""

    def test_deprecation_warning(self):
        fake_logger = debug_logger()
        with mock.patch('swift.common.middleware.tempurl.get_logger',
                        lambda *a, **kw: fake_logger):
            tempurl.filter_factory({'allowed_digests': 'sha1'})
        expected = ('The following digest algorithms are configured but '
                    'deprecated: sha1. Support will be removed in a '
                    'future release.')
        self.assertIn(expected,
                      fake_logger.get_lines_for_level('warning'))
if __name__ == '__main__':
unittest.main()
| swift-master | test/unit/common/middleware/test_tempurl.py |
# Copyright (c) 2010-2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
from swift.common.middleware import read_only
from swift.common.swob import Request
from test.debug_logger import debug_logger
class FakeApp(object):
    """Minimal WSGI backend: every request gets a 200 and a fixed body."""

    body = [b'Some Content']

    def __call__(self, env, start_response):
        start_response('200 OK', [])
        return list(self.body)
def start_response(*args):
    """No-op start_response stand-in for driving the middleware directly."""
    return None
# Methods the middleware always passes through vs. the ones it blocks
# when the cluster or account is read-only.
read_methods = 'GET HEAD'.split()
write_methods = 'COPY DELETE POST PUT'.split()
# Body the middleware returns when it refuses a write.
ro_resp = [b'Writes are disabled for this account.']
class TestReadOnly(unittest.TestCase):
    """Exercise ReadOnlyMiddleware for global and per-account settings.

    Refactored: every test previously repeated the middleware
    construction and the Request.blank/ro(...) drive sequence, and the
    assertEqual argument order flip-flopped between (resp, expected) and
    (expected, resp).  Both are normalized via the two helpers below;
    the assertions themselves are unchanged.
    """

    def _make_ro(self, conf):
        """Build the middleware under test with a debug logger attached."""
        ro = read_only.filter_factory(conf)(FakeApp())
        ro.logger = debug_logger()
        return ro

    def _call(self, ro, method, path='/v1/a', headers=None):
        """Drive the middleware as a WSGI app; return the response body."""
        req = Request.blank(path, headers=headers)
        req.method = method
        return ro(req.environ, start_response)

    def test_global_read_only_off(self):
        ro = self._make_ro({'read_only': 'false'})
        with mock.patch('swift.common.middleware.read_only.get_info',
                        return_value={}):
            for method in read_methods + write_methods:
                self.assertEqual(self._call(ro, method), [b'Some Content'])

    def test_global_read_only_on(self):
        ro = self._make_ro({'read_only': 'true'})
        with mock.patch('swift.common.middleware.read_only.get_info',
                        return_value={}):
            for method in read_methods:
                self.assertEqual(self._call(ro, method), [b'Some Content'])
            for method in write_methods:
                self.assertEqual(self._call(ro, method), ro_resp)

    def test_account_read_only_on(self):
        ro = self._make_ro({})
        with mock.patch('swift.common.middleware.read_only.get_info',
                        return_value={'sysmeta': {'read-only': 'true'}}):
            for method in read_methods:
                self.assertEqual(self._call(ro, method), [b'Some Content'])
            for method in write_methods:
                self.assertEqual(self._call(ro, method), ro_resp)

    def test_account_read_only_off(self):
        ro = self._make_ro({})
        with mock.patch('swift.common.middleware.read_only.get_info',
                        return_value={'sysmeta': {'read-only': 'false'}}):
            for method in read_methods + write_methods:
                self.assertEqual(self._call(ro, method), [b'Some Content'])

    def test_global_read_only_on_account_off(self):
        # Per-account sysmeta overrides the global read_only flag.
        ro = self._make_ro({'read_only': 'true'})
        with mock.patch('swift.common.middleware.read_only.get_info',
                        return_value={'sysmeta': {'read-only': 'false'}}):
            for method in read_methods + write_methods:
                self.assertEqual(self._call(ro, method), [b'Some Content'])

    def test_global_read_only_on_allow_deletes(self):
        ro = self._make_ro({'read_only': 'true', 'allow_deletes': 'true'})
        with mock.patch('swift.common.middleware.read_only.get_info',
                        return_value={}):
            self.assertEqual(self._call(ro, 'DELETE'), [b'Some Content'])

    def test_account_read_only_on_allow_deletes(self):
        ro = self._make_ro({'allow_deletes': 'true'})
        with mock.patch('swift.common.middleware.read_only.get_info',
                        return_value={'sysmeta': {'read-only': 'on'}}):
            self.assertEqual(self._call(ro, 'DELETE'), [b'Some Content'])

    def test_global_read_only_on_destination_account_off_on_copy(self):
        # COPY consults the *destination* account's read-only setting.
        ro = self._make_ro({'read_only': 'true'})

        def get_fake_read_only(*args, **kwargs):
            if 'b' in args:
                return {'sysmeta': {'read-only': 'false'}}
            return {}

        with mock.patch('swift.common.middleware.read_only.get_info',
                        get_fake_read_only):
            resp = self._call(ro, 'COPY',
                              headers={'Destination-Account': 'b'})
            self.assertEqual(resp, [b'Some Content'])

    def test_global_read_only_off_destination_account_on_on_copy(self):
        ro = self._make_ro({})

        def get_fake_read_only(*args, **kwargs):
            if 'b' in args:
                return {'sysmeta': {'read-only': 'true'}}
            return {}

        with mock.patch('swift.common.middleware.read_only.get_info',
                        get_fake_read_only):
            resp = self._call(ro, 'COPY',
                              headers={'Destination-Account': 'b'})
            self.assertEqual(resp, ro_resp)

    def test_global_read_only_off_src_acct_on_dest_acct_off_on_copy(self):
        # Source account being read-only must not block a COPY whose
        # destination account is writable.
        ro = self._make_ro({})

        def fake_account_read_only(self, req, account):
            return 'on' if account == 'a' else ''

        with mock.patch(
                'swift.common.middleware.read_only.ReadOnlyMiddleware.'
                'account_read_only',
                fake_account_read_only):
            resp = self._call(ro, 'COPY',
                              headers={'Destination-Account': 'b'})
            self.assertEqual(resp, [b'Some Content'])

    def test_global_read_only_off_src_acct_on_dest_acct_on_on_copy(self):
        ro = self._make_ro({})
        with mock.patch(
                'swift.common.middleware.read_only.ReadOnlyMiddleware.'
                'account_read_only',
                return_value='true'):
            resp = self._call(ro, 'COPY',
                              headers={'Destination-Account': 'b'})
            self.assertEqual(resp, ro_resp)

    def test_global_read_only_non_swift_path(self):
        # Paths outside /v1/<account>/... are never blocked, even when
        # every account reports read-only.
        ro = self._make_ro({})

        def fake_account_read_only(self, req, account):
            return 'on'

        with mock.patch(
                'swift.common.middleware.read_only.ReadOnlyMiddleware.'
                'account_read_only',
                fake_account_read_only):
            self.assertEqual(self._call(ro, 'POST', path='/auth/v3.14'),
                             [b'Some Content'])
            self.assertEqual(self._call(ro, 'PUT', path='/v1'),
                             [b'Some Content'])
            self.assertEqual(self._call(ro, 'DELETE', path='/v1.0/'),
                             [b'Some Content'])
if __name__ == '__main__':
unittest.main()
| swift-master | test/unit/common/middleware/test_read_only.py |
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import shutil
import tempfile
import unittest
import uuid
import mock
from swift.common import swob
from swift.common.middleware import container_sync
from swift.proxy.controllers.base import get_cache_key
from swift.proxy.controllers.info import InfoController
from test.debug_logger import debug_logger
class FakeApp(object):
# Fake backend for the container_sync middleware tests:
# * /info is served through a real InfoController so registered swift
#   info can be round-tripped;
# * everything else returns a canned body whose text reveals whether
#   the middleware set swift.authorize_override on the request, and
#   echoes any X-Timestamp request header back as a response header.
def __call__(self, env, start_response):
if env.get('PATH_INFO') == '/info':
controller = InfoController(
app=mock.Mock(logger=debug_logger()),
version=None, expose_info=True,
disallowed_sections=[], admin_key=None)
handler = getattr(controller, env.get('REQUEST_METHOD'))
return handler(swob.Request(env))(env, start_response)
if env.get('swift.authorize_override'):
body = b'Response to Authorized Request'
else:
body = b'Pass-Through Response'
headers = [('Content-Length', str(len(body)))]
if 'HTTP_X_TIMESTAMP' in env:
headers.append(('X-Timestamp', env['HTTP_X_TIMESTAMP']))
start_response('200 OK', headers)
return [body]
class TestContainerSync(unittest.TestCase):
    """Tests for the container_sync auth middleware.

    Refactored: the capture-register_swift_info mock dance was repeated
    in four tests (now ``_registered_info``); the three invalid-auth
    tests shared their whole 401 check (now ``_assert_invalid_auth``,
    using assertIn consistently like test_invalid_sig already did); and
    test_missing_from_realms_conf's for-loop over error lines -- which
    asserted nothing when no error was logged -- is pinned to the exact
    expected list, matching test_current_invalid.
    """

    def setUp(self):
        self.tempdir = tempfile.mkdtemp()
        with open(
                os.path.join(self.tempdir, 'container-sync-realms.conf'),
                'w') as fp:
            fp.write('''
[US]
key = 9ff3b71c849749dbaec4ccdd3cbab62b
key2 = 1a0a5a0cbd66448084089304442d6776
cluster_dfw1 = http://dfw1.host/v1/
''')
        self.app = FakeApp()
        self.conf = {'swift_dir': self.tempdir}
        self.sync = container_sync.ContainerSync(self.app, self.conf)

    def tearDown(self):
        shutil.rmtree(self.tempdir, ignore_errors=1)

    def _registered_info(self):
        """Run register_info() and capture what it would register."""
        info = {}

        def capture_swift_info(key, **options):
            info[key] = options

        with mock.patch(
                'swift.common.middleware.container_sync.'
                'register_swift_info',
                new=capture_swift_info):
            self.sync.register_info()
        return info

    def _assert_invalid_auth(self, req, log_tag):
        """Assert the canned 401 for a bad X-Container-Sync-Auth header."""
        resp = req.get_response(self.sync)
        self.assertEqual(resp.status, '401 Unauthorized')
        self.assertEqual(
            resp.body,
            b'X-Container-Sync-Auth header not valid; '
            b'contact cluster operator for support.')
        self.assertIn(log_tag, req.environ.get('swift.log_info'),
                      req.environ.get('swift.log_info'))

    def test_current_not_set(self):
        # no 'current' option set by default
        self.assertIsNone(self.sync.realm)
        self.assertIsNone(self.sync.cluster)
        info = self._registered_info()
        for realm, realm_info in info['container_sync']['realms'].items():
            for cluster, options in realm_info['clusters'].items():
                self.assertEqual(options.get('current', False), False)

    def test_current_invalid(self):
        self.conf = {'swift_dir': self.tempdir, 'current': 'foo'}
        self.sync = container_sync.ContainerSync(self.app, self.conf,
                                                 logger=debug_logger())
        self.assertIsNone(self.sync.realm)
        self.assertIsNone(self.sync.cluster)
        info = self._registered_info()
        for realm, realm_info in info['container_sync']['realms'].items():
            for cluster, options in realm_info['clusters'].items():
                self.assertEqual(options.get('current', False), False)
        error_lines = self.sync.logger.get_lines_for_level('error')
        self.assertEqual(error_lines, ['Invalid current '
                                       '//REALM/CLUSTER (foo)'])

    def test_current_in_realms_conf(self):
        self.conf = {'swift_dir': self.tempdir, 'current': '//us/dfw1'}
        self.sync = container_sync.ContainerSync(self.app, self.conf)
        self.assertEqual('US', self.sync.realm)
        self.assertEqual('DFW1', self.sync.cluster)
        info = self._registered_info()
        for realm, realm_info in info['container_sync']['realms'].items():
            for cluster, options in realm_info['clusters'].items():
                if options.get('current'):
                    break
        self.assertEqual(realm, self.sync.realm)
        self.assertEqual(cluster, self.sync.cluster)

    def test_missing_from_realms_conf(self):
        self.conf = {'swift_dir': self.tempdir, 'current': 'foo/bar'}
        self.sync = container_sync.ContainerSync(self.app, self.conf,
                                                 logger=debug_logger())
        self.assertEqual('FOO', self.sync.realm)
        self.assertEqual('BAR', self.sync.cluster)
        info = self._registered_info()
        for realm, realm_info in info['container_sync']['realms'].items():
            for cluster, options in realm_info['clusters'].items():
                self.assertEqual(options.get('current', False), False)
        self.assertEqual(
            self.sync.logger.get_lines_for_level('error'),
            ['Unknown current //REALM/CLUSTER (//FOO/BAR)'])

    def test_pass_through(self):
        req = swob.Request.blank('/v1/a/c')
        resp = req.get_response(self.sync)
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.body, b'Pass-Through Response')

    def test_not_enough_args(self):
        req = swob.Request.blank(
            '/v1/a/c', headers={'x-container-sync-auth': 'a'})
        self._assert_invalid_auth(req, 'cs:not-3-args')

    def test_realm_miss(self):
        req = swob.Request.blank(
            '/v1/a/c',
            headers={'x-container-sync-auth': 'invalid nonce sig'})
        self._assert_invalid_auth(req, 'cs:no-local-realm-key')

    def test_user_key_miss(self):
        req = swob.Request.blank(
            '/v1/a/c', headers={'x-container-sync-auth': 'US nonce sig'})
        self._assert_invalid_auth(req, 'cs:no-local-user-key')

    def test_invalid_sig(self):
        req = swob.Request.blank(
            '/v1/a/c', headers={'x-container-sync-auth': 'US nonce sig'})
        infocache = req.environ.setdefault('swift.infocache', {})
        infocache[get_cache_key('a', 'c')] = {'sync_key': 'abc'}
        self._assert_invalid_auth(req, 'cs:invalid-sig')
        self.assertNotIn('swift.authorize_override', req.environ)
        self.assertNotIn('swift.slo_override', req.environ)
        self.assertNotIn('swift.symlink_override', req.environ)

    def test_valid_sig(self):
        ts = '1455221706.726999_0123456789abcdef'
        sig = self.sync.realms_conf.get_sig(
            'GET', '/v1/a/c', ts, 'nonce',
            self.sync.realms_conf.key('US'), 'abc')
        req = swob.Request.blank('/v1/a/c', headers={
            'x-container-sync-auth': 'US nonce ' + sig,
            'x-backend-inbound-x-timestamp': ts})
        infocache = req.environ.setdefault('swift.infocache', {})
        infocache[get_cache_key('a', 'c')] = {'sync_key': 'abc'}
        resp = req.get_response(self.sync)
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.body, b'Response to Authorized Request')
        self.assertIn('cs:valid', req.environ.get('swift.log_info'))
        # the inbound timestamp is restored onto the backend request and
        # echoed back by FakeApp
        self.assertIn('X-Timestamp', resp.headers)
        self.assertEqual(ts, resp.headers['X-Timestamp'])
        self.assertIn('swift.authorize_override', req.environ)
        self.assertIn('swift.slo_override', req.environ)
        self.assertIn('swift.symlink_override', req.environ)

    def test_valid_sig2(self):
        # the secondary key (key2) also authenticates, enabling rotation
        sig = self.sync.realms_conf.get_sig(
            'GET', '/v1/a/c', '0', 'nonce',
            self.sync.realms_conf.key2('US'), 'abc')
        req = swob.Request.blank(
            '/v1/a/c',
            headers={'x-container-sync-auth': 'US nonce ' + sig})
        infocache = req.environ.setdefault('swift.infocache', {})
        infocache[get_cache_key('a', 'c')] = {'sync_key': 'abc'}
        resp = req.get_response(self.sync)
        self.assertEqual(resp.status, '200 OK')
        self.assertEqual(resp.body, b'Response to Authorized Request')
        self.assertIn('cs:valid', req.environ.get('swift.log_info'))
        self.assertIn('swift.authorize_override', req.environ)
        self.assertIn('swift.slo_override', req.environ)
        self.assertIn('swift.symlink_override', req.environ)

    def test_info(self):
        req = swob.Request.blank('/info')
        resp = req.get_response(self.sync)
        self.assertEqual(resp.status, '200 OK')
        result = json.loads(resp.body)
        self.assertEqual(
            result.get('container_sync'),
            {'realms': {'US': {'clusters': {'DFW1': {}}}}})

    def test_info_always_fresh(self):
        # rewriting the realms conf on disk is picked up after reload()
        req = swob.Request.blank('/info')
        resp = req.get_response(self.sync)
        self.assertEqual(resp.status, '200 OK')
        result = json.loads(resp.body)
        self.assertEqual(
            result.get('container_sync'),
            {'realms': {'US': {'clusters': {'DFW1': {}}}}})
        with open(
                os.path.join(self.tempdir, 'container-sync-realms.conf'),
                'w') as fp:
            fp.write('''
[US]
key = 9ff3b71c849749dbaec4ccdd3cbab62b
key2 = 1a0a5a0cbd66448084089304442d6776
cluster_dfw1 = http://dfw1.host/v1/
[UK]
key = 400b3b357a80413f9d956badff1d9dfe
cluster_lon3 = http://lon3.host/v1/
''')
        self.sync.realms_conf.reload()
        req = swob.Request.blank('/info')
        resp = req.get_response(self.sync)
        self.assertEqual(resp.status, '200 OK')
        result = json.loads(resp.body)
        self.assertEqual(
            result.get('container_sync'),
            {'realms': {
                'US': {'clusters': {'DFW1': {}}},
                'UK': {'clusters': {'LON3': {}}}}})

    def test_allow_full_urls_setting(self):
        req = swob.Request.blank(
            '/v1/a/c',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'x-container-sync-to': 'http://host/v1/a/c'})
        resp = req.get_response(self.sync)
        self.assertEqual(resp.status, '200 OK')
        self.conf = {'swift_dir': self.tempdir, 'allow_full_urls': 'false'}
        self.sync = container_sync.ContainerSync(self.app, self.conf)
        req = swob.Request.blank(
            '/v1/a/c',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'x-container-sync-to': 'http://host/v1/a/c'})
        resp = req.get_response(self.sync)
        self.assertEqual(resp.status, '400 Bad Request')
        self.assertEqual(
            resp.body,
            b'Full URLs are not allowed for X-Container-Sync-To values. '
            b'Only realm values of the format '
            b'//realm/cluster/account/container are allowed.\n')

    def test_filter(self):
        # filter_factory merges global and local conf into the middleware
        app = FakeApp()
        unique = uuid.uuid4().hex
        sync = container_sync.filter_factory(
            {'global': 'global_value', 'swift_dir': unique},
            **{'local': 'local_value'})(app)
        self.assertEqual(sync.app, app)
        self.assertEqual(sync.conf, {
            'global': 'global_value', 'swift_dir': unique,
            'local': 'local_value'})
        req = swob.Request.blank('/info')
        resp = req.get_response(sync)
        self.assertEqual(resp.status, '200 OK')
        result = json.loads(resp.body)
        self.assertEqual(result.get('container_sync'), {'realms': {}})
if __name__ == '__main__':
unittest.main()
| swift-master | test/unit/common/middleware/test_container_sync.py |
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import json
import os
import time
import mock
import unittest
from swift.common import swob, utils, registry
from swift.common.middleware import versioned_writes, copy
from swift.common.swob import Request
from test.unit.common.middleware import helpers
class FakeCache(object):
    """Minimal memcache stand-in that always returns a single value.

    A default ``status`` of 200 is filled in to mirror the shape of the
    proxy's cached container-info entries.
    """

    def __init__(self, val):
        val.setdefault('status', 200)
        self.val = val

    def get(self, *args):
        # Ignore the requested key(s); every lookup yields the same dict.
        return self.val
def local_tz(func):
    '''
    Decorator to change the timezone when running a test.

    This uses the Eastern Time Zone definition from the time module's docs.
    Note that the timezone affects things like time.time() and time.mktime().
    '''
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Remember the exact prior state: restoring TZ='' is *not* the
        # same as TZ being unset, so track None for "was not set".
        tz = os.environ.get('TZ')
        try:
            os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
            time.tzset()
            return func(*args, **kwargs)
        finally:
            if tz is None:
                # TZ was not set before; remove it again rather than
                # leaving an empty (UTC-implying) value behind.
                del os.environ['TZ']
            else:
                os.environ['TZ'] = tz
            time.tzset()
    return wrapper
class VersionedWritesBaseTestCase(unittest.TestCase):
    # Shared plumbing for the legacy versioned_writes middleware tests:
    # wires the middleware to a FakeSwift backend and records every
    # swift.authorize callback so tests can assert on authorization.
    def setUp(self):
        self.app = helpers.FakeSwift()
        conf = {'allow_versioned_writes': 'true'}
        self.vw = versioned_writes.legacy.VersionedWritesMiddleware(
            self.app, conf)
    def tearDown(self):
        # Every backend request must have been fully read and closed.
        self.assertEqual(self.app.unclosed_requests, {})
        self.assertEqual(self.app.unread_requests, {})
    def call_app(self, req, app=None):
        # Drive one WSGI request through ``app`` (default: the fake
        # backend) and return (status, headers, body).  Installs an
        # authorize hook that appends each authorized request to
        # self.authorized unless the caller provided its own.
        if app is None:
            app = self.app
        self.authorized = []
        def authorize(req):
            self.authorized.append(req)
        if 'swift.authorize' not in req.environ:
            req.environ['swift.authorize'] = authorize
        req.headers.setdefault("User-Agent", "Marula Kruger")
        status = [None]
        headers = [None]
        def start_response(s, h, ei=None):
            status[0] = s
            headers[0] = h
        body_iter = app(req.environ, start_response)
        with utils.closing_if_possible(body_iter):
            body = b''.join(body_iter)
        return status[0], headers[0], body
    def call_vw(self, req):
        # Convenience: run the request through the middleware under test.
        return self.call_app(req, app=self.vw)
    def assertRequestEqual(self, req, other):
        # Requests are considered equal if method and path match.
        self.assertEqual(req.method, other.method)
        self.assertEqual(req.path, other.path)
class VersionedWritesTestCase(VersionedWritesBaseTestCase):
def test_put_container(self):
self.app.register('PUT', '/v1/a/c', swob.HTTPOk, {}, 'passed')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Location': 'ver_cont'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_vw(req)
self.assertEqual(status, '200 OK')
# check for sysmeta header
calls = self.app.calls_with_headers
method, path, req_headers = calls[0]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c', path)
self.assertIn('x-container-sysmeta-versions-location', req_headers)
self.assertEqual(req.headers['x-container-sysmeta-versions-location'],
'ver_cont')
self.assertIn('x-container-sysmeta-versions-mode', req_headers)
self.assertEqual(req.headers['x-container-sysmeta-versions-mode'],
'stack')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
    def test_put_container_history_header(self):
        # X-History-Location maps to the sysmeta location plus 'history'
        # mode on the backend request.
        self.app.register('PUT', '/v1/a/c', swob.HTTPOk, {}, 'passed')
        req = Request.blank('/v1/a/c',
                            headers={'X-History-Location': 'ver_cont'},
                            environ={'REQUEST_METHOD': 'PUT'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        # check for sysmeta header
        calls = self.app.calls_with_headers
        method, path, req_headers = calls[0]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/c', path)
        self.assertIn('x-container-sysmeta-versions-location', req_headers)
        self.assertEqual('ver_cont',
                         req_headers['x-container-sysmeta-versions-location'])
        self.assertIn('x-container-sysmeta-versions-mode', req_headers)
        self.assertEqual('history',
                         req_headers['x-container-sysmeta-versions-mode'])
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
    def test_put_container_both_headers(self):
        # Supplying both versioning headers is ambiguous: reject with 400
        # before anything reaches the backend.
        req = Request.blank('/v1/a/c',
                            headers={'X-Versions-Location': 'ver_cont',
                                     'X-History-Location': 'ver_cont'},
                            environ={'REQUEST_METHOD': 'PUT'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '400 Bad Request')
        self.assertFalse(self.app.calls)
    def test_container_allow_versioned_writes_false(self):
        self.vw.conf = {'allow_versioned_writes': 'false'}
        # PUT/POST container must fail as 412 when allow_versioned_writes
        # set to false
        for method in ('PUT', 'POST'):
            for header in ('X-Versions-Location', 'X-History-Location'):
                req = Request.blank('/v1/a/c',
                                    headers={header: 'ver_cont'},
                                    environ={'REQUEST_METHOD': method})
                status, headers, body = self.call_vw(req)
                self.assertEqual(status, "412 Precondition Failed",
                                 'Got %s instead of 412 when %sing '
                                 'with %s header' % (status, method, header))
        # GET performs as normal
        self.app.register('GET', '/v1/a/c', swob.HTTPOk, {}, 'passed')
        for method in ('GET', 'HEAD'):
            req = Request.blank('/v1/a/c',
                                headers={'X-Versions-Location': 'ver_cont'},
                                environ={'REQUEST_METHOD': method})
            status, headers, body = self.call_vw(req)
            self.assertEqual(status, '200 OK')
    def _test_removal(self, headers):
        # Helper: POSTing ``headers`` must blank out both sysmeta keys and
        # the user-visible x-versions-location on the backend request.
        self.app.register('POST', '/v1/a/c', swob.HTTPNoContent, {}, 'passed')
        req = Request.blank('/v1/a/c',
                            headers=headers,
                            environ={'REQUEST_METHOD': 'POST'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        # check for sysmeta header
        calls = self.app.calls_with_headers
        method, path, req_headers = calls[0]
        self.assertEqual('POST', method)
        self.assertEqual('/v1/a/c', path)
        for header in ['x-container-sysmeta-versions-location',
                       'x-container-sysmeta-versions-mode',
                       'x-versions-location']:
            self.assertIn(header, req_headers)
            self.assertEqual('', req_headers[header])
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
    def test_remove_headers(self):
        # X-Remove-* headers disable versioning.
        self._test_removal({'X-Remove-Versions-Location': 'x'})
        self._test_removal({'X-Remove-History-Location': 'x'})
    def test_empty_versions_location(self):
        # An empty location value also disables versioning.
        self._test_removal({'X-Versions-Location': ''})
        self._test_removal({'X-History-Location': ''})
    def test_remove_add_versions_precedence(self):
        # When both a removal and an add header are sent, the add wins and
        # the remove header is dropped from the backend request.
        self.app.register(
            'POST', '/v1/a/c', swob.HTTPOk,
            {'x-container-sysmeta-versions-location': 'ver_cont'},
            'passed')
        req = Request.blank('/v1/a/c',
                            headers={'X-Remove-Versions-Location': 'x',
                                     'X-Versions-Location': 'ver_cont'},
                            environ={'REQUEST_METHOD': 'POST'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Location', 'ver_cont'), headers)
        # check for sysmeta header
        calls = self.app.calls_with_headers
        method, path, req_headers = calls[0]
        self.assertEqual('POST', method)
        self.assertEqual('/v1/a/c', path)
        self.assertIn('x-container-sysmeta-versions-location', req_headers)
        self.assertNotIn('x-remove-versions-location', req_headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
    def _test_blank_add_versions_precedence(self, blank_header, add_header):
        # Helper: a non-empty add header takes precedence over a blank one,
        # and the mode follows whichever header carried the value.
        self.app.register(
            'POST', '/v1/a/c', swob.HTTPOk,
            {'x-container-sysmeta-versions-location': 'ver_cont'},
            'passed')
        req = Request.blank('/v1/a/c',
                            headers={blank_header: '',
                                     add_header: 'ver_cont'},
                            environ={'REQUEST_METHOD': 'POST'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        # check for sysmeta header
        calls = self.app.calls_with_headers
        method, path, req_headers = calls[-1]
        self.assertEqual('POST', method)
        self.assertEqual('/v1/a/c', path)
        self.assertIn('x-container-sysmeta-versions-location', req_headers)
        self.assertEqual('ver_cont',
                         req_headers['x-container-sysmeta-versions-location'])
        self.assertIn('x-container-sysmeta-versions-mode', req_headers)
        self.assertEqual('history' if add_header == 'X-History-Location'
                         else 'stack',
                         req_headers['x-container-sysmeta-versions-mode'])
        self.assertNotIn('x-remove-versions-location', req_headers)
        self.assertIn('x-versions-location', req_headers)
        self.assertEqual('', req_headers['x-versions-location'])
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
    def test_blank_add_versions_precedence(self):
        # Exercise both orderings of blank vs. populated headers.
        self._test_blank_add_versions_precedence(
            'X-Versions-Location', 'X-History-Location')
        self._test_blank_add_versions_precedence(
            'X-History-Location', 'X-Versions-Location')
    def test_get_container(self):
        # Sysmeta in 'stack' mode is surfaced to clients as
        # X-Versions-Location on container GET.
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {'x-container-sysmeta-versions-location': 'ver_cont',
             'x-container-sysmeta-versions-mode': 'stack'}, None)
        req = Request.blank(
            '/v1/a/c',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Location', 'ver_cont'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
    def test_head_container(self):
        # Sysmeta in 'history' mode is surfaced as X-History-Location
        # on container HEAD.
        self.app.register(
            'HEAD', '/v1/a/c', swob.HTTPOk,
            {'x-container-sysmeta-versions-location': 'other_ver_cont',
             'x-container-sysmeta-versions-mode': 'history'}, None)
        req = Request.blank(
            '/v1/a/c',
            environ={'REQUEST_METHOD': 'HEAD'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-History-Location', 'other_ver_cont'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
    def test_get_head(self):
        # Plain object GET/HEAD pass straight through with one auth check.
        self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None)
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {}, None)
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'HEAD'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
    def test_put_object_no_versioning(self):
        # No versions-location in container info: PUT passes straight
        # through without a preliminary GET.
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        cache = FakeCache({})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
    def test_put_first_object_success(self):
        # Versioning enabled but no existing object: the 404 on the
        # preliminary GET means nothing needs to be copied aside.
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100',
                     'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 2)
        # Versioned writes middleware now calls auth on the incoming request
        # before we try the GET and then at the proxy, so there are 2
        # authorized for the same request.
        self.assertRequestEqual(req, self.authorized[0])
        self.assertRequestEqual(req, self.authorized[1])
        self.assertEqual(2, self.app.call_count)
        self.assertEqual(['VW', None], self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
    def test_put_versioned_object_including_url_encoded_name_success(self):
        # Same as above, but the object name contains percent-encoding
        # that must survive the round trip.
        self.app.register(
            'PUT', '/v1/a/c/%ff', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/c/%ff', swob.HTTPNotFound, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/%25ff',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100',
                     'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 2)
        # Versioned writes middleware now calls auth on the incoming request
        # before we try the GET and then at the proxy, so there are 2
        # authorized for the same request.
        self.assertRequestEqual(req, self.authorized[0])
        self.assertRequestEqual(req, self.authorized[1])
        self.assertEqual(2, self.app.call_count)
        self.assertEqual(['VW', None], self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
    def test_put_object_no_versioning_with_container_config_true(self):
        # set False to versions_write and expect no GET occurred
        self.vw.conf = {'allow_versioned_writes': 'false'}
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        called_method = [method for (method, path, hdrs) in self.app._calls]
        self.assertNotIn('GET', called_method)
    def test_put_request_is_dlo_manifest_with_container_config_true(self):
        # A DLO manifest PUT still archives the previous object version and
        # forwards the x-object-manifest header to the final PUT.
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'old version')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000060.00000', swob.HTTPCreated,
            {}, '')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-Object-Manifest': 'req/manifest'},
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertRequestEqual(req, self.authorized[1])
        self.assertEqual(3, self.app.call_count)
        self.assertEqual([
            ('GET', '/v1/a/c/o?symlink=get'),
            ('PUT', '/v1/a/ver_cont/001o/0000000060.00000'),
            ('PUT', '/v1/a/c/o'),
        ], self.app.calls)
        self.assertIn('x-object-manifest',
                      self.app.calls_with_headers[2].headers)
    def test_put_version_is_dlo_manifest_with_container_config_true(self):
        # When the *existing* object is a DLO manifest, its manifest header
        # must be carried into the archived version copy.
        self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
                          {'X-Object-Manifest': 'resp/manifest',
                           'last-modified': 'Thu, 1 Jan 1970 01:00:00 GMT'},
                          'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000003600.00000', swob.HTTPCreated,
            {}, '')
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '201 Created')
        # The middleware now auths the request before the initial GET, the
        # same GET that gets the X-Object-Manifest back. So a second auth is
        # now done.
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertRequestEqual(req, self.authorized[1])
        self.assertEqual(3, self.app.call_count)
        self.assertEqual([
            ('GET', '/v1/a/c/o?symlink=get'),
            ('PUT', '/v1/a/ver_cont/001o/0000003600.00000'),
            ('PUT', '/v1/a/c/o'),
        ], self.app.calls)
        self.assertIn('x-object-manifest',
                      self.app.calls_with_headers[1].headers)
    def test_delete_object_no_versioning_with_container_config_true(self):
        # set False to versions_write obviously and expect no GET versioning
        # container and GET/PUT called (just delete object as normal)
        self.vw.conf = {'allow_versioned_writes': 'false'}
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, 'passed')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        called_method = \
            [method for (method, path, rheaders) in self.app._calls]
        self.assertNotIn('PUT', called_method)
        self.assertNotIn('GET', called_method)
        self.assertEqual(1, self.app.call_count)
    def test_new_version_success(self):
        # Overwriting an existing object archives the old copy under
        # ver_cont/<len-prefixed name>/<last-modified timestamp>.
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000001.00000', swob.HTTPCreated,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100',
                     'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '201 Created')
        # authorized twice now because versioned_writes now makes a check on
        # PUT
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(['VW', 'VW', None], self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
    def test_new_version_get_errors(self):
        # GET on source fails, expect client error response,
        # no PUT should happen
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPBadRequest, {}, None)
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '412 Precondition Failed')
        self.assertEqual(1, self.app.call_count)
        # GET on source fails, expect server error response
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPBadGateway, {}, None)
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '503 Service Unavailable')
        self.assertEqual(2, self.app.call_count)
    def test_new_version_put_errors(self):
        # PUT of version fails, expect client error response
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000001.00000',
            swob.HTTPUnauthorized, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '412 Precondition Failed')
        self.assertEqual(2, self.app.call_count)
        # PUT of version fails, expect server error response
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000001.00000', swob.HTTPBadGateway,
            {}, None)
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '503 Service Unavailable')
        self.assertEqual(4, self.app.call_count)
    @local_tz
    def test_new_version_sysmeta_precedence(self):
        # Runs under a fixed TZ because the archive name is derived from
        # the last-modified time via mktime().
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:00 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000000000.00000', swob.HTTPOk,
            {}, None)
        # fill cache with two different values for versions location
        # new middleware should use sysmeta first
        cache = FakeCache({'versions': 'old_ver_cont',
                           'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        # authorized twice now because versioned_writes now makes a check on
        # PUT
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        # check that sysmeta header was used
        calls = self.app.calls_with_headers
        method, path, req_headers = calls[1]
        self.assertEqual('PUT', method)
        self.assertEqual('/v1/a/ver_cont/001o/0000000000.00000', path)
    def test_delete_no_versions_container_success(self):
        # DELETE when the versions container doesn't exist (listing 404s)
        # falls through to a plain object DELETE.
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', helpers.normalize_path(
                '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on'),
            swob.HTTPNotFound, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0', 'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(2, self.app.call_count)
        self.assertEqual(['VW', None], self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('DELETE', '/v1/a/c/o'),
        ])
    def test_delete_first_object_success(self):
        # An empty versions listing also means a plain object DELETE.
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {}, '[]')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('DELETE', '/v1/a/c/o'),
        ])
    def test_delete_latest_version_no_marker_success(self):
        # Stack-mode DELETE: restore the newest archived version (001o/2)
        # into the base container, then delete it from ver_cont.
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}, '
            '{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}]')
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPCreated,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0', 'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(4, self.app.call_count)
        self.assertEqual(['VW', 'VW', 'VW', 'VW'], self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
        # check that X-If-Delete-At was removed from DELETE request
        req_headers = self.app.headers[-1]
        self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/2'),
        ])
    def test_delete_latest_version_restores_marker_success(self):
        # Newest listing entry is a delete marker but the base object still
        # exists: just DELETE the base object, keeping X-If-Delete-At.
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "application/x-deleted;swift_versions_deleted=1"'
            '}, {"hash": "y", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"'
            '}]')
        self.app.register(
            'HEAD', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {})
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual(len(self.authorized), 2)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertRequestEqual(req, self.authorized[1])
        calls = self.app.calls_with_headers
        self.assertEqual(['GET', 'HEAD', 'DELETE'],
                         [c.method for c in calls])
        self.assertIn('X-Newest', calls[1].headers)
        self.assertEqual('True', calls[1].headers['X-Newest'])
        method, path, req_headers = calls.pop()
        self.assertTrue(path.startswith('/v1/a/c/o'))
        # Since we're deleting the original, this *should* still be present:
        self.assertEqual('1', req_headers.get('X-If-Delete-At'))
    def test_delete_latest_version_is_marker_success(self):
        # Test popping a delete marker off the stack. So, there's data in the
        # versions container, topped by a delete marker, and there's nothing
        # in the base versioned container.
        self.app.register(
            'GET',
            helpers.normalize_path(
                '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on'),
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "application/x-deleted;swift_versions_deleted=1"'
            '},{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"'
            '}]')
        self.app.register(
            'HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk, {}, 'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('HEAD', '/v1/a/c/o'),
            ('GET', '/v1/a/ver_cont/001o/1?symlink=get'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/1'),
            ('DELETE', '/v1/a/ver_cont/001o/2'),
        ])
        self.assertIn('X-Newest', self.app.headers[1])
        self.assertEqual('True', self.app.headers[1]['X-Newest'])
        self.assertIn('X-Newest', self.app.headers[2])
        self.assertEqual('True', self.app.headers[2]['X-Newest'])
        # check that X-If-Delete-At was removed from DELETE request
        for req_headers in self.app.headers[-2:]:
            self.assertNotIn('x-if-delete-at',
                             [h.lower() for h in req_headers])
    def test_delete_latest_version_doubled_up_markers_success(self):
        # Two delete markers stacked on top: only the newest marker is
        # removed; no restore PUT happens because the base object is gone.
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/'
            '&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/3", '
            '"content_type": "application/x-deleted;swift_versions_deleted=1"'
            '}, {"hash": "y", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "application/x-deleted;swift_versions_deleted=1"'
            '}, {"hash": "y", '
            '"last_modified": "2014-11-20T14:23:02.206740", '
            '"bytes": 30, '
            '"name": "001o/1", '
            '"content_type": "text/plain"'
            '}]')
        self.app.register(
            'HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/3', swob.HTTPOk, {}, 'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        # check that X-If-Delete-At was removed from DELETE request
        calls = self.app.calls_with_headers
        self.assertEqual(['GET', 'HEAD', 'DELETE'],
                         [c.method for c in calls])
        method, path, req_headers = calls.pop()
        self.assertTrue(path.startswith('/v1/a/ver_cont/001o/3'))
        self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])
    @mock.patch('swift.common.middleware.versioned_writes.legacy.time.time',
                return_value=1234)
    def test_history_delete_marker_no_object_success(self, mock_time):
        # History-mode DELETE of a nonexistent object still writes a delete
        # marker (named from the mocked wall clock) before the DELETE.
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPNotFound,
            {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0000001234.00000', swob.HTTPCreated,
            {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont',
                                       'versions-mode': 'history'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '404 Not Found')
        self.assertEqual(len(self.authorized), 2)
        req.environ['REQUEST_METHOD'] = 'PUT'
        self.assertRequestEqual(req, self.authorized[0])
        calls = self.app.calls_with_headers
        self.assertEqual(['GET', 'PUT', 'DELETE'], [c.method for c in calls])
        self.assertEqual('application/x-deleted;swift_versions_deleted=1',
                         calls[1].headers.get('Content-Type'))
    @mock.patch('swift.common.middleware.versioned_writes.legacy.time.time',
                return_value=123456789.54321)
    def test_history_delete_marker_over_object_success(self, mock_time):
        # History-mode DELETE of an existing object archives the old copy
        # (named from last-modified) and then writes a delete marker.
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Wed, 19 Nov 2014 18:19:02 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/1416421142.00000', swob.HTTPCreated,
            {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/001o/0123456789.54321', swob.HTTPCreated,
            {}, 'passed')
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont',
                                       'versions-mode': 'history'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual(b'', body)
        self.assertEqual(len(self.authorized), 2)
        req.environ['REQUEST_METHOD'] = 'PUT'
        self.assertRequestEqual(req, self.authorized[0])
        calls = self.app.calls_with_headers
        self.assertEqual(['GET', 'PUT', 'PUT', 'DELETE'],
                         [c.method for c in calls])
        self.assertEqual('/v1/a/ver_cont/001o/1416421142.00000',
                         calls[1].path)
        self.assertEqual('application/x-deleted;swift_versions_deleted=1',
                         calls[2].headers.get('Content-Type'))
    def test_delete_single_version_success(self):
        # check that if the first listing page has just a single item then
        # it is not erroneously inferred to be a non-reversed listing
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}]')
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('GET', '/v1/a/ver_cont/001o/1?symlink=get'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/1'),
        ])
    def test_DELETE_on_expired_versioned_object(self):
        # if the newest archived version has expired (404 on GET), the
        # middleware should fall back to the next-newest version
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}, '
            '{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}]')
        # expired object
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound,
            {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPCreated,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(5, self.app.call_count)
        # note the extra GET for the expired 001o/2 before 001o/1 is used
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('GET', helpers.normalize_path(
                '/v1/a/ver_cont/001o/2?symlink=get')),
            ('GET', helpers.normalize_path(
                '/v1/a/ver_cont/001o/1?symlink=get')),
            ('PUT', helpers.normalize_path('/v1/a/c/o')),
            ('DELETE', helpers.normalize_path('/v1/a/ver_cont/001o/1')),
        ])
    def test_denied_DELETE_of_versioned_object(self):
        # a 403 from authorization should stop processing after the
        # (pre-authed) version-container listing
        authorize_call = []
        self.app.register(
            'GET',
            '/v1/a/ver_cont?prefix=001o/&marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}, '
            '{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}]')
        def fake_authorize(req):
            # the container GET is pre-auth'd so here we deny the object DELETE
            authorize_call.append(req)
            return swob.HTTPForbidden()
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'swift.authorize': fake_authorize,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '403 Forbidden')
        self.assertEqual(len(authorize_call), 1)
        self.assertRequestEqual(req, authorize_call[0])
        # no restore PUT / version DELETE should have been attempted
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
        ])
    def test_denied_PUT_of_versioned_object(self):
        # a denied PUT must not touch the backend at all
        authorize_call = []
        self.app.register(
            'GET', '/v1/a/c/o', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        def fake_authorize(req):
            # we should deny the object PUT
            authorize_call.append(req)
            return swob.HTTPForbidden()
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
                     'swift.authorize': fake_authorize,
                     'CONTENT_LENGTH': '0'})
        # Save off a copy, as the middleware may modify the original
        expected_req = Request(req.environ.copy())
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '403 Forbidden')
        self.assertEqual(len(authorize_call), 1)
        self.assertRequestEqual(expected_req, authorize_call[0])
        # nothing should have reached the backend app
        self.assertEqual(self.app.calls, [])
class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
    # NOTE(review): exercises version containers served (at least partly) by
    # container servers that do not honor reverse=on listings, so the
    # middleware must page through listings with marker/end_marker itself.
    def test_delete_latest_version_success(self):
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        # listing is NOT reversed despite reverse=on (old container server)
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}, '
            '{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}]')
        # next page is empty, so 001o/2 is the latest version
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/'
            '&marker=001o/2',
            swob.HTTPNotFound, {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPCreated,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            headers={'X-If-Delete-At': 1},
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0', 'swift.trans_id': 'fake_trans_id'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(5, self.app.call_count)
        # all subrequests should be tagged with the middleware's source and
        # carry the client's transaction id
        self.assertEqual(['VW', 'VW', 'VW', 'VW', 'VW'],
                         self.app.swift_sources)
        self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
        # check that X-If-Delete-At was removed from DELETE request
        req_headers = self.app.headers[-1]
        self.assertNotIn('x-if-delete-at', [h.lower() for h in req_headers])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=001o/2')),
            ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/2'),
        ])
    def test_DELETE_on_expired_versioned_object(self):
        # latest version has expired; middleware should fall back to the
        # previous one after paging to find the newest name
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}, '
            '{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}]')
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/'
            '&marker=001o/2',
            swob.HTTPNotFound, {}, None)
        # expired object
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound,
            {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPCreated,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(6, self.app.call_count)
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=001o/2')),
            ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
            ('GET', '/v1/a/ver_cont/001o/1?symlink=get'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/1'),
        ])
    def test_denied_DELETE_of_versioned_object(self):
        # authorization denial should stop after the (pre-authed) listings
        authorize_call = []
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&reverse=on',
            swob.HTTPOk, {},
            '[{"hash": "x", '
            '"last_modified": "2014-11-21T14:14:27.409100", '
            '"bytes": 3, '
            '"name": "001o/1", '
            '"content_type": "text/plain"}, '
            '{"hash": "y", '
            '"last_modified": "2014-11-21T14:23:02.206740", '
            '"bytes": 3, '
            '"name": "001o/2", '
            '"content_type": "text/plain"}]')
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/'
            '&marker=001o/2',
            swob.HTTPNotFound, {}, None)
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPForbidden,
            {}, None)
        def fake_authorize(req):
            authorize_call.append(req)
            return swob.HTTPForbidden()
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'swift.authorize': fake_authorize,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '403 Forbidden')
        self.assertEqual(len(authorize_call), 1)
        self.assertRequestEqual(req, authorize_call[0])
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=001o/2')),
        ])
    def test_partially_upgraded_cluster(self):
        old_versions = [
            {'hash': 'etag%d' % x,
             'last_modified': "2014-11-21T14:14:%02d.409100" % x,
             'bytes': 3,
             'name': '001o/%d' % x,
             'content_type': 'text/plain'}
            for x in range(5)]
        # first container server can reverse
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&reverse=on',
            swob.HTTPOk, {}, json.dumps(list(reversed(old_versions[2:]))))
        # but all objects are already gone
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/4', swob.HTTPNotFound,
            {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/3', swob.HTTPNotFound,
            {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound,
            {}, None)
        # second container server can't reverse
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=001o/2&reverse=on',
            swob.HTTPOk, {}, json.dumps(old_versions[3:]))
        # subsequent requests shouldn't reverse
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&end_marker=001o/2',
            swob.HTTPOk, {}, json.dumps(old_versions[:1]))
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=001o/0&end_marker=001o/2',
            swob.HTTPOk, {}, json.dumps(old_versions[1:2]))
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=001o/1&end_marker=001o/2',
            swob.HTTPOk, {}, '[]')
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/1', swob.HTTPOk,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPNoContent,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('GET', '/v1/a/ver_cont/001o/4?symlink=get'),
            ('GET', '/v1/a/ver_cont/001o/3?symlink=get'),
            ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=001o/2&reverse=on')),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&end_marker=001o/2')),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=001o/0&end_marker=001o/2')),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=001o/1&end_marker=001o/2')),
            ('GET', '/v1/a/ver_cont/001o/1?symlink=get'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/1'),
        ])
    def test_partially_upgraded_cluster_single_result_on_second_page(self):
        old_versions = [
            {'hash': 'etag%d' % x,
             'last_modified': "2014-11-21T14:14:%02d.409100" % x,
             'bytes': 3,
             'name': '001o/%d' % x,
             'content_type': 'text/plain'}
            for x in range(5)]
        # first container server can reverse
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&reverse=on',
            swob.HTTPOk, {}, json.dumps(list(reversed(old_versions[-2:]))))
        # but both objects are already gone
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/4', swob.HTTPNotFound,
            {}, None)
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/3', swob.HTTPNotFound,
            {}, None)
        # second container server can't reverse
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=001o/3&reverse=on',
            swob.HTTPOk, {}, json.dumps(old_versions[4:]))
        # subsequent requests shouldn't reverse
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=&end_marker=001o/3',
            swob.HTTPOk, {}, json.dumps(old_versions[:2]))
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=001o/1&end_marker=001o/3',
            swob.HTTPOk, {}, json.dumps(old_versions[2:3]))
        self.app.register(
            'GET', '/v1/a/ver_cont?prefix=001o/&'
            'marker=001o/2&end_marker=001o/3',
            swob.HTTPOk, {}, '[]')
        self.app.register(
            'GET', '/v1/a/ver_cont/001o/2', swob.HTTPOk,
            {'content-length': '3'}, None)
        self.app.register(
            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, None)
        self.app.register(
            'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPNoContent,
            {}, None)
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/c/o',
            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache,
                     'CONTENT_LENGTH': '0'})
        status, headers, body = self.call_vw(req)
        self.assertEqual(status, '204 No Content')
        prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
        self.assertEqual(self.app.calls, [
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&reverse=on')),
            ('GET', '/v1/a/ver_cont/001o/4?symlink=get'),
            ('GET', '/v1/a/ver_cont/001o/3?symlink=get'),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=001o/3&reverse=on')),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=&end_marker=001o/3')),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=001o/1&end_marker=001o/3')),
            ('GET', helpers.normalize_path(
                prefix_listing_prefix + 'marker=001o/2&end_marker=001o/3')),
            ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
            ('PUT', '/v1/a/c/o'),
            ('DELETE', '/v1/a/ver_cont/001o/2'),
        ])
class VersionedWritesCopyingTestCase(VersionedWritesBaseTestCase):
    # verify interaction of copy and versioned_writes middlewares
    def setUp(self):
        # pipeline under test: copy -> versioned_writes -> FakeSwift
        self.app = helpers.FakeSwift()
        conf = {'allow_versioned_writes': 'true'}
        self.vw = versioned_writes.filter_factory(conf)(self.app)
        self.filter = copy.filter_factory({})(self.vw)
    def call_filter(self, req, **kwargs):
        # route the request through the full copy+vw pipeline
        return self.call_app(req, app=self.filter, **kwargs)
    def test_copy_first_version(self):
        # no existing object to move to the versions container
        self.app.register(
            'GET', '/v1/a/tgt_cont/tgt_obj', swob.HTTPNotFound, {}, None)
        self.app.register(
            'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/src_cont/src_obj',
            environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'},
            headers={'Destination': 'tgt_cont/tgt_obj'})
        status, headers, body = self.call_filter(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 3)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/src_cont/src_obj', self.authorized[0].path)
        # At the moment we are calling authorize on the incoming request in
        # the middleware before we do the PUT (and the source GET) and again
        # on the incoming request when it gets to the proxy. So the 2nd and
        # 3rd auths look the same.
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[1].path)
        self.assertEqual('PUT', self.authorized[2].method)
        self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[2].path)
        # note the GET on tgt_cont/tgt_obj is pre-authed
        self.assertEqual(3, self.app.call_count, self.app.calls)
    def test_copy_new_version(self):
        # existing object should be moved to versions container
        self.app.register(
            'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/a/tgt_cont/tgt_obj', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/a/ver_cont/007tgt_obj/0000000001.00000', swob.HTTPOk,
            {}, None)
        self.app.register(
            'PUT', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/a/src_cont/src_obj',
            environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'},
            headers={'Destination': 'tgt_cont/tgt_obj'})
        status, headers, body = self.call_filter(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 3)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/src_cont/src_obj', self.authorized[0].path)
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[1].path)
        self.assertEqual(4, self.app.call_count)
    def test_copy_new_version_different_account(self):
        # same as above but the archive PUT lands in the target account
        self.app.register(
            'GET', '/v1/src_a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'GET', '/v1/tgt_a/tgt_cont/tgt_obj', swob.HTTPOk,
            {'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
        self.app.register(
            'PUT', '/v1/tgt_a/ver_cont/007tgt_obj/0000000001.00000',
            swob.HTTPOk, {}, None)
        self.app.register(
            'PUT', '/v1/tgt_a/tgt_cont/tgt_obj', swob.HTTPCreated, {},
            'passed')
        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
        req = Request.blank(
            '/v1/src_a/src_cont/src_obj',
            environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache,
                     'CONTENT_LENGTH': '100'},
            headers={'Destination': 'tgt_cont/tgt_obj',
                     'Destination-Account': 'tgt_a'})
        status, headers, body = self.call_filter(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 3)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/src_a/src_cont/src_obj', self.authorized[0].path)
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/tgt_a/tgt_cont/tgt_obj', self.authorized[1].path)
        self.assertEqual(4, self.app.call_count)
    def test_copy_object_no_versioning_with_container_config_true(self):
        # set False to versions_write obviously and expect no extra
        # COPY called (just copy object as normal)
        self.vw.conf = {'allow_versioned_writes': 'false'}
        self.app.register(
            'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed')
        self.app.register(
            'PUT', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, 'passed')
        cache = FakeCache({'versions': 'ver_cont'})
        req = Request.blank(
            '/v1/a/src_cont/src_obj',
            environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache},
            headers={'Destination': '/tgt_cont/tgt_obj'})
        status, headers, body = self.call_filter(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(len(self.authorized), 2)
        self.assertEqual('GET', self.authorized[0].method)
        self.assertEqual('/v1/a/src_cont/src_obj', self.authorized[0].path)
        self.assertEqual('PUT', self.authorized[1].method)
        self.assertEqual('/v1/a/tgt_cont/tgt_obj', self.authorized[1].path)
        self.assertEqual(2, self.app.call_count)
class TestSwiftInfo(unittest.TestCase):
    """Verify what the filter factory registers in /info."""
    def setUp(self):
        # reset the global registry so tests don't see each other's entries
        registry._swift_info = {}
        registry._swift_admin_info = {}
    def test_registered_defaults(self):
        versioned_writes.filter_factory({})('have to pass in an app')
        swift_info = registry.get_swift_info()
        # in default, versioned_writes is not in swift_info
        self.assertNotIn('versioned_writes', swift_info)
    def test_registered_explicitly_set(self):
        versioned_writes.filter_factory(
            {'allow_versioned_writes': 'true'})('have to pass in an app')
        swift_info = registry.get_swift_info()
        self.assertIn('versioned_writes', swift_info)
        self.assertEqual(
            swift_info['versioned_writes'].get('allowed_flags'),
            ('x-versions-location', 'x-history-location'))
# Allow running this test module directly with `python <module>`.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_versioned_writes.py |
# -*- coding: utf-8 -*-
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import json
import time
import unittest
from mock import patch
import six
from io import BytesIO
from swift.common import swob, registry
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.middleware import slo
from swift.common.swob import Request, HTTPException, str_to_wsgi, \
bytes_to_wsgi
from swift.common.utils import quote, closing_if_possible, close_if_possible, \
parse_content_type, iter_multipart_mime_documents, parse_mime_headers, \
Timestamp, get_expirer_container, md5
from test.unit.common.middleware.helpers import FakeSwift
test_xml_data = '''<?xml version="1.0" encoding="UTF-8"?>
<static_large_object>
<object_segment>
<path>/cont/object</path>
<etag>etagoftheobjectsegment</etag>
<size_bytes>100</size_bytes>
</object_segment>
</static_large_object>
'''
test_json_data = json.dumps([{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
def fake_start_response(*args, **kwargs):
    """No-op stand-in for a WSGI ``start_response`` callable."""
    return None
def md5hex(s):
    """Return the hex MD5 digest of *s* (``str`` input is ASCII-encoded)."""
    data = s.encode('ascii') if not isinstance(s, bytes) else s
    return md5(data, usedforsecurity=False).hexdigest()
class SloTestCase(unittest.TestCase):
    """Base class wiring a FakeSwift backend behind the SLO middleware."""
    def setUp(self):
        self.app = FakeSwift()
        slo_conf = {'rate_limit_under_size': '0'}
        self.slo = slo.filter_factory(slo_conf)(self.app)
        self.slo.logger = self.app.logger
        # expected etag of the "abcd" fixture manifest: md5 of the
        # concatenated segment etags (the "bc" pair is itself nested)
        self.manifest_abcd_etag = md5hex(
            md5hex("a" * 5) + md5hex(md5hex("b" * 10) + md5hex("c" * 15)) +
            md5hex("d" * 20))
    def call_app(self, req, app=None):
        """Run *req* through *app* (default: the bare FakeSwift) and
        return (status, headers, body) with the body fully consumed."""
        if app is None:
            app = self.app
        req.headers.setdefault("User-Agent", "Mozzarella Foxfire")
        # capture what the app passes to start_response
        status = [None]
        headers = [None]
        def start_response(s, h, ei=None):
            status[0] = s
            headers[0] = h
        body_iter = app(req.environ, start_response)
        body = b''
        # appease the close-checker
        with closing_if_possible(body_iter):
            for chunk in body_iter:
                body += chunk
        return status[0], headers[0], body
    def call_slo(self, req, **kwargs):
        # convenience: run the request through the SLO-wrapped app
        return self.call_app(req, app=self.slo, **kwargs)
class TestSloMiddleware(SloTestCase):
    """Pass-through behavior and manifest input validation."""
    def setUp(self):
        super(TestSloMiddleware, self).setUp()
        self.app.register(
            'GET', '/', swob.HTTPOk, {}, b'passed')
        self.app.register(
            'PUT', '/', swob.HTTPOk, {}, b'passed')
    def test_handle_multipart_no_obj(self):
        # non-object requests pass straight through to the app
        req = Request.blank('/')
        resp_iter = self.slo(req.environ, fake_start_response)
        self.assertEqual(self.app.calls, [('GET', '/')])
        self.assertEqual(b''.join(resp_iter), b'passed')
    def test_slo_header_assigned(self):
        # clients may not set X-Static-Large-Object themselves
        req = Request.blank(
            '/v1/a/c/o', headers={'x-static-large-object': "true"},
            environ={'REQUEST_METHOD': 'PUT'})
        resp = b''.join(self.slo(req.environ, fake_start_response))
        self.assertTrue(
            resp.startswith(b'X-Static-Large-Object is a reserved header'))
    def test_slo_PUT_env_override(self):
        # the swift.slo_override env flag skips the reserved-header check
        path = '/v1/a/c/o'
        body = b'manifest body not checked when override flag set'
        resp_status = []
        def start_response(status, headers, *args):
            resp_status.append(status)
        req = Request.blank(
            path, headers={'x-static-large-object': "true"},
            environ={'REQUEST_METHOD': 'PUT', 'swift.slo_override': True},
            body=body)
        self.app.register('PUT', path, swob.HTTPCreated, {})
        resp_iter = self.slo(req.environ, start_response)
        self.assertEqual(b'', b''.join(resp_iter))
        self.assertEqual(self.app.calls, [('PUT', path)])
        self.assertEqual(body, self.app.uploaded[path][1])
        self.assertEqual(resp_status[0], '201 Created')
    def _put_bogus_slo(self, manifest_text,
                       manifest_path='/v1/a/c/the-manifest'):
        # validate *manifest_text*, expect a 400, and return the error body
        with self.assertRaises(HTTPException) as catcher:
            slo.parse_and_validate_input(manifest_text, manifest_path)
        self.assertEqual(400, catcher.exception.status_int)
        return catcher.exception.body.decode('utf-8')
    def _put_slo(self, manifest_text, manifest_path='/v1/a/c/the-manifest'):
        # validate *manifest_text* and return the parsed segment list
        return slo.parse_and_validate_input(manifest_text, manifest_path)
    def test_bogus_input(self):
        self.assertEqual('Manifest must be valid JSON.\n',
                         self._put_bogus_slo('some non json'))
        self.assertEqual('Manifest must be a list.\n',
                         self._put_bogus_slo('{}'))
        self.assertEqual('Index 0: not a JSON object\n',
                         self._put_bogus_slo('["zombocom"]'))
    def test_bogus_input_bad_keys(self):
        self.assertEqual(
            "Index 0: extraneous keys \"baz\", \"foo\"\n",
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
                  'size_bytes': 100,
                  'foo': 'bar', 'baz': 'quux'}])))
        # This also catches typos
        self.assertEqual(
            'Index 0: extraneous keys "egat"\n',
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont/object', 'egat': 'etagoftheobjectsegment',
                  'size_bytes': 100}])))
        self.assertEqual(
            'Index 0: extraneous keys "siez_bytes"\n',
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
                  'siez_bytes': 100}])))
    def test_bogus_input_ranges(self):
        self.assertEqual(
            "Index 0: invalid range\n",
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont/object', 'etag': 'blah',
                  'size_bytes': 100, 'range': 'non-range value'}])))
        self.assertEqual(
            "Index 0: multiple ranges (only one allowed)\n",
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont/object', 'etag': 'blah',
                  'size_bytes': 100, 'range': '1-20,30-40'}])))
    def test_bogus_input_unsatisfiable_range(self):
        self.assertEqual(
            "Index 0: unsatisfiable range\n",
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont/object', 'etag': 'blah',
                  'size_bytes': 100, 'range': '8888-9999'}])))
        # since size is optional, we have to be able to defer this check
        segs = self._put_slo(json.dumps(
            [{'path': '/cont/object', 'etag': 'blah',
              'size_bytes': None, 'range': '8888-9999'}]))
        self.assertEqual(1, len(segs))
    def test_bogus_input_path(self):
        self.assertEqual(
            "Index 0: path does not refer to an object. Path must be of the "
            "form /container/object.\n"
            "Index 1: path does not refer to an object. Path must be of the "
            "form /container/object.\n",
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont', 'etag': 'etagoftheobjectsegment',
                  'size_bytes': 100},
                 {'path': '/c-trailing-slash/', 'etag': 'e',
                  'size_bytes': 100},
                 {'path': '/con/obj', 'etag': 'e',
                  'size_bytes': 100},
                 {'path': '/con/obj-trailing-slash/', 'etag': 'e',
                  'size_bytes': 100},
                 {'path': '/con/obj/with/slashes', 'etag': 'e',
                  'size_bytes': 100}])))
    def test_bogus_input_multiple(self):
        self.assertEqual(
            "Index 0: invalid range\nIndex 1: not a JSON object\n",
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
                  'size_bytes': 100, 'range': 'non-range value'},
                 None])))
    def test_bogus_input_size_bytes(self):
        self.assertEqual(
            "Index 0: invalid size_bytes\n",
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont/object', 'etag': 'blah', 'size_bytes': "fht"},
                 {'path': '/cont/object', 'etag': 'blah', 'size_bytes': None},
                 {'path': '/cont/object', 'etag': 'blah', 'size_bytes': 100}],
            )))
        self.assertEqual(
            "Index 0: invalid size_bytes\n",
            self._put_bogus_slo(json.dumps(
                [{'path': '/cont/object', 'etag': 'blah', 'size_bytes': []}],
            )))
    def test_bogus_input_self_referential(self):
        self.assertEqual(
            "Index 0: manifest must not include itself as a segment\n",
            self._put_bogus_slo(json.dumps(
                [{'path': '/c/the-manifest', 'etag': 'gate',
                  'size_bytes': 100, 'range': 'non-range value'}])))
    def test_bogus_input_self_referential_non_ascii(self):
        self.assertEqual(
            "Index 0: manifest must not include itself as a segment\n",
            self._put_bogus_slo(
                json.dumps([{'path': u'/c/あ_1',
                             'etag': 'a', 'size_bytes': 1}]),
                manifest_path=quote(u'/v1/a/c/あ_1')))
    def test_bogus_input_self_referential_last_segment(self):
        test_json_data = json.dumps([
            {'path': '/c/seg_1', 'etag': 'a', 'size_bytes': 1},
            {'path': '/c/seg_2', 'etag': 'a', 'size_bytes': 1},
            {'path': '/c/seg_3', 'etag': 'a', 'size_bytes': 1},
            {'path': '/c/the-manifest', 'etag': 'a', 'size_bytes': 1},
        ]).encode('ascii')
        self.assertEqual(
            "Index 3: manifest must not include itself as a segment\n",
            self._put_bogus_slo(
                test_json_data,
                manifest_path=quote('/v1/a/c/the-manifest')))
    def test_bogus_input_undersize_segment(self):
        self.assertEqual(
            "Index 1: too small; each segment "
            "must be at least 1 byte.\n"
            "Index 2: too small; each segment "
            "must be at least 1 byte.\n",
            self._put_bogus_slo(
                json.dumps([
                    {'path': u'/c/s1', 'etag': 'a', 'size_bytes': 1},
                    {'path': u'/c/s2', 'etag': 'b', 'size_bytes': 0},
                    {'path': u'/c/s3', 'etag': 'c', 'size_bytes': 0},
                    # No error for this one since size_bytes is unspecified
                    {'path': u'/c/s4', 'etag': 'd', 'size_bytes': None},
                    {'path': u'/c/s5', 'etag': 'e', 'size_bytes': 1000}])))
    def test_valid_input(self):
        data = json.dumps(
            [{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
              'size_bytes': 100}])
        self.assertEqual(
            '/cont/object',
            slo.parse_and_validate_input(data, '/v1/a/cont/man')[0]['path'])
        data = json.dumps(
            [{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
              'size_bytes': 100, 'range': '0-40'}])
        parsed = slo.parse_and_validate_input(data, '/v1/a/cont/man')
        self.assertEqual('/cont/object', parsed[0]['path'])
        self.assertEqual([(0, 40)], parsed[0]['range'].ranges)
        data = json.dumps(
            [{'path': '/cont/object', 'etag': 'etagoftheobjectsegment',
              'size_bytes': None, 'range': '0-40'}])
        parsed = slo.parse_and_validate_input(data, '/v1/a/cont/man')
        self.assertEqual('/cont/object', parsed[0]['path'])
        self.assertIsNone(parsed[0]['size_bytes'])
        self.assertEqual([(0, 40)], parsed[0]['range'].ranges)
    def test_container_listing(self):
        # SLO etags embedded in the hash's parameters should be surfaced
        # as a separate slo_etag key in container listings
        listing_json = json.dumps([{
            "bytes": 104857600,
            "content_type": "application/x-troff-me",
            "hash": "8de7b0b1551660da51d8d96a53b85531; this=that;"
            "slo_etag=dc9947c2b53a3f55fe20c1394268e216",
            "last_modified": "2018-07-12T03:14:39.532020",
            "name": "test.me"
        }]).encode('ascii')
        self.app.register(
            'GET', '/v1/a/c',
            swob.HTTPOk,
            {'Content-Type': 'application/json',
             'Content-Length': len(listing_json)},
            listing_json)
        req = Request.blank('/v1/a/c', method='GET')
        status, headers, body = self.call_slo(req)
        self.assertEqual(json.loads(body), [{
            "slo_etag": '"dc9947c2b53a3f55fe20c1394268e216"',
            "hash": "8de7b0b1551660da51d8d96a53b85531; this=that",
            "name": "test.me",
            "bytes": 104857600,
            "last_modified": "2018-07-12T03:14:39.532020",
            "content_type": "application/x-troff-me",
        }])
class TestSloPutManifest(SloTestCase):
def setUp(self):
super(TestSloPutManifest, self).setUp()
self.app.register(
'GET', '/', swob.HTTPOk, {}, b'passed')
self.app.register(
'PUT', '/', swob.HTTPOk, {}, b'passed')
self.app.register(
'HEAD', '/v1/AUTH_test/cont/missing-object',
swob.HTTPNotFound, {}, None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/object',
swob.HTTPOk,
{'Content-Length': '100', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/object2',
swob.HTTPOk,
{'Content-Length': '100', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/object\xe2\x99\xa1',
swob.HTTPOk,
{'Content-Length': '100', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/small_object',
swob.HTTPOk,
{'Content-Length': '10', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/cont/empty_object',
swob.HTTPOk,
{'Content-Length': '0', 'Etag': 'etagoftheobjectsegment'},
None)
self.app.register(
'HEAD', u'/v1/AUTH_test/cont/あ_1',
swob.HTTPOk,
{'Content-Length': '1', 'Etag': 'a'},
None)
self.app.register(
'PUT', '/v1/AUTH_test/c/man', swob.HTTPCreated,
{'Last-Modified': 'Fri, 01 Feb 2012 20:38:36 GMT'}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/c/man', swob.HTTPNoContent, {}, None)
self.app.register(
'HEAD', '/v1/AUTH_test/checktest/a_1',
swob.HTTPOk,
{'Content-Length': '1', 'Etag': 'a'},
None)
self.app.register(
'HEAD', '/v1/AUTH_test/checktest/badreq',
swob.HTTPBadRequest, {}, None)
self.app.register(
'HEAD', '/v1/AUTH_test/checktest/b_2',
swob.HTTPOk,
{'Content-Length': '2', 'Etag': 'b',
'Last-Modified': 'Fri, 01 Feb 2012 20:38:36 GMT'},
None)
_manifest_json = json.dumps(
[{'name': '/checktest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'}]).encode('ascii')
self.app.register(
'GET', '/v1/AUTH_test/checktest/slob',
swob.HTTPOk,
{'X-Static-Large-Object': 'true', 'Etag': 'slob-etag',
'Content-Type': 'cat/picture',
'Content-Length': len(_manifest_json)},
_manifest_json)
self.app.register(
'PUT', '/v1/AUTH_test/checktest/man_3', swob.HTTPCreated, {}, None)
    def test_put_manifest_too_quick_fail(self):
        """Reject manifest PUTs that fail cheap up-front checks.

        Covers: declared Content-Length over max_manifest_size, more
        segments than max_manifest_segments, the unsupported
        X-Copy-From header, and paths with too few components (which
        raise ValueError straight out of handle_multipart_put).
        """
        req = Request.blank('/v1/a/c/o?multipart-manifest=put', method='PUT')
        req.content_length = self.slo.max_manifest_size + 1
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '413 Request Entity Too Large')

        # zero allowed segments means any manifest is too long
        with patch.object(self.slo, 'max_manifest_segments', 0):
            req = Request.blank('/v1/a/c/o?multipart-manifest=put',
                                method='PUT', body=test_json_data)
            status, headers, body = self.call_slo(req)
            self.assertEqual(status, '413 Request Entity Too Large')

        # COPY-style PUTs of manifests are not supported
        req = Request.blank('/v1/a/c/o?multipart-manifest=put', method='PUT',
                            headers={'X-Copy-From': 'lala'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '405 Method Not Allowed')

        # we already validated that there are enough path segments in __call__
        for path in ('/', '/v1/', '/v1/a/', '/v1/a/c/'):
            req = Request.blank(
                path + '?multipart-manifest=put',
                environ={'REQUEST_METHOD': 'PUT'}, body=test_json_data)
            with self.assertRaises(ValueError):
                list(self.slo.handle_multipart_put(req, fake_start_response))

            req = Request.blank(
                path.rstrip('/') + '?multipart-manifest=put',
                environ={'REQUEST_METHOD': 'PUT'}, body=test_json_data)
            with self.assertRaises(ValueError):
                list(self.slo.handle_multipart_put(req, fake_start_response))
    def test_handle_multipart_put_success(self):
        """A valid manifest PUT decorates the backend request correctly.

        Verifies the middleware adds X-Static-Large-Object, the
        slo-etag/slo-size sysmeta, appends ``slo_etag`` to a
        pre-existing container-update override etag (preserving its
        params), and appends ``;swift_bytes=<total>`` to Content-Type.
        """
        override_header = 'X-Object-Sysmeta-Container-Update-Override-Etag'
        headers = {
            'Accept': 'test',
            override_header: '; params=are important',
        }
        req = Request.blank(
            '/v1/AUTH_test/c/man?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT'}, headers=headers,
            body=test_json_data)
        for h in ('X-Static-Large-Object', 'X-Object-Sysmeta-Slo-Etag',
                  'X-Object-Sysmeta-Slo-Size'):
            # Sanity
            self.assertNotIn(h, req.headers)

        status, headers, body = self.call_slo(req)
        # SLO etag is the md5 of the concatenated segment etags, quoted
        gen_etag = '"' + md5hex('etagoftheobjectsegment') + '"'
        self.assertIn(('Etag', gen_etag), headers)
        self.assertIn('X-Static-Large-Object', req.headers)
        self.assertEqual(req.headers['X-Static-Large-Object'], 'True')
        self.assertIn('Etag', req.headers)
        self.assertIn('X-Object-Sysmeta-Slo-Etag', req.headers)
        self.assertIn('X-Object-Sysmeta-Container-Update-Override-Etag',
                      req.headers)
        self.assertEqual(req.headers['X-Object-Sysmeta-Slo-Etag'],
                         gen_etag.strip('"'))
        self.assertEqual(
            req.headers['X-Object-Sysmeta-Container-Update-Override-Etag'],
            '%s; params=are important; slo_etag=%s' % (
                req.headers['Etag'], gen_etag.strip('"')))
        self.assertIn('X-Object-Sysmeta-Slo-Size', req.headers)
        self.assertEqual(req.headers['X-Object-Sysmeta-Slo-Size'], '100')
        self.assertIn('Content-Type', req.headers)
        self.assertTrue(
            req.headers['Content-Type'].endswith(';swift_bytes=100'),
            'Content-Type %r does not end with swift_bytes=100' %
            req.headers['Content-Type'])
    @patch('swift.common.middleware.slo.time')
    def test_handle_multipart_put_fast_heartbeat(self, mock_time):
        """heartbeat=on with fast segments: 202 wrapper, 201 in the body.

        The mocked clock never crosses a heartbeat period, so minimal
        whitespace padding is emitted before the real response.
        """
        mock_time.time.side_effect = [
            0,  # start time
            1,  # first segment's fast
            2,  # second segment's also fast!
        ]
        test_json_data = json.dumps([{'path': u'/cont/object\u2661',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100},
                                     {'path': '/cont/object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100}]).encode('ascii')
        req = Request.blank(
            '/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
            environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
            body=test_json_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual('202 Accepted', status)
        headers_found = [h.lower() for h, v in headers]
        # final etag only appears in the body, not the 202 headers
        self.assertNotIn('etag', headers_found)
        gen_etag = '"' + md5hex('etagoftheobjectsegment' * 2) + '"'
        self.assertTrue(body.startswith(b' \r\n\r\n'),
                        'Expected body to start with single space and two '
                        'blank lines; got %r' % body)
        self.assertIn(b'\nResponse Status: 201 Created\n', body)
        self.assertIn(b'\nResponse Body: \n', body)
        self.assertIn(('\nEtag: %s\n' % gen_etag).encode('ascii'), body)
        self.assertIn(b'\nLast Modified: Fri, 01 Feb 2012 20:38:36 GMT\n',
                      body)
    @patch('swift.common.middleware.slo.time')
    def test_handle_multipart_long_running_put_success(self, mock_time):
        """heartbeat=on with a slow segment: whitespace keepalives are
        sent, then the successful 201 result appears in the 202 body."""
        mock_time.time.side_effect = [
            0,  # start time
            1,  # first segment's fast
            20,  # second segment's slow
        ]
        test_json_data = json.dumps([{'path': u'/cont/object\u2661',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100},
                                     {'path': '/cont/object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100}]).encode('ascii')
        req = Request.blank(
            '/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
            environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
            body=test_json_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual('202 Accepted', status)
        headers_found = [h.lower() for h, v in headers]
        self.assertNotIn('etag', headers_found)
        gen_etag = '"' + md5hex('etagoftheobjectsegment' * 2) + '"'
        self.assertTrue(body.startswith(b' \r\n\r\n'),
                        'Expected body to start with two spaces and two '
                        'blank lines; got %r' % body)
        self.assertIn(b'\nResponse Status: 201 Created\n', body)
        self.assertIn(b'\nResponse Body: \n', body)
        self.assertIn(('\nEtag: %s\n' % gen_etag).encode('ascii'), body)
        self.assertIn(b'\nLast Modified: Fri, 01 Feb 2012 20:38:36 GMT\n',
                      body)
    @patch('swift.common.middleware.slo.time')
    def test_handle_multipart_long_running_put_success_json(self, mock_time):
        """Same as the plain-text heartbeat success case but with
        Accept: application/json — the trailing body parses as JSON."""
        mock_time.time.side_effect = [
            0,  # start time
            11,  # first segment's slow
            22,  # second segment's also slow
        ]
        test_json_data = json.dumps([{'path': u'/cont/object\u2661',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100},
                                     {'path': '/cont/object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100}]).encode('ascii')
        req = Request.blank(
            '/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'Accept': 'application/json'},
            body=test_json_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual('202 Accepted', status)
        headers_found = [h.lower() for h, v in headers]
        self.assertNotIn('etag', headers_found)
        gen_etag = '"' + md5hex('etagoftheobjectsegment' * 2) + '"'
        self.assertTrue(body.startswith(b' \r\n\r\n'),
                        'Expected body to start with three spaces and two '
                        'blank lines; got %r' % body)
        # leading whitespace padding is ignored by the JSON parser
        body = json.loads(body)
        self.assertEqual(body['Response Status'], '201 Created')
        self.assertEqual(body['Response Body'], '')
        self.assertEqual(body['Etag'], gen_etag)
        self.assertEqual(body['Last Modified'],
                         'Fri, 01 Feb 2012 20:38:36 GMT')
    @patch('swift.common.middleware.slo.time')
    def test_handle_multipart_long_running_put_failure(self, mock_time):
        """heartbeat=on with bad segments: the 202 body carries the 400
        result plus a per-segment error list, and no Etag/Last-Modified."""
        mock_time.time.side_effect = [
            0,  # start time
            1,  # first segment's fast
            20,  # second segment's slow
        ]
        # one missing segment, one with the wrong size
        test_json_data = json.dumps([{'path': u'/cont/missing-object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100},
                                     {'path': '/cont/object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 99}]).encode('ascii')
        req = Request.blank(
            '/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
            environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
            body=test_json_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual('202 Accepted', status)
        headers_found = [h.lower() for h, v in headers]
        self.assertNotIn('etag', headers_found)
        body = body.split(b'\n')
        self.assertEqual([b' \r', b'\r'], body[:2],
                         'Expected body to start with two spaces and two '
                         'blank lines; got %r' % b'\n'.join(body))
        self.assertIn(b'Response Status: 400 Bad Request', body[2:5])
        self.assertIn(b'Response Body: Bad Request', body)
        self.assertIn(b'The server could not comply with the request since it '
                      b'is either malformed or otherwise incorrect.', body)
        self.assertFalse(any(line.startswith(b'Etag: ') for line in body))
        self.assertFalse(any(line.startswith(b'Last Modified: ')
                             for line in body))
        # errors are listed at the end, in arbitrary order
        self.assertEqual(body[-4], b'Errors:')
        self.assertEqual(sorted(body[-3:-1]), [
            b'/cont/missing-object, 404 Not Found',
            b'/cont/object, Size Mismatch',
        ])
        self.assertEqual(body[-1], b'')
    @patch('swift.common.middleware.slo.time')
    def test_handle_multipart_long_running_put_failure_json(self, mock_time):
        """JSON flavor of the heartbeat failure case: 400 result and a
        structured per-segment Errors list in the 202 body."""
        mock_time.time.side_effect = [
            0,  # start time
            11,  # first segment's slow
            22,  # second segment's also slow
        ]
        # one segment has the wrong size, the other the wrong etag
        test_json_data = json.dumps([{'path': u'/cont/object\u2661',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 99},
                                     {'path': '/cont/object',
                                      'etag': 'some other etag',
                                      'size_bytes': 100}]).encode('ascii')
        req = Request.blank(
            '/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'Accept': 'application/json'},
            body=test_json_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual('202 Accepted', status)
        headers_found = [h.lower() for h, v in headers]
        self.assertNotIn('etag', headers_found)
        self.assertTrue(body.startswith(b' \r\n\r\n'),
                        'Expected body to start with three spaces and two '
                        'blank lines; got %r' % body)
        body = json.loads(body)
        self.assertEqual(body['Response Status'], '400 Bad Request')
        self.assertEqual(body['Response Body'], 'Bad Request\nThe server '
                         'could not comply with the request since it is '
                         'either malformed or otherwise incorrect.')
        self.assertNotIn('Etag', body)
        self.assertNotIn('Last Modified', body)
        # unicode paths come back percent-quoted in the error list
        self.assertEqual(sorted(body['Errors']), [
            ['/cont/object', 'Etag Mismatch'],
            [quote(u'/cont/object\u2661'.encode('utf8')).decode('ascii'),
             'Size Mismatch'],
        ])
    @patch('swift.common.middleware.slo.time')
    def test_handle_multipart_long_running_put_bad_etag_json(self, mock_time):
        """A client-supplied Etag that mismatches the computed manifest
        etag surfaces as 422 in the heartbeat JSON body (no Errors)."""
        mock_time.time.side_effect = [
            0,  # start time
            11,  # first segment's slow
            22,  # second segment's also slow
        ]
        test_json_data = json.dumps([{'path': u'/cont/object\u2661',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100},
                                     {'path': '/cont/object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100}]).encode('ascii')
        req = Request.blank(
            '/v1/AUTH_test/c/man?multipart-manifest=put&heartbeat=on',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'Accept': 'application/json', 'ETag': 'bad etag'},
            body=test_json_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual('202 Accepted', status)
        headers_found = [h.lower() for h, v in headers]
        self.assertNotIn('etag', headers_found)
        self.assertTrue(body.startswith(b' \r\n\r\n'),
                        'Expected body to start with three spaces and two '
                        'blank lines; got %r' % body)
        body = json.loads(body)
        self.assertEqual(body['Response Status'], '422 Unprocessable Entity')
        self.assertEqual('Unprocessable Entity\nUnable to process the '
                         'contained instructions', body['Response Body'])
        self.assertNotIn('Etag', body)
        self.assertNotIn('Last Modified', body)
        # segments themselves validated fine
        self.assertEqual(body['Errors'], [])
def test_manifest_put_no_etag_success(self):
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
resp = req.get_response(self.slo)
self.assertEqual(resp.status_int, 201)
def test_manifest_put_with_etag_success(self):
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
req.headers['Etag'] = md5hex('etagoftheobjectsegment')
resp = req.get_response(self.slo)
self.assertEqual(resp.status_int, 201)
def test_manifest_put_with_etag_with_quotes_success(self):
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
req.headers['Etag'] = '"%s"' % md5hex('etagoftheobjectsegment')
resp = req.get_response(self.slo)
self.assertEqual(resp.status_int, 201)
def test_manifest_put_bad_etag_fail(self):
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
req.headers['Etag'] = md5hex('NOTetagoftheobjectsegment')
resp = req.get_response(self.slo)
self.assertEqual(resp.status_int, 422)
def test_handle_multipart_put_disallow_empty_first_segment(self):
test_json_data = json.dumps([{'path': '/cont/small_object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 0},
{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100}]).encode('ascii')
req = Request.blank('/v1/a/c/o?multipart-manifest=put',
method='PUT', body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '400 Bad Request')
def test_handle_multipart_put_allow_empty_last_segment(self):
test_json_data = json.dumps([{'path': '/cont/object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 100},
{'path': '/cont/empty_object',
'etag': 'etagoftheobjectsegment',
'size_bytes': 0}]).encode('ascii')
req = Request.blank('/v1/AUTH_test/c/man?multipart-manifest=put',
method='PUT', body=test_json_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '201 Created')
    def test_handle_multipart_put_invalid_data(self):
        """Bad inline 'data' segment values raise an HTTP 400 exception."""
        def do_test(bad_data):
            # pair the bad inline-data segment with one valid segment
            test_json_data = json.dumps([{'path': '/cont/object',
                                          'etag': 'etagoftheobjectsegment',
                                          'size_bytes': 100},
                                         {'data': bad_data}]).encode('ascii')
            req = Request.blank('/v1/a/c/o', body=test_json_data)
            with self.assertRaises(HTTPException) as catcher:
                self.slo.handle_multipart_put(req, fake_start_response)
            self.assertEqual(catcher.exception.status_int, 400)

        do_test('invalid')  # insufficient padding
        do_test(12345)
        do_test(0)
        do_test(True)
        do_test(False)
        do_test(None)
        do_test({})
        do_test([])
        # Empties are no good, either
        do_test('')
        do_test('====')
    def test_handle_multipart_put_success_unicode(self):
        """Unicode segment paths are sent to the backend UTF-8 encoded,
        and the manifest path itself is left untouched."""
        test_json_data = json.dumps([{'path': u'/cont/object\u2661',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100}]).encode('ascii')
        req = Request.blank(
            '/v1/AUTH_test/c/man?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
            body=test_json_data)
        self.assertNotIn('X-Static-Large-Object', req.headers)
        self.call_slo(req)
        self.assertIn('X-Static-Large-Object', req.headers)
        self.assertEqual(req.environ['PATH_INFO'], '/v1/AUTH_test/c/man')
        # \u2661 (WHITE HEART SUIT) as raw UTF-8 bytes in the HEAD path
        self.assertIn(('HEAD', '/v1/AUTH_test/cont/object\xe2\x99\xa1'),
                      self.app.calls)
def test_handle_multipart_put_no_xml(self):
req = Request.blank(
'/test_good/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, headers={'Accept': 'test'},
body=test_xml_data)
no_xml = list(self.slo(req.environ, fake_start_response))
self.assertEqual(no_xml, [b'Manifest must be valid JSON.\n'])
def test_handle_multipart_put_bad_data(self):
bad_data = json.dumps([{'path': '/cont/object',
'etag': 'etagoftheobj',
'size_bytes': 'lala'}])
req = Request.blank(
'/test_good/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=bad_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '400 Bad Request')
self.assertIn(b'invalid size_bytes', body)
for bad_data in [
json.dumps([{'path': '/cont', 'etag': 'etagoftheobj',
'size_bytes': 100}]),
json.dumps('asdf'), json.dumps(None), json.dumps(5),
'not json', '1234', '', json.dumps({'path': None}),
json.dumps([{'path': '/cont/object', 'etag': None,
'size_bytes': 12}]),
json.dumps([{'path': '/cont/object', 'etag': 'asdf',
'size_bytes': 'sd'}]),
json.dumps([{'path': 12, 'etag': 'etagoftheobj',
'size_bytes': 100}]),
json.dumps([{'path': u'/cont/object\u2661',
'etag': 'etagoftheobj', 'size_bytes': 100}]),
json.dumps([{'path': 12, 'size_bytes': 100}]),
json.dumps([{'path': 12, 'size_bytes': 100}]),
json.dumps([{'path': '/c/o', 'etag': 123, 'size_bytes': 100}]),
json.dumps([{'path': None, 'etag': 'etagoftheobj',
'size_bytes': 100}])]:
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=bad_data)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '400 Bad Request')
req = Request.blank(
'/v1/AUTH_test/c/man?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=None)
status, headers, body = self.call_slo(req)
self.assertEqual(status, '411 Length Required')
def test_handle_multipart_put_check_data(self):
good_data = json.dumps(
[{'path': '/checktest/a_1', 'etag': 'a', 'size_bytes': '1'},
{'path': '/checktest/b_2', 'etag': 'b', 'size_bytes': '2'}])
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=good_data)
status, headers, body = self.call_slo(req)
self.assertEqual(self.app.call_count, 3)
# go behind SLO's back and see what actually got stored
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=get',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_app(req)
headers = dict(headers)
manifest_data = json.loads(body)
self.assertTrue(headers['Content-Type'].endswith(';swift_bytes=3'))
self.assertEqual(len(manifest_data), 2)
self.assertEqual(manifest_data[0]['hash'], 'a')
self.assertEqual(manifest_data[0]['bytes'], 1)
self.assertTrue(
not manifest_data[0]['last_modified'].startswith('2012'))
self.assertTrue(manifest_data[1]['last_modified'].startswith('2012'))
    def test_handle_multipart_put_check_data_bad(self):
        """All segment validation failures are collected and reported
        together: size mismatch, etag mismatch, backend 400, and a
        sub-SLO whose declared etag/size don't match."""
        bad_data = json.dumps(
            [{'path': '/checktest/a_1', 'etag': 'a', 'size_bytes': '2'},
             {'path': '/checktest/badreq', 'etag': 'a', 'size_bytes': '1'},
             {'path': '/checktest/b_2', 'etag': 'not-b', 'size_bytes': '2'},
             {'path': '/checktest/slob', 'etag': 'not-slob',
              'size_bytes': '12345'}])
        req = Request.blank(
            '/v1/AUTH_test/checktest/man?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'Accept': 'application/json'},
            body=bad_data)

        status, headers, body = self.call_slo(req)
        # 4 segment checks (the sub-SLO costs an extra GET) — no PUT
        self.assertEqual(self.app.call_count, 5)
        errors = json.loads(body)['Errors']

        self.assertEqual([
            [u'/checktest/a_1', u'Size Mismatch'],
            [u'/checktest/b_2', u'Etag Mismatch'],
            [u'/checktest/badreq', u'400 Bad Request'],
            [u'/checktest/slob', u'Etag Mismatch'],
            [u'/checktest/slob', u'Size Mismatch'],
        ], sorted(errors))
    def test_handle_multipart_put_skip_size_check(self):
        """size_bytes may be None or omitted; sizes come from HEADs."""
        good_data = json.dumps([
            # Explicit None will skip it
            {'path': '/checktest/a_1', 'etag': 'a', 'size_bytes': None},
            # ...as will omitting it entirely
            {'path': '/checktest/b_2', 'etag': 'b'}])
        req = Request.blank(
            '/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT'}, body=good_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual(self.app.call_count, 3)

        # Check that we still populated the manifest properly from our HEADs
        req = Request.blank(
            '/v1/AUTH_test/checktest/man_3?multipart-manifest=get',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_app(req)
        manifest_data = json.loads(body)
        self.assertEqual(1, manifest_data[0]['bytes'])
        self.assertEqual(2, manifest_data[1]['bytes'])
    def test_handle_multipart_put_skip_size_check_still_uses_min_size(self):
        """Skipping the size check doesn't waive the minimum segment
        size: a zero-byte non-final segment is still rejected."""
        test_json_data = json.dumps([{'path': '/cont/empty_object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': None},
                                     {'path': '/cont/small_object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100}]).encode('ascii')
        req = Request.blank('/v1/AUTH_test/c/o?multipart-manifest=put',
                            method='PUT', body=test_json_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '400 Bad Request')
        self.assertIn(b'Too small; each segment must be at least 1 byte', body)
    def test_handle_multipart_put_skip_size_check_no_early_bailout(self):
        """Validation keeps going after the first failure so every
        problem shows up in one response."""
        # The first is too small (it's 0 bytes), and
        # the second has a bad etag. Make sure both errors show up in
        # the response.
        test_json_data = json.dumps([{'path': '/cont/empty_object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': None},
                                     {'path': '/cont/object2',
                                      'etag': 'wrong wrong wrong',
                                      'size_bytes': 100}]).encode('ascii')
        req = Request.blank('/v1/AUTH_test/c/o?multipart-manifest=put',
                            method='PUT', body=test_json_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '400 Bad Request')
        self.assertIn(b'at least 1 byte', body)
        self.assertIn(b'Etag Mismatch', body)
    def test_handle_multipart_put_skip_etag_check(self):
        """etag may be None or omitted; hashes come from the HEADs."""
        good_data = json.dumps([
            # Explicit None will skip it
            {'path': '/checktest/a_1', 'etag': None, 'size_bytes': 1},
            # ...as will omitting it entirely
            {'path': '/checktest/b_2', 'size_bytes': 2}])
        req = Request.blank(
            '/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT'}, body=good_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual(self.app.call_count, 3)

        # Check that we still populated the manifest properly from our HEADs
        req = Request.blank(
            '/v1/AUTH_test/checktest/man_3?multipart-manifest=get',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_app(req)
        manifest_data = json.loads(body)
        self.assertEqual('a', manifest_data[0]['hash'])
        self.assertEqual('b', manifest_data[1]['hash'])
    def test_handle_multipart_put_with_manipulator_callback(self):
        """A swift.callback.slo_manifest_hook may rewrite the manifest;
        here it interleaves inline-data segments around every entry."""
        def data_inserter(manifest):
            # insert back-to-front so earlier indexes stay valid
            for i in range(len(manifest), -1, -1):
                manifest.insert(i, {'data': 'WA=='})

        good_data = json.dumps([
            {'path': '/checktest/a_1'},
            {'path': '/checktest/b_2'}])
        req = Request.blank(
            '/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT',
                     'swift.callback.slo_manifest_hook': data_inserter},
            body=good_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual(self.app.call_count, 3)

        # Check that we still populated the manifest properly from our HEADs
        req = Request.blank(
            '/v1/AUTH_test/checktest/man_3?multipart-manifest=get',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_app(req)
        manifest_data = json.loads(body)
        self.assertEqual([
            {k: v for k, v in item.items()
             if k in ('name', 'bytes', 'hash', 'data')}
            for item in manifest_data
        ], [
            {'data': 'WA=='},
            {'name': '/checktest/a_1', 'bytes': 1, 'hash': 'a'},
            {'data': 'WA=='},
            {'name': '/checktest/b_2', 'bytes': 2, 'hash': 'b'},
            {'data': 'WA=='},
        ])
    def test_handle_multipart_put_with_validator_callback(self):
        """Errors returned by the slo_manifest_hook callback fail the
        PUT and are echoed per-segment in the 400 body."""
        def complainer(manifest):
            # reject every segment with the same complaint
            return [(item['name'], "Don't wanna") for item in manifest]

        good_data = json.dumps([
            {'path': '/checktest/a_1'},
            {'path': '/checktest/b_2'}])
        req = Request.blank(
            '/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT',
                     'swift.callback.slo_manifest_hook': complainer},
            body=good_data)
        status, headers, body = self.call_slo(req)
        # only the two segment HEADs happened; no manifest PUT
        self.assertEqual(self.app.call_count, 2)
        self.assertEqual(status, '400 Bad Request')

        body = body.split(b'\n')
        self.assertIn(b"/checktest/a_1, Don't wanna", body)
        self.assertIn(b"/checktest/b_2, Don't wanna", body)
def test_handle_unsatisfiable_ranges(self):
bad_data = json.dumps(
[{'path': '/checktest/a_1', 'etag': None,
'size_bytes': None, 'range': '1-'}])
req = Request.blank(
'/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
environ={'REQUEST_METHOD': 'PUT'}, body=bad_data)
status, headers, body = self.call_slo(req)
self.assertEqual('400 Bad Request', status)
self.assertIn(b"Unsatisfiable Range", body)
    def test_handle_multipart_put_success_conditional(self):
        """If-None-Match applies to the manifest PUT, not the segment
        HEADs the middleware issues while validating."""
        test_json_data = json.dumps([{'path': u'/cont/object',
                                      'etag': 'etagoftheobjectsegment',
                                      'size_bytes': 100}]).encode('ascii')
        req = Request.blank(
            '/v1/AUTH_test/c/man?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT'}, headers={'If-None-Match': '*'},
            body=test_json_data)
        status, headers, body = self.call_slo(req)
        self.assertEqual(('201 Created', b''), (status, body))
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/cont/object'),
            ('PUT', '/v1/AUTH_test/c/man?multipart-manifest=put'),
        ], self.app.calls)
        # HEAD shouldn't be conditional
        self.assertNotIn('If-None-Match', self.app.headers[0])
        # But the PUT should be
        self.assertIn('If-None-Match', self.app.headers[1])
        self.assertEqual('*', self.app.headers[1]['If-None-Match'])
    def test_handle_single_ranges(self):
        """Ranged segments are validated with one HEAD per distinct
        segment, normalized to absolute ranges in the stored manifest,
        and folded (with their ranges) into the computed SLO etag."""
        good_data = json.dumps(
            [{'path': '/checktest/a_1', 'etag': None,
              'size_bytes': None, 'range': '0-0'},
             {'path': '/checktest/b_2', 'etag': None,
              'size_bytes': 2, 'range': '-1'},
             {'path': '/checktest/b_2', 'etag': None,
              'size_bytes': 2, 'range': '0-0'},
             {'path': '/checktest/a_1', 'etag': None,
              'size_bytes': None},
             {'path': '/cont/object', 'etag': None,
              'size_bytes': None, 'range': '10-40'}])
        override_header = 'X-Object-Sysmeta-Container-Update-Override-Etag'
        req = Request.blank(
            '/v1/AUTH_test/checktest/man_3?multipart-manifest=put',
            environ={'REQUEST_METHOD': 'PUT'}, body=good_data,
            headers={override_header: 'my custom etag'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(('201 Created', b''), (status, body))
        # etag folds each segment's hash plus any ':first-last;' range
        expected_etag = '"%s"' % md5hex(
            'ab:1-1;b:0-0;aetagoftheobjectsegment:10-40;')
        self.assertEqual(expected_etag, dict(headers)['Etag'])
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/checktest/a_1'),  # Only once!
            ('HEAD', '/v1/AUTH_test/checktest/b_2'),  # Only once!
            ('HEAD', '/v1/AUTH_test/cont/object'),
        ], sorted(self.app.calls[:-1]))
        self.assertEqual(
            ('PUT', '/v1/AUTH_test/checktest/man_3?multipart-manifest=put'),
            self.app.calls[-1])
        self.assertEqual(
            'my custom etag; slo_etag=%s' % expected_etag.strip('"'),
            self.app.headers[-1].get(override_header))

        # Check that we still populated the manifest properly from our HEADs
        req = Request.blank(
            '/v1/AUTH_test/checktest/man_3?multipart-manifest=get',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_app(req)
        manifest_data = json.loads(body)
        self.assertEqual(len(manifest_data), 5)

        self.assertEqual('a', manifest_data[0]['hash'])
        # a full-coverage range ('0-0' of a 1-byte object) is dropped
        self.assertNotIn('range', manifest_data[0])

        self.assertEqual('b', manifest_data[1]['hash'])
        # suffix range '-1' normalized to absolute '1-1'
        self.assertEqual('1-1', manifest_data[1]['range'])

        self.assertEqual('b', manifest_data[2]['hash'])
        self.assertEqual('0-0', manifest_data[2]['range'])

        self.assertEqual('a', manifest_data[3]['hash'])
        self.assertNotIn('range', manifest_data[3])

        self.assertEqual('etagoftheobjectsegment', manifest_data[4]['hash'])
        self.assertEqual('10-40', manifest_data[4]['range'])
class TestSloDeleteManifest(SloTestCase):
def setUp(self):
super(TestSloDeleteManifest, self).setUp()
_submanifest_data = json.dumps(
[{'name': '/deltest/b_2', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest/c_3', 'hash': 'b', 'bytes': '2'}])
_submanifest_data = _submanifest_data.encode('ascii')
self.app.register(
'GET', '/v1/AUTH_test/deltest/man_404',
swob.HTTPNotFound, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/man',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/gone', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest/b_2', 'hash': 'b', 'bytes': '2'}]).
encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/man',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/man-all-there',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/b_2', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest/c_3', 'hash': 'b', 'bytes': '2'}]).
encode('ascii'))
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode',
swob.HTTPOk, {}, None)
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode/deltest', swob.HTTPOk, {
'X-Container-Read': 'diff read',
'X-Container-Write': 'diff write',
}, None)
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83', swob.HTTPOk, {
'X-Container-Read': 'same read',
'X-Container-Write': 'same write',
}, None)
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode/deltest/man-all-there',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([
{'name': u'/\N{SNOWMAN}/b_2', 'hash': 'a', 'bytes': '1'},
{'name': u'/\N{SNOWMAN}/c_3', 'hash': 'b', 'bytes': '2'},
]).encode('ascii'))
self.app.register(
'GET', '/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83/same-container',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([
{'name': u'/\N{SNOWMAN}/b_2', 'hash': 'a', 'bytes': '1'},
{'name': u'/\N{SNOWMAN}/c_3', 'hash': 'b', 'bytes': '2'},
]).encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/man-all-there',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/gone',
swob.HTTPNotFound, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/a_1',
swob.HTTPOk, {'Content-Length': '1'}, 'a')
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/a_1',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/b_2',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/c_3',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/d_3',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test-un\xc3\xafcode/deltest/man-all-there',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE',
'/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83/same-container',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83/b_2',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test-un\xc3\xafcode/\xe2\x98\x83/c_3',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-with-submanifest',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/a_1',
'hash': 'a', 'bytes': '1'},
{'name': '/deltest/submanifest', 'sub_slo': True,
'hash': 'submanifest-etag',
'bytes': len(_submanifest_data)},
{'name': '/deltest/d_3',
'hash': 'd', 'bytes': '3'}]).encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/manifest-with-submanifest',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/submanifest',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
_submanifest_data)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/submanifest',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-missing-submanifest',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/a_1', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest/missing-submanifest',
'hash': 'a', 'bytes': '2', 'sub_slo': True},
{'name': '/deltest/d_3', 'hash': 'd', 'bytes': '3'}]).
encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/manifest-missing-submanifest',
swob.HTTPNoContent, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/missing-submanifest',
swob.HTTPNotFound, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-badjson',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
b"[not {json (at ++++all")
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-with-unauth-segment',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/a_1', 'hash': 'a', 'bytes': '1'},
{'name': '/deltest-unauth/q_17',
'hash': '11', 'bytes': '17'}]).encode('ascii'))
self.app.register(
'DELETE', '/v1/AUTH_test/deltest/manifest-with-unauth-segment',
swob.HTTPNoContent, {}, None)
self.app.register(
'DELETE', '/v1/AUTH_test/deltest-unauth/q_17',
swob.HTTPUnauthorized, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-with-too-many-segs',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/a_1',
'hash': 'a', 'bytes': '1'},
{'name': '/deltest/multi-submanifest', 'sub_slo': True,
'hash': 'submanifest-etag',
'bytes': len(_submanifest_data)},
{'name': '/deltest/b_2',
'hash': 'b', 'bytes': '1'},
{'name': '/deltest/c_3',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/d_4',
'hash': 'b', 'bytes': '1'},
{'name': '/deltest/e_5',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/f_6',
'hash': 'b', 'bytes': '1'},
{'name': '/deltest/g_8',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/g_8',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/h_9',
'hash': 'd', 'bytes': '3'}]))
def test_handle_multipart_delete_man(self):
req = Request.blank(
'/v1/AUTH_test/deltest/man',
environ={'REQUEST_METHOD': 'DELETE'})
self.slo(req.environ, fake_start_response)
self.assertEqual(self.app.call_count, 1)
    def test_handle_multipart_delete_bad_utf8(self):
        """Invalid UTF-8 in the object name reports 412 inside the
        bulk-delete's 200 JSON body."""
        req = Request.blank(
            b'/v1/AUTH_test/deltest/man\xff\xfe?multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        resp_data = json.loads(body)
        self.assertEqual(resp_data['Response Status'],
                         '412 Precondition Failed')
    def test_handle_multipart_delete_whole_404(self):
        """A 404 on the manifest itself is reported as Number Not Found,
        not an error, and no segment DELETEs are attempted."""
        req = Request.blank(
            '/v1/AUTH_test/deltest/man_404?multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        resp_data = json.loads(body)
        # only the initial manifest GET should have happened
        self.assertEqual(
            self.app.calls,
            [('GET',
              '/v1/AUTH_test/deltest/man_404?multipart-manifest=get')])
        self.assertEqual(resp_data['Response Status'], '200 OK')
        self.assertEqual(resp_data['Response Body'], '')
        self.assertEqual(resp_data['Number Deleted'], 0)
        self.assertEqual(resp_data['Number Not Found'], 1)
        self.assertEqual(resp_data['Errors'], [])
    def test_handle_multipart_delete_segment_404(self):
        """Segments that 404 during deletion count toward Number Not Found;
        the remaining segments and the manifest are still deleted."""
        self.app.can_ignore_range = True
        req = Request.blank(
            '/v1/AUTH_test/deltest/man?multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        resp_data = json.loads(body)
        # segment deletes may run concurrently, so compare as a set
        self.assertEqual(
            set(self.app.calls),
            set([('GET',
                  '/v1/AUTH_test/deltest/man?multipart-manifest=get'),
                 ('DELETE', '/v1/AUTH_test/deltest/gone'),
                 ('DELETE', '/v1/AUTH_test/deltest/b_2'),
                 ('DELETE', '/v1/AUTH_test/deltest/man')]))
        self.assertEqual(resp_data['Response Status'], '200 OK')
        self.assertEqual(resp_data['Number Deleted'], 2)
        self.assertEqual(resp_data['Number Not Found'], 1)
def test_handle_multipart_delete_whole(self):
self.app.can_ignore_range = True
req = Request.blank(
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE'})
self.call_slo(req)
self.assertEqual(set(self.app.calls), set([
('GET',
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/b_2'),
('DELETE', '/v1/AUTH_test/deltest/c_3'),
('DELETE', ('/v1/AUTH_test/deltest/man-all-there'))]))
    def test_handle_multipart_delete_whole_old_swift(self):
        """If the backend ignores X-Backend-Ignore-Range-If-Metadata-Present
        (pre-2.24.0 behavior), the first ranged GET returns a truncated
        manifest and SLO retries with a full, un-ranged GET."""
        # behave like pre-2.24.0 swift; initial GET will return just one byte
        self.app.can_ignore_range = False
        req = Request.blank(
            '/v1/AUTH_test/deltest/man-all-there?multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE'})
        self.call_slo(req)
        # first GET carries the Range + ignore-range hint; the retry drops
        # both headers to fetch the whole manifest
        self.assertEqual(self.app.calls_with_headers[:2], [
            ('GET',
             '/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get',
             {'Host': 'localhost:80',
              'User-Agent': 'Mozzarella Foxfire MultipartDELETE',
              'Range': 'bytes=-1',
              'X-Backend-Ignore-Range-If-Metadata-Present':
              'X-Static-Large-Object',
              'X-Backend-Storage-Policy-Index': '2',
              'Content-Length': '0'}),
            ('GET',
             '/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get',
             {'Host': 'localhost:80',
              'User-Agent': 'Mozzarella Foxfire MultipartDELETE',
              'X-Backend-Storage-Policy-Index': '2',
              'Content-Length': '0'}),
        ])
        self.assertEqual(set(self.app.calls), set([
            ('GET',
             '/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'),
            ('DELETE', '/v1/AUTH_test/deltest/b_2'),
            ('DELETE', '/v1/AUTH_test/deltest/c_3'),
            ('DELETE', ('/v1/AUTH_test/deltest/man-all-there'))]))
def test_handle_multipart_delete_non_ascii(self):
self.app.can_ignore_range = True
unicode_acct = u'AUTH_test-un\u00efcode'
wsgi_acct = bytes_to_wsgi(unicode_acct.encode('utf-8'))
req = Request.blank(
'/v1/%s/deltest/man-all-there?'
'multipart-manifest=delete' % wsgi_acct,
environ={'REQUEST_METHOD': 'DELETE'})
status, _, body = self.call_slo(req)
self.assertEqual('200 OK', status)
lines = body.split(b'\n')
for l in lines:
parts = l.split(b':')
if len(parts) == 1:
continue
key, value = parts
if key == 'Response Status':
delete_status = int(value.split()[0])
self.assertEqual(200, delete_status)
self.assertEqual(set(self.app.calls), set([
('GET',
'/v1/%s/deltest/man-all-there'
'?multipart-manifest=get' % wsgi_acct),
('DELETE', '/v1/%s/\xe2\x98\x83/b_2' % wsgi_acct),
('DELETE', '/v1/%s/\xe2\x98\x83/c_3' % wsgi_acct),
('DELETE', ('/v1/%s/deltest/man-all-there' % wsgi_acct))]))
    def test_handle_multipart_delete_nested(self):
        """Deleting a manifest that references a sub-SLO recurses into the
        submanifest, deleting its segments and the submanifest itself."""
        self.app.can_ignore_range = True
        req = Request.blank(
            '/v1/AUTH_test/deltest/manifest-with-submanifest?' +
            'multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE'})
        self.call_slo(req)
        self.assertEqual(
            set(self.app.calls),
            {('GET', '/v1/AUTH_test/deltest/' +
              'manifest-with-submanifest?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/deltest/' +
              'submanifest?multipart-manifest=get'),
             ('DELETE', '/v1/AUTH_test/deltest/a_1'),
             ('DELETE', '/v1/AUTH_test/deltest/b_2'),
             ('DELETE', '/v1/AUTH_test/deltest/c_3'),
             ('DELETE', '/v1/AUTH_test/deltest/submanifest'),
             ('DELETE', '/v1/AUTH_test/deltest/d_3'),
             ('DELETE', '/v1/AUTH_test/deltest/manifest-with-submanifest')})
    def test_handle_multipart_delete_nested_too_many_segments(self):
        """Exceeding max_manifest_segments while buffering nested segments
        produces a 400 in the bulk-delete result."""
        self.app.can_ignore_range = True
        req = Request.blank(
            '/v1/AUTH_test/deltest/manifest-with-too-many-segs?' +
            'multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        # shrink the limit so the registered 10-segment manifest trips it
        with patch.object(self.slo, 'max_manifest_segments', 1):
            status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        resp_data = json.loads(body)
        self.assertEqual(resp_data['Response Status'], '400 Bad Request')
        self.assertEqual(resp_data['Response Body'],
                         'Too many buffered slo segments to delete.')
    def test_handle_multipart_delete_nested_404(self):
        """A missing submanifest is counted as Not Found; the rest of the
        tree (segments and top-level manifest) is still deleted."""
        self.app.can_ignore_range = True
        req = Request.blank(
            '/v1/AUTH_test/deltest/manifest-missing-submanifest' +
            '?multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        resp_data = json.loads(body)
        self.assertEqual(set(self.app.calls), {
            ('GET', '/v1/AUTH_test/deltest/' +
             'manifest-missing-submanifest?multipart-manifest=get'),
            ('DELETE', '/v1/AUTH_test/deltest/a_1'),
            ('GET', '/v1/AUTH_test/deltest/' +
             'missing-submanifest?multipart-manifest=get'),
            ('DELETE', '/v1/AUTH_test/deltest/d_3'),
            ('DELETE', '/v1/AUTH_test/deltest/manifest-missing-submanifest'),
        })
        self.assertEqual(resp_data['Response Status'], '200 OK')
        self.assertEqual(resp_data['Response Body'], '')
        self.assertEqual(resp_data['Number Deleted'], 3)
        self.assertEqual(resp_data['Number Not Found'], 1)
        self.assertEqual(resp_data['Errors'], [])
    def test_handle_multipart_delete_nested_401(self):
        """A 401 while fetching a submanifest is surfaced in the Errors
        list and fails the overall bulk delete with 400."""
        self.app.can_ignore_range = True
        # override the submanifest registration to reject the GET
        self.app.register(
            'GET', '/v1/AUTH_test/deltest/submanifest',
            swob.HTTPUnauthorized, {}, None)
        req = Request.blank(
            ('/v1/AUTH_test/deltest/manifest-with-submanifest' +
             '?multipart-manifest=delete'),
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        resp_data = json.loads(body)
        self.assertEqual(resp_data['Response Status'], '400 Bad Request')
        self.assertEqual(resp_data['Errors'],
                         [['/deltest/submanifest', '401 Unauthorized']])
    def test_handle_multipart_delete_nested_500(self):
        """A 500 while fetching a submanifest is reported as a generic
        load failure and fails the overall bulk delete with 400."""
        self.app.can_ignore_range = True
        # override the submanifest registration to error out the GET
        self.app.register(
            'GET', '/v1/AUTH_test/deltest/submanifest',
            swob.HTTPServerError, {}, None)
        req = Request.blank(
            ('/v1/AUTH_test/deltest/manifest-with-submanifest' +
             '?multipart-manifest=delete'),
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        resp_data = json.loads(body)
        self.assertEqual(resp_data['Response Status'], '400 Bad Request')
        self.assertEqual(resp_data['Errors'],
                         [['/deltest/submanifest',
                           'Unable to load SLO manifest or segment.']])
    def test_handle_multipart_delete_not_a_manifest(self):
        """Targeting a plain object errors out cleanly ('Not an SLO
        manifest') without leaving the response body unread."""
        self.app.can_ignore_range = True
        req = Request.blank(
            '/v1/AUTH_test/deltest/a_1?multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        resp_data = json.loads(body)
        self.assertEqual(
            self.app.calls,
            [('GET', '/v1/AUTH_test/deltest/a_1?multipart-manifest=get')])
        self.assertEqual(resp_data['Response Status'], '400 Bad Request')
        self.assertEqual(resp_data['Response Body'], '')
        self.assertEqual(resp_data['Number Deleted'], 0)
        self.assertEqual(resp_data['Number Not Found'], 0)
        self.assertEqual(resp_data['Errors'],
                         [['/deltest/a_1', 'Not an SLO manifest']])
        # the GET response body must be drained/closed, not abandoned
        self.assertFalse(self.app.unread_requests)
    def test_handle_multipart_delete_bad_json(self):
        """A manifest whose body is not valid JSON yields an 'Unable to
        load SLO manifest' error and no deletions."""
        self.app.can_ignore_range = True
        req = Request.blank(
            '/v1/AUTH_test/deltest/manifest-badjson?multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        resp_data = json.loads(body)
        self.assertEqual(self.app.calls,
                         [('GET', '/v1/AUTH_test/deltest/' +
                           'manifest-badjson?multipart-manifest=get')])
        self.assertEqual(resp_data['Response Status'], '400 Bad Request')
        self.assertEqual(resp_data['Response Body'], '')
        self.assertEqual(resp_data['Number Deleted'], 0)
        self.assertEqual(resp_data['Number Not Found'], 0)
        self.assertEqual(resp_data['Errors'],
                         [['/deltest/manifest-badjson',
                           'Unable to load SLO manifest']])
    def test_handle_multipart_delete_401(self):
        """A 401 on one segment is recorded in Errors while the other
        segments and the manifest are still deleted."""
        self.app.can_ignore_range = True
        req = Request.blank(
            '/v1/AUTH_test/deltest/manifest-with-unauth-segment' +
            '?multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        resp_data = json.loads(body)
        self.assertEqual(
            set(self.app.calls),
            set([('GET', '/v1/AUTH_test/deltest/' +
                  'manifest-with-unauth-segment?multipart-manifest=get'),
                 ('DELETE', '/v1/AUTH_test/deltest/a_1'),
                 ('DELETE', '/v1/AUTH_test/deltest-unauth/q_17'),
                 ('DELETE', '/v1/AUTH_test/deltest/' +
                  'manifest-with-unauth-segment')]))
        self.assertEqual(resp_data['Response Status'], '400 Bad Request')
        self.assertEqual(resp_data['Response Body'], '')
        self.assertEqual(resp_data['Number Deleted'], 2)
        self.assertEqual(resp_data['Number Not Found'], 0)
        self.assertEqual(resp_data['Errors'],
                         [['/deltest-unauth/q_17', '401 Unauthorized']])
def test_handle_multipart_delete_client_content_type(self):
self.app.can_ignore_range = True
req = Request.blank(
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE', 'CONTENT_TYPE': 'foo/bar'},
headers={'Accept': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
resp_data = json.loads(body)
self.assertEqual(resp_data["Number Deleted"], 3)
self.assertEqual(set(self.app.calls), set([
('GET',
'/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'),
('DELETE', '/v1/AUTH_test/deltest/b_2'),
('DELETE', '/v1/AUTH_test/deltest/c_3'),
('DELETE', '/v1/AUTH_test/deltest/man-all-there')]))
def test_handle_async_delete_whole_404(self):
self.slo.allow_async_delete = True
req = Request.blank(
'/v1/AUTH_test/deltest/man_404?async=t&multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual('404 Not Found', status)
self.assertEqual(
self.app.calls,
[('GET',
'/v1/AUTH_test/deltest/man_404?multipart-manifest=get')])
    def test_handle_async_delete_turned_off(self):
        """With allow_async_delete disabled, ?async=on falls back to the
        normal synchronous segment-by-segment delete."""
        self.app.can_ignore_range = True
        self.slo.allow_async_delete = False
        req = Request.blank(
            '/v1/AUTH_test/deltest/man-all-there?'
            'multipart-manifest=delete&async=on&heartbeat=on',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'Accept': 'application/json'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        resp_data = json.loads(body)
        self.assertEqual(resp_data["Number Deleted"], 3)
        self.assertEqual(set(self.app.calls), set([
            ('GET',
             '/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'),
            ('DELETE', '/v1/AUTH_test/deltest/b_2'),
            ('DELETE', '/v1/AUTH_test/deltest/c_3'),
            ('DELETE', '/v1/AUTH_test/deltest/man-all-there')]))
    def test_handle_async_delete_whole(self):
        """Async delete queues the segments into the expirer container via
        a private UPDATE, deletes only the manifest, and returns 204."""
        self.app.can_ignore_range = True
        self.slo.allow_async_delete = True
        now = Timestamp(time.time())
        exp_obj_cont = get_expirer_container(
            int(now), 86400, 'AUTH_test', 'deltest', 'man-all-there')
        self.app.register(
            'UPDATE', '/v1/.expiring_objects/%s' % exp_obj_cont,
            swob.HTTPNoContent, {}, None)
        req = Request.blank(
            '/v1/AUTH_test/deltest/man-all-there'
            '?async=true&multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE'})
        # pin Timestamp.now so the expirer-row names are predictable
        with patch('swift.common.utils.Timestamp.now', return_value=now):
            status, headers, body = self.call_slo(req)
        self.assertEqual('204 No Content', status)
        self.assertEqual(b'', body)
        self.assertEqual(self.app.calls, [
            ('GET',
             '/v1/AUTH_test/deltest/man-all-there?multipart-manifest=get'),
            ('UPDATE', '/v1/.expiring_objects/%s'
             '?async=true&multipart-manifest=delete' % exp_obj_cont),
            ('DELETE', '/v1/AUTH_test/deltest/man-all-there'
             '?async=true&multipart-manifest=delete'),
        ])
        # the UPDATE (calls index 1) must carry the private-method headers
        for header, expected in (
                ('Content-Type', 'application/json'),
                ('X-Backend-Storage-Policy-Index', '0'),
                ('X-Backend-Allow-Private-Methods', 'True'),
        ):
            self.assertIn(header, self.app.calls_with_headers[1].headers)
            value = self.app.calls_with_headers[1].headers[header]
            msg = 'Expected %s header to be %r, not %r'
            self.assertEqual(value, expected, msg % (header, expected, value))
        # UPDATE body is the list of expirer-queue rows, one per segment
        self.assertEqual(json.loads(self.app.req_bodies[1]), [
            {'content_type': 'application/async-deleted',
             'created_at': now.internal,
             'deleted': 0,
             'etag': 'd41d8cd98f00b204e9800998ecf8427e',
             'name': '%s-AUTH_test/deltest/b_2' % now.internal,
             'size': 0,
             'storage_policy_index': 0},
            {'content_type': 'application/async-deleted',
             'created_at': now.internal,
             'deleted': 0,
             'etag': 'd41d8cd98f00b204e9800998ecf8427e',
             'name': '%s-AUTH_test/deltest/c_3' % now.internal,
             'size': 0,
             'storage_policy_index': 0},
        ])
    def test_handle_async_delete_non_ascii(self):
        """Async delete with a non-ASCII account: segment container differs
        from the manifest's, so authorization is checked for both writes."""
        self.app.can_ignore_range = True
        self.slo.allow_async_delete = True
        unicode_acct = u'AUTH_test-un\u00efcode'
        wsgi_acct = bytes_to_wsgi(unicode_acct.encode('utf-8'))
        now = Timestamp(time.time())
        exp_obj_cont = get_expirer_container(
            int(now), 86400, unicode_acct, 'deltest', 'man-all-there')
        self.app.register(
            'UPDATE', '/v1/.expiring_objects/%s' % exp_obj_cont,
            swob.HTTPNoContent, {}, None)
        authorize_calls = []
        def authorize(req):
            # record (method, acl) for every swift.authorize invocation
            authorize_calls.append((req.method, req.acl))
        req = Request.blank(
            '/v1/%s/deltest/man-all-there?'
            'async=1&multipart-manifest=delete&heartbeat=1' % wsgi_acct,
            environ={'REQUEST_METHOD': 'DELETE', 'swift.authorize': authorize})
        with patch('swift.common.utils.Timestamp.now', return_value=now):
            status, _, body = self.call_slo(req)
        # Every async delete should only need to make 3 requests during the
        # client request/response cycle, so no need to support heart-beating
        self.assertEqual('204 No Content', status)
        self.assertEqual(b'', body)
        self.assertEqual(self.app.calls, [
            ('GET',
             '/v1/%s/deltest/man-all-there?'
             'multipart-manifest=get' % wsgi_acct),
            ('HEAD', '/v1/%s' % wsgi_acct),
            ('HEAD', '/v1/%s/deltest' % wsgi_acct),
            ('HEAD', '/v1/%s/\xe2\x98\x83' % wsgi_acct),
            ('UPDATE',
             '/v1/.expiring_objects/%s'
             '?async=1&heartbeat=1&multipart-manifest=delete' % exp_obj_cont),
            ('DELETE',
             '/v1/%s/deltest/man-all-there'
             '?async=1&heartbeat=1&multipart-manifest=delete' % wsgi_acct),
        ])
        self.assertEqual(authorize_calls, [
            ('GET', None), # Original GET
            ('DELETE', 'diff write'),
            ('DELETE', 'same write'),
            ('DELETE', None), # Final DELETE
        ])
        # the UPDATE (second-to-last call) carries private-method headers
        for header, expected in (
                ('Content-Type', 'application/json'),
                ('X-Backend-Storage-Policy-Index', '0'),
                ('X-Backend-Allow-Private-Methods', 'True'),
        ):
            self.assertIn(header, self.app.calls_with_headers[-2].headers)
            value = self.app.calls_with_headers[-2].headers[header]
            msg = 'Expected %s header to be %r, not %r'
            self.assertEqual(value, expected, msg % (header, expected, value))
        self.assertEqual(json.loads(self.app.req_bodies[-2]), [
            {'content_type': 'application/async-deleted',
             'created_at': now.internal,
             'deleted': 0,
             'etag': 'd41d8cd98f00b204e9800998ecf8427e',
             'name': u'%s-%s/\N{SNOWMAN}/b_2' % (now.internal, unicode_acct),
             'size': 0,
             'storage_policy_index': 0},
            {'content_type': 'application/async-deleted',
             'created_at': now.internal,
             'deleted': 0,
             'etag': 'd41d8cd98f00b204e9800998ecf8427e',
             'name': u'%s-%s/\N{SNOWMAN}/c_3' % (now.internal, unicode_acct),
             'size': 0,
             'storage_policy_index': 0},
        ])
    def test_handle_async_delete_non_ascii_same_container(self):
        """When manifest and segments share a container, only one write
        authorization check is needed before the queued delete."""
        self.app.can_ignore_range = True
        self.slo.allow_async_delete = True
        unicode_acct = u'AUTH_test-un\u00efcode'
        wsgi_acct = bytes_to_wsgi(unicode_acct.encode('utf-8'))
        now = Timestamp(time.time())
        exp_obj_cont = get_expirer_container(
            int(now), 86400, unicode_acct, u'\N{SNOWMAN}', 'same-container')
        self.app.register(
            'UPDATE', '/v1/.expiring_objects/%s' % exp_obj_cont,
            swob.HTTPNoContent, {}, None)
        authorize_calls = []
        def authorize(req):
            # record (method, acl) for every swift.authorize invocation
            authorize_calls.append((req.method, req.acl))
        req = Request.blank(
            '/v1/%s/\xe2\x98\x83/same-container?'
            'async=yes&multipart-manifest=delete' % wsgi_acct,
            environ={'REQUEST_METHOD': 'DELETE', 'swift.authorize': authorize})
        with patch('swift.common.utils.Timestamp.now', return_value=now):
            status, _, body = self.call_slo(req)
        self.assertEqual('204 No Content', status)
        self.assertEqual(b'', body)
        self.assertEqual(self.app.calls, [
            ('GET',
             '/v1/%s/\xe2\x98\x83/same-container?'
             'multipart-manifest=get' % wsgi_acct),
            ('HEAD', '/v1/%s' % wsgi_acct),
            ('HEAD', '/v1/%s/\xe2\x98\x83' % wsgi_acct),
            ('UPDATE',
             '/v1/.expiring_objects/%s'
             '?async=yes&multipart-manifest=delete' % exp_obj_cont),
            ('DELETE',
             '/v1/%s/\xe2\x98\x83/same-container'
             '?async=yes&multipart-manifest=delete' % wsgi_acct),
        ])
        self.assertEqual(authorize_calls, [
            ('GET', None), # Original GET
            ('DELETE', 'same write'), # Only need one auth check
            ('DELETE', None), # Final DELETE
        ])
        # the UPDATE (second-to-last call) carries private-method headers
        for header, expected in (
                ('Content-Type', 'application/json'),
                ('X-Backend-Storage-Policy-Index', '0'),
                ('X-Backend-Allow-Private-Methods', 'True'),
        ):
            self.assertIn(header, self.app.calls_with_headers[-2].headers)
            value = self.app.calls_with_headers[-2].headers[header]
            msg = 'Expected %s header to be %r, not %r'
            self.assertEqual(value, expected, msg % (header, expected, value))
        self.assertEqual(json.loads(self.app.req_bodies[-2]), [
            {'content_type': 'application/async-deleted',
             'created_at': now.internal,
             'deleted': 0,
             'etag': 'd41d8cd98f00b204e9800998ecf8427e',
             'name': u'%s-%s/\N{SNOWMAN}/b_2' % (now.internal, unicode_acct),
             'size': 0,
             'storage_policy_index': 0},
            {'content_type': 'application/async-deleted',
             'created_at': now.internal,
             'deleted': 0,
             'etag': 'd41d8cd98f00b204e9800998ecf8427e',
             'name': u'%s-%s/\N{SNOWMAN}/c_3' % (now.internal, unicode_acct),
             'size': 0,
             'storage_policy_index': 0},
        ])
    def test_handle_async_delete_nested(self):
        """Async delete refuses manifests containing sub-SLOs with an
        immediate 400 after the initial GET."""
        self.app.can_ignore_range = True
        self.slo.allow_async_delete = True
        req = Request.blank(
            '/v1/AUTH_test/deltest/manifest-with-submanifest' +
            '?async=on&multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE'})
        status, _, body = self.call_slo(req)
        self.assertEqual('400 Bad Request', status)
        self.assertEqual(b'No segments may be large objects.', body)
        self.assertEqual(self.app.calls, [
            ('GET', '/v1/AUTH_test/deltest/' +
             'manifest-with-submanifest?multipart-manifest=get')])
    def test_handle_async_delete_too_many_containers(self):
        """Async delete requires all segments in one container; segments
        spread over two containers are rejected with 400."""
        self.app.can_ignore_range = True
        self.slo.allow_async_delete = True
        # override the manifest so its segments span two containers
        self.app.register(
            'GET', '/v1/AUTH_test/deltest/man',
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'X-Static-Large-Object': 'true'},
            json.dumps([{'name': '/cont1/a_1', 'hash': 'a', 'bytes': '1'},
                        {'name': '/cont2/b_2', 'hash': 'b', 'bytes': '2'}]).
            encode('ascii'))
        req = Request.blank(
            '/v1/AUTH_test/deltest/man?async=on&multipart-manifest=delete',
            environ={'REQUEST_METHOD': 'DELETE'})
        status, _, body = self.call_slo(req)
        self.assertEqual('400 Bad Request', status)
        # message ends with the (unordered) container names, so only
        # compare the fixed prefix
        expected = b'All segments must be in one container. Found segments in '
        self.assertEqual(expected, body[:len(expected)])
        self.assertEqual(self.app.calls, [
            ('GET', '/v1/AUTH_test/deltest/man?multipart-manifest=get')])
class TestSloHeadOldManifest(SloTestCase):
    """
    Exercise legacy manifests written before we added etag/size SLO Sysmeta
    N.B. We used to GET the whole manifest to calculate etag/size, just to
    respond to HEAD requests.
    """
    # large-object etag = md5 of the concatenated segment etags
    slo_etag = md5hex("seg01-hashseg02-hash")
    def setUp(self):
        """Register a two-segment manifest; subclasses may pre-set
        ``extra_manifest_headers`` to add the newer SLO sysmeta."""
        super(TestSloHeadOldManifest, self).setUp()
        manifest_json = json.dumps([
            {'name': '/gettest/seg01',
             'bytes': '100',
             'hash': 'seg01-hash',
             'content_type': 'text/plain',
             'last_modified': '2013-11-19T11:33:45.137446'},
            {'name': '/gettest/seg02',
             'bytes': '200',
             'hash': 'seg02-hash',
             'content_type': 'text/plain',
             'last_modified': '2013-11-19T11:33:45.137447'}])
        self.manifest_json_etag = md5hex(manifest_json)
        manifest_headers = {
            'Content-Length': str(len(manifest_json)),
            'Content-Type': 'test/data',
            'X-Static-Large-Object': 'true',
            'X-Object-Sysmeta-Artisanal-Etag': 'bespoke',
            'Etag': self.manifest_json_etag}
        # see TestSloHeadManifest for tests w/ manifest_has_sysmeta = True
        manifest_headers.update(getattr(self, 'extra_manifest_headers', {}))
        self.manifest_has_sysmeta = all(h in manifest_headers for h in (
            'X-Object-Sysmeta-Slo-Etag', 'X-Object-Sysmeta-Slo-Size'))
        self.app.register(
            'GET', '/v1/AUTH_test/headtest/man',
            swob.HTTPOk, manifest_headers, manifest_json.encode('ascii'))
    def test_etag_is_hash_of_segment_etags(self):
        """HEAD reports the SLO etag/size; without sysmeta that requires
        an extra manifest GET."""
        req = Request.blank(
            '/v1/AUTH_test/headtest/man',
            environ={'REQUEST_METHOD': 'HEAD'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertIn(('X-Manifest-Etag', self.manifest_json_etag), headers)
        self.assertIn(('Content-Length', '300'), headers)
        self.assertIn(('Content-Type', 'test/data'), headers)
        self.assertEqual(body, b'') # it's a HEAD request, after all
        expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
        if not self.manifest_has_sysmeta:
            expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
        self.assertEqual(self.app.calls, expected_app_calls)
    def test_get_manifest_passthrough(self):
        """HEAD with ?multipart-manifest=get exposes the raw manifest
        headers without SLO etag substitution."""
        req = Request.blank(
            '/v1/AUTH_test/headtest/man?multipart-manifest=get',
            environ={'REQUEST_METHOD': 'HEAD'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('Etag', self.manifest_json_etag), headers)
        self.assertIn(('Content-Type', 'application/json; charset=utf-8'),
                      headers)
        self.assertIn(('X-Static-Large-Object', 'true'), headers)
        self.assertIn(('X-Object-Sysmeta-Artisanal-Etag', 'bespoke'), headers)
        self.assertEqual(body, b'') # it's a HEAD request, after all
        expected_app_calls = [(
            'HEAD', '/v1/AUTH_test/headtest/man?multipart-manifest=get')]
        self.assertEqual(self.app.calls, expected_app_calls)
    def test_if_none_match_etag_matching(self):
        """If-None-Match with the SLO etag yields 304."""
        req = Request.blank(
            '/v1/AUTH_test/headtest/man',
            environ={'REQUEST_METHOD': 'HEAD'},
            headers={'If-None-Match': self.slo_etag})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '304 Not Modified')
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertIn(('Content-Length', '0'), headers)
        self.assertIn(('Content-Type', 'test/data'), headers)
        expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
        if not self.manifest_has_sysmeta:
            expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
        self.assertEqual(self.app.calls, expected_app_calls)
    def test_if_match_etag_not_matching(self):
        """If-Match with a non-matching etag yields 412."""
        req = Request.blank(
            '/v1/AUTH_test/headtest/man',
            environ={'REQUEST_METHOD': 'HEAD'},
            headers={'If-Match': 'zzz'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '412 Precondition Failed')
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertIn(('Content-Length', '0'), headers)
        self.assertIn(('Content-Type', 'test/data'), headers)
        expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
        if not self.manifest_has_sysmeta:
            expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
        self.assertEqual(self.app.calls, expected_app_calls)
    def test_if_none_match_etag_matching_with_override(self):
        """X-Backend-Etag-Is-At redirects the conditional check to an
        alternate etag; the visible Etag stays the SLO etag."""
        req = Request.blank(
            '/v1/AUTH_test/headtest/man',
            environ={'REQUEST_METHOD': 'HEAD'},
            headers={
                'If-None-Match': 'bespoke',
                'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Artisanal-Etag'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '304 Not Modified')
        # We *are not* responsible for replacing the etag; whoever set
        # x-backend-etag-is-at is responsible
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertIn(('Content-Length', '0'), headers)
        self.assertIn(('Content-Type', 'test/data'), headers)
        expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
        if not self.manifest_has_sysmeta:
            expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
        self.assertEqual(self.app.calls, expected_app_calls)
    def test_if_match_etag_not_matching_with_override(self):
        """With X-Backend-Etag-Is-At set, If-Match against the SLO etag
        fails (412) because the alternate etag is compared instead."""
        req = Request.blank(
            '/v1/AUTH_test/headtest/man',
            environ={'REQUEST_METHOD': 'HEAD'},
            headers={
                'If-Match': self.slo_etag,
                'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Artisanal-Etag'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '412 Precondition Failed')
        # We *are not* responsible for replacing the etag; whoever set
        # x-backend-etag-is-at is responsible
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertIn(('Content-Length', '0'), headers)
        self.assertIn(('Content-Type', 'test/data'), headers)
        expected_app_calls = [('HEAD', '/v1/AUTH_test/headtest/man')]
        if not self.manifest_has_sysmeta:
            expected_app_calls.append(('GET', '/v1/AUTH_test/headtest/man'))
        self.assertEqual(self.app.calls, expected_app_calls)
class TestSloHeadManifest(TestSloHeadOldManifest):
    """
    Exercise manifests written after we added etag/size SLO Sysmeta
    """
    def setUp(self):
        # must be set *before* the parent setUp, which folds these into
        # the registered manifest headers (manifest_has_sysmeta -> True)
        self.extra_manifest_headers = {
            'X-Object-Sysmeta-Slo-Etag': self.slo_etag,
            'X-Object-Sysmeta-Slo-Size': '300',
        }
        super(TestSloHeadManifest, self).setUp()
class TestSloGetRawManifest(SloTestCase):
    """Tests for GET ?multipart-manifest=get&format=raw, which converts
    the stored manifest back into the client PUT (raw) format."""
    def setUp(self):
        # register one plain manifest (with a sub-SLO entry) and one
        # manifest whose segments carry range specifications
        super(TestSloGetRawManifest, self).setUp()
        _bc_manifest_json = json.dumps(
            [{'name': '/gettest/b_10', 'hash': md5hex('b' * 10), 'bytes': '10',
              'content_type': 'text/plain',
              'last_modified': '1970-01-01T00:00:00.000000'},
             {'name': '/gettest/c_15', 'hash': md5hex('c' * 15), 'bytes': '15',
              'content_type': 'text/plain',
              'last_modified': '1970-01-01T00:00:00.000000'},
             {'name': '/gettest/d_10',
              'hash': md5hex(md5hex("e" * 5) + md5hex("f" * 5)), 'bytes': '10',
              'content_type': 'application/json',
              'sub_slo': True,
              'last_modified': '1970-01-01T00:00:00.000000'}])
        self.bc_etag = md5hex(_bc_manifest_json)
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-bc',
            # proxy obj controller removes swift_bytes from content-type
            swob.HTTPOk, {'Content-Type': 'text/plain',
                          'X-Static-Large-Object': 'true',
                          'X-Object-Meta-Plant': 'Ficus',
                          'Etag': md5hex(_bc_manifest_json)},
            _bc_manifest_json)
        _bc_manifest_json_ranges = json.dumps(
            [{'name': '/gettest/b_10', 'hash': md5hex('b' * 10), 'bytes': '10',
              'last_modified': '1970-01-01T00:00:00.000000',
              'content_type': 'text/plain', 'range': '1-99'},
             {'name': '/gettest/c_15', 'hash': md5hex('c' * 15), 'bytes': '15',
              'last_modified': '1970-01-01T00:00:00.000000',
              'content_type': 'text/plain', 'range': '100-200'}])
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-bc-r',
            # proxy obj controller removes swift_bytes from content-type
            swob.HTTPOk, {'Content-Type': 'text/plain',
                          'X-Static-Large-Object': 'true',
                          'X-Object-Meta-Plant': 'Ficus',
                          'Etag': md5hex(_bc_manifest_json_ranges)},
            _bc_manifest_json_ranges)
    def test_get_raw_manifest(self):
        """Raw format rewrites entries to etag/size_bytes/path keys and
        recomputes the etag over the rewritten body."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-bc'
            '?multipart-manifest=get&format=raw',
            environ={'REQUEST_METHOD': 'GET',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        expected_body = json.dumps([
            {'etag': md5hex('b' * 10), 'size_bytes': '10',
             'path': '/gettest/b_10'},
            {'etag': md5hex('c' * 15), 'size_bytes': '15',
             'path': '/gettest/c_15'},
            {'etag': md5hex(md5hex("e" * 5) + md5hex("f" * 5)),
             'size_bytes': '10',
             'path': '/gettest/d_10'}], sort_keys=True)
        expected_etag = md5hex(expected_body)
        if six.PY3:
            expected_body = expected_body.encode('utf-8')
        self.assertEqual(body, expected_body)
        self.assertEqual(status, '200 OK')
        self.assertTrue(('Etag', expected_etag) in headers, headers)
        self.assertTrue(('X-Static-Large-Object', 'true') in headers, headers)
        # raw format should return the actual manifest object content-type
        self.assertIn(('Content-Type', 'text/plain'), headers)
        try:
            json.loads(body)
        except ValueError:
            self.fail("Invalid JSON in manifest GET: %r" % body)
    def test_get_raw_manifest_passthrough_with_ranges(self):
        """Raw format preserves per-segment 'range' entries."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-bc-r'
            '?multipart-manifest=get&format=raw',
            environ={'REQUEST_METHOD': 'GET',
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        # raw format should return the actual manifest object content-type
        self.assertIn(('Content-Type', 'text/plain'), headers)
        try:
            resp_data = json.loads(body)
        except ValueError:
            self.fail("Invalid JSON in manifest GET: %r" % body)
        self.assertEqual(
            resp_data,
            [{'etag': md5hex('b' * 10), 'size_bytes': '10',
              'path': '/gettest/b_10', 'range': '1-99'},
             {'etag': md5hex('c' * 15), 'size_bytes': '15',
              'path': '/gettest/c_15', 'range': '100-200'}],
            body)
class TestSloGetManifest(SloTestCase):
def setUp(self):
super(TestSloGetManifest, self).setUp()
# some plain old objects
self.app.register(
'GET', '/v1/AUTH_test/gettest/a_5',
swob.HTTPOk, {'Content-Length': '5',
'Etag': md5hex('a' * 5)},
'a' * 5)
self.app.register(
'GET', '/v1/AUTH_test/gettest/b_10',
swob.HTTPOk, {'Content-Length': '10',
'Etag': md5hex('b' * 10)},
'b' * 10)
self.app.register(
'GET', '/v1/AUTH_test/gettest/c_15',
swob.HTTPOk, {'Content-Length': '15',
'Etag': md5hex('c' * 15)},
'c' * 15)
self.app.register(
'GET', '/v1/AUTH_test/gettest/d_20',
swob.HTTPOk, {'Content-Length': '20',
'Etag': md5hex('d' * 20)},
'd' * 20)
self.app.register(
'GET', '/v1/AUTH_test/gettest/e_25',
swob.HTTPOk, {'Content-Length': '25',
'Etag': md5hex('e' * 25)},
'e' * 25)
self.app.register(
'GET', '/v1/AUTH_test/gettest/f_30',
swob.HTTPOk, {'Content-Length': '30',
'Etag': md5hex('f' * 30)},
'f' * 30)
self.app.register(
'GET', '/v1/AUTH_test/gettest/g_35',
swob.HTTPOk, {'Content-Length': '35',
'Etag': md5hex('g' * 35)},
'g' * 35)
self.app.register(
'GET', '/v1/AUTH_test/gettest/h_40',
swob.HTTPOk, {'Content-Length': '40',
'Etag': md5hex('h' * 40)},
'h' * 40)
self.app.register(
'GET', '/v1/AUTH_test/gettest/i_45',
swob.HTTPOk, {'Content-Length': '45',
'Etag': md5hex('i' * 45)},
'i' * 45)
self.app.register(
'GET', '/v1/AUTH_test/gettest/j_50',
swob.HTTPOk, {'Content-Length': '50',
'Etag': md5hex('j' * 50)},
'j' * 50)
self.app.register(
'GET', '/v1/AUTH_test/gettest/k_55',
swob.HTTPOk, {'Content-Length': '55',
'Etag': md5hex('k' * 55)},
'k' * 55)
self.app.register(
'GET', '/v1/AUTH_test/gettest/l_60',
swob.HTTPOk, {'Content-Length': '60',
'Etag': md5hex('l' * 60)},
'l' * 60)
_bc_manifest_json = json.dumps(
[{'name': '/gettest/b_10', 'hash': md5hex('b' * 10), 'bytes': '10',
'content_type': 'text/plain'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15), 'bytes': '15',
'content_type': 'text/plain'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-bc',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Plant': 'Ficus',
'Etag': md5hex(_bc_manifest_json)},
_bc_manifest_json)
_abcd_manifest_json = json.dumps(
[{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/manifest-bc', 'sub_slo': True,
'content_type': 'application/json',
'hash': md5hex(md5hex("b" * 10) + md5hex("c" * 15)),
'bytes': 25},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'}])
self.abcd_manifest_json_etag = md5hex(_abcd_manifest_json)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': self.abcd_manifest_json_etag},
_abcd_manifest_json)
# A submanifest segment is created using the response headers from a
# HEAD on the submanifest. That HEAD is passed through SLO which will
# modify the response content-length to be equal to the size of the
# submanifest's large object. The swift_bytes value appended to the
# submanifest's content-type will have been removed. So the sub-slo
# segment dict that is written to the parent manifest should have the
# correct bytes and content-type values. However, if somehow the
# submanifest HEAD response wasn't modified by SLO (maybe
# historically?) and we ended up with the parent manifest sub-slo entry
# having swift_bytes appended to it's content-type and the actual
# submanifest size in its bytes field, then SLO can cope, so we create
# a deviant manifest to verify that SLO can deal with it.
_abcd_manifest_json_alt = json.dumps(
[{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/manifest-bc', 'sub_slo': True,
'content_type': 'application/json; swift_bytes=25',
'hash': md5hex(md5hex("b" * 10) + md5hex("c" * 15)),
'bytes': len(_bc_manifest_json)},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd-alt',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': md5hex(_abcd_manifest_json_alt)},
_abcd_manifest_json_alt)
_abcdefghijkl_manifest_json = json.dumps(
[{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5'},
{'name': '/gettest/b_10', 'hash': md5hex("b" * 10),
'content_type': 'text/plain', 'bytes': '10'},
{'name': '/gettest/c_15', 'hash': md5hex("c" * 15),
'content_type': 'text/plain', 'bytes': '15'},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'},
{'name': '/gettest/e_25', 'hash': md5hex("e" * 25),
'content_type': 'text/plain', 'bytes': '25'},
{'name': '/gettest/f_30', 'hash': md5hex("f" * 30),
'content_type': 'text/plain', 'bytes': '30'},
{'name': '/gettest/g_35', 'hash': md5hex("g" * 35),
'content_type': 'text/plain', 'bytes': '35'},
{'name': '/gettest/h_40', 'hash': md5hex("h" * 40),
'content_type': 'text/plain', 'bytes': '40'},
{'name': '/gettest/i_45', 'hash': md5hex("i" * 45),
'content_type': 'text/plain', 'bytes': '45'},
{'name': '/gettest/j_50', 'hash': md5hex("j" * 50),
'content_type': 'text/plain', 'bytes': '50'},
{'name': '/gettest/k_55', 'hash': md5hex("k" * 55),
'content_type': 'text/plain', 'bytes': '55'},
{'name': '/gettest/l_60', 'hash': md5hex("l" * 60),
'content_type': 'text/plain', 'bytes': '60'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcdefghijkl',
swob.HTTPOk, {
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': md5hex(_abcdefghijkl_manifest_json)},
_abcdefghijkl_manifest_json)
_bc_ranges_manifest_json = json.dumps(
[{'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
'content_type': 'text/plain', 'bytes': '10',
'range': '4-7'},
{'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
'content_type': 'text/plain', 'bytes': '10',
'range': '2-5'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
'content_type': 'text/plain', 'bytes': '15',
'range': '0-3'},
{'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
'content_type': 'text/plain', 'bytes': '15',
'range': '11-14'}])
self.bc_ranges_etag = md5hex(_bc_ranges_manifest_json)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-bc-ranges',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Plant': 'Ficus',
'Etag': self.bc_ranges_etag},
_bc_ranges_manifest_json)
_abcd_ranges_manifest_json = json.dumps(
[{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5',
'range': '0-3'},
{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
'content_type': 'text/plain', 'bytes': '5',
'range': '1-4'},
{'name': '/gettest/manifest-bc-ranges', 'sub_slo': True,
'content_type': 'application/json',
'hash': self.bc_ranges_etag,
'bytes': 16,
'range': '8-15'},
{'name': '/gettest/manifest-bc-ranges', 'sub_slo': True,
'content_type': 'application/json',
'hash': self.bc_ranges_etag,
'bytes': len(_bc_ranges_manifest_json),
'range': '0-7'},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20',
'range': '0-3'},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20',
'range': '8-11'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': md5hex(_abcd_ranges_manifest_json)},
_abcd_ranges_manifest_json)
_abcd_subranges_manifest_json = json.dumps(
[{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '6-10'},
{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '31-31'},
{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '14-18'},
{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '0-0'},
{'name': '/gettest/manifest-abcd-ranges', 'sub_slo': True,
'hash': md5hex("a" * 8),
'content_type': 'text/plain', 'bytes': '32',
'range': '22-26'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd-subranges',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': md5hex(_abcd_subranges_manifest_json)},
_abcd_subranges_manifest_json)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-badjson',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'X-Object-Meta-Fish': 'Bass'},
"[not {json (at ++++all")
    def tearDown(self):
        # every registered backend response body must have been fully read
        # or closed by the middleware; any leaked iterator shows up here
        self.assertEqual(self.app.unclosed_requests, {})
def test_get_manifest_passthrough(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-bc?multipart-manifest=get',
environ={'REQUEST_METHOD': 'GET',
'HTTP_ACCEPT': 'application/json'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertIn(
('Content-Type', 'application/json; charset=utf-8'), headers)
try:
resp_data = json.loads(body)
except ValueError:
self.fail("Invalid JSON in manifest GET: %r" % body)
self.assertEqual(
resp_data,
[{'hash': md5hex('b' * 10), 'bytes': '10', 'name': '/gettest/b_10',
'content_type': 'text/plain'},
{'hash': md5hex('c' * 15), 'bytes': '15', 'name': '/gettest/c_15',
'content_type': 'text/plain'}],
body)
self.assertIn(('Etag', md5hex(body)), headers)
def test_get_nonmanifest_passthrough(self):
req = Request.blank(
'/v1/AUTH_test/gettest/a_5',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertEqual(body, b'aaaaa')
def test_get_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-bc',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
manifest_etag = md5hex(md5hex("b" * 10) + md5hex("c" * 15))
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '25')
self.assertEqual(headers['Etag'], '"%s"' % manifest_etag)
self.assertEqual(headers['X-Object-Meta-Plant'], 'Ficus')
self.assertEqual(body, b'bbbbbbbbbbccccccccccccccc')
for _, _, hdrs in self.app.calls_with_headers[1:]:
ua = hdrs.get("User-Agent", "")
self.assertTrue("SLO MultipartGET" in ua)
self.assertFalse("SLO MultipartGET SLO MultipartGET" in ua)
# the first request goes through unaltered
first_ua = self.app.calls_with_headers[0][2].get("User-Agent")
self.assertFalse(
"SLO MultipartGET" in first_ua)
    def test_get_manifest_repeated_segments(self):
        """A segment referenced N times is fetched once with a multi-range.

        Each distinct segment object gets exactly one backend GET whose
        Range header lists one byte-range per manifest reference.
        """
        _aabbccdd_manifest_json = json.dumps(
            [{'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
              'content_type': 'text/plain', 'bytes': '5'},
             {'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
              'content_type': 'text/plain', 'bytes': '5'},
             {'name': '/gettest/b_10', 'hash': md5hex("b" * 10),
              'content_type': 'text/plain', 'bytes': '10'},
             {'name': '/gettest/b_10', 'hash': md5hex("b" * 10),
              'content_type': 'text/plain', 'bytes': '10'},
             {'name': '/gettest/c_15', 'hash': md5hex("c" * 15),
              'content_type': 'text/plain', 'bytes': '15'},
             {'name': '/gettest/c_15', 'hash': md5hex("c" * 15),
              'content_type': 'text/plain', 'bytes': '15'},
             {'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
              'content_type': 'text/plain', 'bytes': '20'},
             {'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
              'content_type': 'text/plain', 'bytes': '20'}])
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-aabbccdd',
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'X-Static-Large-Object': 'true',
                          'Etag': md5hex(_aabbccdd_manifest_json)},
            _aabbccdd_manifest_json)
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-aabbccdd',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '200 OK')
        self.assertEqual(body, (
            b'aaaaaaaaaabbbbbbbbbbbbbbbbbbbbcccccccccccccccccccccccccccccc'
            b'dddddddddddddddddddddddddddddddddddddddd'))
        # one backend GET per distinct segment, not per reference
        self.assertEqual(self.app.calls, [
            ('GET', '/v1/AUTH_test/gettest/manifest-aabbccdd'),
            ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
        # each segment GET carries both ranges in a single Range header
        ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
        self.assertEqual(ranges, [
            None,
            'bytes=0-4,0-4',
            'bytes=0-9,0-9',
            'bytes=0-14,0-14',
            'bytes=0-19,0-19'])
    def test_get_manifest_ratelimiting(self):
        """Segment fetches are paced per rate_limit_* middleware settings.

        Patches time.time/eventlet.sleep so the sleeps the middleware
        requests can be counted deterministically, then varies
        rate_limit_after_segment and rate_limit_under_size.
        """
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcdefghijkl',
            environ={'REQUEST_METHOD': 'GET'})
        the_time = [time.time()]
        sleeps = []
        def mock_time():
            return the_time[0]
        def mock_sleep(duration):
            # record the sleep and advance the fake clock so the limiter
            # sees time actually passing
            sleeps.append(duration)
            the_time[0] += duration
        # rate-limit every segment: 12 segments -> 11 sleeps between them
        with patch('time.time', mock_time), \
                patch('eventlet.sleep', mock_sleep), \
                patch.object(self.slo, 'rate_limit_under_size', 999999999), \
                patch.object(self.slo, 'rate_limit_after_segment', 0):
            status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')  # sanity check
        self.assertEqual(sleeps, [1.0] * 11)
        # give the client the first 4 segments without ratelimiting; we'll
        # sleep less
        del sleeps[:]
        with patch('time.time', mock_time), \
                patch('eventlet.sleep', mock_sleep), \
                patch.object(self.slo, 'rate_limit_under_size', 999999999), \
                patch.object(self.slo, 'rate_limit_after_segment', 4):
            status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')  # sanity check
        self.assertEqual(sleeps, [1.0] * 7)
        # ratelimit segments under 35 bytes; this affects a-f
        del sleeps[:]
        with patch('time.time', mock_time), \
                patch('eventlet.sleep', mock_sleep), \
                patch.object(self.slo, 'rate_limit_under_size', 35), \
                patch.object(self.slo, 'rate_limit_after_segment', 0):
            status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')  # sanity check
        self.assertEqual(sleeps, [1.0] * 5)
        # ratelimit segments under 36 bytes; this now affects a-g, netting
        # us one more sleep than before
        del sleeps[:]
        with patch('time.time', mock_time), \
                patch('eventlet.sleep', mock_sleep), \
                patch.object(self.slo, 'rate_limit_under_size', 36), \
                patch.object(self.slo, 'rate_limit_after_segment', 0):
            status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')  # sanity check
        self.assertEqual(sleeps, [1.0] * 6)
def test_get_manifest_with_submanifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '50')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
self.assertEqual(headers['X-Manifest-Etag'],
self.abcd_manifest_json_etag)
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
def test_get_manifest_with_submanifest_bytes_in_content_type(self):
# verify correct content-length when the sub-slo segment in the
# manifest has its actual object content-length appended as swift_bytes
# to the content-type, and the submanifest length in the bytes field.
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd-alt',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '50')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
    def test_range_get_manifest(self):
        """A ranged GET of a nested SLO fetches only the needed bytes.

        Verifies the exact backend call sequence, the per-call Range
        headers, the ignore-range hint on the first request, and the
        swift.source tagging of the subrequests.
        """
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Range': 'bytes=3-17'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '206 Partial Content')
        self.assertEqual(headers['Content-Length'], '15')
        self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
        self.assertEqual(body, b'aabbbbbbbbbbccc')
        self.assertEqual(
            self.app.calls,
            [('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
             ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
             ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
             ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get')])
        # only the client GET and the partially-needed segments carry Range
        ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
        self.assertEqual(ranges, [
            'bytes=3-17',
            None,
            None,
            'bytes=3-',
            None,
            'bytes=0-2'])
        # the first request asks the backend to ignore Range if the object
        # turns out to be an SLO manifest
        ignore_range_headers = [
            c[2].get('X-Backend-Ignore-Range-If-Metadata-Present')
            for c in self.app.calls_with_headers]
        self.assertEqual(ignore_range_headers, [
            'X-Static-Large-Object',
            None,
            None,
            None,
            None,
            None])
        # we set swift.source for everything but the first request
        self.assertIsNone(self.app.swift_sources[0])
        self.assertEqual(self.app.swift_sources[1:],
                         ['SLO'] * (len(self.app.swift_sources) - 1))
def test_multiple_ranges_get_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=3-17,20-24,35-999999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
ct, params = parse_content_type(headers['Content-Type'])
params = dict(params)
self.assertEqual(ct, 'multipart/byteranges')
boundary = params.get('boundary')
self.assertTrue(boundary is not None)
if six.PY3:
boundary = boundary.encode('utf-8')
self.assertEqual(len(body), int(headers['Content-Length']))
got_mime_docs = []
for mime_doc_fh in iter_multipart_mime_documents(
BytesIO(body), boundary):
headers = parse_mime_headers(mime_doc_fh)
body = mime_doc_fh.read()
got_mime_docs.append((headers, body))
self.assertEqual(len(got_mime_docs), 3)
first_range_headers = got_mime_docs[0][0]
first_range_body = got_mime_docs[0][1]
self.assertEqual(first_range_headers['Content-Range'],
'bytes 3-17/50')
self.assertEqual(first_range_headers['Content-Type'],
'application/json')
self.assertEqual(first_range_body, b'aabbbbbbbbbbccc')
second_range_headers = got_mime_docs[1][0]
second_range_body = got_mime_docs[1][1]
self.assertEqual(second_range_headers['Content-Range'],
'bytes 20-24/50')
self.assertEqual(second_range_headers['Content-Type'],
'application/json')
self.assertEqual(second_range_body, b'ccccc')
third_range_headers = got_mime_docs[2][0]
third_range_body = got_mime_docs[2][1]
self.assertEqual(third_range_headers['Content-Range'],
'bytes 35-49/50')
self.assertEqual(third_range_headers['Content-Type'],
'application/json')
self.assertEqual(third_range_body, b'ddddddddddddddd')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
self.assertEqual(ranges, [
'bytes=3-17,20-24,35-999999', # initial GET
None, # re-fetch top-level manifest
None, # fetch manifest-bc as sub-slo
'bytes=3-', # a_5
None, # b_10
'bytes=0-2,5-9', # c_15
'bytes=5-']) # d_20
# we set swift.source for everything but the first request
self.assertIsNone(self.app.swift_sources[0])
self.assertEqual(self.app.swift_sources[1:],
['SLO'] * (len(self.app.swift_sources) - 1))
def test_multiple_ranges_including_suffix_get_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=3-17,-21'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
ct, params = parse_content_type(headers['Content-Type'])
params = dict(params)
self.assertEqual(ct, 'multipart/byteranges')
boundary = params.get('boundary')
self.assertTrue(boundary is not None)
if six.PY3:
boundary = boundary.encode('utf-8')
got_mime_docs = []
for mime_doc_fh in iter_multipart_mime_documents(
BytesIO(body), boundary):
headers = parse_mime_headers(mime_doc_fh)
body = mime_doc_fh.read()
got_mime_docs.append((headers, body))
self.assertEqual(len(got_mime_docs), 2)
first_range_headers = got_mime_docs[0][0]
first_range_body = got_mime_docs[0][1]
self.assertEqual(first_range_headers['Content-Range'],
'bytes 3-17/50')
self.assertEqual(first_range_body, b'aabbbbbbbbbbccc')
second_range_headers = got_mime_docs[1][0]
second_range_body = got_mime_docs[1][1]
self.assertEqual(second_range_headers['Content-Range'],
'bytes 29-49/50')
self.assertEqual(second_range_body, b'cdddddddddddddddddddd')
def test_range_get_includes_whole_manifest(self):
# If the first range GET results in retrieval of the entire manifest
# body (which we can detect by looking at Content-Range), then we
# should not go make a second, non-ranged request just to retrieve the
# same bytes again.
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-999999999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
def test_range_get_beyond_manifest(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
self.app.register(
'GET', '/v1/AUTH_test/gettest/big_seg',
swob.HTTPOk, {'Content-Type': 'application/foo',
'Etag': big_etag}, big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/big_manifest',
swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'Etag': md5hex(big_manifest)},
big_manifest)
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
if six.PY3:
count_e = sum(1 if x == 'e' else 0
for x in body.decode('ascii', errors='replace'))
else:
count_e = sum(1 if x == 'e' else 0 for x in body)
self.assertEqual(count_e, 100000)
self.assertEqual(len(body) - count_e, 0)
self.assertEqual(
self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
('GET',
'/v1/AUTH_test/gettest/big_seg?multipart-manifest=get')])
def test_range_get_beyond_manifest_refetch_fails(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPNotFound, {}, None)])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '503 Service Unavailable')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_finds_old(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPOk, {'X-Backend-Timestamp': '1233'}, [b'small body'])])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '503 Service Unavailable')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_small_non_slo(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPOk, {'X-Backend-Timestamp': '1235'}, [b'small body'])])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '416 Requested Range Not Satisfiable')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_big_non_slo(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPOk, {'X-Backend-Timestamp': '1235'},
[b'x' * 1024 * 1024])])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK') # NOT 416 or 206!
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(len(body), 1024 * 1024)
self.assertEqual(body, b'x' * 1024 * 1024)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_tombstone(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPNotFound, {'X-Backend-Timestamp': '1345'}, None)])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '404 Not Found')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
    def test_range_get_bogus_content_range(self):
        """An invalid backend Content-Range header is ignored by SLO."""
        # Just a little paranoia; Swift currently sends back valid
        # Content-Range headers, but if somehow someone sneaks an invalid one
        # in there, we'll ignore it.
        def content_range_breaker_factory(app):
            # tiny middleware that corrupts Content-Range on every response
            def content_range_breaker(env, start_response):
                req = swob.Request(env)
                resp = req.get_response(app)
                resp.headers['Content-Range'] = 'triscuits'
                return resp(env, start_response)
            return content_range_breaker
        # rebuild the SLO stack with the breaker between SLO and the app
        self.slo = slo.filter_factory({})(
            content_range_breaker_factory(self.app))
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Range': 'bytes=0-999999999'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '206 Partial Content')
        self.assertEqual(
            body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
        # since the bogus Content-Range can't prove the whole manifest was
        # returned, SLO refetches the manifest without a Range
        self.assertEqual(
            self.app.calls,
            [('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
             ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
             ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
             ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
def test_range_get_manifest_on_segment_boundaries(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=5-29'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '25')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
self.assertEqual(body, b'bbbbbbbbbbccccccccccccccc')
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get')])
headers = [c[2] for c in self.app.calls_with_headers]
self.assertEqual(headers[0].get('Range'), 'bytes=5-29')
self.assertIsNone(headers[1].get('Range'))
self.assertIsNone(headers[2].get('Range'))
self.assertIsNone(headers[3].get('Range'))
self.assertIsNone(headers[4].get('Range'))
def test_range_get_manifest_first_byte(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-0'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '1')
self.assertEqual(body, b'a')
# Make sure we don't get any objects we don't need, including
# submanifests.
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get')])
def test_range_get_manifest_sub_slo(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=25-30'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '6')
self.assertEqual(body, b'cccccd')
# Make sure we don't get any objects we don't need, including
# submanifests.
self.assertEqual(
self.app.calls,
[('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
('GET', '/v1/AUTH_test/gettest/manifest-bc'),
('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
def test_range_get_manifest_overlapping_end(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=45-55'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '206 Partial Content')
self.assertEqual(headers['Content-Length'], '5')
self.assertEqual(body, b'ddddd')
def test_range_get_manifest_unsatisfiable(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-abcd',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100-200'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '416 Requested Range Not Satisfiable')
    def test_get_segment_with_non_ascii_path(self):
        # Non-ASCII container/object names must round-trip through the
        # WSGI environment: raw UTF-8 bytes on py2, WSGI-string-encoded
        # on py3 (via str_to_wsgi).
        segment_body = u"a møøse once bit my sister".encode("utf-8")
        segment_etag = md5(segment_body, usedforsecurity=False).hexdigest()
        if six.PY2:
            path = u'/v1/AUTH_test/ünicode/öbject-segment'.encode('utf-8')
        else:
            path = str_to_wsgi(u'/v1/AUTH_test/ünicode/öbject-segment')
        self.app.register(
            'GET', path,
            swob.HTTPOk, {'Content-Length': str(len(segment_body)),
                          'Etag': segment_etag},
            segment_body)
        # json.dumps escapes non-ASCII by default, so the manifest body
        # itself is pure ASCII even though the segment name is not.
        manifest_json = json.dumps([{'name': u'/ünicode/öbject-segment',
                                     'hash': segment_etag,
                                     'content_type': 'text/plain',
                                     'bytes': len(segment_body)}])
        if six.PY2:
            path = u'/v1/AUTH_test/ünicode/manifest'.encode('utf-8')
        else:
            path = str_to_wsgi(u'/v1/AUTH_test/ünicode/manifest')
        self.app.register(
            'GET', path,
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'Content-Length': str(len(manifest_json)),
                          'X-Static-Large-Object': 'true'},
            manifest_json.encode('ascii'))
        req = Request.blank(
            str_to_wsgi('/v1/AUTH_test/ünicode/manifest'),
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '200 OK')
        self.assertEqual(body, segment_body)
    def test_get_range_manifest(self):
        # Full GET of a manifest whose entries carry range specifications;
        # each segment is fetched once with a multi-range Range header
        # covering all the sub-ranges it contributes.
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd-ranges',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '200 OK')
        self.assertEqual(headers['Content-Length'], '32')
        self.assertEqual(headers['Content-Type'], 'application/json')
        self.assertEqual(body, b'aaaaaaaaccccccccbbbbbbbbdddddddd')
        self.assertEqual(
            self.app.calls,
            [('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
             ('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
             ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
        ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
        self.assertEqual(ranges, [
            None,
            None,
            'bytes=0-3,1-',
            'bytes=0-3,11-',
            'bytes=4-7,2-5',
            'bytes=0-3,8-11'])
        # we set swift.source for everything but the first request
        self.assertIsNone(self.app.swift_sources[0])
        self.assertEqual(
            self.app.swift_sources[1:],
            ['SLO'] * (len(self.app.swift_sources) - 1)
        )
        # The SLO etag is the md5 of the concatenated segment etags, each
        # qualified by the byte range it contributes.
        self.assertEqual(md5hex(''.join([
            md5hex('a' * 5), ':0-3;',
            md5hex('a' * 5), ':1-4;',
            self.bc_ranges_etag, ':8-15;',
            self.bc_ranges_etag, ':0-7;',
            md5hex('d' * 20), ':0-3;',
            md5hex('d' * 20), ':8-11;',
        ])), headers['Etag'].strip('"'))
    def test_get_subrange_manifest(self):
        # Full GET of a manifest whose range entries point at another
        # range manifest (sub-ranges of sub-ranges); each leaf fetch uses
        # a single recomputed Range header.
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd-subranges',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '200 OK')
        self.assertEqual(headers['Content-Length'], '17')
        self.assertEqual(headers['Content-Type'], 'application/json')
        self.assertEqual(body, b'aacccdccbbbabbddd')
        self.assertEqual(
            self.app.calls,
            [('GET', '/v1/AUTH_test/gettest/manifest-abcd-subranges'),
             ('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
             ('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
             ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
        ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
        self.assertEqual(ranges, [
            None,
            None,
            None,
            'bytes=3-',
            'bytes=0-2',
            'bytes=11-11',
            'bytes=13-',
            'bytes=4-6',
            'bytes=0-0',
            'bytes=4-5',
            'bytes=0-2'])
        # we set swift.source for everything but the first request
        self.assertIsNone(self.app.swift_sources[0])
        self.assertEqual(self.app.swift_sources[1:],
                         ['SLO'] * (len(self.app.swift_sources) - 1))
    def test_range_get_range_manifest(self):
        # Client range GET over a range manifest: the client's Range goes
        # on the first manifest request, and segment requests carry
        # recomputed (possibly multi-part) ranges.
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd-ranges',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Range': 'bytes=7-26'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '206 Partial Content')
        self.assertEqual(headers['Content-Length'], '20')
        self.assertEqual(headers['Content-Type'], 'application/json')
        self.assertIn('Etag', headers)
        self.assertEqual(body, b'accccccccbbbbbbbbddd')
        self.assertEqual(
            self.app.calls,
            [('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
             ('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
             ('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
             ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
        ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
        self.assertEqual(ranges, [
            'bytes=7-26',
            None,
            None,
            'bytes=4-',
            'bytes=0-3,11-',
            'bytes=4-7,2-5',
            'bytes=0-2'])
        # we set swift.source for everything but the first request
        self.assertIsNone(self.app.swift_sources[0])
        self.assertEqual(self.app.swift_sources[1:],
                         ['SLO'] * (len(self.app.swift_sources) - 1))
    def test_range_get_subrange_manifest(self):
        # Client range GET over a subrange manifest: two levels of
        # manifest indirection, each leaf fetch with a single recomputed
        # Range header.
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd-subranges',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Range': 'bytes=4-12'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '206 Partial Content')
        self.assertEqual(headers['Content-Length'], '9')
        self.assertEqual(headers['Content-Type'], 'application/json')
        self.assertEqual(body, b'cdccbbbab')
        self.assertEqual(
            self.app.calls,
            [('GET', '/v1/AUTH_test/gettest/manifest-abcd-subranges'),
             ('GET', '/v1/AUTH_test/gettest/manifest-abcd-subranges'),
             ('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
             ('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
             ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get')])
        ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
        self.assertEqual(ranges, [
            'bytes=4-12',
            None,
            None,
            None,
            'bytes=2-2',
            'bytes=11-11',
            'bytes=13-',
            'bytes=4-6',
            'bytes=0-0',
            'bytes=4-4'])
        # we set swift.source for everything but the first request
        self.assertIsNone(self.app.swift_sources[0])
        self.assertEqual(self.app.swift_sources[1:],
                         ['SLO'] * (len(self.app.swift_sources) - 1))
    def test_range_get_includes_whole_range_manifest(self):
        # If the first range GET results in retrieval of the entire manifest
        # body (which we can detect by looking at Content-Range), then we
        # should not go make a second, non-ranged request just to retrieve the
        # same bytes again.
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd-ranges',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Range': 'bytes=0-999999999'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '206 Partial Content')
        self.assertEqual(headers['Content-Length'], '32')
        self.assertEqual(headers['Content-Type'], 'application/json')
        self.assertEqual(body, b'aaaaaaaaccccccccbbbbbbbbdddddddd')
        # Note: only ONE GET of manifest-abcd-ranges, unlike the other
        # ranged tests.
        self.assertEqual(
            self.app.calls,
            [('GET', '/v1/AUTH_test/gettest/manifest-abcd-ranges'),
             ('GET', '/v1/AUTH_test/gettest/manifest-bc-ranges'),
             ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
             ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get')])
        ranges = [c[2].get('Range') for c in self.app.calls_with_headers]
        self.assertEqual(ranges, [
            'bytes=0-999999999',
            None,
            'bytes=0-3,1-',
            'bytes=0-3,11-',
            'bytes=4-7,2-5',
            'bytes=0-3,8-11'])
        # we set swift.source for everything but the first request
        self.assertIsNone(self.app.swift_sources[0])
        self.assertEqual(self.app.swift_sources[1:],
                         ['SLO'] * (len(self.app.swift_sources) - 1))
def test_get_bogus_manifest(self):
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-badjson',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '0')
self.assertEqual(headers['X-Object-Meta-Fish'], 'Bass')
self.assertEqual(body, b'')
    def _do_test_generator_closure(self, leaks):
        # Test that the SLO WSGI iterable closes its internal .app_iter when
        # it receives a close() message.
        #
        # This is sufficient to fix a memory leak. The memory leak arises
        # due to cyclic references involving a running generator; a running
        # generator sometimes prevents the GC from collecting it in the
        # same way that an object with a defined __del__ does.
        #
        # There are other ways to break the cycle and fix the memory leak as
        # well; calling .close() on the generator is sufficient, but not
        # necessary. However, having this test is better than nothing for
        # preventing regressions.
        #
        # :param leaks: single-element list used as a mutable counter;
        #               incremented when a tracked iterator is created and
        #               decremented when it is closed.
        # :returns: the (unconsumed) WSGI app response iterable
        class LeakTracker(object):
            # Wraps an iterator, bumping leaks[0] on creation and
            # decrementing it again when close() is called.
            def __init__(self, inner_iter):
                leaks[0] += 1
                self.inner_iter = iter(inner_iter)
            def __iter__(self):
                return self
            def next(self):
                return next(self.inner_iter)
            __next__ = next
            def close(self):
                leaks[0] -= 1
                close_if_possible(self.inner_iter)
        class LeakTrackingSegmentedIterable(slo.SegmentedIterable):
            # Same as SegmentedIterable but with its internal iterator
            # wrapped in a LeakTracker so tests can observe close().
            def _internal_iter(self, *a, **kw):
                it = super(
                    LeakTrackingSegmentedIterable, self)._internal_iter(
                        *a, **kw)
                return LeakTracker(it)
        status = [None]
        headers = [None]
        def start_response(s, h, ei=None):
            status[0] = s
            headers[0] = h
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET',
                     'HTTP_ACCEPT': 'application/json'})
        # can't self.call_slo() here since we don't want to consume the
        # whole body
        with patch.object(slo, 'SegmentedIterable',
                          LeakTrackingSegmentedIterable):
            app_resp = self.slo(req.environ, start_response)
        self.assertEqual(status[0], '200 OK')  # sanity check
        return app_resp
def test_generator_closure(self):
leaks = [0]
app_resp = self._do_test_generator_closure(leaks)
body_iter = iter(app_resp)
chunk = next(body_iter)
self.assertEqual(chunk, b'aaaaa') # sanity check
app_resp.close()
self.assertEqual(0, leaks[0])
def test_generator_closure_iter_app_resp(self):
# verify that the result of iter(app_resp) has a close method that
# closes app_resp
leaks = [0]
app_resp = self._do_test_generator_closure(leaks)
body_iter = iter(app_resp)
chunk = next(body_iter)
self.assertEqual(chunk, b'aaaaa') # sanity check
close_method = getattr(body_iter, 'close', None)
self.assertIsNotNone(close_method)
self.assertTrue(callable(close_method))
close_method()
self.assertEqual(0, leaks[0])
    def test_head_manifest_is_efficient(self):
        # HEAD of an SLO needs only the outer manifest to compute size and
        # etag; no segment or submanifest requests should be made.
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'HEAD'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '200 OK')
        self.assertEqual(headers['Content-Length'], '50')
        self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
        self.assertEqual(headers['X-Manifest-Etag'],
                         self.abcd_manifest_json_etag)
        self.assertEqual(body, b'')
        # Note the lack of recursive descent into manifest-bc. We know the
        # content-length from the outer manifest, so there's no need for any
        # submanifest fetching here, but a naïve implementation might do it
        # anyway.
        self.assertEqual(self.app.calls, [
            ('HEAD', '/v1/AUTH_test/gettest/manifest-abcd'),
            ('GET', '/v1/AUTH_test/gettest/manifest-abcd')])
    def test_recursion_limit(self):
        # man1 points to obj1 and man2, man2 points to obj2 and man3...
        # Twenty levels of nesting exceed the recursion limit; since data
        # has already been streamed when the limit is hit, the client gets
        # a 200 with a truncated body and the error is only logged.
        for i in range(20):
            self.app.register('GET', '/v1/AUTH_test/gettest/obj%d' % i,
                              swob.HTTPOk, {'Content-Type': 'text/plain',
                                            'Etag': md5hex('body%02d' % i)},
                              b'body%02d' % i)
            manifest_json = json.dumps([{'name': '/gettest/obj20',
                                         'hash': md5hex('body20'),
                                         'content_type': 'text/plain',
                                         'bytes': '6'}])
            self.app.register(
                'GET', '/v1/AUTH_test/gettest/man%d' % i,
                swob.HTTPOk, {'Content-Type': 'application/json',
                              'X-Static-Large-Object': 'true',
                              'Etag': 'man%d' % i},
                manifest_json.encode('ascii'))
        submanifest_bytes = 6
        # Re-register man1..man19 (bottom-up) so each one is
        # obj + 3 bytes of inline data + the next deeper manifest.
        for i in range(19, 0, -1):
            manifest_data = [
                {'name': '/gettest/obj%d' % i,
                 'hash': md5hex('body%02d' % i),
                 'bytes': '6',
                 'content_type': 'text/plain'},
                {'data': base64.b64encode(b'-' * 3).decode('ascii')},
                {'name': '/gettest/man%d' % (i + 1),
                 'hash': 'man%d' % (i + 1),
                 'sub_slo': True,
                 'bytes': submanifest_bytes,
                 'content_type': 'application/json'}]
            submanifest_bytes += 9
            manifest_json = json.dumps(manifest_data)
            self.app.register(
                'GET', '/v1/AUTH_test/gettest/man%d' % i,
                swob.HTTPOk, {'Content-Type': 'application/json',
                              'X-Static-Large-Object': 'true',
                              'Etag': 'man%d' % i},
                manifest_json.encode('ascii'))
        req = Request.blank(
            '/v1/AUTH_test/gettest/man1',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        # we don't know at header-sending time that things are going to go
        # wrong, so we end up with a 200 and a truncated body
        self.assertEqual(status, '200 OK')
        self.assertEqual(headers['Content-Length'], str(9 * 19 + 6))
        self.assertEqual(body, (
            b'body01---body02---body03---body04---body05---'
            b'body06---body07---body08---body09---body10---'))
        # but the error shows up in logs
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            "While processing manifest '/v1/AUTH_test/gettest/man1', "
            "max recursion depth was exceeded"
        ])
        # make sure we didn't keep asking for segments
        self.assertEqual(self.app.call_count, 20)
def test_sub_slo_recursion(self):
# man1 points to man2 and obj1, man2 points to man3 and obj2...
for i in range(11):
self.app.register('GET', '/v1/AUTH_test/gettest/obj%d' % i,
swob.HTTPOk, {'Content-Type': 'text/plain',
'Content-Length': '6',
'Etag': md5hex('body%02d' % i)},
b'body%02d' % i)
manifest_json = json.dumps([{'name': '/gettest/obj%d' % i,
'hash': md5hex('body%2d' % i),
'content_type': 'text/plain',
'bytes': '6'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
self.app.register(
'HEAD', '/v1/AUTH_test/gettest/obj%d' % i,
swob.HTTPOk, {'Content-Length': '6',
'Etag': md5hex('body%2d' % i)},
None)
for i in range(9, 0, -1):
manifest_data = [
{'name': '/gettest/man%d' % (i + 1),
'hash': 'man%d' % (i + 1),
'sub_slo': True,
'bytes': (10 - i) * 6,
'content_type': 'application/json'},
{'name': '/gettest/obj%d' % i,
'hash': md5hex('body%02d' % i),
'bytes': '6',
'content_type': 'text/plain'}]
manifest_json = json.dumps(manifest_data)
self.app.register(
'GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
req = Request.blank(
'/v1/AUTH_test/gettest/man1',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
self.assertEqual(body, (b'body10body09body08body07body06'
b'body05body04body03body02body01'))
self.assertEqual(self.app.call_count, 20)
def test_sub_slo_recursion_limit(self):
# man1 points to man2 and obj1, man2 points to man3 and obj2...
for i in range(12):
self.app.register('GET', '/v1/AUTH_test/gettest/obj%d' % i,
swob.HTTPOk,
{'Content-Type': 'text/plain',
'Content-Length': '6',
'Etag': md5hex('body%02d' % i)},
b'body%02d' % i)
manifest_json = json.dumps([{'name': '/gettest/obj%d' % i,
'hash': md5hex('body%2d' % i),
'content_type': 'text/plain',
'bytes': '6'}])
self.app.register(
'GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
self.app.register(
'HEAD', '/v1/AUTH_test/gettest/obj%d' % i,
swob.HTTPOk, {'Content-Length': '6',
'Etag': md5hex('body%2d' % i)},
None)
for i in range(11, 0, -1):
manifest_data = [
{'name': '/gettest/man%d' % (i + 1),
'hash': 'man%d' % (i + 1),
'sub_slo': True,
'bytes': (12 - i) * 6,
'content_type': 'application/json'},
{'name': '/gettest/obj%d' % i,
'hash': md5hex('body%02d' % i),
'bytes': '6',
'content_type': 'text/plain'}]
manifest_json = json.dumps(manifest_data)
self.app.register('GET', '/v1/AUTH_test/gettest/man%d' % i,
swob.HTTPOk,
{'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
'Etag': 'man%d' % i},
manifest_json.encode('ascii'))
req = Request.blank(
'/v1/AUTH_test/gettest/man1',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual(status, '409 Conflict')
self.assertEqual(self.app.call_count, 10)
self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
"While processing manifest '/v1/AUTH_test/gettest/man1', "
"max recursion depth was exceeded"
])
    def test_get_with_if_modified_since(self):
        # It's important not to pass the If-[Un]Modified-Since header to the
        # proxy for segment or submanifest GET requests, as it may result in
        # 304 Not Modified responses, and those don't contain any useful data.
        req = swob.Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Modified-Since': 'Wed, 12 Feb 2014 22:24:52 GMT',
                     'If-Unmodified-Since': 'Thu, 13 Feb 2014 23:25:53 GMT'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [])
        # every request after the client-facing one must be stripped of
        # the conditional headers
        for _, _, hdrs in self.app.calls_with_headers[1:]:
            self.assertFalse('If-Modified-Since' in hdrs)
            self.assertFalse('If-Unmodified-Since' in hdrs)
    def test_error_fetching_segment(self):
        # A mid-stream segment error can't change the already-sent 200;
        # the body is truncated and the failure is logged.
        self.app.register('GET', '/v1/AUTH_test/gettest/c_15',
                          swob.HTTPUnauthorized, {}, None)
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        headers = HeaderKeyDict(headers)
        self.assertEqual(status, '200 OK')
        self.assertEqual(b"aaaaabbbbbbbbbb", body)
        self.assertEqual(self.app.unread_requests, {})
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'While processing manifest /v1/AUTH_test/gettest/manifest-abcd, '
            'got 401 (<html><h1>Unauthorized</h1><p>This server could not '
            'verif...) while retrieving /v1/AUTH_test/gettest/c_15'
        ])
        self.assertEqual(self.app.calls, [
            ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
            ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
            ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
            # This one has the error, and so is the last one we fetch.
            ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get')])
    def test_error_fetching_submanifest(self):
        # A submanifest fetch failure after data has been sent: the client
        # still sees 200, gets only the segments before the failure, and
        # the error is logged.
        self.app.register('GET', '/v1/AUTH_test/gettest/manifest-bc',
                          swob.HTTPUnauthorized, {}, None)
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual("200 OK", status)
        self.assertEqual(b"aaaaa", body)
        self.assertEqual(self.app.unread_requests, {})
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'while fetching /v1/AUTH_test/gettest/manifest-abcd, GET of '
            'submanifest /v1/AUTH_test/gettest/manifest-bc failed with '
            'status 401 (<html><h1>Unauthorized</h1><p>This server could '
            'not verif...)'
        ])
        self.assertEqual(self.app.calls, [
            ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
            # This one has the error, and so is the last one we fetch.
            ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
            # But we were looking ahead to see if we could combine ranges,
            # so we still get the first segment out
            ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get')])
    def test_error_fetching_first_segment_submanifest(self):
        # This differs from the normal submanifest error because this one
        # happens before we've actually sent any response body.
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-a',
            swob.HTTPForbidden, {}, None)
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-manifest-a',
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'X-Static-Large-Object': 'true'},
            json.dumps([{'name': '/gettest/manifest-a', 'sub_slo': True,
                         'content_type': 'application/json',
                         'hash': 'manifest-a',
                         'bytes': '12345'}]))
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-manifest-a',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        # nothing was sent yet, so a real error status can be returned
        self.assertEqual('409 Conflict', status)
        self.assertEqual(self.app.unread_requests, {})
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'while fetching /v1/AUTH_test/gettest/manifest-manifest-a, GET '
            'of submanifest /v1/AUTH_test/gettest/manifest-a failed with '
            'status 403 (<html><h1>Forbidden</h1><p>Access was denied to '
            'this reso...)'
        ])
    def test_invalid_json_submanifest(self):
        # A submanifest whose body fails to JSON-decode truncates the
        # response after the already-sent segments and logs the decode
        # error.
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-bc',
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'X-Static-Large-Object': 'true',
                          'X-Object-Meta-Plant': 'Ficus'},
            "[this {isn't (JSON")
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual('200 OK', status)
        self.assertEqual(body, b'aaaaa')
        # the json module's error message differs between py2 and py3
        if six.PY2:
            error = "No JSON object could be decoded"
        else:
            error = "Expecting value: line 1 column 2 (char 1)"
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'while fetching /v1/AUTH_test/gettest/manifest-abcd, '
            'JSON-decoding of submanifest /v1/AUTH_test/gettest/manifest-bc '
            'failed with %s' % error
        ])
    def test_mismatched_etag(self):
        # A segment whose live etag doesn't match the manifest's recorded
        # hash stops the stream at that segment; earlier segments are
        # still delivered.
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-a-b-badetag-c',
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'X-Static-Large-Object': 'true'},
            json.dumps([{'name': '/gettest/a_5', 'hash': md5hex('a' * 5),
                         'content_type': 'text/plain', 'bytes': '5'},
                        {'name': '/gettest/b_10', 'hash': 'wrong!',
                         'content_type': 'text/plain', 'bytes': '10'},
                        {'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
                         'content_type': 'text/plain', 'bytes': '15'}]))
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-a-b-badetag-c',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual('200 OK', status)
        self.assertEqual(body, b'aaaaa')
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'Object segment no longer valid: /v1/AUTH_test/gettest/b_10 '
            'etag: 82136b4240d6ce4ea7d03e51469a393b != wrong! or 10 != 10.'
        ])
    def test_mismatched_size(self):
        # A segment whose live size doesn't match the manifest's recorded
        # bytes stops the stream at that segment; earlier segments are
        # still delivered.
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-a-b-badsize-c',
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'X-Static-Large-Object': 'true'},
            json.dumps([{'name': '/gettest/a_5', 'hash': md5hex('a' * 5),
                         'content_type': 'text/plain', 'bytes': '5'},
                        {'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
                         'content_type': 'text/plain', 'bytes': '999999'},
                        {'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
                         'content_type': 'text/plain', 'bytes': '15'}]))
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-a-b-badsize-c',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual('200 OK', status)
        self.assertEqual(body, b'aaaaa')
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'Object segment no longer valid: /v1/AUTH_test/gettest/b_10 '
            'etag: 82136b4240d6ce4ea7d03e51469a393b != '
            '82136b4240d6ce4ea7d03e51469a393b or 10 != 999999.'
        ])
    def test_mismatched_checksum(self):
        # The segment's headers look right, but the actual body bytes
        # hash differently; the bad bytes have already been streamed, so
        # the client gets them and the mismatch is only logged.
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/a_5',
            swob.HTTPOk, {'Content-Length': '5',
                          'Etag': md5hex('a' * 5)},
            # this segment has invalid content
            'x' * 5)
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest',
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'X-Static-Large-Object': 'true'},
            json.dumps([{'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
                         'content_type': 'text/plain', 'bytes': '10'},
                        {'name': '/gettest/a_5', 'hash': md5hex('a' * 5),
                         'content_type': 'text/plain', 'bytes': '5'},
                        {'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
                         'content_type': 'text/plain', 'bytes': '15'}]))
        req = Request.blank('/v1/AUTH_test/gettest/manifest')
        status, headers, body = self.call_slo(req)
        self.assertEqual('200 OK', status)
        self.assertEqual(body, (b'b' * 10 + b'x' * 5))
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'Bad MD5 checksum for /v1/AUTH_test/gettest/a_5 as part of '
            '/v1/AUTH_test/gettest/manifest: headers had '
            '594f803b380a41396ed63dca39503542, but object MD5 was '
            'actually fb0e22c79ac75679e9881e6ba183b354',
        ])
    def test_mismatched_length(self):
        # The segment's body is shorter than its Content-Length header;
        # the short data has already gone out, so the client gets it and
        # the length mismatch is only logged.
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/a_5',
            swob.HTTPOk, {'Content-Length': '5',
                          'Etag': md5hex('a' * 5)},
            # this segment comes up short
            [b'a' * 4])
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest',
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'X-Static-Large-Object': 'true'},
            json.dumps([{'name': '/gettest/b_10', 'hash': md5hex('b' * 10),
                         'content_type': 'text/plain', 'bytes': '10'},
                        {'name': '/gettest/a_5', 'hash': md5hex('a' * 5),
                         'content_type': 'text/plain', 'bytes': '5'},
                        {'name': '/gettest/c_15', 'hash': md5hex('c' * 15),
                         'content_type': 'text/plain', 'bytes': '15'}]))
        req = Request.blank('/v1/AUTH_test/gettest/manifest')
        status, headers, body = self.call_slo(req)
        self.assertEqual('200 OK', status)
        self.assertEqual(body, (b'b' * 10 + b'a' * 4))
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'Bad response length for /v1/AUTH_test/gettest/a_5 as part of '
            '/v1/AUTH_test/gettest/manifest: headers had 5, but '
            'response length was actually 4',
        ])
    def test_first_segment_mismatched_etag(self):
        # An etag mismatch on the very first segment is caught before any
        # body bytes are sent, so the client gets a real 409.
        self.app.register('GET', '/v1/AUTH_test/gettest/manifest-badetag',
                          swob.HTTPOk, {'Content-Type': 'application/json',
                                        'X-Static-Large-Object': 'true'},
                          json.dumps([{'name': '/gettest/a_5',
                                       'hash': 'wrong!',
                                       'content_type': 'text/plain',
                                       'bytes': '5'}]))
        req = Request.blank('/v1/AUTH_test/gettest/manifest-badetag',
                            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual('409 Conflict', status)
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'Object segment no longer valid: /v1/AUTH_test/gettest/a_5 '
            'etag: 594f803b380a41396ed63dca39503542 != wrong! or 5 != 5.'
        ])
    def test_first_segment_mismatched_size(self):
        # A size mismatch on the very first segment is caught before any
        # body bytes are sent, so the client gets a real 409.
        self.app.register('GET', '/v1/AUTH_test/gettest/manifest-badsize',
                          swob.HTTPOk, {'Content-Type': 'application/json',
                                        'X-Static-Large-Object': 'true'},
                          json.dumps([{'name': '/gettest/a_5',
                                       'hash': md5hex('a' * 5),
                                       'content_type': 'text/plain',
                                       'bytes': '999999'}]))
        req = Request.blank('/v1/AUTH_test/gettest/manifest-badsize',
                            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual('409 Conflict', status)
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'Object segment no longer valid: /v1/AUTH_test/gettest/a_5 '
            'etag: 594f803b380a41396ed63dca39503542 != '
            '594f803b380a41396ed63dca39503542 or 5 != 999999.'
        ])
    @patch('swift.common.request_helpers.time')
    def test_download_takes_too_long(self, mock_time):
        # Simulate a download exceeding the max LO GET time (86400s): the
        # stream is cut off after the segment during which the clock runs
        # out, and the timeout is logged.
        mock_time.time.side_effect = [
            0,  # start time
            10 * 3600,  # a_5
            20 * 3600,  # b_10
            30 * 3600,  # c_15, but then we time out
        ]
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'While processing manifest /v1/AUTH_test/gettest/manifest-abcd, '
            'max LO GET time of 86400s exceeded'
        ])
        self.assertEqual(self.app.calls, [
            ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
            ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
            ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get')])
    def test_first_segment_not_exists(self):
        # The first segment 404s before any data has been sent, so the
        # client gets a 409 and the failure is logged.
        self.app.register('GET', '/v1/AUTH_test/gettest/not_exists_obj',
                          swob.HTTPNotFound, {}, None)
        self.app.register('GET', '/v1/AUTH_test/gettest/manifest-not-exists',
                          swob.HTTPOk, {'Content-Type': 'application/json',
                                        'X-Static-Large-Object': 'true'},
                          json.dumps([{'name': '/gettest/not_exists_obj',
                                       'hash': md5hex('not_exists_obj'),
                                       'content_type': 'text/plain',
                                       'bytes': '%d' % len('not_exists_obj')
                                       }]))
        req = Request.blank('/v1/AUTH_test/gettest/manifest-not-exists',
                            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual('409 Conflict', status)
        self.assertEqual(self.app.unread_requests, {})
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'While processing manifest /v1/AUTH_test/gettest/'
            'manifest-not-exists, got 404 (<html><h1>Not Found</h1><p>The '
            'resource could not be foun...) while retrieving /v1/AUTH_test/'
            'gettest/not_exists_obj'
        ])
    def test_first_segment_not_available(self):
        # A 503 from the first segment before any data has been sent is
        # passed through as a 503 (unlike a 404, which maps to 409).
        self.app.register('GET', '/v1/AUTH_test/gettest/not_avail_obj',
                          swob.HTTPServiceUnavailable, {}, None)
        self.app.register('GET', '/v1/AUTH_test/gettest/manifest-not-avail',
                          swob.HTTPOk, {'Content-Type': 'application/json',
                                        'X-Static-Large-Object': 'true'},
                          json.dumps([{'name': '/gettest/not_avail_obj',
                                       'hash': md5hex('not_avail_obj'),
                                       'content_type': 'text/plain',
                                       'bytes': '%d' % len('not_avail_obj')
                                       }]))
        req = Request.blank('/v1/AUTH_test/gettest/manifest-not-avail',
                            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual('503 Service Unavailable', status)
        self.assertEqual(self.app.unread_requests, {})
        self.assertEqual(self.slo.logger.get_lines_for_level('error'), [
            'While processing manifest /v1/AUTH_test/gettest/'
            'manifest-not-avail, got 503 (<html><h1>Service Unavailable</h1>'
            '<p>The server is curren...) while retrieving /v1/AUTH_test/'
            'gettest/not_avail_obj'
        ])
        self.assertIn(b'Service Unavailable', body)
    def test_leading_data_segment(self):
        # A manifest that starts with an inline base64 'data' segment:
        # the data contributes to body, length, and SLO etag.
        slo_etag = md5hex(
            md5hex('preamble') +
            md5hex('a' * 5)
        )
        preamble = base64.b64encode(b'preamble')
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-single-preamble',
            swob.HTTPOk,
            {
                'Content-Type': 'application/json',
                'X-Static-Large-Object': 'true'
            },
            json.dumps([{
                'data': preamble.decode('ascii')
            }, {
                'name': '/gettest/a_5',
                'hash': md5hex('a' * 5),
                'content_type': 'text/plain',
                'bytes': '5',
            }])
        )
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-single-preamble',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual('200 OK', status)
        self.assertEqual(body, b'preambleaaaaa')
        self.assertIn(('Etag', '"%s"' % slo_etag), headers)
        self.assertIn(('Content-Length', '13'), headers)
    def test_trailing_data_segment(self):
        # A manifest that ends with an inline base64 'data' segment:
        # the data contributes to body, length, and SLO etag.
        slo_etag = md5hex(
            md5hex('a' * 5) +
            md5hex('postamble')
        )
        postamble = base64.b64encode(b'postamble')
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-single-postamble',
            swob.HTTPOk,
            {
                'Content-Type': 'application/json',
                'X-Static-Large-Object': 'true'
            },
            json.dumps([{
                'name': '/gettest/a_5',
                'hash': md5hex('a' * 5),
                'content_type': 'text/plain',
                'bytes': '5',
            }, {
                'data': postamble.decode('ascii')
            }]).encode('ascii')
        )
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-single-postamble',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)
        self.assertEqual('200 OK', status)
        self.assertEqual(body, b'aaaaapostamble')
        self.assertIn(('Etag', '"%s"' % slo_etag), headers)
        self.assertIn(('Content-Length', '14'), headers)
def test_data_segment_sandwich(self):
slo_etag = md5hex(
md5hex('preamble') +
md5hex('a' * 5) +
md5hex('postamble')
)
preamble = base64.b64encode(b'preamble')
postamble = base64.b64encode(b'postamble')
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-single-prepostamble',
swob.HTTPOk,
{
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'
},
json.dumps([{
'data': preamble.decode('ascii'),
}, {
'name': '/gettest/a_5',
'hash': md5hex('a' * 5),
'content_type': 'text/plain',
'bytes': '5',
}, {
'data': postamble.decode('ascii')
}])
)
# Test the whole SLO
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, b'preambleaaaaapostamble')
self.assertIn(('Etag', '"%s"' % slo_etag), headers)
self.assertIn(('Content-Length', '22'), headers)
# Test complete preamble only
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-7'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'preamble')
# Test range within preamble only
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=1-5'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'reamb')
# Test complete postamble only
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=13-21'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'postamble')
# Test partial pre and postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=4-16'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'mbleaaaaapost')
# Test partial preamble and first byte of data
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=1-8'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'reamblea')
# Test last byte of segment data and partial postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-single-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=12-16'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'apost')
def test_bunches_of_data_segments(self):
slo_etag = md5hex(
md5hex('ABCDEF') +
md5hex('a' * 5) +
md5hex('123456') +
md5hex('GHIJKL') +
md5hex('b' * 10) +
md5hex('7890@#')
)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-multi-prepostamble',
swob.HTTPOk,
{
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'
},
json.dumps([
{
'data': base64.b64encode(b'ABCDEF').decode('ascii')
},
{
'name': '/gettest/a_5',
'hash': md5hex('a' * 5),
'content_type': 'text/plain',
'bytes': '5',
},
{
'data': base64.b64encode(b'123456').decode('ascii')
},
{
'data': base64.b64encode(b'GHIJKL').decode('ascii')
},
{
'name': '/gettest/b_10',
'hash': md5hex('b' * 10),
'content_type': 'text/plain',
'bytes': '10',
},
{
'data': base64.b64encode(b'7890@#').decode('ascii')
}
])
)
# Test the whole SLO
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_slo(req)
self.assertEqual('200 OK', status)
self.assertEqual(body, b'ABCDEFaaaaa123456GHIJKLbbbbbbbbbb7890@#')
self.assertIn(('Etag', '"%s"' % slo_etag), headers)
self.assertIn(('Content-Length', '39'), headers)
# Test last byte first pre-amble to first byte of second postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=5-33'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'Faaaaa123456GHIJKLbbbbbbbbbb7')
# Test only second complete preamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=17-22'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'GHIJKL')
# Test only first complete postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=11-16'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'123456')
# Test only range within first postamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=12-15'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'2345')
# Test only range within first postamble and second preamble
req = Request.blank(
'/v1/AUTH_test/gettest/manifest-multi-prepostamble',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=12-18'})
status, headers, body = self.call_slo(req)
self.assertEqual('206 Partial Content', status)
self.assertEqual(body, b'23456GH')
class TestSloConditionalGetOldManifest(SloTestCase):
    """Conditional GET tests against a manifest that lacks SLO sysmeta.

    ``setUp`` registers four plain segments, a 'manifest-bc' sub-manifest
    and a 'manifest-abcd' manifest.  Any ``extra_manifest_headers`` (set by
    a subclass before this ``setUp`` runs) are merged into the abcd
    manifest's headers, and ``manifest_has_sysmeta`` records whether both
    X-Object-Sysmeta-Slo-Etag and X-Object-Sysmeta-Slo-Size ended up
    present.  Several tests branch their expected backend call sequences on
    that flag, so a subclass that supplies the sysmeta re-runs the same
    assertions against the sysmeta-aware code path.
    """

    # manifest-abcd's segment listing: plain a_5, the bc sub-SLO, then d_20
    slo_data = [
        {'name': '/gettest/a_5', 'hash': md5hex("a" * 5),
         'content_type': 'text/plain', 'bytes': '5'},
        {'name': '/gettest/manifest-bc', 'sub_slo': True,
         'content_type': 'application/json',
         'hash': md5hex(md5hex("b" * 10) + md5hex("c" * 15)),
         'bytes': 25},
        {'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
         'content_type': 'text/plain', 'bytes': '20'}]
    # the large object's etag: md5 of the concatenated segment hashes
    slo_etag = md5hex(''.join(seg['hash'] for seg in slo_data))

    def setUp(self):
        """Register segments, the bc sub-manifest and manifest-abcd."""
        super(TestSloConditionalGetOldManifest, self).setUp()

        # some plain old objects
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/a_5',
            swob.HTTPOk, {'Content-Length': '5',
                          'Etag': md5hex('a' * 5)},
            b'a' * 5)
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/b_10',
            swob.HTTPOk, {'Content-Length': '10',
                          'Etag': md5hex('b' * 10)},
            b'b' * 10)
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/c_15',
            swob.HTTPOk, {'Content-Length': '15',
                          'Etag': md5hex('c' * 15)},
            b'c' * 15)
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/d_20',
            swob.HTTPOk, {'Content-Length': '20',
                          'Etag': md5hex('d' * 20)},
            b'd' * 20)

        # the bc sub-manifest (carries an X-Object-Meta-Plant header)
        _bc_manifest_json = json.dumps(
            [{'name': '/gettest/b_10', 'hash': md5hex('b' * 10), 'bytes': '10',
              'content_type': 'text/plain'},
             {'name': '/gettest/c_15', 'hash': md5hex('c' * 15), 'bytes': '15',
              'content_type': 'text/plain'}])
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-bc',
            swob.HTTPOk, {'Content-Type': 'application/json',
                          'X-Static-Large-Object': 'true',
                          'X-Object-Meta-Plant': 'Ficus',
                          'Etag': md5hex(_bc_manifest_json)},
            _bc_manifest_json)

        _abcd_manifest_json = json.dumps(self.slo_data)
        self.abcd_manifest_json_etag = md5hex(_abcd_manifest_json)
        manifest_headers = {
            'Content-Length': str(len(_abcd_manifest_json)),
            'Content-Type': 'application/json',
            'X-Static-Large-Object': 'true',
            'Etag': self.abcd_manifest_json_etag,
            'X-Object-Sysmeta-Custom-Etag': 'a custom etag'}
        # a subclass may have set extra_manifest_headers before calling us
        manifest_headers.update(getattr(self, 'extra_manifest_headers', {}))
        self.manifest_has_sysmeta = all(h in manifest_headers for h in (
            'X-Object-Sysmeta-Slo-Etag', 'X-Object-Sysmeta-Slo-Size'))
        self.app.register(
            'GET', '/v1/AUTH_test/gettest/manifest-abcd',
            swob.HTTPOk, manifest_headers,
            _abcd_manifest_json.encode('ascii'))

    def test_if_none_match_matches(self):
        """If-None-Match equal to the SLO etag yields a 304."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-None-Match': self.slo_etag})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '304 Not Modified')
        self.assertIn(('Content-Length', '0'), headers)
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertEqual(body, b'')

        expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
        if not self.manifest_has_sysmeta:
            # We *still* verify the first segment
            expected_app_calls.extend([
                ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
                ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ])
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
                         'x-object-sysmeta-slo-etag')

    def test_if_none_match_does_not_match(self):
        """Non-matching If-None-Match serves the full 200 response."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-None-Match': "not-%s" % self.slo_etag})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '200 OK')
        self.assertIn(('Content-Length', '50'), headers)
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertEqual(
            body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')

        expected_app_calls = [
            ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
            ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
            ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
        ]
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
                         'x-object-sysmeta-slo-etag')

    def test_if_match_matches(self):
        """If-Match equal to the SLO etag serves the full 200 response."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Match': self.slo_etag})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '200 OK')
        self.assertIn(('Content-Length', '50'), headers)
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertEqual(
            body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')

        expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
        if not self.manifest_has_sysmeta:
            # Manifest never matches -> got back a 412; need to re-fetch
            expected_app_calls.append(
                ('GET', '/v1/AUTH_test/gettest/manifest-abcd'))
        expected_app_calls.extend([
            ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
            ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
        ])
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
                         'x-object-sysmeta-slo-etag')

    def test_if_match_does_not_match(self):
        """Non-matching If-Match yields a 412."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Match': "not-%s" % self.slo_etag})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '412 Precondition Failed')
        self.assertIn(('Content-Length', '0'), headers)
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertEqual(body, b'')

        expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
        if not self.manifest_has_sysmeta:
            # We *still* verify the first segment
            expected_app_calls.extend([
                # Manifest never matches -> got back a 412; need to re-fetch
                ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
                ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
                ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ])
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
                         'x-object-sysmeta-slo-etag')

    def test_if_none_match_matches_with_override(self):
        """If-None-Match against a custom etag named via X-Backend-Etag-Is-At
        yields a 304."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-None-Match': '"a custom etag"',
                     'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Custom-Etag'})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '304 Not Modified')
        self.assertIn(('Content-Length', '0'), headers)
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertIn(('X-Object-Sysmeta-Custom-Etag', 'a custom etag'),
                      headers)
        self.assertEqual(body, b'')

        expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
        if not self.manifest_has_sysmeta:
            # NB: no known middleware would have written a custom etag with
            # old-style manifests. but if there *was*, here's what'd happen
            expected_app_calls.extend([
                # 304, so gotta refetch
                ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
                # Since the "authoritative" etag didn't come from slo, we still
                # verify the first segment
                ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
                ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ])
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertEqual(
            self.app.headers[0].get('X-Backend-Etag-Is-At'),
            'X-Object-Sysmeta-Custom-Etag,x-object-sysmeta-slo-etag')

    def test_if_none_match_does_not_match_with_override(self):
        """With an etag override, the SLO etag no longer matches -> 200."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-None-Match': "%s" % self.slo_etag,
                     'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Custom-Etag'})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '200 OK')
        self.assertIn(('Content-Length', '50'), headers)
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertIn(('X-Object-Sysmeta-Custom-Etag', 'a custom etag'),
                      headers)
        self.assertEqual(
            body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')

        expected_app_calls = [
            ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
            ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
            ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
        ]
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertEqual(
            self.app.headers[0].get('X-Backend-Etag-Is-At'),
            'X-Object-Sysmeta-Custom-Etag,x-object-sysmeta-slo-etag')

    def test_if_match_matches_with_override(self):
        """If-Match against the overridden custom etag serves the body."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Match': '"a custom etag"',
                     'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Custom-Etag'})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '200 OK')
        self.assertIn(('Content-Length', '50'), headers)
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertIn(('X-Object-Sysmeta-Custom-Etag', 'a custom etag'),
                      headers)
        self.assertEqual(
            body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')

        expected_app_calls = [
            ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
            # Match on the override from left of us; no need to refetch
            ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
            ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/c_15?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/d_20?multipart-manifest=get'),
        ]
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertEqual(
            self.app.headers[0].get('X-Backend-Etag-Is-At'),
            'X-Object-Sysmeta-Custom-Etag,x-object-sysmeta-slo-etag')

    def test_if_match_does_not_match_with_override(self):
        """With an etag override, If-Match on the SLO etag fails -> 412."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Match': "%s" % self.slo_etag,
                     'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Custom-Etag'})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '412 Precondition Failed')
        self.assertIn(('Content-Length', '0'), headers)
        self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
        self.assertIn(('X-Object-Sysmeta-Custom-Etag', 'a custom etag'),
                      headers)
        self.assertEqual(body, b'')

        expected_app_calls = [('GET', '/v1/AUTH_test/gettest/manifest-abcd')]
        if not self.manifest_has_sysmeta:
            # NB: no known middleware would have written a custom etag with
            # old-style manifests. but if there *was*, here's what'd happen
            expected_app_calls.extend([
                # Manifest never matches -> got back a 412; need to re-fetch
                ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
                # We *still* verify the first segment, even though we'll 412
                ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
                ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ])
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertEqual(
            self.app.headers[0].get('X-Backend-Etag-Is-At'),
            'X-Object-Sysmeta-Custom-Etag,x-object-sysmeta-slo-etag')

    def test_if_match_matches_and_range(self):
        """If-Match satisfied together with a Range request -> 206."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Match': self.slo_etag,
                     'Range': 'bytes=3-6'})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '206 Partial Content')
        self.assertIn(('Content-Length', '4'), headers)
        # NOTE(review): self.manifest_abcd_etag is not assigned in this
        # class's setUp (which sets abcd_manifest_json_etag) -- presumably
        # it is provided by SloTestCase; confirm.
        self.assertIn(('Etag', '"%s"' % self.manifest_abcd_etag), headers)
        self.assertEqual(body, b'aabb')

        expected_app_calls = [
            ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
            # Needed to re-fetch because Range (and, for old manifests, 412)
            ('GET', '/v1/AUTH_test/gettest/manifest-abcd'),
            ('GET', '/v1/AUTH_test/gettest/manifest-bc'),
            ('GET', '/v1/AUTH_test/gettest/a_5?multipart-manifest=get'),
            ('GET', '/v1/AUTH_test/gettest/b_10?multipart-manifest=get'),
        ]
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertEqual(self.app.headers[0].get('X-Backend-Etag-Is-At'),
                         'x-object-sysmeta-slo-etag')

    def test_if_match_matches_passthrough(self):
        """?multipart-manifest=get conditionals use the manifest's own etag
        and set no X-Backend-Etag-Is-At."""
        # first fetch and stash the manifest etag
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd?multipart-manifest=get',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '200 OK')
        headers = HeaderKeyDict(headers)
        self.assertEqual('application/json; charset=utf-8',
                         headers['Content-Type'])
        manifest_etag = headers['Etag']

        # now use it as a condition and expect to match
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd?multipart-manifest=get',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'If-Match': manifest_etag})
        status, headers, body = self.call_slo(req)
        self.assertEqual(status, '200 OK')
        headers = HeaderKeyDict(headers)
        self.assertEqual(manifest_etag, headers['Etag'])

        expected_app_calls = [
            ('GET',
             '/v1/AUTH_test/gettest/manifest-abcd?multipart-manifest=get')] * 2
        self.assertEqual(self.app.calls, expected_app_calls)
        self.assertNotIn('X-Backend-Etag-Is-At', self.app.headers[0])
        self.assertNotIn('X-Backend-Etag-Is-At', self.app.headers[1])

    def test_range_resume_download(self):
        """A Range starting mid-object serves only the remaining bytes."""
        req = Request.blank(
            '/v1/AUTH_test/gettest/manifest-abcd',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Range': 'bytes=20-'})
        status, headers, body = self.call_slo(req)

        self.assertEqual(status, '206 Partial Content')
        self.assertEqual(body, b'ccccccccccdddddddddddddddddddd')
class TestSloConditionalGetNewManifest(TestSloConditionalGetOldManifest):
    """Re-run the conditional GET tests with slo-etag/slo-size sysmeta on
    the manifest, flipping the base class's manifest_has_sysmeta flag."""

    def setUp(self):
        # Must be in place before the base setUp registers the manifest;
        # it merges these into the manifest's response headers.
        self.extra_manifest_headers = {
            'X-Object-Sysmeta-Slo-Size': '50',
            'X-Object-Sysmeta-Slo-Etag': self.slo_etag,
        }
        super(TestSloConditionalGetNewManifest, self).setUp()
class TestSloBulkDeleter(unittest.TestCase):
    """Checks on the bulk-delete helper that the SLO filter builds."""

    def test_reused_logger(self):
        mware = slo.filter_factory({})('fake app')
        # middleware and its deleter share a single logger instance
        self.assertIs(mware.logger, mware.bulk_deleter.logger)

    def test_passes_through_concurrency(self):
        mware = slo.filter_factory({'delete_concurrency': 5})('fake app')
        self.assertEqual(5, mware.bulk_deleter.delete_concurrency)

    def test_uses_big_max_deletes(self):
        mware = slo.filter_factory(
            {'max_manifest_segments': 123456789})('fake app')
        # the deleter must allow at least one delete per manifest segment
        self.assertGreaterEqual(
            mware.bulk_deleter.max_deletes_per_request, 123456789)
class TestSwiftInfo(unittest.TestCase):
    """Verify what the SLO filter publishes in /info and how it parses
    its configuration."""

    def setUp(self):
        # start each test from a clean registry
        registry._swift_info = {}
        registry._swift_admin_info = {}

    def test_registered_defaults(self):
        mware = slo.filter_factory({})('have to pass in an app')
        swift_info = registry.get_swift_info()

        self.assertIn('slo', swift_info)
        slo_info = swift_info['slo']
        self.assertEqual(slo_info.get('max_manifest_segments'),
                         mware.max_manifest_segments)
        self.assertEqual(slo_info.get('min_segment_size'), 1)
        self.assertEqual(slo_info.get('max_manifest_size'),
                         mware.max_manifest_size)
        self.assertIs(slo_info.get('allow_async_delete'), True)

        # default middleware settings
        self.assertEqual(1000, mware.max_manifest_segments)
        self.assertEqual(8388608, mware.max_manifest_size)
        self.assertEqual(1048576, mware.rate_limit_under_size)
        self.assertEqual(10, mware.rate_limit_after_segment)
        self.assertEqual(1, mware.rate_limit_segments_per_sec)
        self.assertEqual(10, mware.yield_frequency)
        self.assertEqual(2, mware.concurrency)
        self.assertEqual(2, mware.bulk_deleter.delete_concurrency)
        self.assertIs(True, mware.allow_async_delete)

    def test_registered_non_defaults(self):
        conf = {
            'max_manifest_segments': 500,
            'max_manifest_size': 1048576,
            'rate_limit_under_size': 2097152,
            'rate_limit_after_segment': 20,
            'rate_limit_segments_per_sec': 2,
            'yield_frequency': 5,
            'concurrency': 1,
            'delete_concurrency': 3,
            'allow_async_delete': 'n',
        }
        mware = slo.filter_factory(conf)('have to pass in an app')
        swift_info = registry.get_swift_info()

        self.assertIn('slo', swift_info)
        slo_info = swift_info['slo']
        self.assertEqual(slo_info.get('max_manifest_segments'), 500)
        self.assertEqual(slo_info.get('min_segment_size'), 1)
        self.assertEqual(slo_info.get('max_manifest_size'), 1048576)
        self.assertIs(slo_info.get('allow_async_delete'), False)

        # every configured value made it onto the middleware
        self.assertEqual(500, mware.max_manifest_segments)
        self.assertEqual(1048576, mware.max_manifest_size)
        self.assertEqual(2097152, mware.rate_limit_under_size)
        self.assertEqual(20, mware.rate_limit_after_segment)
        self.assertEqual(2, mware.rate_limit_segments_per_sec)
        self.assertEqual(5, mware.yield_frequency)
        self.assertEqual(1, mware.concurrency)
        self.assertEqual(3, mware.bulk_deleter.delete_concurrency)
        self.assertIs(False, mware.allow_async_delete)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_slo.py |
# Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import shutil
import tempfile
import unittest
from io import BytesIO
from swift import gettext_ as _
from swift.common.swob import Request, Response
try:
from swift.common.middleware import xprofile
from swift.common.middleware.xprofile import ProfileMiddleware
from swift.common.middleware.x_profile.exceptions import (
MethodNotAllowed, NotFoundException, ODFLIBNotInstalled,
PLOTLIBNotInstalled)
from swift.common.middleware.x_profile.html_viewer import (
HTMLViewer, PLOTLIB_INSTALLED)
from swift.common.middleware.x_profile.profile_model import (
ODFLIB_INSTALLED, ProfileLog, Stats2)
except ImportError:
xprofile = None
class FakeApp(object):
    """Minimal WSGI app that answers every request with 'FAKE APP'."""

    def __call__(self, env, start_response):
        request = Request(env)
        # NOTE(review): body is a str here, not bytes -- confirm swob
        # accepts that, or that this app is never actually invoked by
        # the tests (which only hit /__profile__ paths).
        resp = Response(request=request, body='FAKE APP')
        return resp(env, start_response)
class TestXProfile(unittest.TestCase):
    """Sanity check for xprofile's profiler factory."""

    @unittest.skipIf(xprofile is None, "can't import xprofile")
    def test_get_profiler(self):
        # both supported backends must resolve to a profiler object
        for backend in ('cProfile', 'eventlet.green.profile'):
            self.assertIsNotNone(xprofile.get_profiler(backend))
class TestProfilers(unittest.TestCase):
    """Exercise runcall/runctx on both profiler backends."""

    @unittest.skipIf(xprofile is None, "can't import xprofile")
    def setUp(self):
        backends = ('cProfile', 'eventlet.green.profile')
        self.profilers = [xprofile.get_profiler(b) for b in backends]

    def fake_func(self, *args, **kw):
        # trivial profiled callable: count of all arguments received
        return len(args) + len(kw)

    def test_runcall(self):
        for profiler in self.profilers:
            result = profiler.runcall(self.fake_func, 'one', 'two',
                                      {'key1': 'value1'})
            self.assertEqual(result, 3)

    def test_runctx(self):
        for profiler in self.profilers:
            profiler.runctx('import os;os.getcwd();', globals(), locals())
            profiler.snapshot_stats()
            self.assertIsNotNone(profiler.stats)
            self.assertGreater(len(profiler.stats.keys()), 0)
class TestProfileMiddleware(unittest.TestCase):
    """End-to-end tests driving ProfileMiddleware through its WSGI
    interface, including the /__profile__ viewer requests."""

    @unittest.skipIf(xprofile is None, "can't import xprofile")
    def setUp(self):
        # populated by self.start_response on each request
        self.got_statuses = []
        # NOTE(review): the FakeApp *class* (not an instance) is wrapped
        # here; these tests only request /__profile__ paths -- confirm the
        # wrapped app is never actually invoked.
        self.app = ProfileMiddleware(FakeApp, {})
        self.tempdir = os.path.dirname(self.app.log_filename_prefix)
        self.pids = ['123', '456', str(os.getpid())]
        profiler = xprofile.get_profiler('eventlet.green.profile')
        # seed one dump file and one leftover '.tmp' file per pid
        for pid in self.pids:
            path = self.app.log_filename_prefix + pid
            profiler.runctx('import os;os.getcwd();', globals(), locals())
            profiler.dump_stats(path)
            profiler.runctx('import os;os.getcwd();', globals(), locals())
            profiler.dump_stats(path + '.tmp')

    def tearDown(self):
        # remove the middleware's profile dump directory
        shutil.rmtree(self.tempdir, ignore_errors=True)

    def get_app(self, app, global_conf, **local_conf):
        # build the middleware via its paste-deploy filter factory
        factory = xprofile.filter_factory(global_conf, **local_conf)
        return factory(app)

    def start_response(self, status, headers):
        # WSGI start_response stand-in; records what the middleware sent
        self.got_statuses = [status]
        self.headers = headers

    def test_combine_body_qs(self):
        """Body and query-string parameters are merged; for 'format' the
        body's value ('default') wins over the query string's ('json')."""
        body = (b"profile=all&sort=time&limit=-1&fulldirs=1"
                b"&nfl_filter=__call__&query=query&metric=nc&format=default")
        wsgi_input = BytesIO(body)
        environ = {'REQUEST_METHOD': 'GET',
                   'QUERY_STRING': 'profile=all&format=json',
                   'wsgi.input': wsgi_input}
        req = Request.blank('/__profile__/', environ=environ)
        query_dict = self.app._combine_body_qs(req)
        self.assertEqual(query_dict['profile'], ['all'])
        self.assertEqual(query_dict['sort'], ['time'])
        self.assertEqual(query_dict['limit'], ['-1'])
        self.assertEqual(query_dict['fulldirs'], ['1'])
        self.assertEqual(query_dict['nfl_filter'], ['__call__'])
        self.assertEqual(query_dict['query'], ['query'])
        self.assertEqual(query_dict['metric'], ['nc'])
        self.assertEqual(query_dict['format'], ['default'])

    def test_call(self):
        """Walk /__profile__ through its main request variants: HTML view,
        plot, downloads in several formats, DELETE, and a bogus profile."""
        body = b"sort=time&limit=-1&fulldirs=1&nfl_filter=&metric=nc"
        wsgi_input = BytesIO(body + b'&query=query')
        environ = {'HTTP_HOST': 'localhost:8080',
                   'PATH_INFO': '/__profile__',
                   'REQUEST_METHOD': 'GET',
                   'QUERY_STRING': 'profile=all&format=json',
                   'wsgi.input': wsgi_input}
        resp = self.app(environ, self.start_response)
        self.assertTrue(resp[0].find(b'<html>') > 0, resp)
        self.assertEqual(self.got_statuses, ['200 OK'])
        self.assertEqual(self.headers, [('content-type', 'text/html')])
        # plotting requires the plot library; expect a 500 without it
        wsgi_input = BytesIO(body + b'&plot=plot')
        environ['wsgi.input'] = wsgi_input
        if PLOTLIB_INSTALLED:
            resp = self.app(environ, self.start_response)
            self.assertEqual(self.got_statuses, ['200 OK'])
            self.assertEqual(self.headers, [('content-type', 'image/jpg')])
        else:
            resp = self.app(environ, self.start_response)
            self.assertEqual(self.got_statuses, ['500 Internal Server Error'])
        # downloads advertise the content-type matching the chosen format
        wsgi_input = BytesIO(body + b'&download=download&format=default')
        environ['wsgi.input'] = wsgi_input
        resp = self.app(environ, self.start_response)
        self.assertEqual(self.headers, [('content-type',
                                         HTMLViewer.format_dict['default'])])
        wsgi_input = BytesIO(body + b'&download=download&format=json')
        environ['wsgi.input'] = wsgi_input
        resp = self.app(environ, self.start_response)
        self.assertTrue(self.headers == [('content-type',
                                          HTMLViewer.format_dict['json'])])
        # DELETE is rejected by the viewer
        env2 = environ.copy()
        env2['REQUEST_METHOD'] = 'DELETE'
        resp = self.app(env2, self.start_response)
        self.assertEqual(self.got_statuses, ['405 Method Not Allowed'], resp)

        # use a totally bogus profile identifier
        wsgi_input = BytesIO(body + b'&profile=ABC&download=download')
        environ['wsgi.input'] = wsgi_input
        resp = self.app(environ, self.start_response)
        self.assertEqual(self.got_statuses, ['404 Not Found'], resp)

        # ODS export requires ODFLIB; expect a 500 without it
        wsgi_input = BytesIO(body + b'&download=download&format=ods')
        environ['wsgi.input'] = wsgi_input
        resp = self.app(environ, self.start_response)
        if ODFLIB_INSTALLED:
            self.assertEqual(self.headers, [('content-type',
                                             HTMLViewer.format_dict['ods'])])
        else:
            self.assertEqual(self.got_statuses, ['500 Internal Server Error'])

    def test_dump_checkpoint(self):
        # a checkpoint dump records when it last ran
        self.app.dump_checkpoint()
        self.assertTrue(self.app.last_dump_at is not None)

    def test_renew_profile(self):
        # renewing must install a brand-new profiler object
        old_profiler = self.app.profiler
        self.app.renew_profile()
        new_profiler = self.app.profiler
        self.assertTrue(old_profiler != new_profiler)
class Test_profile_log(unittest.TestCase):
    """Exercise ProfileLog both without (log1) and with (log2)
    timestamped dump filenames."""

    @unittest.skipIf(xprofile is None, "can't import xprofile")
    def setUp(self):
        def build(dump_timestamp, pids):
            # one temp dir, one ProfileLog, one dump per pid
            logdir = tempfile.mkdtemp()
            prefix = logdir + '/unittest.profile'
            plog = ProfileLog(prefix, dump_timestamp)
            profiler = xprofile.get_profiler('eventlet.green.profile')
            for pid in pids:
                profiler.runctx('import os;os.getcwd();',
                                globals(), locals())
                plog.dump_profile(profiler, pid)
            return logdir, prefix, plog

        self.pids1 = ['123', '456', str(os.getpid())]
        self.dir1, self.log_filename_prefix1, self.profile_log1 = \
            build(False, self.pids1)
        self.pids2 = ['321', '654', str(os.getpid())]
        self.dir2, self.log_filename_prefix2, self.profile_log2 = \
            build(True, self.pids2)

    def tearDown(self):
        for plog in (self.profile_log1, self.profile_log2):
            plog.clear('all')
        for logdir in (self.dir1, self.dir2):
            shutil.rmtree(logdir, ignore_errors=True)

    def test_get_all_pids(self):
        # without timestamps the reported pids are exactly those dumped
        self.assertEqual(sorted(self.pids1, reverse=True),
                         self.profile_log1.get_all_pids())
        # with timestamps each entry starts with 'pid-'
        for pid in self.profile_log2.get_all_pids():
            self.assertIn(pid.split('-')[0], self.pids2)

    def test_clear(self):
        for plog, prefix, first_pid, pids in (
                (self.profile_log1, self.log_filename_prefix1, '123',
                 self.pids1),
                (self.profile_log2, self.log_filename_prefix2, '321',
                 self.pids2)):
            # clearing a single pid, the current pid, then everything
            plog.clear(first_pid)
            self.assertFalse(os.path.exists(prefix + first_pid))
            plog.clear('current')
            self.assertFalse(os.path.exists(prefix + str(os.getpid())))
            plog.clear('all')
            for pid in pids:
                self.assertFalse(os.path.exists(prefix + pid))

    def test_get_logfiles(self):
        # non-timestamped log: filenames are exactly prefix + pid
        log_files = self.profile_log1.get_logfiles('all')
        self.assertEqual(3, len(log_files))
        self.assertEqual(len(self.pids1), len(log_files))
        log_files = self.profile_log1.get_logfiles('current')
        self.assertEqual(
            [self.log_filename_prefix1 + str(os.getpid())], log_files)
        log_files = self.profile_log1.get_logfiles(self.pids1[0])
        self.assertEqual(
            [self.log_filename_prefix1 + self.pids1[0]], log_files)
        # timestamped log: filenames merely contain prefix + pid
        log_files = self.profile_log2.get_logfiles('all')
        self.assertEqual(3, len(log_files))
        self.assertEqual(len(self.pids2), len(log_files))
        log_files = self.profile_log2.get_logfiles('current')
        self.assertEqual(1, len(log_files))
        self.assertIn(self.log_filename_prefix2 + str(os.getpid()),
                      log_files[0])
        log_files = self.profile_log2.get_logfiles(self.pids2[0])
        self.assertEqual(1, len(log_files))
        self.assertIn(self.log_filename_prefix2 + self.pids2[0],
                      log_files[0])

    def test_dump_profile(self):
        prof = xprofile.get_profiler('eventlet.green.profile')
        prof.runctx('import os;os.getcwd();', globals(), locals())
        prof.create_stats()
        for plog in (self.profile_log1, self.profile_log2):
            dump_path = plog.dump_profile(prof, os.getpid())
            self.assertTrue(os.path.exists(dump_path))
            os.remove(dump_path)
class Test_html_viewer(unittest.TestCase):
    @unittest.skipIf(xprofile is None, "can't import xprofile")
    def setUp(self):
        """Build a ProfileLog with one dump per pid, an HTMLViewer over it,
        and a combined body/query-string parameter dict for the tests."""
        self.app = ProfileMiddleware(FakeApp, {})
        self.log_files = []
        self.tempdir = tempfile.mkdtemp()
        self.log_filename_prefix = self.tempdir + '/unittest.profile'
        self.profile_log = ProfileLog(self.log_filename_prefix, False)
        self.pids = ['123', '456', str(os.getpid())]
        profiler = xprofile.get_profiler('eventlet.green.profile')
        for pid in self.pids:
            profiler.runctx('import os;os.getcwd();', globals(), locals())
            self.log_files.append(self.profile_log.dump_profile(profiler, pid))
        self.viewer = HTMLViewer('__profile__', 'eventlet.green.profile',
                                 self.profile_log)
        # request parameters, split between body and query string
        body = (b"profile=123&profile=456&sort=time&sort=nc&limit=10"
                b"&fulldirs=1&nfl_filter=getcwd&query=query&metric=nc")
        wsgi_input = BytesIO(body)
        environ = {'REQUEST_METHOD': 'GET',
                   'QUERY_STRING': 'profile=all',
                   'wsgi.input': wsgi_input}
        req = Request.blank('/__profile__/', environ=environ)
        self.query_dict = self.app._combine_body_qs(req)
def tearDown(self):
shutil.rmtree(self.tempdir, ignore_errors=True)
def fake_call_back(self):
pass
def test_get_param(self):
query_dict = self.query_dict
get_param = self.viewer._get_param
self.assertEqual(get_param(query_dict, 'profile', 'current', True),
['123', '456'])
self.assertEqual(get_param(query_dict, 'profile', 'current'), '123')
self.assertEqual(get_param(query_dict, 'sort', 'time'), 'time')
self.assertEqual(get_param(query_dict, 'sort', 'time', True),
['time', 'nc'])
self.assertEqual(get_param(query_dict, 'limit', -1), 10)
self.assertEqual(get_param(query_dict, 'fulldirs', '0'), '1')
self.assertEqual(get_param(query_dict, 'nfl_filter', ''), 'getcwd')
self.assertEqual(get_param(query_dict, 'query', ''), 'query')
self.assertEqual(get_param(query_dict, 'metric', 'time'), 'nc')
self.assertEqual(get_param(query_dict, 'format', 'default'), 'default')
def test_render(self):
url = 'http://localhost:8080/__profile__'
path_entries = ['/__profile__'.split('/'),
'/__profile__/'.split('/'),
'/__profile__/123'.split('/'),
'/__profile__/123/'.split('/'),
'/__profile__/123/:0(getcwd)'.split('/'),
'/__profile__/all'.split('/'),
'/__profile__/all/'.split('/'),
'/__profile__/all/:0(getcwd)'.split('/'),
'/__profile__/current'.split('/'),
'/__profile__/current/'.split('/'),
'/__profile__/current/:0(getcwd)'.split('/')]
content, headers = self.viewer.render(url, 'GET', path_entries[0],
self.query_dict, None)
self.assertTrue(content is not None)
self.assertEqual(headers, [('content-type', 'text/html')])
content, headers = self.viewer.render(url, 'POST', path_entries[0],
self.query_dict, None)
self.assertTrue(content is not None)
self.assertEqual(headers, [('content-type', 'text/html')])
plot_dict = self.query_dict.copy()
plot_dict['plot'] = ['plot']
if PLOTLIB_INSTALLED:
content, headers = self.viewer.render(url, 'POST', path_entries[0],
plot_dict, None)
self.assertEqual(headers, [('content-type', 'image/jpg')])
else:
self.assertRaises(PLOTLIBNotInstalled, self.viewer.render,
url, 'POST', path_entries[0], plot_dict, None)
clear_dict = self.query_dict.copy()
clear_dict['clear'] = ['clear']
del clear_dict['query']
clear_dict['profile'] = ['xxx']
content, headers = self.viewer.render(url, 'POST', path_entries[0],
clear_dict, None)
self.assertEqual(headers, [('content-type', 'text/html')])
download_dict = self.query_dict.copy()
download_dict['download'] = ['download']
content, headers = self.viewer.render(url, 'POST', path_entries[0],
download_dict, None)
self.assertTrue(headers == [('content-type',
self.viewer.format_dict['default'])])
content, headers = self.viewer.render(url, 'GET', path_entries[1],
self.query_dict, None)
self.assertTrue(isinstance(json.loads(content), dict))
for method in ['HEAD', 'PUT', 'DELETE', 'XYZMethod']:
self.assertRaises(MethodNotAllowed, self.viewer.render, url,
method, path_entries[10], self.query_dict, None)
for entry in path_entries[2:]:
download_dict['format'] = 'default'
content, headers = self.viewer.render(url, 'GET', entry,
download_dict, None)
self.assertTrue(
('content-type', self.viewer.format_dict['default'])
in headers, entry)
download_dict['format'] = 'json'
content, headers = self.viewer.render(url, 'GET', entry,
download_dict, None)
self.assertTrue(isinstance(json.loads(content), dict))
def test_index(self):
content, headers = self.viewer.index_page(self.log_files[0:1],
profile_id='current')
self.assertTrue(content.find('<html>') > -1)
self.assertTrue(headers == [('content-type', 'text/html')])
def test_index_all(self):
content, headers = self.viewer.index_page(self.log_files,
profile_id='all')
for f in self.log_files:
self.assertTrue(content.find(f) > 0, content)
self.assertTrue(headers == [('content-type', 'text/html')])
def test_download(self):
content, headers = self.viewer.download(self.log_files)
self.assertTrue(content is not None)
self.assertEqual(headers, [('content-type',
self.viewer.format_dict['default'])])
content, headers = self.viewer.download(self.log_files, sort='calls',
limit=10, nfl_filter='os')
self.assertTrue(content is not None)
self.assertEqual(headers, [('content-type',
self.viewer.format_dict['default'])])
content, headers = self.viewer.download(self.log_files,
output_format='default')
self.assertEqual(headers, [('content-type',
self.viewer.format_dict['default'])])
content, headers = self.viewer.download(self.log_files,
output_format='json')
self.assertTrue(isinstance(json.loads(content), dict))
self.assertEqual(headers, [('content-type',
self.viewer.format_dict['json'])])
content, headers = self.viewer.download(self.log_files,
output_format='csv')
self.assertEqual(headers, [('content-type',
self.viewer.format_dict['csv'])])
if ODFLIB_INSTALLED:
content, headers = self.viewer.download(self.log_files,
output_format='ods')
self.assertEqual(headers, [('content-type',
self.viewer.format_dict['ods'])])
else:
self.assertRaises(ODFLIBNotInstalled, self.viewer.download,
self.log_files, output_format='ods')
content, headers = self.viewer.download(self.log_files,
nfl_filter=__file__,
output_format='python')
self.assertEqual(headers, [('content-type',
self.viewer.format_dict['python'])])
def test_plot(self):
if PLOTLIB_INSTALLED:
content, headers = self.viewer.plot(self.log_files)
self.assertTrue(content is not None)
self.assertEqual(headers, [('content-type', 'image/jpg')])
self.assertRaises(NotFoundException, self.viewer.plot, [])
else:
self.assertRaises(PLOTLIBNotInstalled, self.viewer.plot,
self.log_files)
def test_format_source_code(self):
osfile = os.__file__.rstrip('c')
nfl_os = '%s:%d(%s)' % (osfile, 136, 'makedirs')
self.assertIn('makedirs', self.viewer.format_source_code(nfl_os))
self.assertNotIn('makedirsXYZ', self.viewer.format_source_code(nfl_os))
nfl_illegal = '%sc:136(makedirs)' % osfile
self.assertIn(_('The file type are forbidden to access!'),
self.viewer.format_source_code(nfl_illegal))
nfl_not_exist = '%s.py:136(makedirs)' % osfile
expected_msg = _('Can not access the file %s.py.') % osfile
self.assertIn(expected_msg,
self.viewer.format_source_code(nfl_not_exist))
class TestStats2(unittest.TestCase):
    """Tests for the Stats2 wrapper over dumped pstats profile data."""

    @unittest.skipIf(xprofile is None, "can't import xprofile")
    def setUp(self):
        # Dump stats from two profiler implementations into the same file;
        # Stats2 then loads the result.
        self.profile_file = tempfile.mktemp('profile', 'unittest')
        self.profilers = [xprofile.get_profiler('cProfile'),
                          xprofile.get_profiler('eventlet.green.profile')]
        for p in self.profilers:
            p.runctx('import os;os.getcwd();', globals(), locals())
            p.dump_stats(self.profile_file)
        self.stats2 = Stats2(self.profile_file)
        # Selections exercise name-only, negative-count and fractional forms.
        self.selections = [['getcwd'], ['getcwd', -1],
                           ['getcwd', -10], ['getcwd', 0.1]]

    def tearDown(self):
        os.remove(self.profile_file)

    def test_func_to_dict(self):
        # (module, line, function) triples become keyed dicts.
        func = ['profile.py', 100, '__call__']
        self.assertEqual({'module': 'profile.py', 'line': 100, 'function':
                          '__call__'}, self.stats2.func_to_dict(func))
        func = ['', 0, '__call__']
        self.assertEqual({'module': '', 'line': 0, 'function':
                          '__call__'}, self.stats2.func_to_dict(func))

    def test_to_json(self):
        for selection in self.selections:
            js = self.stats2.to_json(selection)
            self.assertTrue(isinstance(json.loads(js), dict))
            self.assertTrue(json.loads(js)['stats'] is not None)
            self.assertTrue(json.loads(js)['stats'][0] is not None)

    def test_to_ods(self):
        # Only exercised when odfpy is installed.
        if ODFLIB_INSTALLED:
            for selection in self.selections:
                self.assertTrue(self.stats2.to_ods(selection) is not None)

    def test_to_csv(self):
        for selection in self.selections:
            self.assertTrue(self.stats2.to_csv(selection) is not None)
            self.assertTrue('function calls' in self.stats2.to_csv(selection))
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| swift-master | test/unit/common/middleware/test_xprofile.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import time
import eventlet
import mock
from test.debug_logger import debug_logger
from test.unit import FakeMemcache
from swift.common.middleware import ratelimit
from swift.proxy.controllers.base import get_cache_key, \
headers_to_container_info
from swift.common.swob import Request
from swift.common import registry
# Use the original (un-monkeypatched) threading module so the tests below
# get real OS threads rather than eventlet greenthreads.
threading = eventlet.patcher.original('threading')
class FakeApp(object):
    """Tiny WSGI app that returns a fixed body.

    Unless ``skip_handled_check`` is set, it asserts that the ratelimit
    middleware flagged the request as handled before passing it through.
    """

    # Set True to bypass the swift.ratelimit.handled sanity assertion.
    skip_handled_check = False

    def __call__(self, env, start_response):
        was_handled = env.get('swift.ratelimit.handled')
        assert self.skip_handled_check or was_handled
        start_response('200 OK', [])
        return [b'Some Content']
class FakeReq(object):
    """Minimal stand-in for a swob Request: just a method and an environ."""

    def __init__(self, method, env=None):
        self.method = method
        self.environ = {} if env is None else env
def start_response(*args):
    """No-op WSGI ``start_response`` callable shared by these tests."""
    return None
# Fake clock shared by mock_sleep()/mock_time(); it advances only when
# mock_sleep() is called, so the tests never really sleep.
time_ticker = 0
# Optional queue of explicit timestamps that mock_time() returns first.
time_override = []
def mock_sleep(x):
    # Replacement for eventlet.sleep(): advance the fake clock instead of
    # actually sleeping.
    global time_ticker
    time_ticker += x
def mock_time():
    # Replacement for time.time() backed by the fake clock.  While
    # time_override holds values they are consumed and returned first; a
    # None entry means "return the ticker now and re-base the remaining
    # scripted values relative to it".
    global time_override
    global time_ticker
    if time_override:
        cur_time = time_override.pop(0)
        if cur_time is None:
            # Re-anchor the remaining scripted times to the current ticker.
            time_override = [None if i is None else i + time_ticker
                             for i in time_override]
            return time_ticker
        return cur_time
    return time_ticker
class TestRateLimit(unittest.TestCase):
    """Tests for the ratelimit middleware under a mocked eventlet clock."""

    def _reset_time(self):
        # Rewind the fake clock shared with mock_sleep()/mock_time().
        global time_ticker
        time_ticker = 0

    def setUp(self):
        # Patch out real sleeping and real time so tests run instantly.
        self.was_sleep = eventlet.sleep
        eventlet.sleep = mock_sleep
        self.was_time = time.time
        time.time = mock_time
        self._reset_time()

    def tearDown(self):
        eventlet.sleep = self.was_sleep
        time.time = self.was_time

    def _run(self, callable_func, num, rate, check_time=True):
        # Call callable_func num times and (optionally) check the fake
        # clock advanced by the expected total ratelimit sleep time.
        global time_ticker
        begin = time.time()
        for x in range(num):
            callable_func()
        end = time.time()
        total_time = float(num) / rate - 1.0 / rate  # 1st request not limited
        # Allow for one second of variation in the total time.
        time_diff = abs(total_time - (end - begin))
        if check_time:
            self.assertEqual(round(total_time, 1), round(time_ticker, 1))
        return time_diff

    def test_get_maxrate(self):
        conf_dict = {'container_ratelimit_10': 200,
                     'container_ratelimit_50': 100,
                     'container_ratelimit_75': 30}
        test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        test_ratelimit.logger = debug_logger()
        # Below the smallest configured size there is no limit at all.
        self.assertIsNone(ratelimit.get_maxrate(
            test_ratelimit.container_ratelimits, 0))
        self.assertIsNone(ratelimit.get_maxrate(
            test_ratelimit.container_ratelimits, 5))
        self.assertEqual(ratelimit.get_maxrate(
            test_ratelimit.container_ratelimits, 10), 200)
        # Sizes between thresholds map to an intermediate rate (60 -> 72).
        self.assertEqual(ratelimit.get_maxrate(
            test_ratelimit.container_ratelimits, 60), 72)
        self.assertEqual(ratelimit.get_maxrate(
            test_ratelimit.container_ratelimits, 160), 30)

    def test_get_ratelimitable_key_tuples(self):
        current_rate = 13
        conf_dict = {'account_ratelimit': current_rate,
                     'container_ratelimit_3': 200}
        fake_memcache = FakeMemcache()
        fake_memcache.store[get_cache_key('a', 'c')] = \
            {'object_count': '5'}
        the_app = ratelimit.filter_factory(conf_dict)(FakeApp())
        the_app.memcache_client = fake_memcache
        environ = {'swift.cache': fake_memcache, 'PATH_INFO': '/v1/a/c/o'}
        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        lambda *args, **kwargs: {}):
            # Account-level DELETE produces no keys; container-level
            # PUT/DELETE produce one.
            self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
                FakeReq('DELETE', environ), 'a', None, None)), 0)
            self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
                FakeReq('PUT', environ), 'a', 'c', None)), 1)
            self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
                FakeReq('DELETE', environ), 'a', 'c', None)), 1)
            # Object GET is not limited; object PUT is.
            self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
                FakeReq('GET', environ), 'a', 'c', 'o')), 0)
            self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
                FakeReq('PUT', environ), 'a', 'c', 'o')), 1)
        # A numeric global_ratelimit adds a global-write key tuple.
        self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
            FakeReq('PUT', environ), 'a', 'c', None, global_ratelimit=10)), 2)
        self.assertEqual(the_app.get_ratelimitable_key_tuples(
            FakeReq('PUT', environ), 'a', 'c', None, global_ratelimit=10)[1],
            ('ratelimit/global-write/a', 10))
        # A non-numeric global_ratelimit is ignored.
        self.assertEqual(len(the_app.get_ratelimitable_key_tuples(
            FakeReq('PUT', environ), 'a', 'c', None,
            global_ratelimit='notafloat')), 1)

    def test_memcached_container_info_dict(self):
        mdict = headers_to_container_info({'x-container-object-count': '45'})
        self.assertEqual(mdict['object_count'], '45')

    def test_ratelimit_old_memcache_format(self):
        # Cached entries using the old 'container_size' key still work.
        current_rate = 13
        conf_dict = {'account_ratelimit': current_rate,
                     'container_ratelimit_3': 200}
        fake_memcache = FakeMemcache()
        fake_memcache.store[get_cache_key('a', 'c')] = \
            {'container_size': 5}
        the_app = ratelimit.filter_factory(conf_dict)(FakeApp())
        the_app.memcache_client = fake_memcache
        req = FakeReq('PUT', {
            'PATH_INFO': '/v1/a/c/o', 'swift.cache': fake_memcache})
        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        lambda *args, **kwargs: {}):
            tuples = the_app.get_ratelimitable_key_tuples(req, 'a', 'c', 'o')
        self.assertEqual(tuples, [('ratelimit/a/c', 200.0)])

    def test_account_ratelimit(self):
        # Writes (PUT/DELETE) are limited and accumulate sleep time;
        # reads (GET/POST) are not.
        current_rate = 5
        num_calls = 50
        conf_dict = {'account_ratelimit': current_rate}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        with mock.patch('swift.common.middleware.ratelimit.get_container_info',
                        lambda *args, **kwargs: {}):
            with mock.patch(
                    'swift.common.middleware.ratelimit.get_account_info',
                    lambda *args, **kwargs: {}):
                for meth, exp_time in [('DELETE', 9.8), ('GET', 0),
                                       ('POST', 0), ('PUT', 9.8)]:
                    req = Request.blank('/v1/a%s/c' % meth)
                    req.method = meth
                    req.environ['swift.cache'] = FakeMemcache()
                    make_app_call = lambda: self.test_ratelimit(
                        req.environ.copy(), start_response)
                    begin = time.time()
                    self._run(make_app_call, num_calls, current_rate,
                              check_time=bool(exp_time))
                    self.assertEqual(round(time.time() - begin, 1), exp_time)
                    self._reset_time()

    def test_ratelimit_set_incr(self):
        # Limiting still works when memcache incr initially returns a
        # negative value.
        current_rate = 5
        num_calls = 50
        conf_dict = {'account_ratelimit': current_rate}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        req = Request.blank('/v1/a/c')
        req.method = 'PUT'
        req.environ['swift.cache'] = FakeMemcache()
        req.environ['swift.cache'].init_incr_return_neg = True
        make_app_call = lambda: self.test_ratelimit(req.environ.copy(),
                                                    start_response)
        begin = time.time()
        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        lambda *args, **kwargs: {}):
            self._run(make_app_call, num_calls, current_rate, check_time=False)
        self.assertEqual(round(time.time() - begin, 1), 9.8)

    def test_ratelimit_old_white_black_list(self):
        # Legacy account_whitelist/account_blacklist config options:
        # whitelisted accounts pass, blacklisted ones get 497.
        global time_ticker
        current_rate = 2
        conf_dict = {'account_ratelimit': current_rate,
                     'max_sleep_time_seconds': 2,
                     'account_whitelist': 'a',
                     'account_blacklist': 'b'}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        with mock.patch.object(self.test_ratelimit,
                               'memcache_client', FakeMemcache()):
            self.assertEqual(
                self.test_ratelimit.handle_ratelimit(
                    Request.blank('/'), 'a', 'c', 'o'),
                None)
            self.assertEqual(
                self.test_ratelimit.handle_ratelimit(
                    Request.blank('/'), 'b', 'c', 'o').status_int,
                497)

    def test_ratelimit_whitelist_sysmeta(self):
        # An account whose sysmeta global-write-ratelimit is WHITELIST is
        # never slowed down.
        global time_ticker
        current_rate = 2
        conf_dict = {'account_ratelimit': current_rate,
                     'max_sleep_time_seconds': 2,
                     'account_whitelist': 'a',
                     'account_blacklist': 'b'}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        req = Request.blank('/v1/a/c')
        req.environ['swift.cache'] = FakeMemcache()

        class rate_caller(threading.Thread):

            def __init__(self, parent):
                threading.Thread.__init__(self)
                self.parent = parent

            def run(self):
                self.result = self.parent.test_ratelimit(req.environ,
                                                         start_response)

        def get_fake_ratelimit(*args, **kwargs):
            return {'sysmeta': {'global-write-ratelimit': 'WHITELIST'}}

        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        get_fake_ratelimit):
            nt = 5
            threads = []
            for i in range(nt):
                rc = rate_caller(self)
                rc.start()
                threads.append(rc)
            for thread in threads:
                thread.join()
            # No thread got a 498 "Slow down" and the clock never advanced.
            the_498s = [
                t for t in threads
                if b''.join(t.result).startswith(b'Slow down')]
            self.assertEqual(len(the_498s), 0)
            self.assertEqual(time_ticker, 0)

    def test_ratelimit_blacklist(self):
        # Sysmeta BLACKLIST gets every request a 497 without sleeping.
        global time_ticker
        current_rate = 2
        conf_dict = {'account_ratelimit': current_rate,
                     'max_sleep_time_seconds': 2,
                     'account_whitelist': 'a',
                     'account_blacklist': 'b'}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        self.test_ratelimit.logger = debug_logger()
        self.test_ratelimit.BLACK_LIST_SLEEP = 0
        req = Request.blank('/v1/b/c')
        req.environ['swift.cache'] = FakeMemcache()

        class rate_caller(threading.Thread):

            def __init__(self, parent):
                threading.Thread.__init__(self)
                self.parent = parent

            def run(self):
                self.result = self.parent.test_ratelimit(req.environ.copy(),
                                                         start_response)

        def get_fake_ratelimit(*args, **kwargs):
            return {'sysmeta': {'global-write-ratelimit': 'BLACKLIST'}}

        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        get_fake_ratelimit):
            nt = 5
            threads = []
            for i in range(nt):
                rc = rate_caller(self)
                rc.start()
                threads.append(rc)
            for thread in threads:
                thread.join()
            the_497s = [
                t for t in threads
                if b''.join(t.result).startswith(b'Your account')]
            self.assertEqual(len(the_497s), 5)
            self.assertEqual(time_ticker, 0)

    def test_ratelimit_max_rate_double(self):
        global time_ticker
        global time_override
        current_rate = 2
        conf_dict = {'account_ratelimit': current_rate,
                     'clock_accuracy': 100,
                     'max_sleep_time_seconds': 1}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        self.test_ratelimit.log_sleep_time_seconds = .00001
        req = Request.blank('/v1/a/c')
        req.method = 'PUT'
        req.environ['swift.cache'] = FakeMemcache()
        time_override = [0, 0, 0, 0, None]
        # simulates 4 requests coming in at same time, then sleeping
        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        lambda *args, **kwargs: {}):
            r = self.test_ratelimit(req.environ.copy(), start_response)
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            self.assertEqual(r[0], b'Slow down')
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            self.assertEqual(r[0], b'Slow down')
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            self.assertEqual(r[0], b'Some Content')

    def test_ratelimit_max_rate_double_container(self):
        # Same burst scenario as above, but limited per-container via a
        # cached object count.
        global time_ticker
        global time_override
        current_rate = 2
        conf_dict = {'container_ratelimit_0': current_rate,
                     'clock_accuracy': 100,
                     'max_sleep_time_seconds': 1}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        self.test_ratelimit.log_sleep_time_seconds = .00001
        req = Request.blank('/v1/a/c/o')
        req.method = 'PUT'
        req.environ['swift.cache'] = FakeMemcache()
        req.environ['swift.cache'].set(
            get_cache_key('a', 'c'),
            {'object_count': 1})
        time_override = [0, 0, 0, 0, None]
        # simulates 4 requests coming in at same time, then sleeping
        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        lambda *args, **kwargs: {}):
            r = self.test_ratelimit(req.environ.copy(), start_response)
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            self.assertEqual(r[0], b'Slow down')
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            self.assertEqual(r[0], b'Slow down')
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            self.assertEqual(r[0], b'Some Content')

    def test_ratelimit_max_rate_double_container_listing(self):
        # Same burst scenario for container GET (listing) ratelimits.
        global time_ticker
        global time_override
        current_rate = 2
        conf_dict = {'container_listing_ratelimit_0': current_rate,
                     'clock_accuracy': 100,
                     'max_sleep_time_seconds': 1}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        self.test_ratelimit.log_sleep_time_seconds = .00001
        req = Request.blank('/v1/a/c')
        req.method = 'GET'
        req.environ['swift.cache'] = FakeMemcache()
        req.environ['swift.cache'].set(
            get_cache_key('a', 'c'),
            {'object_count': 1})
        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        lambda *args, **kwargs: {}):
            time_override = [0, 0, 0, 0, None]
            # simulates 4 requests coming in at same time, then sleeping
            r = self.test_ratelimit(req.environ.copy(), start_response)
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            self.assertEqual(r[0], b'Slow down')
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            self.assertEqual(r[0], b'Slow down')
            mock_sleep(.1)
            r = self.test_ratelimit(req.environ.copy(), start_response)
            self.assertEqual(r[0], b'Some Content')
            # With no memcache client at all, ratelimiting is a no-op.
            mc = self.test_ratelimit.memcache_client
            try:
                self.test_ratelimit.memcache_client = None
                self.assertIsNone(
                    self.test_ratelimit.handle_ratelimit(req, 'n', 'c', None))
            finally:
                self.test_ratelimit.memcache_client = mc

    def test_ratelimit_max_rate_multiple_acc(self):
        # Each account is limited independently; 15 accounts in parallel
        # each pay only their own sleep time.
        num_calls = 4
        current_rate = 2
        conf_dict = {'account_ratelimit': current_rate,
                     'max_sleep_time_seconds': 2}
        fake_memcache = FakeMemcache()
        the_app = ratelimit.filter_factory(conf_dict)(FakeApp())
        the_app.memcache_client = fake_memcache

        class rate_caller(threading.Thread):

            def __init__(self, name):
                self.myname = name
                threading.Thread.__init__(self)

            def run(self):
                for j in range(num_calls):
                    self.result = the_app.handle_ratelimit(
                        FakeReq('PUT'), self.myname, 'c', None)

        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        lambda *args, **kwargs: {}):
            nt = 15
            begin = time.time()
            threads = []
            for i in range(nt):
                rc = rate_caller('a%s' % i)
                rc.start()
                threads.append(rc)
            for thread in threads:
                thread.join()
            time_took = time.time() - begin
            self.assertEqual(1.5, round(time_took, 1))

    def test_call_invalid_path(self):
        # A malformed swift path bypasses ratelimiting entirely.
        env = {'REQUEST_METHOD': 'GET',
               'SCRIPT_NAME': '',
               'PATH_INFO': '//v1/AUTH_1234567890',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '80',
               'swift.cache': FakeMemcache(),
               'SERVER_PROTOCOL': 'HTTP/1.0'}
        app = lambda *args, **kwargs: ['fake_app']
        rate_mid = ratelimit.filter_factory({})(app)

        class a_callable(object):

            def __call__(self, *args, **kwargs):
                pass

        resp = rate_mid.__call__(env, a_callable())
        self.assertEqual('fake_app', resp[0])

    def test_call_non_swift_api_path(self):
        # Non-/v1 paths must not even look up account info.
        env = {'REQUEST_METHOD': 'GET',
               'SCRIPT_NAME': '',
               'PATH_INFO': '/ive/got/a/lovely/bunch/of/coconuts',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '80',
               'swift.cache': FakeMemcache(),
               'SERVER_PROTOCOL': 'HTTP/1.0'}
        app = lambda *args, **kwargs: ['some response']
        rate_mid = ratelimit.filter_factory({})(app)

        class a_callable(object):

            def __call__(self, *args, **kwargs):
                pass

        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        side_effect=Exception("you shouldn't call this")):
            resp = rate_mid(env, a_callable())
        self.assertEqual(resp[0], 'some response')

    def test_no_memcache(self):
        current_rate = 13
        num_calls = 5
        conf_dict = {'account_ratelimit': current_rate}
        fake_app = FakeApp()
        fake_app.skip_handled_check = True
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(fake_app)
        req = Request.blank('/v1/a')
        req.environ['swift.cache'] = None
        make_app_call = lambda: self.test_ratelimit(req.environ,
                                                    start_response)
        begin = time.time()
        self._run(make_app_call, num_calls, current_rate, check_time=False)
        time_took = time.time() - begin
        self.assertEqual(round(time_took, 1), 0)  # no memcache, no limiting

    def test_already_handled(self):
        # A request already flagged swift.ratelimit.handled is not limited
        # again.
        current_rate = 13
        num_calls = 5
        conf_dict = {'container_listing_ratelimit_0': current_rate}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        fake_cache = FakeMemcache()
        fake_cache.set(
            get_cache_key('a', 'c'),
            {'object_count': 1})
        req = Request.blank('/v1/a/c', environ={'swift.cache': fake_cache})
        req.environ['swift.ratelimit.handled'] = True
        make_app_call = lambda: self.test_ratelimit(req.environ,
                                                    start_response)
        begin = time.time()
        self._run(make_app_call, num_calls, current_rate, check_time=False)
        time_took = time.time() - begin
        self.assertEqual(round(time_took, 1), 0)  # no memcache, no limiting

    def test_restarting_memcache(self):
        # memcache errors on incr disable limiting rather than failing.
        current_rate = 2
        num_calls = 5
        conf_dict = {'account_ratelimit': current_rate}
        self.test_ratelimit = ratelimit.filter_factory(conf_dict)(FakeApp())
        req = Request.blank('/v1/a/c')
        req.method = 'PUT'
        req.environ['swift.cache'] = FakeMemcache()
        req.environ['swift.cache'].error_on_incr = True
        make_app_call = lambda: self.test_ratelimit(req.environ,
                                                    start_response)
        begin = time.time()
        with mock.patch('swift.common.middleware.ratelimit.get_account_info',
                        lambda *args, **kwargs: {}):
            self._run(make_app_call, num_calls, current_rate, check_time=False)
            time_took = time.time() - begin
            self.assertEqual(round(time_took, 1), 0)  # no memcache, no limit
class TestSwiftInfo(unittest.TestCase):
    """Verify which config keys the ratelimit filter registers in /info."""

    def setUp(self):
        # Reset the global registry so each test starts from a clean slate.
        registry._swift_info = {}
        registry._swift_admin_info = {}

    def test_registered_defaults(self):

        def check_key_is_absent(key):
            # Asserts swift_info has no such key; re-raises if the KeyError
            # came from somewhere else.
            try:
                swift_info[key]
            except KeyError as err:
                if key not in str(err):
                    raise

        test_limits = {'account_ratelimit': 1,
                       'max_sleep_time_seconds': 60,
                       'container_ratelimit_0': 0,
                       'container_ratelimit_10': 10,
                       'container_ratelimit_50': 50,
                       'container_listing_ratelimit_0': 0,
                       'container_listing_ratelimit_10': 10,
                       'container_listing_ratelimit_50': 50}
        ratelimit.filter_factory(test_limits)('have to pass in an app')
        swift_info = registry.get_swift_info()
        self.assertIn('ratelimit', swift_info)
        self.assertEqual(swift_info['ratelimit']
                         ['account_ratelimit'], 1.0)
        self.assertEqual(swift_info['ratelimit']
                         ['max_sleep_time_seconds'], 60.0)
        # container_ratelimit_N options become (N, rate) pairs.
        self.assertEqual(swift_info['ratelimit']
                         ['container_ratelimits'][0][0], 0)
        self.assertEqual(swift_info['ratelimit']
                         ['container_ratelimits'][0][1], 0.0)
        self.assertEqual(swift_info['ratelimit']
                         ['container_ratelimits'][1][0], 10)
        self.assertEqual(swift_info['ratelimit']
                         ['container_ratelimits'][1][1], 10.0)
        self.assertEqual(swift_info['ratelimit']
                         ['container_ratelimits'][2][0], 50)
        self.assertEqual(swift_info['ratelimit']
                         ['container_ratelimits'][2][1], 50.0)
        self.assertEqual(swift_info['ratelimit']
                         ['container_listing_ratelimits'][0][0], 0)
        self.assertEqual(swift_info['ratelimit']
                         ['container_listing_ratelimits'][0][1], 0.0)
        self.assertEqual(swift_info['ratelimit']
                         ['container_listing_ratelimits'][1][0], 10)
        self.assertEqual(swift_info['ratelimit']
                         ['container_listing_ratelimits'][1][1], 10.0)
        self.assertEqual(swift_info['ratelimit']
                         ['container_listing_ratelimits'][2][0], 50)
        self.assertEqual(swift_info['ratelimit']
                         ['container_listing_ratelimits'][2][1], 50.0)
        # these were left out on purpose
        # (fixed typo: 'ratelimit_whitelis' -> 'ratelimit_whitelist', to
        # match 'ratelimit_blacklist' and actually check the intended key)
        for key in ['log_sleep_time_seconds', 'clock_accuracy',
                    'rate_buffer_seconds', 'ratelimit_whitelist',
                    'ratelimit_blacklist']:
            check_key_is_absent(key)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| swift-master | test/unit/common/middleware/test_ratelimit.py |
# -*- coding: utf-8 -*-
# Copyright (c) 2023 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Request, HTTPOk, HTTPNotFound, HTTPCreated
from test.unit.common.middleware.helpers import FakeSwift
class TestFakeSwift(unittest.TestCase):
def test_not_registered(self):
swift = FakeSwift()
def do_test(method):
req = Request.blank('/v1/a/c/o')
req.method = method
with self.assertRaises(KeyError):
req.get_response(swift)
do_test('GET')
do_test('HEAD')
do_test('POST')
do_test('PUT')
do_test('DELETE')
    def test_GET_registered(self):
        # verify that a single registered GET response is sufficient to handle
        # GETs and HEADS, with and without query strings
        swift = FakeSwift()
        swift.register('GET', '/v1/a/c/o', HTTPOk, {'X-Foo': 'Bar'}, b'stuff')
        # plain GET
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'X-Foo': 'Bar'},
                         resp.headers)
        self.assertEqual(b'stuff', resp.body)
        self.assertEqual(1, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o'), swift.calls[-1])
        # GET with a query string still matches the registered path
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        req.query_string = 'p=q'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'X-Foo': 'Bar'},
                         resp.headers)
        self.assertEqual(b'stuff', resp.body)
        self.assertEqual(2, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o?p=q'), swift.calls[-1])
        # HEAD falls back to the GET registration, with an empty body
        req = Request.blank('/v1/a/c/o')
        req.method = 'HEAD'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Type': 'text/html; charset=UTF-8',
                          'X-Foo': 'Bar'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(3, swift.call_count)
        self.assertEqual(('HEAD', '/v1/a/c/o'), swift.calls[-1])
        # HEAD with a query string
        req = Request.blank('/v1/a/c/o')
        req.method = 'HEAD'
        req.query_string = 'p=q'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Type': 'text/html; charset=UTF-8',
                          'X-Foo': 'Bar'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(4, swift.call_count)
        self.assertEqual(('HEAD', '/v1/a/c/o?p=q'), swift.calls[-1])
    def test_GET_registered_with_query_string(self):
        # verify that a response registered WITH a query string only matches
        # requests carrying that query string
        swift = FakeSwift()
        swift.register('GET', '/v1/a/c/o?p=q', HTTPOk,
                       {'X-Foo': 'Bar'}, b'stuff')
        # without the query string there is no registration to match
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        with self.assertRaises(KeyError):
            resp = req.get_response(swift)
        # with the query string it matches
        req.query_string = 'p=q'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'X-Foo': 'Bar'},
                         resp.headers)
        self.assertEqual(b'stuff', resp.body)
        self.assertEqual(1, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o?p=q'), swift.calls[-1])
    def test_GET_and_HEAD_registered(self):
        # verify that a registered HEAD response will be preferred over GET
        # for HEAD request
        swift = FakeSwift()
        swift.register('GET', '/v1/a/c/o', HTTPOk, {'X-Foo': 'Bar'}, b'stuff')
        swift.register('HEAD', '/v1/a/c/o', HTTPNotFound, {}, b'')
        # GET still uses the GET registration
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'X-Foo': 'Bar'},
                         resp.headers)
        self.assertEqual(b'stuff', resp.body)
        self.assertEqual(1, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o'), swift.calls[-1])
        # HEAD uses the dedicated HEAD registration (404 here)
        req = Request.blank('/v1/a/c/o')
        req.method = 'HEAD'
        resp = req.get_response(swift)
        self.assertEqual(404, resp.status_int)
        self.assertEqual({'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(2, swift.call_count)
        self.assertEqual(('HEAD', '/v1/a/c/o'), swift.calls[-1])
        # HEAD with a query string also hits the HEAD registration
        req = Request.blank('/v1/a/c/o')
        req.method = 'HEAD'
        req.query_string = 'p=q'
        resp = req.get_response(swift)
        self.assertEqual(404, resp.status_int)
        self.assertEqual({'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(3, swift.call_count)
        self.assertEqual(('HEAD', '/v1/a/c/o?p=q'), swift.calls[-1])
    def test_PUT_uploaded(self):
        """A PUT body is stored by FakeSwift and later served to GET/HEAD."""
        # verify an uploaded object is sufficient to handle GETs and HEADS,
        # with and without query strings
        swift = FakeSwift()
        swift.register('PUT', '/v1/a/c/o', HTTPCreated, {}, None)
        req = Request.blank('/v1/a/c/o', body=b'stuff')
        req.method = 'PUT'
        resp = req.get_response(swift)
        self.assertEqual(201, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Etag': 'c13d88cb4cb02003daedb8a84e5d272a',
                          'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(1, swift.call_count)
        self.assertEqual(('PUT', '/v1/a/c/o'), swift.calls[-1])
        # GET serves the uploaded body even though no GET was registered
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'Host': 'localhost:80'},
                         resp.headers)
        self.assertEqual(b'stuff', resp.body)
        self.assertEqual(2, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o'), swift.calls[-1])
        # GET with a query string also finds the uploaded object
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        req.query_string = 'p=q'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'Host': 'localhost:80'},
                         resp.headers)
        self.assertEqual(b'stuff', resp.body)
        self.assertEqual(3, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o?p=q'), swift.calls[-1])
        # HEAD returns the stored headers but no body
        req = Request.blank('/v1/a/c/o')
        req.method = 'HEAD'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'Host': 'localhost:80'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(4, swift.call_count)
        self.assertEqual(('HEAD', '/v1/a/c/o'), swift.calls[-1])
        # HEAD with a query string behaves the same
        req = Request.blank('/v1/a/c/o')
        req.method = 'HEAD'
        req.query_string = 'p=q'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'Host': 'localhost:80'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(5, swift.call_count)
        self.assertEqual(('HEAD', '/v1/a/c/o?p=q'), swift.calls[-1])
    def test_PUT_uploaded_with_query_string(self):
        """The PUT query string is recorded in calls but not the object key."""
        # verify an uploaded object with query string is sufficient to handle
        # GETs and HEADS, with and without query strings
        swift = FakeSwift()
        swift.register('PUT', '/v1/a/c/o', HTTPCreated, {}, None)
        req = Request.blank('/v1/a/c/o', body=b'stuff')
        req.method = 'PUT'
        req.query_string = 'multipart-manifest=put'
        resp = req.get_response(swift)
        self.assertEqual(201, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Etag': 'c13d88cb4cb02003daedb8a84e5d272a',
                          'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(1, swift.call_count)
        self.assertEqual(('PUT', '/v1/a/c/o?multipart-manifest=put'),
                         swift.calls[-1])
        # note: query string is not included in uploaded key
        self.assertEqual(
            {'/v1/a/c/o': ({'Host': 'localhost:80',
                            'Content-Length': '5'},
                           b'stuff')},
            swift.uploaded)
        # GET without a query string still finds the object
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'Host': 'localhost:80'},
                         resp.headers)
        self.assertEqual(b'stuff', resp.body)
        self.assertEqual(2, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o'), swift.calls[-1])
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        req.query_string = 'p=q'  # note: differs from PUT query string
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'Host': 'localhost:80'},
                         resp.headers)
        self.assertEqual(b'stuff', resp.body)
        self.assertEqual(3, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o?p=q'), swift.calls[-1])
        # HEADs behave the same, with and without query strings
        req = Request.blank('/v1/a/c/o')
        req.method = 'HEAD'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'Host': 'localhost:80'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(4, swift.call_count)
        self.assertEqual(('HEAD', '/v1/a/c/o'), swift.calls[-1])
        req = Request.blank('/v1/a/c/o')
        req.method = 'HEAD'
        req.query_string = 'p=q'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'Host': 'localhost:80'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(5, swift.call_count)
        self.assertEqual(('HEAD', '/v1/a/c/o?p=q'), swift.calls[-1])
    def test_PUT_POST(self):
        """A POST replaces the headers stored for an uploaded object."""
        # verify an uploaded object is updated by a POST
        swift = FakeSwift()
        swift.register('PUT', '/v1/a/c/o', HTTPCreated, {}, None)
        # Note: the POST must be registered
        swift.register('POST', '/v1/a/c/o', HTTPCreated, {}, None)
        req = Request.blank('/v1/a/c/o', body=b'stuff',
                            headers={'X-Object-Meta-Foo': 'Bar'})
        req.method = 'PUT'
        resp = req.get_response(swift)
        self.assertEqual(201, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Etag': 'c13d88cb4cb02003daedb8a84e5d272a',
                          'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(1, swift.call_count)
        self.assertEqual(('PUT', '/v1/a/c/o'), swift.calls[-1])
        self.assertEqual(
            {'/v1/a/c/o': ({'Host': 'localhost:80',
                            'Content-Length': '5',
                            'X-Object-Meta-Foo': 'Bar'},
                           b'stuff')},
            swift.uploaded)
        # POST should update the uploaded object
        req = Request.blank('/v1/a/c/o', body=b'stuff',
                            headers={'X-Object-Meta-Foo': 'Baz'})
        req.method = 'POST'
        resp = req.get_response(swift)
        self.assertEqual(201, resp.status_int)
        self.assertEqual({'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(2, swift.call_count)
        self.assertEqual(('POST', '/v1/a/c/o'), swift.calls[-1])
        self.assertEqual(
            {'/v1/a/c/o': ({'Host': 'localhost:80',
                            'Content-Length': '5',
                            'X-Object-Meta-Foo': 'Baz'},
                           b'stuff')},
            swift.uploaded)
    def test_PUT_with_query_string_POST(self):
        """POSTs update an object that was PUT with a query string."""
        # verify an uploaded object with query string is updated by a POST
        swift = FakeSwift()
        swift.register('PUT', '/v1/a/c/o', HTTPCreated, {}, None)
        # Note: the POST must be registered
        swift.register('POST', '/v1/a/c/o', HTTPCreated, {}, None)
        req = Request.blank('/v1/a/c/o', body=b'stuff',
                            headers={'X-Object-Meta-Foo': 'Bar'})
        req.method = 'PUT'
        req.query_string = 'p=q'
        resp = req.get_response(swift)
        self.assertEqual(201, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Etag': 'c13d88cb4cb02003daedb8a84e5d272a',
                          'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(1, swift.call_count)
        self.assertEqual(('PUT', '/v1/a/c/o?p=q'), swift.calls[-1])
        # note: query string is not included in uploaded key
        self.assertEqual(
            {'/v1/a/c/o': ({'Host': 'localhost:80',
                            'Content-Length': '5',
                            'X-Object-Meta-Foo': 'Bar'},
                           b'stuff')},
            swift.uploaded)
        # POST without query string should update the uploaded object
        req = Request.blank('/v1/a/c/o', body=b'stuff',
                            headers={'X-Object-Meta-Foo': 'Baz'})
        req.method = 'POST'
        resp = req.get_response(swift)
        self.assertEqual(201, resp.status_int)
        self.assertEqual({'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(2, swift.call_count)
        self.assertEqual(('POST', '/v1/a/c/o'), swift.calls[-1])
        self.assertEqual(
            {'/v1/a/c/o': ({'Host': 'localhost:80',
                            'Content-Length': '5',
                            'X-Object-Meta-Foo': 'Baz'},
                           b'stuff')},
            swift.uploaded)
        # POST with different query string should update the uploaded object
        req = Request.blank('/v1/a/c/o', body=b'stuff',
                            headers={'X-Object-Meta-Foo': 'Bof'})
        req.method = 'POST'
        req.query_string = 'x=y'
        resp = req.get_response(swift)
        self.assertEqual(201, resp.status_int)
        self.assertEqual({'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(3, swift.call_count)
        self.assertEqual(('POST', '/v1/a/c/o?x=y'), swift.calls[-1])
        self.assertEqual(
            {'/v1/a/c/o': ({'Host': 'localhost:80',
                            'Content-Length': '5',
                            'X-Object-Meta-Foo': 'Bof'},
                           b'stuff')},
            swift.uploaded)
        # a subsequent GET sees the metadata from the last POST
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Content-Type': 'text/html; charset=UTF-8',
                          'Host': 'localhost:80',
                          'X-Object-Meta-Foo': 'Bof'},
                         resp.headers)
        self.assertEqual(b'stuff', resp.body)
        self.assertEqual(4, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o'), swift.calls[-1])
    def test_GET_registered_overrides_uploaded(self):
        """An explicitly registered GET wins over the stored PUT body."""
        swift = FakeSwift()
        swift.register('PUT', '/v1/a/c/o', HTTPCreated, {}, None)
        swift.register('GET', '/v1/a/c/o', HTTPOk, {}, b'not stuff')
        req = Request.blank('/v1/a/c/o', body=b'stuff',
                            headers={'X-Object-Meta-Foo': 'Bar'})
        req.method = 'PUT'
        resp = req.get_response(swift)
        self.assertEqual(201, resp.status_int)
        self.assertEqual({'Content-Length': '5',
                          'Etag': 'c13d88cb4cb02003daedb8a84e5d272a',
                          'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'', resp.body)
        self.assertEqual(1, swift.call_count)
        self.assertEqual(('PUT', '/v1/a/c/o'), swift.calls[-1])
        self.assertEqual(
            {'/v1/a/c/o': ({'Host': 'localhost:80',
                            'Content-Length': '5',
                            'X-Object-Meta-Foo': 'Bar'},
                           b'stuff')},
            swift.uploaded)
        # GET returns the registered body, not the uploaded one
        req = Request.blank('/v1/a/c/o')
        req.method = 'GET'
        resp = req.get_response(swift)
        self.assertEqual(200, resp.status_int)
        self.assertEqual({'Content-Length': '9',
                          'Content-Type': 'text/html; charset=UTF-8'},
                         resp.headers)
        self.assertEqual(b'not stuff', resp.body)
        self.assertEqual(2, swift.call_count)
        self.assertEqual(('GET', '/v1/a/c/o'), swift.calls[-1])
| swift-master | test/unit/common/middleware/test_helpers.py |
# Copyright (c) 2013 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Request
from swift.common.middleware import crossdomain
class FakeApp(object):
    """Stand-in for the downstream WSGI app; always yields a marker body.

    Note: the return value is a bare bytes object rather than a proper
    WSGI iterable -- the tests below only compare it by value.
    """

    def __call__(self, environ, start_response):
        return b"FAKE APP"
def start_response(*args):
    """No-op start_response stub for calling WSGI apps directly."""
    return None
class TestCrossDomain(unittest.TestCase):
    """Exercise the crossdomain middleware via direct WSGI calls."""

    def setUp(self):
        self.app = crossdomain.filter_factory({})(FakeApp())

    def _call(self, path, method='GET'):
        # Invoke the current middleware the way a WSGI server would.
        req = Request.blank(path, environ={'REQUEST_METHOD': method})
        return self.app(req.environ, start_response)

    def _wrap_policy(self, *body_lines):
        # Surround the given policy lines with the standard XML envelope.
        pieces = [
            b'<?xml version="1.0"?>',
            b'<!DOCTYPE cross-domain-policy SYSTEM '
            b'"http://www.adobe.com/xml/dtds/cross-domain-policy.dtd" >',
            b'<cross-domain-policy>',
        ]
        pieces.extend(body_lines)
        pieces.append(b'</cross-domain-policy>')
        return b'\n'.join(pieces)

    # GET of /crossdomain.xml (default)
    def test_crossdomain_default(self):
        expected = self._wrap_policy(
            b'<allow-access-from domain="*" secure="false" />')
        self.assertEqual(self._call('/crossdomain.xml'), [expected])

    # GET of /crossdomain.xml (custom)
    def test_crossdomain_custom(self):
        conf = {'cross_domain_policy': '<dummy 1>\n<dummy 2>'}
        self.app = crossdomain.CrossDomainMiddleware(FakeApp(), conf)
        expected = self._wrap_policy(b'<dummy 1>', b'<dummy 2>')
        self.assertEqual(self._call('/crossdomain.xml'), [expected])

    # GET to a different resource should be passed on
    def test_crossdomain_pass(self):
        self.assertEqual(self._call('/'), b'FAKE APP')

    # Only GET is allowed on the /crossdomain.xml resource
    def test_crossdomain_get_only(self):
        for method in ('HEAD', 'PUT', 'POST', 'COPY', 'OPTIONS'):
            self.assertEqual(self._call('/crossdomain.xml', method),
                             b'FAKE APP')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_crossdomain.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.middleware import acl
class TestACL(unittest.TestCase):
    """Unit tests for the V1/V2 ACL parse/format helpers in acl.py."""
    def test_clean_acl(self):
        """clean_acl normalizes referrer ACL strings and rejects bad ones."""
        value = acl.clean_acl('header', '.r:*')
        self.assertEqual(value, '.r:*')
        value = acl.clean_acl('header', '.r:specific.host')
        self.assertEqual(value, '.r:specific.host')
        value = acl.clean_acl('header', '.r:.ending.with')
        self.assertEqual(value, '.r:.ending.with')
        # a leading '*' on a dotted suffix is dropped
        value = acl.clean_acl('header', '.r:*.ending.with')
        self.assertEqual(value, '.r:.ending.with')
        value = acl.clean_acl('header', '.r:-*.ending.with')
        self.assertEqual(value, '.r:-.ending.with')
        value = acl.clean_acl('header', '.r:one,.r:two')
        self.assertEqual(value, '.r:one,.r:two')
        value = acl.clean_acl('header', '.r:*,.r:-specific.host')
        self.assertEqual(value, '.r:*,.r:-specific.host')
        value = acl.clean_acl('header', '.r:*,.r:-.ending.with')
        self.assertEqual(value, '.r:*,.r:-.ending.with')
        value = acl.clean_acl('header', '.r:one,.r:-two')
        self.assertEqual(value, '.r:one,.r:-two')
        value = acl.clean_acl('header', '.r:one,.r:-two,account,account:user')
        self.assertEqual(value, '.r:one,.r:-two,account,account:user')
        value = acl.clean_acl('header', 'TEST_account')
        self.assertEqual(value, 'TEST_account')
        # '.ref', '.referer' and '.referrer' are synonyms for '.r'
        value = acl.clean_acl('header', '.ref:*')
        self.assertEqual(value, '.r:*')
        value = acl.clean_acl('header', '.referer:*')
        self.assertEqual(value, '.r:*')
        value = acl.clean_acl('header', '.referrer:*')
        self.assertEqual(value, '.r:*')
        # surrounding whitespace and empty items are stripped
        value = acl.clean_acl('header',
                              ' .r : one , ,, .r:two , .r : - three ')
        self.assertEqual(value, '.r:one,.r:two,.r:-three')
        self.assertRaises(ValueError, acl.clean_acl, 'header', '.unknown:test')
        self.assertRaises(ValueError, acl.clean_acl, 'header', '.r:')
        self.assertRaises(ValueError, acl.clean_acl, 'header', '.r:*.')
        self.assertRaises(ValueError, acl.clean_acl, 'header', '.r : * . ')
        self.assertRaises(ValueError, acl.clean_acl, 'header', '.r:-*.')
        self.assertRaises(ValueError, acl.clean_acl, 'header', '.r : - * . ')
        self.assertRaises(ValueError, acl.clean_acl, 'header', ' .r : ')
        self.assertRaises(ValueError, acl.clean_acl, 'header', 'user , .r : ')
        self.assertRaises(ValueError, acl.clean_acl, 'header', '.r:-')
        self.assertRaises(ValueError, acl.clean_acl, 'header', ' .r : - ')
        self.assertRaises(ValueError, acl.clean_acl, 'header',
                          'user , .r : - ')
        # referrer ACLs are only valid on read headers
        self.assertRaises(ValueError, acl.clean_acl, 'write-header', '.r:r')
    def test_parse_acl(self):
        """V1 parse_acl splits a string into (referrers, groups)."""
        self.assertEqual(acl.parse_acl(None), ([], []))
        self.assertEqual(acl.parse_acl(''), ([], []))
        self.assertEqual(acl.parse_acl('.r:ref1'), (['ref1'], []))
        self.assertEqual(acl.parse_acl('.r:-ref1'), (['-ref1'], []))
        self.assertEqual(acl.parse_acl('account:user'),
                         ([], ['account:user']))
        self.assertEqual(acl.parse_acl('account'), ([], ['account']))
        self.assertEqual(acl.parse_acl('acc1,acc2:usr2,.r:ref3,.r:-ref4'),
                         (['ref3', '-ref4'], ['acc1', 'acc2:usr2']))
        self.assertEqual(acl.parse_acl(
            'acc1,acc2:usr2,.r:ref3,acc3,acc4:usr4,.r:ref5,.r:-ref6'),
            (['ref3', 'ref5', '-ref6'],
             ['acc1', 'acc2:usr2', 'acc3', 'acc4:usr4']))
    def test_parse_v2_acl(self):
        """V2 parse_acl decodes a JSON object or returns None on bad input."""
        # For all these tests, the header name will be "hdr".
        tests = [
            # Simple case: all ACL data in one header line
            ({'hdr': '{"a":1,"b":"foo"}'}, {'a': 1, 'b': 'foo'}),
            # No header "hdr" exists -- should return None
            ({}, None),
            ({'junk': 'junk'}, None),
            # Empty ACLs should return empty dict
            ({'hdr': ''}, {}),
            ({'hdr': '{}'}, {}),
            ({'hdr': '{ }'}, {}),
            # Bad input -- should return None
            ({'hdr': '["array"]'}, None),
            ({'hdr': 'null'}, None),
            ({'hdr': '"some_string"'}, None),
            ({'hdr': '123'}, None),
        ]
        for hdrs_in, expected in tests:
            result = acl.parse_acl(version=2, data=hdrs_in.get('hdr'))
            self.assertEqual(expected, result,
                             '%r: %r != %r' % (hdrs_in, result, expected))
    def test_format_v1_acl(self):
        """V1 format_acl joins groups and '.r:' referrers into one string."""
        tests = [
            ((['a', 'b'], ['c.com']), 'a,b,.r:c.com'),
            ((['a', 'b'], ['c.com', '-x.c.com']), 'a,b,.r:c.com,.r:-x.c.com'),
            ((['a', 'b'], None), 'a,b'),
            ((None, ['c.com']), '.r:c.com'),
            ((None, None), ''),
        ]
        for (groups, refs), expected in tests:
            result = acl.format_acl(
                version=1, groups=groups, referrers=refs, header_name='hdr')
            self.assertEqual(expected, result, 'groups=%r, refs=%r: %r != %r'
                             % (groups, refs, result, expected))
    def test_format_v2_acl(self):
        """V2 format_acl emits compact, key-sorted JSON."""
        tests = [
            ({}, '{}'),
            ({'foo': 'bar'}, '{"foo":"bar"}'),
            ({'groups': ['a', 'b'], 'referrers': ['c.com', '-x.c.com']},
             '{"groups":["a","b"],"referrers":["c.com","-x.c.com"]}'),
        ]
        for data, expected in tests:
            result = acl.format_acl(version=2, acl_dict=data)
            self.assertEqual(expected, result,
                             'data=%r: %r *!=* %r' % (data, result, expected))
    def test_acls_from_account_info(self):
        """ACLs are extracted from the core-access-control sysmeta value."""
        test_data = [
            ({}, None),
            ({'sysmeta': {}}, None),
            ({'sysmeta':
                {'core-access-control': '{"VERSION":1,"admin":["a","b"]}'}},
                {'admin': ['a', 'b'], 'read-write': [], 'read-only': []}),
            ({
                'some-key': 'some-value',
                'other-key': 'other-value',
                'sysmeta': {
                    'core-access-control': '{"VERSION":1,"admin":["a","b"],"r'
                    'ead-write":["c"],"read-only":[]}',
                }},
                {'admin': ['a', 'b'], 'read-write': ['c'], 'read-only': []}),
        ]
        for args, expected in test_data:
            result = acl.acls_from_account_info(args)
            self.assertEqual(expected, result, "%r: Got %r, expected %r" %
                             (args, result, expected))
    def test_referrer_allowed(self):
        """referrer_allowed matches hosts against allow/deny patterns."""
        self.assertTrue(not acl.referrer_allowed('host', None))
        self.assertTrue(not acl.referrer_allowed('host', []))
        self.assertTrue(acl.referrer_allowed(None, ['*']))
        self.assertTrue(acl.referrer_allowed('', ['*']))
        self.assertTrue(not acl.referrer_allowed(None, ['specific.host']))
        self.assertTrue(not acl.referrer_allowed('', ['specific.host']))
        self.assertTrue(
            acl.referrer_allowed('http://www.example.com/index.html',
                                 ['.example.com']))
        self.assertTrue(acl.referrer_allowed(
            'http://user@www.example.com/index.html', ['.example.com']))
        self.assertTrue(acl.referrer_allowed(
            'http://user:pass@www.example.com/index.html', ['.example.com']))
        self.assertTrue(acl.referrer_allowed(
            'http://www.example.com:8080/index.html', ['.example.com']))
        self.assertTrue(acl.referrer_allowed(
            'http://user@www.example.com:8080/index.html', ['.example.com']))
        self.assertTrue(acl.referrer_allowed(
            'http://user:pass@www.example.com:8080/index.html',
            ['.example.com']))
        self.assertTrue(acl.referrer_allowed(
            'http://user:pass@www.example.com:8080', ['.example.com']))
        self.assertTrue(acl.referrer_allowed('http://www.example.com',
                                             ['.example.com']))
        self.assertTrue(not acl.referrer_allowed(
            'http://thief.example.com',
            ['.example.com', '-thief.example.com']))
        self.assertTrue(not acl.referrer_allowed(
            'http://thief.example.com',
            ['*', '-thief.example.com']))
        self.assertTrue(acl.referrer_allowed(
            'http://www.example.com',
            ['.other.com', 'www.example.com']))
        self.assertTrue(acl.referrer_allowed(
            'http://www.example.com',
            ['-.example.com', 'www.example.com']))
        # This is considered a relative uri to the request uri, a mode not
        # currently supported.
        self.assertTrue(not acl.referrer_allowed('www.example.com',
                                                 ['.example.com']))
        self.assertTrue(not acl.referrer_allowed('../index.html',
                                                 ['.example.com']))
        self.assertTrue(acl.referrer_allowed('www.example.com', ['*']))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_acl.py |
# Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.middleware import keystoneauth
from swift.common.swob import Request, Response
from swift.common.http import HTTP_FORBIDDEN
from swift.common.utils import split_path
from swift.proxy.controllers.base import get_cache_key
from test.debug_logger import debug_logger
# Shorthand for the middleware's "unknown project domain" sentinel.
UNKNOWN_ID = keystoneauth.UNKNOWN_ID
def _fake_token_info(version='2'):
if version == '2':
return {'access': 'fake_value'}
if version == '3':
return {'token': 'fake_value'}
def operator_roles(test_auth):
    """Return a fresh list (not a reference) of the default prefix's
    operator roles."""
    default_prefix = test_auth.reseller_prefixes[0]
    roles = test_auth.account_rules[default_prefix].get('operator_roles')
    return list(roles)
def get_account_for_tenant(test_auth, tenant_id):
    """Convenience function reduces unit test churn"""
    # Account name is the default reseller prefix followed by the tenant id.
    return test_auth.reseller_prefixes[0] + tenant_id
def get_identity_headers(status='Confirmed', tenant_id='1',
                         tenant_name='acct', project_domain_name='domA',
                         project_domain_id='99',
                         user_name='usr', user_id='42',
                         user_domain_name='domA', user_domain_id='99',
                         role='admin',
                         service_role=None):
    """Build the identity headers keystone's auth_token middleware sets.

    ``role`` may be a string, a list (joined with commas), or None (no
    roles).  ``X_SERVICE_ROLES`` is only included when ``service_role``
    is truthy.
    """
    if role is None:
        role = []
    if isinstance(role, list):
        role = ','.join(role)
    headers = {
        'X_IDENTITY_STATUS': status,
        'X_TENANT_ID': tenant_id,
        'X_TENANT_NAME': tenant_name,
        'X_PROJECT_ID': tenant_id,
        'X_PROJECT_NAME': tenant_name,
        'X_PROJECT_DOMAIN_ID': project_domain_id,
        'X_PROJECT_DOMAIN_NAME': project_domain_name,
        'X_ROLES': role,
        'X_USER_NAME': user_name,
        'X_USER_ID': user_id,
        'X_USER_DOMAIN_NAME': user_domain_name,
        'X_USER_DOMAIN_ID': user_domain_id,
    }
    if service_role:
        headers['X_SERVICE_ROLES'] = service_role
    return headers
class FakeApp(object):
    """WSGI app that records each request and replays canned responses.

    ``status_headers_body_iter`` yields (status, headers, body) tuples,
    one per expected request; the default is a single 404.
    """

    def __init__(self, status_headers_body_iter=None):
        self.calls = 0
        self.call_contexts = []
        # Falsy value (None/empty) means "use a single canned 404".
        self.status_headers_body_iter = (
            status_headers_body_iter or iter([('404 Not Found', {}, '')]))

    def __call__(self, env, start_response):
        self.calls += 1
        self.request = Request.blank('', environ=env)
        # Honor any authorize callback installed by the auth middleware;
        # a truthy return value is a denial response to send back as-is.
        if 'swift.authorize' in env:
            denial = env['swift.authorize'](self.request)
            if denial:
                return denial(env, start_response)
        self.call_contexts.append({'method': self.request.method,
                                   'headers': self.request.headers})
        status, headers, body = next(self.status_headers_body_iter)
        response = Response(status=status, headers=headers, body=body)
        return response(env, start_response)
class SwiftAuth(unittest.TestCase):
    """Tests for keystoneauth behavior with the default reseller prefix."""
    def setUp(self):
        self.test_auth = keystoneauth.filter_factory({})(FakeApp())
        self.test_auth.logger = debug_logger()
    def _make_request(self, path=None, headers=None, **kwargs):
        """Build a swob Request; path defaults to an object in AUTH_foo."""
        if not path:
            path = '/v1/%s/c/o' % get_account_for_tenant(self.test_auth, 'foo')
        return Request.blank(path, headers=headers, **kwargs)
    def _get_successful_middleware(self):
        """Return keystoneauth wrapping an app that responds 200 once."""
        response_iter = iter([('200 OK', {}, '')])
        return keystoneauth.filter_factory({})(FakeApp(response_iter))
    def test_invalid_request_authorized(self):
        role = self.test_auth.reseller_admin_role
        headers = get_identity_headers(role=role)
        req = self._make_request('/', headers=headers)
        resp = req.get_response(self._get_successful_middleware())
        self.assertEqual(resp.status_int, 404)
    def test_invalid_request_non_authorized(self):
        req = self._make_request('/')
        resp = req.get_response(self._get_successful_middleware())
        self.assertEqual(resp.status_int, 404)
    def test_confirmed_identity_is_authorized(self):
        role = self.test_auth.reseller_admin_role
        headers = get_identity_headers(role=role)
        req = self._make_request('/v1/AUTH_acct/c', headers)
        resp = req.get_response(self._get_successful_middleware())
        self.assertEqual(resp.status_int, 200)
    def test_detect_reseller_request(self):
        role = self.test_auth.reseller_admin_role
        headers = get_identity_headers(role=role)
        req = self._make_request('/v1/AUTH_acct/c', headers)
        req.get_response(self._get_successful_middleware())
        self.assertTrue(req.environ.get('reseller_request'))
    def test_confirmed_identity_is_not_authorized(self):
        headers = get_identity_headers()
        req = self._make_request('/v1/AUTH_acct/c', headers)
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 403)
    def test_anonymous_is_authorized_for_permitted_referrer(self):
        req = self._make_request(headers={'X_IDENTITY_STATUS': 'Invalid'})
        req.acl = '.r:*'
        resp = req.get_response(self._get_successful_middleware())
        self.assertEqual(resp.status_int, 200)
    def test_anonymous_with_validtoken_authorized_for_permitted_referrer(self):
        req = self._make_request(headers={'X_IDENTITY_STATUS': 'Confirmed'})
        req.acl = '.r:*'
        resp = req.get_response(self._get_successful_middleware())
        self.assertEqual(resp.status_int, 200)
    def test_anonymous_is_not_authorized_for_unknown_reseller_prefix(self):
        req = self._make_request(path='/v1/BLAH_foo/c/o',
                                 headers={'X_IDENTITY_STATUS': 'Invalid'})
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
    def test_denied_responses(self):
        """Invalid identity gives 401; confirmed-but-unauthorized gives 403."""
        def get_resp_status(headers):
            req = self._make_request(headers=headers)
            resp = req.get_response(self.test_auth)
            return resp.status_int
        self.assertEqual(get_resp_status({'X_IDENTITY_STATUS': 'Confirmed'}),
                         403)
        self.assertEqual(get_resp_status(
            {'X_IDENTITY_STATUS': 'Confirmed',
             'X_SERVICE_IDENTITY_STATUS': 'Confirmed'}), 403)
        self.assertEqual(get_resp_status({}), 401)
        self.assertEqual(get_resp_status(
            {'X_IDENTITY_STATUS': 'Invalid'}), 401)
        self.assertEqual(get_resp_status(
            {'X_IDENTITY_STATUS': 'Invalid',
             'X_SERVICE_IDENTITY_STATUS': 'Confirmed'}), 401)
        self.assertEqual(get_resp_status(
            {'X_IDENTITY_STATUS': 'Confirmed',
             'X_SERVICE_IDENTITY_STATUS': 'Invalid'}), 401)
        self.assertEqual(get_resp_status(
            {'X_IDENTITY_STATUS': 'Invalid',
             'X_SERVICE_IDENTITY_STATUS': 'Invalid'}), 401)
    def test_blank_reseller_prefix(self):
        conf = {'reseller_prefix': ''}
        test_auth = keystoneauth.filter_factory(conf)(FakeApp())
        account = tenant_id = 'foo'
        self.assertTrue(test_auth._account_matches_tenant(account, tenant_id))
    def test_reseller_prefix_added_underscore(self):
        conf = {'reseller_prefix': 'AUTH'}
        test_auth = keystoneauth.filter_factory(conf)(FakeApp())
        self.assertEqual(test_auth.reseller_prefixes[0], "AUTH_")
    def test_reseller_prefix_not_added_double_underscores(self):
        conf = {'reseller_prefix': 'AUTH_'}
        test_auth = keystoneauth.filter_factory(conf)(FakeApp())
        self.assertEqual(test_auth.reseller_prefixes[0], "AUTH_")
    def test_override_asked_for_but_not_allowed(self):
        conf = {'allow_overrides': 'false'}
        self.test_auth = keystoneauth.filter_factory(conf)(FakeApp())
        req = self._make_request('/v1/AUTH_account',
                                 environ={'swift.authorize_override': True})
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
    def test_override_asked_for_and_allowed(self):
        conf = {'allow_overrides': 'true'}
        self.test_auth = keystoneauth.filter_factory(conf)(FakeApp())
        req = self._make_request('/v1/AUTH_account',
                                 environ={'swift.authorize_override': True})
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 404)
    def test_override_default_allowed(self):
        req = self._make_request('/v1/AUTH_account',
                                 environ={'swift.authorize_override': True})
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 404)
    def test_anonymous_options_allowed(self):
        req = self._make_request('/v1/AUTH_account',
                                 environ={'REQUEST_METHOD': 'OPTIONS'})
        resp = req.get_response(self._get_successful_middleware())
        self.assertEqual(resp.status_int, 200)
    def test_identified_options_allowed(self):
        # Fixed: previously a 'headers' dict was built and mutated with a
        # bogus REQUEST_METHOD key but never passed on; pass it through.
        headers = get_identity_headers()
        req = self._make_request('/v1/AUTH_account',
                                 headers=headers,
                                 environ={'REQUEST_METHOD': 'OPTIONS'})
        resp = req.get_response(self._get_successful_middleware())
        self.assertEqual(resp.status_int, 200)
    def test_auth_scheme(self):
        req = self._make_request(path='/v1/BLAH_foo/c/o',
                                 headers={'X_IDENTITY_STATUS': 'Invalid'})
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
        self.assertTrue('Www-Authenticate' in resp.headers)
    def test_project_domain_id_sysmeta_set(self):
        """POST with a v3 token persists the project domain id in sysmeta."""
        proj_id = '12345678'
        proj_domain_id = '13'
        headers = get_identity_headers(tenant_id=proj_id,
                                       project_domain_id=proj_domain_id)
        account = get_account_for_tenant(self.test_auth, proj_id)
        path = '/v1/' + account
        # fake cached account info
        info_key = get_cache_key(account)
        env = {'swift.infocache': {info_key: {'status': 0, 'sysmeta': {}}},
               'keystone.token_info': _fake_token_info(version='3')}
        req = Request.blank(path, environ=env, headers=headers)
        req.method = 'POST'
        headers_out = {'X-Account-Sysmeta-Project-Domain-Id': proj_domain_id}
        fake_app = FakeApp(iter([('200 OK', headers_out, '')]))
        test_auth = keystoneauth.filter_factory({})(fake_app)
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(len(fake_app.call_contexts), 1)
        headers_sent = fake_app.call_contexts[0]['headers']
        self.assertTrue('X-Account-Sysmeta-Project-Domain-Id' in headers_sent,
                        headers_sent)
        self.assertEqual(headers_sent['X-Account-Sysmeta-Project-Domain-Id'],
                         proj_domain_id)
        self.assertTrue('X-Account-Project-Domain-Id' in resp.headers)
        self.assertEqual(resp.headers['X-Account-Project-Domain-Id'],
                         proj_domain_id)
    def test_project_domain_id_sysmeta_set_to_unknown(self):
        """A token scoped to a different project records UNKNOWN_ID."""
        proj_id = '12345678'
        # token scoped to a different project
        headers = get_identity_headers(tenant_id='87654321',
                                       project_domain_id='default',
                                       role='reselleradmin')
        account = get_account_for_tenant(self.test_auth, proj_id)
        path = '/v1/' + account
        # fake cached account info
        info_key = get_cache_key(account)
        env = {'swift.infocache': {info_key: {'status': 0, 'sysmeta': {}}},
               'keystone.token_info': _fake_token_info(version='3')}
        req = Request.blank(path, environ=env, headers=headers)
        req.method = 'POST'
        fake_app = FakeApp(iter([('200 OK', {}, '')]))
        test_auth = keystoneauth.filter_factory({})(fake_app)
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(len(fake_app.call_contexts), 1)
        headers_sent = fake_app.call_contexts[0]['headers']
        self.assertTrue('X-Account-Sysmeta-Project-Domain-Id' in headers_sent,
                        headers_sent)
        self.assertEqual(headers_sent['X-Account-Sysmeta-Project-Domain-Id'],
                         UNKNOWN_ID)
    def test_project_domain_id_sysmeta_not_set(self):
        """A matching-project v2 token does not set the sysmeta header."""
        proj_id = '12345678'
        headers = get_identity_headers(tenant_id=proj_id, role='admin')
        account = get_account_for_tenant(self.test_auth, proj_id)
        path = '/v1/' + account
        info_key = get_cache_key(account)
        # v2 token
        env = {'swift.infocache': {info_key: {'status': 0, 'sysmeta': {}}},
               'keystone.token_info': _fake_token_info(version='2')}
        req = Request.blank(path, environ=env, headers=headers)
        req.method = 'POST'
        fake_app = FakeApp(iter([('200 OK', {}, '')]))
        test_auth = keystoneauth.filter_factory({})(fake_app)
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(len(fake_app.call_contexts), 1)
        headers_sent = fake_app.call_contexts[0]['headers']
        self.assertFalse('X-Account-Sysmeta-Project-Domain-Id' in headers_sent,
                         headers_sent)
    def test_project_domain_id_sysmeta_set_unknown_with_v2(self):
        """A differently-scoped v2 token also records UNKNOWN_ID."""
        proj_id = '12345678'
        # token scoped to a different project
        headers = get_identity_headers(tenant_id='87654321',
                                       role='reselleradmin')
        account = get_account_for_tenant(self.test_auth, proj_id)
        path = '/v1/' + account
        info_key = get_cache_key(account)
        # v2 token
        env = {'swift.infocache': {info_key: {'status': 0, 'sysmeta': {}}},
               'keystone.token_info': _fake_token_info(version='2')}
        req = Request.blank(path, environ=env, headers=headers)
        req.method = 'POST'
        fake_app = FakeApp(iter([('200 OK', {}, '')]))
        test_auth = keystoneauth.filter_factory({})(fake_app)
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(len(fake_app.call_contexts), 1)
        headers_sent = fake_app.call_contexts[0]['headers']
        self.assertTrue('X-Account-Sysmeta-Project-Domain-Id' in headers_sent,
                        headers_sent)
        self.assertEqual(headers_sent['X-Account-Sysmeta-Project-Domain-Id'],
                         UNKNOWN_ID)
class SwiftAuthMultiple(SwiftAuth):
    """Runs same tests as SwiftAuth with multiple reseller prefixes

    Runs SwiftAuth tests while a second reseller prefix item exists.
    Validates that there is no regression against the original
    single prefix configuration.
    """
    def setUp(self):
        # same as SwiftAuth.setUp but with a second prefix, 'PRE2'
        self.test_auth = keystoneauth.filter_factory(
            {'reseller_prefix': 'AUTH, PRE2'})(FakeApp())
        self.test_auth.logger = debug_logger()
class ServiceTokenFunctionality(unittest.TestCase):
    """Authorization behavior with composite (user + service) tokens."""

    # configuration enabling a second reseller prefix whose accounts
    # additionally require a 'service' role presented in X-Service-Token
    _DUAL_CONF = {'reseller_prefix': 'AUTH, PRE2',
                  'PRE2_service_roles': 'service'}

    def _make_authed_request(self, conf, project_id, path, method='GET',
                             user_role='admin', service_role=None,
                             environ=None):
        """Make a request with keystoneauth as auth

        By default, acts as though the user had presented a token
        containing the 'admin' role in X-Auth-Token scoped to the specified
        project_id.

        :param conf: configuration for keystoneauth
        :param project_id: the project_id of the token
        :param path: the path of the request
        :param method: the method (defaults to GET)
        :param user_role: the role of X-Auth-Token (defaults to 'admin')
        :param service_role: the role in X-Service-Token (defaults to none)
        :param environ: a dict of items to be added to the request environ
                        (defaults to none)
        :returns: response object
        """
        identity = get_identity_headers(tenant_id=project_id,
                                        role=user_role,
                                        service_role=service_role)
        account = split_path(path, 2, 4, True)[1]
        # fake cached account info so no backend lookup is attempted
        cache_key = get_cache_key(account)
        env = {'swift.infocache': {cache_key: {'status': 0, 'sysmeta': {}}},
               'keystone.token_info': _fake_token_info(version='2')}
        if environ:
            env.update(environ)
        req = Request.blank(path, environ=env, headers=identity)
        req.method = method
        backend = FakeApp(iter([('200 OK', {}, '')]))
        return req.get_response(keystoneauth.filter_factory(conf)(backend))

    def test_existing_swift_owner_ignored(self):
        # a request without admin role is denied, even when swift_owner has
        # previously been set True in the request environ
        for swift_owner in (False, True):
            resp = self._make_authed_request(
                {'reseller_prefix': 'AUTH'}, '12345678', '/v1/AUTH_12345678',
                environ={'swift_owner': swift_owner},
                user_role='something_else')
            self.assertEqual(403, resp.status_int)
        # a request with admin role but to different account prefix is
        # denied too, regardless of any pre-existing swift_owner flag
        for swift_owner in (False, True):
            resp = self._make_authed_request(
                {'reseller_prefix': 'AUTH'}, '12345678',
                '/v1/SERVICE_12345678',
                environ={'swift_owner': swift_owner})
            self.assertEqual(403, resp.status_int)

    def test_unknown_prefix(self):
        # neither a single- nor a multi-prefix conf matches BLAH_
        for conf in ({}, {'reseller_prefix': 'AUTH, PRE2'}):
            resp = self._make_authed_request(conf, '12345678',
                                             '/v1/BLAH_12345678')
            self.assertEqual(403, resp.status_int)

    def test_authed_for_path_single(self):
        # default conf, token's own account
        resp = self._make_authed_request({}, '12345678', '/v1/AUTH_12345678')
        self.assertEqual(200, resp.status_int)
        # explicit prefix, account and container paths
        for path in ('/v1/AUTH_12345678', '/v1/AUTH_12345678/c'):
            resp = self._make_authed_request(
                {'reseller_prefix': 'AUTH'}, '12345678', path)
            self.assertEqual(200, resp.status_int)
        # ResellerAdmin may reach any account under the prefix
        for path in ('/v1/AUTH_12345678', '/v1/AUTH_anything'):
            resp = self._make_authed_request(
                {'reseller_prefix': 'AUTH'}, '12345678', path,
                user_role='ResellerAdmin')
            self.assertEqual(200, resp.status_int)

    def test_denied_for_path_single(self):
        # a different account is refused
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'}, '12345678', '/v1/AUTH_789')
        self.assertEqual(403, resp.status_int)
        # a non-operator role is refused
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'}, '12345678', '/v1/AUTH_12345678',
            user_role='something_else')
        self.assertEqual(403, resp.status_int)
        # account DELETE is refused even for the operator
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'}, '12345678', '/v1/AUTH_12345678',
            method='DELETE')
        self.assertEqual(403, resp.status_int)

    def test_authed_for_primary_path_multiple(self):
        resp = self._make_authed_request(
            self._DUAL_CONF, '12345678', '/v1/AUTH_12345678')
        self.assertEqual(200, resp.status_int)

    def test_denied_for_second_path_with_only_operator_role(self):
        # user token alone, a token in X-Service-Token only, or the same
        # operator token in both slots: none satisfies PRE2_service_roles
        for user_role, service_role in (('admin', None),
                                        ('', 'admin'),
                                        ('admin', 'admin')):
            resp = self._make_authed_request(
                self._DUAL_CONF, '12345678', '/v1/PRE2_12345678',
                user_role=user_role, service_role=service_role)
            self.assertEqual(403, resp.status_int)

    def test_authed_for_second_path_with_operator_role_and_service(self):
        resp = self._make_authed_request(
            self._DUAL_CONF, '12345678', '/v1/PRE2_12345678',
            service_role='service')
        self.assertEqual(200, resp.status_int)

    def test_denied_for_second_path_with_only_service(self):
        resp = self._make_authed_request(
            self._DUAL_CONF, '12345678', '/v1/PRE2_12345678',
            user_role='something_else', service_role='service')
        self.assertEqual(403, resp.status_int)

    def test_denied_for_second_path_for_service_user(self):
        # the 'service' role in X-Auth-Token (alone, or duplicated into
        # X-Service-Token) does not make the user an operator
        for service_role in (None, 'service'):
            resp = self._make_authed_request(
                self._DUAL_CONF, '12345678', '/v1/PRE2_12345678',
                user_role='service', service_role=service_role)
            self.assertEqual(403, resp.status_int)

    def test_delete_denied_for_second_path(self):
        resp = self._make_authed_request(
            self._DUAL_CONF, '12345678', '/v1/PRE2_12345678',
            service_role='service', method='DELETE')
        self.assertEqual(403, resp.status_int)

    def test_delete_of_second_path_by_reseller_admin(self):
        resp = self._make_authed_request(
            self._DUAL_CONF, '12345678', '/v1/PRE2_12345678',
            user_role='ResellerAdmin', method='DELETE')
        self.assertEqual(200, resp.status_int)
class BaseTestAuthorize(unittest.TestCase):
    """Shared fixture and identity-building helpers for authorize tests."""

    def setUp(self):
        self.test_auth = keystoneauth.filter_factory({})(FakeApp())
        self.test_auth.logger = debug_logger()

    def _make_request(self, path, **kwargs):
        """Return a bare swob request for *path*."""
        return Request.blank(path, **kwargs)

    def _get_account(self, identity=None):
        """Return the swift account derived from *identity*'s project id."""
        if not identity:
            identity = self._get_identity()
        return get_account_for_tenant(self.test_auth,
                                      identity.get('HTTP_X_PROJECT_ID') or
                                      identity.get('HTTP_X_TENANT_ID'))

    def _get_identity(self, tenant_id='tenant_id', tenant_name='tenant_name',
                      user_id='user_id', user_name='user_name', roles=None,
                      project_domain_name='domA', project_domain_id='foo',
                      user_domain_name='domA', user_domain_id='foo'):
        """Return keystone identity info as a dict of WSGI environ keys.

        :param roles: list of role names (or an already comma-joined
                      string); defaults to no roles
        """
        if roles is None:
            roles = []
        if isinstance(roles, list):
            roles = ','.join(roles)
        return {'HTTP_X_USER_ID': user_id,
                'HTTP_X_USER_NAME': user_name,
                'HTTP_X_USER_DOMAIN_NAME': user_domain_name,
                'HTTP_X_USER_DOMAIN_ID': user_domain_id,
                'HTTP_X_PROJECT_ID': tenant_id,
                'HTTP_X_PROJECT_NAME': tenant_name,
                'HTTP_X_PROJECT_DOMAIN_ID': project_domain_id,
                'HTTP_X_PROJECT_DOMAIN_NAME': project_domain_name,
                'HTTP_X_ROLES': roles,
                'HTTP_X_IDENTITY_STATUS': 'Confirmed'}

    def _get_identity_for_v2(self, **kwargs):
        """Like _get_identity() but with v2-style X-Tenant-* environ keys."""
        identity = self._get_identity(**kwargs)
        for suffix in ['ID', 'NAME']:
            identity['HTTP_X_TENANT_{0}'.format(suffix)] = identity.pop(
                'HTTP_X_PROJECT_{0}'.format(suffix))
        return identity

    def _get_env_id(self, tenant_id='tenant_id', tenant_name='tenant_name',
                    user_id='user_id', user_name='user_name', roles=None,
                    project_domain_name='domA', project_domain_id='99',
                    user_domain_name='domA', user_domain_id='99',
                    auth_version='3'):
        """Return the parsed identity as keystoneauth itself sees it.

        Builds a fake environ (including token info for *auth_version*) and
        runs it through the middleware's _keystone_identity().

        Note: 'roles' previously used a mutable default argument ([]);
        None is used as the sentinel now so call sites can never share
        (and accidentally mutate) a single list. _get_identity() already
        maps None to an empty role list, so behavior is unchanged.
        """
        env = self._get_identity(tenant_id, tenant_name, user_id, user_name,
                                 roles, project_domain_name,
                                 project_domain_id, user_domain_name,
                                 user_domain_id)
        token_info = _fake_token_info(version=auth_version)
        env.update({'keystone.token_info': token_info})
        return self.test_auth._keystone_identity(env)
class BaseTestAuthorizeCheck(BaseTestAuthorize):
    # Adds a driver that invokes keystoneauth.authorize() directly.
    def _check_authenticate(self, account=None, identity=None, headers=None,
                            exception=None, acl=None, env=None, path=None):
        """Build a request from *identity* and assert the authorize outcome.

        :param account: target account name; derived from *identity* if None
        :param identity: keystone identity environ dict; a default one is
                         generated if None
        :param headers: extra request headers
        :param exception: expected HTTP status int returned by authorize();
                          None means authorization is expected to succeed
        :param acl: value assigned to req.acl before authorizing
        :param env: extra items merged into the request environ
        :param path: request path; defaults to a container in *account*
        :returns: the request that was authorized
        """
        if not identity:
            identity = self._get_identity()
        if not account:
            account = self._get_account(identity)
        if not path:
            path = '/v1/%s/c' % account
        # fake cached account info
        info_key = get_cache_key(account)
        default_env = {
            'REMOTE_USER': (identity.get('HTTP_X_PROJECT_ID') or
                            identity.get('HTTP_X_TENANT_ID')),
            'swift.infocache': {info_key: {'status': 200, 'sysmeta': {}}}}
        default_env.update(identity)
        if env:
            default_env.update(env)
        req = self._make_request(path, headers=headers, environ=default_env)
        req.acl = acl
        env_identity = self.test_auth._keystone_identity(req.environ)
        result = self.test_auth.authorize(env_identity, req)
        # if we have requested an exception but nothing came back then
        if exception and not result:
            self.fail("error %s was not returned" % (str(exception)))
        elif exception:
            self.assertEqual(result.status_int, exception)
        else:
            self.assertIsNone(result)
        return req
class TestAuthorize(BaseTestAuthorizeCheck):
    def test_authorize_fails_for_unauthorized_user(self):
        # default identity carries no roles, so authorize must deny
        self._check_authenticate(exception=HTTP_FORBIDDEN)

    def test_authorize_fails_for_invalid_reseller_prefix(self):
        self._check_authenticate(account='BLAN_a',
                                 exception=HTTP_FORBIDDEN)

    def test_authorize_succeeds_for_reseller_admin(self):
        identity = self._get_identity(
            roles=[self.test_auth.reseller_admin_role])
        req = self._check_authenticate(identity=identity)
        self.assertTrue(req.environ.get('swift_owner'))

    def test_authorize_succeeds_for_insensitive_reseller_admin(self):
        # role matching is case-insensitive
        identity = self._get_identity(
            roles=[self.test_auth.reseller_admin_role.upper()])
        req = self._check_authenticate(identity=identity)
        self.assertTrue(req.environ.get('swift_owner'))

    def test_authorize_succeeds_as_owner_for_operator_role(self):
        identity = self._get_identity(roles=operator_roles(self.test_auth))
        req = self._check_authenticate(identity=identity)
        self.assertTrue(req.environ.get('swift_owner'))

    def test_authorize_succeeds_as_owner_for_insensitive_operator_role(self):
        identity = self._get_identity(
            roles=[r.upper() for r in operator_roles(self.test_auth)])
        req = self._check_authenticate(identity=identity)
        self.assertTrue(req.environ.get('swift_owner'))
    def test_authorize_fails_same_user_and_tenant(self):
        # Historically the is_admin option allowed access when user_name
        # matched tenant_name, but it is no longer supported. This test is a
        # sanity check that the option no longer works.
        self.test_auth.is_admin = True
        identity = self._get_identity(user_name='same_name',
                                      tenant_name='same_name')
        req = self._check_authenticate(identity=identity,
                                       exception=HTTP_FORBIDDEN)
        self.assertFalse(bool(req.environ.get('swift_owner')))
    def test_authorize_succeeds_for_container_sync(self):
        # container-sync requests authenticate via sync key + timestamp
        env = {'swift_sync_key': 'foo', 'REMOTE_ADDR': '127.0.0.1'}
        headers = {'x-container-sync-key': 'foo', 'x-timestamp': '1'}
        self._check_authenticate(env=env, headers=headers)
    def test_authorize_fails_for_invalid_referrer(self):
        env = {'HTTP_REFERER': 'http://invalid.com/index.html'}
        self._check_authenticate(acl='.r:example.com', env=env,
                                 exception=HTTP_FORBIDDEN)
    def test_authorize_fails_for_referrer_without_rlistings(self):
        # a referrer ACL alone does not grant container listings
        env = {'HTTP_REFERER': 'http://example.com/index.html'}
        self._check_authenticate(acl='.r:example.com', env=env,
                                 exception=HTTP_FORBIDDEN)
    def test_authorize_succeeds_for_referrer_with_rlistings(self):
        env = {'HTTP_REFERER': 'http://example.com/index.html'}
        self._check_authenticate(acl='.r:example.com,.rlistings', env=env)
    def test_authorize_succeeds_for_referrer_with_obj(self):
        # object requests need only the referrer ACL, not .rlistings
        path = '/v1/%s/c/o' % self._get_account()
        env = {'HTTP_REFERER': 'http://example.com/index.html'}
        self._check_authenticate(acl='.r:example.com', env=env, path=path)
def test_authorize_succeeds_for_user_role_in_roles(self):
acl = 'allowme'
identity = self._get_identity(roles=[acl])
self._check_authenticate(identity=identity, acl=acl)
def test_authorize_succeeds_for_tenant_name_user_in_roles(self):
identity = self._get_identity_for_v2()
user_name = identity['HTTP_X_USER_NAME']
user_id = identity['HTTP_X_USER_ID']
tenant_name = identity['HTTP_X_TENANT_NAME']
for user in [user_id, user_name, '*']:
acl = '%s:%s' % (tenant_name, user)
self._check_authenticate(identity=identity, acl=acl)
def test_authorize_succeeds_for_project_name_user_in_roles(self):
identity = self._get_identity()
user_name = identity['HTTP_X_USER_NAME']
user_id = identity['HTTP_X_USER_ID']
project_name = identity['HTTP_X_PROJECT_NAME']
for user in [user_id, user_name, '*']:
acl = '%s:%s' % (project_name, user)
self._check_authenticate(identity=identity, acl=acl)
def test_authorize_succeeds_for_tenant_id_user_in_roles(self):
identity = self._get_identity_for_v2()
user_name = identity['HTTP_X_USER_NAME']
user_id = identity['HTTP_X_USER_ID']
tenant_id = identity['HTTP_X_TENANT_ID']
for user in [user_id, user_name, '*']:
acl = '%s:%s' % (tenant_id, user)
self._check_authenticate(identity=identity, acl=acl)
def test_authorize_succeeds_for_project_id_user_in_roles(self):
identity = self._get_identity()
user_name = identity['HTTP_X_USER_NAME']
user_id = identity['HTTP_X_USER_ID']
project_id = identity['HTTP_X_PROJECT_ID']
for user in [user_id, user_name, '*']:
acl = '%s:%s' % (project_id, user)
self._check_authenticate(identity=identity, acl=acl)
def test_authorize_succeeds_for_wildcard_tenant_user_in_roles(self):
identity = self._get_identity()
user_name = identity['HTTP_X_USER_NAME']
user_id = identity['HTTP_X_USER_ID']
for user in [user_id, user_name, '*']:
acl = '*:%s' % user
self._check_authenticate(identity=identity, acl=acl)
def test_cross_tenant_authorization_success(self):
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userA']),
'tenantID:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userA']),
'tenantNAME:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['*:userA']),
'*:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userID']),
'tenantID:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userID']),
'tenantNAME:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['*:userID']),
'*:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['tenantID:*']),
'tenantID:*')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['tenantNAME:*']),
'tenantNAME:*')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME', ['*:*']),
'*:*')
def test_cross_tenant_authorization_failure(self):
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantXYZ:userA']),
None)
def test_cross_tenant_authorization_allow_names(self):
# tests that the allow_names arg does the right thing
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userA'], allow_names=True),
'tenantNAME:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userID'], allow_names=True),
'tenantNAME:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userA'], allow_names=True),
'tenantID:userA')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userID'], allow_names=True),
'tenantID:userID')
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userA'], allow_names=False),
None)
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userA'], allow_names=False),
None)
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantNAME:userID'], allow_names=False),
None)
self.assertEqual(
self.test_auth._authorize_cross_tenant(
'userID', 'userA', 'tenantID', 'tenantNAME',
['tenantID:userID'], allow_names=False),
'tenantID:userID')
    def test_delete_own_account_not_allowed(self):
        # an operator role is not sufficient to DELETE the account itself
        roles = operator_roles(self.test_auth)
        identity = self._get_identity(roles=roles)
        account = self._get_account(identity)
        self._check_authenticate(account=account,
                                 identity=identity,
                                 exception=HTTP_FORBIDDEN,
                                 path='/v1/' + account,
                                 env={'REQUEST_METHOD': 'DELETE'})
    def test_delete_own_account_when_reseller_allowed(self):
        # a reseller admin may DELETE the account and is marked swift_owner
        roles = [self.test_auth.reseller_admin_role]
        identity = self._get_identity(roles=roles)
        account = self._get_account(identity)
        req = self._check_authenticate(account=account,
                                       identity=identity,
                                       path='/v1/' + account,
                                       env={'REQUEST_METHOD': 'DELETE'})
        self.assertEqual(bool(req.environ.get('swift_owner')), True)
    def test_identity_set_up_at_call(self):
        # the identity captured when the middleware is first called must be
        # the one used by swift.authorize, even if the environ identity
        # headers are later replaced on a subrequest
        def fake_start_response(*args, **kwargs):
            pass
        the_env = self._get_identity(
            tenant_id='test', roles=['reselleradmin'])
        self.test_auth(the_env, fake_start_response)
        subreq = Request.blank(
            '/v1/%s/c/o' % get_account_for_tenant(self.test_auth, 'test'))
        subreq.environ.update(
            self._get_identity(tenant_id='test', roles=['got_erased']))
        # authorize still sees the original reselleradmin identity
        authorize_resp = the_env['swift.authorize'](subreq)
        self.assertIsNone(authorize_resp)
def test_names_disallowed_in_acls_outside_default_domain(self):
id = self._get_identity_for_v2(user_domain_id='non-default',
project_domain_id='non-default')
env = {'keystone.token_info': _fake_token_info(version='3')}
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
id = self._get_identity(user_domain_id='non-default',
project_domain_id='non-default')
acl = '%s:%s' % (id['HTTP_X_PROJECT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_PROJECT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_PROJECT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_PROJECT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
def test_names_allowed_in_acls_inside_default_domain(self):
id = self._get_identity_for_v2(user_domain_id='default',
project_domain_id='default')
env = {'keystone.token_info': _fake_token_info(version='3')}
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
id = self._get_identity(user_domain_id='default',
project_domain_id='default')
acl = '%s:%s' % (id['HTTP_X_PROJECT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_PROJECT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_PROJECT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_PROJECT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
def test_names_allowed_in_acls_inside_default_domain_with_config(self):
conf = {'allow_names_in_acls': 'yes'}
self.test_auth = keystoneauth.filter_factory(conf)(FakeApp())
self.test_auth.logger = debug_logger()
id = self._get_identity_for_v2(user_domain_id='default',
project_domain_id='default')
env = {'keystone.token_info': _fake_token_info(version='3')}
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
id = self._get_identity(user_domain_id='default',
project_domain_id='default')
acl = '%s:%s' % (id['HTTP_X_PROJECT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_PROJECT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_PROJECT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env)
acl = '%s:%s' % (id['HTTP_X_PROJECT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
def test_names_disallowed_in_acls_inside_default_domain(self):
conf = {'allow_names_in_acls': 'false'}
self.test_auth = keystoneauth.filter_factory(conf)(FakeApp())
self.test_auth.logger = debug_logger()
id = self._get_identity_for_v2(user_domain_id='default',
project_domain_id='default')
env = {'keystone.token_info': _fake_token_info(version='3')}
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_TENANT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
id = self._get_identity(user_domain_id='default',
project_domain_id='default')
acl = '%s:%s' % (id['HTTP_X_PROJECT_NAME'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_PROJECT_NAME'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_PROJECT_ID'], id['HTTP_X_USER_NAME'])
self._check_authenticate(acl=acl, identity=id, env=env,
exception=HTTP_FORBIDDEN)
acl = '%s:%s' % (id['HTTP_X_PROJECT_ID'], id['HTTP_X_USER_ID'])
self._check_authenticate(acl=acl, identity=id, env=env)
def test_keystone_identity(self):
user = ('U_ID', 'U_NAME')
roles = ('ROLE1', 'ROLE2')
service_roles = ('ROLE3', 'ROLE4')
project = ('P_ID', 'P_NAME')
user_domain = ('UD_ID', 'UD_NAME')
project_domain = ('PD_ID', 'PD_NAME')
# no valid identity info in headers
req = Request.blank('/v/a/c/o')
data = self.test_auth._keystone_identity(req.environ)
self.assertIsNone(data)
# valid identity info in headers, but status unconfirmed
req.headers.update({'X-Identity-Status': 'Blah',
'X-Roles': '%s,%s' % roles,
'X-User-Id': user[0],
'X-User-Name': user[1],
'X-Tenant-Id': project[0],
'X-Tenant-Name': project[1],
'X-User-Domain-Id': user_domain[0],
'X-User-Domain-Name': user_domain[1],
'X-Project-Domain-Id': project_domain[0],
'X-Project-Domain-Name': project_domain[1]})
data = self.test_auth._keystone_identity(req.environ)
self.assertIsNone(data)
# valid identity info in headers, no token info in environ
req.headers.update({'X-Identity-Status': 'Confirmed'})
expected = {'user': user,
'tenant': project,
'roles': list(roles),
'service_roles': [],
'user_domain': (None, None),
'project_domain': (None, None),
'auth_version': 0}
data = self.test_auth._keystone_identity(req.environ)
self.assertEqual(expected, data)
# v2 token info in environ
req.environ['keystone.token_info'] = _fake_token_info(version='2')
expected = {'user': user,
'tenant': project,
'roles': list(roles),
'service_roles': [],
'user_domain': (None, None),
'project_domain': (None, None),
'auth_version': 2}
data = self.test_auth._keystone_identity(req.environ)
self.assertEqual(expected, data)
# v3 token info in environ
req.environ['keystone.token_info'] = _fake_token_info(version='3')
expected = {'user': user,
'tenant': project,
'roles': list(roles),
'service_roles': [],
'user_domain': user_domain,
'project_domain': project_domain,
'auth_version': 3}
data = self.test_auth._keystone_identity(req.environ)
self.assertEqual(expected, data)
# service token in environ
req.headers.update({'X-Service-Roles': '%s,%s' % service_roles})
expected = {'user': user,
'tenant': project,
'roles': list(roles),
'service_roles': list(service_roles),
'user_domain': user_domain,
'project_domain': project_domain,
'auth_version': 3}
data = self.test_auth._keystone_identity(req.environ)
self.assertEqual(expected, data)
def test_get_project_domain_id(self):
sysmeta = {}
info = {'sysmeta': sysmeta}
info_key = get_cache_key('AUTH_1234')
env = {'PATH_INFO': '/v1/AUTH_1234',
'swift.infocache': {info_key: info}}
# account does not exist
info['status'] = 404
self.assertEqual(self.test_auth._get_project_domain_id(env),
(False, None))
info['status'] = 0
self.assertEqual(self.test_auth._get_project_domain_id(env),
(False, None))
# account exists, no project domain id in sysmeta
info['status'] = 200
self.assertEqual(self.test_auth._get_project_domain_id(env),
(True, None))
# account exists with project domain id in sysmeta
sysmeta['project-domain-id'] = 'default'
self.assertEqual(self.test_auth._get_project_domain_id(env),
(True, 'default'))
class TestIsNameAllowedInACL(BaseTestAuthorize):
    # Tests for keystoneauth's _is_name_allowed_in_acl().
    def setUp(self):
        super(TestIsNameAllowedInACL, self).setUp()
        # the domain id treated as keystone's default domain
        self.default_id = 'default'
    def _assert_names_allowed(self, expected, user_domain_id=None,
                              req_project_domain_id=None,
                              sysmeta_project_domain_id=None,
                              scoped='account'):
        """Assert the outcome of _is_name_allowed_in_acl().

        :param expected: the expected boolean result
        :param user_domain_id: domain id of the token's user; if None a v2
                               token (no domain info) is simulated
        :param req_project_domain_id: domain id of the token's project
        :param sysmeta_project_domain_id: project domain id stored in the
                                          account's sysmeta, if any
        :param scoped: 'account' for a token scoped to the target account,
                       'other' for a token scoped to a different project,
                       anything falsy for an unscoped token
        """
        project_name = 'foo'
        account_id = '12345678'
        account = get_account_for_tenant(self.test_auth, account_id)
        parts = ('v1', account, None, None)
        path = '/%s/%s' % parts[0:2]
        sysmeta = {}
        if sysmeta_project_domain_id:
            sysmeta = {'project-domain-id': sysmeta_project_domain_id}
        # pretend account exists
        info = {'status': 200, 'sysmeta': sysmeta}
        info_key = get_cache_key(account)
        req = Request.blank(path,
                            environ={'swift.infocache': {info_key: info}})
        if scoped == 'account':
            project_name = 'account_name'
            project_id = account_id
        elif scoped == 'other':
            project_name = 'other_name'
            project_id = '87654321'
        else:
            # unscoped token
            project_name, project_id, req_project_domain_id = None, None, None
        if user_domain_id:
            id = self._get_env_id(tenant_name=project_name,
                                  tenant_id=project_id,
                                  user_domain_id=user_domain_id,
                                  project_domain_id=req_project_domain_id)
        else:
            # must be v2 token info
            id = self._get_env_id(tenant_name=project_name,
                                  tenant_id=project_id,
                                  auth_version='2')
        actual = self.test_auth._is_name_allowed_in_acl(req, parts, id)
        self.assertEqual(actual, expected, '%s, %s, %s, %s'
                         % (user_domain_id, req_project_domain_id,
                            sysmeta_project_domain_id, scoped))
def test_is_name_allowed_in_acl_with_token_scoped_to_tenant(self):
# no user or project domain ids in request token so must be v2,
# user and project should be assumed to be in default domain
self._assert_names_allowed(True, user_domain_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id=None)
self._assert_names_allowed(True, user_domain_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id=self.default_id)
self._assert_names_allowed(True, user_domain_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id=UNKNOWN_ID)
self._assert_names_allowed(True, user_domain_id=None,
req_project_domain_id=None,
sysmeta_project_domain_id='foo')
# user in default domain, project domain in token info takes precedence
self._assert_names_allowed(True, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=None)
self._assert_names_allowed(True, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=UNKNOWN_ID)
self._assert_names_allowed(True, user_domain_id=self.default_id,
req_project_domain_id=self.default_id,
sysmeta_project_domain_id='bar')
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id=None)
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id=self.default_id)
self._assert_names_allowed(False, user_domain_id=self.default_id,
req_project_domain_id='foo',
sysmeta_project_domain_id='foo')
# user in non-default domain so names should never be allowed
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=None)
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=self.default_id)
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id=UNKNOWN_ID)
self._assert_names_allowed(False, user_domain_id='foo',
req_project_domain_id=self.default_id,
sysmeta_project_domain_id='foo')
def test_is_name_allowed_in_acl_with_unscoped_token(self):
    """With an unscoped token, names in ACLs are tolerated only for a
    user in the default domain whose account sysmeta does not record a
    non-default project domain.
    """
    # (expected, user_domain_id, sysmeta_project_domain_id)
    cases = [
        # user in default domain
        (True, self.default_id, None),
        (True, self.default_id, self.default_id),
        (False, self.default_id, UNKNOWN_ID),
        (False, self.default_id, 'foo'),
        # user in non-default domain so names should never be allowed
        (False, 'foo', None),
        (False, 'foo', self.default_id),
        (False, 'foo', UNKNOWN_ID),
        (False, 'foo', 'foo'),
    ]
    for expected, user_domain, sysmeta_domain in cases:
        self._assert_names_allowed(expected, user_domain_id=user_domain,
                                   sysmeta_project_domain_id=sysmeta_domain,
                                   scoped=False)
def test_is_name_allowed_in_acl_with_token_scoped_to_other_tenant(self):
    """When the token is scoped to a different tenant, names are allowed
    only if the user, the scoped tenant and any recorded account project
    domain are all in the default domain.
    """
    # (expected, user_domain_id, req_project_domain_id,
    #  sysmeta_project_domain_id)
    cases = [
        # user and scoped tenant in default domain
        (True, self.default_id, self.default_id, None),
        (True, self.default_id, self.default_id, self.default_id),
        (False, self.default_id, self.default_id, UNKNOWN_ID),
        (False, self.default_id, self.default_id, 'foo'),
        # user in default domain, but scoped tenant in non-default domain
        (False, self.default_id, 'foo', None),
        (False, self.default_id, 'foo', self.default_id),
        (False, self.default_id, 'foo', UNKNOWN_ID),
        (False, self.default_id, 'foo', 'foo'),
        # user in non-default domain, scoped tenant in default domain
        (False, 'foo', self.default_id, None),
        (False, 'foo', self.default_id, self.default_id),
        (False, 'foo', self.default_id, UNKNOWN_ID),
        (False, 'foo', self.default_id, 'foo'),
    ]
    for expected, user_domain, req_domain, sysmeta_domain in cases:
        self._assert_names_allowed(expected, user_domain_id=user_domain,
                                   req_project_domain_id=req_domain,
                                   sysmeta_project_domain_id=sysmeta_domain,
                                   scoped='other')
class TestIsNameAllowedInACLWithConfiguredDomain(TestIsNameAllowedInACL):
    """Re-run the name-in-ACL checks with a non-standard configured
    default domain id instead of the built-in default."""

    def setUp(self):
        super(TestIsNameAllowedInACLWithConfiguredDomain, self).setUp()
        self.default_id = 'mydefault'
        self.test_auth = keystoneauth.filter_factory(
            {'default_domain_id': self.default_id})(FakeApp())
        self.test_auth.logger = debug_logger()
class TestSetProjectDomain(BaseTestAuthorize):
    """Tests for _set_project_domain_id: the middleware records a
    project's domain id in account sysmeta on account/container writes."""

    def _assert_set_project_domain(self, expected, account, req_project_id,
                                   req_project_domain_id,
                                   sysmeta_project_domain_id,
                                   warning=False):
        """Drive _set_project_domain_id for a matrix of request methods.

        ``expected`` is the value the sysmeta header should be set to, or
        None if the header must not be set.  ``warning`` indicates that
        each write evaluation is expected to log one inconsistency
        warning line.
        """
        hdr = 'X-Account-Sysmeta-Project-Domain-Id'

        # set up fake account info in req env
        # status 0 simulates a not-yet-existing account
        status = 0 if sysmeta_project_domain_id is None else 200
        sysmeta = {}
        if sysmeta_project_domain_id:
            sysmeta['project-domain-id'] = sysmeta_project_domain_id
        info = {'status': status, 'sysmeta': sysmeta}
        info_key = get_cache_key(account)
        env = {'swift.infocache': {info_key: info}}

        # create fake env identity
        env_id = self._get_env_id(tenant_id=req_project_id,
                                  project_domain_id=req_project_domain_id)

        # reset fake logger so warning counts start at zero for this call
        self.test_auth.logger = debug_logger()
        num_warnings = 0

        # check account requests: header set only on PUT/POST
        path = '/v1/%s' % account
        for method in ['PUT', 'POST']:
            req = Request.blank(path, environ=env)
            req.method = method
            path_parts = req.split_path(1, 4, True)
            self.test_auth._set_project_domain_id(req, path_parts, env_id)
            if warning:
                num_warnings += 1
                warnings = self.test_auth.logger.get_lines_for_level('warning')
                self.assertEqual(len(warnings), num_warnings)
                self.assertTrue(warnings[-1].startswith('Inconsistent proj'))
            if expected is not None:
                self.assertTrue(hdr in req.headers)
                self.assertEqual(req.headers[hdr], expected)
            else:
                self.assertFalse(hdr in req.headers, req.headers)
        # read/delete account requests never get the header
        # (path_parts is reused from the last iteration above)
        for method in ['GET', 'HEAD', 'DELETE', 'OPTIONS']:
            req = Request.blank(path, environ=env)
            req.method = method
            self.test_auth._set_project_domain_id(req, path_parts, env_id)
            self.assertFalse(hdr in req.headers)

        # check container requests: header set only on PUT
        path = '/v1/%s/c' % account
        for method in ['PUT']:
            req = Request.blank(path, environ=env)
            req.method = method
            path_parts = req.split_path(1, 4, True)
            self.test_auth._set_project_domain_id(req, path_parts, env_id)
            if warning:
                num_warnings += 1
                warnings = self.test_auth.logger.get_lines_for_level('warning')
                self.assertEqual(len(warnings), num_warnings)
                self.assertTrue(warnings[-1].startswith('Inconsistent proj'))
            if expected is not None:
                self.assertTrue(hdr in req.headers)
                self.assertEqual(req.headers[hdr], expected)
            else:
                self.assertFalse(hdr in req.headers)
        # all other container methods never get the header
        for method in ['POST', 'GET', 'HEAD', 'DELETE', 'OPTIONS']:
            req = Request.blank(path, environ=env)
            req.method = method
            self.test_auth._set_project_domain_id(req, path_parts, env_id)
            self.assertFalse(hdr in req.headers)

        # never set for object requests
        path = '/v1/%s/c/o' % account
        for method in ['PUT', 'COPY', 'POST', 'GET', 'HEAD', 'DELETE',
                       'OPTIONS']:
            req = Request.blank(path, environ=env)
            req.method = method
            path_parts = req.split_path(1, 4, True)
            self.test_auth._set_project_domain_id(req, path_parts, env_id)
            self.assertFalse(hdr in req.headers)

    def test_set_project_domain_id_new_account(self):
        """New account (no sysmeta yet): header comes from token info."""
        # scoped token with project domain info
        self._assert_set_project_domain('test_id',
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id='test_id',
                                        sysmeta_project_domain_id=None)

        # scoped v2 token without project domain id
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id=None,
                                        sysmeta_project_domain_id=None)

        # unscoped v2 token without project domain id
        self._assert_set_project_domain(UNKNOWN_ID,
                                        account='AUTH_1234',
                                        req_project_id=None,
                                        req_project_domain_id=None,
                                        sysmeta_project_domain_id=None)

        # token scoped on another project
        self._assert_set_project_domain(UNKNOWN_ID,
                                        account='AUTH_1234',
                                        req_project_id='4321',
                                        req_project_domain_id='default',
                                        sysmeta_project_domain_id=None)

    def test_set_project_domain_id_existing_v2_account(self):
        """Account exists with empty (v2-era) project domain sysmeta."""
        # project domain id provided in scoped request token,
        # update empty value
        self._assert_set_project_domain('default',
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id='default',
                                        sysmeta_project_domain_id='')

        # inconsistent project domain id provided in scoped request token,
        # leave known value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id='unexpected_id',
                                        sysmeta_project_domain_id='',
                                        warning=True)

        # project domain id not provided, scoped request token,
        # no change to empty value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id=None,
                                        sysmeta_project_domain_id='')

        # unscoped request token, no change to empty value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id=None,
                                        req_project_domain_id=None,
                                        sysmeta_project_domain_id='')

        # token scoped on another project,
        # update empty value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id='4321',
                                        req_project_domain_id=None,
                                        sysmeta_project_domain_id='')

    def test_set_project_domain_id_existing_account_unknown_domain(self):
        """Account exists with the sentinel UNKNOWN_ID project domain."""
        # project domain id provided in scoped request token,
        # set known value
        self._assert_set_project_domain('test_id',
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id='test_id',
                                        sysmeta_project_domain_id=UNKNOWN_ID)

        # project domain id not provided, scoped request token,
        # set empty value
        self._assert_set_project_domain('',
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id=None,
                                        sysmeta_project_domain_id=UNKNOWN_ID)

        # project domain id not provided, unscoped request token,
        # leave unknown value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id=None,
                                        req_project_domain_id=None,
                                        sysmeta_project_domain_id=UNKNOWN_ID)

        # token scoped on another project, leave unknown value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id='4321',
                                        req_project_domain_id='default',
                                        sysmeta_project_domain_id=UNKNOWN_ID)

    def test_set_project_domain_id_existing_known_domain(self):
        """Account exists with a known project domain: never changed."""
        # project domain id provided in scoped request token,
        # leave known value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id='test_id',
                                        sysmeta_project_domain_id='test_id')

        # inconsistent project domain id provided in scoped request token,
        # leave known value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id='unexpected_id',
                                        sysmeta_project_domain_id='test_id',
                                        warning=True)

        # project domain id not provided, scoped request token,
        # leave known value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id='1234',
                                        req_project_domain_id=None,
                                        sysmeta_project_domain_id='test_id')

        # project domain id not provided, unscoped request token,
        # leave known value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id=None,
                                        req_project_domain_id=None,
                                        sysmeta_project_domain_id='test_id')

        # project domain id not provided, token scoped on another project,
        # leave known value
        self._assert_set_project_domain(None,
                                        account='AUTH_1234',
                                        req_project_id='4321',
                                        req_project_domain_id='default',
                                        sysmeta_project_domain_id='test_id')
class TestAuthorizeReaderSystem(BaseTestAuthorizeCheck):
    """Tests for the system_reader_roles configuration option."""

    system_reader_role_1 = 'compliance'
    system_reader_role_2 = 'integrity'

    # This cannot be in SetUp because it takes arguments from tests.
    def _setup(self, system_reader_roles):
        """Build a fresh middleware configured with the given roles."""
        # We could rifle in the KeystoneAuth internals and tweak the list,
        # but to create the middleware fresh is a clean, future-resistant way.
        self.test_auth = keystoneauth.filter_factory(
            {}, system_reader_roles=system_reader_roles)(FakeApp())
        self.test_auth.logger = debug_logger()

    # Zero test: make sure that reader role has no default access
    # when not in the list of system_reader_roles[].
    def test_reader_none(self):
        self._setup(None)
        identity = self._get_identity(roles=[self.system_reader_role_1])
        self._check_authenticate(exception=HTTP_FORBIDDEN,
                                 identity=identity)

    # HEAD is the same, right? No need to check, right?
    def test_reader_get(self):
        # While we're at it, test that our parsing of CSV works.
        self._setup("%s, %s" %
                    (self.system_reader_role_1, self.system_reader_role_2))
        identity = self._get_identity(roles=[self.system_reader_role_1])
        self._check_authenticate(identity=identity)

    def test_reader_put(self):
        """A system reader may not write via PUT or POST."""
        self._setup(self.system_reader_role_1)
        identity = self._get_identity(roles=[self.system_reader_role_1])
        self._check_authenticate(exception=HTTP_FORBIDDEN,
                                 identity=identity,
                                 env={'REQUEST_METHOD': 'PUT'})
        self._check_authenticate(exception=HTTP_FORBIDDEN,
                                 identity=identity,
                                 env={'REQUEST_METHOD': 'POST'})

    def test_reader_put_to_own(self):
        """A reader who is also an operator keeps owner rights."""
        roles = operator_roles(self.test_auth) + [self.system_reader_role_1]
        identity = self._get_identity(roles=roles)
        req = self._check_authenticate(identity=identity,
                                       env={'REQUEST_METHOD': 'PUT'})
        self.assertTrue(req.environ.get('swift_owner'))

    # This should not be happening, but let's make sure that reader did not
    # obtain any extra authorizations by combining with swiftoperator,
    # because that is how reader is going to be used in practice.
    def test_reader_put_elsewhere_fails(self):
        roles = operator_roles(self.test_auth) + [self.system_reader_role_1]
        identity = self._get_identity(roles=roles)
        account = "%s%s" % (self._get_account(identity), "2")
        self._check_authenticate(exception=HTTP_FORBIDDEN,
                                 identity=identity,
                                 account=account,
                                 env={'REQUEST_METHOD': 'PUT'})
class TestAuthorizeReaderProject(BaseTestAuthorizeCheck):
    """Tests for the project_reader_roles configuration option."""

    project_reader_role_1 = 'rdr1'
    project_reader_role_2 = 'rdr2'

    def _setup(self, project_reader_roles):
        # Not in setUp() because each test supplies its own configuration.
        self.test_auth = keystoneauth.filter_factory(
            {}, project_reader_roles=project_reader_roles)(FakeApp())
        self.test_auth.logger = debug_logger()

    # There is no zero test here because it would be literally the same
    # code as the system reader zero test that already ran. See above.

    def test_reader_get(self):
        """Reading is what a reader does."""
        # This also exercises CSV parsing of the role list.
        roles_conf = "%s, %s" % (self.project_reader_role_1,
                                 self.project_reader_role_2)
        self._setup(roles_conf)
        identity = self._get_identity(roles=[self.project_reader_role_2])
        self._check_authenticate(identity=identity)

    def test_reader_put(self):
        """Writing would otherwise be allowed, but not for a reader."""
        self._setup(self.project_reader_role_1)
        identity = self._get_identity(roles=[self.project_reader_role_1])
        for method in ('PUT', 'POST'):
            self._check_authenticate(exception=HTTP_FORBIDDEN,
                                     identity=identity,
                                     env={'REQUEST_METHOD': method})
class ResellerInInfo(unittest.TestCase):
    """Check that per-prefix account_rules are populated with defaults."""

    def setUp(self):
        self.default_rules = {'operator_roles': ['admin', 'swiftoperator'],
                              'project_reader_roles': [],
                              'service_roles': []}

    def test_defaults(self):
        test_auth = keystoneauth.filter_factory({})(FakeApp())
        self.assertEqual(self.default_rules, test_auth.account_rules['AUTH_'])

    def test_multiple(self):
        conf = {"reseller_prefix": "AUTH, '', PRE2"}
        test_auth = keystoneauth.filter_factory(conf)(FakeApp())
        for prefix in ('AUTH_', '', 'PRE2_'):
            self.assertEqual(self.default_rules,
                             test_auth.account_rules[prefix])
class PrefixAccount(unittest.TestCase):
    """Verify tenant-to-account mapping helpers for assorted
    reseller_prefix configurations."""

    def _auth(self, conf):
        # Fresh keystoneauth instance for the given configuration.
        return keystoneauth.filter_factory(conf)(FakeApp())

    def test_default(self):
        test_auth = self._auth({})
        self.assertEqual('AUTH_1234',
                         get_account_for_tenant(test_auth, '1234'))
        self.assertEqual('AUTH_', test_auth._get_account_prefix('AUTH_1234'))
        self.assertIsNone(test_auth._get_account_prefix('JUNK_1234'))
        self.assertTrue(test_auth._account_matches_tenant('AUTH_1234',
                                                          '1234'))
        self.assertFalse(test_auth._account_matches_tenant('AUTH_1234',
                                                           '5678'))
        self.assertFalse(test_auth._account_matches_tenant('JUNK_1234',
                                                           '1234'))

    def test_same_as_default(self):
        test_auth = self._auth({'reseller_prefix': 'AUTH'})
        self.assertEqual('AUTH_1234',
                         get_account_for_tenant(test_auth, '1234'))
        self.assertEqual('AUTH_', test_auth._get_account_prefix('AUTH_1234'))
        self.assertIsNone(test_auth._get_account_prefix('JUNK_1234'))
        self.assertTrue(test_auth._account_matches_tenant('AUTH_1234',
                                                          '1234'))
        self.assertFalse(test_auth._account_matches_tenant('AUTH_1234',
                                                           '5678'))

    def test_blank_reseller(self):
        test_auth = self._auth({'reseller_prefix': ''})
        self.assertEqual('1234', get_account_for_tenant(test_auth, '1234'))
        self.assertEqual('', test_auth._get_account_prefix('1234'))
        # a blank prefix matches any account, so JUNK_ also maps to ''
        self.assertEqual('', test_auth._get_account_prefix('JUNK_1234'))
        self.assertTrue(test_auth._account_matches_tenant('1234', '1234'))
        self.assertFalse(test_auth._account_matches_tenant('1234', '5678'))
        self.assertFalse(test_auth._account_matches_tenant('JUNK_1234',
                                                           '1234'))

    def test_multiple_resellers(self):
        test_auth = self._auth({'reseller_prefix': 'AUTH, PRE2'})
        self.assertEqual('AUTH_1234',
                         get_account_for_tenant(test_auth, '1234'))
        self.assertEqual('AUTH_', test_auth._get_account_prefix('AUTH_1234'))
        self.assertIsNone(test_auth._get_account_prefix('JUNK_1234'))
        self.assertTrue(test_auth._account_matches_tenant('AUTH_1234',
                                                          '1234'))
        self.assertTrue(test_auth._account_matches_tenant('PRE2_1234',
                                                          '1234'))
        self.assertFalse(test_auth._account_matches_tenant('AUTH_1234',
                                                           '5678'))
        self.assertFalse(test_auth._account_matches_tenant('PRE2_1234',
                                                           '5678'))

    def test_blank_plus_other_reseller(self):
        test_auth = self._auth({'reseller_prefix': " '', PRE2"})
        self.assertEqual('1234', get_account_for_tenant(test_auth, '1234'))
        self.assertEqual('PRE2_', test_auth._get_account_prefix('PRE2_1234'))
        self.assertEqual('', test_auth._get_account_prefix('JUNK_1234'))
        self.assertTrue(test_auth._account_matches_tenant('1234', '1234'))
        self.assertTrue(test_auth._account_matches_tenant('PRE2_1234',
                                                          '1234'))
        self.assertFalse(test_auth._account_matches_tenant('1234', '5678'))
        self.assertFalse(test_auth._account_matches_tenant('PRE2_1234',
                                                           '5678'))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
# --- end of test/unit/common/middleware/test_keystoneauth.py (swift-master) ---
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest
from base64 import b64encode as _b64encode
from time import time
import six
from six.moves.urllib.parse import quote, urlparse
from swift.common.middleware import tempauth as auth
from swift.common.middleware.acl import format_acl
from swift.common.swob import Request, Response, bytes_to_wsgi
from swift.common.utils import split_path, StatsdClient
from test.unit import FakeMemcache
NO_CONTENT_RESP = (('204 No Content', {}, ''),) # mock server response
def b64encode(str_or_bytes):
    """Base64-encode *str_or_bytes* and return the result as a native str.

    Text input is first encoded as UTF-8.
    """
    raw = (str_or_bytes if isinstance(str_or_bytes, bytes)
           else str_or_bytes.encode('utf8'))
    return _b64encode(raw).decode('ascii')
class FakeApp(object):
    """Stand-in WSGI backend that replays canned responses.

    Records the last request, can attach an ACL and a sync key to it, and
    honours any swift.authorize callback installed in the environment.
    """

    def __init__(self, status_headers_body_iter=None, acl=None, sync_key=None):
        self.calls = 0
        # Default to a single 404 response when no canned iterator given.
        self.status_headers_body_iter = (
            status_headers_body_iter or iter([('404 Not Found', {}, '')]))
        self.acl = acl
        self.sync_key = sync_key

    def __call__(self, env, start_response):
        self.calls += 1
        self.request = Request(env)
        if self.acl:
            self.request.acl = self.acl
        if self.sync_key:
            self.request.environ['swift_sync_key'] = self.sync_key
        if 'swift.authorize' in env:
            denial = env['swift.authorize'](self.request)
            if denial:
                # Authorization refused: return the denial response.
                return denial(env, start_response)
        status, headers, body = next(self.status_headers_body_iter)
        return Response(status=status, headers=headers,
                        body=body)(env, start_response)
class FakeConn(object):
    """Stand-in HTTP connection that replays canned responses and also
    acts as its own response object."""

    def __init__(self, status_headers_body_iter=None):
        self.calls = 0
        # Default to a single 404 response when no canned iterator given.
        self.status_headers_body_iter = (
            status_headers_body_iter or iter([('404 Not Found', {}, '')]))

    def request(self, method, path, headers):
        self.calls += 1
        self.request_path = path
        status_line, self.headers, self.body = \
            next(self.status_headers_body_iter)
        status_str, self.reason = status_line.split(' ', 1)
        self.status = int(status_str)

    def getresponse(self):
        # The connection doubles as the response.
        return self

    def read(self):
        # Return the body exactly once; later reads yield ''.
        body, self.body = self.body, ''
        return body
class TestAuth(unittest.TestCase):
def setUp(self):
    # Fresh tempauth instance with default config wrapping a 404 backend.
    self.test_auth = auth.filter_factory({})(FakeApp())
def _make_request(self, path, **kwargs):
    """Build a blank Request for *path* with a fake memcache attached."""
    request = Request.blank(path, **kwargs)
    request.environ['swift.cache'] = FakeMemcache()
    return request
def test_statsd_prefix(self):
    """The statsd metric prefix combines any configured metric prefix,
    the middleware name and the reseller prefix."""
    app = FakeApp()
    cases = [
        ({'log_statsd_host': 'example.com'},
         'tempauth.AUTH_.'),
        ({'log_statsd_metric_prefix': 'foo',
          'log_name': 'bar',
          'log_statsd_host': 'example.com'},
         'foo.tempauth.AUTH_.'),
        ({'log_statsd_metric_prefix': 'foo',
          'log_name': 'bar',
          'log_statsd_host': 'example.com',
          'reseller_prefix': 'TEST'},
         'foo.tempauth.TEST_.'),
    ]
    for conf, expected_prefix in cases:
        ath = auth.filter_factory(conf)(app)
        client = ath.logger.logger.statsd_client
        self.assertIsNotNone(client)
        self.assertIsInstance(client, StatsdClient)
        self.assertEqual(expected_prefix, client._prefix)
def test_reseller_prefix_init(self):
    """reseller_prefix is normalized to end in '_' and blanks collapse
    to the empty prefix."""
    app = FakeApp()
    cases = [
        ({}, 'AUTH_', ['AUTH_']),
        ({'reseller_prefix': 'TEST'}, 'TEST_', ['TEST_']),
        ({'reseller_prefix': 'TEST_'}, 'TEST_', ['TEST_']),
        ({'reseller_prefix': ''}, '', ['']),
        ({'reseller_prefix': ' '}, '', ['']),
        ({'reseller_prefix': ' '' '}, '', ['']),
    ]
    for conf, expected_prefix, expected_prefixes in cases:
        ath = auth.filter_factory(conf)(app)
        self.assertEqual(expected_prefix, ath.reseller_prefix)
        self.assertEqual(expected_prefixes, ath.reseller_prefixes)
    # a quoted blank plus a named prefix yields both
    ath = auth.filter_factory({'reseller_prefix': " '', TEST"})(app)
    self.assertEqual('', ath.reseller_prefix)
    self.assertIn('', ath.reseller_prefixes)
    self.assertIn('TEST_', ath.reseller_prefixes)
def test_auth_prefix_init(self):
    """auth_prefix is normalized to carry leading and trailing slashes,
    defaulting to /auth/."""
    app = FakeApp()
    cases = [
        ({}, '/auth/'),
        ({'auth_prefix': ''}, '/auth/'),
        ({'auth_prefix': '/'}, '/auth/'),
        ({'auth_prefix': '/test/'}, '/test/'),
        ({'auth_prefix': '/test'}, '/test/'),
        ({'auth_prefix': 'test/'}, '/test/'),
        ({'auth_prefix': 'test'}, '/test/'),
    ]
    for conf, expected in cases:
        ath = auth.filter_factory(conf)(app)
        self.assertEqual(expected, ath.auth_prefix)
def test_top_level_deny(self):
    """A request outside /v1 is denied with the unknown realm."""
    req = self._make_request('/')
    resp = req.get_response(self.test_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual('Swift realm="unknown"',
                     resp.headers.get('Www-Authenticate'))
    self.assertEqual(self.test_auth.denied_response,
                     req.environ['swift.authorize'])
def test_anon(self):
    """Anonymous requests to a valid account path defer to authorize()."""
    req = self._make_request('/v1/AUTH_account')
    resp = req.get_response(self.test_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual('Swift realm="AUTH_account"',
                     resp.headers.get('Www-Authenticate'))
    self.assertEqual(self.test_auth.authorize,
                     req.environ['swift.authorize'])
def test_anon_badpath(self):
    """An unparseable v1 path yields 401 with the unknown realm."""
    resp = self._make_request('/v1').get_response(self.test_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual('Swift realm="unknown"',
                     resp.headers.get('Www-Authenticate'))
def test_override_asked_for_but_not_allowed(self):
    """swift.authorize_override is ignored when allow_overrides is off."""
    self.test_auth = auth.filter_factory(
        {'allow_overrides': 'false'})(FakeApp())
    req = self._make_request('/v1/AUTH_account',
                             environ={'swift.authorize_override': True})
    resp = req.get_response(self.test_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual('Swift realm="AUTH_account"',
                     resp.headers.get('Www-Authenticate'))
    self.assertEqual(self.test_auth.authorize,
                     req.environ['swift.authorize'])
def test_override_asked_for_and_allowed(self):
    """With allow_overrides on, the override bypasses authorization."""
    self.test_auth = auth.filter_factory(
        {'allow_overrides': 'true'})(FakeApp())
    req = self._make_request('/v1/AUTH_account',
                             environ={'swift.authorize_override': True})
    resp = req.get_response(self.test_auth)
    self.assertEqual(404, resp.status_int)
    self.assertNotIn('swift.authorize', req.environ)
def test_override_default_allowed(self):
    """allow_overrides defaults to enabled."""
    req = self._make_request('/v1/AUTH_account',
                             environ={'swift.authorize_override': True})
    resp = req.get_response(self.test_auth)
    self.assertEqual(404, resp.status_int)
    self.assertNotIn('swift.authorize', req.environ)
def test_auth_deny_non_reseller_prefix(self):
    """Tokens for accounts outside the reseller prefix are denied."""
    req = self._make_request('/v1/BLAH_account',
                             headers={'X-Auth-Token': 'BLAH_t'})
    resp = req.get_response(self.test_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual('Swift realm="BLAH_account"',
                     resp.headers.get('Www-Authenticate'))
    self.assertEqual(self.test_auth.denied_response,
                     req.environ['swift.authorize'])
def test_auth_deny_non_reseller_prefix_no_override(self):
    """A pre-existing swift.authorize callback is left untouched."""
    def fake_authorize(req):
        return Response(status='500 Fake')

    req = self._make_request('/v1/BLAH_account',
                             headers={'X-Auth-Token': 'BLAH_t'},
                             environ={'swift.authorize': fake_authorize})
    resp = req.get_response(self.test_auth)
    self.assertEqual(500, resp.status_int)
    self.assertEqual(fake_authorize, req.environ['swift.authorize'])
def test_auth_no_reseller_prefix_deny(self):
    """With no reseller prefix the request is not rejected outright; a
    denial is installed in swift.authorize and the request is passed on
    down the chain."""
    backend = FakeApp()
    local_auth = auth.filter_factory({'reseller_prefix': ''})(backend)
    req = self._make_request('/v1/account', headers={'X-Auth-Token': 't'})
    resp = req.get_response(local_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual('Swift realm="account"',
                     resp.headers.get('Www-Authenticate'))
    self.assertEqual(1, backend.calls)
    self.assertEqual(local_auth.denied_response,
                     req.environ['swift.authorize'])
def test_auth_reseller_prefix_with_s3_deny(self):
    """An AWS Authorization header defers denial to swift.authorize so
    an S3 middleware further down the chain can still act."""
    backend = FakeApp()
    local_auth = auth.filter_factory({'reseller_prefix': 'PRE'})(backend)
    req = self._make_request('/v1/account',
                             headers={'X-Auth-Token': 't',
                                      'Authorization': 'AWS user:pw'})
    resp = req.get_response(local_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual(1, backend.calls)
    self.assertEqual(local_auth.denied_response,
                     req.environ['swift.authorize'])
def test_auth_with_swift3_authorization_good(self):
    """A valid swift3 signature authenticates and remaps the account."""
    backend = FakeApp()
    local_auth = auth.filter_factory(
        {'user_s3_s3': 'secret .admin'})(backend)
    auth_details = {'access_key': 's3:s3',
                    'signature': b64encode('sig'),
                    'string_to_sign': 't',
                    'check_signature': lambda secret: True}
    req = self._make_request(
        '/v1/s3:s3', environ={'swift3.auth_details': auth_details})
    resp = req.get_response(local_auth)
    self.assertEqual(404, resp.status_int)
    self.assertEqual(1, backend.calls)
    self.assertEqual('/v1/AUTH_s3', req.environ['PATH_INFO'])
    self.assertEqual(local_auth.authorize, req.environ['swift.authorize'])
def test_auth_with_s3api_authorization_good(self):
    """A valid s3api signature authenticates and remaps the account."""
    backend = FakeApp()
    local_auth = auth.filter_factory(
        {'user_s3_s3': 'secret .admin'})(backend)
    auth_details = {'access_key': 's3:s3',
                    'signature': b64encode('sig'),
                    'string_to_sign': 't',
                    'check_signature': lambda secret: True}
    req = self._make_request(
        '/v1/s3:s3', environ={'s3api.auth_details': auth_details})
    resp = req.get_response(local_auth)
    self.assertEqual(404, resp.status_int)
    self.assertEqual(1, backend.calls)
    self.assertEqual('/v1/AUTH_s3', req.environ['PATH_INFO'])
    self.assertEqual(local_auth.authorize, req.environ['swift.authorize'])
def test_auth_with_swift3_authorization_invalid(self):
    """A bad swift3 signature is denied and the path is left alone."""
    backend = FakeApp()
    local_auth = auth.filter_factory(
        {'user_s3_s3': 'secret .admin'})(backend)
    auth_details = {'access_key': 's3:s3',
                    'signature': b64encode('sig'),
                    'string_to_sign': 't',
                    'check_signature': lambda secret: False}
    req = self._make_request(
        '/v1/s3:s3', environ={'swift3.auth_details': auth_details})
    resp = req.get_response(local_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual(1, backend.calls)
    self.assertEqual('/v1/s3:s3', req.environ['PATH_INFO'])
    self.assertEqual(local_auth.denied_response,
                     req.environ['swift.authorize'])
def test_auth_with_s3api_authorization_invalid(self):
    """A bad s3api signature is denied and the path is left alone."""
    backend = FakeApp()
    local_auth = auth.filter_factory(
        {'user_s3_s3': 'secret .admin'})(backend)
    auth_details = {'access_key': 's3:s3',
                    'signature': b64encode('sig'),
                    'string_to_sign': 't',
                    'check_signature': lambda secret: False}
    req = self._make_request(
        '/v1/s3:s3', environ={'s3api.auth_details': auth_details})
    resp = req.get_response(local_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual(1, backend.calls)
    self.assertEqual('/v1/s3:s3', req.environ['PATH_INFO'])
    self.assertEqual(local_auth.denied_response,
                     req.environ['swift.authorize'])
def test_auth_with_old_swift3_details(self):
    """swift3 auth details lacking check_signature are rejected."""
    backend = FakeApp()
    local_auth = auth.filter_factory(
        {'user_s3_s3': 'secret .admin'})(backend)
    auth_details = {'access_key': 's3:s3',
                    'signature': b64encode('sig'),
                    'string_to_sign': 't'}
    req = self._make_request(
        '/v1/s3:s3', environ={'swift3.auth_details': auth_details})
    resp = req.get_response(local_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual(1, backend.calls)
    self.assertEqual('/v1/s3:s3', req.environ['PATH_INFO'])
    self.assertEqual(local_auth.denied_response,
                     req.environ['swift.authorize'])
def test_auth_with_old_s3api_details(self):
    """s3api auth details lacking check_signature are rejected."""
    backend = FakeApp()
    local_auth = auth.filter_factory(
        {'user_s3_s3': 'secret .admin'})(backend)
    auth_details = {'access_key': 's3:s3',
                    'signature': b64encode('sig'),
                    'string_to_sign': 't'}
    req = self._make_request(
        '/v1/s3:s3', environ={'s3api.auth_details': auth_details})
    resp = req.get_response(local_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual(1, backend.calls)
    self.assertEqual('/v1/s3:s3', req.environ['PATH_INFO'])
    self.assertEqual(local_auth.denied_response,
                     req.environ['swift.authorize'])
def test_auth_no_reseller_prefix_no_token(self):
    """Without a token and with no reseller prefix, our authorize is
    installed -- unless some earlier middleware already installed one."""
    local_auth = auth.filter_factory({'reseller_prefix': ''})(FakeApp())
    req = self._make_request('/v1/account')
    resp = req.get_response(local_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual('Swift realm="account"',
                     resp.headers.get('Www-Authenticate'))
    self.assertEqual(local_auth.authorize, req.environ['swift.authorize'])
    # Now make sure an existing swift.authorize is not overridden when
    # there is no reseller prefix.
    local_auth = auth.filter_factory({'reseller_prefix': ''})(FakeApp())

    def existing_authorize(req):
        return Response('test')

    req = self._make_request(
        '/v1/account', environ={'swift.authorize': existing_authorize})
    resp = req.get_response(local_auth)
    self.assertEqual(existing_authorize, req.environ['swift.authorize'])
    self.assertEqual(200, resp.status_int)
def test_auth_fail(self):
    """An unknown token under the reseller prefix yields 401."""
    req = self._make_request('/v1/AUTH_cfa',
                             headers={'X-Auth-Token': 'AUTH_t'})
    resp = req.get_response(self.test_auth)
    self.assertEqual(401, resp.status_int)
    self.assertEqual('Swift realm="AUTH_cfa"',
                     resp.headers.get('Www-Authenticate'))
def test_authorize_bad_path(self):
    """authorize() 401s anonymous bad paths, 403s authenticated ones."""
    resp = self.test_auth.authorize(self._make_request('/badpath'))
    self.assertEqual(401, resp.status_int)
    self.assertEqual('Swift realm="unknown"',
                     resp.headers.get('Www-Authenticate'))
    request = self._make_request('/badpath')
    request.remote_user = 'act:usr,act,AUTH_cfa'
    self.assertEqual(403, self.test_auth.authorize(request).status_int)
    def test_authorize_account_access(self):
        """A user whose groups include the account (AUTH_cfa) is allowed
        (authorize returns None); one without that group gets 403."""
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act,AUTH_cfa'
        self.assertIsNone(self.test_auth.authorize(req))
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
    def test_authorize_acl_group_access(self):
        """ACL entries grant access by group name ('act') or by exact user
        ('act:usr'); non-matching entries still deny with 403."""
        self.test_auth = auth.filter_factory({})(
            FakeApp(iter(NO_CONTENT_RESP * 3)))
        # No ACL at all: denied.
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # ACL matches one of the user's groups: allowed.
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        req.acl = 'act'
        self.assertIsNone(self.test_auth.authorize(req))
        # ACL matches the exact user: allowed.
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        req.acl = 'act:usr'
        self.assertIsNone(self.test_auth.authorize(req))
        # ACL names a different group: denied.
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        req.acl = 'act2'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # ACL names a different user: denied.
        req = self._make_request('/v1/AUTH_cfa')
        req.remote_user = 'act:usr,act'
        req.acl = 'act:usr2'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
def test_deny_cross_reseller(self):
# Tests that cross-reseller is denied, even if ACLs/group names match
req = self._make_request('/v1/OTHER_cfa')
req.remote_user = 'act:usr,act,AUTH_cfa'
req.acl = 'act'
resp = self.test_auth.authorize(req)
self.assertEqual(resp.status_int, 403)
    def test_authorize_acl_referer_after_user_groups(self):
        """A user entry later in the ACL still matches even when a referrer
        designator ('.r:*') precedes it."""
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr'
        req.acl = '.r:*,act:usr'
        self.assertIsNone(self.test_auth.authorize(req))
    def test_authorize_acl_referrer_access(self):
        """Referrer ('.r:') ACLs: '.r:*,.rlistings' opens container access,
        '.r:*' alone denies listings, and domain-scoped entries only match
        requests carrying an appropriate Referer header.  Checked first for
        an authenticated user (403 on denial), then anonymously (401)."""
        self.test_auth = auth.filter_factory({})(
            FakeApp(iter(NO_CONTENT_RESP * 6)))
        # Authenticated user, no ACL: denied.
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.acl = '.r:*,.rlistings'
        self.assertIsNone(self.test_auth.authorize(req))
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.acl = '.r:*'  # No listings allowed
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # Domain-scoped referrer ACL without a matching Referer: denied.
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.acl = '.r:.example.com,.rlistings'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # Same ACL with a matching Referer: allowed.
        req = self._make_request('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.referer = 'http://www.example.com/index.html'
        req.acl = '.r:.example.com,.rlistings'
        self.assertIsNone(self.test_auth.authorize(req))
        # Anonymous requests follow the same rules but deny with 401.
        req = self._make_request('/v1/AUTH_cfa/c')
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="AUTH_cfa"')
        req = self._make_request('/v1/AUTH_cfa/c')
        req.acl = '.r:*,.rlistings'
        self.assertIsNone(self.test_auth.authorize(req))
        req = self._make_request('/v1/AUTH_cfa/c')
        req.acl = '.r:*'  # No listings allowed
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="AUTH_cfa"')
        req = self._make_request('/v1/AUTH_cfa/c')
        req.acl = '.r:.example.com,.rlistings'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="AUTH_cfa"')
        req = self._make_request('/v1/AUTH_cfa/c')
        req.referer = 'http://www.example.com/index.html'
        req.acl = '.r:.example.com,.rlistings'
        self.assertIsNone(self.test_auth.authorize(req))
    def test_detect_reseller_request(self):
        """A token cached with the .reseller_admin group causes tempauth to
        set reseller_request=True in the WSGI environ."""
        req = self._make_request('/v1/AUTH_admin',
                                 headers={'X-Auth-Token': 'AUTH_t'})
        # Pre-populate the token cache so the request authenticates.
        cache_key = 'AUTH_/token/AUTH_t'
        cache_entry = (time() + 3600, '.reseller_admin')
        req.environ['swift.cache'].set(cache_key, cache_entry)
        req.get_response(self.test_auth)
        self.assertTrue(req.environ.get('reseller_request', False))
    def test_account_put_permissions(self):
        """Only .reseller_admin may PUT an account; plain users, users with
        other accounts' groups, account admins, .reseller_reader, and even
        .super_admin are all denied with 403."""
        self.test_auth = auth.filter_factory({})(
            FakeApp(iter(NO_CONTENT_RESP * 5)))
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act,AUTH_other'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # Even PUTs to your own account as account admin should fail
        req = self._make_request('/v1/AUTH_old',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act,AUTH_old'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # .reseller_admin is the one group that may create accounts.
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act,.reseller_admin'
        resp = self.test_auth.authorize(req)
        self.assertIsNone(resp)
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act,.reseller_reader'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # .super_admin is not something the middleware should ever see or care
        # about
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'PUT'})
        req.remote_user = 'act:usr,act,.super_admin'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
    def test_account_delete_permissions(self):
        """Mirror of test_account_put_permissions for DELETE: only
        .reseller_admin may delete an account."""
        self.test_auth = auth.filter_factory({})(
            FakeApp(iter(NO_CONTENT_RESP * 5)))
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act,AUTH_other'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # Even DELETEs to your own account as account admin should fail
        req = self._make_request('/v1/AUTH_old',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act,AUTH_old'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # .reseller_admin is the one group that may delete accounts.
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act,.reseller_admin'
        resp = self.test_auth.authorize(req)
        self.assertIsNone(resp)
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act,.reseller_reader'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
        # .super_admin is not something the middleware should ever see or care
        # about
        req = self._make_request('/v1/AUTH_new',
                                 environ={'REQUEST_METHOD': 'DELETE'})
        req.remote_user = 'act:usr,act,.super_admin'
        resp = self.test_auth.authorize(req)
        self.assertEqual(resp.status_int, 403)
    def test_get_token_success(self):
        # Example of how to simulate the auth transaction
        test_auth = auth.filter_factory({'user_ac_user': 'testing'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'ac:user', 'X-Auth-Key': 'testing'})
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 200)
        # Storage URL points at the reseller-prefixed account; the token and
        # storage-token headers carry the same freshly minted AUTH_ token.
        self.assertTrue(resp.headers['x-storage-url'].endswith('/v1/AUTH_ac'))
        self.assertTrue(resp.headers['x-auth-token'].startswith('AUTH_'))
        self.assertEqual(resp.headers['x-auth-token'],
                         resp.headers['x-storage-token'])
        # Expiry is reported as (roughly) the full default token life.
        self.assertAlmostEqual(int(resp.headers['x-auth-token-expires']),
                               auth.DEFAULT_TOKEN_LIFE - 0.5, delta=0.5)
        self.assertGreater(len(resp.headers['x-auth-token']), 10)
    def test_get_token_memcache_error(self):
        """If memcache refuses to store the new token, the token request
        fails with 503 rather than handing out an uncacheable token."""
        test_auth = auth.filter_factory({'user_ac_user': 'testing'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'ac:user', 'X-Auth-Key': 'testing'})
        # error_on_set makes the first cache.set raise.
        req.environ['swift.cache'] = FakeMemcache(error_on_set=[True])
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 503)
def test_get_token_success_other_auth_prefix(self):
test_auth = auth.filter_factory({'user_ac_user': 'testing',
'auth_prefix': '/other/'})(FakeApp())
req = self._make_request(
'/other/v1.0',
headers={'X-Auth-User': 'ac:user', 'X-Auth-Key': 'testing'})
resp = req.get_response(test_auth)
self.assertEqual(resp.status_int, 200)
self.assertTrue(resp.headers['x-storage-url'].endswith('/v1/AUTH_ac'))
self.assertTrue(resp.headers['x-auth-token'].startswith('AUTH_'))
self.assertTrue(len(resp.headers['x-auth-token']) > 10)
    def test_use_token_success(self):
        # Example of how to simulate an authorized request
        test_auth = auth.filter_factory({'user_acct_user': 'testing'})(
            FakeApp(iter(NO_CONTENT_RESP * 1)))
        req = self._make_request('/v1/AUTH_acct',
                                 headers={'X-Auth-Token': 'AUTH_t'})
        # A valid cached token for the target account lets the request
        # through to the app (which answers 204).
        cache_key = 'AUTH_/token/AUTH_t'
        cache_entry = (time() + 3600, 'AUTH_acct')
        req.environ['swift.cache'].set(cache_key, cache_entry)
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 204)
def test_get_token_fail(self):
resp = self._make_request('/auth/v1.0').get_response(self.test_auth)
self.assertEqual(resp.status_int, 401)
self.assertEqual(resp.headers.get('Www-Authenticate'),
'Swift realm="unknown"')
resp = self._make_request(
'/auth/v1.0',
headers={'X-Auth-User': 'act:usr',
'X-Auth-Key': 'key'}).get_response(self.test_auth)
self.assertEqual(resp.status_int, 401)
self.assertTrue('Www-Authenticate' in resp.headers)
self.assertEqual(resp.headers.get('Www-Authenticate'),
'Swift realm="act"')
    def test_get_token_fail_invalid_x_auth_user_format(self):
        """X-Auth-User without the account:user form is rejected with 401."""
        resp = self._make_request(
            '/auth/v1/act/auth',
            headers={'X-Auth-User': 'usr',
                     'X-Auth-Key': 'key'}).get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="act"')
    def test_get_token_fail_non_matching_account_in_request(self):
        """X-Auth-User's account ('act2') must match the account in the auth
        path ('act'); a mismatch 401s."""
        resp = self._make_request(
            '/auth/v1/act/auth',
            headers={'X-Auth-User': 'act2:usr',
                     'X-Auth-Key': 'key'}).get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="act"')
    def test_get_token_fail_bad_path(self):
        """Extra path segments after /auth/v1/<act>/auth are a 400."""
        resp = self._make_request(
            '/auth/v1/act/auth/invalid',
            headers={'X-Auth-User': 'act:usr',
                     'X-Auth-Key': 'key'}).get_response(self.test_auth)
        self.assertEqual(resp.status_int, 400)
    def test_get_token_fail_missing_key(self):
        """Omitting X-Auth-Key on a token request yields a 401 challenge."""
        resp = self._make_request(
            '/auth/v1/act/auth',
            headers={'X-Auth-User': 'act:usr'}).get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="act"')
    def test_object_name_containing_slash(self):
        """Slashes inside the object name must not confuse path parsing; an
        authorized request still reaches the app (204)."""
        test_auth = auth.filter_factory({'user_acct_user': 'testing'})(
            FakeApp(iter(NO_CONTENT_RESP * 1)))
        req = self._make_request('/v1/AUTH_acct/cont/obj/name/with/slash',
                                 headers={'X-Auth-Token': 'AUTH_t'})
        cache_key = 'AUTH_/token/AUTH_t'
        cache_entry = (time() + 3600, 'AUTH_acct')
        req.environ['swift.cache'].set(cache_key, cache_entry)
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 204)
    def test_storage_url_default(self):
        """Without an HTTP Host header the storage URL falls back to
        SERVER_NAME:SERVER_PORT."""
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        del req.environ['HTTP_HOST']
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['x-storage-url'],
                         'http://bob:1234/v1/AUTH_test')
    def test_storage_url_based_on_host(self):
        """An HTTP Host header takes precedence over SERVER_NAME/PORT when
        building the storage URL."""
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['x-storage-url'],
                         'http://somehost:5678/v1/AUTH_test')
    def test_storage_url_overridden_scheme(self):
        """The storage_url_scheme option replaces the URL scheme in the
        storage URL verbatim."""
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing',
                                 'storage_url_scheme': 'fake'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['x-storage-url'],
                         'fake://somehost:5678/v1/AUTH_test')
    def test_use_old_token_from_memcached(self):
        """If the user already has an unexpired cached token, a new token
        request re-issues that token with its remaining lifetime."""
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing',
                                 'storage_url_scheme': 'fake'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        # Seed the user->token mapping and a still-valid token entry.
        req.environ['swift.cache'].set('AUTH_/user/test:tester', 'uuid_token')
        expires = time() + 180
        req.environ['swift.cache'].set('AUTH_/token/uuid_token',
                                       (expires, 'test,test:tester'))
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['x-auth-token'], 'uuid_token')
        self.assertEqual(resp.headers['x-auth-token'],
                         resp.headers['x-storage-token'])
        # Remaining life of the reused token, not a fresh full lifetime.
        self.assertAlmostEqual(int(resp.headers['x-auth-token-expires']),
                               179.5, delta=0.5)
    def test_old_token_overdate(self):
        """An expired cached token (expiry 0) is discarded and a fresh
        AUTH_tk token with the full default lifetime is minted."""
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing',
                                 'storage_url_scheme': 'fake'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        req.environ['swift.cache'].set('AUTH_/user/test:tester', 'uuid_token')
        # Expiry time of 0 is long past, so the token is stale.
        req.environ['swift.cache'].set('AUTH_/token/uuid_token',
                                       (0, 'test,test:tester'))
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertNotEqual(resp.headers['x-auth-token'], 'uuid_token')
        self.assertEqual(resp.headers['x-auth-token'][:7], 'AUTH_tk')
        self.assertAlmostEqual(int(resp.headers['x-auth-token-expires']),
                               auth.DEFAULT_TOKEN_LIFE - 0.5, delta=0.5)
    def test_old_token_with_old_data(self):
        """A cached token whose group data no longer matches the current
        user configuration (extra '.role') is replaced with a new token."""
        self.test_auth = \
            auth.filter_factory({'user_test_tester': 'testing',
                                 'storage_url_scheme': 'fake'})(FakeApp())
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': 'test:tester', 'X-Auth-Key': 'testing'})
        req.environ['HTTP_HOST'] = 'somehost:5678'
        req.environ['SERVER_NAME'] = 'bob'
        req.environ['SERVER_PORT'] = '1234'
        req.environ['swift.cache'].set('AUTH_/user/test:tester', 'uuid_token')
        # Unexpired, but carries a stale group list.
        req.environ['swift.cache'].set('AUTH_/token/uuid_token',
                                       (time() + 99, 'test,test:tester,.role'))
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 200)
        self.assertNotEqual(resp.headers['x-auth-token'], 'uuid_token')
        self.assertEqual(resp.headers['x-auth-token'][:7], 'AUTH_tk')
        self.assertAlmostEqual(int(resp.headers['x-auth-token-expires']),
                               auth.DEFAULT_TOKEN_LIFE - 0.5, delta=0.5)
    def test_reseller_admin_is_owner(self):
        """.reseller_admin is authorized AND marked swift_owner=True;
        .reseller_reader is authorized but NOT an owner."""
        # Wrap authorize so we can observe the swift_owner flag it sets.
        orig_authorize = self.test_auth.authorize
        owner_values = []
        def mitm_authorize(req):
            rv = orig_authorize(req)
            owner_values.append(req.environ.get('swift_owner', False))
            return rv
        self.test_auth.authorize = mitm_authorize
        req = self._make_request('/v1/AUTH_cfa',
                                 headers={'X-Auth-Token': 'AUTH_t'})
        req.remote_user = '.reseller_admin'
        resp = self.test_auth.authorize(req)
        self.assertIsNone(resp)
        self.assertEqual(owner_values, [True])
        owner_values = []
        req = self._make_request('/v1/AUTH_cfa',
                                 headers={'X-Auth-Token': 'AUTH_t'})
        req.remote_user = '.reseller_reader'
        resp = self.test_auth.authorize(req)
        self.assertIsNone(resp)
        self.assertEqual(owner_values, [False])
    def test_admin_is_owner(self):
        """A user with the account's own group (AUTH_cfa) is flagged as
        swift_owner by authorize."""
        # Wrap authorize so we can observe the swift_owner flag it sets.
        orig_authorize = self.test_auth.authorize
        owner_values = []
        def mitm_authorize(req):
            rv = orig_authorize(req)
            owner_values.append(req.environ.get('swift_owner', False))
            return rv
        self.test_auth.authorize = mitm_authorize
        req = self._make_request(
            '/v1/AUTH_cfa',
            headers={'X-Auth-Token': 'AUTH_t'})
        req.remote_user = 'AUTH_cfa'
        self.test_auth.authorize(req)
        self.assertEqual(owner_values, [True])
    def test_regular_is_not_owner(self):
        """A plain user without the account group is not flagged as
        swift_owner."""
        # Wrap authorize so we can observe the swift_owner flag it sets.
        orig_authorize = self.test_auth.authorize
        owner_values = []
        def mitm_authorize(req):
            rv = orig_authorize(req)
            owner_values.append(req.environ.get('swift_owner', False))
            return rv
        self.test_auth.authorize = mitm_authorize
        req = self._make_request(
            '/v1/AUTH_cfa/c',
            headers={'X-Auth-Token': 'AUTH_t'})
        req.remote_user = 'act:usr'
        self.test_auth.authorize(req)
        self.assertEqual(owner_values, [False])
    def test_sync_request_success(self):
        """A container-sync DELETE with the correct sync key and an
        x-timestamp passes auth and reaches the app (204)."""
        self.test_auth.app = FakeApp(iter(NO_CONTENT_RESP * 1),
                                     sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 204)
    def test_sync_request_fail_key(self):
        """Container-sync requests 401 when the provided key doesn't match
        the container's key, when the container has a different key, or
        when the container has no sync key at all."""
        # Wrong key supplied by the peer.
        self.test_auth.app = FakeApp(sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'wrongsecret',
                     'x-timestamp': '123.456'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="AUTH_cfa"')
        # Right key supplied, but the container holds a different one.
        self.test_auth.app = FakeApp(sync_key='othersecret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="AUTH_cfa"')
        # Container has no sync key configured at all.
        self.test_auth.app = FakeApp(sync_key=None)
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="AUTH_cfa"')
    def test_sync_request_fail_no_timestamp(self):
        """A container-sync request without x-timestamp is rejected (401)
        even when the sync key matches."""
        self.test_auth.app = FakeApp(sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret'})
        req.remote_addr = '127.0.0.1'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)
        self.assertEqual(resp.headers.get('Www-Authenticate'),
                         'Swift realm="AUTH_cfa"')
    def test_sync_request_success_lb_sync_host(self):
        """Behind a load balancer, x-forwarded-for or x-cluster-client-ip
        may carry the real client address; sync requests still succeed."""
        self.test_auth.app = FakeApp(iter(NO_CONTENT_RESP * 1),
                                     sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456',
                     'x-forwarded-for': '127.0.0.1'})
        req.remote_addr = '127.0.0.2'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 204)
        self.test_auth.app = FakeApp(iter(NO_CONTENT_RESP * 1),
                                     sync_key='secret')
        req = self._make_request(
            '/v1/AUTH_cfa/c/o',
            environ={'REQUEST_METHOD': 'DELETE'},
            headers={'x-container-sync-key': 'secret',
                     'x-timestamp': '123.456',
                     'x-cluster-client-ip': '127.0.0.1'})
        req.remote_addr = '127.0.0.2'
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 204)
def test_options_call(self):
req = self._make_request('/v1/AUTH_cfa/c/o',
environ={'REQUEST_METHOD': 'OPTIONS'})
resp = self.test_auth.authorize(req)
self.assertIsNone(resp)
    def test_get_user_group(self):
        # More tests in TestGetUserGroups class
        app = FakeApp()
        ath = auth.filter_factory({})(app)
        # .admin membership adds the account group to the user's groups.
        ath.users = {'test:tester': {'groups': ['.admin']}}
        groups = ath._get_user_groups('test', 'test:tester', 'AUTH_test')
        self.assertEqual(groups, 'test,test:tester,AUTH_test')
        # Without .admin, only the account and user names are granted.
        ath.users = {'test:tester': {'groups': []}}
        groups = ath._get_user_groups('test', 'test:tester', 'AUTH_test')
        self.assertEqual(groups, 'test,test:tester')
def test_auth_scheme(self):
req = self._make_request('/v1/BLAH_account',
headers={'X-Auth-Token': 'BLAH_t'})
resp = req.get_response(self.test_auth)
self.assertEqual(resp.status_int, 401)
self.assertTrue('Www-Authenticate' in resp.headers)
self.assertEqual(resp.headers.get('Www-Authenticate'),
'Swift realm="BLAH_account"')
    def test_successful_token_unicode_user(self):
        """Non-ASCII account/user names round-trip through token issuance,
        token reuse, and account access (both quoted and raw paths)."""
        app = FakeApp(iter(NO_CONTENT_RESP * 2))
        conf = {u'user_tést_téster': u'páss .admin'}
        if six.PY2:
            # py2 config values arrive as UTF-8 bytes.
            conf = {k.encode('utf8'): v.encode('utf8')
                    for k, v in conf.items()}
        ath = auth.filter_factory(conf)(app)
        quoted_acct = quote(u'/v1/AUTH_tést'.encode('utf8'))
        memcache = FakeMemcache()
        wsgi_user = bytes_to_wsgi(u'tést:téster'.encode('utf8'))
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': wsgi_user,
                     'X-Auth-Key': bytes_to_wsgi(u'páss'.encode('utf8'))})
        req.environ['swift.cache'] = memcache
        resp = req.get_response(ath)
        self.assertEqual(resp.status_int, 200)
        auth_token = resp.headers['X-Auth-Token']
        self.assertEqual(quoted_acct,
                         urlparse(resp.headers['X-Storage-Url']).path)
        # A second request with the same cache must reuse the same token.
        req = self._make_request(
            '/auth/v1.0',
            headers={'X-Auth-User': wsgi_user,
                     'X-Auth-Key': bytes_to_wsgi(u'páss'.encode('utf8'))})
        req.environ['swift.cache'] = memcache
        resp = req.get_response(ath)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(auth_token, resp.headers['X-Auth-Token'])
        self.assertEqual(quoted_acct,
                         urlparse(resp.headers['X-Storage-Url']).path)
        # storage urls should be url-encoded...
        req = self._make_request(
            quoted_acct, headers={'X-Auth-Token': auth_token})
        req.environ['swift.cache'] = memcache
        resp = req.get_response(ath)
        self.assertEqual(204, resp.status_int)
        # ...but it also works if you send the account raw
        req = self._make_request(
            u'/v1/AUTH_tést'.encode('utf8'),
            headers={'X-Auth-Token': auth_token})
        req.environ['swift.cache'] = memcache
        resp = req.get_response(ath)
        self.assertEqual(204, resp.status_int)
def test_request_method_not_allowed(self):
test_auth = auth.filter_factory({'user_ac_user': 'testing'})(FakeApp())
req = self._make_request(
'/auth/v1.0',
headers={'X-Auth-User': 'ac:user', 'X-Auth-Key': 'testing'},
environ={'REQUEST_METHOD': 'PUT'})
resp = req.get_response(test_auth)
self.assertEqual(resp.status_int, 405)
req = self._make_request(
'/auth/v1.0',
headers={'X-Auth-User': 'ac:user', 'X-Auth-Key': 'testing'},
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(test_auth)
self.assertEqual(resp.status_int, 405)
req = self._make_request(
'/auth/v1.0',
headers={'X-Auth-User': 'ac:user', 'X-Auth-Key': 'testing'},
environ={'REQUEST_METHOD': 'POST'})
resp = req.get_response(test_auth)
self.assertEqual(resp.status_int, 405)
req = self._make_request(
'/auth/v1.0',
headers={'X-Auth-User': 'ac:user', 'X-Auth-Key': 'testing'},
environ={'REQUEST_METHOD': 'DELETE'})
resp = req.get_response(test_auth)
self.assertEqual(resp.status_int, 405)
class TestAuthWithMultiplePrefixes(TestAuth):
    """
    Repeats all tests in TestAuth except adds multiple
    reseller_prefix items
    """
    def setUp(self):
        # Same auth middleware as TestAuth, but configured with three
        # reseller prefixes so every inherited test runs against the
        # multi-prefix code path.
        self.test_auth = auth.filter_factory(
            {'reseller_prefix': 'AUTH_, SOMEOTHER_, YETANOTHER_'})(FakeApp())
class TestGetUserGroups(unittest.TestCase):
    """Focused tests for TempAuth._get_user_groups()."""
    def test_custom_url_config(self):
        # A per-user storage URL adds its account (AUTH_monkey) to the
        # group list alongside the default-prefix account.
        app = FakeApp()
        ath = auth.filter_factory({
            'user_test_tester':
            'testing .admin http://saio:8080/v1/AUTH_monkey'})(app)
        groups = ath._get_user_groups('test', 'test:tester', 'AUTH_monkey')
        self.assertEqual(groups, 'test,test:tester,AUTH_test,AUTH_monkey')
    def test_no_prefix_reseller(self):
        # With an empty reseller prefix, .admin adds no extra account group.
        app = FakeApp()
        ath = auth.filter_factory({'reseller_prefix': ''})(app)
        ath.users = {'test:tester': {'groups': ['.admin']}}
        groups = ath._get_user_groups('test', 'test:tester', 'test')
        self.assertEqual(groups, 'test,test:tester')
        ath.users = {'test:tester': {'groups': []}}
        groups = ath._get_user_groups('test', 'test:tester', 'test')
        self.assertEqual(groups, 'test,test:tester')
    def test_single_reseller(self):
        # Default single prefix: .admin grants the AUTH_<account> group.
        app = FakeApp()
        ath = auth.filter_factory({})(app)
        ath.users = {'test:tester': {'groups': ['.admin']}}
        groups = ath._get_user_groups('test', 'test:tester', 'AUTH_test')
        self.assertEqual(groups, 'test,test:tester,AUTH_test')
        ath.users = {'test:tester': {'groups': []}}
        groups = ath._get_user_groups('test', 'test:tester', 'AUTH_test')
        self.assertEqual(groups, 'test,test:tester')
    def test_multiple_reseller(self):
        # With several prefixes, .admin grants one account group per prefix.
        app = FakeApp()
        ath = auth.filter_factory(
            {'reseller_prefix': 'AUTH_, SOMEOTHER_, YETANOTHER_'})(app)
        self.assertEqual(ath.reseller_prefixes, ['AUTH_', 'SOMEOTHER_',
                                                 'YETANOTHER_'])
        ath.users = {'test:tester': {'groups': ['.admin']}}
        groups = ath._get_user_groups('test', 'test:tester', 'AUTH_test')
        self.assertEqual(groups,
                         'test,test:tester,AUTH_test,'
                         'SOMEOTHER_test,YETANOTHER_test')
        ath.users = {'test:tester': {'groups': []}}
        groups = ath._get_user_groups('test', 'test:tester', 'AUTH_test')
        self.assertEqual(groups, 'test,test:tester')
class TestDefinitiveAuth(unittest.TestCase):
    """Tests for TempAuth._is_definitive_auth(): whether tempauth claims
    final say over a given /v1/<account> path based on its prefixes."""
    def setUp(self):
        self.test_auth = auth.filter_factory(
            {'reseller_prefix': 'AUTH_, SOMEOTHER_'})(FakeApp())
    def test_noreseller_prefix(self):
        # An empty prefix list means tempauth is never definitive.
        ath = auth.filter_factory({'reseller_prefix': ''})(FakeApp())
        result = ath._is_definitive_auth(path='/v1/test')
        self.assertEqual(result, False)
        result = ath._is_definitive_auth(path='/v1/AUTH_test')
        self.assertEqual(result, False)
        result = ath._is_definitive_auth(path='/v1/BLAH_test')
        self.assertEqual(result, False)
    def test_blank_prefix(self):
        # A quoted-blank entry ('') is treated as no-prefix, while the
        # other listed prefix still matches (underscore is appended).
        ath = auth.filter_factory({'reseller_prefix':
                                   " '', SOMEOTHER"})(FakeApp())
        result = ath._is_definitive_auth(path='/v1/test')
        self.assertEqual(result, False)
        result = ath._is_definitive_auth(path='/v1/SOMEOTHER_test')
        self.assertEqual(result, True)
        result = ath._is_definitive_auth(path='/v1/SOMEOTHERtest')
        self.assertEqual(result, False)
    def test_default_prefix(self):
        # Default configuration and explicit 'AUTH' behave identically.
        ath = auth.filter_factory({})(FakeApp())
        result = ath._is_definitive_auth(path='/v1/AUTH_test')
        self.assertEqual(result, True)
        result = ath._is_definitive_auth(path='/v1/BLAH_test')
        self.assertEqual(result, False)
        ath = auth.filter_factory({'reseller_prefix': 'AUTH'})(FakeApp())
        result = ath._is_definitive_auth(path='/v1/AUTH_test')
        self.assertEqual(result, True)
        result = ath._is_definitive_auth(path='/v1/BLAH_test')
        self.assertEqual(result, False)
    def test_multiple_prefixes(self):
        # Any listed prefix makes tempauth definitive; others do not.
        ath = auth.filter_factory({'reseller_prefix':
                                   'AUTH, SOMEOTHER'})(FakeApp())
        result = ath._is_definitive_auth(path='/v1/AUTH_test')
        self.assertEqual(result, True)
        result = ath._is_definitive_auth(path='/v1/SOMEOTHER_test')
        self.assertEqual(result, True)
        result = ath._is_definitive_auth(path='/v1/BLAH_test')
        self.assertEqual(result, False)
class TestParseUserCreation(unittest.TestCase):
    """Tests for parsing user_<acct>_<user> / user64_* conf entries into
    the middleware's users dict."""
    def test_parse_user_creation(self):
        # Conf values are '<key> [group ...] [storage-url]'; without an
        # explicit URL, $HOST/v1/<prefix>_<account> is synthesized.
        auth_filter = auth.filter_factory({
            'reseller_prefix': 'ABC',
            'user_test_tester3': 'testing',
            'user_has_url': 'urlly .admin http://a.b/v1/DEF_has',
            'user_admin_admin': 'admin .admin .reseller_admin',
            'user_admin_auditor': 'admin_ro .reseller_reader',
        })(FakeApp())
        self.assertEqual(auth_filter.users, {
            'admin:admin': {
                'url': '$HOST/v1/ABC_admin',
                'groups': ['.admin', '.reseller_admin'],
                'key': 'admin'
            }, 'admin:auditor': {
                'url': '$HOST/v1/ABC_admin',
                'groups': ['.reseller_reader'],
                'key': 'admin_ro'
            }, 'test:tester3': {
                'url': '$HOST/v1/ABC_test',
                'groups': [],
                'key': 'testing'
            }, 'has:url': {
                'url': 'http://a.b/v1/DEF_has',
                'groups': ['.admin'],
                'key': 'urlly'
            },
        })
    def test_base64_encoding(self):
        # user64_<b64(acct)>_<b64(user)> entries allow names that aren't
        # legal in conf keys; padding '=' is stripped from the encoding.
        auth_filter = auth.filter_factory({
            'reseller_prefix': 'ABC',
            'user64_%s_%s' % (
                b64encode('test').rstrip('='),
                b64encode('tester3').rstrip('=')):
                'testing .reseller_admin',
            'user64_%s_%s' % (
                b64encode('user_foo').rstrip('='),
                b64encode('ab').rstrip('=')):
                'urlly .admin http://a.b/v1/DEF_has',
        })(FakeApp())
        self.assertEqual(auth_filter.users, {
            'test:tester3': {
                'url': '$HOST/v1/ABC_test',
                'groups': ['.reseller_admin'],
                'key': 'testing'
            }, 'user_foo:ab': {
                'url': 'http://a.b/v1/DEF_has',
                'groups': ['.admin'],
                'key': 'urlly'
            },
        })
    def test_key_with_no_value(self):
        # An empty key value makes instantiating the middleware raise.
        self.assertRaises(ValueError, auth.filter_factory({
            'user_test_tester3': 'testing',
            'user_bob_bobby': '',
            'user_admin_admin': 'admin .admin .reseller_admin',
        }), FakeApp())
    def test_account_with_no_user(self):
        # 'user_testtester' lacks the second underscore separating the
        # account from the user name.
        expected_msg = 'key user_testtester was provided in an invalid format'
        with self.assertRaises(ValueError) as ctx:
            auth.filter_factory({
                'user_testtester': 'testing',
            })(FakeApp())
        self.assertEqual(str(ctx.exception), expected_msg)
class TestAccountAcls(unittest.TestCase):
"""
These tests use a single reseller prefix (AUTH_) and the
target paths are /v1/AUTH_<blah>
"""
    def setUp(self):
        # Base conf (empty => default AUTH_ prefix) and the prefix used to
        # build target account paths; subclasses override both.
        self.reseller_prefix = {}
        self.accpre = 'AUTH'
    def _make_request(self, path, **kwargs):
        """Build a request carrying a valid cached token plus a target
        account whose (cached) sysmeta holds v2 account ACLs.

        Recognized kwargs: ``headers`` (default: a valid X-Auth-Token),
        ``user_groups`` (groups cached for that token, default
        'AUTH_firstacct'), ``acls`` (sysmeta dict for the target account,
        default admin/read-write/read-only entries); anything else is
        passed through to Request.blank.
        """
        # Our TestAccountAcls default request will have a valid auth token
        version, acct, _ = split_path(path, 1, 3, True)
        headers = kwargs.pop('headers', {'X-Auth-Token': 'AUTH_t'})
        user_groups = kwargs.pop('user_groups', 'AUTH_firstacct')
        # The account being accessed will have account ACLs
        acl = {'admin': ['AUTH_admin'], 'read-write': ['AUTH_rw'],
               'read-only': ['AUTH_ro']}
        header_data = {'core-access-control':
                       format_acl(version=2, acl_dict=acl)}
        acls = kwargs.pop('acls', header_data)
        req = Request.blank(path, headers=headers, **kwargs)
        # Authorize the token by populating the request's cache
        req.environ['swift.cache'] = FakeMemcache()
        cache_key = 'AUTH_/token/AUTH_t'
        cache_entry = (time() + 3600, user_groups)
        req.environ['swift.cache'].set(cache_key, cache_entry)
        # Pretend get_account_info returned ACLs in sysmeta, and we cached that
        cache_key = 'account/%s' % acct
        cache_entry = {'sysmeta': acls}
        req.environ['swift.cache'].set(cache_key, cache_entry)
        return req
def _conf(self, moreconf):
conf = self.reseller_prefix
conf.update(moreconf)
return conf
    def test_account_acl_success(self):
        """A user on the target account's admin ACL reaches the app (204)
        even without being a swift admin."""
        test_auth = auth.filter_factory(
            self._conf({'user_admin_user': 'testing'}))(
            FakeApp(iter(NO_CONTENT_RESP * 1)))
        # admin (not a swift admin) wants to read from otheracct
        req = self._make_request('/v1/%s_otheract' % self.accpre,
                                 user_groups="AUTH_admin")
        # The request returned by _make_request should be allowed
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 204)
    def test_account_acl_failures(self):
        """Users absent from the target account's ACLs — or targeting an
        account with no ACLs at all — are denied with 403."""
        test_auth = auth.filter_factory(
            self._conf({'user_admin_user': 'testing'}))(
            FakeApp())
        # If I'm not authed as anyone on the ACLs, I shouldn't get in
        req = self._make_request('/v1/%s_otheract' % self.accpre,
                                 user_groups="AUTH_bob")
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 403)
        # If the target account has no ACLs, a non-owner shouldn't get in
        req = self._make_request('/v1/%s_otheract' % self.accpre,
                                 user_groups="AUTH_admin",
                                 acls={})
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 403)
    def test_admin_privileges(self):
        """An admin-ACL user may use every verb on the account, its
        containers, and objects, and is marked swift_owner."""
        test_auth = auth.filter_factory(
            self._conf({'user_admin_user': 'testing'}))(
            FakeApp(iter(NO_CONTENT_RESP * 18)))
        for target in (
                '/v1/%s_otheracct' % self.accpre,
                '/v1/%s_otheracct/container' % self.accpre,
                '/v1/%s_otheracct/container/obj' % self.accpre):
            for method in ('GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'DELETE'):
                # Admin ACL user can do anything
                req = self._make_request(target, user_groups="AUTH_admin",
                                         environ={'REQUEST_METHOD': method})
                resp = req.get_response(test_auth)
                self.assertEqual(resp.status_int, 204)
                # swift_owner should be set to True
                if method != 'OPTIONS':
                    self.assertTrue(req.environ.get('swift_owner'))
def test_readwrite_privileges(self):
    """Read-write ACL: full access to containers/objects, read-only on
    the account itself, and never swift_owner."""
    test_auth = auth.filter_factory(
        self._conf({'user_rw_user': 'testing'}))(
        FakeApp(iter(NO_CONTENT_RESP * 15)))
    for target in ('/v1/%s_otheracct' % self.accpre,):
        for method in ('GET', 'HEAD', 'OPTIONS'):
            # Read-Write user can read account data
            req = self._make_request(target, user_groups="AUTH_rw",
                                     environ={'REQUEST_METHOD': method})
            resp = req.get_response(test_auth)
            self.assertEqual(resp.status_int, 204)
            # swift_owner should NOT be set to True
            self.assertFalse(req.environ.get('swift_owner'))
        # RW user should NOT be able to PUT, POST, or DELETE to the account
        for method in ('PUT', 'POST', 'DELETE'):
            req = self._make_request(target, user_groups="AUTH_rw",
                                     environ={'REQUEST_METHOD': method})
            resp = req.get_response(test_auth)
            self.assertEqual(resp.status_int, 403)
    # RW user should be able to GET, PUT, POST, or DELETE to containers
    # and objects
    for target in ('/v1/%s_otheracct/c' % self.accpre,
                   '/v1/%s_otheracct/c/o' % self.accpre):
        for method in ('GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'DELETE'):
            req = self._make_request(target, user_groups="AUTH_rw",
                                     environ={'REQUEST_METHOD': method})
            resp = req.get_response(test_auth)
            self.assertEqual(resp.status_int, 204)
def test_readonly_privileges(self):
    """Read-only ACL: GET/HEAD/OPTIONS allowed everywhere, all writes
    denied, and swift_owner never granted."""
    test_auth = auth.filter_factory(
        self._conf({'user_ro_user': 'testing'}))(
        FakeApp(iter(NO_CONTENT_RESP * 9)))
    targets = ('/v1/%s_otheracct' % self.accpre,
               '/v1/%s_otheracct/cont' % self.accpre,
               '/v1/%s_otheracct/cont/obj' % self.accpre)
    for target in targets:
        for verb in ('GET', 'HEAD', 'OPTIONS'):
            req = self._make_request(target, user_groups="AUTH_ro",
                                     environ={'REQUEST_METHOD': verb})
            self.assertEqual(204, req.get_response(test_auth).status_int)
            # ReadOnly ACL never confers ownership
            self.assertFalse(req.environ.get('swift_owner'))
        for verb in ('PUT', 'POST', 'DELETE'):
            req = self._make_request(target, user_groups="AUTH_ro",
                                     environ={'REQUEST_METHOD': verb})
            self.assertEqual(403, req.get_response(test_auth).status_int)
            # ...and still no ownership on the denied path
            self.assertFalse(req.environ.get('swift_owner'))
def test_user_gets_best_acl(self):
    """A user appearing in several ACL groups gets the most privileged
    one (here: admin wins over read-only)."""
    test_auth = auth.filter_factory(
        self._conf({'user_acct_username': 'testing'}))(
        FakeApp(iter(NO_CONTENT_RESP * 18)))
    # groups include read-only AND admin entries; admin should apply
    mygroups = "AUTH_acct,AUTH_ro,AUTH_something,AUTH_admin"
    for target in ('/v1/%s_otheracct' % self.accpre,
                   '/v1/%s_otheracct/container' % self.accpre,
                   '/v1/%s_otheracct/container/obj' % self.accpre):
        for method in ('GET', 'HEAD', 'OPTIONS', 'PUT', 'POST', 'DELETE'):
            # Admin ACL user can do anything
            req = self._make_request(target, user_groups=mygroups,
                                     environ={'REQUEST_METHOD': method})
            resp = req.get_response(test_auth)
            self.assertEqual(
                resp.status_int, 204, "%s (%s) - expected 204, got %d" %
                (target, method, resp.status_int))
            # swift_owner should be set to True
            if method != 'OPTIONS':
                self.assertTrue(req.environ.get('swift_owner'))
def test_acl_syntax_verification(self):
    """Posting X-Account-Access-Control validates the ACL syntax.

    Well-formed ACLs (including empty ones) are accepted with 204;
    malformed JSON, unknown keys, non-list values, or non-string list
    elements all get a 400 whose body names the offending part.
    """
    test_auth = auth.filter_factory(
        self._conf({'user_admin_user': 'testing .admin'}))(
        FakeApp(iter(NO_CONTENT_RESP * 5)))
    user_groups = test_auth._get_user_groups('admin', 'admin:user',
                                             'AUTH_admin')
    good_headers = {'X-Auth-Token': 'AUTH_t'}
    # valid ACL including a non-ASCII user name
    good_acl = json.dumps({"read-only": [u"á", "b"]})
    bad_list_types = '{"read-only": ["a", 99]}'
    bad_acl = 'syntactically invalid acl -- this does not parse as JSON'
    wrong_acl = '{"other-auth-system":["valid","json","but","wrong"]}'
    bad_value_acl = '{"read-write":["fine"],"admin":"should be a list"}'
    not_dict_acl = '["read-only"]'
    not_dict_acl2 = 1
    empty_acls = ['{}', '', '{ }']
    target = '/v1/%s_firstacct' % self.accpre

    # no acls -- no problem!
    req = self._make_request(target, headers=good_headers,
                             user_groups=user_groups)
    resp = req.get_response(test_auth)
    self.assertEqual(resp.status_int, 204)

    # syntactically valid acls should go through
    update = {'x-account-access-control': good_acl}
    req = self._make_request(target, user_groups=user_groups,
                             headers=dict(good_headers, **update))
    resp = req.get_response(test_auth)
    self.assertEqual(resp.status_int, 204,
                     'Expected 204, got %s, response body: %s'
                     % (resp.status_int, resp.body))

    # syntactically valid empty acls should go through
    for acl in empty_acls:
        update = {'x-account-access-control': acl}
        req = self._make_request(target, user_groups=user_groups,
                                 headers=dict(good_headers, **update))
        resp = req.get_response(test_auth)
        self.assertEqual(resp.status_int, 204)

    errmsg = b'X-Account-Access-Control invalid: %s'
    # syntactically invalid acls get a 400
    update = {'x-account-access-control': bad_acl}
    req = self._make_request(target, headers=dict(good_headers, **update))
    resp = req.get_response(test_auth)
    self.assertEqual(resp.status_int, 400)
    self.assertEqual(errmsg % b"Syntax error", resp.body[:46])

    # syntactically valid acls with bad keys also get a 400
    update = {'x-account-access-control': wrong_acl}
    req = self._make_request(target, headers=dict(good_headers, **update))
    resp = req.get_response(test_auth)
    self.assertEqual(resp.status_int, 400)
    self.assertTrue(resp.body.startswith(
        errmsg % b'Key "other-auth-system" not recognized'), resp.body)

    # and do something sane with crazy data (non-ASCII key)
    update = {'x-account-access-control': u'{"\u1234": []}'.encode('utf8')}
    req = self._make_request(target, headers=dict(good_headers, **update))
    resp = req.get_response(test_auth)
    self.assertEqual(resp.status_int, 400)
    self.assertTrue(resp.body.startswith(
        errmsg % b'Key "\\u1234" not recognized'), resp.body)

    # acls with good keys but bad values also get a 400
    update = {'x-account-access-control': bad_value_acl}
    req = self._make_request(target, headers=dict(good_headers, **update))
    resp = req.get_response(test_auth)
    self.assertEqual(resp.status_int, 400)
    self.assertTrue(resp.body.startswith(
        errmsg % b'Value for key "admin" must be a list'), resp.body)

    # acls with non-string-types in list also get a 400
    update = {'x-account-access-control': bad_list_types}
    req = self._make_request(target, headers=dict(good_headers, **update))
    resp = req.get_response(test_auth)
    self.assertEqual(resp.status_int, 400)
    self.assertTrue(resp.body.startswith(
        errmsg % b'Elements of "read-only" list must be strings'),
        resp.body)

    # acls with wrong json structure also get a 400
    update = {'x-account-access-control': not_dict_acl}
    req = self._make_request(target, headers=dict(good_headers, **update))
    resp = req.get_response(test_auth)
    self.assertEqual(resp.status_int, 400)
    self.assertEqual(errmsg % b"Syntax error", resp.body[:46])

    # acls with wrong json structure also get a 400
    update = {'x-account-access-control': not_dict_acl2}
    req = self._make_request(target, headers=dict(good_headers, **update))
    resp = req.get_response(test_auth)
    self.assertEqual(resp.status_int, 400)
    self.assertEqual(errmsg % b"Syntax error", resp.body[:46])
def test_acls_propagate_to_sysmeta(self):
    """A valid account ACL is copied into core-access-control sysmeta."""
    test_auth = auth.filter_factory({'user_admin_user': 'testing'})(
        FakeApp(iter(NO_CONTENT_RESP * 3)))
    sysmeta_hdr = 'x-account-sysmeta-core-access-control'
    target = '/v1/AUTH_firstacct'
    base_headers = {'X-Auth-Token': 'AUTH_t'}
    good_acl = '{"read-only":["a","b"]}'

    # no acls -- nothing to propagate
    req = self._make_request(target, headers=base_headers)
    resp = req.get_response(test_auth)
    self.assertEqual(204, resp.status_int)
    self.assertIsNone(req.headers.get(sysmeta_hdr))

    # a syntactically valid acl should land in sysmeta verbatim
    hdrs = dict(base_headers, **{'x-account-access-control': good_acl})
    req = self._make_request(target, headers=hdrs)
    resp = req.get_response(test_auth)
    self.assertEqual(204, resp.status_int)
    self.assertEqual(good_acl, req.headers.get(sysmeta_hdr))
def test_bad_acls_get_denied(self):
    """Each flavor of malformed ACL header is rejected with a 400."""
    test_auth = auth.filter_factory({'user_admin_user': 'testing'})(
        FakeApp(iter(NO_CONTENT_RESP * 3)))
    target = '/v1/AUTH_firstacct'
    good_headers = {'X-Auth-Token': 'AUTH_t'}
    for bad_acl in (
            'syntax error',
            '{"bad_key":"should_fail"}',
            '{"admin":"not a list, should fail"}',
            '{"admin":["valid"],"read-write":"not a list, should fail"}'):
        hdrs = dict(good_headers, **{'x-account-access-control': bad_acl})
        req = self._make_request(target, headers=hdrs)
        resp = req.get_response(test_auth)
        self.assertEqual(400, resp.status_int)
class TestAuthMultiplePrefixes(TestAccountAcls):
    """
    These tests repeat the same tests as TestAccountACLs,
    but use multiple reseller prefix items (AUTH_ and SOMEOTHER_).
    The target paths are /v1/SOMEOTHER_<blah>
    """
    def setUp(self):
        # Two reseller prefixes; inherited tests target the second one.
        self.reseller_prefix = {'reseller_prefix': 'AUTH_, SOMEOTHER_'}
        self.accpre = 'SOMEOTHER'
class PrefixAccount(unittest.TestCase):
    """Tests for tempauth's _get_account_prefix helper under various
    reseller_prefix configurations."""

    def test_default(self):
        # default prefix is AUTH_; unknown prefixes yield None
        conf = {}
        test_auth = auth.filter_factory(conf)(FakeApp())
        self.assertEqual(test_auth._get_account_prefix(
            'AUTH_1234'), 'AUTH_')
        self.assertIsNone(test_auth._get_account_prefix('JUNK_1234'))

    def test_same_as_default(self):
        # explicitly configuring 'AUTH' behaves like the default
        conf = {'reseller_prefix': 'AUTH'}
        test_auth = auth.filter_factory(conf)(FakeApp())
        self.assertEqual(test_auth._get_account_prefix(
            'AUTH_1234'), 'AUTH_')
        self.assertIsNone(test_auth._get_account_prefix('JUNK_1234'))

    def test_blank_reseller(self):
        # a blank prefix matches every account name
        conf = {'reseller_prefix': ''}
        test_auth = auth.filter_factory(conf)(FakeApp())
        self.assertEqual(test_auth._get_account_prefix(
            '1234'), '')
        self.assertEqual(test_auth._get_account_prefix(
            'JUNK_1234'), '')  # yes, it should return ''

    def test_multiple_resellers(self):
        # the first matching configured prefix is returned
        conf = {'reseller_prefix': 'AUTH, PRE2'}
        test_auth = auth.filter_factory(conf)(FakeApp())
        self.assertEqual(test_auth._get_account_prefix(
            'AUTH_1234'), 'AUTH_')
        self.assertIsNone(test_auth._get_account_prefix('JUNK_1234'))
class ServiceTokenFunctionality(unittest.TestCase):
    """Tests for composite token authorization: a second reseller prefix
    can require an extra group (e.g. .service) from X-Service-Token."""

    def _make_authed_request(self, conf, remote_user, path, method='GET'):
        """Make a request with tempauth as auth

        Acts as though the user had presented a token
        granting groups as described in remote_user.
        If remote_user contains the .service group, it emulates presenting
        X-Service-Token containing a .service group.

        :param conf: configuration for tempauth
        :param remote_user: the groups the user belongs to. Examples:
            acct:joe,acct                        user joe, no .admin
            acct:joe,acct,AUTH_joeacct           user joe, has .admin group
            acct:joe,acct,AUTH_joeacct,.service  adds .service group
        :param path: the path of the request
        :param method: the method (defaults to GET)
        :returns: response object
        """
        self.req = Request.blank(path)
        self.req.method = method
        self.req.remote_user = remote_user
        fake_app = FakeApp(iter([('200 OK', {}, '')]))
        test_auth = auth.filter_factory(conf)(fake_app)
        resp = self.req.get_response(test_auth)
        return resp

    def test_authed_for_path_single(self):
        # account owner can read own account (default conf)
        resp = self._make_authed_request({}, 'acct:joe,acct,AUTH_acct',
                                         '/v1/AUTH_acct')
        self.assertEqual(resp.status_int, 200)
        # owner can create a container
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'}, 'acct:joe,acct,AUTH_acct',
            '/v1/AUTH_acct/c', method='PUT')
        self.assertEqual(resp.status_int, 200)
        # .reseller_reader may GET any account...
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'admin:mary,admin,AUTH_admin,.reseller_reader',
            '/v1/AUTH_acct', method='GET')
        self.assertEqual(resp.status_int, 200)
        # ...and containers within it
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'admin:mary,admin,AUTH_admin,.reseller_reader',
            '/v1/AUTH_acct/c', method='GET')
        self.assertEqual(resp.status_int, 200)
        # .reseller_admin may read...
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'admin:mary,admin,AUTH_admin,.reseller_admin',
            '/v1/AUTH_acct', method='GET')
        self.assertEqual(resp.status_int, 200)
        # ...and even delete any account
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'admin:mary,admin,AUTH_admin,.reseller_admin',
            '/v1/AUTH_acct', method='DELETE')
        self.assertEqual(resp.status_int, 200)

    def test_denied_for_path_single(self):
        # user from another account is denied
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'fredacc:fred,fredacct,AUTH_fredacc',
            '/v1/AUTH_acct')
        self.assertEqual(resp.status_int, 403)
        # user without the account group cannot write
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'acct:joe,acct',
            '/v1/AUTH_acct',
            method='PUT')
        self.assertEqual(resp.status_int, 403)
        # even the owner cannot DELETE the account
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'acct:joe,acct,AUTH_acct',
            '/v1/AUTH_acct',
            method='DELETE')
        self.assertEqual(resp.status_int, 403)
        # .reseller_reader is read-only: all writes denied
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'admin:mary,admin,.admin,.reseller_reader',
            '/v1/AUTH_acct', method='PUT')
        self.assertEqual(resp.status_int, 403)
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'admin:mary,admin,.admin,.reseller_reader',
            '/v1/AUTH_acct', method='DELETE')
        self.assertEqual(resp.status_int, 403)
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'admin:mary,admin,.admin,.reseller_reader',
            '/v1/AUTH_acct/c', method='PUT')
        self.assertEqual(resp.status_int, 403)
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH'},
            'admin:mary,admin,.admin,.reseller_reader',
            '/v1/AUTH_acct/c', method='DELETE')
        self.assertEqual(resp.status_int, 403)

    def test_authed_for_primary_path_multiple(self):
        # second prefix without a require_group behaves like the first
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH, PRE2'},
            'acct:joe,acct,AUTH_acct,PRE2_acct',
            '/v1/PRE2_acct')
        self.assertEqual(resp.status_int, 200)

    def test_denied_for_second_path_with_only_operator_role(self):
        # User only presents a token in X-Auth-Token (or in X-Service-Token)
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH, PRE2',
             'PRE2_require_group': '.service'},
            'acct:joe,acct,AUTH_acct,PRE2_acct',
            '/v1/PRE2_acct')
        self.assertEqual(resp.status_int, 403)
        # User puts token in both X-Auth-Token and X-Service-Token
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH, PRE2',
             'PRE2_require_group': '.service'},
            'acct:joe,acct,AUTH_acct,PRE2_acct,AUTH_acct,PRE2_acct',
            '/v1/PRE2_acct')
        self.assertEqual(resp.status_int, 403)

    def test_authed_for_second_path_with_operator_role_and_service(self):
        # operator role + a .service token together satisfy require_group
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH, PRE2',
             'PRE2_require_group': '.service'},
            'acct:joe,acct,AUTH_acct,PRE2_acct,'
            'admin:mary,admin,AUTH_admin,PRE2_admin,.service',
            '/v1/PRE2_acct')
        self.assertEqual(resp.status_int, 200)

    def test_denied_for_second_path_with_only_service(self):
        # .service alone (no operator role) is not enough
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH, PRE2',
             'PRE2_require_group': '.service'},
            'admin:mary,admin,AUTH_admin,PRE2_admin,.service',
            '/v1/PRE2_acct')
        self.assertEqual(resp.status_int, 403)

    def test_denied_for_second_path_for_service_user(self):
        # User presents token with 'service' role in X-Auth-Token
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH, PRE2',
             'PRE2_require_group': '.service'},
            'admin:mary,admin,AUTH_admin,PRE2_admin,.service',
            '/v1/PRE2_acct')
        self.assertEqual(resp.status_int, 403)
        # User presents token with 'service' role in X-Auth-Token
        # and also in X-Service-Token
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH, PRE2',
             'PRE2_require_group': '.service'},
            'admin:mary,admin,AUTH_admin,PRE2_admin,.service,'
            'admin:mary,admin,AUTH_admin,PRE2_admin,.service',
            '/v1/PRE2_acct')
        self.assertEqual(resp.status_int, 403)

    def test_delete_denied_for_second_path(self):
        # account DELETE stays reserved even with operator + service
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH, PRE2',
             'PRE2_require_group': '.service'},
            'acct:joe,acct,AUTH_acct,PRE2_acct,'
            'admin:mary,admin,AUTH_admin,PRE2_admin,.service',
            '/v1/PRE2_acct',
            method='DELETE')
        self.assertEqual(resp.status_int, 403)

    def test_delete_of_second_path_by_reseller_admin(self):
        # .reseller_admin may still DELETE accounts under any prefix
        resp = self._make_authed_request(
            {'reseller_prefix': 'AUTH, PRE2',
             'PRE2_require_group': '.service'},
            'acct:joe,acct,AUTH_acct,PRE2_acct,'
            'admin:mary,admin,AUTH_admin,PRE2_admin,.reseller_admin',
            '/v1/PRE2_acct',
            method='DELETE')
        self.assertEqual(resp.status_int, 200)
class TestTokenHandling(unittest.TestCase):
    """Tests that X-Auth-Token / X-Service-Token lookups in memcache
    resolve to the expected groups and authorization results."""

    def _make_request(self, conf, path, headers, method='GET'):
        """Make a request with tempauth as auth

        It sets up AUTH_t and AUTH_s as tokens in memcache, where "joe"
        has .admin role on /v1/AUTH_acct and user "glance" has .service
        role on /v1/AUTH_admin.

        :param conf: configuration for tempauth
        :param path: the path of the request
        :param headers: allows you to pass X-Auth-Token, etc.
        :param method: the method (defaults to GET)
        :returns: response object
        """
        fake_app = FakeApp(iter([('200 OK', {}, '')]))
        self.test_auth = auth.filter_factory(conf)(fake_app)
        self.req = Request.blank(path, headers=headers)
        self.req.method = method
        self.req.environ['swift.cache'] = FakeMemcache()
        self._setup_user_and_token('AUTH_t', 'acct', 'acct:joe',
                                   '.admin')
        self._setup_user_and_token('AUTH_s', 'admin', 'admin:glance',
                                   '.service')
        resp = self.req.get_response(self.test_auth)
        return resp

    def _setup_user_and_token(self, token_name, account, account_user,
                              groups):
        """Setup named token in memcache

        :param token_name: name of token
        :param account: example: acct
        :param account_user: example: acct_joe
        :param groups: example: .admin
        """
        self.test_auth.users[account_user] = dict(groups=[groups])
        account_id = 'AUTH_%s' % account
        cache_key = 'AUTH_/token/%s' % token_name
        # cached value is (expiry, comma-joined groups string)
        cache_entry = (time() + 3600,
                       self.test_auth._get_user_groups(account,
                                                       account_user,
                                                       account_id))
        self.req.environ['swift.cache'].set(cache_key, cache_entry)

    def test_tokens_set_remote_user(self):
        conf = {}  # Default conf
        resp = self._make_request(conf, '/v1/AUTH_acct',
                                  {'x-auth-token': 'AUTH_t'})
        self.assertEqual(self.req.environ['REMOTE_USER'],
                         'acct,acct:joe,AUTH_acct')
        self.assertEqual(resp.status_int, 200)
        # Add x-service-token: its groups are appended to REMOTE_USER
        resp = self._make_request(conf, '/v1/AUTH_acct',
                                  {'x-auth-token': 'AUTH_t',
                                   'x-service-token': 'AUTH_s'})
        self.assertEqual(self.req.environ['REMOTE_USER'],
                         'acct,acct:joe,AUTH_acct,admin,admin:glance,.service')
        self.assertEqual(resp.status_int, 200)
        # Put x-auth-token value into x-service-token: groups doubled
        resp = self._make_request(conf, '/v1/AUTH_acct',
                                  {'x-auth-token': 'AUTH_t',
                                   'x-service-token': 'AUTH_t'})
        self.assertEqual(self.req.environ['REMOTE_USER'],
                         'acct,acct:joe,AUTH_acct,acct,acct:joe,AUTH_acct')
        self.assertEqual(resp.status_int, 200)

    def test_service_token_given_and_needed(self):
        # PRE2 requires .service and the service token supplies it
        conf = {'reseller_prefix': 'AUTH, PRE2',
                'PRE2_require_group': '.service'}
        resp = self._make_request(conf, '/v1/PRE2_acct',
                                  {'x-auth-token': 'AUTH_t',
                                   'x-service-token': 'AUTH_s'})
        self.assertEqual(resp.status_int, 200)

    def test_service_token_omitted(self):
        # missing service token -> authorization fails (403)
        conf = {'reseller_prefix': 'AUTH, PRE2',
                'PRE2_require_group': '.service'}
        resp = self._make_request(conf, '/v1/PRE2_acct',
                                  {'x-auth-token': 'AUTH_t'})
        self.assertEqual(resp.status_int, 403)

    def test_invalid_tokens(self):
        # bad auth token -> 401; bad service token -> 403
        conf = {'reseller_prefix': 'AUTH, PRE2',
                'PRE2_require_group': '.service'}
        resp = self._make_request(conf, '/v1/PRE2_acct',
                                  {'x-auth-token': 'AUTH_junk'})
        self.assertEqual(resp.status_int, 401)
        resp = self._make_request(conf, '/v1/PRE2_acct',
                                  {'x-auth-token': 'AUTH_t',
                                   'x-service-token': 'AUTH_junk'})
        self.assertEqual(resp.status_int, 403)
        resp = self._make_request(conf, '/v1/PRE2_acct',
                                  {'x-auth-token': 'AUTH_junk',
                                   'x-service-token': 'AUTH_s'})
        self.assertEqual(resp.status_int, 401)
class TestUtilityMethods(unittest.TestCase):
    """Direct unit tests for tempauth helper methods."""

    def test_account_acls_bad_path_raises_exception(self):
        """account_acls needs a versioned account path; '/' must raise."""
        auth_inst = auth.filter_factory({})(FakeApp())
        bad_req = Request({'PATH_INFO': '/'})
        with self.assertRaises(ValueError):
            auth_inst.account_acls(bad_req)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_tempauth.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from textwrap import dedent
import unittest
from eventlet.green import ssl
import mock
from swift.common.middleware import memcache
from swift.common.memcached import MemcacheRing
from swift.common.swob import Request
from swift.common.wsgi import loadapp
from test.unit import with_tempdir, patch_policies
class FakeApp(object):
    """Minimal WSGI app that hands back the environ it was called with,
    so tests can inspect what middleware added to it."""

    def __call__(self, env, start_response):
        # No response is produced; the environ itself is the result.
        return env
def start_response(*args):
    """No-op stand-in for a WSGI start_response callable."""
    return None
class TestCacheMiddleware(unittest.TestCase):
    """Tests for the memcache middleware: environ wiring, filter factory
    options, and config loading from proxy and memcache conf files."""

    def setUp(self):
        self.app = memcache.MemcacheMiddleware(FakeApp(), {})

    def test_cache_middleware(self):
        # middleware must install a MemcacheRing under 'swift.cache'
        req = Request.blank('/something', environ={'REQUEST_METHOD': 'GET'})
        resp = self.app(req.environ, start_response)
        self.assertTrue('swift.cache' in resp)
        self.assertTrue(isinstance(resp['swift.cache'], MemcacheRing))

    def test_filter_factory(self):
        # factory kwargs override per-filter conf for servers/connections
        factory = memcache.filter_factory({'max_connections': '3'},
                                          memcache_servers='10.10.10.10:10')
        thefilter = factory('myapp')
        self.assertEqual(thefilter.app, 'myapp')
        self.assertEqual(thefilter.memcache.memcache_servers,
                         ['10.10.10.10:10'])
        self.assertEqual(
            thefilter.memcache._client_cache['10.10.10.10:10'].max_size, 3)

    @patch_policies
    def _loadapp(self, proxy_config_path):
        """
        Load a proxy from an app.conf to get the memcache_ring

        :returns: the memcache_ring of the memcache middleware filter
        """
        with mock.patch('swift.proxy.server.Ring'):
            app = loadapp(proxy_config_path)
        memcache_ring = None
        # walk the middleware pipeline until the cache filter is found
        while True:
            memcache_ring = getattr(app, 'memcache', None)
            if memcache_ring:
                break
            app = app.app
        return memcache_ring

    @with_tempdir
    def test_real_config(self, tempdir):
        config = """
        [pipeline:main]
        pipeline = cache proxy-server
        [app:proxy-server]
        use = egg:swift#proxy
        [filter:cache]
        use = egg:swift#memcache
        """
        config_path = os.path.join(tempdir, 'test.conf')
        with open(config_path, 'w') as f:
            f.write(dedent(config))
        memcache_ring = self._loadapp(config_path)
        # only one server by default
        self.assertEqual(list(memcache_ring._client_cache.keys()),
                         ['127.0.0.1:11211'])
        # extra options
        self.assertEqual(memcache_ring._connect_timeout, 0.3)
        self.assertEqual(memcache_ring._pool_timeout, 1.0)
        # tries is limited to server count
        self.assertEqual(memcache_ring._tries, 1)
        self.assertEqual(memcache_ring._io_timeout, 2.0)
        self.assertEqual(memcache_ring.item_size_warning_threshold, -1)

    @with_tempdir
    def test_real_config_with_options(self, tempdir):
        config = """
        [pipeline:main]
        pipeline = cache proxy-server
        [app:proxy-server]
        use = egg:swift#proxy
        [filter:cache]
        use = egg:swift#memcache
        memcache_servers = 10.0.0.1:11211,10.0.0.2:11211,10.0.0.3:11211,
            10.0.0.4:11211
        connect_timeout = 1.0
        pool_timeout = 0.5
        tries = 4
        io_timeout = 1.0
        tls_enabled = true
        item_size_warning_threshold = 1000
        """
        config_path = os.path.join(tempdir, 'test.conf')
        with open(config_path, 'w') as f:
            f.write(dedent(config))
        memcache_ring = self._loadapp(config_path)
        self.assertEqual(sorted(memcache_ring._client_cache.keys()),
                         ['10.0.0.%d:11211' % i for i in range(1, 5)])
        # extra options
        self.assertEqual(memcache_ring._connect_timeout, 1.0)
        self.assertEqual(memcache_ring._pool_timeout, 0.5)
        # tries is limited to server count
        self.assertEqual(memcache_ring._tries, 4)
        self.assertEqual(memcache_ring._io_timeout, 1.0)
        # error-limiting options keep their defaults
        self.assertEqual(memcache_ring._error_limit_count, 10)
        self.assertEqual(memcache_ring._error_limit_time, 60)
        self.assertEqual(memcache_ring._error_limit_duration, 60)
        # tls_enabled should give each client an SSLContext
        self.assertIsInstance(
            list(memcache_ring._client_cache.values())[0]._tls_context,
            ssl.SSLContext)
        self.assertEqual(memcache_ring.item_size_warning_threshold, 1000)

    @with_tempdir
    def test_real_memcache_config(self, tempdir):
        proxy_config = """
        [DEFAULT]
        swift_dir = %s
        [pipeline:main]
        pipeline = cache proxy-server
        [app:proxy-server]
        use = egg:swift#proxy
        [filter:cache]
        use = egg:swift#memcache
        connect_timeout = 1.0
        """ % tempdir
        proxy_config_path = os.path.join(tempdir, 'test.conf')
        with open(proxy_config_path, 'w') as f:
            f.write(dedent(proxy_config))
        memcache_config = """
        [memcache]
        memcache_servers = 10.0.0.1:11211,10.0.0.2:11211,10.0.0.3:11211,
            10.0.0.4:11211
        connect_timeout = 0.5
        io_timeout = 1.0
        error_suppression_limit = 0
        error_suppression_interval = 1.5
        item_size_warning_threshold = 50
        """
        memcache_config_path = os.path.join(tempdir, 'memcache.conf')
        with open(memcache_config_path, 'w') as f:
            f.write(dedent(memcache_config))
        memcache_ring = self._loadapp(proxy_config_path)
        self.assertEqual(sorted(memcache_ring._client_cache.keys()),
                         ['10.0.0.%d:11211' % i for i in range(1, 5)])
        # proxy option takes precedence
        self.assertEqual(memcache_ring._connect_timeout, 1.0)
        # default tries are not limited by servers
        self.assertEqual(memcache_ring._tries, 3)
        # memcache conf options are defaults
        self.assertEqual(memcache_ring._io_timeout, 1.0)
        self.assertEqual(memcache_ring._error_limit_count, 0)
        self.assertEqual(memcache_ring._error_limit_time, 1.5)
        self.assertEqual(memcache_ring._error_limit_duration, 1.5)
        self.assertEqual(memcache_ring.item_size_warning_threshold, 50)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_memcache.py |
# Copyright (c) 2019 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import json
import os
import time
import mock
import unittest
import six
from six.moves import urllib
from swift.common import swob, utils
from swift.common.middleware import versioned_writes, copy, symlink, \
listing_formats
from swift.common.swob import Request, wsgi_quote, str_to_wsgi
from swift.common.middleware.symlink import TGT_OBJ_SYSMETA_SYMLINK_HDR, \
ALLOW_RESERVED_NAMES, SYMLOOP_EXTEND
from swift.common.middleware.versioned_writes.object_versioning import \
SYSMETA_VERSIONS_CONT, SYSMETA_VERSIONS_ENABLED, \
SYSMETA_VERSIONS_SYMLINK, DELETE_MARKER_CONTENT_TYPE
from swift.common.request_helpers import get_reserved_name
from swift.common.storage_policy import StoragePolicy
from swift.common.utils import md5
from swift.proxy.controllers.base import get_cache_key
from test.unit import patch_policies, FakeMemcache, make_timestamp_iter
from test.unit.common.middleware.helpers import FakeSwift
def local_tz(func):
    '''
    Decorator to change the timezone when running a test.

    This uses the Eastern Time Zone definition from the time module's docs.
    Note that the timezone affects things like time.time() and time.mktime().

    The original TZ state is restored exactly afterwards: if TZ was unset
    before the test, it is removed again rather than left as '' (POSIX
    treats TZ='' as UTC, which is not the same as an unset TZ).
    '''
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Remember whether TZ existed at all, not just its value.
        had_tz = 'TZ' in os.environ
        old_tz = os.environ.get('TZ', '')
        try:
            os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
            time.tzset()
            return func(*args, **kwargs)
        finally:
            if had_tz:
                os.environ['TZ'] = old_tz
            else:
                os.environ.pop('TZ', None)
            time.tzset()
    return wrapper
class ObjectVersioningBaseTestCase(unittest.TestCase):
    """Shared fixture for object-versioning middleware tests.

    Builds the full middleware stack (listing_formats -> copy ->
    object_versioning -> symlink -> FakeSwift) plus several pre-seeded
    FakeMemcache instances modeling different versioning states.
    """

    def setUp(self):
        self.app = FakeSwift()
        conf = {}
        # stack: listing_formats(copy(object_versioning(symlink(FakeSwift))))
        self.sym = symlink.filter_factory(conf)(self.app)
        self.sym.logger = self.app.logger
        self.ov = versioned_writes.object_versioning.\
            ObjectVersioningMiddleware(self.sym, conf)
        self.ov.logger = self.app.logger
        self.cp = copy.filter_factory({})(self.ov)
        self.lf = listing_formats.ListingFilter(self.cp, {}, self.app.logger)
        self.ts = make_timestamp_iter()

        # container 'c' with versioning enabled; hidden container exists
        cont_cache_version_on = {'sysmeta': {
            'versions-container': self.build_container_name('c'),
            'versions-enabled': 'true'}}
        self.cache_version_on = FakeMemcache()
        self.cache_version_on.set(get_cache_key('a'), {'status': 200})
        self.cache_version_on.set(get_cache_key('a', 'c'),
                                  cont_cache_version_on)
        self.cache_version_on.set(
            get_cache_key('a', self.build_container_name('c')),
            {'status': 200})

        # versioning enabled but the hidden container is missing (404)
        self.cache_version_on_but_busted = FakeMemcache()
        self.cache_version_on_but_busted.set(get_cache_key('a'),
                                             {'status': 200})
        self.cache_version_on_but_busted.set(get_cache_key('a', 'c'),
                                             cont_cache_version_on)
        self.cache_version_on_but_busted.set(
            get_cache_key('a', self.build_container_name('c')),
            {'status': 404})

        # versioning was enabled once, now switched off
        cont_cache_version_off = {'sysmeta': {
            'versions-container': self.build_container_name('c'),
            'versions-enabled': 'false'}}
        self.cache_version_off = FakeMemcache()
        self.cache_version_off.set(get_cache_key('a'), {'status': 200})
        self.cache_version_off.set(get_cache_key('a', 'c'),
                                   cont_cache_version_off)
        self.cache_version_off.set(
            get_cache_key('a', self.build_container_name('c')),
            {'status': 200})

        # versioning never enabled on this container
        self.cache_version_never_on = FakeMemcache()
        self.cache_version_never_on.set(get_cache_key('a'), {'status': 200})
        self.cache_version_never_on.set(get_cache_key('a', 'c'),
                                        {'status': 200})
        # "versioned_writes.update_or_copy_footer" makes the COPY subrequest
        # ignore any existing x-object-manifest headers
        self.expected_unread_requests = {}

    def tearDown(self):
        # every registered backend request body must have been consumed
        self.assertEqual(self.app.unclosed_requests, {})
        self.assertEqual(self.app.unread_requests,
                         self.expected_unread_requests)

    def call_ov(self, req):
        """Run a request through the full stack; returns (status, headers,
        body) and records authorize() calls in self.authorized."""
        # authorized gets reset everytime
        self.authorized = []

        def authorize(req):
            self.authorized.append(req)

        if 'swift.authorize' not in req.environ:
            req.environ['swift.authorize'] = authorize
        req.headers.setdefault("User-Agent", "Marula Kruger")

        status = [None]
        headers = [None]

        def start_response(s, h, ei=None):
            status[0] = s
            headers[0] = h

        body_iter = self.lf(req.environ, start_response)
        with utils.closing_if_possible(body_iter):
            body = b''.join(body_iter)

        return status[0], headers[0], body

    def assertRequestEqual(self, req, other):
        # method+path equality is enough for these tests
        self.assertEqual(req.method, other.method)
        self.assertEqual(req.path, other.path)

    def str_to_wsgi(self, native_str):
        # py2: encode unicode before converting to a WSGI string
        if six.PY2 and isinstance(native_str, six.text_type):
            native_str = native_str.encode('utf8')
        return str_to_wsgi(native_str)

    def build_container_name(self, cont):
        # hidden container is in the reserved namespace
        return get_reserved_name('versions', cont)

    def build_object_name(self, obj, version):
        return get_reserved_name(obj, version)

    def build_symlink_path(self, cont, obj, version):
        cont = self.build_container_name(cont)
        obj = self.build_object_name(obj, version)
        return wsgi_quote(self.str_to_wsgi("%s/%s" % (cont, obj)))

    def build_versions_path(self, acc='a', cont='c', obj=None, version=None):
        # path to the hidden versions container (or a version object in it)
        cont = self.build_container_name(cont)
        if not obj:
            return self.str_to_wsgi("/v1/%s/%s" % (acc, cont))
        obj = self.build_object_name(obj, version)
        return self.str_to_wsgi("/v1/%s/%s/%s" % (acc, cont, obj))
class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
def test_put_container(self):
    """Container PUT with X-Versions-Enabled creates the hidden versions
    container and stamps the user container with versioning sysmeta."""
    self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '')
    self.app.register('HEAD', '/v1/a/c', swob.HTTPOk, {}, '')
    self.app.register('PUT', self.build_versions_path(), swob.HTTPOk, {},
                      'passed')
    self.app.register('PUT', '/v1/a/c', swob.HTTPAccepted, {}, 'passed')
    req = Request.blank('/v1/a/c',
                        headers={'X-Versions-Enabled': 'true'},
                        environ={'REQUEST_METHOD': 'PUT'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '202 Accepted')

    # check for sysmeta header on the final PUT to the user container
    calls = self.app.calls_with_headers
    self.assertEqual(4, len(calls))
    method, path, headers = calls[3]
    self.assertEqual('PUT', method)
    self.assertEqual('/v1/a/c', path)
    self.assertIn(SYSMETA_VERSIONS_CONT, headers)
    self.assertEqual(headers[SYSMETA_VERSIONS_CONT],
                     wsgi_quote(self.str_to_wsgi(
                         self.build_container_name('c'))))
    self.assertIn(SYSMETA_VERSIONS_ENABLED, headers)
    self.assertEqual(headers[SYSMETA_VERSIONS_ENABLED], 'True')
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
@patch_policies([StoragePolicy(0, 'zero', True),
                 StoragePolicy(1, 'one', False)])
def test_same_policy_as_existing_container(self):
    """Enabling versioning on an existing container creates the hidden
    container with the same storage policy as the original."""
    self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '')
    # existing container uses (non-default) policy index 1
    self.app.register('GET', '/v1/a/c', swob.HTTPOk, {
        'x-backend-storage-policy-index': 1}, '')
    self.app.register('PUT', self.build_versions_path(), swob.HTTPOk, {},
                      'passed')
    self.app.register('POST', '/v1/a/c', swob.HTTPNoContent, {}, '')
    req = Request.blank('/v1/a/c',
                        headers={'X-Versions-Enabled': 'true'},
                        environ={'REQUEST_METHOD': 'POST'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '204 No Content')

    # check for sysmeta header
    calls = self.app.calls_with_headers
    self.assertEqual(4, len(calls))
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])

    # request to create versions container carries the matched policy
    method, path, headers = calls[2]
    self.assertEqual('PUT', method)
    self.assertEqual(self.build_versions_path(), path)
    self.assertIn('X-Storage-Policy', headers)
    self.assertEqual('one', headers['X-Storage-Policy'])

    # request to enable versioning on primary container
    method, path, headers = calls[3]
    self.assertEqual('POST', method)
    self.assertEqual('/v1/a/c', path)
    self.assertIn(SYSMETA_VERSIONS_CONT, headers)
    self.assertEqual(headers[SYSMETA_VERSIONS_CONT],
                     wsgi_quote(self.str_to_wsgi(
                         self.build_container_name('c'))))
    self.assertIn(SYSMETA_VERSIONS_ENABLED, headers)
    self.assertEqual(headers[SYSMETA_VERSIONS_ENABLED], 'True')
@patch_policies([StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one', False, is_deprecated=True)])
def test_existing_container_has_deprecated_policy(self):
self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '')
self.app.register('GET', '/v1/a/c', swob.HTTPOk, {
'x-backend-storage-policy-index': 1}, '')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Enabled': 'true'},
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '400 Bad Request')
self.assertEqual(body,
b'Cannot enable object versioning on a container '
b'that uses a deprecated storage policy.')
calls = self.app.calls_with_headers
self.assertEqual(2, len(calls))
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
@patch_policies([StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one', False, is_deprecated=True)])
def test_existing_container_has_deprecated_policy_unauthed(self):
self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '')
self.app.register('GET', '/v1/a/c', swob.HTTPOk, {
'x-backend-storage-policy-index': 1}, '')
def fake_authorize(req):
self.authorized.append(req)
return swob.HTTPForbidden()
req = Request.blank('/v1/a/c',
headers={'X-Versions-Enabled': 'true'},
environ={'REQUEST_METHOD': 'POST',
'swift.authorize': fake_authorize})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '403 Forbidden')
calls = self.app.calls_with_headers
self.assertEqual(2, len(calls))
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_same_policy_as_primary_container(self):
self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '')
self.app.register('GET', '/v1/a/c', swob.HTTPNotFound, {}, '')
self.app.register('PUT', self.build_versions_path(), swob.HTTPOk,
{}, '')
self.app.register('PUT', '/v1/a/c', swob.HTTPOk, {}, '')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Enabled': 'true',
'X-Storage-Policy': 'ec42'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
# check for sysmeta header
calls = self.app.calls_with_headers
self.assertEqual(4, len(calls))
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
# request to create versions container
method, path, headers = calls[2]
self.assertEqual('PUT', method)
self.assertEqual(self.build_versions_path(), path)
self.assertIn('X-Storage-Policy', headers)
self.assertEqual('ec42', headers['X-Storage-Policy'])
# request to enable versioning on primary container
method, path, headers = calls[3]
self.assertEqual('PUT', method)
self.assertEqual('/v1/a/c', path)
self.assertIn(SYSMETA_VERSIONS_CONT, headers)
self.assertEqual(headers[SYSMETA_VERSIONS_CONT],
wsgi_quote(self.str_to_wsgi(
self.build_container_name('c'))))
self.assertIn(SYSMETA_VERSIONS_ENABLED, headers)
self.assertEqual(headers[SYSMETA_VERSIONS_ENABLED], 'True')
self.assertIn('X-Storage-Policy', headers)
self.assertEqual('ec42', headers['X-Storage-Policy'])
def test_enable_versioning_failed_primary_container(self):
self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed')
self.app.register('GET', '/v1/a/c', swob.HTTPNotFound, {}, 'passed')
self.app.register('PUT', self.build_versions_path(),
swob.HTTPOk, {}, 'passed')
self.app.register('DELETE', self.build_versions_path(),
swob.HTTPNoContent, {}, '')
self.app.register('PUT', '/v1/a/c', swob.HTTPInternalServerError,
{}, '')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Enabled': 'true'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '500 Internal Error')
def test_enable_versioning_failed_versions_container(self):
self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '')
self.app.register('GET', '/v1/a/c', swob.HTTPNotFound, {}, '')
self.app.register('PUT', self.build_versions_path(),
swob.HTTPInternalServerError, {}, '')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Enabled': 'true'},
environ={'REQUEST_METHOD': 'PUT'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '500 Internal Error')
def test_enable_versioning_existing_container(self):
self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '')
self.app.register('HEAD', self.build_versions_path(),
swob.HTTPOk, {}, '')
self.app.register('PUT', self.build_versions_path(),
swob.HTTPAccepted, {}, '')
self.app.register(
'GET', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: False},
'passed')
self.app.register('POST', '/v1/a/c', swob.HTTPOk, {}, 'passed')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Enabled': 'true'},
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
# check for sysmeta header
calls = self.app.calls_with_headers
self.assertEqual(5, len(calls))
method, path, req_headers = calls[-1]
self.assertEqual('POST', method)
self.assertEqual('/v1/a/c', path)
self.assertIn(SYSMETA_VERSIONS_ENABLED, req_headers)
self.assertEqual(req_headers[SYSMETA_VERSIONS_ENABLED],
'True')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_put_container_with_legacy_versioning(self):
self.app.register('GET', '/v1/a', swob.HTTPOk, {}, '')
self.app.register(
'GET', '/v1/a/c', swob.HTTPOk,
{'x-container-sysmeta-versions-location': 'ver_cont'},
'')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Enabled': 'true'},
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '400 Bad Request')
def test_put_container_with_super_legacy_versioning(self):
# x-versions-location was used before versioned writes
# was pulled out to middleware
self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '')
self.app.register(
'HEAD', '/v1/a/c', swob.HTTPOk,
{'x-versions-location': 'ver_cont'},
'')
req = Request.blank('/v1/a/c',
headers={'X-Versions-Enabled': 'true'},
environ={'REQUEST_METHOD': 'POST'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '400 Bad Request')
def test_get_container(self):
self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed')
self.app.register(
'GET', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: True}, b'[]')
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'GET'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
self.assertIn(('X-Versions-Enabled', 'True'), headers)
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_get_reserved_container_passthrough(self):
self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed')
self.app.register('GET', '/v1/a/%s' % get_reserved_name('foo'),
swob.HTTPOk, {}, b'[]')
req = Request.blank('/v1/a/%s' % get_reserved_name('foo'))
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_head_container(self):
self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed')
self.app.register(
'HEAD', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: True}, None)
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': 'HEAD'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
self.assertIn(('X-Versions-Enabled', 'True'), headers)
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
def test_delete_container_success(self):
self.app.register(
'DELETE', '/v1/a/c', swob.HTTPNoContent, {}, '')
self.app.register(
'DELETE', self.build_versions_path(),
swob.HTTPNoContent, {}, '')
self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '')
self.app.register(
'HEAD', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: True}, '')
self.app.register(
'HEAD', self.build_versions_path(), swob.HTTPOk,
{'x-container-object-count': 0}, '')
req = Request.blank(
'/v1/a/c', environ={'REQUEST_METHOD': 'DELETE'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '204 No Content')
self.assertEqual(self.app.calls, [
('HEAD', '/v1/a'),
('HEAD', '/v1/a/c'),
('HEAD', self.build_versions_path()),
('HEAD', self.build_versions_path()), # get_container_info
('DELETE', self.build_versions_path()),
('DELETE', '/v1/a/c'),
])
def test_delete_container_fail_object_count(self):
self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '')
self.app.register(
'HEAD', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: False}, '')
self.app.register(
'HEAD',
self.build_versions_path(),
swob.HTTPOk,
{'x-container-object-count': 1}, '')
req = Request.blank(
'/v1/a/c', environ={'REQUEST_METHOD': 'DELETE'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '409 Conflict')
self.assertEqual(self.app.calls, [
('HEAD', '/v1/a'),
('HEAD', '/v1/a/c'),
('HEAD', self.build_versions_path()),
('HEAD', self.build_versions_path()), # get_container_info
])
def test_delete_container_fail_delete_versions_cont(self):
# N.B.: Notice lack of a call to DELETE /v1/a/c
# Since deleting versions container failed, swift should
# not delete primary container
self.app.register(
'DELETE', self.build_versions_path(),
swob.HTTPServerError, {}, '')
self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '')
self.app.register(
'HEAD', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: False}, '')
self.app.register(
'HEAD', self.build_versions_path(), swob.HTTPOk,
{'x-container-object-count': 0}, '')
req = Request.blank(
'/v1/a/c', environ={'REQUEST_METHOD': 'DELETE'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '500 Internal Error')
self.assertEqual(self.app.calls, [
('HEAD', '/v1/a'),
('HEAD', '/v1/a/c'),
('HEAD', self.build_versions_path()),
('HEAD', self.build_versions_path()), # get_container_info
('DELETE', self.build_versions_path()),
])
def test_get(self):
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk, {
'Content-Location': self.build_versions_path(
obj='o', version='9999998765.99999')},
'body')
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'GET',
'swift.cache': self.cache_version_on})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertIn(('X-Object-Version-Id', '0000001234.00000'), headers)
self.assertIn(
('Content-Location', '/v1/a/c/o?version-id=0000001234.00000'),
headers)
def test_get_symlink(self):
self.app.register(
'GET', '/v1/a/c/o?symlink=get', swob.HTTPOk, {
'X-Symlink-Target': '%s/%s' % (
self.build_container_name('c'),
self.build_object_name('o', '9999998765.99999'),
),
'X-Symlink-Target-Etag': 'versioned-obj-etag',
}, '')
req = Request.blank(
'/v1/a/c/o?symlink=get',
environ={'swift.cache': self.cache_version_on})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertIn(('X-Object-Version-Id', '0000001234.00000'), headers)
self.assertIn(
('X-Symlink-Target', 'c/o?version-id=0000001234.00000'),
headers)
self.assertEqual(body, b'')
# N.B. HEAD req already works with existing registered GET response
req = Request.blank(
'/v1/a/c/o?symlink=get', method='HEAD',
environ={'swift.cache': self.cache_version_on})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertIn(('X-Object-Version-Id', '0000001234.00000'), headers)
self.assertIn(
('X-Symlink-Target', 'c/o?version-id=0000001234.00000'),
headers)
self.assertEqual(body, b'')
def test_put_object_no_versioning(self):
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
cache = FakeMemcache()
cache.set(get_cache_key('a'), {'status': 200})
cache.set(get_cache_key('a', 'c'), {'status': 200})
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
'time.time', return_value=1234)
def test_PUT_overwrite(self, mock_time):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
SYSMETA_VERSIONS_SYMLINK: 'true',
TGT_OBJ_SYSMETA_SYMLINK_HDR: 'c-unique/whatever'}, '')
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999998765.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
put_body = 'stuff' * 100
req = Request.blank(
'/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain',
'ETag': md5(
put_body.encode('utf8'),
usedforsecurity=False).hexdigest(),
'Content-Length': len(put_body)},
environ={'swift.cache': self.cache_version_on,
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 2)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(['OV', 'OV', 'OV'], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
self.assertEqual(self.app.calls, [
('GET', '/v1/a/c/o?symlink=get'),
('PUT', self.build_versions_path(
obj='o', version='9999998765.99999')),
('PUT', '/v1/a/c/o'),
])
calls = self.app.calls_with_headers
self.assertIn('X-Newest', calls[0].headers)
self.assertEqual('True', calls[0].headers['X-Newest'])
symlink_expected_headers = {
SYMLOOP_EXTEND: 'true',
ALLOW_RESERVED_NAMES: 'true',
TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
}
symlink_put_headers = self.app._calls[-1].headers
for k, v in symlink_expected_headers.items():
self.assertEqual(symlink_put_headers[k], v)
def test_POST(self):
self.app.register(
'POST',
self.build_versions_path(obj='o', version='9999998765.99999'),
swob.HTTPAccepted, {}, '')
self.app.register(
'POST', '/v1/a/c/o', swob.HTTPTemporaryRedirect, {
SYSMETA_VERSIONS_SYMLINK: 'true',
'Location': self.build_versions_path(
obj='o', version='9999998765.99999')}, '')
# TODO: in symlink middleware, swift.leave_relative_location
# is added by the middleware during the response
# adding to the client request here, need to understand how
# to modify the response environ.
req = Request.blank(
'/v1/a/c/o', method='POST',
headers={'Content-Type': 'text/jibberish01',
'X-Object-Meta-Foo': 'bar'},
environ={'swift.cache': self.cache_version_on,
'swift.leave_relative_location': 'true',
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '202 Accepted')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual([None, 'OV'], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
self.assertEqual(self.app.calls, [
('POST', '/v1/a/c/o'),
('POST', self.build_versions_path(
obj='o', version='9999998765.99999')),
])
expected_hdrs = {
'content-type': 'text/jibberish01',
'x-object-meta-foo': 'bar',
}
version_obj_post_headers = self.app._calls[1].headers
for k, v in expected_hdrs.items():
self.assertEqual(version_obj_post_headers[k], v)
def test_POST_mismatched_location(self):
# This is a defensive chech, ideally a mistmached
# versions container should never happen.
self.app.register(
'POST', '/v1/a/c/o', swob.HTTPTemporaryRedirect, {
SYSMETA_VERSIONS_SYMLINK: 'true',
'Location': self.build_versions_path(
cont='mismatched', obj='o', version='9999998765.99999')},
'')
# TODO: in symlink middleware, swift.leave_relative_location
# is added by the middleware during the response
# adding to the client request here, need to understand how
# to modify the response environ.
req = Request.blank(
'/v1/a/c/o', method='POST',
headers={'Content-Type': 'text/jibberish01',
'X-Object-Meta-Foo': 'bar'},
environ={'swift.cache': self.cache_version_on,
'swift.leave_relative_location': 'true',
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '307 Temporary Redirect')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual([None], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
self.assertEqual(self.app.calls, [
('POST', '/v1/a/c/o'),
])
def test_POST_regular_symlink(self):
self.app.register(
'POST', '/v1/a/c/o', swob.HTTPTemporaryRedirect, {
'Location': '/v1/a/t/o'}, '')
# TODO: in symlink middleware, swift.leave_relative_location
# is added by the middleware during the response
# adding to the client request here, need to understand how
# to modify the response environ.
req = Request.blank(
'/v1/a/c/o', method='POST',
headers={'Content-Type': 'text/jibberish01',
'X-Object-Meta-Foo': 'bar'},
environ={'swift.cache': self.cache_version_on,
'swift.leave_relative_location': 'true',
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '307 Temporary Redirect')
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual([None], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
self.assertEqual(self.app.calls, [
('POST', '/v1/a/c/o'),
])
def test_denied_PUT_of_versioned_object(self):
authorize_call = []
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:00:01 GMT'}, 'passed')
def fake_authorize(req):
# we should deny the object PUT
authorize_call.append(req)
return swob.HTTPForbidden()
req = Request.blank(
'/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'swift.cache': self.cache_version_on,
'swift.authorize': fake_authorize,
'CONTENT_LENGTH': '0'})
# Save off a copy, as the middleware may modify the original
expected_req = Request(req.environ.copy())
status, headers, body = self.call_ov(req)
self.assertEqual(status, '403 Forbidden')
self.assertEqual(len(authorize_call), 1)
self.assertRequestEqual(expected_req, authorize_call[0])
self.assertEqual(self.app.calls, [])
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
'time.time', return_value=1234)
def test_PUT_overwrite_tombstone(self, mock_time):
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999998765.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
put_body = 'stuff' * 100
req = Request.blank(
'/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain',
'ETag': md5(
put_body.encode('utf8'),
usedforsecurity=False).hexdigest(),
'Content-Length': len(put_body)},
environ={'swift.cache': self.cache_version_on,
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '201 Created')
# authorized twice because of pre-flight check on PUT
self.assertEqual(len(self.authorized), 2)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(['OV', 'OV', 'OV'], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
self.assertEqual(self.app.calls, [
('GET', '/v1/a/c/o?symlink=get'),
('PUT', self.build_versions_path(
obj='o', version='9999998765.99999')),
('PUT', '/v1/a/c/o'),
])
calls = self.app.calls_with_headers
self.assertIn('X-Newest', calls[0].headers)
self.assertEqual('True', calls[0].headers['X-Newest'])
expected_headers = {
TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
}
symlink_put_headers = self.app._calls[-1].headers
for k, v in expected_headers.items():
self.assertEqual(symlink_put_headers[k], v)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
'time.time', return_value=1234)
def test_PUT_overwrite_object_with_DLO(self, mock_time):
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'old version')
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999999939.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999998765.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
put_body = ''
req = Request.blank('/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain',
'X-Object-Manifest': 'req/manifest'},
environ={'swift.cache': self.cache_version_on,
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 2)
self.assertEqual(4, self.app.call_count)
self.assertEqual(['OV', 'OV', 'OV', 'OV'], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
self.assertEqual([
('GET', '/v1/a/c/o?symlink=get'),
('PUT',
self.build_versions_path(obj='o', version='9999999939.99999')),
('PUT',
self.build_versions_path(obj='o', version='9999998765.99999')),
('PUT', '/v1/a/c/o'),
], self.app.calls)
calls = self.app.calls_with_headers
self.assertIn('X-Newest', calls[0].headers)
self.assertEqual('True', calls[0].headers['X-Newest'])
self.assertNotIn('x-object-manifest', calls[1].headers)
self.assertEqual('req/manifest',
calls[-2].headers['X-Object-Manifest'])
symlink_put_headers = calls[-1].headers
expected_headers = {
TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
}
for k, v in expected_headers.items():
self.assertEqual(symlink_put_headers[k], v)
self.assertNotIn('x-object-manifest', symlink_put_headers)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
'time.time', return_value=1234)
def test_PUT_overwrite_DLO_with_object(self, mock_time):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
{'X-Object-Manifest': 'resp/manifest',
'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'},
'passed')
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999999939.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999998765.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
put_body = 'stuff' * 100
req = Request.blank('/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain'},
environ={'swift.cache': self.cache_version_on,
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 2)
self.assertEqual(4, self.app.call_count)
self.assertEqual(['OV', 'OV', 'OV', 'OV'], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
self.assertEqual([
('GET', '/v1/a/c/o?symlink=get'),
('PUT',
self.build_versions_path(obj='o', version='9999999939.99999')),
('PUT',
self.build_versions_path(obj='o', version='9999998765.99999')),
('PUT', '/v1/a/c/o'),
], self.app.calls)
calls = self.app.calls_with_headers
self.assertIn('X-Newest', calls[0].headers)
self.assertEqual('True', calls[0].headers['X-Newest'])
self.assertEqual('resp/manifest',
calls[1].headers['X-Object-Manifest'])
self.assertNotIn(TGT_OBJ_SYSMETA_SYMLINK_HDR,
calls[1].headers)
self.assertNotIn('x-object-manifest', calls[2].headers)
self.assertNotIn(TGT_OBJ_SYSMETA_SYMLINK_HDR,
calls[2].headers)
symlink_put_headers = calls[-1].headers
expected_headers = {
TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
}
for k, v in expected_headers.items():
self.assertEqual(symlink_put_headers[k], v)
self.assertNotIn('x-object-manifest', symlink_put_headers)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
'time.time', return_value=1234)
def test_PUT_overwrite_SLO_with_object(self, mock_time):
self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
'X-Static-Large-Object': 'True',
# N.B. object-sever strips swift_bytes
'Content-Type': 'application/octet-stream',
'X-Object-Sysmeta-Container-Update-Override-Etag':
'656516af0f7474b857857dd2a327f3b9; '
'slo_etag=71e938d37c1d06dc634dd24660255a88',
'X-Object-Sysmeta-Slo-Etag': '71e938d37c1d06dc634dd24660255a88',
'X-Object-Sysmeta-Slo-Size': '10485760',
'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT',
}, 'passed')
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999999939.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999998765.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
put_body = 'stuff' * 100
req = Request.blank('/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain'},
environ={'swift.cache': self.cache_version_on,
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '201 Created')
self.assertEqual(len(self.authorized), 2)
self.assertEqual(4, self.app.call_count)
self.assertEqual(['OV', 'OV', 'OV', 'OV'], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
self.assertEqual([
('GET', '/v1/a/c/o?symlink=get'),
('PUT',
self.build_versions_path(obj='o', version='9999999939.99999')),
('PUT',
self.build_versions_path(obj='o', version='9999998765.99999')),
('PUT', '/v1/a/c/o'),
], self.app.calls)
calls = self.app.calls_with_headers
self.assertIn('X-Newest', calls[0].headers)
self.assertEqual('True', calls[0].headers['X-Newest'])
slo_headers = {
'X-Static-Large-Object': 'True',
'Content-Type': 'application/octet-stream; swift_bytes=10485760',
'X-Object-Sysmeta-Container-Update-Override-Etag':
'656516af0f7474b857857dd2a327f3b9; '
'slo_etag=71e938d37c1d06dc634dd24660255a88',
'X-Object-Sysmeta-Slo-Etag': '71e938d37c1d06dc634dd24660255a88',
'X-Object-Sysmeta-Slo-Size': '10485760',
}
archive_put = calls[1]
for key, value in slo_headers.items():
self.assertEqual(archive_put.headers[key], value)
client_put = calls[2]
for key in slo_headers:
if key == 'Content-Type':
self.assertEqual('text/plain', client_put.headers[key])
else:
self.assertNotIn(key, client_put.headers)
symlink_put_headers = calls[-1].headers
expected_headers = {
TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
}
for k, v in expected_headers.items():
self.assertEqual(symlink_put_headers[k], v)
self.assertNotIn('x-object-manifest', symlink_put_headers)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
'time.time', return_value=1234)
def test_PUT_overwrite_object(self, mock_time):
self.app.register(
'GET', '/v1/a/c/o', swob.HTTPOk,
{'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'passed')
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999999939.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT',
self.build_versions_path(obj='o', version='9999998765.99999'),
swob.HTTPCreated, {}, 'passed')
self.app.register(
'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
put_body = 'stuff' * 100
req = Request.blank(
'/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain',
'ETag': md5(
put_body.encode('utf8'),
usedforsecurity=False).hexdigest(),
'Content-Length': len(put_body)},
environ={'swift.cache': self.cache_version_on,
'swift.trans_id': 'fake_trans_id'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '201 Created')
# authorized twice because of pre-flight check on PUT
self.assertEqual(len(self.authorized), 2)
self.assertRequestEqual(req, self.authorized[0])
self.assertEqual(['OV', 'OV', 'OV', 'OV'], self.app.swift_sources)
self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
self.assertEqual(self.app.calls, [
('GET', '/v1/a/c/o?symlink=get'),
('PUT',
self.build_versions_path(obj='o', version='9999999939.99999')),
('PUT',
self.build_versions_path(obj='o', version='9999998765.99999')),
('PUT', '/v1/a/c/o'),
])
calls = self.app.calls_with_headers
self.assertIn('X-Newest', calls[0].headers)
self.assertEqual('True', calls[0].headers['X-Newest'])
expected_headers = {
TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
}
symlink_put_headers = self.app._calls[-1].headers
for k, v in expected_headers.items():
self.assertEqual(symlink_put_headers[k], v)
def test_new_version_get_errors(self):
# GET on source fails, expect client error response,
# no PUT should happen
self.app.register('GET', '/v1/a/c/o',
swob.HTTPBadRequest, {}, None)
req = Request.blank('/v1/a/c/o', method='PUT',
environ={'swift.cache': self.cache_version_on,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '400 Bad Request')
self.assertEqual(1, self.app.call_count)
# GET on source fails, expect server error response
self.app.register('GET', '/v1/a/c/o',
swob.HTTPBadGateway, {}, None)
req = Request.blank('/v1/a/c/o', method='PUT',
environ={'swift.cache': self.cache_version_on,
'CONTENT_LENGTH': '100'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '503 Service Unavailable')
self.assertEqual(2, self.app.call_count)
def test_new_version_put_errors(self):
    """Errors on the PUT of the archived version abort the client PUT."""
    # PUT of version fails, expect client error response
    self.app.register(
        'GET', '/v1/a/c/o', swob.HTTPOk,
        {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'passed')
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999999939.99999'),
        swob.HTTPUnauthorized, {}, None)
    req = Request.blank('/v1/a/c/o', method='PUT',
                        environ={'swift.cache': self.cache_version_on,
                                 'CONTENT_LENGTH': '100'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '401 Unauthorized')
    # GET + failed version PUT; client PUT never happens
    self.assertEqual(2, self.app.call_count)
    # PUT of version fails, expect server error response
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999999939.99999'),
        swob.HTTPBadGateway, {}, None)
    req = Request.blank(
        '/v1/a/c/o', headers={'Content-Type': 'text/plain'},
        environ={'REQUEST_METHOD': 'PUT',
                 'swift.cache': self.cache_version_on,
                 'CONTENT_LENGTH': '100'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '503 Service Unavailable')
    self.assertEqual(4, self.app.call_count)
    # PUT fails because the reserved container is missing; server error
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999999939.99999'),
        swob.HTTPNotFound, {}, None)
    req = Request.blank(
        '/v1/a/c/o', headers={'Content-Type': 'text/plain'},
        environ={'REQUEST_METHOD': 'PUT',
                 'swift.cache': self.cache_version_on_but_busted,
                 'CONTENT_LENGTH': '100'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '500 Internal Error')
    # the error body explains how the operator can recover
    self.assertIn(b'container does not exist', body)
    self.assertIn(b're-enable object versioning', body)
class ObjectVersioningTestDisabled(ObjectVersioningBaseTestCase):
def test_get_container(self):
    """Container GET reports X-Versions-Enabled even when disabled."""
    self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed')
    self.app.register(
        'GET', '/v1/a/c', swob.HTTPOk,
        {SYSMETA_VERSIONS_CONT: 'c\x01versions',
         SYSMETA_VERSIONS_ENABLED: False}, b'[]')
    req = Request.blank(
        '/v1/a/c',
        environ={'REQUEST_METHOD': 'GET'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
    # sysmeta flag is translated into a client-visible header
    self.assertIn(('X-Versions-Enabled', 'False'), headers)
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
def test_head_container(self):
    """Container HEAD reports X-Versions-Enabled even when disabled."""
    self.app.register('GET', '/v1/a', swob.HTTPOk, {}, 'passed')
    self.app.register(
        'HEAD', '/v1/a/c', swob.HTTPOk,
        {SYSMETA_VERSIONS_CONT: 'c\x01versions',
         SYSMETA_VERSIONS_ENABLED: False}, None)
    req = Request.blank(
        '/v1/a/c',
        environ={'REQUEST_METHOD': 'HEAD'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
    self.assertIn(('X-Versions-Enabled', 'False'), headers)
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
def test_disable_versioning(self):
    """POSTing X-Versions-Enabled: false on the container succeeds."""
    self.app.register('POST', '/v1/a/c', swob.HTTPOk, {}, 'passed')
    req = Request.blank('/v1/a/c',
                        headers={'X-Versions-Enabled': 'false'},
                        environ={'REQUEST_METHOD': 'POST',
                                 'swift.cache': self.cache_version_on})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_PUT_overwrite_null_marker_versioning_disabled(self, mock_time):
    # During object PUT with versioning disabled, if the most
    # recent versioned object is a DELETE marker with a *null*
    # version-id, then the DELETE marker should be removed.
    # NOTE(review): the registered listing GET / HEAD / DELETE below are
    # not present in the asserted call list, which only shows the client
    # PUT passing straight through -- confirm whether the marker-removal
    # path is expected to trigger here.
    listing_body = [{
        "hash": "y",
        "last_modified": "2014-11-21T14:23:02.206740",
        "bytes": 0,
        "name": self.build_object_name('o', '0000000001.00000'),
        "content_type": "application/x-deleted;swift_versions_deleted=1"
    }, {
        "hash": "x",
        "last_modified": "2014-11-21T14:14:27.409100",
        "bytes": 3,
        "name": self.build_object_name('o', '0000000002.00000'),
        "content_type": "text/plain"
    }]
    prefix_listing_path = \
        '/v1/a/c\x01versions?prefix=o---&marker='
    self.app.register(
        'GET', prefix_listing_path, swob.HTTPOk, {},
        json.dumps(listing_body).encode('utf8'))
    self.app.register(
        'HEAD',
        self.build_versions_path(obj='o', version='0000000001.00000'),
        swob.HTTPNoContent,
        {'content-type': DELETE_MARKER_CONTENT_TYPE}, None)
    self.app.register(
        'DELETE',
        self.build_versions_path(obj='o', version='0000000001.00000'),
        swob.HTTPNoContent, {}, None)
    self.app.register(
        'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
    put_body = 'stuff' * 100
    req = Request.blank(
        '/v1/a/c/o', method='PUT', body=put_body,
        headers={'Content-Type': 'text/plain',
                 'ETag': md5(
                     put_body.encode('utf8'),
                     usedforsecurity=False).hexdigest(),
                 'Content-Length': len(put_body)},
        environ={'swift.cache': self.cache_version_off,
                 'swift.trans_id': 'fake_trans_id'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '201 Created')
    # authorized only once; no pre-flight check when versioning is off
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
    # TODO self.assertEqual(['OV', None, 'OV'], self.app.swift_sources)
    self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
    self.assertEqual(self.app.calls, [
        ('PUT', '/v1/a/c/o'),
    ])
    # no symlink sysmeta on the user-visible object when versioning is off
    obj_put_headers = self.app.calls_with_headers[-1].headers
    self.assertNotIn(SYSMETA_VERSIONS_SYMLINK, obj_put_headers)
def test_put_object_versioning_disabled(self):
    """With versioning disabled a PUT passes straight through."""
    listing_body = [{
        "hash": "x",
        "last_modified": "2014-11-21T14:14:27.409100",
        "bytes": 3,
        "name": self.build_object_name('o', '0000000001.00000'),
        "content_type": "text/plain"
    }]
    prefix_listing_path = \
        '/v1/a/c\x01versions?prefix=o---&marker='
    self.app.register(
        'GET', prefix_listing_path, swob.HTTPOk, {},
        json.dumps(listing_body).encode('utf8'))
    self.app.register(
        'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
    req = Request.blank(
        '/v1/a/c/o',
        environ={'REQUEST_METHOD': 'PUT',
                 'swift.cache': self.cache_version_off,
                 'CONTENT_LENGTH': '100'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
    # only the client PUT reaches the backend; no version archived
    self.assertEqual(self.app.calls, [
        ('PUT', '/v1/a/c/o'),
    ])
    obj_put_headers = self.app._calls[-1].headers
    self.assertNotIn(SYSMETA_VERSIONS_SYMLINK, obj_put_headers)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_PUT_with_recent_versioned_marker_versioning_disabled(self,
                                                              mock_time):
    # During object PUT with versioning disabled, if the most
    # recent versioned object is a DELETE marker with a non-null
    # version-id, then the DELETE marker should not be removed.
    listing_body = [{
        "hash": "y",
        "last_modified": "2014-11-21T14:23:02.206740",
        "bytes": 0,
        "name": self.build_object_name('o', '0000000001.00000'),
        "content_type": "application/x-deleted;swift_versions_deleted=1"
    }, {
        "hash": "x",
        "last_modified": "2014-11-21T14:14:27.409100",
        "bytes": 3,
        "name": self.build_object_name('o', '0000000002.00000'),
        "content_type": "text/plain"
    }]
    prefix_listing_path = \
        '/v1/a/c\x01versions?prefix=o---&marker='
    self.app.register(
        'GET', prefix_listing_path, swob.HTTPOk, {},
        json.dumps(listing_body).encode('utf8'))
    self.app.register(
        'HEAD',
        self.build_versions_path(obj='o', version='0000000001.00000'),
        swob.HTTPNoContent,
        {'content-type': DELETE_MARKER_CONTENT_TYPE}, None)
    self.app.register(
        'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
    put_body = 'stuff' * 100
    req = Request.blank(
        '/v1/a/c/o', method='PUT', body=put_body,
        headers={'Content-Type': 'text/plain',
                 'ETag': md5(
                     put_body.encode('utf8'),
                     usedforsecurity=False).hexdigest(),
                 'Content-Length': len(put_body)},
        environ={'swift.cache': self.cache_version_off,
                 'swift.trans_id': 'fake_trans_id'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '201 Created')
    # authorized only once; no pre-flight check when versioning is off
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
    # TODO self.assertEqual(['OV', None, 'OV'], self.app.swift_sources)
    self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
    # no DELETE issued for the marker: it has a real version-id
    self.assertEqual(self.app.calls, [
        ('PUT', '/v1/a/c/o'),
    ])
    obj_put_headers = self.app.calls_with_headers[-1].headers
    self.assertNotIn(SYSMETA_VERSIONS_SYMLINK, obj_put_headers)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_delete_object_with_versioning_disabled(self, mock_time):
    # When versioning is disabled, swift will simply issue the
    # original request to the versioned container
    self.app.register(
        'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, 'passed')
    req = Request.blank(
        '/v1/a/c/o',
        environ={'REQUEST_METHOD': 'DELETE',
                 'swift.cache': self.cache_version_off})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '204 No Content')
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
def test_POST_symlink(self):
    """POST to a version symlink is redirected to the versioned object."""
    self.app.register(
        'POST',
        self.build_versions_path(obj='o', version='9999998765.99999'),
        swob.HTTPAccepted, {}, '')
    # the user-visible object answers with a 307 pointing at the version
    self.app.register(
        'POST', '/v1/a/c/o', swob.HTTPTemporaryRedirect, {
            SYSMETA_VERSIONS_SYMLINK: 'true',
            'Location': self.build_versions_path(
                obj='o', version='9999998765.99999')}, '')
    # TODO: in symlink middleware, swift.leave_relative_location
    # is added by the middleware during the response
    # adding to the client request here, need to understand how
    # to modify the response environ.
    req = Request.blank(
        '/v1/a/c/o', method='POST',
        headers={'Content-Type': 'text/jibberish01',
                 'X-Object-Meta-Foo': 'bar'},
        environ={'swift.cache': self.cache_version_off,
                 'swift.leave_relative_location': 'true',
                 'swift.trans_id': 'fake_trans_id'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '202 Accepted')
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
    self.assertEqual([None, 'OV'], self.app.swift_sources)
    self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
    # middleware follows the redirect with a second POST
    self.assertEqual(self.app.calls, [
        ('POST', '/v1/a/c/o'),
        ('POST',
         self.build_versions_path(obj='o', version='9999998765.99999')),
    ])
    expected_hdrs = {
        'content-type': 'text/jibberish01',
        'x-object-meta-foo': 'bar',
    }
    version_obj_post_headers = self.app._calls[1].headers
    for k, v in expected_hdrs.items():
        self.assertEqual(version_obj_post_headers[k], v)
def test_POST_unversioned_obj(self):
    """POST to a plain (non-symlink) object passes straight through."""
    self.app.register(
        'POST', '/v1/a/c/o', swob.HTTPAccepted, {}, '')
    # TODO: in symlink middleware, swift.leave_relative_location
    # is added by the middleware during the response
    # adding to the client request here, need to understand how
    # to modify the response environ.
    req = Request.blank(
        '/v1/a/c/o', method='POST',
        headers={'Content-Type': 'text/jibberish01',
                 'X-Object-Meta-Foo': 'bar'},
        environ={'swift.cache': self.cache_version_off,
                 'swift.trans_id': 'fake_trans_id'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '202 Accepted')
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
    self.assertEqual([None], self.app.swift_sources)
    self.assertEqual({'fake_trans_id'}, set(self.app.txn_ids))
    self.assertEqual(self.app.calls, [
        ('POST', '/v1/a/c/o'),
    ])
    expected_hdrs = {
        'content-type': 'text/jibberish01',
        'x-object-meta-foo': 'bar',
    }
    version_obj_post_headers = self.app._calls[0].headers
    for k, v in expected_hdrs.items():
        self.assertEqual(version_obj_post_headers[k], v)
class ObjectVersioningTestDelete(ObjectVersioningBaseTestCase):
def test_delete_object_with_versioning_never_enabled(self):
    # should be a straight DELETE, versioning middleware
    # does not get involved.
    self.app.register(
        'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, 'passed')
    # container has no versioning sysmeta at all in cache
    cache = FakeMemcache()
    cache.set(get_cache_key('a'), {'status': 200})
    cache.set(get_cache_key('a', 'c'), {'status': 200})
    req = Request.blank(
        '/v1/a/c/o',
        environ={'REQUEST_METHOD': 'DELETE',
                 'swift.cache': cache})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '204 No Content')
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
    # no marker PUT and no pre-flight GET were issued
    called_method = \
        [method for (method, path, rheaders) in self.app._calls]
    self.assertNotIn('PUT', called_method)
    self.assertNotIn('GET', called_method)
    self.assertEqual(1, self.app.call_count)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_put_delete_marker_no_object_success(self, mock_time):
    """DELETE of a missing object still writes a delete marker."""
    self.app.register(
        'GET', '/v1/a/c/o', swob.HTTPNotFound,
        {}, 'passed')
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999998765.99999'),
        swob.HTTPCreated, {}, 'passed')
    self.app.register(
        'DELETE', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
    req = Request.blank(
        '/v1/a/c/o',
        environ={'REQUEST_METHOD': 'DELETE',
                 'swift.cache': self.cache_version_on,
                 'CONTENT_LENGTH': '0'})
    status, headers, body = self.call_ov(req)
    # client still sees 404 because the object never existed
    self.assertEqual(status, '404 Not Found')
    self.assertEqual(len(self.authorized), 2)
    # second authorize happens for the marker PUT
    req.environ['REQUEST_METHOD'] = 'PUT'
    self.assertRequestEqual(req, self.authorized[0])
    calls = self.app.calls_with_headers
    self.assertEqual(['GET', 'PUT', 'DELETE'], [c.method for c in calls])
    self.assertEqual('application/x-deleted;swift_versions_deleted=1',
                     calls[1].headers.get('Content-Type'))
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_delete_marker_over_object_success(self, mock_time):
    """DELETE archives the current object, then writes a delete marker."""
    self.app.register(
        'GET', '/v1/a/c/o', swob.HTTPOk,
        {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'passed')
    # archive PUT keyed off the object's last-modified time
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999999939.99999'),
        swob.HTTPCreated, {}, 'passed')
    # delete-marker PUT keyed off the mocked current time
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999998765.99999'),
        swob.HTTPCreated, {}, 'passed')
    self.app.register(
        'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, None)
    req = Request.blank(
        '/v1/a/c/o',
        environ={'REQUEST_METHOD': 'DELETE',
                 'swift.cache': self.cache_version_on,
                 'CONTENT_LENGTH': '0'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '204 No Content')
    self.assertEqual(b'', body)
    self.assertEqual(len(self.authorized), 2)
    req.environ['REQUEST_METHOD'] = 'PUT'
    self.assertRequestEqual(req, self.authorized[0])
    calls = self.app.calls_with_headers
    self.assertEqual(['GET', 'PUT', 'PUT', 'DELETE'],
                     [c.method for c in calls])
    self.assertEqual(
        self.build_versions_path(obj='o', version='9999999939.99999'),
        calls[1].path)
    self.assertEqual('application/x-deleted;swift_versions_deleted=1',
                     calls[2].headers.get('Content-Type'))
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_delete_marker_over_versioned_object_success(self, mock_time):
    """DELETE over a symlink skips the archive PUT (already versioned)."""
    self.app.register('GET', '/v1/a/c/o', swob.HTTPOk,
                      {SYSMETA_VERSIONS_SYMLINK: 'true'}, 'passed')
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999998765.99999'),
        swob.HTTPCreated, {}, 'passed')
    self.app.register(
        'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, None)
    req = Request.blank(
        '/v1/a/c/o',
        environ={'REQUEST_METHOD': 'DELETE',
                 'swift.cache': self.cache_version_on,
                 'CONTENT_LENGTH': '0'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '204 No Content')
    self.assertEqual(b'', body)
    self.assertEqual(len(self.authorized), 2)
    req.environ['REQUEST_METHOD'] = 'PUT'
    self.assertRequestEqual(req, self.authorized[0])
    # only one PUT: the delete marker; no re-archive of the symlink
    calls = self.app.calls_with_headers
    self.assertEqual(['GET', 'PUT', 'DELETE'],
                     [c.method for c in calls])
    self.assertEqual(
        self.build_versions_path(obj='o', version='9999998765.99999'),
        calls[1].path)
    self.assertEqual('application/x-deleted;swift_versions_deleted=1',
                     calls[1].headers.get('Content-Type'))
def test_denied_DELETE_of_versioned_object(self):
    """A 403 from authorize short-circuits before any backend call."""
    authorize_call = []

    def fake_authorize(req):
        # record what was authorized, then deny
        authorize_call.append((req.method, req.path))
        return swob.HTTPForbidden()

    req = Request.blank('/v1/a/c/o', method='DELETE', body='',
                        headers={'X-If-Delete-At': 1},
                        environ={'swift.cache': self.cache_version_on,
                                 'swift.authorize': fake_authorize,
                                 'swift.trans_id': 'fake_trans_id'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '403 Forbidden')
    self.assertEqual(len(authorize_call), 1)
    self.assertEqual(('DELETE', '/v1/a/c/o'), authorize_call[0])
class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase):
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_COPY_overwrite_tombstone(self, mock_time):
    """COPY onto a missing destination versions the copied data."""
    self.cache_version_on.set(get_cache_key('a', 'src_cont'),
                              {'status': 200})
    src_body = 'stuff' * 100
    # destination does not exist yet
    self.app.register(
        'GET', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
    self.app.register(
        'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, src_body)
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999998765.99999'),
        swob.HTTPCreated, {}, 'passed')
    self.app.register(
        'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
    req = Request.blank(
        '/v1/a/src_cont/src_obj',
        environ={'REQUEST_METHOD': 'COPY',
                 'swift.cache': self.cache_version_on,
                 'CONTENT_LENGTH': '100'},
        headers={'Destination': 'c/o'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '201 Created')
    self.assertEqual(len(self.authorized), 3)
    # data lands in the versions container; user object is a symlink
    self.assertEqual(self.app.calls, [
        ('GET', '/v1/a/src_cont/src_obj'),
        ('GET', '/v1/a/c/o?symlink=get'),
        ('PUT',
         self.build_versions_path(obj='o', version='9999998765.99999')),
        ('PUT', '/v1/a/c/o'),
    ])
    expected_headers = {
        TGT_OBJ_SYSMETA_SYMLINK_HDR:
        self.build_symlink_path('c', 'o', '9999998765.99999'),
        'x-object-sysmeta-symlink-target-etag': md5(
            src_body.encode('utf8'), usedforsecurity=False).hexdigest(),
        'x-object-sysmeta-symlink-target-bytes': str(len(src_body)),
    }
    symlink_put_headers = self.app._calls[-1].headers
    for k, v in expected_headers.items():
        self.assertEqual(symlink_put_headers[k], v)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_COPY_overwrite_object(self, mock_time):
    """COPY over an existing plain object archives the old one first."""
    self.cache_version_on.set(get_cache_key('a', 'src_cont'),
                              {'status': 200})
    src_body = 'stuff' * 100
    self.app.register(
        'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, src_body)
    self.app.register(
        'GET', '/v1/a/c/o', swob.HTTPOk,
        {'last-modified': 'Thu, 1 Jan 1970 00:01:00 GMT'}, 'old object')
    # archive of the old object, keyed off its last-modified
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999999939.99999'),
        swob.HTTPCreated, {}, 'passed')
    # new version, keyed off the mocked current time
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999998765.99999'),
        swob.HTTPCreated, {}, 'passed')
    self.app.register(
        'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
    req = Request.blank(
        '/v1/a/src_cont/src_obj',
        environ={'REQUEST_METHOD': 'COPY',
                 'swift.cache': self.cache_version_on,
                 'CONTENT_LENGTH': '100'},
        headers={'Destination': 'c/o'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '201 Created')
    self.assertEqual(len(self.authorized), 3)
    self.assertEqual(self.app.calls, [
        ('GET', '/v1/a/src_cont/src_obj'),
        ('GET', '/v1/a/c/o?symlink=get'),
        ('PUT',
         self.build_versions_path(obj='o', version='9999999939.99999')),
        ('PUT',
         self.build_versions_path(obj='o', version='9999998765.99999')),
        ('PUT', '/v1/a/c/o'),
    ])
    expected_headers = {
        TGT_OBJ_SYSMETA_SYMLINK_HDR:
        self.build_symlink_path('c', 'o', '9999998765.99999'),
        'x-object-sysmeta-symlink-target-etag': md5(
            src_body.encode('utf8'), usedforsecurity=False).hexdigest(),
        'x-object-sysmeta-symlink-target-bytes': str(len(src_body)),
    }
    symlink_put_headers = self.app._calls[-1].headers
    for k, v in expected_headers.items():
        self.assertEqual(symlink_put_headers[k], v)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_COPY_overwrite_version_symlink(self, mock_time):
    """COPY over an existing version symlink skips re-archiving."""
    self.cache_version_on.set(get_cache_key('a', 'src_cont'),
                              {'status': 200})
    src_body = 'stuff' * 100
    self.app.register(
        'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, src_body)
    # destination is already a symlink into the versions container
    self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
        SYSMETA_VERSIONS_SYMLINK: 'true',
        TGT_OBJ_SYSMETA_SYMLINK_HDR: 'c-unique/whatever'}, '')
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999998765.99999'),
        swob.HTTPCreated, {}, 'passed')
    self.app.register(
        'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
    req = Request.blank(
        '/v1/a/src_cont/src_obj',
        environ={'REQUEST_METHOD': 'COPY',
                 'swift.cache': self.cache_version_on,
                 'CONTENT_LENGTH': '100'},
        headers={'Destination': 'c/o'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '201 Created')
    self.assertEqual(len(self.authorized), 3)
    self.assertEqual(self.app.calls, [
        ('GET', '/v1/a/src_cont/src_obj'),
        ('GET', '/v1/a/c/o?symlink=get'),
        ('PUT',
         self.build_versions_path(obj='o', version='9999998765.99999')),
        ('PUT', '/v1/a/c/o'),
    ])
    expected_headers = {
        TGT_OBJ_SYSMETA_SYMLINK_HDR:
        self.build_symlink_path('c', 'o', '9999998765.99999'),
        'x-object-sysmeta-symlink-target-etag': md5(
            src_body.encode('utf8'), usedforsecurity=False).hexdigest(),
        'x-object-sysmeta-symlink-target-bytes': str(len(src_body)),
    }
    symlink_put_headers = self.app._calls[-1].headers
    for k, v in expected_headers.items():
        self.assertEqual(symlink_put_headers[k], v)
@mock.patch('swift.common.middleware.versioned_writes.object_versioning.'
            'time.time', return_value=1234)
def test_copy_new_version_different_account(self, mock_time):
    """Cross-account COPY versions the object in the destination account."""
    self.cache_version_on.set(get_cache_key('src_acc'),
                              {'status': 200})
    self.cache_version_on.set(get_cache_key('src_acc', 'src_cont'),
                              {'status': 200})
    src_body = 'stuff' * 100
    self.app.register(
        'GET', '/v1/src_acc/src_cont/src_obj', swob.HTTPOk, {}, src_body)
    self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {
        SYSMETA_VERSIONS_SYMLINK: 'true',
        TGT_OBJ_SYSMETA_SYMLINK_HDR: 'c-unique/whatever'}, '')
    self.app.register(
        'PUT',
        self.build_versions_path(obj='o', version='9999998765.99999'),
        swob.HTTPCreated, {}, 'passed')
    self.app.register(
        'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
    req = Request.blank(
        '/v1/src_acc/src_cont/src_obj',
        environ={'REQUEST_METHOD': 'COPY',
                 'swift.cache': self.cache_version_on,
                 'CONTENT_LENGTH': '100'},
        headers={'Destination': 'c/o',
                 'Destination-Account': 'a'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '201 Created')
    self.assertEqual(len(self.authorized), 3)
    self.assertEqual(self.app.calls, [
        ('GET', '/v1/src_acc/src_cont/src_obj'),
        ('GET', '/v1/a/c/o?symlink=get'),
        ('PUT',
         self.build_versions_path(obj='o', version='9999998765.99999')),
        ('PUT', '/v1/a/c/o'),
    ])
    expected_headers = {
        TGT_OBJ_SYSMETA_SYMLINK_HDR:
        self.build_symlink_path('c', 'o', '9999998765.99999'),
        'x-object-sysmeta-symlink-target-etag': md5(
            src_body.encode('utf8'), usedforsecurity=False).hexdigest(),
        'x-object-sysmeta-symlink-target-bytes': str(len(src_body)),
    }
    symlink_put_headers = self.app._calls[-1].headers
    for k, v in expected_headers.items():
        self.assertEqual(symlink_put_headers[k], v)
def test_copy_object_versioning_disabled(self):
    """With versioning disabled a COPY passes straight through."""
    self.cache_version_off.set(get_cache_key('a', 'src_cont'),
                               {'status': 200})
    listing_body = [{
        "hash": "x",
        "last_modified": "2014-11-21T14:14:27.409100",
        "bytes": 3,
        "name": self.build_object_name('o', '0000000001.00000'),
        "content_type": "text/plain"
    }]
    prefix_listing_path = \
        '/v1/a/c\x01versions?prefix=o---&marker='
    self.app.register(
        'GET', prefix_listing_path, swob.HTTPOk, {},
        json.dumps(listing_body).encode('utf8'))
    src_body = 'stuff' * 100
    self.app.register(
        'GET', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, src_body)
    self.app.register(
        'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
    req = Request.blank(
        '/v1/a/src_cont/src_obj',
        environ={'REQUEST_METHOD': 'COPY',
                 'swift.cache': self.cache_version_off,
                 'CONTENT_LENGTH': '100'},
        headers={'Destination': 'c/o'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
    self.assertEqual(len(self.authorized), 2)
    # no version PUT; destination gets the data directly
    self.assertEqual(self.app.calls, [
        ('GET', '/v1/a/src_cont/src_obj'),
        ('PUT', '/v1/a/c/o'),
    ])
    obj_put_headers = self.app._calls[-1].headers
    self.assertNotIn(SYSMETA_VERSIONS_SYMLINK, obj_put_headers)
class ObjectVersioningTestVersionAPI(ObjectVersioningBaseTestCase):
def test_fail_non_versioned_container(self):
    """version-id requests against an unversioned container get 400."""
    self.app.register('HEAD', '/v1/a', swob.HTTPOk, {}, '')
    self.app.register('HEAD', '/v1/a/c', swob.HTTPOk, {}, '')
    req = Request.blank(
        '/v1/a/c/o', method='GET',
        params={'version-id': '0000000060.00000'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '400 Bad Request')
    self.assertEqual(body, b'version-aware operations require'
                           b' that the container is versioned')
def test_PUT_version(self):
    """PUT ?version-id restores a version by re-pointing the symlink."""
    timestamp = next(self.ts)
    # versions container paths use the bitwise-inverted timestamp
    version_path = '%s?symlink=get' % self.build_versions_path(
        obj='o', version=(~timestamp).normal)
    etag = md5(b'old-version-etag', usedforsecurity=False).hexdigest()
    self.app.register('HEAD', version_path, swob.HTTPNoContent, {
        'Content-Length': 10,
        'Content-Type': 'application/old-version',
        'ETag': etag,
    }, '')
    self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}, '')
    req = Request.blank(
        '/v1/a/c/o', method='PUT',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': timestamp.normal})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '201 Created')
    self.assertEqual(self.app.calls, [
        ('HEAD', version_path),
        ('PUT', '/v1/a/c/o?version-id=%s' % timestamp.normal),
    ])
    # the PUT body is a symlink carrying target etag/bytes from the HEAD
    obj_put_headers = self.app.calls_with_headers[-1].headers
    symlink_expected_headers = {
        SYSMETA_VERSIONS_SYMLINK: 'true',
        TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path(
            'c', 'o', (~timestamp).normal),
        'x-object-sysmeta-symlink-target-etag': etag,
        'x-object-sysmeta-symlink-target-bytes': '10',
    }
    for k, v in symlink_expected_headers.items():
        self.assertEqual(obj_put_headers[k], v)
def test_PUT_version_with_body(self):
    """PUT ?version-id must not carry a request body."""
    req = Request.blank(
        '/v1/a/c/o', method='PUT', body='foo',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': '1'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '400 Bad Request')
def test_PUT_version_not_found(self):
    """PUT ?version-id for a non-existent version returns 404."""
    timestamp = next(self.ts)
    version_path = '%s?symlink=get' % self.build_versions_path(
        obj='o', version=(~timestamp).normal)
    self.app.register('HEAD', version_path, swob.HTTPNotFound, {}, '')
    req = Request.blank(
        '/v1/a/c/o', method='PUT',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': timestamp.normal})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '404 Not Found')
    self.assertIn(b'version does not exist', body)
def test_PUT_version_container_not_found(self):
    """A missing reserved container turns the 404 into a 500."""
    timestamp = next(self.ts)
    version_path = '%s?symlink=get' % self.build_versions_path(
        obj='o', version=(~timestamp).normal)
    self.app.register('HEAD', version_path, swob.HTTPNotFound, {}, '')
    req = Request.blank(
        '/v1/a/c/o', method='PUT',
        environ={'swift.cache': self.cache_version_on_but_busted},
        params={'version-id': timestamp.normal})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '500 Internal Error')
    self.assertIn(b'container does not exist', body)
    self.assertIn(b're-enable object versioning', body)
def test_PUT_version_invalid(self):
    """Each malformed version-id query param is rejected with 400."""
    invalid_versions = ('null', 'something', '-10')
    for version_id in invalid_versions:
        with self.subTest(version_id=version_id):
            req = Request.blank(
                '/v1/a/c/o', method='PUT',
                environ={'swift.cache': self.cache_version_on},
                # bug fix: previously passed the whole tuple here
                # instead of the loop variable, so the individual
                # invalid ids were never actually exercised
                params={'version-id': version_id})
            status, headers, body = self.call_ov(req)
            self.assertEqual(status, '400 Bad Request')
def test_POST_error(self):
    """POST with a version-id query param is not allowed."""
    req = Request.blank(
        '/v1/a/c/o', method='POST',
        headers={'Content-Type': 'text/plain',
                 'X-Object-Meta-foo': 'bar'},
        environ={'swift.cache': self.cache_version_on,
                 'swift.trans_id': 'fake_trans_id'},
        params={'version-id': '1'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '400 Bad Request')
def test_GET_and_HEAD(self):
    """GET/HEAD ?version-id fetch the object from the versions container."""
    self.app.register(
        'GET',
        self.build_versions_path(obj='o', version='9999999939.99999'),
        swob.HTTPOk, {}, 'foobar')
    req = Request.blank(
        '/v1/a/c/o', method='GET',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': '0000000060.00000'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
    self.assertIn(('X-Object-Version-Id', '0000000060.00000'),
                  headers)
    self.assertEqual(b'foobar', body)
    # HEAD with same params find same registered GET
    req = Request.blank(
        '/v1/a/c/o', method='HEAD',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': '0000000060.00000'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
    self.assertIn(('X-Object-Version-Id', '0000000060.00000'),
                  headers)
    self.assertEqual(b'', body)
def test_GET_404(self):
    """GET ?version-id for a missing version: 404, no version-id header."""
    self.app.register(
        'GET',
        self.build_versions_path(obj='o', version='9999999939.99999'),
        swob.HTTPNotFound, {}, '')
    req = Request.blank(
        '/v1/a/c/o', method='GET',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': '0000000060.00000'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '404 Not Found')
    self.assertNotIn(('X-Object-Version-Id', '0000000060.00000'),
                     headers)
def test_HEAD(self):
    """HEAD ?version-id passes through the version's metadata."""
    self.app.register(
        'HEAD',
        self.build_versions_path(obj='o', version='9999999939.99999'),
        swob.HTTPOk, {
            'X-Object-Meta-Foo': 'bar'},
        '')
    req = Request.blank(
        '/v1/a/c/o', method='HEAD',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': '0000000060.00000'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
    self.assertEqual(len(self.authorized), 1)
    self.assertIn(('X-Object-Version-Id', '0000000060.00000'),
                  headers)
    self.assertIn(('X-Object-Meta-Foo', 'bar'), headers)
def test_GET_null_id(self):
    """version-id=null targets the raw, never-versioned object."""
    self.app.register(
        'GET', '/v1/a/c/o', swob.HTTPOk, {}, 'foobar')
    req = Request.blank(
        '/v1/a/c/o', method='GET',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': 'null'})
    # N.B. GET w/ query param found registered raw_path GET
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
    self.assertEqual(1, len(self.authorized))
    self.assertRequestEqual(req, self.authorized[0])
    self.assertEqual(1, len(self.app.calls))
    self.assertIn(('X-Object-Version-Id', 'null'), headers)
    self.assertEqual(b'foobar', body)
    # and HEAD w/ same params finds same registered GET
    req = Request.blank(
        '/v1/a/c/o?version-id=null', method='HEAD',
        environ={'swift.cache': self.cache_version_on})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '200 OK')
    self.assertEqual(len(self.authorized), 1)
    self.assertRequestEqual(req, self.authorized[0])
    self.assertEqual(2, len(self.app.calls))
    self.assertIn(('X-Object-Version-Id', 'null'), headers)
    self.assertEqual(b'', body)
def test_GET_null_id_versioned_obj(self):
    """version-id=null on an object that IS versioned returns 404."""
    self.app.register(
        'GET', '/v1/a/c/o', swob.HTTPOk, {
            # Content-Location pointing into the versions container
            # marks the object as versioned, so 'null' cannot match
            'Content-Location': self.build_versions_path(
                obj='o', version='9999998765.99999')},
        '')
    req = Request.blank(
        '/v1/a/c/o', method='GET',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': 'null'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '404 Not Found')
    self.assertEqual(1, len(self.authorized))
    self.assertEqual(1, len(self.app.calls))
    self.assertNotIn(('X-Object-Version-Id', '0000001234.00000'), headers)
    # This will log a 499 but (at the moment, anyway)
    # we don't have a good way to avoid it
    self.expected_unread_requests[('GET', '/v1/a/c/o?version-id=null')] = 1
def test_GET_null_id_404(self):
    """version-id=null on a missing object: plain 404 pass-through."""
    self.app.register(
        'GET', '/v1/a/c/o', swob.HTTPNotFound, {}, '')
    req = Request.blank(
        '/v1/a/c/o', method='GET',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': 'null'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '404 Not Found')
    self.assertEqual(1, len(self.authorized))
    self.assertRequestEqual(req, self.authorized[0])
    self.assertEqual(1, len(self.app.calls))
    self.assertNotIn(('X-Object-Version-Id', 'null'), headers)
    # and HEAD w/ same params finds same registered GET
    # we have test_HEAD_null_id, the following test is meant to illustrate
    # that FakeSwift works for HEADs even if only GETs are registered.
    req = Request.blank(
        '/v1/a/c/o', method='HEAD',
        environ={'swift.cache': self.cache_version_on},
        params={'version-id': 'null'})
    status, headers, body = self.call_ov(req)
    self.assertEqual(status, '404 Not Found')
    self.assertEqual(1, len(self.authorized))
    self.assertRequestEqual(req, self.authorized[0])
    self.assertEqual(2, len(self.app.calls))
    self.assertNotIn(('X-Object-Version-Id', 'null'), headers)
    def test_HEAD_null_id(self):
        """HEAD ?version-id=null serves the primary object directly.

        Object metadata passes through and the response advertises the
        'null' version-id.  The trailing GET demonstrates a FakeSwift
        quirk: a method registered only as HEAD raises KeyError for GET.
        """
        self.app.register(
            'HEAD', '/v1/a/c/o', swob.HTTPOk, {'X-Object-Meta-Foo': 'bar'}, '')
        req = Request.blank(
            '/v1/a/c/o', method='HEAD',
            environ={'swift.cache': self.cache_version_on},
            params={'version-id': 'null'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(1, len(self.authorized))
        self.assertEqual(1, len(self.app.calls))
        self.assertIn(('X-Object-Version-Id', 'null'), headers)
        self.assertIn(('X-Object-Meta-Foo', 'bar'), headers)
        # N.B. GET on explicitly registered HEAD raised KeyError
        req = Request.blank(
            '/v1/a/c/o', method='GET',
            environ={'swift.cache': self.cache_version_on},
            params={'version-id': 'null'})
        with self.assertRaises(KeyError):
            status, headers, body = self.call_ov(req)
def test_HEAD_delete_marker(self):
self.app.register(
'HEAD',
self.build_versions_path(obj='o', version='9999999939.99999'),
swob.HTTPOk, {
'content-type':
'application/x-deleted;swift_versions_deleted=1'},
'')
req = Request.blank(
'/v1/a/c/o', method='HEAD',
environ={'swift.cache': self.cache_version_on},
params={'version-id': '0000000060.00000'})
status, headers, body = self.call_ov(req)
# a HEAD/GET of a delete-marker returns a 404
self.assertEqual(status, '404 Not Found')
self.assertEqual(len(self.authorized), 1)
self.assertIn(('X-Object-Version-Id', '0000000060.00000'),
headers)
    def test_DELETE_not_current_version(self):
        # This tests when version-id does not point to the
        # current version, in this case, there's no need to
        # re-link symlink
        self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {
            SYSMETA_VERSIONS_SYMLINK: 'true',
            TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path(
                'c', 'o', '9999999940.99999')}, '')
        self.app.register(
            'DELETE',
            self.build_versions_path(obj='o', version='9999999939.99999'),
            swob.HTTPNoContent, {}, 'foobar')
        req = Request.blank(
            '/v1/a/c/o', method='DELETE',
            environ={'swift.cache': self.cache_version_on},
            params={'version-id': '0000000060.00000'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '204 No Content')
        # the symlink still points at version 59, which stays current
        self.assertEqual('0000000059.00000',
                         dict(headers)['X-Object-Current-Version-Id'])
        # only the requested version is deleted; the primary symlink
        # is left untouched
        self.assertEqual(self.app.calls, [
            ('HEAD', '/v1/a/c/o?symlink=get'),
            ('DELETE',
             '%s?version-id=0000000060.00000' % self.build_versions_path(
                 obj='o', version='9999999939.99999')),
        ])
        calls = self.app.calls_with_headers
        # the current-version probe asks for the freshest replica
        self.assertIn('X-Newest', calls[0].headers)
        self.assertEqual('True', calls[0].headers['X-Newest'])
    def test_DELETE_current_version(self):
        """DELETE of a version-id that IS the current version.

        Both the primary symlink and the versioned object are deleted,
        and the response reports 'null' as the new current version.
        """
        self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {
            SYSMETA_VERSIONS_SYMLINK: 'true',
            TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path(
                'c', 'o', '9999999939.99999')}, '')
        self.app.register(
            'DELETE',
            self.build_versions_path(obj='o', version='9999999939.99999'),
            swob.HTTPNoContent, {}, '')
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, '')
        req = Request.blank(
            '/v1/a/c/o', method='DELETE',
            environ={'swift.cache': self.cache_version_on},
            params={'version-id': '0000000060.00000'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual('null',
                         dict(headers)['X-Object-Current-Version-Id'])
        self.assertEqual('0000000060.00000',
                         dict(headers)['X-Object-Version-Id'])
        # symlink probe, then the primary link, then the version itself
        self.assertEqual(self.app.calls, [
            ('HEAD', '/v1/a/c/o?symlink=get'),
            ('DELETE', '/v1/a/c/o'),
            ('DELETE',
             self.build_versions_path(obj='o', version='9999999939.99999')),
        ])
    def test_DELETE_current_version_is_delete_marker(self):
        """DELETE a version while the primary object 404s (delete-marker).

        With no primary object to unlink, only the requested version is
        deleted from the versions container.
        """
        self.app.register('HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, '')
        self.app.register(
            'DELETE',
            self.build_versions_path(obj='o', version='9999999939.99999'),
            swob.HTTPNoContent, {}, '')
        req = Request.blank(
            '/v1/a/c/o', method='DELETE',
            environ={'swift.cache': self.cache_version_on},
            params={'version-id': '0000000060.00000'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual('null',
                         dict(headers)['X-Object-Current-Version-Id'])
        self.assertEqual('0000000060.00000',
                         dict(headers)['X-Object-Version-Id'])
        self.assertEqual(self.app.calls, [
            ('HEAD', '/v1/a/c/o?symlink=get'),
            ('DELETE',
             '%s?version-id=0000000060.00000' % self.build_versions_path(
                 obj='o', version='9999999939.99999')),
        ])
    def test_DELETE_current_obj_is_unversioned(self):
        """DELETE a version while the primary object is not a symlink.

        The HEAD shows a plain (unversioned) object, so the primary is
        left alone and only the requested version is deleted.
        """
        self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {}, '')
        self.app.register(
            'DELETE',
            self.build_versions_path(obj='o', version='9999999939.99999'),
            swob.HTTPNoContent, {}, '')
        req = Request.blank(
            '/v1/a/c/o', method='DELETE',
            environ={'swift.cache': self.cache_version_on},
            params={'version-id': '0000000060.00000'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual('null',
                         dict(headers)['X-Object-Current-Version-Id'])
        self.assertEqual('0000000060.00000',
                         dict(headers)['X-Object-Version-Id'])
        self.assertEqual(self.app.calls, [
            ('HEAD', '/v1/a/c/o?symlink=get'),
            ('DELETE',
             '%s?version-id=0000000060.00000' % self.build_versions_path(
                 obj='o', version='9999999939.99999')),
        ])
def test_DELETE_null_version(self):
self.app.register(
'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, '')
req = Request.blank(
'/v1/a/c/o', method='DELETE',
environ={'swift.cache': self.cache_version_on},
params={'version-id': 'null'})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '204 No Content')
self.assertEqual(self.app.calls, [
('DELETE', '/v1/a/c/o?version-id=null'),
])
class ObjectVersioningVersionAPIWhileDisabled(ObjectVersioningBaseTestCase):
    """Version-addressed requests while versioning is (now) disabled.

    Existing versions remain addressable by version-id even after the
    container's versioning flag is switched off.
    """

    # NOTE: 'disbaled' is a long-standing typo in this test name; it is
    # kept as-is so any external references to the test id keep working.
    def test_PUT_version_versioning_disbaled(self):
        """PUT ?version-id relinks an old version even when disabled."""
        timestamp = next(self.ts)
        version_path = '%s?symlink=get' % self.build_versions_path(
            obj='o', version=(~timestamp).normal)
        etag = md5(b'old-version-etag', usedforsecurity=False).hexdigest()
        self.app.register('HEAD', version_path, swob.HTTPNoContent, {
            'Content-Length': 10,
            'Content-Type': 'application/old-version',
            'ETag': etag,
        }, '')
        self.app.register('PUT', '/v1/a/c/o', swob.HTTPCreated, {}, '')
        req = Request.blank(
            '/v1/a/c/o', method='PUT',
            environ={'swift.cache': self.cache_version_off},
            params={'version-id': timestamp.normal})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '201 Created')
        self.assertEqual(self.app.calls, [
            ('HEAD', version_path),
            ('PUT', '/v1/a/c/o?version-id=%s' % timestamp.normal),
        ])
        # the PUT re-creates the primary object as a symlink carrying the
        # old version's etag/size in its sysmeta
        obj_put_headers = self.app.calls_with_headers[-1].headers
        symlink_expected_headers = {
            SYSMETA_VERSIONS_SYMLINK: 'true',
            TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path(
                'c', 'o', (~timestamp).normal),
            'x-object-sysmeta-symlink-target-etag': etag,
            'x-object-sysmeta-symlink-target-bytes': '10',
        }
        for k, v in symlink_expected_headers.items():
            self.assertEqual(obj_put_headers[k], v)

    def test_POST_error_versioning_disabled(self):
        """POST with a version-id param is rejected with a 400."""
        req = Request.blank(
            '/v1/a/c/o', method='POST',
            headers={'Content-Type': 'text/plain',
                     'X-Object-Meta-foo': 'bar'},
            environ={'swift.cache': self.cache_version_off,
                     'swift.trans_id': 'fake_trans_id'},
            params={'version-id': '1'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '400 Bad Request')

    def test_DELETE_current_version(self):
        """DELETE of the current version works even when disabled."""
        self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {
            SYSMETA_VERSIONS_SYMLINK: 'true',
            TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path(
                'c', 'o', '9999999939.99999')}, '')
        self.app.register(
            'DELETE',
            self.build_versions_path(obj='o', version='9999999939.99999'),
            swob.HTTPNoContent, {}, '')
        self.app.register(
            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, '')
        # request with versioning disabled
        req = Request.blank(
            '/v1/a/c/o', method='DELETE',
            environ={'swift.cache': self.cache_version_off},
            params={'version-id': '0000000060.00000'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '204 No Content')
        self.assertEqual(self.app.calls, [
            ('HEAD', '/v1/a/c/o?symlink=get'),
            ('DELETE', '/v1/a/c/o'),
            ('DELETE',
             self.build_versions_path(obj='o', version='9999999939.99999')),
        ])
class ObjectVersioningTestContainerOperations(ObjectVersioningBaseTestCase):
    def test_container_listing_translation(self):
        """Version symlinks in container listings are translated.

        Entries whose listing hash carries symlink sysmeta pointing into
        the hidden versions container are rewritten to show the target's
        bytes/etag plus a version-id symlink_path; plain objects pass
        through untouched, and symlinks into ordinary containers keep the
        standard symlink_* listing keys.
        """
        listing_body = [{
            'bytes': 0,
            'name': 'my-normal-obj',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e; '
                    'symlink_target=%s; '
                    'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; '
                    'symlink_target_bytes=9' % self.build_symlink_path(
                        'c', 'my-normal-obj', '9999999989.99999'),
            'last_modified': '2019-07-26T15:09:54.518990',
            'content_type': 'application/foo',
        }, {
            'bytes': 8,
            'name': 'my-old-object',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '2019-07-26T15:54:38.326800',
            'content_type': 'application/bar',
        }, {
            'bytes': 0,
            'name': 'my-slo-manifest',
            'hash': '387d1ab7d89eda2162bcf8e502667c86; '
                    'slo_etag=71e938d37c1d06dc634dd24660255a88; '
                    'symlink_target=%s; '
                    'symlink_target_etag=387d1ab7d89eda2162bcf8e502667c86; '
                    # N.B. symlink_target_bytes is set to the slo_size
                    'symlink_target_bytes=10485760' % self.build_symlink_path(
                        'c', 'my-slo-manifest', '9999999979.99999'),
            'last_modified': '2019-07-26T15:00:28.499260',
            'content_type': 'application/baz',
        }, {
            'bytes': 0,
            'name': 'unexpected-symlink',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e; '
                    'symlink_target=tgt_container/tgt_obj; '
                    'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; '
                    'symlink_target_bytes=9',
            'last_modified': '2019-07-26T15:09:54.518990',
            'content_type': 'application/symlink',
        }]
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
             SYSMETA_VERSIONS_ENABLED: True},
            json.dumps(listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a/c',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        expected = [{
            'bytes': 9,
            'name': 'my-normal-obj',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '2019-07-26T15:09:54.518990',
            'content_type': 'application/foo',
            'symlink_path':
                '/v1/a/c/my-normal-obj?version-id=0000000010.00000',
            'version_symlink': True,
        }, {
            'bytes': 8,
            'name': 'my-old-object',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '2019-07-26T15:54:38.326800',
            'content_type': 'application/bar',
        }, {
            'bytes': 10485760,
            'name': 'my-slo-manifest',
            # since we don't have slo middleware in test pipeline, we expect
            # slo_etag to stay in the hash key
            'hash': '387d1ab7d89eda2162bcf8e502667c86; '
                    'slo_etag=71e938d37c1d06dc634dd24660255a88',
            'last_modified': '2019-07-26T15:00:28.499260',
            'content_type': 'application/baz',
            'symlink_path':
                '/v1/a/c/my-slo-manifest?version-id=0000000020.00000',
            'version_symlink': True,
        }, {
            'bytes': 0,
            'name': 'unexpected-symlink',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e',
            'last_modified': '2019-07-26T15:09:54.518990',
            'symlink_bytes': 9,
            'symlink_path': '/v1/a/tgt_container/tgt_obj',
            'symlink_etag': 'e55cedc11adb39c404b7365f7d6291fa',
            'content_type': 'application/symlink',
        }]
        self.assertEqual(expected, json.loads(body))
    def test_listing_translation_utf8(self):
        """Listing translation handles non-ASCII container/object names.

        The versions container name is stored wsgi-quoted in sysmeta;
        the translated symlink_path comes back percent-encoded.
        """
        listing_body = [{
            'bytes': 0,
            'name': u'\N{SNOWMAN}-obj',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e; '
                    'symlink_target=%s; '
                    'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; '
                    'symlink_target_bytes=9' % self.build_symlink_path(
                        u'\N{COMET}-container', u'\N{CLOUD}-target',
                        '9999999989.99999'),
            'last_modified': '2019-07-26T15:09:54.518990',
            'content_type': 'application/snowman',
        }]
        self.app.register(
            'GET', '/v1/a/\xe2\x98\x83-test', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: wsgi_quote(
                self.str_to_wsgi(self.build_container_name(
                    u'\N{COMET}-container'))),
             SYSMETA_VERSIONS_ENABLED: True},
            json.dumps(listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a/\xe2\x98\x83-test',
            environ={'REQUEST_METHOD': 'GET'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        expected = [{
            'bytes': 9,
            'name': u'\N{SNOWMAN}-obj',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '2019-07-26T15:09:54.518990',
            'symlink_path':
                '/v1/a/%E2%98%83-test/%E2%98%81-target?'
                'version-id=0000000010.00000',
            'content_type': 'application/snowman',
            'version_symlink': True,
        }]
        self.assertEqual(expected, json.loads(body))
    def test_list_versions(self):
        """?versions merges primary and versions-container listings.

        Un-versioned objects report version_id 'null'; the symlinked
        current version and its older sibling are merged in order with
        is_latest set only on the newest of each name.  JSON is the only
        acceptable listing format for ?versions.
        """
        listing_body = [{
            'bytes': 8,
            'name': 'my-other-object',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
        }, {
            'bytes': 0,
            'name': 'obj',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e; '
                    'symlink_target=%s; '
                    'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; '
                    'symlink_target_bytes=9' %
                    self.build_symlink_path('c', 'obj', '9999999979.99999'),
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }]
        versions_listing_body = [{
            'bytes': 9,
            'name': self.build_object_name('obj', '9999999979.99999'),
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }, {
            'bytes': 8,
            'name': self.build_object_name('obj', '9999999989.99999'),
            'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2',
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': 'text/plain',
        }]
        self.app.register(
            'GET', self.build_versions_path(), swob.HTTPOk, {},
            json.dumps(versions_listing_body).encode('utf8'))
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
             SYSMETA_VERSIONS_ENABLED: True},
            json.dumps(listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        expected = [{
            'bytes': 8,
            'name': 'my-other-object',
            'version_id': 'null',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
            'is_latest': True,
        }, {
            'bytes': 9,
            'name': 'obj',
            'version_id': '0000000020.00000',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
            'is_latest': True,
        }, {
            'bytes': 8,
            'name': 'obj',
            'version_id': '0000000010.00000',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2',
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': 'text/plain',
            'is_latest': False,
        }]
        self.assertEqual(expected, json.loads(body))
        # Can be explicitly JSON
        req = Request.blank(
            '/v1/a/c?versions&format=json',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on,
                     'HTTP_ACCEPT': 'application/json'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        # But everything else is unacceptable
        req = Request.blank(
            '/v1/a/c?versions&format=txt',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '406 Not Acceptable')
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on,
                     'HTTP_ACCEPT': 'text/plain'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '406 Not Acceptable')
        req = Request.blank(
            '/v1/a/c?versions&format=xml',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '406 Not Acceptable')
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on,
                     'HTTP_ACCEPT': 'text/xml'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '406 Not Acceptable')
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on,
                     'HTTP_ACCEPT': 'application/xml'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '406 Not Acceptable')
        req = Request.blank(
            '/v1/a/c?versions&format=asdf',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '406 Not Acceptable')
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on,
                     'HTTP_ACCEPT': 'foo/bar'})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '406 Not Acceptable')
def test_list_versions_marker_missing_marker(self):
self.app.register(
'GET', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: True}, '{}')
req = Request.blank(
'/v1/a/c?versions&version_marker=1',
environ={'REQUEST_METHOD': 'GET',
'swift.cache': self.cache_version_on})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '400 Bad Request')
self.assertEqual(body, b'version_marker param requires marker')
req = Request.blank(
'/v1/a/c?versions&marker=obj&version_marker=id',
environ={'REQUEST_METHOD': 'GET',
'swift.cache': self.cache_version_on})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '400 Bad Request')
self.assertEqual(body, b'invalid version_marker param')
    def test_list_versions_marker(self):
        """marker / version_marker paging of a ?versions listing.

        marker=obj skips the un-versioned entry; adding a version_marker
        additionally skips versions at or before that id, and is
        translated into a marker on the versions-container listing.
        """
        listing_body = [{
            'bytes': 8,
            'name': 'non-versioned-obj',
            'hash': 'etag',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
        }, {
            'bytes': 0,
            'name': 'obj',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e; '
                    'symlink_target=%s; '
                    'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; '
                    'symlink_target_bytes=9' %
                    self.build_symlink_path('c', 'obj', '9999999969.99999'),
            'last_modified': '1970-01-01T00:00:30.000000',
            'content_type': 'text/plain',
        }]
        versions_listing_body = [{
            'bytes': 9,
            'name': self.build_object_name('obj', '9999999969.99999'),
            'hash': 'etagv3',
            'last_modified': '1970-01-01T00:00:30.000000',
            'content_type': 'text/plain',
        }, {
            'bytes': 10,
            'name': self.build_object_name('obj', '9999999979.99999'),
            'hash': 'etagv2',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }, {
            'bytes': 8,
            'name': self.build_object_name('obj', '9999999989.99999'),
            'hash': 'etagv1',
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': 'text/plain',
        }]
        expected = [{
            'bytes': 8,
            'name': 'non-versioned-obj',
            'hash': 'etag',
            'version_id': 'null',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
        }, {
            'bytes': 9,
            'name': 'obj',
            'version_id': '0000000030.00000',
            'hash': 'etagv3',
            'last_modified': '1970-01-01T00:00:30.000000',
            'content_type': 'text/plain',
            'is_latest': True,
        }, {
            'bytes': 10,
            'name': 'obj',
            'version_id': '0000000020.00000',
            'hash': 'etagv2',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
            'is_latest': False,
        }, {
            'bytes': 8,
            'name': 'obj',
            'version_id': '0000000010.00000',
            'hash': 'etagv1',
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': 'text/plain',
            'is_latest': False,
        }]
        self.app.register(
            'GET', self.build_versions_path(), swob.HTTPOk, {},
            json.dumps(versions_listing_body).encode('utf8'))
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
             SYSMETA_VERSIONS_ENABLED: True},
            json.dumps(listing_body[1:]).encode('utf8'))
        req = Request.blank(
            '/v1/a/c?versions&marker=obj',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(expected[1:], json.loads(body))
        # version_marker
        self.app.register(
            'GET',
            '%s?marker=%s' % (
                self.build_versions_path(),
                self.build_object_name('obj', '9999999989.99999')),
            swob.HTTPOk, {},
            json.dumps(versions_listing_body[2:]).encode('utf8'))
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
             SYSMETA_VERSIONS_ENABLED: True},
            json.dumps(listing_body[1:]).encode('utf8'))
        req = Request.blank(
            '/v1/a/c?versions&marker=obj&version_marker=0000000010.00000',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        self.assertEqual(expected[3:], json.loads(body))
def test_list_versions_invalid_delimiter(self):
self.app.register(
'GET', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: True}, '{}')
req = Request.blank(
'/v1/a/c?versions&delimiter=1',
environ={'REQUEST_METHOD': 'GET',
'swift.cache': self.cache_version_on})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '400 Bad Request')
self.assertEqual(body, b'invalid delimiter param')
    def test_list_versions_delete_markers(self):
        """Delete-marker versions appear in a ?versions listing.

        The primary container is empty; both versions-container entries
        with the delete-marker content type are reported, newest first
        with is_latest on the newest only.
        """
        listing_body = []
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
             SYSMETA_VERSIONS_ENABLED: True},
            json.dumps(listing_body).encode('utf8'))
        versions_listing_body = [{
            'name': self.build_object_name('obj', '9999999979.99999'),
            'bytes': 0,
            'hash': utils.MD5_OF_EMPTY_STRING,
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': DELETE_MARKER_CONTENT_TYPE,
        }, {
            'name': self.build_object_name('obj', '9999999989.99999'),
            'bytes': 0,
            'hash': utils.MD5_OF_EMPTY_STRING,
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': DELETE_MARKER_CONTENT_TYPE,
        }]
        self.app.register(
            'GET', self.build_versions_path(), swob.HTTPOk, {},
            json.dumps(versions_listing_body).encode('utf8'))
        req = Request.blank('/v1/a/c?versions', method='GET',
                            environ={'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        expected = [{
            'name': 'obj',
            'bytes': 0,
            'version_id': '0000000020.00000',
            'hash': utils.MD5_OF_EMPTY_STRING,
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': DELETE_MARKER_CONTENT_TYPE,
            'is_latest': True,
        }, {
            'name': 'obj',
            'bytes': 0,
            'version_id': '0000000010.00000',
            'hash': utils.MD5_OF_EMPTY_STRING,
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': DELETE_MARKER_CONTENT_TYPE,
            'is_latest': False,
        }]
        self.assertEqual(expected, json.loads(body))
    def test_list_versions_unversioned(self):
        """?versions works when the versions container 404s.

        Listing entries are still translated: a stray version symlink in
        the primary listing (from a replica that knew about versioning)
        is resolved, and plain objects report version_id 'null'.
        """
        listing_body = [{
            'bytes': 8,
            'name': 'my-other-object',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
        }, {
            # How did this get here??? Who knows -- maybe another container
            # replica *does* know about versioning being enabled
            'bytes': 0,
            'name': 'obj',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e; '
                    'symlink_target=%s; '
                    'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; '
                    'symlink_target_bytes=9' %
                    self.build_symlink_path('c', 'obj', '9999999979.99999'),
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }]
        self.app.register(
            'GET', self.build_versions_path(), swob.HTTPNotFound, {}, None)
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk, {},
            json.dumps(listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_off})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertNotIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        expected = [{
            'bytes': 8,
            'name': 'my-other-object',
            'version_id': 'null',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
            'is_latest': True,
        }, {
            'bytes': 9,
            'name': 'obj',
            'version_id': '0000000020.00000',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
            'is_latest': True,
        }]
        self.assertEqual(expected, json.loads(body))
    def test_list_versions_delimiter(self):
        """?versions&delimiter=/ passes subdir markers through.

        The reserved-name subdir entry from the versions container is
        folded into the same plain 'subdir/' entry the primary listing
        reports.
        """
        listing_body = [{
            'bytes': 8,
            'name': 'my-other-object',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
        }, {
            'bytes': 0,
            'name': 'obj',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e; '
                    'symlink_target=%s; '
                    'symlink_target_etag=e55cedc11adb39c404b7365f7d6291fa; '
                    'symlink_target_bytes=9' %
                    self.build_symlink_path('c', 'obj', '9999999979.99999'),
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }, {
            'subdir': 'subdir/'
        }]
        versions_listing_body = [{
            'bytes': 9,
            'name': self.build_object_name('obj', '9999999979.99999'),
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }, {
            'bytes': 8,
            'name': self.build_object_name('obj', '9999999989.99999'),
            'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2',
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': 'text/plain',
        }, {
            'subdir': get_reserved_name('subdir/')
        }]
        self.app.register(
            'GET', self.build_versions_path(), swob.HTTPOk, {},
            json.dumps(versions_listing_body).encode('utf8'))
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
             SYSMETA_VERSIONS_ENABLED: True},
            json.dumps(listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a/c?versions&delimiter=/',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        expected = [{
            'bytes': 8,
            'name': 'my-other-object',
            'version_id': 'null',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
            'is_latest': True,
        }, {
            'bytes': 9,
            'name': 'obj',
            'version_id': '0000000020.00000',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
            'is_latest': True,
        }, {
            'bytes': 8,
            'name': 'obj',
            'version_id': '0000000010.00000',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2',
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': 'text/plain',
            'is_latest': False,
        }, {
            'subdir': 'subdir/'
        }]
        self.assertEqual(expected, json.loads(body))
    def test_list_versions_empty_primary(self):
        """?versions with an empty primary container listing.

        With no current object, every entry from the versions container
        is reported with is_latest False.
        """
        versions_listing_body = [{
            'bytes': 8,
            'name': self.build_object_name('obj', '9999999979.99999'),
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }, {
            'bytes': 8,
            'name': self.build_object_name('obj', '9999999989.99999'),
            'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2',
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': 'text/plain',
        }]
        self.app.register(
            'GET', self.build_versions_path(), swob.HTTPOk, {},
            json.dumps(versions_listing_body).encode('utf8'))
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
             SYSMETA_VERSIONS_ENABLED: True},
            '{}')
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        expected = [{
            'bytes': 8,
            'name': 'obj',
            'version_id': '0000000020.00000',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
            'is_latest': False,
        }, {
            'bytes': 8,
            'name': 'obj',
            'version_id': '0000000010.00000',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb35d5f2',
            'last_modified': '1970-01-01T00:00:10.000000',
            'content_type': 'text/plain',
            'is_latest': False,
        }]
        self.assertEqual(expected, json.loads(body))
def test_list_versions_error_versions_container(self):
listing_body = [{
'bytes': 8,
'name': 'my-other-object',
'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
'last_modified': '1970-01-01T00:00:05.000000',
'content_type': 'application/bar',
}, {
'bytes': 9,
'name': 'obj',
'hash': 'e55cedc11adb39c404b7365f7d6291fa',
'last_modified': '1970-01-01T00:00:20.000000',
'content_type': 'text/plain',
}]
self.app.register(
'GET', self.build_versions_path(),
swob.HTTPInternalServerError, {}, '')
self.app.register(
'GET', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: True},
json.dumps(listing_body).encode('utf8'))
req = Request.blank(
'/v1/a/c?versions',
environ={'REQUEST_METHOD': 'GET',
'swift.cache': self.cache_version_on})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '500 Internal Error')
    def test_list_versions_empty_versions_container(self):
        """?versions with an empty versions container listing.

        Every primary entry comes back with version_id 'null' and
        is_latest True.
        """
        listing_body = [{
            'bytes': 8,
            'name': 'my-other-object',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
        }, {
            'bytes': 9,
            'name': 'obj',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }]
        self.app.register(
            'GET', self.build_versions_path(), swob.HTTPOk, {}, '{}')
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
             SYSMETA_VERSIONS_ENABLED: True},
            json.dumps(listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        expected = [{
            'bytes': 8,
            'name': 'my-other-object',
            'version_id': 'null',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
            'is_latest': True,
        }, {
            'bytes': 9,
            'name': 'obj',
            'version_id': 'null',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
            'is_latest': True,
        }]
        self.assertEqual(expected, json.loads(body))
    def test_list_versions_404_versions_container(self):
        """?versions with a missing (404) versions container.

        Unlike a 500, a 404 is tolerated: the primary listing is served
        with version_id 'null' for every entry.
        """
        listing_body = [{
            'bytes': 8,
            'name': 'my-other-object',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
        }, {
            'bytes': 9,
            'name': 'obj',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }]
        self.app.register(
            'GET', self.build_versions_path(), swob.HTTPNotFound, {}, '')
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
             SYSMETA_VERSIONS_ENABLED: True},
            json.dumps(listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertIn(('X-Versions-Enabled', 'True'), headers)
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        expected = [{
            'bytes': 8,
            'name': 'my-other-object',
            'version_id': 'null',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
            'is_latest': True,
        }, {
            'bytes': 9,
            'name': 'obj',
            'version_id': 'null',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
            'is_latest': True,
        }]
        self.assertEqual(expected, json.loads(body))
    def test_list_versions_never_enabled(self):
        """?versions listing on a container where versioning was never
        enabled: entries get synthetic 'null' version ids and no
        X-Versions-Enabled header is returned.

        Also verifies that a cached 404 for the hidden versions
        container short-circuits the HEAD to it.
        """
        listing_body = [{
            'bytes': 8,
            'name': 'my-other-object',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
        }, {
            'bytes': 9,
            'name': 'obj',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
        }]
        self.app.register(
            'GET', self.build_versions_path(), swob.HTTPNotFound, {}, '')
        # note: no versioning sysmeta on the primary container
        self.app.register(
            'GET', '/v1/a/c', swob.HTTPOk, {},
            json.dumps(listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_never_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertNotIn('x-versions-enabled', [h.lower() for h, _ in headers])
        self.assertEqual(len(self.authorized), 1)
        self.assertRequestEqual(req, self.authorized[0])
        expected = [{
            'bytes': 8,
            'name': 'my-other-object',
            'version_id': 'null',
            'hash': 'ebdd8d46ecb4a07f6c433d67eb05d5f3',
            'last_modified': '1970-01-01T00:00:05.000000',
            'content_type': 'application/bar',
            'is_latest': True,
        }, {
            'bytes': 9,
            'name': 'obj',
            'version_id': 'null',
            'hash': 'e55cedc11adb39c404b7365f7d6291fa',
            'last_modified': '1970-01-01T00:00:20.000000',
            'content_type': 'text/plain',
            'is_latest': True,
        }]
        self.assertEqual(expected, json.loads(body))
        # nothing cached for the versions container, so we HEAD it
        self.assertEqual(self.app.calls, [
            ('GET', '/v1/a/c?format=json'),
            ('HEAD', self.build_versions_path()),
        ])

        # if it's in cache, we won't even get the HEAD
        self.app._calls = []
        self.cache_version_never_on.set(
            get_cache_key('a', self.build_container_name('c')),
            {'status': 404})
        req = Request.blank(
            '/v1/a/c?versions',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': self.cache_version_never_on})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertNotIn('x-versions-enabled', [h.lower() for h, _ in headers])
        self.assertEqual(expected, json.loads(body))
        self.assertEqual(self.app.calls, [('GET', '/v1/a/c?format=json')])
def test_bytes_count(self):
self.app.register(
'HEAD', self.build_versions_path(), swob.HTTPOk,
{'X-Container-Bytes-Used': '17',
'X-Container-Object-Count': '3'}, '')
self.app.register(
'HEAD', '/v1/a/c', swob.HTTPOk,
{SYSMETA_VERSIONS_CONT: self.build_container_name('c'),
SYSMETA_VERSIONS_ENABLED: True,
'X-Container-Bytes-Used': '8',
'X-Container-Object-Count': '1'}, '')
req = Request.blank(
'/v1/a/c?versions',
environ={'REQUEST_METHOD': 'HEAD',
'swift.cache': self.cache_version_on})
status, headers, body = self.call_ov(req)
self.assertEqual(status, '200 OK')
self.assertIn(('X-Versions-Enabled', 'True'), headers)
self.assertIn(('X-Container-Bytes-Used', '25'), headers)
self.assertIn(('X-Container-Object-Count', '1'), headers)
self.assertEqual(len(self.authorized), 1)
self.assertRequestEqual(req, self.authorized[0])
class ObjectVersioningTestAccountOperations(ObjectVersioningBaseTestCase):
    """Account-listing tests: hidden versions containers must be merged
    into their visible counterparts (stats combined) and never shown
    under their reserved names.
    """

    def test_list_containers(self):
        listing_body = [{
            'bytes': 10,
            'count': 2,
            'name': 'regular-cont',
            'last_modified': '1970-01-01T00:00:05.000000',
        }, {
            'bytes': 0,
            'count': 3,
            'name': 'versioned-cont',
            'last_modified': '1970-01-01T00:00:20.000000',
        }]
        versions_listing_body = [{
            'bytes': 24,
            'count': 3,
            'name': self.build_container_name('versioned-cont'),
            'last_modified': '1970-01-01T00:00:20.000000',
        }]
        cache = FakeMemcache()
        self.app.register(
            'GET', '/v1/a', swob.HTTPOk, {},
            json.dumps(listing_body).encode('utf8'))
        # the middleware issues a second listing restricted to the
        # reserved "versions" prefix to find the hidden containers
        params = {
            'format': 'json',
            'prefix': self.str_to_wsgi(get_reserved_name('versions')),
        }
        path = '/v1/a?%s' % urllib.parse.urlencode(params)
        self.app.register(
            'GET', path, swob.HTTPOk, {},
            json.dumps(versions_listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': cache})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        # 'versioned-cont' picks up the hidden container's stats
        expected = [{
            'bytes': 10,
            'count': 2,
            'name': 'regular-cont',
            'last_modified': '1970-01-01T00:00:05.000000',
        }, {
            'bytes': 24,
            'count': 3,
            'name': 'versioned-cont',
            'last_modified': '1970-01-01T00:00:20.000000',
        }]
        self.assertEqual(expected, json.loads(body))

        # limit is honored after the merge
        req.query_string = 'limit=1'
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(1, len(json.loads(body)))

        # a non-numeric limit is ignored
        req.query_string = 'limit=foo'
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        self.assertEqual(2, len(json.loads(body)))

        # an absurdly large limit is rejected
        req.query_string = 'limit=100000000000000000000000'
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '412 Precondition Failed')

    def test_list_containers_prefix(self):
        # a user-supplied prefix is translated onto the reserved
        # namespace for the second (hidden-container) listing
        listing_body = [{
            'bytes': 0,
            'count': 1,
            'name': 'versioned-cont',
            'last_modified': '1970-01-01T00:00:05.000000',
        }]
        versions_listing_body = [{
            'bytes': 24,
            'count': 3,
            'name': self.build_container_name('versioned-cont'),
            'last_modified': '1970-01-01T00:00:20.000000',
        }]
        cache = FakeMemcache()
        path = '/v1/a?%s' % urllib.parse.urlencode({
            'format': 'json', 'prefix': 'versioned-'})
        self.app.register(
            'GET', path, swob.HTTPOk, {},
            json.dumps(listing_body).encode('utf8'))
        path = '/v1/a?%s' % urllib.parse.urlencode({
            'format': 'json', 'prefix': self.str_to_wsgi(
                self.build_container_name('versioned-'))})
        self.app.register(
            'GET', path, swob.HTTPOk, {},
            json.dumps(versions_listing_body).encode('utf8'))
        req = Request.blank(
            '/v1/a?prefix=versioned-',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': cache})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        # bytes merged from the hidden container; count from primary
        expected = [{
            'bytes': 24,
            'count': 1,
            'name': 'versioned-cont',
            'last_modified': '1970-01-01T00:00:05.000000',
        }]
        self.assertEqual(expected, json.loads(body))

    def test_list_orphan_hidden_containers(self):
        # hidden containers whose visible counterpart was deleted
        # ("orphans") must still show up, with count 0
        listing_body = [{
            'bytes': 10,
            'count': 2,
            'name': 'alpha',
            'last_modified': '1970-01-01T00:00:05.000000',
        }, {
            'bytes': 6,
            'count': 3,
            'name': 'bravo',
            'last_modified': '1970-01-01T00:00:20.000000',
        }, {
            'bytes': 0,
            'count': 5,
            'name': 'charlie',
            'last_modified': '1970-01-01T00:00:30.000000',
        }, {
            'bytes': 0,
            'count': 8,
            'name': 'zulu',
            'last_modified': '1970-01-01T00:00:40.000000',
        }]
        # note: 'kilo' exists only as a hidden container
        versions_listing_body1 = [{
            'bytes': 24,
            'count': 8,
            'name': self.build_container_name('bravo'),
            'last_modified': '1970-01-01T00:00:20.000000',
        }, {
            'bytes': 123,
            'count': 23,
            'name': self.build_container_name('charlie'),
            'last_modified': '1970-01-01T00:00:30.000000',
        }, {
            'bytes': 13,
            'count': 30,
            'name': self.build_container_name('kilo'),
            'last_modified': '1970-01-01T00:00:35.000000',
        }, {
            'bytes': 83,
            'count': 13,
            'name': self.build_container_name('zulu'),
            'last_modified': '1970-01-01T00:00:40.000000',
        }]
        cache = FakeMemcache()
        self.app.register(
            'GET', '/v1/a', swob.HTTPOk, {},
            json.dumps(listing_body).encode('utf8'))
        params = {
            'format': 'json',
            'prefix': self.str_to_wsgi(get_reserved_name('versions')),
        }
        path = '/v1/a?%s' % urllib.parse.urlencode(params)
        self.app.register(
            'GET', path, swob.HTTPOk, {},
            json.dumps(versions_listing_body1).encode('utf8'))
        req = Request.blank(
            '/v1/a',
            environ={'REQUEST_METHOD': 'GET',
                     'swift.cache': cache})
        status, headers, body = self.call_ov(req)
        self.assertEqual(status, '200 OK')
        # bytes are summed across visible + hidden; count comes from the
        # visible container (0 for the orphaned 'kilo')
        expected = [{
            'bytes': 10,
            'count': 2,
            'name': 'alpha',
            'last_modified': '1970-01-01T00:00:05.000000',
        }, {
            'bytes': 30,
            'count': 3,
            'name': 'bravo',
            'last_modified': '1970-01-01T00:00:20.000000',
        }, {
            'bytes': 123,
            'count': 5,
            'name': 'charlie',
            'last_modified': '1970-01-01T00:00:30.000000',
        }, {
            'bytes': 13,
            'count': 0,
            'name': 'kilo',
            'last_modified': '1970-01-01T00:00:35.000000',
        }, {
            'bytes': 83,
            'count': 8,
            'name': 'zulu',
            'last_modified': '1970-01-01T00:00:40.000000',
        }]
        self.assertEqual(expected, json.loads(body))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_object_versioning.py |
# Copyright (c) 2010-2020 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
from swift.common import swob
from swift.common.middleware import etag_quoter
from swift.proxy.controllers.base import get_cache_key
from test.unit.common.middleware.helpers import FakeSwift
def set_info_cache(req, cache_data, account, container=None):
    """Prime the request's infocache with canned account/container info.

    When ``container`` is None the data is stored under the account's
    cache key; otherwise under the container's.
    """
    infocache = req.environ.setdefault('swift.infocache', {})
    infocache[get_cache_key(account, container)] = cache_data
class TestEtagQuoter(unittest.TestCase):
    """Tests for the etag_quoter middleware.

    The middleware decides whether to wrap an object's ETag in double
    quotes based on, in order of precedence: container sysmeta, account
    sysmeta, then the cluster-wide ``enable_by_default`` setting.
    """

    def get_mw(self, conf, etag='unquoted-etag', path=None):
        """Build the middleware over a FakeSwift that serves *path*
        with the given ETag (omitted entirely when ``etag`` is None)."""
        if path is None:
            path = '/v1/AUTH_acc/con/some/path/to/obj'
        app = FakeSwift()
        hdrs = {} if etag is None else {'ETag': etag}
        app.register('GET', path, swob.HTTPOk, hdrs)
        return etag_quoter.filter_factory({}, **conf)(app)

    @mock.patch('swift.common.middleware.etag_quoter.register_swift_info')
    def test_swift_info(self, mock_register):
        # /info advertises the effective enable_by_default value
        self.get_mw({})
        self.assertEqual(mock_register.mock_calls, [
            mock.call('etag_quoter', enable_by_default=False)])
        mock_register.reset_mock()
        self.get_mw({'enable_by_default': '1'})
        self.assertEqual(mock_register.mock_calls, [
            mock.call('etag_quoter', enable_by_default=True)])
        mock_register.reset_mock()
        self.get_mw({'enable_by_default': 'no'})
        self.assertEqual(mock_register.mock_calls, [
            mock.call('etag_quoter', enable_by_default=False)])

    def test_account_on_overrides_cluster_off(self):
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {'rfc-compliant-etags': '1'},
        }, 'AUTH_acc')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {},
        }, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({'enable_by_default': 'false'}))
        self.assertEqual(resp.headers['ETag'], '"unquoted-etag"')

    def test_account_off_overrides_cluster_on(self):
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {'rfc-compliant-etags': 'no'},
        }, 'AUTH_acc')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {},
        }, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({'enable_by_default': 'yes'}))
        self.assertEqual(resp.headers['ETag'], 'unquoted-etag')

    def test_container_on_overrides_cluster_off(self):
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {},
        }, 'AUTH_acc')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {'rfc-compliant-etags': 't'},
        }, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({'enable_by_default': 'false'}))
        self.assertEqual(resp.headers['ETag'], '"unquoted-etag"')

    def test_container_off_overrides_cluster_on(self):
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {},
        }, 'AUTH_acc')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {'rfc-compliant-etags': '0'},
        }, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({'enable_by_default': 'yes'}))
        self.assertEqual(resp.headers['ETag'], 'unquoted-etag')

    def test_container_on_overrides_account_off(self):
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {'rfc-compliant-etags': 'no'},
        }, 'AUTH_acc')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {'rfc-compliant-etags': 't'},
        }, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({}))
        self.assertEqual(resp.headers['ETag'], '"unquoted-etag"')

    def test_container_off_overrides_account_on(self):
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {'rfc-compliant-etags': 'yes'},
        }, 'AUTH_acc')
        set_info_cache(req, {
            'status': 200,
            'sysmeta': {'rfc-compliant-etags': 'false'},
        }, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({}))
        self.assertEqual(resp.headers['ETag'], 'unquoted-etag')

    def test_cluster_wide(self):
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({'enable_by_default': 't'}))
        self.assertEqual(resp.headers['ETag'], '"unquoted-etag"')

    def test_already_valid(self):
        # an already-quoted etag is passed through untouched
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({'enable_by_default': 't'},
                                            '"quoted-etag"'))
        self.assertEqual(resp.headers['ETag'], '"quoted-etag"')

    def test_already_weak_but_valid(self):
        # weak validators (W/"...") are also left alone
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({'enable_by_default': 't'},
                                            'W/"weak-etag"'))
        self.assertEqual(resp.headers['ETag'], 'W/"weak-etag"')

    def test_only_half_valid(self):
        # a stray leading quote is not detected; the value is wrapped anyway
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({'enable_by_default': 't'},
                                            '"weird-etag'))
        self.assertEqual(resp.headers['ETag'], '""weird-etag"')

    def test_no_etag(self):
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc', 'con')
        resp = req.get_response(self.get_mw({'enable_by_default': 't'},
                                            etag=None))
        self.assertNotIn('ETag', resp.headers)

    def test_non_swift_path(self):
        # requests outside /v1/... are never rewritten
        path = '/some/other/location/entirely'
        req = swob.Request.blank(path)
        resp = req.get_response(self.get_mw({'enable_by_default': 't'},
                                            path=path))
        self.assertEqual(resp.headers['ETag'], 'unquoted-etag')

    def test_non_object_request(self):
        # container-level responses are never rewritten
        path = '/v1/AUTH_acc/con'
        req = swob.Request.blank(path)
        resp = req.get_response(self.get_mw({'enable_by_default': 't'},
                                            path=path))
        self.assertEqual(resp.headers['ETag'], 'unquoted-etag')

    def test_no_container_info(self):
        # without usable container info the etag is left unquoted
        mw = self.get_mw({'enable_by_default': 't'})
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc')
        mw.app.register('HEAD', '/v1/AUTH_acc/con',
                        swob.HTTPServiceUnavailable, {})
        resp = req.get_response(mw)
        self.assertEqual(resp.headers['ETag'], 'unquoted-etag')

        set_info_cache(req, {'status': 404, 'sysmeta': {}}, 'AUTH_acc', 'con')
        resp = req.get_response(mw)
        self.assertEqual(resp.headers['ETag'], 'unquoted-etag')

        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc', 'con')
        resp = req.get_response(mw)
        self.assertEqual(resp.headers['ETag'], '"unquoted-etag"')

    def test_no_account_info(self):
        # without usable account info the etag is left unquoted
        mw = self.get_mw({'enable_by_default': 't'})
        req = swob.Request.blank('/v1/AUTH_acc/con/some/path/to/obj')
        mw.app.register('HEAD', '/v1/AUTH_acc',
                        swob.HTTPServiceUnavailable, {})
        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc', 'con')
        resp = req.get_response(mw)
        self.assertEqual(resp.headers['ETag'], 'unquoted-etag')

        set_info_cache(req, {'status': 404, 'sysmeta': {}}, 'AUTH_acc')
        resp = req.get_response(mw)
        self.assertEqual(resp.headers['ETag'], 'unquoted-etag')

        set_info_cache(req, {'status': 200, 'sysmeta': {}}, 'AUTH_acc')
        resp = req.get_response(mw)
        self.assertEqual(resp.headers['ETag'], '"unquoted-etag"')
| swift-master | test/unit/common/middleware/test_etag_quoter.py |
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This stuff can't live in test/unit/__init__.py due to its swob dependency.
from collections import defaultdict, namedtuple
from six.moves.urllib import parse
from swift.common import swob
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.request_helpers import is_user_meta, \
is_object_transient_sysmeta, resolve_etag_is_at_header
from swift.common.swob import HTTPNotImplemented
from swift.common.utils import split_path, md5
from test.debug_logger import debug_logger
from test.unit import FakeRing
class LeakTrackingIter(object):
    """Iterator wrapper that reports when it is fully consumed
    (``mark_read``) and when it is closed (``mark_closed``), so tests
    can detect unread or unclosed WSGI response iterators.
    """

    def __init__(self, inner_iter, mark_closed, mark_read, key):
        # a raw bytes body is treated as a single chunk
        self.inner_iter = iter((inner_iter,)
                               if isinstance(inner_iter, bytes)
                               else inner_iter)
        self.mark_closed = mark_closed
        self.mark_read = mark_read
        self.key = key

    def __iter__(self):
        return self

    def __next__(self):
        try:
            return next(self.inner_iter)
        except StopIteration:
            # fully consumed: record it, then propagate exhaustion
            self.mark_read(self.key)
            raise

    next = __next__  # for py2

    def close(self):
        self.mark_closed(self.key)
FakeSwiftCall = namedtuple('FakeSwiftCall', ['method', 'path', 'headers'])
def normalize_query_string(qs):
    """Return *qs* in canonical form: a single leading '?' followed by
    the parameters re-encoded in sorted order, or the empty string when
    there are no parameters."""
    if qs.startswith('?'):
        qs = qs[1:]
    if not qs:
        return ''
    pairs = sorted(parse.parse_qsl(qs))
    return '?' + parse.urlencode(pairs)
def normalize_path(path):
    """Return *path* with its query string (if any) normalized."""
    parts = parse.urlparse(path)
    return parts.path + normalize_query_string(parts.query)
class FakeSwift(object):
    """
    A good-enough fake Swift proxy server to use in testing middleware.

    Responses for expected requests should be registered using the ``register``
    method. Registered requests are keyed by their method and path *including
    query string*.

    Received requests are matched to registered requests with the same method
    as follows, in order of preference:

    * A received request matches a registered request if the received
      request's path, including query string, is the same as the registered
      request's path, including query string.
    * A received request matches a registered request if the received
      request's path, excluding query string, is the same as the registered
      request's path, including query string.

    A received ``HEAD`` request will be matched to a registered ``GET``,
    according to the same path preferences, if a match cannot be made to a
    registered ``HEAD`` request.

    A ``PUT`` request that matches a registered ``PUT`` request will create an
    entry in the ``uploaded`` object cache that is keyed by the received
    request's path, excluding query string. A subsequent ``GET`` or ``HEAD``
    request that does not match a registered request will match an ``uploaded``
    object based on the ``GET`` or ``HEAD`` request's path, excluding query
    string.

    A ``POST`` request whose path, excluding query string, matches an object in
    the ``uploaded`` cache will modify the metadata of the object in the
    ``uploaded`` cache. However, the ``POST`` request must first match a
    registered ``POST`` request.

    Examples:

    * received ``GET /v1/a/c/o`` will match registered ``GET /v1/a/c/o``
    * received ``GET /v1/a/c/o?x=y`` will match registered ``GET /v1/a/c/o``
    * received ``HEAD /v1/a/c/o?x=y`` will match registered ``GET /v1/a/c/o``
    * received ``GET /v1/a/c/o`` will NOT match registered
      ``GET /v1/a/c/o?x=y``
    * received ``PUT /v1/a/c/o?x=y``, if it matches a registered ``PUT``,
      will create uploaded ``/v1/a/c/o``
    * received ``POST /v1/a/c/o?x=y``, if it matches a registered ``POST``,
      will update uploaded ``/v1/a/c/o``
    """
    ALLOWED_METHODS = [
        'PUT', 'POST', 'DELETE', 'GET', 'HEAD', 'OPTIONS', 'REPLICATE',
        'SSYNC', 'UPDATE']
    container_existence_skip_cache = 0.0
    account_existence_skip_cache = 0.0

    def __init__(self):
        self._calls = []
        self.req_bodies = []
        self._unclosed_req_keys = defaultdict(int)
        self._unread_req_paths = defaultdict(int)
        self.req_method_paths = []
        self.swift_sources = []
        self.txn_ids = []
        self.uploaded = {}
        # mapping of (method, path) --> (response class, headers, body)
        self._responses = {}
        self.logger = debug_logger('fake-swift')
        self.account_ring = FakeRing()
        self.container_ring = FakeRing()
        self.get_object_ring = lambda policy_index: FakeRing()
        self.auto_create_account_prefix = '.'
        self.backend_user_agent = "fake_swift"
        self._pipeline_final_app = self
        # some tests want to opt in to mimicking the
        # X-Backend-Ignore-Range-If-Metadata-Present header behavior,
        # but default to old-swift behavior
        self.can_ignore_range = False

    def _find_response(self, method, path):
        """Return (and, for list registrations, consume) the response
        registered for (method, path); raises KeyError if none."""
        path = normalize_path(path)
        resp = self._responses[(method, path)]
        if isinstance(resp, list):
            try:
                resp = resp.pop(0)
            except IndexError:
                raise IndexError("Didn't find any more %r "
                                 "in allowed responses" % (
                                     (method, path),))
        return resp

    def _select_response(self, env, method, path):
        """Pick the best registered (or uploaded) response for a request.

        Falls back from exact (method, path-with-query) to query-less
        path, and from HEAD to GET, per the class docstring.
        """
        # in some cases we can borrow different registered response
        # ... the order is brittle and significant
        preferences = [(method, path)]
        if env.get('QUERY_STRING'):
            # we can always reuse response w/o query string
            preferences.append((method, env['PATH_INFO']))
        if method == 'HEAD':
            # any path suitable for GET always works for HEAD
            # N.B. list(preferences) to avoid iter+modify/sigkill
            preferences.extend(('GET', p) for _, p in list(preferences))
        for m, p in preferences:
            try:
                resp_class, headers, body = self._find_response(m, p)
            except KeyError:
                pass
            else:
                break
        else:
            # special case for re-reading an uploaded file
            # ... uploaded is only objects and always raw path
            if method in ('GET', 'HEAD') and env['PATH_INFO'] in self.uploaded:
                resp_class = swob.HTTPOk
                headers, body = self.uploaded[env['PATH_INFO']]
            else:
                raise KeyError("Didn't find %r in allowed responses" % (
                    (method, path),))

        if method == 'HEAD':
            # HEAD resp never has body
            body = None

        return resp_class, HeaderKeyDict(headers), body

    def __call__(self, env, start_response):
        if self.can_ignore_range:
            # we might pop off the Range header
            env = dict(env)
        method = env['REQUEST_METHOD']
        if method not in self.ALLOWED_METHODS:
            raise HTTPNotImplemented()

        path = env['PATH_INFO']
        _, acc, cont, obj = split_path(env['PATH_INFO'], 0, 4,
                                       rest_with_last=True)
        if env.get('QUERY_STRING'):
            path += '?' + env['QUERY_STRING']
        path = normalize_path(path)

        if 'swift.authorize' in env:
            resp = env['swift.authorize'](swob.Request(env))
            if resp:
                return resp(env, start_response)

        req = swob.Request(env)
        self.swift_sources.append(env.get('swift.source'))
        self.txn_ids.append(env.get('swift.trans_id'))

        resp_class, headers, body = self._select_response(env, method, path)

        ignore_range_meta = req.headers.get(
            'x-backend-ignore-range-if-metadata-present')
        if self.can_ignore_range and ignore_range_meta and set(
                ignore_range_meta.split(',')).intersection(headers.keys()):
            req.headers.pop('range', None)

        # Update req.headers before capturing the request
        if method in ('GET', 'HEAD') and obj:
            req.headers['X-Backend-Storage-Policy-Index'] = headers.get(
                'x-backend-storage-policy-index', '2')

        # Capture the request before reading the body, in case the iter raises
        # an exception.
        # note: tests may assume this copy of req_headers is case insensitive
        # so we deliberately use a HeaderKeyDict
        req_headers_copy = HeaderKeyDict(req.headers)
        self._calls.append(
            FakeSwiftCall(method, path, req_headers_copy))

        req_body = None  # generally, we don't care and let eventlet discard()
        if (cont and not obj and method == 'UPDATE') or (
                obj and method == 'PUT'):
            req_body = b''.join(iter(env['wsgi.input'].read, b''))

        # simulate object PUT
        if method == 'PUT' and obj:
            if 'swift.callback.update_footers' in env:
                footers = HeaderKeyDict()
                env['swift.callback.update_footers'](footers)
                req.headers.update(footers)
                req_headers_copy.update(footers)
            etag = md5(req_body, usedforsecurity=False).hexdigest()
            headers.setdefault('Etag', etag)
            headers.setdefault('Content-Length', len(req_body))

            # keep it for subsequent GET requests later
            resp_headers = dict(req.headers)
            if "CONTENT_TYPE" in env:
                resp_headers['Content-Type'] = env["CONTENT_TYPE"]
            self.uploaded[env['PATH_INFO']] = (resp_headers, req_body)

        # simulate object POST
        elif method == 'POST' and obj:
            metadata, data = self.uploaded.get(env['PATH_INFO'], ({}, None))
            # select items to keep from existing...
            new_metadata = dict(
                (k, v) for k, v in metadata.items()
                if (not is_user_meta('object', k) and not
                    is_object_transient_sysmeta(k)))
            # apply from new
            # note: k.lower() must be *called* here -- comparing the bound
            # method itself to 'content-type' is always False and would
            # silently drop Content-Type updates from the POST
            new_metadata.update(
                dict((k, v) for k, v in req.headers.items()
                     if (is_user_meta('object', k) or
                         is_object_transient_sysmeta(k) or
                         k.lower() == 'content-type')))
            self.uploaded[env['PATH_INFO']] = new_metadata, data

        self.req_bodies.append(req_body)

        # Apply conditional etag overrides
        conditional_etag = resolve_etag_is_at_header(req, headers)

        # range requests ought to work, hence conditional_response=True
        if isinstance(body, list):
            resp = resp_class(
                req=req, headers=headers, app_iter=body,
                conditional_response=req.method in ('GET', 'HEAD'),
                conditional_etag=conditional_etag)
        else:
            resp = resp_class(
                req=req, headers=headers, body=body,
                conditional_response=req.method in ('GET', 'HEAD'),
                conditional_etag=conditional_etag)
        wsgi_iter = resp(env, start_response)
        self.mark_opened((method, path))
        return LeakTrackingIter(wsgi_iter, self.mark_closed,
                                self.mark_read, (method, path))

    def mark_opened(self, key):
        self._unclosed_req_keys[key] += 1
        self._unread_req_paths[key] += 1

    def mark_closed(self, key):
        self._unclosed_req_keys[key] -= 1

    def mark_read(self, key):
        self._unread_req_paths[key] -= 1

    @property
    def unclosed_requests(self):
        return {key: count
                for key, count in self._unclosed_req_keys.items()
                if count > 0}

    @property
    def unread_requests(self):
        return {path: count
                for path, count in self._unread_req_paths.items()
                if count > 0}

    @property
    def calls(self):
        return [(method, path) for method, path, headers in self._calls]

    @property
    def headers(self):
        return [headers for method, path, headers in self._calls]

    @property
    def calls_with_headers(self):
        return self._calls

    @property
    def call_count(self):
        return len(self._calls)

    def register(self, method, path, response_class, headers, body=b''):
        """Register a single canned response for (method, path)."""
        path = normalize_path(path)
        self._responses[(method, path)] = (response_class, headers, body)

    def register_responses(self, method, path, responses):
        """Register a sequence of canned responses, consumed in order."""
        path = normalize_path(path)
        self._responses[(method, path)] = list(responses)
class FakeAppThatExcepts(object):
    """WSGI app that always raises, for exercising middleware
    error-handling paths."""

    MESSAGE = b"We take exception to that!"

    def __init__(self, exception_class=Exception):
        self.exception_class = exception_class

    def __call__(self, env, start_response):
        err = self.exception_class(self.MESSAGE)
        raise err
| swift-master | test/unit/common/middleware/helpers.py |
# Copyright (c) 2010 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest
import mock
from six.moves.urllib.parse import urlparse
from swift.common.swob import Request, Response, HTTPUnauthorized
from swift.common.middleware import staticweb
# Canned container_info fixtures keyed by container name, consumed by
# mock_get_container_info() below. Each value may carry an HTTP 'status'
# and/or a 'meta' dict of staticweb settings (web-index, web-listings,
# web-error, web-listings-css, web-directory-type, ...).
meta_map = {
    'c1': {'status': 401},
    'c2': {},
    'c3': {'meta': {'web-index': 'index.html',
                    'web-listings': 't'}},
    'c3b': {'meta': {'web-index': 'index.html',
                     'web-listings': 't'}},
    'c4': {'meta': {'web-index': 'index.html',
                    'web-error': 'error.html',
                    'web-listings': 't',
                    'web-listings-css': 'listing.css',
                    'web-directory-type': 'text/dir'}},
    'c5': {'meta': {'web-index': 'index.html',
                    'web-error': 'error.html',
                    'web-listings': 't',
                    'web-listings-css': 'listing.css'}},
    'c6': {'meta': {'web-listings': 't',
                    'web-error': 'error.html'}},
    'c6b': {'meta': {'web-listings': 't',
                     'web-listings-label': 'foo'}},
    'c7': {'meta': {'web-listings': 'f',
                    'web-error': 'error.html'}},
    'c8': {'meta': {'web-error': 'error.html',
                    'web-listings': 't',
                    'web-listings-css':
                    'http://localhost/stylesheets/listing.css'}},
    'c9': {'meta': {'web-error': 'error.html',
                    'web-listings': 't',
                    'web-listings-css':
                    '/absolute/listing.css'}},
    'c10': {'meta': {'web-listings': 't'}},
    'c11': {'meta': {'web-index': 'index.html'}},
    'c11a': {'meta': {'web-index': 'index.html',
                      'web-directory-type': 'text/directory'}},
    'c12': {'meta': {'web-index': 'index.html',
                     'web-error': 'error.html'}},
    'c13': {'meta': {'web-listings': 'f',
                     'web-listings-css': 'listing.css'}},
    'c14': {'meta': {'web-listings': 't'}},
}
def mock_get_container_info(env, app, swift_source='SW'):
    """Return canned container info for the container named in the
    request path (fourth path segment).

    Looks the container up in ``meta_map`` and fills in default
    'status' and 'read_acl' values. A shallow copy is returned so the
    ``setdefault`` calls do not write the defaults back into the shared
    ``meta_map`` fixture (the original mutated module-level state on
    first use).
    """
    container = env['PATH_INFO'].rstrip('/').split('/')[3]
    container_info = dict(meta_map[container])
    container_info.setdefault('status', 200)
    container_info.setdefault('read_acl', '.r:*')
    return container_info
class FakeApp(object):
    """Fake proxy app mapping known test paths to canned responses.

    Any path not handled below raises, so an unexpected subrequest made
    by the staticweb middleware fails the test loudly rather than being
    silently absorbed.
    """

    def __init__(self, status_headers_body_iter=None):
        # Count of backend subrequests; some tests assert staticweb only
        # issues a single request when possible.
        self.calls = 0
        self.get_c4_called = False

    def __call__(self, env, start_response):
        """Dispatch on PATH_INFO to a canned response or listing()."""
        self.calls += 1
        if 'swift.authorize' in env:
            # Honor the authorize hook installed by FakeAuthFilter, as a
            # real backend request path would.
            resp = env['swift.authorize'](Request(env))
            if resp:
                return resp(env, start_response)
        if env['PATH_INFO'] == '/':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1':
            return Response(
                status='412 Precondition Failed')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a':
            return Response(status='401 Unauthorized')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c1':
            return Response(status='401 Unauthorized')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c2':
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c2/one.txt':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3':
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/index.html':
            return Response(status='200 Ok', body='''
<html>
    <body>
        <h1>Test main index.html file.</h1>
        <p>Visit <a href="subdir">subdir</a>.</p>
        <p>Don't visit <a href="subdir2/">subdir2</a> because it doesn't really
           exist.</p>
        <p>Visit <a href="subdir3">subdir3</a>.</p>
        <p>Visit <a href="subdir3/subsubdir">subdir3/subsubdir</a>.</p>
    </body>
</html>
            ''')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3b':
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3b/index.html':
            # Empty-body 204 index, exercised by test_container3bindexhtml.
            resp = Response(status='204 No Content')
            resp.app_iter = iter([])
            return resp(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdir':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdir/':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdir/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir/':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir/index.html':
            return Response(status='200 Ok', body='index file')(env,
                                                                start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdirx/':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdirx/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdiry/':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdiry/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdirz':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/subdirz/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/unknown':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c3/unknown/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c4':
            self.get_c4_called = True
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c4/one.txt':
            return Response(
                status='200 Ok',
                headers={'x-object-meta-test': 'value'},
                body='1')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c4/two.txt':
            return Response(status='503 Service Unavailable')(env,
                                                              start_response)
        elif env['PATH_INFO'] == '/v1/a/c4/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c4/subdir/':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c4/subdir/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c4/unknown':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c4/unknown/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c4/404error.html':
            # Custom error doc; body long enough that browsers show it
            # instead of their own 404 page.
            return Response(status='200 Ok', body='''
<html>
    <body style="background: #000000; color: #ffaaaa">
        <p>Chrome's 404 fancy-page sucks.</p>
    </body>
</html>
            '''.strip())(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c5':
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c5/index.html':
            return Response(status='503 Service Unavailable')(env,
                                                              start_response)
        elif env['PATH_INFO'] == '/v1/a/c5/503error.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c5/unknown':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c5/unknown/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c5/404error.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c6':
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c6b':
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c6/subdir':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c6/401error.html':
            return Response(status='200 Ok', body='''
<html>
    <body style="background: #000000; color: #ffaaaa">
        <p>Hey, you're not authorized to see this!</p>
    </body>
</html>
            '''.strip())(env, start_response)
        elif env['PATH_INFO'] in ('/v1/a/c7', '/v1/a/c7/'):
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c7/404error.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c7/401error.html':
            return Response(status='200 Ok', body='''
<html>
    <body style="background: #000000; color: #ffaaaa">
        <p>Hey, you're not authorized to see this!</p>
    </body>
</html>
            '''.strip())(env, start_response)
        elif env['PATH_INFO'] in ('/v1/a/c8', '/v1/a/c8/'):
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c8/subdir/':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] in ('/v1/a/c9', '/v1/a/c9/'):
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c9/subdir/':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] in ('/v1/a/c10', '/v1/a/c10/'):
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c10/\xe2\x98\x83/':
            # UTF-8 encoded snowman path (non-ASCII listing test).
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c10/\xe2\x98\x83/\xe2\x98\x83/':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] in ('/v1/a/c11', '/v1/a/c11/'):
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11/subdir/':
            # Directory marker object: content type matches the default
            # 'application/directory'.
            return Response(status='200 Ok', headers={
                'Content-Type': 'application/directory'})(
                    env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11/subdir/index.html':
            return Response(status='200 Ok', body='''
<html>
    <body>
        <h2>c11 subdir index</h2>
    </body>
</html>
            '''.strip())(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11/subdir2/':
            return Response(status='200 Ok', headers={'Content-Type':
                            'application/directory'})(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11/subdir2/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] in ('/v1/a/c11a', '/v1/a/c11a/'):
            return self.listing(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11a/subdir/':
            # c11a sets web-directory-type: text/directory; these three
            # subdirs exercise matching, alternate and invalid types.
            return Response(status='200 Ok', headers={'Content-Type':
                            'text/directory'})(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11a/subdir/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11a/subdir2/':
            return Response(status='200 Ok', headers={'Content-Type':
                            'application/directory'})(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11a/subdir2/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11a/subdir3/':
            return Response(status='200 Ok', headers={'Content-Type':
                            'not_a/directory'})(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c11a/subdir3/index.html':
            return Response(status='404 Not Found')(env, start_response)
        elif env['PATH_INFO'] == '/v1/a/c12/index.html':
            return Response(status='200 Ok', body='index file')(env,
                                                                start_response)
        elif env['PATH_INFO'] == '/v1/a/c12/200error.html':
            return Response(status='200 Ok', body='error file')(env,
                                                                start_response)
        elif env['PATH_INFO'] == '/v1/a/c14':
            return self.listing(env, start_response)
        else:
            raise Exception('Unknown path %r' % env['PATH_INFO'])

    def listing(self, env, start_response):
        """Return a canned JSON container listing.

        The branch taken depends on PATH_INFO and the exact QUERY_STRING
        staticweb is expected to generate (delimiter/prefix subrequests).
        """
        headers = {'x-container-read': '.r:*'}
        if ((env['PATH_INFO'] in (
                '/v1/a/c3', '/v1/a/c4', '/v1/a/c8', '/v1/a/c9'))
                and (env['QUERY_STRING'] ==
                     'delimiter=/&prefix=subdir/')):
            headers.update({'X-Container-Object-Count': '12',
                            'X-Container-Bytes-Used': '73763',
                            'X-Container-Read': '.r:*',
                            'Content-Type': 'application/json; charset=utf-8'})
            body = '''
                [{"name":"subdir/1.txt",
                  "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
                  "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.709100"},
                 {"name":"subdir/2.txt",
                  "hash":"c85c1dcd19cf5cbac84e6043c31bb63e", "bytes":20,
                  "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.734140"},
                 {"subdir":"subdir3/subsubdir/"}]
            '''.strip()
        elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
                'delimiter=/&prefix=subdiry/':
            headers.update({'X-Container-Object-Count': '12',
                            'X-Container-Bytes-Used': '73763',
                            'X-Container-Read': '.r:*',
                            'Content-Type': 'application/json; charset=utf-8'})
            body = '[]'
        elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
                'limit=1&delimiter=/&prefix=subdirz/':
            headers.update({'X-Container-Object-Count': '12',
                            'X-Container-Bytes-Used': '73763',
                            'X-Container-Read': '.r:*',
                            'Content-Type': 'application/json; charset=utf-8'})
            body = '''
                [{"name":"subdirz/1.txt",
                  "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
                  "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.709100"}]
            '''.strip()
        elif env['PATH_INFO'] == '/v1/a/c6' and env['QUERY_STRING'] == \
                'limit=1&delimiter=/&prefix=subdir/':
            headers.update({'X-Container-Object-Count': '12',
                            'X-Container-Bytes-Used': '73763',
                            'X-Container-Read': '.r:*',
                            'X-Container-Web-Listings': 't',
                            'Content-Type': 'application/json; charset=utf-8'})
            body = '''
                [{"name":"subdir/1.txt",
                  "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
                  "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.709100"}]
            '''.strip()
        elif env['PATH_INFO'] == '/v1/a/c10' and (
                env['QUERY_STRING'] ==
                'delimiter=/&prefix=%E2%98%83/' or
                env['QUERY_STRING'] ==
                'delimiter=/&prefix=%E2%98%83/%E2%98%83/'):
            headers.update({'X-Container-Object-Count': '12',
                            'X-Container-Bytes-Used': '73763',
                            'X-Container-Read': '.r:*',
                            'X-Container-Web-Listings': 't',
                            'Content-Type': 'application/json; charset=utf-8'})
            body = '''
                [{"name":"\u2603/\u2603/one.txt",
                  "hash":"73f1dd69bacbf0847cc9cffa3c6b23a1", "bytes":22,
                  "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.709100"},
                 {"subdir":"\u2603/\u2603/"}]
            '''.strip()
        elif env['PATH_INFO'] == '/v1/a/c14' and env['QUERY_STRING'] == \
                'delimiter=/':
            headers.update({'X-Container-Object-Count': '0',
                            'X-Container-Bytes-Used': '0',
                            'X-Container-Read': '.r:*',
                            'Content-Type': 'application/json; charset=utf-8'})
            body = '[]'
        elif 'prefix=' in env['QUERY_STRING']:
            # Any other prefixed subrequest: empty listing.
            return Response(status='204 No Content')(env, start_response)
        else:
            # Full (unprefixed) container listing.
            headers.update({'X-Container-Object-Count': '12',
                            'X-Container-Bytes-Used': '73763',
                            'Content-Type': 'application/json; charset=utf-8'})
            body = '''
                [{"name":"401error.html",
                  "hash":"893f8d80692a4d3875b45be8f152ad18", "bytes":110,
                  "content_type":"text/html",
                  "last_modified":"2011-03-24T04:27:52.713710"},
                 {"name":"404error.html",
                  "hash":"62dcec9c34ed2b347d94e6ca707aff8c", "bytes":130,
                  "content_type":"text/html",
                  "last_modified":"2011-03-24T04:27:52.720850"},
                 {"name":"index.html",
                  "hash":"8b469f2ca117668a5131fe9ee0815421", "bytes":347,
                  "content_type":"text/html",
                  "last_modified":"2011-03-24T04:27:52.683590"},
                 {"name":"listing.css",
                  "hash":"7eab5d169f3fcd06a08c130fa10c5236", "bytes":17,
                  "content_type":"text/css",
                  "last_modified":"2011-03-24T04:27:52.721610"},
                 {"name":"one.txt", "hash":"73f1dd69bacbf0847cc9cffa3c6b23a1",
                  "bytes":22, "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.722270"},
                 {"name":"subdir/1.txt",
                  "hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
                  "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.709100"},
                 {"name":"subdir/2.txt",
                  "hash":"c85c1dcd19cf5cbac84e6043c31bb63e", "bytes":20,
                  "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.734140"},
                 {"name":"subdir/\u2603.txt",
                  "hash":"7337d028c093130898d937c319cc9865", "bytes":72981,
                  "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.735460"},
                 {"name":"subdir2", "hash":"d41d8cd98f00b204e9800998ecf8427e",
                  "bytes":0, "content_type":"text/directory",
                  "last_modified":"2011-03-24T04:27:52.676690"},
                 {"name":"subdir3/subsubdir/index.html",
                  "hash":"04eea67110f883b1a5c97eb44ccad08c", "bytes":72,
                  "content_type":"text/html",
                  "last_modified":"2011-03-24T04:27:52.751260"},
                 {"name":"two.txt", "hash":"10abb84c63a5cff379fdfd6385918833",
                  "bytes":22, "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.825110"},
                 {"name":"\u2603/\u2603/one.txt",
                  "hash":"73f1dd69bacbf0847cc9cffa3c6b23a1", "bytes":22,
                  "content_type":"text/plain",
                  "last_modified":"2011-03-24T04:27:52.935560"}]
            '''.strip()
        return Response(status='200 Ok', headers=headers,
                        body=body)(env, start_response)
class FakeAuthFilter(object):
    """Minimal auth middleware stand-in.

    Installs its :meth:`authorize` callback into the WSGI environment so
    downstream code exercises the ``swift.authorize`` hook.  Depending on
    the flags, container-listing requests (three path segments) and/or
    object requests (more than three segments) are rejected with 401.
    """

    def __init__(self, app, deny_objects=False, deny_listing=False):
        self.app = app
        self.deny_objects = deny_objects
        self.deny_listing = deny_listing

    def authorize(self, req):
        # Segment count after stripping slashes:
        # /v1/a/c -> 3 (listing), /v1/a/c/o -> 4+ (object).
        depth = len(req.path.strip('/').split('/'))
        denied = ((self.deny_objects and depth > 3) or
                  (self.deny_listing and depth == 3))
        if denied:
            return HTTPUnauthorized()
        return None

    def __call__(self, env, start_response):
        env['swift.authorize'] = self.authorize
        return self.app(env, start_response)
class TestStaticWeb(unittest.TestCase):
    """End-to-end tests of the staticweb middleware over FakeApp.

    ``staticweb.get_container_info`` is monkey-patched for the duration of
    each test so container metadata comes from the module-level
    ``meta_map`` fixture rather than a real backend.
    """

    def setUp(self):
        self.app = FakeApp()
        self.test_staticweb = FakeAuthFilter(
            staticweb.filter_factory({})(self.app))
        self._orig_get_container_info = staticweb.get_container_info
        staticweb.get_container_info = mock_get_container_info

    def tearDown(self):
        # Restore the real get_container_info patched out in setUp.
        staticweb.get_container_info = self._orig_get_container_info

    def test_app_set(self):
        app = FakeApp()
        sw = staticweb.filter_factory({})(app)
        self.assertEqual(sw.app, app)

    def test_conf_set(self):
        conf = {'blah': 1}
        sw = staticweb.filter_factory(conf)(FakeApp())
        self.assertEqual(sw.conf, conf)

    def test_root(self):
        resp = Request.blank('/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)

    def test_version(self):
        resp = Request.blank('/v1').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 412)

    def test_account(self):
        resp = Request.blank('/v1/a').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 401)

    def test_container1(self):
        # c1 has status 401 in meta_map; staticweb passes it through.
        resp = Request.blank('/v1/a/c1').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 401)

    def test_container1_web_mode_explicitly_off(self):
        resp = Request.blank('/v1/a/c1',
                             headers={'x-web-mode': 'false'}).get_response(
                                 self.test_staticweb)
        self.assertEqual(resp.status_int, 401)

    def test_container1_web_mode_explicitly_on(self):
        resp = Request.blank('/v1/a/c1',
                             headers={'x-web-mode': 'true'}).get_response(
                                 self.test_staticweb)
        self.assertEqual(resp.status_int, 404)

    def test_container2(self):
        # c2 has no web-* metadata: the raw JSON listing comes through.
        resp = Request.blank('/v1/a/c2').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(len(json.loads(resp.body)),
                         int(resp.headers['x-container-object-count']))

    def test_container2_web_mode_explicitly_off(self):
        resp = Request.blank(
            '/v1/a/c2',
            headers={'x-web-mode': 'false'}).get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(len(json.loads(resp.body)),
                         int(resp.headers['x-container-object-count']))

    def test_container2_web_mode_explicitly_on(self):
        resp = Request.blank(
            '/v1/a/c2',
            headers={'x-web-mode': 'true'}).get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)

    def test_container2onetxt(self):
        resp = Request.blank(
            '/v1/a/c2/one.txt').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)

    def test_container2json(self):
        resp = Request.blank(
            '/v1/a/c2').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(len(json.loads(resp.body)),
                         int(resp.headers['x-container-object-count']))

    def test_container2json_web_mode_explicitly_off(self):
        resp = Request.blank(
            '/v1/a/c2',
            headers={'x-web-mode': 'false'}).get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(len(json.loads(resp.body)),
                         int(resp.headers['x-container-object-count']))

    def test_container2json_web_mode_explicitly_on(self):
        resp = Request.blank(
            '/v1/a/c2',
            headers={'x-web-mode': 'true'}).get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)

    def test_container3(self):
        # Container request without trailing slash redirects.
        resp = Request.blank('/v1/a/c3').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 301)
        self.assertEqual(resp.headers['location'],
                         'http://localhost/v1/a/c3/')

    def test_container3indexhtml(self):
        resp = Request.blank('/v1/a/c3/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Test main index.html file.', resp.body)

    def test_container3subsubdir(self):
        resp = Request.blank(
            '/v1/a/c3/subdir3/subsubdir').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 301)

    def test_container3subsubdircontents(self):
        resp = Request.blank(
            '/v1/a/c3/subdir3/subsubdir/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body, b'index file')

    def test_container3subdir(self):
        # No web-listings-css on c3: inline <style>, no <link>.
        resp = Request.blank(
            '/v1/a/c3/subdir/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c3/subdir/', resp.body)
        self.assertIn(b'</style>', resp.body)
        self.assertNotIn(b'<link', resp.body)
        self.assertNotIn(b'listing.css', resp.body)

    def test_container3subdirx(self):
        resp = Request.blank(
            '/v1/a/c3/subdirx/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)

    def test_container3subdiry(self):
        resp = Request.blank(
            '/v1/a/c3/subdiry/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)

    def test_container3subdirz(self):
        resp = Request.blank(
            '/v1/a/c3/subdirz').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 301)

    def test_container3unknown(self):
        # c3 has no web-error, so no custom 404 body.
        resp = Request.blank(
            '/v1/a/c3/unknown').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn(b"Chrome's 404 fancy-page sucks.", resp.body)

    def test_container3bindexhtml(self):
        resp = Request.blank('/v1/a/c3b/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.body, b'')

    def test_container4indexhtml(self):
        resp = Request.blank('/v1/a/c4/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c4/', resp.body)
        self.assertIn(b'href="listing.css"', resp.body)

    def test_container4indexhtmlauthed(self):
        resp = Request.blank('/v1/a/c4').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 301)
        # Authenticated requests bypass web mode unless x-web-mode asks
        # for it explicitly.
        resp = Request.blank(
            '/v1/a/c4',
            environ={'REMOTE_USER': 'authed'}).get_response(
                self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        resp = Request.blank(
            '/v1/a/c4', headers={'x-web-mode': 't'},
            environ={'REMOTE_USER': 'authed'}).get_response(
                self.test_staticweb)
        self.assertEqual(resp.status_int, 301)

    def test_container4unknown(self):
        # c4 defines web-error, so the custom 404 body is served.
        resp = Request.blank(
            '/v1/a/c4/unknown').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)
        self.assertIn(b"Chrome's 404 fancy-page sucks.", resp.body)

    def test_container4subdir(self):
        resp = Request.blank(
            '/v1/a/c4/subdir/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c4/subdir/', resp.body)
        self.assertNotIn(b'</style>', resp.body)
        self.assertIn(b'<link', resp.body)
        self.assertIn(b'href="../listing.css"', resp.body)
        self.assertEqual(resp.headers['content-type'],
                         'text/html; charset=UTF-8')

    def test_container4onetxt(self):
        resp = Request.blank(
            '/v1/a/c4/one.txt').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)

    def test_container4twotxt(self):
        resp = Request.blank(
            '/v1/a/c4/two.txt').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 503)

    def test_container5indexhtml(self):
        resp = Request.blank('/v1/a/c5/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 503)

    def test_container5unknown(self):
        # c5's 404error.html itself 404s, so no custom body.
        resp = Request.blank(
            '/v1/a/c5/unknown').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn(b"Chrome's 404 fancy-page sucks.", resp.body)

    def test_container6subdir(self):
        resp = Request.blank(
            '/v1/a/c6/subdir').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 301)

    def test_container6listing(self):
        # container6 has web-listings = t, web-error=error.html
        resp = Request.blank('/v1/a/c6/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)

        # expect custom 401 if request is not auth'd for listing but is auth'd
        # to GET objects
        test_staticweb = FakeAuthFilter(
            staticweb.filter_factory({})(self.app), deny_listing=True)
        resp = Request.blank('/v1/a/c6/').get_response(test_staticweb)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b"Hey, you're not authorized to see this!", resp.body)

        # expect default 401 if request is not auth'd for listing or object GET
        test_staticweb = FakeAuthFilter(
            staticweb.filter_factory({})(self.app), deny_listing=True,
            deny_objects=True)
        resp = Request.blank('/v1/a/c6/').get_response(test_staticweb)
        self.assertEqual(resp.status_int, 401)
        self.assertNotIn(b"Hey, you're not authorized to see this!", resp.body)

    def test_container6blisting(self):
        # web-listings-label replaces the request path in the title.
        label = 'Listing of {0}/'.format(
            meta_map['c6b']['meta']['web-listings-label'])
        resp = Request.blank('/v1/a/c6b/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(label.encode('utf-8'), resp.body)

    def test_container7listing(self):
        # container7 has web-listings = f, web-error=error.html
        resp = Request.blank('/v1/a/c7/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 404)
        self.assertIn(b"Web Listing Disabled", resp.body)

        # expect 301 if auth'd but no trailing '/'
        resp = Request.blank('/v1/a/c7').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 301)

        # expect default 401 if request is not auth'd and no trailing '/'
        test_staticweb = FakeAuthFilter(
            staticweb.filter_factory({})(self.app), deny_listing=True,
            deny_objects=True)
        resp = Request.blank('/v1/a/c7').get_response(test_staticweb)
        self.assertEqual(resp.status_int, 401)
        self.assertNotIn(b"Hey, you're not authorized to see this!", resp.body)

        # expect custom 401 if request is not auth'd for listing
        test_staticweb = FakeAuthFilter(
            staticweb.filter_factory({})(self.app), deny_listing=True)
        resp = Request.blank('/v1/a/c7/').get_response(test_staticweb)
        self.assertEqual(resp.status_int, 401)
        self.assertIn(b"Hey, you're not authorized to see this!", resp.body)

        # expect default 401 if request is not auth'd for listing or object GET
        test_staticweb = FakeAuthFilter(
            staticweb.filter_factory({})(self.app), deny_listing=True,
            deny_objects=True)
        resp = Request.blank('/v1/a/c7/').get_response(test_staticweb)
        self.assertEqual(resp.status_int, 401)
        self.assertNotIn(b"Hey, you're not authorized to see this!", resp.body)

    def test_container8listingcss(self):
        resp = Request.blank(
            '/v1/a/c8/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c8/', resp.body)
        self.assertIn(b'<link', resp.body)
        self.assertIn(b'href="http://localhost/stylesheets/listing.css"',
                      resp.body)

    def test_container8subdirlistingcss(self):
        resp = Request.blank(
            '/v1/a/c8/subdir/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c8/subdir/', resp.body)
        self.assertIn(b'<link', resp.body)
        self.assertIn(b'href="http://localhost/stylesheets/listing.css"',
                      resp.body)

    def test_container9listingcss(self):
        resp = Request.blank(
            '/v1/a/c9/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c9/', resp.body)
        self.assertIn(b'<link', resp.body)
        self.assertIn(b'href="/absolute/listing.css"', resp.body)

    def test_container9subdirlistingcss(self):
        resp = Request.blank(
            '/v1/a/c9/subdir/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c9/subdir/', resp.body)
        self.assertIn(b'<link', resp.body)
        self.assertIn(b'href="/absolute/listing.css"', resp.body)

    def test_container10unicodesubdirlisting(self):
        # Paths contain UTF-8 encoded snowman characters.
        resp = Request.blank(
            '/v1/a/c10/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c10/', resp.body)
        resp = Request.blank(
            '/v1/a/c10/\xe2\x98\x83/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c10/\xe2\x98\x83/', resp.body)
        resp = Request.blank(
            '/v1/a/c10/\xe2\x98\x83/\xe2\x98\x83/'
        ).get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(
            b'Listing of /v1/a/c10/\xe2\x98\x83/\xe2\x98\x83/', resp.body)

    def test_container11subdirmarkerobjectindex(self):
        resp = Request.blank('/v1/a/c11/subdir/').get_response(
            self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'<h2>c11 subdir index</h2>', resp.body)

    def test_container11subdirmarkermatchdirtype(self):
        resp = Request.blank('/v1/a/c11a/subdir/').get_response(
            self.test_staticweb)
        self.assertEqual(resp.status_int, 404)
        self.assertIn(b'Index File Not Found', resp.body)

    def test_container11subdirmarkeraltdirtype(self):
        resp = Request.blank('/v1/a/c11a/subdir2/').get_response(
            self.test_staticweb)
        self.assertEqual(resp.status_int, 200)

    def test_container11subdirmarkerinvaliddirtype(self):
        resp = Request.blank('/v1/a/c11a/subdir3/').get_response(
            self.test_staticweb)
        self.assertEqual(resp.status_int, 200)

    def test_container12unredirectedrequest(self):
        resp = Request.blank('/v1/a/c12/').get_response(
            self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'index file', resp.body)

    def test_container13empty(self):
        # NOTE(review): name says 13 but this exercises c14's empty
        # listing; presumably historical drift.
        resp = Request.blank(
            '/v1/a/c14/').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertIn(b'Listing of /v1/a/c14/', resp.body)
        self.assertIn(b'</style>', resp.body)
        self.assertNotIn(b'<link', resp.body)
        self.assertNotIn(b'listing.css', resp.body)
        self.assertNotIn(b'<td', resp.body)

    def test_container_404_has_css(self):
        resp = Request.blank('/v1/a/c13/').get_response(
            self.test_staticweb)
        self.assertEqual(resp.status_int, 404)
        self.assertIn(b'listing.css', resp.body)

    def test_container_404_has_no_css(self):
        resp = Request.blank('/v1/a/c7/').get_response(
            self.test_staticweb)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn(b'listing.css', resp.body)
        self.assertIn(b'<style', resp.body)

    def test_subrequest_once_if_possible(self):
        resp = Request.blank(
            '/v1/a/c4/one.txt').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.headers['x-object-meta-test'], 'value')
        self.assertEqual(resp.body, b'1')
        self.assertEqual(self.app.calls, 1)

    def test_no_auth_middleware(self):
        resp = Request.blank('/v1/a/c3').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 301)
        # Test without an authentication middleware before staticweb
        # This is no longer handled by staticweb middleware, thus not returning
        # a 301 redirect
        self.test_staticweb = staticweb.filter_factory({})(self.app)
        resp = Request.blank('/v1/a/c3').get_response(self.test_staticweb)
        self.assertEqual(resp.status_int, 200)

    def test_subrequest_not_override_auth(self):
        app_call = \
            'swift.common.middleware.staticweb._StaticWebContext._app_call'
        orig_app_call = staticweb._StaticWebContext._app_call
        _fail = self.fail

        def hook_app_call(self, env):
            if 'swift.authorize_override' in env:
                _fail('staticweb must not create authorize info by itself')
            return orig_app_call(self, env)

        with mock.patch(app_call, hook_app_call):
            # testing for _listing container
            resp = Request.blank('/v1/a/c4/').get_response(self.test_staticweb)
            self.assertEqual(resp.status_int, 200)  # sanity
            # testing for _listing object subdir
            resp = Request.blank(
                '/v1/a/c4/unknown').get_response(self.test_staticweb)
            self.assertEqual(resp.status_int, 404)
            # testing for _error_response
            resp = Request.blank('/v1/a/c5/').get_response(self.test_staticweb)
            self.assertEqual(resp.status_int, 503)  # sanity
class TestStaticWebUrlBase(unittest.TestCase):
    """Tests for the ``url_base`` option, which overrides the scheme
    and/or netloc that staticweb uses when building redirect Locations."""

    def setUp(self):
        self.app = FakeApp()
        self._orig_get_container_info = staticweb.get_container_info
        staticweb.get_container_info = mock_get_container_info

    def tearDown(self):
        staticweb.get_container_info = self._orig_get_container_info

    def _get_redirect(self, url_base, path):
        # Build a staticweb stack with the given url_base, request the
        # path (no trailing slash), assert the 301 and return the parsed
        # Location header.
        filtered = FakeAuthFilter(
            staticweb.filter_factory({'url_base': url_base})(self.app))
        resp = Request.blank(path).get_response(filtered)
        self.assertEqual(resp.status_int, 301)
        return urlparse(resp.location)

    def test_container3subdirz_scheme(self):
        parsed = self._get_redirect('https://', '/v1/a/c3/subdirz')
        self.assertEqual(parsed.scheme, 'https')
        # We omit comparing netloc here, because swob is free to add port.
        self.assertEqual(parsed.path, '/v1/a/c3/subdirz/')

    def test_container3subdirz_host(self):
        parsed = self._get_redirect('//example.com', '/v1/a/c3/subdirz')
        # We compare scheme with the default. This may change, but unlikely.
        self.assertEqual(parsed.scheme, 'http')
        self.assertEqual(parsed.netloc, 'example.com')
        self.assertEqual(parsed.path, '/v1/a/c3/subdirz/')

    def test_container3subdirz_both(self):
        parsed = self._get_redirect('http://example.com', '/v1/a/c3/subdirz')
        self.assertEqual(parsed.scheme, 'http')
        self.assertEqual(parsed.netloc, 'example.com')
        self.assertEqual(parsed.path, '/v1/a/c3/subdirz/')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_staticweb.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Request
from swift.common.middleware import catch_errors
from swift.common.utils import get_logger
class StrangeException(BaseException):
    """Deliberately derived from BaseException (not Exception) so that
    handlers catching only Exception will let it propagate."""
class FakeApp(object):
    """WSGI stub for the catch_errors tests.

    Verifies that the middleware installed a transaction id in the
    environment, then either raises (when ``error`` is set) or returns a
    canned body / the supplied ``body_iter``.
    """

    def __init__(self, error=False, body_iter=None):
        self.error = error
        self.body_iter = body_iter

    def __call__(self, env, start_response):
        if 'swift.trans_id' not in env:
            raise Exception('Trans id should always be in env')
        # 'strange' raises a BaseException subclass that plain
        # Exception handlers will not catch.
        if self.error == 'strange':
            raise StrangeException('whoa')
        if self.error:
            raise Exception('An error occurred')
        return [b"FAKE APP"] if self.body_iter is None else self.body_iter
class TestCatchErrors(unittest.TestCase):
    """Tests for swift.common.middleware.catch_errors."""
    def setUp(self):
        # Fresh logger with no transaction id so each test can observe
        # the id the middleware assigns.
        self.logger = get_logger({})
        self.logger.txn_id = None
    def start_response(self, status, headers, *args):
        # WSGI start_response stand-in: assert both request-id response
        # headers are present and carry the same value.
        request_ids = ('X-Trans-Id', 'X-Openstack-Request-Id')
        hdict = dict(headers)
        for key in request_ids:
            self.assertIn(key, hdict)
        for key1, key2 in zip(request_ids, request_ids[1:]):
            self.assertEqual(hdict[key1], hdict[key2])
    def test_catcherrors_passthrough(self):
        # A healthy app's response passes through unchanged.
        app = catch_errors.CatchErrorMiddleware(FakeApp(), {})
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, self.start_response)
        self.assertEqual(list(resp), [b'FAKE APP'])
    def test_catcherrors(self):
        # An exception raised by the app becomes a generic error body.
        app = catch_errors.CatchErrorMiddleware(FakeApp(True), {})
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, self.start_response)
        self.assertEqual(list(resp), [b'An error occurred'])
    def test_trans_id_header_pass(self):
        # A transaction id is assigned on the success path.
        self.assertIsNone(self.logger.txn_id)
        app = catch_errors.CatchErrorMiddleware(FakeApp(), {})
        req = Request.blank('/v1/a/c/o')
        app(req.environ, self.start_response)
        self.assertEqual(len(self.logger.txn_id), 34)  # 32 hex + 'tx'
    def test_trans_id_header_fail(self):
        # ...and also when the app blows up.
        self.assertIsNone(self.logger.txn_id)
        app = catch_errors.CatchErrorMiddleware(FakeApp(True), {})
        req = Request.blank('/v1/a/c/o')
        app(req.environ, self.start_response)
        self.assertEqual(len(self.logger.txn_id), 34)
    def test_error_in_iterator(self):
        # int('a') raises while the response body is being iterated.
        app = catch_errors.CatchErrorMiddleware(
            FakeApp(body_iter=(int(x) for x in 'abcd')), {})
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, self.start_response)
        self.assertEqual(list(resp), [b'An error occurred'])
    def test_trans_id_header_suffix(self):
        # Configured trans_id_suffix is appended to the generated id.
        self.assertIsNone(self.logger.txn_id)
        app = catch_errors.CatchErrorMiddleware(
            FakeApp(), {'trans_id_suffix': '-stuff'})
        req = Request.blank('/v1/a/c/o')
        app(req.environ, self.start_response)
        self.assertTrue(self.logger.txn_id.endswith('-stuff'))
    def test_trans_id_header_extra(self):
        # Client-supplied X-Trans-Id-Extra is appended after the
        # configured suffix.
        self.assertIsNone(self.logger.txn_id)
        app = catch_errors.CatchErrorMiddleware(
            FakeApp(), {'trans_id_suffix': '-fromconf'})
        req = Request.blank('/v1/a/c/o',
                            headers={'X-Trans-Id-Extra': 'fromuser'})
        app(req.environ, self.start_response)
        self.assertTrue(self.logger.txn_id.endswith('-fromconf-fromuser'))
    def test_trans_id_header_extra_length_limit(self):
        # Over-long extras are truncated (to 32 chars here).
        self.assertIsNone(self.logger.txn_id)
        app = catch_errors.CatchErrorMiddleware(
            FakeApp(), {'trans_id_suffix': '-fromconf'})
        req = Request.blank('/v1/a/c/o',
                            headers={'X-Trans-Id-Extra': 'a' * 1000})
        app(req.environ, self.start_response)
        self.assertTrue(self.logger.txn_id.endswith(
            '-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'))
    def test_trans_id_header_extra_quoted(self):
        # Unsafe characters in the extra are percent-encoded.
        self.assertIsNone(self.logger.txn_id)
        app = catch_errors.CatchErrorMiddleware(FakeApp(), {})
        req = Request.blank('/v1/a/c/o',
                            headers={'X-Trans-Id-Extra': 'xan than"gum'})
        app(req.environ, self.start_response)
        self.assertTrue(self.logger.txn_id.endswith('-xan%20than%22gum'))
    def test_catcherrors_with_unexpected_error(self):
        # Even a BaseException subclass is caught and reported.
        app = catch_errors.CatchErrorMiddleware(FakeApp(error='strange'), {})
        req = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        resp = app(req.environ, self.start_response)
        self.assertEqual(list(resp), [b'An error occurred'])
    def test_HEAD_with_content_length(self):
        # HEAD responses legitimately have Content-Length but no body;
        # the length check must not trip on them.
        def cannot_count_app(env, sr):
            sr("200 OK", [("Content-Length", "10")])
            return [b""]
        app = catch_errors.CatchErrorMiddleware(cannot_count_app, {})
        list(app({'REQUEST_METHOD': 'HEAD'}, self.start_response))
    def test_short_response_body(self):
        # Body shorter than the declared Content-Length is an error.
        def cannot_count_app(env, sr):
            sr("200 OK", [("Content-Length", "2000")])
            return [b"our staff tailor is Euripedes Imenedes"]
        app = catch_errors.CatchErrorMiddleware(cannot_count_app, {})
        with self.assertRaises(catch_errors.BadResponseLength):
            list(app({'REQUEST_METHOD': 'GET'}, self.start_response))
    def test_long_response_body(self):
        # Body longer than the declared Content-Length is also an error.
        def cannot_count_app(env, sr):
            sr("200 OK", [("Content-Length", "10")])
            return [b"our optometric firm is C.F. Eye Care"]
        app = catch_errors.CatchErrorMiddleware(cannot_count_app, {})
        with self.assertRaises(catch_errors.BadResponseLength):
            list(app({'REQUEST_METHOD': 'GET'}, self.start_response))
    def test_bogus_content_length(self):
        # A non-numeric Content-Length disables the length check rather
        # than raising.
        def bogus_cl_app(env, sr):
            sr("200 OK", [("Content-Length", "25 cm")])
            return [b"our British cutlery specialist is Sir Irving Spoon"]
        app = catch_errors.CatchErrorMiddleware(bogus_cl_app, {})
        list(app({'REQUEST_METHOD': 'GET'}, self.start_response))
    def test_no_content_length(self):
        # No Content-Length at all: nothing to verify against.
        def no_cl_app(env, sr):
            sr("200 OK", [("Content-Type", "application/names")])
            return [b"our staff statistician is Marge Inovera"]
        app = catch_errors.CatchErrorMiddleware(no_cl_app, {})
        list(app({'REQUEST_METHOD': 'GET'}, self.start_response))
    def test_multiple_content_lengths(self):
        # Conflicting Content-Length headers likewise disable the check.
        def poly_cl_app(env, sr):
            sr("200 OK", [("Content-Length", "30"),
                          ("Content-Length", "40")])
            return [b"The head of our personal trainers is Jim Shortz"]
        app = catch_errors.CatchErrorMiddleware(poly_cl_app, {})
        list(app({'REQUEST_METHOD': 'GET'}, self.start_response))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/test_catch_errors.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import unittest
from swift.common.middleware.s3api import utils, s3request
# (CamelCase, snake_case) pairs used by the case-conversion tests below.
strs = [
    ('Owner', 'owner'),
    ('DisplayName', 'display_name'),
    ('AccessControlPolicy', 'access_control_policy'),
]
class TestS3ApiUtils(unittest.TestCase):
    """Tests for helper functions in swift.common.middleware.s3api.utils."""
    def test_camel_to_snake(self):
        for s1, s2 in strs:
            self.assertEqual(utils.camel_to_snake(s1), s2)
    def test_snake_to_camel(self):
        for s1, s2 in strs:
            self.assertEqual(s1, utils.snake_to_camel(s2))
    def test_validate_bucket_name(self):
        # Second argument True enforces DNS-compliant bucket names
        # (3-63 chars, lowercase, no '_', no leading/trailing or
        # adjacent punctuation).
        # good cases
        self.assertTrue(utils.validate_bucket_name('bucket', True))
        self.assertTrue(utils.validate_bucket_name('bucket1', True))
        self.assertTrue(utils.validate_bucket_name('bucket-1', True))
        self.assertTrue(utils.validate_bucket_name('b.u.c.k.e.t', True))
        self.assertTrue(utils.validate_bucket_name('a' * 63, True))
        # bad cases
        self.assertFalse(utils.validate_bucket_name('a', True))
        self.assertFalse(utils.validate_bucket_name('aa', True))
        self.assertFalse(utils.validate_bucket_name('a+a', True))
        self.assertFalse(utils.validate_bucket_name('a_a', True))
        self.assertFalse(utils.validate_bucket_name('Bucket', True))
        self.assertFalse(utils.validate_bucket_name('BUCKET', True))
        self.assertFalse(utils.validate_bucket_name('bucket-', True))
        self.assertFalse(utils.validate_bucket_name('bucket.', True))
        self.assertFalse(utils.validate_bucket_name('bucket_', True))
        self.assertFalse(utils.validate_bucket_name('bucket.-bucket', True))
        self.assertFalse(utils.validate_bucket_name('bucket-.bucket', True))
        self.assertFalse(utils.validate_bucket_name('bucket..bucket', True))
        self.assertFalse(utils.validate_bucket_name('a' * 64, True))
    def test_validate_bucket_name_with_dns_compliant_bucket_names_false(self):
        # With DNS compliance off, the legacy (US standard) rules apply:
        # up to 255 chars, mixed case and '_' allowed.
        # good cases
        self.assertTrue(utils.validate_bucket_name('bucket', False))
        self.assertTrue(utils.validate_bucket_name('bucket1', False))
        self.assertTrue(utils.validate_bucket_name('bucket-1', False))
        self.assertTrue(utils.validate_bucket_name('b.u.c.k.e.t', False))
        self.assertTrue(utils.validate_bucket_name('a' * 63, False))
        self.assertTrue(utils.validate_bucket_name('a' * 255, False))
        self.assertTrue(utils.validate_bucket_name('a_a', False))
        self.assertTrue(utils.validate_bucket_name('Bucket', False))
        self.assertTrue(utils.validate_bucket_name('BUCKET', False))
        self.assertTrue(utils.validate_bucket_name('bucket-', False))
        self.assertTrue(utils.validate_bucket_name('bucket_', False))
        self.assertTrue(utils.validate_bucket_name('bucket.-bucket', False))
        self.assertTrue(utils.validate_bucket_name('bucket-.bucket', False))
        self.assertTrue(utils.validate_bucket_name('bucket..bucket', False))
        # bad cases
        self.assertFalse(utils.validate_bucket_name('a', False))
        self.assertFalse(utils.validate_bucket_name('aa', False))
        self.assertFalse(utils.validate_bucket_name('a+a', False))
        # ending with dot seems invalid in US standard, too
        self.assertFalse(utils.validate_bucket_name('bucket.', False))
        self.assertFalse(utils.validate_bucket_name('a' * 256, False))
    def test_mktime(self):
        # All of these spellings name the Unix epoch.
        date_headers = [
            'Thu, 01 Jan 1970 00:00:00 -0000',
            'Thu, 01 Jan 1970 00:00:00 GMT',
            'Thu, 01 Jan 1970 00:00:00 UTC',
            'Thu, 01 Jan 1970 08:00:00 +0800',
            'Wed, 31 Dec 1969 16:00:00 -0800',
            'Wed, 31 Dec 1969 16:00:00 PST',
        ]
        for header in date_headers:
            ts = utils.mktime(header)
            self.assertEqual(0, ts, 'Got %r for header %s' % (ts, header))
        # Last-Modified response style
        self.assertEqual(0, utils.mktime('1970-01-01T00:00:00'))
        # X-Amz-Date style
        self.assertEqual(0, utils.mktime('19700101T000000Z',
                                         s3request.SIGV4_X_AMZ_DATE_FORMAT))
    def test_mktime_weird_tz(self):
        # mktime must be timezone-independent even when TZ is left in a
        # strange state without a matching tzset() call.
        orig_tz = os.environ.get('TZ', '')
        try:
            os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
            time.tzset()
            os.environ['TZ'] = '+0000'
            # No tzset! Simulating what Swift would do.
            self.assertNotEqual(0, time.timezone)
            self.test_mktime()
        finally:
            # Restore the process-wide timezone state for later tests.
            os.environ['TZ'] = orig_tz
            time.tzset()
class TestS3Timestamp(unittest.TestCase):
    """Tests for the s3api-specific timestamp wrapper."""
    def test_s3xmlformat(self):
        # An integral timestamp renders with zero milliseconds.
        self.assertEqual('1970-01-01T00:00:01.000Z',
                         utils.S3Timestamp(1).s3xmlformat)
        # Any fractional part -- milliseconds or bare microseconds --
        # is rounded up to the next whole second.
        for raw in (1.1, 1.000001, 1.9):
            self.assertEqual('1970-01-01T00:00:02.000Z',
                             utils.S3Timestamp(raw).s3xmlformat)
    def test_from_s3xmlformat(self):
        cases = (
            ('2014-06-10T22:47:32.000Z', 1402440452,
             '2014-06-10T22:47:32.000000'),
            ('1970-01-01T00:00:00.000Z', 0.0,
             '1970-01-01T00:00:00.000000'),
        )
        for xml, seconds, iso in cases:
            ts = utils.S3Timestamp.from_s3xmlformat(xml)
            self.assertIsInstance(ts, utils.S3Timestamp)
            self.assertEqual(seconds, float(ts))
            self.assertEqual(iso, ts.isoformat)
        # Round-tripping through s3xmlformat preserves equality.
        original = utils.S3Timestamp(1402440452.0)
        self.assertIsInstance(original, utils.S3Timestamp)
        round_tripped = utils.S3Timestamp.from_s3xmlformat(
            original.s3xmlformat)
        self.assertIsInstance(round_tripped, utils.S3Timestamp)
        self.assertEqual(original, round_tripped)
    def test_from_isoformat(self):
        ts = utils.S3Timestamp.from_isoformat('2014-06-10T22:47:32.054580')
        self.assertIsInstance(ts, utils.S3Timestamp)
        self.assertEqual(1402440452.05458, float(ts))
        self.assertEqual('2014-06-10T22:47:32.054580', ts.isoformat)
        # s3xmlformat still rounds the fraction up to the next second.
        self.assertEqual('2014-06-10T22:47:33.000Z', ts.s3xmlformat)
class TestConfig(unittest.TestCase):
    """Tests for the attribute-style s3api Config mapping."""
    def _assert_defaults(self, conf):
        self.assertEqual([], conf.storage_domains)
        self.assertEqual('us-east-1', conf.location)
        self.assertFalse(conf.force_swift_request_proxy_log)
        self.assertTrue(conf.dns_compliant_bucket_names)
        self.assertTrue(conf.allow_multipart_uploads)
        self.assertFalse(conf.allow_no_owner)
        self.assertEqual(900, conf.allowable_clock_skew)
        self.assertFalse(conf.ratelimit_as_client_error)
    def test_defaults(self):
        # deliberately brittle so new defaults will need to be added to test
        conf = utils.Config()
        self._assert_defaults(conf)
        # After removing every known default, nothing should remain.
        for attr in ('storage_domains', 'location',
                     'force_swift_request_proxy_log',
                     'dns_compliant_bucket_names',
                     'allow_multipart_uploads', 'allow_no_owner',
                     'allowable_clock_skew', 'ratelimit_as_client_error'):
            delattr(conf, attr)
        self.assertEqual({}, conf)
    def test_update(self):
        conf = utils.Config()
        conf.update({'key1': 'val1', 'key2': 'val2'})
        # Updating with unrelated keys leaves the defaults untouched...
        self._assert_defaults(conf)
        self.assertEqual('val1', conf.key1)
        self.assertEqual('val2', conf.key2)
        # ...but a default can be overridden explicitly.
        conf.update({'allow_multipart_uploads': False})
        self.assertFalse(conf.allow_multipart_uploads)
    def test_set_get_delete(self):
        conf = utils.Config()
        self.assertRaises(AttributeError, lambda: conf.new_attr)
        conf.new_attr = 123
        self.assertEqual(123, conf.new_attr)
        delattr(conf, 'new_attr')
        self.assertRaises(AttributeError, lambda: conf.new_attr)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_utils.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import binascii
import hashlib
from mock import patch
import os
import time
import unittest
from six.moves.urllib.parse import parse_qs, quote, quote_plus
from swift.common import swob
from swift.common.swob import Request
from swift.common.utils import json, md5
from test.unit import FakeMemcache, patch_policies
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.helpers import UnreadableInput
from swift.common.middleware.s3api.etree import fromstring, tostring
from swift.common.middleware.s3api.subresource import Owner, Grant, User, \
ACL, encode_acl, decode_acl, ACLPublicRead
from test.unit.common.middleware.s3api.test_s3_acl import s3acl
from swift.common.middleware.s3api.utils import sysmeta_header, mktime, \
S3Timestamp
from swift.common.middleware.s3api.s3request import MAX_32BIT_INT
from swift.common.storage_policy import StoragePolicy
from swift.proxy.controllers.base import get_cache_key
# Canned CompleteMultipartUpload request body naming two parts and their
# ETags (the second deliberately quoted, as some clients send it).
XML = '<CompleteMultipartUpload>' \
    '<Part>' \
    '<PartNumber>1</PartNumber>' \
    '<ETag>0123456789abcdef0123456789abcdef</ETag>' \
    '</Part>' \
    '<Part>' \
    '<PartNumber>2</PartNumber>' \
    '<ETag>"fedcba9876543210fedcba9876543210"</ETag>' \
    '</Part>' \
    '</CompleteMultipartUpload>'
# Rows of (name, last_modified, hash, bytes, expected-listing timestamp)
# used to stock the fake segments container.
OBJECTS_TEMPLATE = \
    (('object/X/1', '2014-05-07T19:47:51.592270', '0123456789abcdef', 100,
      '2014-05-07T19:47:52.000Z'),
     ('object/X/2', '2014-05-07T19:47:52.592270', 'fedcba9876543210', 200,
      '2014-05-07T19:47:53.000Z'))
# Same row layout; 'object/<id>' rows are upload markers, 'object/<id>/<n>'
# rows are uploaded parts.
MULTIPARTS_TEMPLATE = \
    (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
      '2014-05-07T19:47:51.000Z'),
     ('object/X/1', '2014-05-07T19:47:51.592270', '0123456789abcdef', 11,
      '2014-05-07T19:47:52.000Z'),
     ('object/X/2', '2014-05-07T19:47:52.592270', 'fedcba9876543210', 21,
      '2014-05-07T19:47:53.000Z'),
     ('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2,
      '2014-05-07T19:47:54.000Z'),
     ('object/Y/1', '2014-05-07T19:47:54.592270', '0123456789abcdef', 12,
      '2014-05-07T19:47:55.000Z'),
     ('object/Y/2', '2014-05-07T19:47:55.592270', 'fedcba9876543210', 22,
      '2014-05-07T19:47:56.000Z'),
     ('object/Z', '2014-05-07T19:47:56.592270', 'HASH', 3,
      '2014-05-07T19:47:57.000Z'),
     ('object/Z/1', '2014-05-07T19:47:57.592270', '0123456789abcdef', 13,
      '2014-05-07T19:47:58.000Z'),
     ('object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210', 23,
      '2014-05-07T19:47:59.000Z'),
     ('subdir/object/Z', '2014-05-07T19:47:58.592270', 'HASH', 4,
      '2014-05-07T19:47:59.000Z'),
     ('subdir/object/Z/1', '2014-05-07T19:47:58.592270', '0123456789abcdef',
      41, '2014-05-07T19:47:59.000Z'),
     ('subdir/object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210',
      41, '2014-05-07T19:47:59.000Z'),
     # NB: wsgi strings
     ('subdir/object/completed\xe2\x98\x83/W/1', '2014-05-07T19:47:58.592270',
      '0123456789abcdef', 41, '2014-05-07T19:47:59.000Z'),
     ('subdir/object/completed\xe2\x98\x83/W/2', '2014-05-07T19:47:58.592270',
      'fedcba9876543210', 41, '2014-05-07T19:47:59'))
# Multipart ETag: MD5 over the concatenated binary part MD5s, suffixed
# with "-<part count>".
S3_ETAG = '"%s-2"' % md5(binascii.a2b_hex(
    '0123456789abcdef0123456789abcdef'
    'fedcba9876543210fedcba9876543210'), usedforsecurity=False).hexdigest()
class TestS3ApiMultiUpload(S3ApiTestCase):
    def setUp(self):
        """Stock the fake Swift backend with a '+segments' container and
        the objects/parts the multipart-upload tests expect."""
        super(TestS3ApiMultiUpload, self).setUp()
        self.segment_bucket = '/v1/AUTH_test/bucket+segments'
        self.etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
        self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT'
        put_headers = {'etag': self.etag, 'last-modified': self.last_modified}
        self.s3api.conf.min_segment_size = 1
        objects = [{'name': item[0], 'last_modified': item[1],
                    'hash': item[2], 'bytes': item[3]}
                   for item in OBJECTS_TEMPLATE]
        self.swift.register('PUT', self.segment_bucket,
                            swob.HTTPAccepted, {}, None)
        # default to just returning everybody...
        self.swift.register('GET', self.segment_bucket, swob.HTTPOk, {},
                            json.dumps(objects))
        self.swift.register('GET', '%s?format=json&marker=%s' % (
            self.segment_bucket, objects[-1]['name']),
            swob.HTTPOk, {}, json.dumps([]))
        # but for the listing when aborting an upload, break it up into pages
        self.swift.register(
            'GET', '%s?delimiter=/&format=json&marker=&prefix=object/X/' % (
                self.segment_bucket, ),
            swob.HTTPOk, {}, json.dumps(objects[:1]))
        self.swift.register(
            'GET', '%s?delimiter=/&format=json&marker=%s&prefix=object/X/' % (
                self.segment_bucket, objects[0]['name']),
            swob.HTTPOk, {}, json.dumps(objects[1:]))
        self.swift.register(
            'GET', '%s?delimiter=/&format=json&marker=%s&prefix=object/X/' % (
                self.segment_bucket, objects[-1]['name']),
            swob.HTTPOk, {}, '[]')
        # Upload marker for upload id 'X', with sysmeta carrying the real
        # content type.
        self.swift.register('HEAD', self.segment_bucket + '/object/X',
                            swob.HTTPOk,
                            {'x-object-meta-foo': 'bar',
                             'content-type': 'application/directory',
                             'x-object-sysmeta-s3api-has-content-type': 'yes',
                             'x-object-sysmeta-s3api-content-type':
                                 'baz/quux'}, None)
        self.swift.register('PUT', self.segment_bucket + '/object/X',
                            swob.HTTPCreated, {}, None)
        self.swift.register('DELETE', self.segment_bucket + '/object/X',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('GET', self.segment_bucket + '/object/invalid',
                            swob.HTTPNotFound, {}, None)
        self.swift.register('PUT', self.segment_bucket + '/object/X/1',
                            swob.HTTPCreated, put_headers, None)
        self.swift.register('DELETE', self.segment_bucket + '/object/X/1',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', self.segment_bucket + '/object/X/2',
                            swob.HTTPNoContent, {}, None)
@s3acl
def test_bucket_upload_part(self):
req = Request.blank('/bucket?partNumber=1&uploadId=x',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidRequest')
self.assertEqual([], self.swift.calls)
def test_bucket_upload_part_success(self):
req = Request.blank('/bucket/object?partNumber=1&uploadId=X',
method='PUT',
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
with patch('swift.common.middleware.s3api.s3request.'
'get_container_info',
lambda env, app, swift_source: {'status': 204}):
status, headers, body = self.call_s3api(req)
self.assertEqual(status, '200 OK')
self.assertEqual([
('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
('PUT', '/v1/AUTH_test/bucket+segments/object/X/1'),
], self.swift.calls)
    def test_bucket_upload_part_v4_bad_hash(self):
        """A SigV4 part upload whose X-Amz-Content-SHA256 does not match
        the body is rejected with BadDigest after the segment PUT."""
        authz_header = 'AWS4-HMAC-SHA256 ' + ', '.join([
            'Credential=test:tester/%s/us-east-1/s3/aws4_request' %
            self.get_v4_amz_date_header().split('T', 1)[0],
            'SignedHeaders=host;x-amz-date',
            'Signature=X',
        ])
        req = Request.blank(
            '/bucket/object?partNumber=1&uploadId=X',
            method='PUT',
            headers={'Authorization': authz_header,
                     'X-Amz-Date': self.get_v4_amz_date_header(),
                     # deliberately wrong SHA256 of the body
                     'X-Amz-Content-SHA256': 'not_the_hash'},
            body=b'test')
        with patch('swift.common.middleware.s3api.s3request.'
                   'get_container_info',
                   lambda env, app, swift_source: {'status': 204}):
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '400 Bad Request')
        self.assertEqual(self._get_error_code(body), 'BadDigest')
        # The mismatch is only detected while streaming, so the backend
        # HEAD and PUT have already happened.
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            ('PUT', '/v1/AUTH_test/bucket+segments/object/X/1'),
        ], self.swift.calls)
        self.assertEqual('/v1/AUTH_test/bucket+segments/object/X/1',
                         req.environ.get('swift.backend_path'))
@s3acl
def test_object_multipart_uploads_list(self):
req = Request.blank('/bucket/object?uploads',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidRequest')
@s3acl
def test_bucket_multipart_uploads_initiate(self):
req = Request.blank('/bucket?uploads',
environ={'REQUEST_METHOD': 'POST'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidRequest')
@s3acl
def test_bucket_list_parts(self):
req = Request.blank('/bucket?uploadId=x',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidRequest')
@s3acl
def test_bucket_multipart_uploads_abort(self):
req = Request.blank('/bucket?uploadId=x',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidRequest')
self.assertEqual(self._get_error_message(body),
'A key must be specified')
@s3acl
def test_bucket_multipart_uploads_complete(self):
req = Request.blank('/bucket?uploadId=x',
environ={'REQUEST_METHOD': 'POST'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidRequest')
    def _test_bucket_multipart_uploads_GET(self, query='',
                                           multiparts=None):
        """Drive a ListMultipartUploads request against registered fake
        listings.

        Translates the S3 query parameters into the Swift container-GET
        query the middleware is expected to issue, registers that exact
        GET (plus the empty follow-up page), then performs the request
        and returns (status, headers, body).
        """
        objects = multiparts or MULTIPARTS_TEMPLATE
        objects = [{'name': item[0], 'last_modified': item[1],
                    'hash': item[2], 'bytes': item[3]}
                   for item in objects]
        object_list = json.dumps(objects).encode('ascii')
        query_parts = parse_qs(query)
        swift_query = {'format': 'json'}
        # key-marker/upload-id-marker map onto the Swift 'marker' param;
        # '~' sorts after any upload id, skipping all of a key's uploads.
        if 'upload-id-marker' in query_parts and 'key-marker' in query_parts:
            swift_query['marker'] = '%s/%s' % (
                query_parts['key-marker'][0],
                query_parts['upload-id-marker'][0])
        elif 'key-marker' in query_parts:
            swift_query['marker'] = '%s/~' % (query_parts['key-marker'][0])
        if 'prefix' in query_parts:
            swift_query['prefix'] = query_parts['prefix'][0]
        self.swift.register(
            'GET', '%s?%s' % (self.segment_bucket,
                              '&'.join(['%s=%s' % (k, v)
                                        for k, v in swift_query.items()])),
            swob.HTTPOk, {}, object_list)
        # The middleware pages until it gets an empty listing.
        swift_query['marker'] = objects[-1]['name']
        self.swift.register(
            'GET', '%s?%s' % (self.segment_bucket,
                              '&'.join(['%s=%s' % (k, v)
                                        for k, v in swift_query.items()])),
            swob.HTTPOk, {}, json.dumps([]))
        query = '?uploads&' + query if query else '?uploads'
        req = Request.blank('/bucket/%s' % query,
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        return self.call_s3api(req)
    def test_bucket_multipart_uploads_GET_paginated(self):
        """Uploads spread over several 1000-item listing pages are still
        collapsed into one upload per marker object."""
        uploads = [
            ['object/abc'] + ['object/abc/%d' % i for i in range(1, 1000)],
            ['object/def'] + ['object/def/%d' % i for i in range(1, 1000)],
            ['object/ghi'] + ['object/ghi/%d' % i for i in range(1, 1000)],
        ]
        objects = [
            {'name': name, 'last_modified': '2014-05-07T19:47:50.592270',
             'hash': 'HASH', 'bytes': 42}
            for upload in uploads for name in upload
        ]
        # Register one fake container-GET per 1000-row page, ending with
        # an empty page so the middleware stops paging.
        end = 1000
        while True:
            if end == 1000:
                self.swift.register(
                    'GET', '%s?format=json' % (self.segment_bucket),
                    swob.HTTPOk, {}, json.dumps(objects[:end]))
            else:
                self.swift.register(
                    'GET', '%s?format=json&marker=%s' % (
                        self.segment_bucket, objects[end - 1001]['name']),
                    swob.HTTPOk, {}, json.dumps(objects[end - 1000:end]))
            if not objects[end - 1000:end]:
                break
            end += 1000
        req = Request.blank('/bucket/?uploads',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListMultipartUploadsResult')
        self.assertEqual(elem.find('Bucket').text, 'bucket')
        self.assertIsNone(elem.find('KeyMarker').text)
        self.assertIsNone(elem.find('UploadIdMarker').text)
        self.assertEqual(elem.find('NextUploadIdMarker').text, 'ghi')
        self.assertEqual(elem.find('MaxUploads').text, '1000')
        self.assertEqual(elem.find('IsTruncated').text, 'false')
        self.assertEqual(len(elem.findall('Upload')), len(uploads))
        expected_uploads = [(upload[0], '2014-05-07T19:47:51.000Z')
                            for upload in uploads]
        for u in elem.findall('Upload'):
            name = u.find('Key').text + '/' + u.find('UploadId').text
            initiated = u.find('Initiated').text
            self.assertIn((name, initiated), expected_uploads)
            self.assertEqual(u.find('Initiator/ID').text, 'test:tester')
            self.assertEqual(u.find('Initiator/DisplayName').text,
                             'test:tester')
            self.assertEqual(u.find('Owner/ID').text, 'test:tester')
            self.assertEqual(u.find('Owner/DisplayName').text, 'test:tester')
            self.assertEqual(u.find('StorageClass').text, 'STANDARD')
        self.assertEqual(status.split()[0], '200')
@s3acl
def test_bucket_multipart_uploads_GET(self):
status, headers, body = self._test_bucket_multipart_uploads_GET()
elem = fromstring(body, 'ListMultipartUploadsResult')
self.assertEqual(elem.find('Bucket').text, 'bucket')
self.assertIsNone(elem.find('KeyMarker').text)
self.assertIsNone(elem.find('UploadIdMarker').text)
self.assertEqual(elem.find('NextUploadIdMarker').text, 'Z')
self.assertEqual(elem.find('MaxUploads').text, '1000')
self.assertEqual(elem.find('IsTruncated').text, 'false')
self.assertEqual(len(elem.findall('Upload')), 4)
objects = [(o[0], o[4]) for o in MULTIPARTS_TEMPLATE]
for u in elem.findall('Upload'):
name = u.find('Key').text + '/' + u.find('UploadId').text
initiated = u.find('Initiated').text
self.assertTrue((name, initiated) in objects)
self.assertEqual(u.find('Initiator/ID').text, 'test:tester')
self.assertEqual(u.find('Initiator/DisplayName').text,
'test:tester')
self.assertEqual(u.find('Owner/ID').text, 'test:tester')
self.assertEqual(u.find('Owner/DisplayName').text, 'test:tester')
self.assertEqual(u.find('StorageClass').text, 'STANDARD')
self.assertEqual(status.split()[0], '200')
@s3acl
def test_bucket_multipart_uploads_GET_without_segment_bucket(self):
segment_bucket = '/v1/AUTH_test/bucket+segments'
self.swift.register('GET', segment_bucket, swob.HTTPNotFound, {}, '')
req = Request.blank('/bucket?uploads',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, haeaders, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
elem = fromstring(body, 'ListMultipartUploadsResult')
self.assertEqual(elem.find('Bucket').text, 'bucket')
self.assertIsNone(elem.find('KeyMarker').text)
self.assertIsNone(elem.find('UploadIdMarker').text)
self.assertIsNone(elem.find('NextUploadIdMarker').text)
self.assertEqual(elem.find('MaxUploads').text, '1000')
self.assertEqual(elem.find('IsTruncated').text, 'false')
self.assertEqual(len(elem.findall('Upload')), 0)
@s3acl
@patch('swift.common.middleware.s3api.s3request.get_container_info',
lambda env, app, swift_source: {'status': 404})
def test_bucket_multipart_uploads_GET_without_bucket(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket',
swob.HTTPNotFound, {}, '')
req = Request.blank('/bucket?uploads',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, haeaders, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '404')
self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
@s3acl
def test_bucket_multipart_uploads_GET_encoding_type_error(self):
query = 'encoding-type=xml'
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
@s3acl
def test_bucket_multipart_uploads_GET_maxuploads(self):
query = 'max-uploads=2'
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query)
elem = fromstring(body, 'ListMultipartUploadsResult')
self.assertEqual(len(elem.findall('Upload/UploadId')), 2)
self.assertEqual(elem.find('NextKeyMarker').text, 'object')
self.assertEqual(elem.find('NextUploadIdMarker').text, 'Y')
self.assertEqual(elem.find('MaxUploads').text, '2')
self.assertEqual(elem.find('IsTruncated').text, 'true')
self.assertEqual(status.split()[0], '200')
@s3acl
def test_bucket_multipart_uploads_GET_str_maxuploads(self):
query = 'max-uploads=invalid'
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
@s3acl
def test_bucket_multipart_uploads_GET_negative_maxuploads(self):
query = 'max-uploads=-1'
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
@s3acl
def test_bucket_multipart_uploads_GET_maxuploads_over_default(self):
query = 'max-uploads=1001'
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query)
elem = fromstring(body, 'ListMultipartUploadsResult')
self.assertEqual(len(elem.findall('Upload/UploadId')), 4)
self.assertEqual(elem.find('NextKeyMarker').text, 'subdir/object')
self.assertEqual(elem.find('NextUploadIdMarker').text, 'Z')
self.assertEqual(elem.find('MaxUploads').text, '1000')
self.assertEqual(elem.find('IsTruncated').text, 'false')
self.assertEqual(status.split()[0], '200')
@s3acl
def test_bucket_multipart_uploads_GET_maxuploads_over_max_32bit_int(self):
query = 'max-uploads=%s' % (MAX_32BIT_INT + 1)
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
    @s3acl
    def test_bucket_multipart_uploads_GET_with_id_and_key_marker(self):
        """Both markers given: the listing starts strictly after
        key-marker/upload-id-marker, so only the 'Y' upload's later
        entries could match and the marker 'object/Y' itself is
        excluded -- leaving a single Upload."""
        query = 'upload-id-marker=Y&key-marker=object'
        multiparts = \
            (('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2,
              '2014-05-07T19:47:54.000Z'),
             ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12,
              '2014-05-07T19:47:55.000Z'),
             ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22,
              '2014-05-07T19:47:56.000Z'))
        status, headers, body = \
            self._test_bucket_multipart_uploads_GET(query, multiparts)
        elem = fromstring(body, 'ListMultipartUploadsResult')
        self.assertEqual(elem.find('KeyMarker').text, 'object')
        self.assertEqual(elem.find('UploadIdMarker').text, 'Y')
        self.assertEqual(len(elem.findall('Upload')), 1)
        objects = [(o[0], o[4]) for o in multiparts]
        for u in elem.findall('Upload'):
            name = u.find('Key').text + '/' + u.find('UploadId').text
            initiated = u.find('Initiated').text
            self.assertTrue((name, initiated) in objects)
        self.assertEqual(status.split()[0], '200')
        # Verify the Swift marker the middleware actually sent.
        _, path, _ = self.swift.calls_with_headers[-1]
        path, query_string = path.split('?', 1)
        query = {}
        for q in query_string.split('&'):
            key, arg = q.split('=')
            query[key] = arg
        self.assertEqual(query['format'], 'json')
        self.assertEqual(query['marker'], quote_plus('object/Y/2'))
    @s3acl
    def test_bucket_multipart_uploads_GET_with_key_marker(self):
        """Only key-marker given: all uploads for keys sorting after the
        marker key are listed (here both X and Y, since the Swift marker
        'object/~' sorts after every 'object/<id>' entry for prior
        pages of the same key)."""
        query = 'key-marker=object'
        multiparts = \
            (('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
              '2014-05-07T19:47:51.000Z'),
             ('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11,
              '2014-05-07T19:47:52.000Z'),
             ('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21,
              '2014-05-07T19:47:53.000Z'),
             ('object/Y', '2014-05-07T19:47:53.592270', 'HASH', 2,
              '2014-05-07T19:47:54.000Z'),
             ('object/Y/1', '2014-05-07T19:47:54.592270', 'HASH', 12,
              '2014-05-07T19:47:55.000Z'),
             ('object/Y/2', '2014-05-07T19:47:55.592270', 'HASH', 22,
              '2014-05-07T19:47:56.000Z'))
        status, headers, body = \
            self._test_bucket_multipart_uploads_GET(query, multiparts)
        elem = fromstring(body, 'ListMultipartUploadsResult')
        self.assertEqual(elem.find('KeyMarker').text, 'object')
        self.assertEqual(elem.find('NextKeyMarker').text, 'object')
        self.assertEqual(elem.find('NextUploadIdMarker').text, 'Y')
        self.assertEqual(len(elem.findall('Upload')), 2)
        objects = [(o[0], o[4]) for o in multiparts]
        for u in elem.findall('Upload'):
            name = u.find('Key').text + '/' + u.find('UploadId').text
            initiated = u.find('Initiated').text
            self.assertIn((name, initiated), objects)
        self.assertEqual(status.split()[0], '200')
        # Verify the Swift marker the middleware actually sent.
        _, path, _ = self.swift.calls_with_headers[-1]
        path, query_string = path.split('?', 1)
        query = {}
        for q in query_string.split('&'):
            key, arg = q.split('=')
            query[key] = arg
        self.assertEqual(query['format'], 'json')
        self.assertEqual(query['marker'], quote_plus('object/Y/2'))
@s3acl
def test_bucket_multipart_uploads_GET_with_prefix(self):
query = 'prefix=X'
multiparts = \
(('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
'2014-05-07T19:47:51.000Z'),
('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11,
'2014-05-07T19:47:52.000Z'),
('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21,
'2014-05-07T19:47:53.000Z'))
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query, multiparts)
elem = fromstring(body, 'ListMultipartUploadsResult')
self.assertEqual(len(elem.findall('Upload')), 1)
objects = [(o[0], o[4]) for o in multiparts]
for u in elem.findall('Upload'):
name = u.find('Key').text + '/' + u.find('UploadId').text
initiated = u.find('Initiated').text
self.assertTrue((name, initiated) in objects)
self.assertEqual(status.split()[0], '200')
_, path, _ = self.swift.calls_with_headers[-1]
path, query_string = path.split('?', 1)
query = {}
for q in query_string.split('&'):
key, arg = q.split('=')
query[key] = arg
self.assertEqual(query['format'], 'json')
self.assertEqual(query['prefix'], 'X')
@s3acl
def test_bucket_multipart_uploads_GET_with_delimiter(self):
query = 'delimiter=/'
multiparts = \
(('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
'2014-05-07T19:47:51.000Z'),
('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11,
'2014-05-07T19:47:52.000Z'),
('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21,
'2014-05-07T19:47:53.000Z'),
('object/Y', '2014-05-07T19:47:50.592270', 'HASH', 2,
'2014-05-07T19:47:51.000Z'),
('object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 21,
'2014-05-07T19:47:52.000Z'),
('object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 22,
'2014-05-07T19:47:53.000Z'),
('object/Z', '2014-05-07T19:47:50.592270', 'HASH', 3,
'2014-05-07T19:47:51.000Z'),
('object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 31,
'2014-05-07T19:47:52.000Z'),
('object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 32,
'2014-05-07T19:47:53.000Z'),
('subdir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 4,
'2014-05-07T19:47:51.000Z'),
('subdir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 41,
'2014-05-07T19:47:52.000Z'),
('subdir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 42,
'2014-05-07T19:47:53.000Z'),
('subdir/object/Y', '2014-05-07T19:47:50.592270', 'HASH', 5,
'2014-05-07T19:47:51.000Z'),
('subdir/object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 51,
'2014-05-07T19:47:52.000Z'),
('subdir/object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 52,
'2014-05-07T19:47:53.000Z'),
('subdir2/object/Z', '2014-05-07T19:47:50.592270', 'HASH', 6,
'2014-05-07T19:47:51.000Z'),
('subdir2/object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 61,
'2014-05-07T19:47:52.000Z'),
('subdir2/object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 62,
'2014-05-07T19:47:53.000Z'))
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query, multiparts)
elem = fromstring(body, 'ListMultipartUploadsResult')
self.assertEqual(len(elem.findall('Upload')), 3)
self.assertEqual(len(elem.findall('CommonPrefixes')), 2)
objects = [(o[0], o[4]) for o in multiparts
if o[0].startswith('o')]
prefixes = set([o[0].split('/')[0] + '/' for o in multiparts
if o[0].startswith('s')])
for u in elem.findall('Upload'):
name = u.find('Key').text + '/' + u.find('UploadId').text
initiated = u.find('Initiated').text
self.assertIn((name, initiated), objects)
for p in elem.findall('CommonPrefixes'):
prefix = p.find('Prefix').text
self.assertTrue(prefix in prefixes)
self.assertEqual(status.split()[0], '200')
_, path, _ = self.swift.calls_with_headers[-1]
path, query_string = path.split('?', 1)
query = {}
for q in query_string.split('&'):
key, arg = q.split('=')
query[key] = arg
self.assertEqual(query['format'], 'json')
self.assertTrue(query.get('delimiter') is None)
@s3acl
def test_bucket_multipart_uploads_GET_with_multi_chars_delimiter(self):
query = 'delimiter=subdir'
multiparts = \
(('object/X', '2014-05-07T19:47:50.592270', 'HASH', 1,
'2014-05-07T19:47:51.000Z'),
('object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 11,
'2014-05-07T19:47:52.000Z'),
('object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 21,
'2014-05-07T19:47:53.000Z'),
('dir/subdir/object/X', '2014-05-07T19:47:50.592270',
'HASH', 3, '2014-05-07T19:47:51.000Z'),
('dir/subdir/object/X/1', '2014-05-07T19:47:51.592270',
'HASH', 31, '2014-05-07T19:47:52.000Z'),
('dir/subdir/object/X/2', '2014-05-07T19:47:52.592270',
'HASH', 32, '2014-05-07T19:47:53.000Z'),
('subdir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 4,
'2014-05-07T19:47:51.000Z'),
('subdir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 41,
'2014-05-07T19:47:52.000Z'),
('subdir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 42,
'2014-05-07T19:47:53.000Z'),
('subdir/object/Y', '2014-05-07T19:47:50.592270', 'HASH', 5,
'2014-05-07T19:47:51.000Z'),
('subdir/object/Y/1', '2014-05-07T19:47:51.592270', 'HASH', 51,
'2014-05-07T19:47:52.000Z'),
('subdir/object/Y/2', '2014-05-07T19:47:52.592270', 'HASH', 52,
'2014-05-07T19:47:53.000Z'),
('subdir2/object/Z', '2014-05-07T19:47:50.592270', 'HASH', 6,
'2014-05-07T19:47:51.000Z'),
('subdir2/object/Z/1', '2014-05-07T19:47:51.592270', 'HASH', 61,
'2014-05-07T19:47:52.000Z'),
('subdir2/object/Z/2', '2014-05-07T19:47:52.592270', 'HASH', 62,
'2014-05-07T19:47:53.000Z'))
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query, multiparts)
elem = fromstring(body, 'ListMultipartUploadsResult')
self.assertEqual(len(elem.findall('Upload')), 1)
self.assertEqual(len(elem.findall('CommonPrefixes')), 2)
objects = [(o[0], o[4]) for o in multiparts
if o[0].startswith('object')]
prefixes = ('dir/subdir', 'subdir')
for u in elem.findall('Upload'):
name = u.find('Key').text + '/' + u.find('UploadId').text
initiated = u.find('Initiated').text
self.assertTrue((name, initiated) in objects)
for p in elem.findall('CommonPrefixes'):
prefix = p.find('Prefix').text
self.assertTrue(prefix in prefixes)
self.assertEqual(status.split()[0], '200')
_, path, _ = self.swift.calls_with_headers[-1]
path, query_string = path.split('?', 1)
query = {}
for q in query_string.split('&'):
key, arg = q.split('=')
query[key] = arg
self.assertEqual(query['format'], 'json')
self.assertTrue(query.get('delimiter') is None)
@s3acl
def test_bucket_multipart_uploads_GET_with_prefix_and_delimiter(self):
query = 'prefix=dir/&delimiter=/'
multiparts = \
(('dir/subdir/object/X', '2014-05-07T19:47:50.592270',
'HASH', 4, '2014-05-07T19:47:51.000Z'),
('dir/subdir/object/X/1', '2014-05-07T19:47:51.592270',
'HASH', 41, '2014-05-07T19:47:52.000Z'),
('dir/subdir/object/X/2', '2014-05-07T19:47:52.592270',
'HASH', 42, '2014-05-07T19:47:53.000Z'),
('dir/object/X', '2014-05-07T19:47:50.592270', 'HASH', 5,
'2014-05-07T19:47:51.000Z'),
('dir/object/X/1', '2014-05-07T19:47:51.592270', 'HASH', 51,
'2014-05-07T19:47:52.000Z'),
('dir/object/X/2', '2014-05-07T19:47:52.592270', 'HASH', 52,
'2014-05-07T19:47:53.000Z'))
status, headers, body = \
self._test_bucket_multipart_uploads_GET(query, multiparts)
elem = fromstring(body, 'ListMultipartUploadsResult')
self.assertEqual(len(elem.findall('Upload')), 1)
self.assertEqual(len(elem.findall('CommonPrefixes')), 1)
objects = [(o[0], o[4]) for o in multiparts
if o[0].startswith('dir/o')]
prefixes = ['dir/subdir/']
for u in elem.findall('Upload'):
name = u.find('Key').text + '/' + u.find('UploadId').text
initiated = u.find('Initiated').text
self.assertIn((name, initiated), objects)
for p in elem.findall('CommonPrefixes'):
prefix = p.find('Prefix').text
self.assertTrue(prefix in prefixes)
self.assertEqual(status.split()[0], '200')
_, path, _ = self.swift.calls_with_headers[-1]
path, query_string = path.split('?', 1)
query = {}
for q in query_string.split('&'):
key, arg = q.split('=')
query[key] = arg
self.assertEqual(query['format'], 'json')
self.assertEqual(query['prefix'], quote_plus('dir/'))
self.assertTrue(query.get('delimiter') is None)
    @patch('swift.common.middleware.s3api.controllers.'
           'multi_upload.unique_id', lambda: 'X')
    def _test_object_multipart_upload_initiate(self, headers, cache=None,
                                               bucket_exists=True,
                                               expected_policy=None,
                                               expected_read_acl=None,
                                               expected_write_acl=None):
        """Issue InitiateMultipartUpload and verify the backend traffic.

        :param headers: extra request headers to send (mutated in place)
        :param cache: optional fake memcache supplying container info
        :param bucket_exists: when False, expect the +segments container to
            be created before the upload-marker PUT
        :param expected_policy: X-Storage-Policy the +segments PUT must carry
        :param expected_read_acl: X-Container-Read the +segments PUT must
            carry; when falsy the header must be absent
        :param expected_write_acl: X-Container-Write the +segments PUT must
            carry; when falsy the header must be absent
        """
        headers.update({
            'Authorization': 'AWS test:tester:hmac',
            'Date': self.get_date_header(),
            'x-amz-meta-foo': 'bar',
            'content-encoding': 'gzip',
        })
        req = Request.blank('/bucket/object?uploads',
                            environ={'REQUEST_METHOD': 'POST',
                                     'swift.cache': cache},
                            headers=headers)
        status, headers, body = self.call_s3api(req)
        fromstring(body, 'InitiateMultipartUploadResult')
        self.assertEqual(status.split()[0], '200')
        _, _, req_headers = self.swift.calls_with_headers[-1]
        self.assertEqual(req_headers.get('X-Object-Meta-Foo'), 'bar')
        self.assertEqual(req_headers.get('Content-Encoding'), 'gzip')
        # any client-supplied checksum headers must not leak through to the
        # zero-byte upload-marker PUT
        self.assertNotIn('Etag', req_headers)
        self.assertNotIn('Content-MD5', req_headers)
        if bucket_exists:
            self.assertEqual([
                ('PUT', '/v1/AUTH_test/bucket+segments/object/X'),
            ], self.swift.calls)
        else:
            self.assertEqual([
                ('PUT', '/v1/AUTH_test/bucket+segments'),
                ('PUT', '/v1/AUTH_test/bucket+segments/object/X'),
            ], self.swift.calls)
        if expected_policy:
            # [-2] is the container-creating PUT
            _, _, req_headers = self.swift.calls_with_headers[-2]
            self.assertEqual(req_headers.get('X-Storage-Policy'),
                             expected_policy)
        if expected_read_acl:
            _, _, req_headers = self.swift.calls_with_headers[-2]
            self.assertEqual(req_headers.get('X-Container-Read'),
                             expected_read_acl)
        else:
            self.assertNotIn('X-Container-Read', req_headers)
        if expected_write_acl:
            _, _, req_headers = self.swift.calls_with_headers[-2]
            self.assertEqual(req_headers.get('X-Container-Write'),
                             expected_write_acl)
        else:
            self.assertNotIn('X-Container-Write', req_headers)
        # reset call tracking so tests can invoke this helper repeatedly
        self.swift.clear_calls()
    def test_object_multipart_upload_initiate_with_segment_bucket(self):
        """Initiate skips container creation when +segments is cached as
        existing; client Etag/Content-MD5 headers are ignored either way."""
        fake_memcache = FakeMemcache()
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket+segments')] = {'status': 204}
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket')] = {'status': 204}
        self._test_object_multipart_upload_initiate({}, fake_memcache)
        self._test_object_multipart_upload_initiate({'Etag': 'blahblahblah'},
                                                    fake_memcache)
        self._test_object_multipart_upload_initiate({
            'Content-MD5': base64.b64encode(b'blahblahblahblah').strip()},
            fake_memcache)
    def test_object_multipart_upload_initiate_without_segment_bucket(self):
        """When +segments is cached as 404, initiate creates it first."""
        self.swift.register('PUT', '/v1/AUTH_test/bucket+segments',
                            swob.HTTPCreated, {}, None)
        fake_memcache = FakeMemcache()
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket')] = {'status': 204}
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket+segments')] = {'status': 404}
        self._test_object_multipart_upload_initiate({}, fake_memcache,
                                                    bucket_exists=False)
        self._test_object_multipart_upload_initiate({'Etag': 'blahblahblah'},
                                                    fake_memcache,
                                                    bucket_exists=False)
        self._test_object_multipart_upload_initiate(
            {'Content-MD5': base64.b64encode(b'blahblahblahblah').strip()},
            fake_memcache,
            bucket_exists=False)
    @patch_policies([
        StoragePolicy(0, 'gold', is_default=True),
        StoragePolicy(1, 'silver')])
    def test_object_mpu_initiate_without_segment_bucket_same_policy(self):
        """The auto-created +segments container inherits the bucket's
        (non-default) storage policy when derived_container_policy_use_default
        is disabled."""
        self.swift.register('PUT', '/v1/AUTH_test/bucket+segments',
                            swob.HTTPCreated,
                            {'X-Storage-Policy': 'silver'}, None)
        fake_memcache = FakeMemcache()
        # bucket is on policy index 1 ('silver')
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket')] = {'status': 204,
                                       'storage_policy': '1'}
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket+segments')] = {'status': 404}
        self.s3api.conf.derived_container_policy_use_default = False
        self._test_object_multipart_upload_initiate({}, fake_memcache,
                                                    bucket_exists=False,
                                                    expected_policy='silver')
        self._test_object_multipart_upload_initiate({'Etag': 'blahblahblah'},
                                                    fake_memcache,
                                                    bucket_exists=False,
                                                    expected_policy='silver')
        self._test_object_multipart_upload_initiate(
            {'Content-MD5': base64.b64encode(b'blahblahblahblah').strip()},
            fake_memcache,
            bucket_exists=False,
            expected_policy='silver')
    def test_object_mpu_initiate_without_segment_bucket_same_acls(self):
        """The auto-created +segments container copies the bucket's
        read/write ACLs."""
        self.swift.register('PUT', '/v1/AUTH_test/bucket+segments',
                            swob.HTTPCreated, {}, None)
        fake_memcache = FakeMemcache()
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket')] = {'status': 204,
                                       'read_acl': 'alice,bob',
                                       'write_acl': 'bob,charles'}
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket+segments')] = {'status': 404}
        self.s3api.conf.derived_container_policy_use_default = False
        self._test_object_multipart_upload_initiate(
            {}, fake_memcache,
            bucket_exists=False,
            expected_read_acl='alice,bob', expected_write_acl='bob,charles')
        self._test_object_multipart_upload_initiate(
            {'Etag': 'blahblahblah'}, fake_memcache,
            bucket_exists=False,
            expected_read_acl='alice,bob', expected_write_acl='bob,charles')
        self._test_object_multipart_upload_initiate(
            {'Content-MD5': base64.b64encode(b'blahblahblahblah').strip()},
            fake_memcache,
            bucket_exists=False,
            expected_read_acl='alice,bob', expected_write_acl='bob,charles')
    def test_object_mpu_initiate_without_segment_bucket_make_public(self):
        """A public-read bucket ('.r:*,.rlistings') propagates its read ACL
        to the auto-created +segments container (no write ACL set)."""
        self.swift.register('PUT', '/v1/AUTH_test/bucket+segments',
                            swob.HTTPCreated, {}, None)
        fake_memcache = FakeMemcache()
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket')] = {'status': 204,
                                       'read_acl': '.r:*,.rlistings'}
        fake_memcache.store[get_cache_key(
            'AUTH_test', 'bucket+segments')] = {'status': 404}
        self.s3api.conf.derived_container_policy_use_default = False
        self._test_object_multipart_upload_initiate(
            {}, fake_memcache,
            bucket_exists=False,
            expected_read_acl='.r:*,.rlistings')
        self._test_object_multipart_upload_initiate(
            {'Etag': 'blahblahblah'}, fake_memcache,
            bucket_exists=False,
            expected_read_acl='.r:*,.rlistings')
        self._test_object_multipart_upload_initiate(
            {'Content-MD5': base64.b64encode(b'blahblahblahblah').strip()},
            fake_memcache,
            bucket_exists=False,
            expected_read_acl='.r:*,.rlistings')
    @patch('swift.common.middleware.s3api.controllers.multi_upload.'
           'unique_id', lambda: 'X')
    def _test_object_multipart_upload_initiate_s3acl(
            self, cache, existance_cached, should_head, should_put):
        """Initiate an upload with s3_acl enabled and verify backend calls.

        :param cache: fake memcache pre-seeded by the caller
        :param existance_cached: whether the bucket's existence is already
            cached (skips the get_container_info HEAD)
        :param should_head: whether a HEAD of +segments is expected
        :param should_put: whether +segments must be created with a PUT
        """
        # mostly inlining stuff from @s3acl(s3_acl_only=True)
        self.s3api.conf.s3_acl = True
        self.swift.s3_acl = True
        container_headers = encode_acl('container', ACL(
            Owner('test:tester', 'test:tester'),
            [Grant(User('test:tester'), 'FULL_CONTROL')]))
        self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                            swob.HTTPNoContent, container_headers, None)
        cache.store[get_cache_key('AUTH_test')] = {'status': 204}
        req = Request.blank('/bucket/object?uploads',
                            environ={'REQUEST_METHOD': 'POST',
                                     'swift.cache': cache},
                            headers={'Authorization':
                                     'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-acl': 'public-read',
                                     'x-amz-meta-foo': 'bar',
                                     'Content-Type': 'cat/picture'})
        status, headers, body = self.call_s3api(req)
        fromstring(body, 'InitiateMultipartUploadResult')
        self.assertEqual(status.split()[0], '200')
        # This is the get_container_info existance check :'(
        expected = []
        if not existance_cached:
            expected.append(('HEAD', '/v1/AUTH_test/bucket'))
        if should_head:
            expected.append(('HEAD', '/v1/AUTH_test/bucket+segments'))
        # XXX: For some reason check ACLs always does second HEAD (???)
        expected.append(('HEAD', '/v1/AUTH_test/bucket'))
        if should_put:
            expected.append(('PUT', '/v1/AUTH_test/bucket+segments'))
        expected.append(('PUT', '/v1/AUTH_test/bucket+segments/object/X'))
        self.assertEqual(expected, self.swift.calls)
        # the upload-marker PUT carries user meta, content-type sysmeta and
        # the temporary ACL sysmeta
        _, _, req_headers = self.swift.calls_with_headers[-1]
        self.assertEqual(req_headers.get('X-Object-Meta-Foo'), 'bar')
        self.assertEqual(req_headers.get(
            'X-Object-Sysmeta-S3api-Has-Content-Type'), 'yes')
        self.assertEqual(req_headers.get(
            'X-Object-Sysmeta-S3api-Content-Type'), 'cat/picture')
        tmpacl_header = req_headers.get(sysmeta_header('object', 'tmpacl'))
        self.assertTrue(tmpacl_header)
        acl_header = encode_acl('object',
                                ACLPublicRead(Owner('test:tester',
                                                    'test:tester')))
        self.assertEqual(acl_header.get(sysmeta_header('object', 'acl')),
                         tmpacl_header)
    def test_object_multipart_upload_initiate_s3acl_with_segment_bucket(self):
        """+segments exists but is not cached: expect HEADs, no PUT."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments',
                            swob.HTTPNoContent, {}, None)
        kwargs = {
            'existance_cached': False,
            'should_head': True,
            'should_put': False,
        }
        self._test_object_multipart_upload_initiate_s3acl(
            FakeMemcache(), **kwargs)
    def test_object_multipart_upload_initiate_s3acl_with_cached_seg_buck(self):
        """Both containers cached as existing: no extra HEADs, no PUT."""
        fake_memcache = FakeMemcache()
        fake_memcache.store.update({
            get_cache_key('AUTH_test', 'bucket'): {'status': 204},
            get_cache_key('AUTH_test', 'bucket+segments'): {'status': 204},
        })
        kwargs = {
            'existance_cached': True,
            'should_head': False,
            'should_put': False,
        }
        self._test_object_multipart_upload_initiate_s3acl(
            fake_memcache, **kwargs)
    def test_object_multipart_upload_initiate_s3acl_without_segment_bucket(
            self):
        """+segments cached as 404: expect it to be created with a PUT."""
        fake_memcache = FakeMemcache()
        fake_memcache.store.update({
            get_cache_key('AUTH_test', 'bucket'): {'status': 204},
            get_cache_key('AUTH_test', 'bucket+segments'): {'status': 404},
        })
        self.swift.register('PUT', '/v1/AUTH_test/bucket+segments',
                            swob.HTTPCreated, {}, None)
        kwargs = {
            'existance_cached': True,
            'should_head': False,
            'should_put': True,
        }
        self._test_object_multipart_upload_initiate_s3acl(
            fake_memcache, **kwargs)
    @s3acl(s3acl_only=True)
    @patch('swift.common.middleware.s3api.controllers.'
           'multi_upload.unique_id', lambda: 'X')
    def test_object_multipart_upload_initiate_no_content_type(self):
        """Without a Content-Type header the marker records
        Has-Content-Type: no; the tmpacl sysmeta still matches public-read."""
        req = Request.blank('/bucket/object?uploads',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization':
                                     'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-acl': 'public-read',
                                     'x-amz-meta-foo': 'bar'})
        status, headers, body = self.call_s3api(req)
        fromstring(body, 'InitiateMultipartUploadResult')
        self.assertEqual(status.split()[0], '200')
        _, _, req_headers = self.swift.calls_with_headers[-1]
        self.assertEqual(req_headers.get('X-Object-Meta-Foo'), 'bar')
        self.assertEqual(req_headers.get(
            'X-Object-Sysmeta-S3api-Has-Content-Type'), 'no')
        tmpacl_header = req_headers.get(sysmeta_header('object', 'tmpacl'))
        self.assertTrue(tmpacl_header)
        acl_header = encode_acl('object',
                                ACLPublicRead(Owner('test:tester',
                                                    'test:tester')))
        self.assertEqual(acl_header.get(sysmeta_header('object', 'acl')),
                         tmpacl_header)
    @patch('swift.common.middleware.s3api.controllers.'
           'multi_upload.unique_id', lambda: 'X')
    def test_object_multipart_upload_initiate_without_bucket(self):
        """Initiating against a missing bucket returns NoSuchBucket/404."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                            swob.HTTPNotFound, {}, None)
        req = Request.blank('/bucket/object?uploads',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization':
                                     'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '404')
        self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
    @s3acl
    def test_object_multipart_upload_complete_error(self):
        """CompleteMultipartUpload error cases: malformed XML body and a
        missing target bucket."""
        malformed_xml = 'malformed_XML'
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=malformed_xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'MalformedXML')
        # without target bucket
        req = Request.blank('/nobucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=XML)
        with patch(
                'swift.common.middleware.s3api.s3request.get_container_info',
                lambda env, app, swift_source: {'status': 404}):
            self.swift.register('HEAD', '/v1/AUTH_test/nobucket',
                                swob.HTTPNotFound, {}, None)
            status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
    def _do_test_object_multipart_upload_complete(self):
        """Happy-path CompleteMultipartUpload: marker HEAD, SLO PUT with
        heartbeat, marker DELETE; the S3 etag is carried as an override."""
        content_md5 = base64.b64encode(md5(
            XML.encode('ascii'), usedforsecurity=False).digest())
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5, },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'CompleteMultipartUploadResult')
        # the etag is reported in the XML body, not as a response header
        self.assertNotIn('Etag', headers)
        self.assertEqual(elem.find('ETag').text, S3_ETAG)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self.swift.calls, [
            # Bucket exists
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            # Upload marker exists
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            # Create the SLO
            ('PUT', '/v1/AUTH_test/bucket/object'
                    '?heartbeat=on&multipart-manifest=put'),
            # Delete the in-progress-upload marker
            ('DELETE', '/v1/AUTH_test/bucket+segments/object/X')
        ])
        self.assertEqual(req.environ['swift.backend_path'],
                         '/v1/AUTH_test/bucket+segments/object/X')
        # [-2] is the SLO PUT
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
        self.assertEqual(headers.get('Content-Type'), 'baz/quux')
        # SLO will provide a base value
        override_etag = '; s3_etag=%s' % S3_ETAG.strip('"')
        h = 'X-Object-Sysmeta-Container-Update-Override-Etag'
        self.assertEqual(headers.get(h), override_etag)
        self.assertEqual(headers.get('X-Object-Sysmeta-S3Api-Upload-Id'), 'X')
    def test_object_multipart_upload_complete(self):
        """Default-fixture run of the happy-path complete helper."""
        self._do_test_object_multipart_upload_complete()
    def test_object_multipart_upload_complete_other_headers(self):
        """Standard HTTP headers stored on the upload marker are copied to
        the final SLO PUT."""
        headers = {'x-object-meta-foo': 'bar',
                   'content-type': 'application/directory',
                   'x-object-sysmeta-s3api-has-content-type': 'yes',
                   'x-object-sysmeta-s3api-content-type': 'baz/quux',
                   'content-encoding': 'gzip',
                   'content-language': 'de-DE',
                   'content-disposition': 'attachment',
                   'expires': 'Fri, 25 Mar 2022 09:34:00 GMT',
                   'cache-control': 'no-cache'
                   }
        self.swift.register('HEAD', self.segment_bucket + '/object/X',
                            swob.HTTPOk, headers, None)
        self._do_test_object_multipart_upload_complete()
        # [-2] is the SLO PUT
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual('gzip', headers.get('Content-Encoding'))
        self.assertEqual('de-DE', headers.get('Content-Language'))
        self.assertEqual('attachment', headers.get('Content-Disposition'))
        self.assertEqual('Fri, 25 Mar 2022 09:34:00 GMT',
                         headers.get('Expires'))
        self.assertEqual('no-cache', headers.get('Cache-Control'))
    def test_object_multipart_upload_complete_non_ascii(self):
        """Complete an upload whose key is non-ASCII; the manifest paths
        must decode back to the real (snowman) object name."""
        # WSGI-style (latin-1) representation of the UTF-8 snowman
        wsgi_snowman = '\xe2\x98\x83'
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket+segments/%s/X' % wsgi_snowman,
            swob.HTTPOk, {}, None)
        self.swift.register('PUT', '/v1/AUTH_test/bucket/%s' % wsgi_snowman,
                            swob.HTTPCreated, {}, None)
        self.swift.register(
            'DELETE', '/v1/AUTH_test/bucket+segments/%s/X' % wsgi_snowman,
            swob.HTTPOk, {}, None)
        content_md5 = base64.b64encode(md5(
            XML.encode('ascii'), usedforsecurity=False).digest())
        req = Request.blank('/bucket/%s?uploadId=X' % wsgi_snowman,
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5, },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'CompleteMultipartUploadResult')
        self.assertNotIn('Etag', headers)
        self.assertEqual(elem.find('ETag').text, S3_ETAG)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self.swift.calls, [
            # Bucket exists
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            # Upload marker exists
            ('HEAD', '/v1/AUTH_test/bucket+segments/%s/X' % wsgi_snowman),
            # Create the SLO
            ('PUT', '/v1/AUTH_test/bucket/%s'
                    '?heartbeat=on&multipart-manifest=put' % wsgi_snowman),
            # Delete the in-progress-upload marker
            ('DELETE', '/v1/AUTH_test/bucket+segments/%s/X' % wsgi_snowman)
        ])
        # manifest body uses the decoded (unicode) key
        self.assertEqual(json.loads(self.swift.req_bodies[-2]), [
            {"path": u"/bucket+segments/\N{SNOWMAN}/X/1",
             "etag": "0123456789abcdef0123456789abcdef"},
            {"path": u"/bucket+segments/\N{SNOWMAN}/X/2",
             "etag": "fedcba9876543210fedcba9876543210"},
        ])
    def test_object_multipart_upload_retry_complete(self):
        """A retried complete: the marker is gone but the object already
        exists with the same upload ID and etag, so no SLO PUT is needed."""
        content_md5 = base64.b64encode(md5(
            XML.encode('ascii'), usedforsecurity=False).digest())
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X',
                            swob.HTTPNotFound, {}, None)
        recent_ts = S3Timestamp.now(delta=-1000000).internal  # 10s ago
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk,
                            {'x-object-meta-foo': 'bar',
                             'content-type': 'baz/quux',
                             'x-object-sysmeta-s3api-upload-id': 'X',
                             'x-object-sysmeta-s3api-etag': S3_ETAG.strip('"'),
                             'x-timestamp': recent_ts}, None)
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5, },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'CompleteMultipartUploadResult')
        self.assertNotIn('Etag', headers)
        self.assertEqual(elem.find('ETag').text, S3_ETAG)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self.swift.calls, [
            # Bucket exists
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            # Upload marker does not exist
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            # But the object does, and with the same upload ID
            ('HEAD', '/v1/AUTH_test/bucket/object'),
            # So no PUT necessary
        ])
        self.assertEqual(req.environ['swift.backend_path'],
                         '/v1/AUTH_test/bucket+segments/object/X')
    def test_object_multipart_upload_retry_complete_etag_mismatch(self):
        """Retried complete where the existing object's s3api etag differs:
        the SLO must be re-PUT and the marker delete retried."""
        content_md5 = base64.b64encode(md5(
            XML.encode('ascii'), usedforsecurity=False).digest())
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X',
                            swob.HTTPNotFound, {}, None)
        recent_ts = S3Timestamp.now(delta=-1000000).internal
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk,
                            {'x-object-meta-foo': 'bar',
                             'content-type': 'baz/quux',
                             'x-object-sysmeta-s3api-upload-id': 'X',
                             'x-object-sysmeta-s3api-etag': 'not-the-etag',
                             'x-timestamp': recent_ts}, None)
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5, },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'CompleteMultipartUploadResult')
        self.assertNotIn('Etag', headers)
        self.assertEqual(elem.find('ETag').text, S3_ETAG)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self.swift.calls, [
            # Bucket exists
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            # Upload marker does not exist
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            # The object exists with the same upload ID, but a different etag
            ('HEAD', '/v1/AUTH_test/bucket/object'),
            # Create the SLO
            ('PUT', '/v1/AUTH_test/bucket/object'
                    '?heartbeat=on&multipart-manifest=put'),
            # Retry deleting the marker for the sake of completeness
            ('DELETE', '/v1/AUTH_test/bucket+segments/object/X')
        ])
        self.assertEqual(req.environ['swift.backend_path'],
                         '/v1/AUTH_test/bucket+segments/object/X')
        # [-2] is the SLO PUT
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
        self.assertEqual(headers.get('Content-Type'), 'baz/quux')
        # SLO will provide a base value
        override_etag = '; s3_etag=%s' % S3_ETAG.strip('"')
        h = 'X-Object-Sysmeta-Container-Update-Override-Etag'
        self.assertEqual(headers.get(h), override_etag)
        self.assertEqual(headers.get('X-Object-Sysmeta-S3Api-Upload-Id'), 'X')
    def test_object_multipart_upload_retry_complete_upload_id_mismatch(self):
        """Retried complete where the existing object belongs to a
        different upload ID ('Y' vs requested 'X'): NoSuchUpload/404."""
        content_md5 = base64.b64encode(md5(
            XML.encode('ascii'), usedforsecurity=False).digest())
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X',
                            swob.HTTPNotFound, {}, None)
        recent_ts = S3Timestamp.now(delta=-1000000).internal
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk,
                            {'x-object-meta-foo': 'bar',
                             'content-type': 'baz/quux',
                             'x-object-sysmeta-s3api-upload-id': 'Y',
                             'x-object-sysmeta-s3api-etag': S3_ETAG.strip('"'),
                             'x-timestamp': recent_ts}, None)
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5, },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'Error')
        self.assertEqual(elem.find('Code').text, 'NoSuchUpload')
        self.assertEqual(status.split()[0], '404')
        self.assertEqual(self.swift.calls, [
            # Bucket exists
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            # Upload marker does not exist
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            # The object exists, but with a *different* upload ID
            ('HEAD', '/v1/AUTH_test/bucket/object'),
        ])
        self.assertEqual(req.environ['swift.backend_path'],
                         '/v1/AUTH_test/bucket+segments/object/X')
    def test_object_multipart_upload_retry_complete_nothing_there(self):
        """Complete with neither marker nor object present: NoSuchUpload."""
        content_md5 = base64.b64encode(md5(
            XML.encode('ascii'), usedforsecurity=False).digest())
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X',
                            swob.HTTPNotFound, {}, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPNotFound, {}, None)
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5, },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'Error')
        self.assertEqual(elem.find('Code').text, 'NoSuchUpload')
        self.assertEqual(status.split()[0], '404')
        self.assertEqual(self.swift.calls, [
            # Bucket exists
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            # Upload marker does not exist
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            # Neither does the object
            ('HEAD', '/v1/AUTH_test/bucket/object'),
        ])
        self.assertEqual(req.environ['swift.backend_path'],
                         '/v1/AUTH_test/bucket+segments/object/X')
def test_object_multipart_upload_invalid_md5(self):
bad_md5 = base64.b64encode(md5(
XML.encode('ascii') + b'some junk', usedforsecurity=False)
.digest())
req = Request.blank('/bucket/object?uploadId=X',
environ={'REQUEST_METHOD': 'POST'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header(),
'Content-MD5': bad_md5, },
body=XML)
status, headers, body = self.call_s3api(req)
self.assertEqual('400 Bad Request', status)
self.assertEqual(self._get_error_code(body), 'BadDigest')
    def test_object_multipart_upload_invalid_sha256(self):
        """A v4-signed request whose X-Amz-Content-SHA256 doesn't match the
        body is rejected with BadDigest."""
        bad_sha = hashlib.sha256(
            XML.encode('ascii') + b'some junk').hexdigest()
        authz_header = 'AWS4-HMAC-SHA256 ' + ', '.join([
            'Credential=test:tester/%s/us-east-1/s3/aws4_request' %
            self.get_v4_amz_date_header().split('T', 1)[0],
            'SignedHeaders=host;x-amz-date',
            'Signature=X',
        ])
        req = Request.blank(
            '/bucket/object?uploadId=X',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'Authorization': authz_header,
                     'X-Amz-Date': self.get_v4_amz_date_header(),
                     'X-Amz-Content-SHA256': bad_sha, },
            body=XML)
        status, headers, body = self.call_s3api(req)
        self.assertEqual('400 Bad Request', status)
        self.assertEqual(self._get_error_code(body), 'BadDigest')
        self.assertEqual('/v1/AUTH_test/bucket+segments/object/X',
                         req.environ.get('swift.backend_path'))
    def test_object_multipart_upload_upper_sha256(self):
        """An upper-case (but correct) X-Amz-Content-SHA256 is accepted."""
        upper_sha = hashlib.sha256(
            XML.encode('ascii')).hexdigest().upper()
        authz_header = 'AWS4-HMAC-SHA256 ' + ', '.join([
            'Credential=test:tester/%s/us-east-1/s3/aws4_request' %
            self.get_v4_amz_date_header().split('T', 1)[0],
            'SignedHeaders=host;x-amz-date',
            'Signature=X',
        ])
        req = Request.blank(
            '/bucket/object?uploadId=X',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'Authorization': authz_header,
                     'X-Amz-Date': self.get_v4_amz_date_header(),
                     'X-Amz-Content-SHA256': upper_sha, },
            body=XML)
        status, headers, body = self.call_s3api(req)
        self.assertEqual('200 OK', status)
    @patch('swift.common.middleware.s3api.controllers.multi_upload.time')
    def test_object_multipart_upload_complete_with_heartbeat(self, mock_time):
        """Successful complete-MPU through SLO's heartbeat mode: leading
        whitespace is emitted before the XML result, and the final body is
        a valid CompleteMultipartUploadResult."""
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket+segments/heartbeat-ok/X',
            swob.HTTPOk, {}, None)
        self.swift.register(
            'GET', '/v1/AUTH_test/bucket+segments', swob.HTTPOk, {},
            json.dumps([
                {'name': item[0].replace('object', 'heartbeat-ok'),
                 'last_modified': item[1], 'hash': item[2], 'bytes': item[3]}
                for item in OBJECTS_TEMPLATE
            ]))
        # SLO PUT response body: heartbeat whitespace then the JSON result.
        self.swift.register(
            'PUT', '/v1/AUTH_test/bucket/heartbeat-ok',
            swob.HTTPAccepted, {}, [b' ', b' ', b' ', json.dumps({
                'Etag': '"slo-etag"',
                'Response Status': '201 Created',
                'Errors': [],
            }).encode('ascii')])
        mock_time.time.side_effect = (
            1,  # start_time
            12,  # first whitespace
            13,  # second...
            14,  # third...
            15,  # JSON body
        )
        self.swift.register(
            'DELETE', '/v1/AUTH_test/bucket+segments/heartbeat-ok/X',
            swob.HTTPNoContent, {}, None)
        req = Request.blank('/bucket/heartbeat-ok?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        # Body: XML declaration line, then a whitespace-only heartbeat line.
        lines = body.split(b'\n')
        self.assertTrue(lines[0].startswith(b'<?xml '))
        self.assertTrue(lines[1])
        self.assertFalse(lines[1].strip())
        fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual(status.split()[0], '200')
        # NB: S3_ETAG includes quotes
        self.assertIn(('<ETag>%s</ETag>' % S3_ETAG).encode('ascii'), body)
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/heartbeat-ok/X'),
            ('PUT', '/v1/AUTH_test/bucket/heartbeat-ok?'
                    'heartbeat=on&multipart-manifest=put'),
            ('DELETE', '/v1/AUTH_test/bucket+segments/heartbeat-ok/X'),
        ])
    @patch('swift.common.middleware.s3api.controllers.multi_upload.time')
    def test_object_multipart_upload_complete_failure_with_heartbeat(
            self, mock_time):
        """When SLO reports a 400 with per-segment errors after heartbeat
        whitespace, s3api returns HTTP 200 but an InvalidRequest error body
        (status was already committed); no marker DELETE happens."""
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket+segments/heartbeat-fail/X',
            swob.HTTPOk, {}, None)
        self.swift.register(
            'GET', '/v1/AUTH_test/bucket+segments', swob.HTTPOk, {},
            json.dumps([
                {'name': item[0].replace('object', 'heartbeat-fail'),
                 'last_modified': item[1], 'hash': item[2], 'bytes': item[3]}
                for item in OBJECTS_TEMPLATE
            ]))
        self.swift.register(
            'PUT', '/v1/AUTH_test/bucket/heartbeat-fail',
            swob.HTTPAccepted, {}, [b' ', b' ', b' ', json.dumps({
                'Response Status': '400 Bad Request',
                'Errors': [['some/object', '403 Forbidden']],
            }).encode('ascii')])
        mock_time.time.side_effect = (
            1,  # start_time
            12,  # first whitespace
            13,  # second...
            14,  # third...
            15,  # JSON body
        )
        req = Request.blank('/bucket/heartbeat-fail?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        lines = body.split(b'\n')
        self.assertTrue(lines[0].startswith(b'<?xml '), (status, lines))
        self.assertTrue(lines[1])
        self.assertFalse(lines[1].strip())
        fromstring(body, 'Error')
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self._get_error_code(body), 'InvalidRequest')
        self.assertEqual(self._get_error_message(body),
                         'some/object: 403 Forbidden')
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/heartbeat-fail/X'),
            ('PUT', '/v1/AUTH_test/bucket/heartbeat-fail?'
                    'heartbeat=on&multipart-manifest=put'),
        ])
    @patch('swift.common.middleware.s3api.controllers.multi_upload.time')
    def test_object_multipart_upload_missing_part_with_heartbeat(
            self, mock_time):
        """A 404 for a segment during heartbeat completion maps to the S3
        InvalidPart error, again inside a 200 response."""
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket+segments/heartbeat-fail/X',
            swob.HTTPOk, {}, None)
        self.swift.register(
            'GET', '/v1/AUTH_test/bucket+segments', swob.HTTPOk, {},
            json.dumps([
                {'name': item[0].replace('object', 'heartbeat-fail'),
                 'last_modified': item[1], 'hash': item[2], 'bytes': item[3]}
                for item in OBJECTS_TEMPLATE
            ]))
        self.swift.register(
            'PUT', '/v1/AUTH_test/bucket/heartbeat-fail',
            swob.HTTPAccepted, {}, [b' ', b' ', b' ', json.dumps({
                'Response Status': '400 Bad Request',
                'Errors': [['some/object', '404 Not Found']],
            }).encode('ascii')])
        mock_time.time.side_effect = (
            1,  # start_time
            12,  # first whitespace
            13,  # second...
            14,  # third...
            15,  # JSON body
        )
        req = Request.blank('/bucket/heartbeat-fail?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        lines = body.split(b'\n')
        self.assertTrue(lines[0].startswith(b'<?xml '))
        self.assertTrue(lines[1])
        self.assertFalse(lines[1].strip())
        fromstring(body, 'Error')
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self._get_error_code(body), 'InvalidPart')
        self.assertIn('One or more of the specified parts could not be found',
                      self._get_error_message(body))
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/heartbeat-fail/X'),
            ('PUT', '/v1/AUTH_test/bucket/heartbeat-fail?'
                    'heartbeat=on&multipart-manifest=put'),
        ])
    def test_object_multipart_upload_complete_404_on_marker_delete(self):
        """A 404 when deleting the upload marker after a successful
        completion is ignored; the request still succeeds."""
        segment_bucket = '/v1/AUTH_test/bucket+segments'
        self.swift.register('DELETE', segment_bucket + '/object/X',
                            swob.HTTPNotFound, {}, None)
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        fromstring(body, 'CompleteMultipartUploadResult')
        # The manifest PUT (second-to-last call) carried the marker's
        # metadata and content type.
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
        self.assertEqual(headers.get('Content-Type'), 'baz/quux')
    def test_object_multipart_upload_complete_old_content_type(self):
        """A pre-existing Content-Type on the upload marker (legacy markers
        without the has-content-type sysmeta) is copied to the manifest."""
        self.swift.register_unconditionally(
            'HEAD', '/v1/AUTH_test/bucket+segments/object/X',
            swob.HTTPOk, {"Content-Type": "thingy/dingy"}, None)
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual(status.split()[0], '200')
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual(headers.get('Content-Type'), 'thingy/dingy')
    def test_object_multipart_upload_complete_no_content_type(self):
        """When the marker's sysmeta says no content type was supplied at
        initiate time, the manifest PUT omits Content-Type entirely."""
        self.swift.register_unconditionally(
            'HEAD', '/v1/AUTH_test/bucket+segments/object/X',
            swob.HTTPOk, {"X-Object-Sysmeta-S3api-Has-Content-Type": "no"},
            None)
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual(status.split()[0], '200')
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertNotIn('Content-Type', headers)
    def test_object_multipart_upload_complete_weird_host_name(self):
        # This happens via boto signature v4
        """A malformed Host header (double port) must not break completion."""
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST',
                                     'HTTP_HOST': 'localhost:8080:8080'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=XML)
        status, headers, body = self.call_s3api(req)
        fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual(status.split()[0], '200')
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
    def test_object_multipart_upload_complete_segment_too_small(self):
        """SLO's too-small-segment 400 is translated to EntityTooSmall;
        s3api itself does not pre-validate sizes. Exercised at both the
        default and a 5 MiB min_segment_size."""
        msg = ('some/path: s3api requires that each segment be at least '
               '%d bytes') % self.s3api.conf.min_segment_size
        self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                            swob.HTTPBadRequest, {}, msg)
        req = Request.blank(
            '/bucket/object?uploadId=X',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header(), },
            body=XML)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '400')
        self.assertEqual(self._get_error_code(body), 'EntityTooSmall')
        self.assertEqual(self._get_error_message(body), msg)
        # We punt to SLO to do the validation
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            ('PUT', '/v1/AUTH_test/bucket/object'
                    '?heartbeat=on&multipart-manifest=put'),
        ])
        self.swift.clear_calls()
        # Repeat with a larger configured minimum (5 MiB).
        self.s3api.conf.min_segment_size = 5242880
        msg = ('some/path: s3api requires that each segment be at least '
               '%d bytes') % self.s3api.conf.min_segment_size
        self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                            swob.HTTPBadRequest, {}, msg)
        req = Request.blank(
            '/bucket/object?uploadId=X',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header(), },
            body=XML)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '400')
        self.assertEqual(self._get_error_code(body), 'EntityTooSmall')
        self.assertEqual(self._get_error_message(body), msg)
        # Again, we punt to SLO to do the validation
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            ('PUT', '/v1/AUTH_test/bucket/object'
                    '?heartbeat=on&multipart-manifest=put'),
        ])
    def test_object_multipart_upload_complete_zero_segments(self):
        """A CompleteMultipartUpload body listing no parts is a 400 error;
        no manifest PUT or segment cleanup is attempted."""
        segment_bucket = '/v1/AUTH_test/empty-bucket+segments'
        object_list = [{
            'name': 'object/X/1',
            'last_modified': self.last_modified,
            'hash': 'd41d8cd98f00b204e9800998ecf8427e',
            'bytes': '0',
        }]
        self.swift.register('GET', segment_bucket, swob.HTTPOk, {},
                            json.dumps(object_list))
        self.swift.register('HEAD', '/v1/AUTH_test/empty-bucket',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('HEAD', segment_bucket + '/object/X',
                            swob.HTTPOk, {'x-object-meta-foo': 'bar',
                                          'content-type': 'baz/quux'}, None)
        self.swift.register('PUT', '/v1/AUTH_test/empty-bucket/object',
                            swob.HTTPCreated, {}, None)
        self.swift.register('DELETE', segment_bucket + '/object/X/1',
                            swob.HTTPOk, {}, None)
        self.swift.register('DELETE', segment_bucket + '/object/X',
                            swob.HTTPOk, {}, None)
        xml = '<CompleteMultipartUpload></CompleteMultipartUpload>'
        req = Request.blank('/empty-bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '400')
        fromstring(body, 'Error')
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/empty-bucket'),
            ('HEAD', '/v1/AUTH_test/empty-bucket+segments/object/X'),
        ])
    def test_object_multipart_upload_complete_single_zero_length_segment(self):
        """A single zero-byte part is a valid upload: manifest is PUT and
        the marker deleted, carrying the marker's meta/content-type."""
        segment_bucket = '/v1/AUTH_test/empty-bucket+segments'
        put_headers = {'etag': self.etag, 'last-modified': self.last_modified}
        object_list = [{
            'name': 'object/X/1',
            'last_modified': self.last_modified,
            'hash': 'd41d8cd98f00b204e9800998ecf8427e',
            'bytes': '0',
        }]
        self.swift.register('GET', segment_bucket, swob.HTTPOk, {},
                            json.dumps(object_list))
        self.swift.register('HEAD', '/v1/AUTH_test/empty-bucket',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('HEAD', segment_bucket + '/object/X',
                            swob.HTTPOk, {'x-object-meta-foo': 'bar',
                                          'content-type': 'baz/quux'}, None)
        self.swift.register('PUT', '/v1/AUTH_test/empty-bucket/object',
                            swob.HTTPCreated, {}, None)
        self.swift.register('DELETE', segment_bucket + '/object/X/1',
                            swob.HTTPOk, {}, None)
        self.swift.register('DELETE', segment_bucket + '/object/X',
                            swob.HTTPOk, {}, None)
        xml = '<CompleteMultipartUpload>' \
            '<Part>' \
            '<PartNumber>1</PartNumber>' \
            '<ETag>d41d8cd98f00b204e9800998ecf8427e</ETag>' \
            '</Part>' \
            '</CompleteMultipartUpload>'
        req = Request.blank('/empty-bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=xml)
        status, headers, body = self.call_s3api(req)
        fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/empty-bucket'),
            ('HEAD', '/v1/AUTH_test/empty-bucket+segments/object/X'),
            ('PUT', '/v1/AUTH_test/empty-bucket/object?'
                    'heartbeat=on&multipart-manifest=put'),
            ('DELETE', '/v1/AUTH_test/empty-bucket+segments/object/X'),
        ])
        _, _, put_headers = self.swift.calls_with_headers[-2]
        self.assertEqual(put_headers.get('X-Object-Meta-Foo'), 'bar')
        self.assertEqual(put_headers.get('Content-Type'), 'baz/quux')
    def test_object_multipart_upload_complete_zero_length_final_segment(self):
        """A zero-byte FINAL part is allowed; the computed S3-style ETag
        ("md5-of-part-md5s"-N) is returned and pushed to the container
        listing via the override-etag sysmeta header."""
        segment_bucket = '/v1/AUTH_test/bucket+segments'
        object_list = [{
            'name': 'object/X/1',
            'last_modified': self.last_modified,
            'hash': '0123456789abcdef0123456789abcdef',
            'bytes': '100',
        }, {
            'name': 'object/X/2',
            'last_modified': self.last_modified,
            'hash': 'fedcba9876543210fedcba9876543210',
            'bytes': '1',
        }, {
            'name': 'object/X/3',
            'last_modified': self.last_modified,
            'hash': 'd41d8cd98f00b204e9800998ecf8427e',
            'bytes': '0',
        }]
        self.swift.register('GET', segment_bucket, swob.HTTPOk, {},
                            json.dumps(object_list))
        self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('HEAD', segment_bucket + '/object/X',
                            swob.HTTPOk, {'x-object-meta-foo': 'bar',
                                          'content-type': 'baz/quux'}, None)
        self.swift.register('DELETE', segment_bucket + '/object/X/3',
                            swob.HTTPNoContent, {}, None)
        xml = '<CompleteMultipartUpload>' \
            '<Part>' \
            '<PartNumber>1</PartNumber>' \
            '<ETag>0123456789abcdef0123456789abcdef</ETag>' \
            '</Part>' \
            '<Part>' \
            '<PartNumber>2</PartNumber>' \
            '<ETag>fedcba9876543210fedcba9876543210</ETag>' \
            '</Part>' \
            '<Part>' \
            '<PartNumber>3</PartNumber>' \
            '<ETag>d41d8cd98f00b204e9800998ecf8427e</ETag>' \
            '</Part>' \
            '</CompleteMultipartUpload>'
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(), },
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'CompleteMultipartUploadResult')
        self.assertNotIn('Etag', headers)
        # S3 MPU etag: md5 over the concatenated binary part md5s, "-3".
        expected_etag = ('"%s-3"' % md5(binascii.unhexlify(''.join(
            x['hash'] for x in object_list)), usedforsecurity=False)
            .hexdigest())
        self.assertEqual(elem.find('ETag').text, expected_etag)
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            ('PUT', '/v1/AUTH_test/bucket/object?'
                    'heartbeat=on&multipart-manifest=put'),
            ('DELETE', '/v1/AUTH_test/bucket+segments/object/X'),
        ])
        _, _, headers = self.swift.calls_with_headers[-2]
        # SLO will provide a base value
        override_etag = '; s3_etag=%s' % expected_etag.strip('"')
        h = 'X-Object-Sysmeta-Container-Update-Override-Etag'
        self.assertEqual(headers.get(h), override_etag)
    @s3acl(s3acl_only=True)
    def test_object_multipart_upload_complete_s3acl(self):
        """The ACL stashed on the upload marker (tmpacl sysmeta) is decoded
        and applied to the completed object, along with user meta."""
        acl_headers = encode_acl('object', ACLPublicRead(Owner('test:tester',
                                                              'test:tester')))
        headers = {}
        headers[sysmeta_header('object', 'tmpacl')] = \
            acl_headers.get(sysmeta_header('object', 'acl'))
        headers['X-Object-Meta-Foo'] = 'bar'
        headers['Content-Type'] = 'baz/quux'
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X',
                            swob.HTTPOk, headers, None)
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=XML)
        status, headers, body = self.call_s3api(req)
        fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual(status.split()[0], '200')
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual(headers.get('X-Object-Meta-Foo'), 'bar')
        self.assertEqual(headers.get('Content-Type'), 'baz/quux')
        self.assertEqual(
            tostring(ACLPublicRead(Owner('test:tester',
                                         'test:tester')).elem()),
            tostring(decode_acl('object', headers, False).elem()))
    @s3acl
    def test_object_multipart_upload_abort_error(self):
        """Abort with an unknown uploadId gives NoSuchUpload; abort against
        a missing bucket gives NoSuchBucket."""
        req = Request.blank('/bucket/object?uploadId=invalid',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NoSuchUpload')
        # without target bucket
        req = Request.blank('/nobucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        with patch(
                'swift.common.middleware.s3api.s3request.get_container_info',
                lambda env, app, swift_source: {'status': 404}):
            self.swift.register('HEAD', '/v1/AUTH_test/nobucket',
                                swob.HTTPNotFound, {}, None)
            status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
    @s3acl
    def test_object_multipart_upload_abort(self):
        """Aborting an in-progress upload returns 204 No Content."""
        req = Request.blank('/bucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')
    @s3acl
    @patch('swift.common.middleware.s3api.s3request.get_container_info',
           lambda env, app, swift_source: {'status': 204})
    def test_object_upload_part_error(self):
        """UploadPart validation errors: missing uploadId, non-numeric,
        zero, and >10000 part numbers are InvalidArgument; missing bucket
        is NoSuchBucket."""
        # without upload id
        req = Request.blank('/bucket/object?partNumber=1',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body='part object')
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
        # invalid part number
        req = Request.blank('/bucket/object?partNumber=invalid&uploadId=X',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body='part object')
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
        # part number must be > 0
        req = Request.blank('/bucket/object?partNumber=0&uploadId=X',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body='part object')
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
        # part number must be < 10001
        req = Request.blank('/bucket/object?partNumber=10001&uploadId=X',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body='part object')
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
        # without target bucket
        req = Request.blank('/nobucket/object?partNumber=1&uploadId=X',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body='part object')
        with patch(
                'swift.common.middleware.s3api.s3request.get_container_info',
                lambda env, app, swift_source: {'status': 404}):
            self.swift.register('HEAD', '/v1/AUTH_test/nobucket',
                                swob.HTTPNotFound, {}, None)
            status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
    @s3acl
    def test_object_upload_part(self):
        """A well-formed UploadPart PUT succeeds with 200."""
        req = Request.blank('/bucket/object?partNumber=1&uploadId=X',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body='part object')
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
    @s3acl
    def test_object_list_parts_error(self):
        """ListParts with an unknown uploadId gives NoSuchUpload; with a
        missing bucket, NoSuchBucket."""
        req = Request.blank('/bucket/object?uploadId=invalid',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NoSuchUpload')
        # without target bucket
        req = Request.blank('/nobucket/object?uploadId=X',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        with patch(
                'swift.common.middleware.s3api.s3request.get_container_info',
                lambda env, app, swift_source: {'status': 404}):
            self.swift.register('HEAD', '/v1/AUTH_test/nobucket',
                                swob.HTTPNotFound, {}, None)
            status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
@s3acl
def test_object_list_parts(self):
swift_parts = [
{'name': 'object/X/%d' % i,
'last_modified': '2014-05-07T19:47:%02d.592270' % (i % 60),
'hash': hex(i),
'bytes': 100 * i}
for i in range(1, 2000)]
ceil_last_modified = ['2014-05-07T19:%02d:%02d.000Z'
% (47 if (i + 1) % 60 else 48, (i + 1) % 60)
for i in range(1, 2000)]
swift_sorted = sorted(swift_parts, key=lambda part: part['name'])
self.swift.register('GET',
"%s?delimiter=/&format=json&marker=&"
"prefix=object/X/" % self.segment_bucket,
swob.HTTPOk, {}, json.dumps(swift_sorted))
self.swift.register('GET',
"%s?delimiter=/&format=json&marker=object/X/999&"
"prefix=object/X/" % self.segment_bucket,
swob.HTTPOk, {}, json.dumps({}))
req = Request.blank('/bucket/object?uploadId=X',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
elem = fromstring(body, 'ListPartsResult')
self.assertEqual(elem.find('Bucket').text, 'bucket')
self.assertEqual(elem.find('Key').text, 'object')
self.assertEqual(elem.find('UploadId').text, 'X')
self.assertEqual(elem.find('Initiator/ID').text, 'test:tester')
self.assertEqual(elem.find('Initiator/ID').text, 'test:tester')
self.assertEqual(elem.find('Owner/ID').text, 'test:tester')
self.assertEqual(elem.find('Owner/ID').text, 'test:tester')
self.assertEqual(elem.find('StorageClass').text, 'STANDARD')
self.assertEqual(elem.find('PartNumberMarker').text, '0')
self.assertEqual(elem.find('NextPartNumberMarker').text, '1000')
self.assertEqual(elem.find('MaxParts').text, '1000')
self.assertEqual(elem.find('IsTruncated').text, 'true')
self.assertEqual(len(elem.findall('Part')), 1000)
s3_parts = []
for p in elem.findall('Part'):
partnum = int(p.find('PartNumber').text)
s3_parts.append(partnum)
self.assertEqual(
p.find('LastModified').text,
ceil_last_modified[partnum - 1])
self.assertEqual(p.find('ETag').text.strip(),
'"%s"' % swift_parts[partnum - 1]['hash'])
self.assertEqual(p.find('Size').text,
str(swift_parts[partnum - 1]['bytes']))
self.assertEqual(status.split()[0], '200')
self.assertEqual(s3_parts, list(range(1, 1001)))
    def test_object_list_parts_encoding_type(self):
        """With encoding-type=url, Key is URL-quoted and EncodingType is
        echoed in the result."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object@@/X',
                            swob.HTTPOk, {}, None)
        self.swift.register('GET', "%s?delimiter=/&format=json&"
                            "marker=object/X/2&prefix=object@@/X/"
                            % self.segment_bucket, swob.HTTPOk, {},
                            json.dumps({}))
        req = Request.blank('/bucket/object@@?uploadId=X&encoding-type=url',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(elem.find('Key').text, quote('object@@'))
        self.assertEqual(elem.find('EncodingType').text, 'url')
        self.assertEqual(status.split()[0], '200')
    def test_object_list_parts_without_encoding_type(self):
        """Without encoding-type, Key is returned unescaped."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object@@/X',
                            swob.HTTPOk, {}, None)
        self.swift.register('GET', "%s?delimiter=/&format=json&"
                            "marker=object/X/2&prefix=object@@/X/"
                            % self.segment_bucket, swob.HTTPOk, {},
                            json.dumps({}))
        req = Request.blank('/bucket/object@@?uploadId=X',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(elem.find('Key').text, 'object@@')
        self.assertEqual(status.split()[0], '200')
    def test_object_list_parts_encoding_type_error(self):
        """An unsupported encoding-type value is InvalidArgument."""
        req = Request.blank('/bucket/object?uploadId=X&encoding-type=xml',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
    def test_object_list_parts_max_parts(self):
        """max-parts=1 limits the listing and marks it truncated."""
        req = Request.blank('/bucket/object?uploadId=X&max-parts=1',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(elem.find('IsTruncated').text, 'true')
        self.assertEqual(len(elem.findall('Part')), 1)
        self.assertEqual(status.split()[0], '200')
    def test_object_list_parts_str_max_parts(self):
        """A non-numeric max-parts is InvalidArgument."""
        req = Request.blank('/bucket/object?uploadId=X&max-parts=invalid',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
    def test_object_list_parts_negative_max_parts(self):
        """A negative max-parts is InvalidArgument."""
        req = Request.blank('/bucket/object?uploadId=X&max-parts=-1',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
    def test_object_list_parts_over_max_parts(self):
        """max-parts above the configured maximum is clamped to the
        default listing size (MaxParts reported as 1000)."""
        req = Request.blank('/bucket/object?uploadId=X&max-parts=%d' %
                            (self.s3api.conf.max_parts_listing + 1),
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(elem.find('Bucket').text, 'bucket')
        self.assertEqual(elem.find('Key').text, 'object')
        self.assertEqual(elem.find('UploadId').text, 'X')
        self.assertEqual(elem.find('Initiator/ID').text, 'test:tester')
        self.assertEqual(elem.find('Owner/ID').text, 'test:tester')
        self.assertEqual(elem.find('StorageClass').text, 'STANDARD')
        self.assertEqual(elem.find('PartNumberMarker').text, '0')
        self.assertEqual(elem.find('NextPartNumberMarker').text, '2')
        self.assertEqual(elem.find('MaxParts').text, '1000')
        self.assertEqual(elem.find('IsTruncated').text, 'false')
        self.assertEqual(len(elem.findall('Part')), 2)
        for p in elem.findall('Part'):
            partnum = int(p.find('PartNumber').text)
            self.assertEqual(p.find('LastModified').text,
                             OBJECTS_TEMPLATE[partnum - 1][4])
            self.assertEqual(p.find('ETag').text,
                             '"%s"' % OBJECTS_TEMPLATE[partnum - 1][2])
            self.assertEqual(p.find('Size').text,
                             str(OBJECTS_TEMPLATE[partnum - 1][3]))
        self.assertEqual(status.split()[0], '200')
    def test_object_list_parts_over_max_32bit_int(self):
        """max-parts beyond a signed 32-bit int is InvalidArgument."""
        req = Request.blank('/bucket/object?uploadId=X&max-parts=%d' %
                            (MAX_32BIT_INT + 1),
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
    def test_object_list_parts_with_part_number_marker(self):
        """part-number-marker=1 skips part 1 and starts listing at 2."""
        req = Request.blank('/bucket/object?uploadId=X&'
                            'part-number-marker=1',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(len(elem.findall('Part')), 1)
        self.assertEqual(elem.find('Part/PartNumber').text, '2')
        self.assertEqual(elem.find('PartNumberMarker').text, '1')
        self.assertEqual(status.split()[0], '200')
    def test_object_list_parts_str_part_number_marker(self):
        """A non-numeric part-number-marker is InvalidArgument."""
        req = Request.blank('/bucket/object?uploadId=X&part-number-marker='
                            'invalid',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
    def test_object_list_parts_negative_part_number_marker(self):
        """A negative part-number-marker is InvalidArgument."""
        req = Request.blank('/bucket/object?uploadId=X&part-number-marker='
                            '-1',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
    def test_object_list_parts_over_part_number_marker(self):
        """A marker past the last allowed part number yields an empty,
        successful listing with the marker echoed back."""
        part_number_marker = str(self.s3api.conf.max_upload_part_num + 1)
        req = Request.blank('/bucket/object?uploadId=X&'
                            'part-number-marker=%s' % part_number_marker,
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(len(elem.findall('Part')), 0)
        self.assertEqual(elem.find('PartNumberMarker').text,
                         part_number_marker)
        self.assertEqual(status.split()[0], '200')
    def test_object_list_parts_over_max_32bit_int_part_number_marker(self):
        """A part-number-marker beyond a signed 32-bit int is
        InvalidArgument."""
        req = Request.blank('/bucket/object?uploadId=X&part-number-marker='
                            '%s' % ((MAX_32BIT_INT + 1)),
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')
    # NOTE(review): "marts" in the method name is a typo for "parts";
    # left unchanged to preserve the public test name.
    def test_object_list_parts_same_max_marts_as_objects_num(self):
        """max-parts equal to the number of parts lists everything."""
        req = Request.blank('/bucket/object?uploadId=X&max-parts=2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(len(elem.findall('Part')), 2)
        self.assertEqual(status.split()[0], '200')
    def _test_for_s3acl(self, method, query, account, hasObj=True, body=None):
        """Issue a request as *account* for the ACL permission tests.

        :param method: HTTP method to use
        :param query: query string (including leading '?')
        :param account: value placed in the AWS Authorization header
        :param hasObj: if True, target /bucket/object; else /bucket
        :param body: optional request body
        :returns: (status, headers, body) from call_s3api
        """
        path = '/bucket%s' % ('/object' + query if hasObj else query)
        req = Request.blank(path,
                            environ={'REQUEST_METHOD': method},
                            headers={'Authorization': 'AWS %s:hmac' % account,
                                     'Date': self.get_date_header()},
                            body=body)
        return self.call_s3api(req)
    @s3acl(s3acl_only=True)
    def test_upload_part_acl_without_permission(self):
        """UploadPart by an unprivileged account is 403."""
        status, headers, body = \
            self._test_for_s3acl('PUT', '?partNumber=1&uploadId=X',
                                 'test:other')
        self.assertEqual(status.split()[0], '403')
    @s3acl(s3acl_only=True)
    def test_upload_part_acl_with_write_permission(self):
        """UploadPart with WRITE permission succeeds."""
        status, headers, body = \
            self._test_for_s3acl('PUT', '?partNumber=1&uploadId=X',
                                 'test:write')
        self.assertEqual(status.split()[0], '200')
    @s3acl(s3acl_only=True)
    def test_upload_part_acl_with_fullcontrol_permission(self):
        """UploadPart with FULL_CONTROL permission succeeds."""
        status, headers, body = \
            self._test_for_s3acl('PUT', '?partNumber=1&uploadId=X',
                                 'test:full_control')
        self.assertEqual(status.split()[0], '200')
    @s3acl(s3acl_only=True)
    def test_list_multipart_uploads_acl_without_permission(self):
        """ListMultipartUploads by an unprivileged account is 403."""
        status, headers, body = \
            self._test_for_s3acl('GET', '?uploads', 'test:other',
                                 hasObj=False)
        self.assertEqual(status.split()[0], '403')
    @s3acl(s3acl_only=True)
    def test_list_multipart_uploads_acl_with_read_permission(self):
        """ListMultipartUploads with READ permission succeeds."""
        status, headers, body = \
            self._test_for_s3acl('GET', '?uploads', 'test:read',
                                 hasObj=False)
        self.assertEqual(status.split()[0], '200')
    @s3acl(s3acl_only=True)
    def test_list_multipart_uploads_acl_with_fullcontrol_permission(self):
        """ListMultipartUploads with FULL_CONTROL permission succeeds."""
        status, headers, body = \
            self._test_for_s3acl('GET', '?uploads', 'test:full_control',
                                 hasObj=False)
        self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
@patch('swift.common.middleware.s3api.controllers.'
'multi_upload.unique_id', lambda: 'X')
def test_initiate_multipart_upload_acl_without_permission(self):
status, headers, body = \
self._test_for_s3acl('POST', '?uploads', 'test:other')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
@patch('swift.common.middleware.s3api.controllers.'
'multi_upload.unique_id', lambda: 'X')
def test_initiate_multipart_upload_acl_with_write_permission(self):
status, headers, body = \
self._test_for_s3acl('POST', '?uploads', 'test:write')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
@patch('swift.common.middleware.s3api.controllers.'
'multi_upload.unique_id', lambda: 'X')
def test_initiate_multipart_upload_acl_with_fullcontrol_permission(self):
status, headers, body = \
self._test_for_s3acl('POST', '?uploads', 'test:full_control')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_list_parts_acl_without_permission(self):
status, headers, body = \
self._test_for_s3acl('GET', '?uploadId=X', 'test:other')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_list_parts_acl_with_read_permission(self):
status, headers, body = \
self._test_for_s3acl('GET', '?uploadId=X', 'test:read')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_list_parts_acl_with_fullcontrol_permission(self):
status, headers, body = \
self._test_for_s3acl('GET', '?uploadId=X', 'test:full_control')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_abort_multipart_upload_acl_without_permission(self):
status, headers, body = \
self._test_for_s3acl('DELETE', '?uploadId=X', 'test:other')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_abort_multipart_upload_acl_with_write_permission(self):
status, headers, body = \
self._test_for_s3acl('DELETE', '?uploadId=X', 'test:write')
self.assertEqual(status.split()[0], '204')
    @s3acl(s3acl_only=True)
    def test_abort_multipart_upload_acl_with_fullcontrol_permission(self):
        """FULL_CONTROL grantee may abort; segments must be cleaned up."""
        status, headers, body = \
            self._test_for_s3acl('DELETE', '?uploadId=X', 'test:full_control')
        self.assertEqual(status.split()[0], '204')
        # Aborting must delete the in-progress upload marker and every
        # uploaded segment from the +segments container.
        self.assertEqual([
            path for method, path in self.swift.calls if method == 'DELETE'
        ], [
            '/v1/AUTH_test/bucket+segments/object/X',
            '/v1/AUTH_test/bucket+segments/object/X/1',
            '/v1/AUTH_test/bucket+segments/object/X/2',
        ])
@s3acl(s3acl_only=True)
def test_complete_multipart_upload_acl_without_permission(self):
status, headers, body = \
self._test_for_s3acl('POST', '?uploadId=X', 'test:other',
body=XML)
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_complete_multipart_upload_acl_with_write_permission(self):
status, headers, body = \
self._test_for_s3acl('POST', '?uploadId=X', 'test:write',
body=XML)
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_complete_multipart_upload_acl_with_fullcontrol_permission(self):
status, headers, body = \
self._test_for_s3acl('POST', '?uploadId=X', 'test:full_control',
body=XML)
self.assertEqual(status.split()[0], '200')
    def _test_copy_for_s3acl(self, account, src_permission=None,
                             src_path='/src_bucket/src_obj', src_headers=None,
                             head_resp=swob.HTTPOk, put_header=None,
                             timestamp=None):
        """Upload-part-copy helper: PUT ?partNumber=1&uploadId=X as *account*.

        Registers a HEAD response (*head_resp* with *src_headers*) for the
        copy source whose ACL grants *src_permission* to *account* (the
        owner gets FULL_CONTROL when no permission is given), then issues
        the copy with *put_header* merged into the request headers.
        Returns the (status, headers, body) tuple from call_s3api.
        """
        owner = 'test:tester'
        grants = [Grant(User(account), src_permission)] \
            if src_permission else [Grant(User(owner), 'FULL_CONTROL')]
        src_o_headers = encode_acl('object', ACL(Owner(owner, owner), grants))
        src_o_headers.update({'last-modified': self.last_modified})
        src_o_headers.update(src_headers or {})
        self.swift.register('HEAD', '/v1/AUTH_test/%s' % src_path.lstrip('/'),
                            head_resp, src_o_headers, None)
        put_header = put_header or {}
        put_headers = {'Authorization': 'AWS %s:hmac' % account,
                       'Date': self.get_date_header(),
                       'X-Amz-Copy-Source': src_path}
        put_headers.update(put_header)
        req = Request.blank(
            '/bucket/object?partNumber=1&uploadId=X',
            environ={'REQUEST_METHOD': 'PUT'},
            headers=put_headers)
        # Pin s3api's notion of "now" so the LastModified in the response
        # is deterministic for assertions.
        timestamp = timestamp or time.time()
        with patch('swift.common.middleware.s3api.utils.time.time',
                   return_value=timestamp):
            return self.call_s3api(req)
@s3acl
def test_upload_part_copy(self):
date_header = self.get_date_header()
timestamp = mktime(date_header)
last_modified = S3Timestamp(timestamp).s3xmlformat
status, headers, body = self._test_copy_for_s3acl(
'test:tester', put_header={'Date': date_header},
timestamp=timestamp)
self.assertEqual(status.split()[0], '200')
self.assertEqual(headers['Content-Type'], 'application/xml')
self.assertTrue(headers.get('etag') is None)
elem = fromstring(body, 'CopyPartResult')
self.assertEqual(elem.find('LastModified').text, last_modified)
self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(headers['X-Copy-From'], '/src_bucket/src_obj')
self.assertEqual(headers['Content-Length'], '0')
# Some headers *need* to get cleared in case we're copying from
# another multipart upload
for header in (
'X-Object-Sysmeta-S3api-Etag',
'X-Object-Sysmeta-Slo-Etag',
'X-Object-Sysmeta-Slo-Size',
'X-Object-Sysmeta-Container-Update-Override-Etag',
'X-Object-Sysmeta-Swift3-Etag',
):
self.assertEqual(headers[header], '')
@s3acl(s3acl_only=True)
def test_upload_part_copy_acl_with_owner_permission(self):
status, headers, body = \
self._test_copy_for_s3acl('test:tester')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_upload_part_copy_acl_without_permission(self):
status, headers, body = \
self._test_copy_for_s3acl('test:other', 'READ')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_upload_part_copy_acl_with_write_permission(self):
status, headers, body = \
self._test_copy_for_s3acl('test:write', 'READ')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_upload_part_copy_acl_with_fullcontrol_permission(self):
status, headers, body = \
self._test_copy_for_s3acl('test:full_control', 'READ')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_upload_part_copy_acl_without_src_permission(self):
status, headers, body = \
self._test_copy_for_s3acl('test:write', 'WRITE')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_upload_part_copy_acl_invalid_source(self):
status, headers, body = \
self._test_copy_for_s3acl('test:write', 'WRITE', '')
self.assertEqual(status.split()[0], '400')
status, headers, body = \
self._test_copy_for_s3acl('test:write', 'WRITE', '/')
self.assertEqual(status.split()[0], '400')
status, headers, body = \
self._test_copy_for_s3acl('test:write', 'WRITE', '/bucket')
self.assertEqual(status.split()[0], '400')
status, headers, body = \
self._test_copy_for_s3acl('test:write', 'WRITE', '/bucket/')
self.assertEqual(status.split()[0], '400')
@s3acl
def test_upload_part_copy_headers_error(self):
account = 'test:tester'
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag}
status, header, body = \
self._test_copy_for_s3acl(account,
head_resp=swob.HTTPPreconditionFailed,
put_header=header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = {'X-Amz-Copy-Source-If-None-Match': etag}
status, header, body = \
self._test_copy_for_s3acl(account,
head_resp=swob.HTTPNotModified,
put_header=header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = {'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
status, header, body = \
self._test_copy_for_s3acl(account,
head_resp=swob.HTTPNotModified,
put_header=header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = \
{'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
status, header, body = \
self._test_copy_for_s3acl(account,
head_resp=swob.HTTPPreconditionFailed,
put_header=header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
    def test_upload_part_copy_headers_with_match(self):
        """If-Match/If-Modified-Since are forwarded only to the source HEAD."""
        account = 'test:tester'
        etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
        last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
        header = {'X-Amz-Copy-Source-If-Match': etag,
                  'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
        status, header, body = \
            self._test_copy_for_s3acl(account, put_header=header)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            ('HEAD', '/v1/AUTH_test/src_bucket/src_obj'),
            ('PUT', '/v1/AUTH_test/bucket+segments/object/X/1'),
        ])
        # The source HEAD carries the conditional headers...
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual(headers['If-Match'], etag)
        self.assertEqual(headers['If-Modified-Since'], last_modified_since)
        # ...but neither the segment PUT nor the first account HEAD does.
        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertTrue(headers.get('If-Match') is None)
        self.assertTrue(headers.get('If-Modified-Since') is None)
        _, _, headers = self.swift.calls_with_headers[0]
        self.assertTrue(headers.get('If-Match') is None)
        self.assertTrue(headers.get('If-Modified-Since') is None)
    @s3acl(s3acl_only=True)
    def test_upload_part_copy_headers_with_match_and_s3acl(self):
        """With s3acl, conditional headers still only hit the source HEAD."""
        account = 'test:tester'
        etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
        last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
        header = {'X-Amz-Copy-Source-If-Match': etag,
                  'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
        status, header, body = \
            self._test_copy_for_s3acl(account, put_header=header)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(len(self.swift.calls_with_headers), 4)
        # Before the check of the copy source in the case of s3acl is valid,
        # s3api check the bucket write permissions and the object existence
        # of the destination.
        _, _, headers = self.swift.calls_with_headers[-3]
        self.assertTrue(headers.get('If-Match') is None)
        self.assertTrue(headers.get('If-Modified-Since') is None)
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual(headers['If-Match'], etag)
        self.assertEqual(headers['If-Modified-Since'], last_modified_since)
        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertTrue(headers.get('If-Match') is None)
        self.assertTrue(headers.get('If-Modified-Since') is None)
        _, _, headers = self.swift.calls_with_headers[0]
        self.assertTrue(headers.get('If-Match') is None)
        self.assertTrue(headers.get('If-Modified-Since') is None)
    def test_upload_part_copy_headers_with_not_match(self):
        """If-None-Match/If-Unmodified-Since only reach the source HEAD."""
        account = 'test:tester'
        etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
        last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
        header = {'X-Amz-Copy-Source-If-None-Match': etag,
                  'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
        status, header, body = \
            self._test_copy_for_s3acl(account, put_header=header)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            ('HEAD', '/v1/AUTH_test/src_bucket/src_obj'),
            ('PUT', '/v1/AUTH_test/bucket+segments/object/X/1'),
        ])
        # The source HEAD carries the conditional headers...
        _, _, headers = self.swift.calls_with_headers[-2]
        self.assertEqual(headers['If-None-Match'], etag)
        self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
        # ...but neither the segment PUT nor the first account HEAD does.
        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertTrue(headers.get('If-None-Match') is None)
        self.assertTrue(headers.get('If-Unmodified-Since') is None)
        _, _, headers = self.swift.calls_with_headers[0]
        self.assertTrue(headers.get('If-None-Match') is None)
        self.assertTrue(headers.get('If-Unmodified-Since') is None)
@s3acl(s3acl_only=True)
def test_upload_part_copy_headers_with_not_match_and_s3acl(self):
account = 'test:tester'
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-None-Match': etag,
'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
status, header, body = \
self._test_copy_for_s3acl(account, put_header=header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 4)
# Before the check of the copy source in the case of s3acl is valid,
# s3api check the bucket write permissions and the object existence
# of the destination.
_, _, headers = self.swift.calls_with_headers[-3]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[-2]
self.assertEqual(headers['If-None-Match'], etag)
self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-None-Match') is None)
self.assertTrue(headers.get('If-Unmodified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
    def test_upload_part_copy_range_unsatisfiable(self):
        """A range beyond the source size is rejected before any PUT."""
        account = 'test:tester'
        header = {'X-Amz-Copy-Source-Range': 'bytes=1000-'}
        status, header, body = self._test_copy_for_s3acl(
            account, src_headers={'Content-Length': '10'}, put_header=header)
        self.assertEqual(status.split()[0], '400')
        self.assertIn(b'Range specified is not valid for '
                      b'source object of size: 10', body)
        # No segment PUT may be attempted after the failed range check.
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            ('HEAD', '/v1/AUTH_test/src_bucket/src_obj'),
        ], self.swift.calls)
def test_upload_part_copy_range_invalid(self):
account = 'test:tester'
header = {'X-Amz-Copy-Source-Range': '0-9'}
status, header, body = \
self._test_copy_for_s3acl(account, put_header=header)
self.assertEqual(status.split()[0], '400', body)
header = {'X-Amz-Copy-Source-Range': 'asdf'}
status, header, body = \
self._test_copy_for_s3acl(account, put_header=header)
self.assertEqual(status.split()[0], '400', body)
    def test_upload_part_copy_range(self):
        """A satisfiable range is forwarded as Range on the segment PUT."""
        account = 'test:tester'
        header = {'X-Amz-Copy-Source-Range': 'bytes=0-9'}
        status, header, body = self._test_copy_for_s3acl(
            account, src_headers={'Content-Length': '20'}, put_header=header)
        self.assertEqual(status.split()[0], '200', body)
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket+segments/object/X'),
            ('HEAD', '/v1/AUTH_test/src_bucket/src_obj'),
            ('PUT', '/v1/AUTH_test/bucket+segments/object/X/1'),
        ], self.swift.calls)
        # The range is translated to a backend Range header on the copy PUT.
        put_headers = self.swift.calls_with_headers[-1][2]
        self.assertEqual('bytes=0-9', put_headers['Range'])
        self.assertEqual('/src_bucket/src_obj', put_headers['X-Copy-From'])
    def _test_no_body(self, use_content_length=False,
                      use_transfer_encoding=False, string_to_md5=b''):
        """Complete an upload with an empty body; expect InvalidRequest.

        Content-MD5 is computed over *string_to_md5* so callers can try
        both matching and mismatching digests; the wsgi.input stream must
        never be read (UnreadableInput enforces that).
        """
        raw_md5 = md5(string_to_md5, usedforsecurity=False).digest()
        content_md5 = base64.b64encode(raw_md5).strip()
        with UnreadableInput(self) as fake_input:
            req = Request.blank(
                '/bucket/object?uploadId=X',
                environ={
                    'REQUEST_METHOD': 'POST',
                    'wsgi.input': fake_input},
                headers={
                    'Authorization': 'AWS test:tester:hmac',
                    'Date': self.get_date_header(),
                    'Content-MD5': content_md5},
                body='')
            if not use_content_length:
                req.environ.pop('CONTENT_LENGTH')
            if use_transfer_encoding:
                req.environ['HTTP_TRANSFER_ENCODING'] = 'chunked'
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '400 Bad Request')
        self.assertEqual(self._get_error_code(body), 'InvalidRequest')
        self.assertEqual(self._get_error_message(body),
                         'You must specify at least one part')
@s3acl
def test_object_multi_upload_empty_body(self):
self._test_no_body()
self._test_no_body(string_to_md5=b'test')
self._test_no_body(use_content_length=True)
self._test_no_body(use_content_length=True, string_to_md5=b'test')
self._test_no_body(use_transfer_encoding=True)
self._test_no_body(use_transfer_encoding=True, string_to_md5=b'test')
class TestS3ApiMultiUploadNonUTC(TestS3ApiMultiUpload):
    """Re-run the multi-upload tests under a non-UTC local timezone.

    Catches accidental reliance on localtime in timestamp handling.
    """

    def setUp(self):
        # Remember the original TZ so tearDown can restore it.
        self.orig_tz = os.environ.get('TZ', '')
        os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
        time.tzset()
        super(TestS3ApiMultiUploadNonUTC, self).setUp()

    def tearDown(self):
        super(TestS3ApiMultiUploadNonUTC, self).tearDown()
        # Restore the process timezone for subsequent test modules.
        os.environ['TZ'] = self.orig_tz
        time.tzset()
# Allow running this test module stand-alone.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_multi_upload.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.middleware.s3api.acl_handlers import S3AclHandler, \
BucketAclHandler, ObjectAclHandler, BaseAclHandler, PartAclHandler, \
UploadAclHandler, UploadsAclHandler, get_acl_handler
class TestAclHandlers(unittest.TestCase):
    """Sanity checks for the ACL-handler lookup."""

    def test_get_acl_handler(self):
        """Each resource name resolves to its dedicated handler class."""
        expected_handlers = {'Bucket': BucketAclHandler,
                             'Object': ObjectAclHandler,
                             'S3Acl': S3AclHandler,
                             'Part': PartAclHandler,
                             'Upload': UploadAclHandler,
                             'Uploads': UploadsAclHandler,
                             # unknown names fall back to the base handler
                             'Foo': BaseAclHandler}
        for name, expected in expected_handlers.items():
            self.assertTrue(issubclass(get_acl_handler(name), expected))

    def test_handle_acl(self):
        # s3_acl checking is already covered by test_s3_acl.py
        pass
# Allow running this test module stand-alone.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_acl_handlers.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Request
from test.unit.common.middleware.s3api import S3ApiTestCase
from swift.common.middleware.s3api.etree import fromstring
class TestS3ApiLogging(S3ApiTestCase):
    """Tests for the ?logging bucket/object subresource."""

    def setUp(self):
        super(TestS3ApiLogging, self).setUp()

    def _logging_req(self, path, method):
        # Build a signed ?logging request for the given path and method.
        return Request.blank('%s?logging' % path,
                             environ={'REQUEST_METHOD': method},
                             headers={'Authorization':
                                      'AWS test:tester:hmac',
                                      'Date': self.get_date_header()})

    def test_bucket_logging_GET(self):
        status, headers, body = self.call_s3api(
            self._logging_req('/bucket', 'GET'))
        xml = fromstring(body, 'BucketLoggingStatus')
        self.assertEqual(xml.keys(), [])
        self.assertEqual(status.split()[0], '200')

    def test_object_logging_GET_error(self):
        status, headers, body = self.call_s3api(
            self._logging_req('/bucket/object', 'GET'))
        self.assertEqual(self._get_error_code(body), 'NoLoggingStatusForKey')

    def test_bucket_logging_PUT(self):
        status, headers, body = self.call_s3api(
            self._logging_req('/bucket', 'PUT'))
        # FIXME: Support PUT logging
        # self.assertEqual(status, 201)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')

    def test_object_logging_PUT_error(self):
        status, headers, body = self.call_s3api(
            self._logging_req('/bucket/object', 'PUT'))
        self.assertEqual(self._get_error_code(body), 'NoLoggingStatusForKey')
# Allow running this test module stand-alone.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_logging.py |
# -*- coding: utf-8 -*-
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import json
import unittest
from datetime import datetime
import mock
from swift.common import swob
from swift.common.swob import Request
from test.unit import make_timestamp_iter
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.helpers import UnreadableInput
from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement
from swift.common.utils import md5
from test.unit.common.middleware.s3api.test_s3_acl import s3acl
class TestS3ApiMultiDelete(S3ApiTestCase):
def setUp(self):
super(TestS3ApiMultiDelete, self).setUp()
self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key1',
swob.HTTPOk, {}, None)
self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key2',
swob.HTTPNotFound, {}, None)
self.swift.register('HEAD',
'/v1/AUTH_test/bucket/business/caf\xc3\xa9',
swob.HTTPOk, {}, None)
self.ts = make_timestamp_iter()
@s3acl
def test_object_multi_DELETE_to_object(self):
elem = Element('Delete')
obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = 'object'
body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip()
req = Request.blank('/bucket/object?delete',
environ={'REQUEST_METHOD': 'POST'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header(),
'Content-MD5': content_md5},
body=body)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
    @s3acl
    def test_object_multi_DELETE(self):
        """Multi-delete of plain objects, SLOs and a UTF-8 key succeeds.

        Key3 and Key4 are static large objects: Key3 is deleted with
        multipart-manifest=delete; Key4, which carries an s3api etag,
        additionally gets async=on.
        """
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key3',
                            swob.HTTPOk,
                            {'x-static-large-object': 'True'},
                            None)
        self.swift.register('DELETE',
                            '/v1/AUTH_test/bucket/business/caf\xc3\xa9',
                            swob.HTTPNoContent, {}, None)
        slo_delete_resp = {
            'Number Not Found': 0,
            'Response Status': '200 OK',
            'Errors': [],
            'Response Body': '',
            'Number Deleted': 8
        }
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key3',
                            swob.HTTPOk, {}, json.dumps(slo_delete_resp))
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key4',
                            swob.HTTPOk,
                            {'x-static-large-object': 'True',
                             'x-object-sysmeta-s3api-etag': 'some-etag'},
                            None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key4',
                            swob.HTTPNoContent, {}, None)
        elem = Element('Delete')
        for key in ['Key1', 'Key2', 'Key3', 'Key4', 'business/café']:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
        body = tostring(elem, use_s3ns=False)
        content_md5 = base64.b64encode(
            md5(body, usedforsecurity=False).digest()).strip()
        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Content-Type': 'multipart/form-data',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body)
        # All five keys report success (Key2's 404 still counts as deleted).
        self.assertEqual(len(elem.findall('Deleted')), 5)
        self.assertEqual(len(elem.findall('Error')), 0)
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket/Key1?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key1'),
            ('HEAD', '/v1/AUTH_test/bucket/Key2?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key2'),
            ('HEAD', '/v1/AUTH_test/bucket/Key3?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key3?multipart-manifest=delete'),
            ('HEAD', '/v1/AUTH_test/bucket/Key4?symlink=get'),
            ('DELETE',
             '/v1/AUTH_test/bucket/Key4?async=on&multipart-manifest=delete'),
            ('HEAD', '/v1/AUTH_test/bucket/business/caf\xc3\xa9?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/business/caf\xc3\xa9'),
        ])
    @s3acl
    def test_object_multi_DELETE_with_error(self):
        """Per-key failures are reported as Error elements, not a 4xx.

        Key3's HEAD is forbidden (AccessDenied); Key4 is an SLO whose
        bulk delete reports segment errors (SLODeleteError).
        """
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key3',
                            swob.HTTPForbidden, {}, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key4',
                            swob.HTTPOk,
                            {'x-static-large-object': 'True'},
                            None)
        slo_delete_resp = {
            'Number Not Found': 0,
            'Response Status': '400 Bad Request',
            'Errors': [
                ["/bucket+segments/obj1", "403 Forbidden"],
                ["/bucket+segments/obj2", "403 Forbidden"]
            ],
            'Response Body': '',
            'Number Deleted': 8
        }
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key4',
                            swob.HTTPOk, {}, json.dumps(slo_delete_resp))
        elem = Element('Delete')
        for key in ['Key1', 'Key2', 'Key3', 'Key4']:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
        body = tostring(elem, use_s3ns=False)
        content_md5 = base64.b64encode(
            md5(body, usedforsecurity=False).digest()).strip()
        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Content-Type': 'multipart/form-data',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body)
        self.assertEqual(len(elem.findall('Deleted')), 2)
        self.assertEqual(len(elem.findall('Error')), 2)
        self.assertEqual(
            [(el.find('Code').text, el.find('Message').text)
             for el in elem.findall('Error')],
            [('AccessDenied', 'Access Denied.'),
             ('SLODeleteError', '\n'.join([
                 '400 Bad Request',
                 '/bucket+segments/obj1: 403 Forbidden',
                 '/bucket+segments/obj2: 403 Forbidden']))]
        )
        # Key3's DELETE is never attempted after the forbidden HEAD.
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', '/v1/AUTH_test/bucket/Key1?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key1'),
            ('HEAD', '/v1/AUTH_test/bucket/Key2?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key2'),
            ('HEAD', '/v1/AUTH_test/bucket/Key3?symlink=get'),
            ('HEAD', '/v1/AUTH_test/bucket/Key4?symlink=get'),
            ('DELETE', '/v1/AUTH_test/bucket/Key4?multipart-manifest=delete'),
        ])
    @s3acl
    def test_object_multi_DELETE_with_non_json(self):
        """An unparsable SLO delete response surfaces as SLODeleteError."""
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key3',
                            swob.HTTPForbidden, {}, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/Key4',
                            swob.HTTPOk,
                            {'x-static-large-object': 'True'},
                            None)
        # SLO delete responds with garbage instead of a JSON summary
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key4',
                            swob.HTTPOk, {}, b'asdf')
        elem = Element('Delete')
        for key in ['Key1', 'Key2', 'Key3', 'Key4']:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
        body = tostring(elem, use_s3ns=False)
        content_md5 = base64.b64encode(
            md5(body, usedforsecurity=False).digest()).strip()
        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Content-Type': 'multipart/form-data',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body)
        self.assertEqual(len(elem.findall('Deleted')), 2)
        self.assertEqual(len(elem.findall('Error')), 2)
        self.assertEqual(
            [tuple(el.find(x).text for x in ('Key', 'Code', 'Message'))
             for el in elem.findall('Error')],
            [('Key3', 'AccessDenied', 'Access Denied.'),
             ('Key4', 'SLODeleteError', 'Unexpected swift response')])
        # The parse failure is also logged at error level.
        self.assertEqual(self.s3api.logger.get_lines_for_level('error'), [
            'Could not parse SLO delete response (200 OK): %s: ' % b'asdf'])
        self.s3api.logger.clear()
    @s3acl
    def test_object_multi_DELETE_quiet(self):
        """Quiet mode (any capitalization of 'true') omits Deleted entries."""
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)
        for true_value in ('true', 'True', 'TRUE', 'trUE'):
            elem = Element('Delete')
            SubElement(elem, 'Quiet').text = true_value
            for key in ['Key1', 'Key2']:
                obj = SubElement(elem, 'Object')
                SubElement(obj, 'Key').text = key
            body = tostring(elem, use_s3ns=False)
            content_md5 = base64.b64encode(
                md5(body, usedforsecurity=False).digest()).strip()
            req = Request.blank('/bucket?delete',
                                environ={'REQUEST_METHOD': 'POST'},
                                headers={
                                    'Authorization': 'AWS test:tester:hmac',
                                    'Date': self.get_date_header(),
                                    'Content-MD5': content_md5},
                                body=body)
            status, headers, body = self.call_s3api(req)
            self.assertEqual(status.split()[0], '200')
            elem = fromstring(body)
            self.assertEqual(len(elem.findall('Deleted')), 0)
    @s3acl
    def test_object_multi_DELETE_no_key(self):
        """Object elements with empty Keys yield UserKeyMustBeSpecified."""
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key2',
                            swob.HTTPNotFound, {}, None)
        elem = Element('Delete')
        SubElement(elem, 'Quiet').text = 'true'
        # Key elements are deliberately left without text
        for key in ['Key1', 'Key2']:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key')
        body = tostring(elem, use_s3ns=False)
        content_md5 = base64.b64encode(
            md5(body, usedforsecurity=False).digest()).strip()
        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'UserKeyMustBeSpecified')
    @s3acl
    def test_object_multi_DELETE_versioned_enabled(self):
        """Multi-delete passes version-id through on a versioned bucket.

        Covers explicit version ids, the literal 'null' version, and a
        request without VersionId.
        """
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {
                'X-Container-Sysmeta-Versions-Enabled': 'True',
            }, None)
        t1 = next(self.ts)
        key1 = '/v1/AUTH_test/bucket/Key1' \
            '?symlink=get&version-id=%s' % t1.normal
        self.swift.register('HEAD', key1, swob.HTTPOk, {}, None)
        self.swift.register('DELETE', key1, swob.HTTPNoContent, {}, None)
        t2 = next(self.ts)
        key2 = '/v1/AUTH_test/bucket/Key2' \
            '?symlink=get&version-id=%s' % t2.normal
        # this 404 could just mean it's a delete marker
        self.swift.register('HEAD', key2, swob.HTTPNotFound, {}, None)
        self.swift.register('DELETE', key2, swob.HTTPNoContent, {}, None)
        key3 = '/v1/AUTH_test/bucket/Key3'
        self.swift.register('HEAD', key3 + '?symlink=get',
                            swob.HTTPOk, {}, None)
        self.swift.register('DELETE', key3, swob.HTTPNoContent, {}, None)
        key4 = '/v1/AUTH_test/bucket/Key4?symlink=get&version-id=null'
        self.swift.register('HEAD', key4, swob.HTTPOk, {}, None)
        self.swift.register('DELETE', key4, swob.HTTPNoContent, {}, None)
        elem = Element('Delete')
        items = (
            ('Key1', t1.normal),
            ('Key2', t2.normal),
            ('Key3', None),
            ('Key4', 'null'),
        )
        for key, version in items:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
            if version:
                SubElement(obj, 'VersionId').text = version
        body = tostring(elem, use_s3ns=False)
        content_md5 = base64.b64encode(
            md5(body, usedforsecurity=False).digest()).strip()
        req = Request.blank('/bucket?delete',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Content-MD5': content_md5},
                            body=body)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('HEAD', key1),
            ('DELETE', key1),
            ('HEAD', key2),
            ('DELETE', key2),
            ('HEAD', key3 + '?symlink=get'),
            ('DELETE', key3),
            ('HEAD', key4),
            ('DELETE', key4),
        ])
        elem = fromstring(body)
        self.assertEqual({'Key1', 'Key2', 'Key3', 'Key4'}, set(
            e.findtext('Key') for e in elem.findall('Deleted')))
@s3acl
def test_object_multi_DELETE_versioned_suspended(self):
    """Multi-delete against a bucket with no versioning status.

    The bucket HEAD reports no versioning sysmeta; explicit version-ids
    in the request are still honoured, and a 404 on a versioned key is
    nonetheless reported as Deleted.
    """
    # Bucket exists but carries no versioning sysmeta.
    self.swift.register(
        'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent, {}, None)
    t1 = next(self.ts)
    key1 = '/v1/AUTH_test/bucket/Key1' + \
        '?symlink=get&version-id=%s' % t1.normal
    self.swift.register('HEAD', key1, swob.HTTPOk, {}, None)
    self.swift.register('DELETE', key1, swob.HTTPNoContent, {}, None)
    t2 = next(self.ts)
    key2 = '/v1/AUTH_test/bucket/Key2' + \
        '?symlink=get&version-id=%s' % t2.normal
    # Key2's version is missing (404 on both HEAD and DELETE).
    self.swift.register('HEAD', key2, swob.HTTPNotFound, {}, None)
    self.swift.register('DELETE', key2, swob.HTTPNotFound, {}, None)
    key3 = '/v1/AUTH_test/bucket/Key3'
    self.swift.register('HEAD', key3, swob.HTTPOk, {}, None)
    self.swift.register('DELETE', key3, swob.HTTPNoContent, {}, None)
    # Build the <Delete> body; Key3 has no <VersionId> element.
    elem = Element('Delete')
    items = (
        ('Key1', t1),
        ('Key2', t2),
        ('Key3', None),
    )
    for key, ts in items:
        obj = SubElement(elem, 'Object')
        SubElement(obj, 'Key').text = key
        if ts:
            SubElement(obj, 'VersionId').text = ts.normal
    body = tostring(elem, use_s3ns=False)
    content_md5 = base64.b64encode(
        md5(body, usedforsecurity=False).digest()).strip()
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header(),
                                 'Content-MD5': content_md5},
                        body=body)
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status.split()[0], '200')
    elem = fromstring(body)
    # All three keys -- including the 404ing Key2 -- count as Deleted.
    self.assertEqual(len(elem.findall('Deleted')), 3)
    # Versioned keys are HEADed (symlink-aware) before DELETE; the
    # version-id-less Key3 is HEADed with ?symlink=get but deleted
    # with a plain path.
    self.assertEqual(self.swift.calls, [
        ('HEAD', '/v1/AUTH_test/bucket'),
        ('HEAD', '/v1/AUTH_test/bucket/Key1'
         '?symlink=get&version-id=%s' % t1.normal),
        ('DELETE', '/v1/AUTH_test/bucket/Key1'
         '?symlink=get&version-id=%s' % t1.normal),
        ('HEAD', '/v1/AUTH_test/bucket/Key2'
         '?symlink=get&version-id=%s' % t2.normal),
        ('DELETE', '/v1/AUTH_test/bucket/Key2'
         '?symlink=get&version-id=%s' % t2.normal),
        ('HEAD', '/v1/AUTH_test/bucket/Key3?symlink=get'),
        ('DELETE', '/v1/AUTH_test/bucket/Key3'),
    ])
@s3acl
def test_object_multi_DELETE_with_invalid_md5(self):
    """A bogus Content-MD5 on a multi-delete yields InvalidDigest."""
    delete_elem = Element('Delete')
    for name in ('Key1', 'Key2'):
        entry = SubElement(delete_elem, 'Object')
        SubElement(entry, 'Key').text = name
    payload = tostring(delete_elem, use_s3ns=False)
    request_headers = {'Authorization': 'AWS test:tester:hmac',
                       'Date': self.get_date_header(),
                       'Content-MD5': 'XXXX'}
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers=request_headers,
                        body=payload)
    status, headers, body = self.call_s3api(req)
    self.assertEqual(self._get_error_code(body), 'InvalidDigest')
@s3acl
def test_object_multi_DELETE_without_md5(self):
    """Omitting Content-MD5 from a multi-delete yields InvalidRequest."""
    delete_elem = Element('Delete')
    for name in ('Key1', 'Key2'):
        entry = SubElement(delete_elem, 'Object')
        SubElement(entry, 'Key').text = name
    payload = tostring(delete_elem, use_s3ns=False)
    request_headers = {'Authorization': 'AWS test:tester:hmac',
                       'Date': self.get_date_header()}
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers=request_headers,
                        body=payload)
    status, headers, body = self.call_s3api(req)
    self.assertEqual(self._get_error_code(body), 'InvalidRequest')
@s3acl
def test_object_multi_DELETE_lots_of_keys(self):
    """A multi-delete with exactly max_multi_delete_objects succeeds."""
    elem = Element('Delete')
    for i in range(self.s3api.conf.max_multi_delete_objects):
        # Alternate existing (200) and missing (404) objects; the final
        # assertion shows both are reported as Deleted.
        status = swob.HTTPOk if i % 2 else swob.HTTPNotFound
        # Long key names push the request body towards its size limit.
        name = 'x' * 1000 + str(i)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/%s' % name,
                            status, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/%s' % name,
                            swob.HTTPNoContent, {}, None)
        obj = SubElement(elem, 'Object')
        SubElement(obj, 'Key').text = name
    body = tostring(elem, use_s3ns=False)
    content_md5 = (base64.b64encode(
        md5(body, usedforsecurity=False).digest()).strip())
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header(),
                                 'Content-MD5': content_md5},
                        body=body)
    status, headers, body = self.call_s3api(req)
    self.assertEqual('200 OK', status)
    elem = fromstring(body)
    self.assertEqual(len(elem.findall('Deleted')),
                     self.s3api.conf.max_multi_delete_objects)
@s3acl
def test_object_multi_DELETE_too_many_keys(self):
    """One key beyond max_multi_delete_objects is a MalformedXML error."""
    delete_elem = Element('Delete')
    n_keys = self.s3api.conf.max_multi_delete_objects + 1
    for i in range(n_keys):
        entry = SubElement(delete_elem, 'Object')
        SubElement(entry, 'Key').text = 'x' * 1000 + str(i)
    payload = tostring(delete_elem, use_s3ns=False)
    digest = md5(payload, usedforsecurity=False).digest()
    content_md5 = base64.b64encode(digest).strip()
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header(),
                                 'Content-MD5': content_md5},
                        body=payload)
    status, headers, body = self.call_s3api(req)
    self.assertEqual(self._get_error_code(body), 'MalformedXML')
@s3acl
def test_object_multi_DELETE_unhandled_exception(self):
    """An unexpected backend exception becomes a per-key <Error>.

    The overall response stays 200; the failing key is reported with a
    'Server Error' code instead of aborting the whole multi-delete.
    """
    # A MagicMock side_effect makes the registered DELETE blow up.
    exploding_resp = mock.MagicMock(
        side_effect=Exception('kaboom'))
    self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
                        exploding_resp, {}, None)
    elem = Element('Delete')
    obj = SubElement(elem, 'Object')
    SubElement(obj, 'Key').text = 'Key1'
    body = tostring(elem, use_s3ns=False)
    content_md5 = (base64.b64encode(
        md5(body, usedforsecurity=False).digest()).strip())
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header(),
                                 'Content-MD5': content_md5},
                        body=body)
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status.split()[0], '200')
    self.assertIn(b'<Error><Key>Key1</Key><Code>Server Error</Code>', body)
def _test_object_multi_DELETE(self, account):
    """Issue a two-key multi-delete as *account* and return the response.

    Side effect: stores the key names on ``self.keys`` so callers can
    compare them against the response body.

    :param account: account:user string for the Authorization header,
                    e.g. ``'test:write'``
    :returns: (status, headers, body) tuple from ``self.call_s3api``
    """
    self.keys = ['Key1', 'Key2']
    self.swift.register(
        'DELETE', '/v1/AUTH_test/bucket/%s' % self.keys[0],
        swob.HTTPNoContent, {}, None)
    # Second key 404s; callers assert how that is reported.
    self.swift.register(
        'DELETE', '/v1/AUTH_test/bucket/%s' % self.keys[1],
        swob.HTTPNotFound, {}, None)
    elem = Element('Delete')
    for key in self.keys:
        obj = SubElement(elem, 'Object')
        SubElement(obj, 'Key').text = key
    body = tostring(elem, use_s3ns=False)
    content_md5 = (
        base64.b64encode(md5(body, usedforsecurity=False).digest())
        .strip())
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'Authorization': 'AWS %s:hmac' % account,
                                 'Date': self.get_date_header(),
                                 'Content-MD5': content_md5},
                        body=body)
    req.date = datetime.now()
    req.content_type = 'text/plain'
    return self.call_s3api(req)
@s3acl(s3acl_only=True)
def test_object_multi_DELETE_without_permission(self):
    """Every key comes back as an AccessDenied <Error> for a stranger."""
    status, headers, body = self._test_object_multi_DELETE('test:other')
    self.assertEqual('200', status.split()[0])
    resp_elem = fromstring(body)
    errors = resp_elem.findall('Error')
    self.assertEqual(len(self.keys), len(errors))
    for err in errors:
        self.assertIn(err.find('Key').text, self.keys)
        self.assertEqual('AccessDenied', err.find('Code').text)
        self.assertEqual('Access Denied.', err.find('Message').text)
@s3acl(s3acl_only=True)
def test_object_multi_DELETE_with_write_permission(self):
    """A WRITE grant on the bucket is enough to multi-delete objects."""
    status, headers, body = self._test_object_multi_DELETE('test:write')
    self.assertEqual('200', status.split()[0])
    resp_elem = fromstring(body)
    self.assertEqual(len(self.keys), len(resp_elem.findall('Deleted')))
@s3acl(s3acl_only=True)
def test_object_multi_DELETE_with_fullcontrol_permission(self):
    """A FULL_CONTROL grant on the bucket allows multi-delete."""
    status, headers, body = \
        self._test_object_multi_DELETE('test:full_control')
    self.assertEqual('200', status.split()[0])
    resp_elem = fromstring(body)
    self.assertEqual(len(self.keys), len(resp_elem.findall('Deleted')))
def test_object_multi_DELETE_with_system_entity(self):
    """XML external entities in the request body must not be expanded.

    The body declares a SYSTEM entity pointing at /etc/passwd and
    references it inside Key1's name; a vulnerable (XXE-prone) parser
    would leak the file's contents into the response.
    """
    self.keys = ['Key1', 'Key2']
    self.swift.register(
        'DELETE', '/v1/AUTH_test/bucket/%s' % self.keys[0],
        swob.HTTPNotFound, {}, None)
    self.swift.register(
        'DELETE', '/v1/AUTH_test/bucket/%s' % self.keys[1],
        swob.HTTPNoContent, {}, None)
    elem = Element('Delete')
    for key in self.keys:
        obj = SubElement(elem, 'Object')
        SubElement(obj, 'Key').text = key
    body = tostring(elem, use_s3ns=False)
    # Splice a DOCTYPE with a SYSTEM entity into the serialized XML and
    # reference the entity from Key1's name.
    body = body.replace(
        b'?>\n',
        b'?>\n<!DOCTYPE foo '
        b'[<!ENTITY ent SYSTEM "file:///etc/passwd"> ]>\n',
    ).replace(b'>Key1<', b'>Key1&ent;<')
    content_md5 = (
        base64.b64encode(md5(body, usedforsecurity=False).digest())
        .strip())
    req = Request.blank('/bucket?delete',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={
                            'Authorization': 'AWS test:full_control:hmac',
                            'Date': self.get_date_header(),
                            'Content-MD5': content_md5},
                        body=body)
    req.date = datetime.now()
    req.content_type = 'text/plain'
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status, '200 OK', body)
    self.assertIn(b'<Deleted><Key>Key2</Key></Deleted>', body)
    # No /etc/passwd content may leak, and Key1 is still reported as a
    # plain deleted key (the entity reference is not expanded).
    self.assertNotIn(b'root:/root', body)
    self.assertIn(b'<Deleted><Key>Key1</Key></Deleted>', body)
def _test_no_body(self, use_content_length=False,
                  use_transfer_encoding=False, string_to_md5=b''):
    """Send a body-less multi-delete and expect MissingRequestBodyError.

    The wsgi.input is deliberately unreadable, so the middleware must
    reject the request without attempting to read a body.

    :param use_content_length: keep the CONTENT_LENGTH environ key
    :param use_transfer_encoding: advertise chunked transfer encoding
    :param string_to_md5: bytes used to compute the Content-MD5 header
                          (intentionally independent of the empty body)
    """
    content_md5 = (base64.b64encode(
        md5(string_to_md5, usedforsecurity=False).digest())
        .strip())
    with UnreadableInput(self) as fake_input:
        req = Request.blank(
            '/bucket?delete',
            environ={
                'REQUEST_METHOD': 'POST',
                'wsgi.input': fake_input},
            headers={
                'Authorization': 'AWS test:tester:hmac',
                'Date': self.get_date_header(),
                'Content-MD5': content_md5},
            body='')
        if not use_content_length:
            req.environ.pop('CONTENT_LENGTH')
        if use_transfer_encoding:
            req.environ['HTTP_TRANSFER_ENCODING'] = 'chunked'
        status, headers, body = self.call_s3api(req)
    self.assertEqual(status, '400 Bad Request')
    self.assertEqual(self._get_error_code(body), 'MissingRequestBodyError')
@s3acl
def test_object_multi_DELETE_empty_body(self):
    """An empty body fails regardless of framing headers or MD5 value."""
    for framing in ({},
                    {'use_content_length': True},
                    {'use_transfer_encoding': True}):
        self._test_no_body(**framing)
        self._test_no_body(string_to_md5=b'test', **framing)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_multi_delete.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime, timedelta
import hashlib
from mock import patch, MagicMock
import unittest
from io import BytesIO
from swift.common import swob
from swift.common.middleware.s3api import s3response, controllers
from swift.common.swob import Request, HTTPNoContent
from swift.common.middleware.s3api.utils import mktime, Config
from swift.common.middleware.s3api.acl_handlers import get_acl_handler
from swift.common.middleware.s3api.subresource import ACL, User, Owner, \
Grant, encode_acl
from test.unit.common.middleware.s3api.test_s3api import S3ApiTestCase
from swift.common.middleware.s3api.s3request import S3Request, \
S3AclRequest, SigV4Request, SIGV4_X_AMZ_DATE_FORMAT, HashingInput
from swift.common.middleware.s3api.s3response import InvalidArgument, \
NoSuchBucket, InternalError, ServiceUnavailable, \
AccessDenied, SignatureDoesNotMatch, RequestTimeTooSkewed, BadDigest
from swift.common.utils import md5
from test.debug_logger import debug_logger
# Simplified stand-in for acl_handlers.ACL_MAP (patched in by
# TestRequest._test_get_response below): covers only the basic GET/HEAD
# operations, all requiring READ on the addressed resource.
Fake_ACL_MAP = {
    # HEAD Bucket
    ('HEAD', 'HEAD', 'container'):
    {'Resource': 'container',
     'Permission': 'READ'},
    # GET Bucket
    ('GET', 'GET', 'container'):
    {'Resource': 'container',
     'Permission': 'READ'},
    # HEAD Object
    ('HEAD', 'HEAD', 'object'):
    {'Resource': 'object',
     'Permission': 'READ'},
    # GET Object
    ('GET', 'GET', 'object'):
    {'Resource': 'object',
     'Permission': 'READ'},
}
def _gen_test_acl_header(owner, permission=None, grantee=None,
                         resource='container'):
    """Build encoded sysmeta ACL headers granting *permission* on
    *resource*.

    Without a permission an empty, un-encoded ACL object is returned.
    The grantee defaults to the 'test:tester' user.
    """
    if permission is None:
        return ACL(owner, [])
    if grantee is None:
        grantee = User('test:tester')
    acl = ACL(owner, [Grant(grantee, permission)])
    return encode_acl(resource, acl)
class FakeResponse(object):
    """Stub swift response exposing only ACL sysmeta headers."""

    def __init__(self, s3_acl):
        # With s3_acl enabled, grant the test user FULL_CONTROL on both
        # the container and the object resource.
        self.sysmeta_headers = {}
        if not s3_acl:
            return
        owner = Owner(id='test:tester', name='test:tester')
        for res in ('container', 'object'):
            self.sysmeta_headers.update(
                _gen_test_acl_header(owner, 'FULL_CONTROL', resource=res))
class FakeSwiftResponse(object):
    """Stub auth response carrying Keystone-style identity environ keys."""

    def __init__(self):
        self.environ = dict(
            PATH_INFO='/v1/AUTH_test',
            HTTP_X_TENANT_NAME='test',
            HTTP_X_USER_NAME='tester',
            HTTP_X_AUTH_TOKEN='token',
        )
class TestRequest(S3ApiTestCase):
def setUp(self):
    """Enable S3 ACL handling in both the middleware and fake swift."""
    super(TestRequest, self).setUp()
    self.s3api.conf.s3_acl = True
    self.swift.s3_acl = True
@patch('swift.common.middleware.s3api.acl_handlers.ACL_MAP', Fake_ACL_MAP)
@patch('swift.common.middleware.s3api.s3request.S3AclRequest.authenticate',
       lambda x, y: None)
def _test_get_response(self, method, container='bucket', obj=None,
                       permission=None, skip_check=False,
                       req_klass=S3Request, fake_swift_resp=None):
    """Drive req_klass.get_response with backend and auth mocked out.

    ACL_MAP is replaced by the simplified Fake_ACL_MAP and
    S3AclRequest.authenticate is a no-op, so only the ACL-check
    machinery itself is exercised.

    :returns: (mock of S3Request._get_response,
               mock of ACL.check_permission,
               response returned by get_response)
    """
    path = '/' + container + ('/' + obj if obj else '')
    req = Request.blank(path,
                        environ={'REQUEST_METHOD': method},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    s3_req = req_klass(req.environ, conf=self.s3api.conf)
    s3_req.set_acl_handler(
        get_acl_handler(s3_req.controller_name)(s3_req, debug_logger()))
    with patch('swift.common.middleware.s3api.s3request.S3Request.'
               '_get_response') as mock_get_resp, \
            patch('swift.common.middleware.s3api.subresource.ACL.'
                  'check_permission') as m_check_permission:
        # Backend responses are faked; FakeResponse only carries ACL
        # sysmeta when s3_acl is enabled.
        mock_get_resp.return_value = fake_swift_resp \
            or FakeResponse(self.s3api.conf.s3_acl)
        return mock_get_resp, m_check_permission,\
            s3_req.get_response(self.s3api)
def test_get_response_without_s3_acl(self):
    """With s3_acl disabled, no ACLs are loaded and no checks happen."""
    self.s3api.conf.s3_acl = False
    self.swift.s3_acl = False
    mock_get_resp, m_check_permission, s3_resp = \
        self._test_get_response('HEAD')
    for attr in ('bucket_acl', 'object_acl'):
        self.assertFalse(hasattr(s3_resp, attr))
    self.assertEqual(1, mock_get_resp.call_count)
    self.assertEqual(0, m_check_permission.call_count)
def test_get_response_without_match_ACL_MAP(self):
    """A method missing from ACL_MAP raises an explanatory exception."""
    with self.assertRaises(Exception) as caught:
        self._test_get_response('POST', req_klass=S3AclRequest)
    self.assertEqual('No permission to be checked exists',
                     caught.exception.args[0])
def test_get_response_without_duplication_HEAD_request(self):
    """A HEAD on an object must not trigger a second backend HEAD."""
    obj = 'object'
    mock_get_resp, m_check_permission, s3_resp = \
        self._test_get_response('HEAD', obj=obj,
                                req_klass=S3AclRequest)
    self.assertTrue(s3_resp.bucket_acl is not None)
    self.assertTrue(s3_resp.object_acl is not None)
    # Exactly one backend request: the ACL-fetching HEAD doubles as the
    # response to the client.
    self.assertEqual(mock_get_resp.call_count, 1)
    args, kargs = mock_get_resp.call_args_list[0]
    get_resp_obj = args[3]
    self.assertEqual(get_resp_obj, obj)
    # READ permission was checked exactly once.
    self.assertEqual(m_check_permission.call_count, 1)
    args, kargs = m_check_permission.call_args
    permission = args[1]
    self.assertEqual(permission, 'READ')
def test_get_response_with_check_object_permission(self):
    """A GET on an object checks READ via a preliminary HEAD."""
    obj = 'object'
    mock_get_resp, m_check_permission, s3_resp = \
        self._test_get_response('GET', obj=obj,
                                req_klass=S3AclRequest)
    self.assertTrue(s3_resp.bucket_acl is not None)
    self.assertTrue(s3_resp.object_acl is not None)
    # Two backend requests: the ACL-fetching call plus the actual GET.
    self.assertEqual(mock_get_resp.call_count, 2)
    args, kargs = mock_get_resp.call_args_list[0]
    get_resp_obj = args[3]
    self.assertEqual(get_resp_obj, obj)
    self.assertEqual(m_check_permission.call_count, 1)
    args, kargs = m_check_permission.call_args
    permission = args[1]
    self.assertEqual(permission, 'READ')
def test_get_response_with_check_container_permission(self):
    """A GET on a bucket checks READ on the container."""
    mock_get_resp, m_check_permission, s3_resp = \
        self._test_get_response('GET',
                                req_klass=S3AclRequest)
    self.assertTrue(s3_resp.bucket_acl is not None)
    self.assertTrue(s3_resp.object_acl is not None)
    self.assertEqual(mock_get_resp.call_count, 2)
    args, kargs = mock_get_resp.call_args_list[0]
    get_resp_obj = args[3]
    # Bucket-level request: the object component is empty.
    self.assertEqual(get_resp_obj, '')
    self.assertEqual(m_check_permission.call_count, 1)
    args, kargs = m_check_permission.call_args
    permission = args[1]
    self.assertEqual(permission, 'READ')
def test_get_validate_param(self):
    """get_validated_param parses, bounds and clamps integer params."""
    def create_s3request_with_param(param, value):
        # Helper: an S3Request whose query string carries param=value.
        req = Request.blank(
            '/bucket?%s=%s' % (param, value),
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        return S3Request(req.environ)
    s3req = create_s3request_with_param('max-keys', '1')
    # query value within the limit: returned as-is, overriding defaults
    self.assertEqual(s3req.get_validated_param('max-keys', 1000, 1000), 1)
    self.assertEqual(s3req.get_validated_param('max-keys', 0, 1), 1)
    # query value above the limit: clamped down to the limit
    self.assertEqual(s3req.get_validated_param('max-keys', 0, 0), 0)
    # value outside the representable integer range
    s3req = create_s3request_with_param('max-keys', '1' * 30)
    with self.assertRaises(InvalidArgument) as result:
        s3req.get_validated_param('max-keys', 1)
    self.assertIn(
        b'not an integer or within integer range', result.exception.body)
    self.assertEqual(
        result.exception.headers['content-type'], 'application/xml')
    # negative integer value
    s3req = create_s3request_with_param('max-keys', '-1')
    with self.assertRaises(InvalidArgument) as result:
        s3req.get_validated_param('max-keys', 1)
    self.assertIn(
        b'must be an integer between 0 and', result.exception.body)
    self.assertEqual(
        result.exception.headers['content-type'], 'application/xml')
    # non-integer value
    s3req = create_s3request_with_param('max-keys', 'invalid')
    with self.assertRaises(InvalidArgument) as result:
        s3req.get_validated_param('max-keys', 1)
    self.assertIn(
        b'not an integer or within integer range', result.exception.body)
    self.assertEqual(
        result.exception.headers['content-type'], 'application/xml')
def test_authenticate_delete_Authorization_from_s3req(self):
    """After S3AclRequest init, s3api.auth_details is gone from environ."""
    req = Request.blank('/bucket/obj',
                        environ={'REQUEST_METHOD': 'GET'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    # Fake out the auth round-trip made during S3AclRequest creation.
    with patch.object(Request, 'get_response') as m_swift_resp, \
            patch.object(Request, 'remote_user', 'authorized'):
        m_swift_resp.return_value = FakeSwiftResponse()
        s3_req = S3AclRequest(req.environ, self.s3api.conf, None)
        self.assertNotIn('s3api.auth_details', s3_req.environ)
def test_to_swift_req_Authorization_not_exist_in_swreq(self):
    """to_swift_req must not propagate auth details to the sub-request."""
    # The difference from
    # test_authenticate_delete_Authorization_from_s3req above is that
    # this method asserts the *to_swift_req* method.
    container = 'bucket'
    obj = 'obj'
    method = 'GET'
    req = Request.blank('/%s/%s' % (container, obj),
                        environ={'REQUEST_METHOD': method},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    with patch.object(Request, 'get_response') as m_swift_resp, \
            patch.object(Request, 'remote_user', 'authorized'):
        m_swift_resp.return_value = FakeSwiftResponse()
        s3_req = S3AclRequest(req.environ)
        # Yes, we *want* to assert this
        sw_req = s3_req.to_swift_req(method, container, obj)
        # Since S3AclRequest init already authenticated, to_swift_req
        # must not re-add auth details or a token to the sub-request.
        self.assertNotIn('s3api.auth_details', sw_req.environ)
        self.assertNotIn('X-Auth-Token', sw_req.headers)
def test_to_swift_req_subrequest_proxy_access_log(self):
    """force_swift_request_proxy_log drives swift.proxy_access_log_made.

    When the option is on, the flag is cleared on the sub-request (so
    the proxy logs it again); when off, the original value is kept.
    """
    container = 'bucket'
    obj = 'obj'
    method = 'GET'
    # force_swift_request_proxy_log is True
    req = Request.blank('/%s/%s' % (container, obj),
                        environ={'REQUEST_METHOD': method,
                                 'swift.proxy_access_log_made': True},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    with patch.object(Request, 'get_response') as m_swift_resp, \
            patch.object(Request, 'remote_user', 'authorized'):
        m_swift_resp.return_value = FakeSwiftResponse()
        s3_req = S3AclRequest(
            req.environ,
            conf=Config({'force_swift_request_proxy_log': True}))
        sw_req = s3_req.to_swift_req(method, container, obj)
        self.assertFalse(sw_req.environ['swift.proxy_access_log_made'])
    # force_swift_request_proxy_log is False
    req = Request.blank('/%s/%s' % (container, obj),
                        environ={'REQUEST_METHOD': method,
                                 'swift.proxy_access_log_made': True},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    with patch.object(Request, 'get_response') as m_swift_resp, \
            patch.object(Request, 'remote_user', 'authorized'):
        m_swift_resp.return_value = FakeSwiftResponse()
        s3_req = S3AclRequest(
            req.environ,
            conf=Config({'force_swift_request_proxy_log': False}))
        sw_req = s3_req.to_swift_req(method, container, obj)
        self.assertTrue(sw_req.environ['swift.proxy_access_log_made'])
def test_get_container_info(self):
    """get_container_info surfaces backend container metadata and maps
    error statuses to s3api exceptions."""
    s3api_acl = '{"Owner":"owner","Grant":'\
        '[{"Grantee":"owner","Permission":"FULL_CONTROL"}]}'
    self.swift.register('HEAD', '/v1/AUTH_test/bucket', HTTPNoContent,
                        {'x-container-read': 'foo',
                         'X-container-object-count': '5',
                         'x-container-sysmeta-versions-location':
                         'bucket2',
                         'x-container-sysmeta-s3api-acl': s3api_acl,
                         'X-container-meta-foo': 'bar'}, None)
    req = Request.blank('/bucket', environ={'REQUEST_METHOD': 'GET'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    s3_req = S3Request(req.environ)
    # first, call get_response('HEAD')
    # NOTE(review): this call passes self.app while the patched loop
    # below passes self.swift -- presumably both route to the fake
    # backend; confirm against S3ApiTestCase.
    info = s3_req.get_container_info(self.app)
    self.assertTrue('status' in info)  # sanity
    self.assertEqual(204, info['status'])  # sanity
    self.assertEqual('foo', info['read_acl'])  # sanity
    self.assertEqual(5, info['object_count'])  # sanity
    self.assertEqual(
        'bucket2', info['sysmeta']['versions-location'])  # sanity
    self.assertEqual(s3api_acl, info['sysmeta']['s3api-acl'])  # sanity
    self.assertEqual({'foo': 'bar'}, info['meta'])  # sanity
    with patch(
            'swift.common.middleware.s3api.s3request.get_container_info',
            return_value={'status': 204}) as mock_info:
        # Then all calls goes to get_container_info
        for x in range(10):
            info = s3_req.get_container_info(self.swift)
            self.assertTrue('status' in info)  # sanity
            self.assertEqual(204, info['status'])  # sanity
        self.assertEqual(10, mock_info.call_count)
    # Backend error statuses are translated into s3api exceptions.
    expected_errors = [(404, NoSuchBucket), (0, InternalError),
                       (503, ServiceUnavailable)]
    for status, expected_error in expected_errors:
        with patch('swift.common.middleware.s3api.s3request.'
                   'get_container_info',
                   return_value={'status': status}):
            self.assertRaises(
                expected_error, s3_req.get_container_info, MagicMock())
def test_date_header_missing(self):
    """A request with neither Date nor x-amz-date is denied (403)."""
    self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
                        {}, None)
    auth_only = {'Authorization': 'AWS test:tester:hmac'}
    req = Request.blank('/nojunk',
                        environ={'REQUEST_METHOD': 'HEAD'},
                        headers=auth_only)
    status, headers, body = self.call_s3api(req)
    self.assertEqual('403', status.split()[0])
    self.assertEqual(b'', body)
def test_date_header_expired(self):
    """A far-past Date header is rejected with 403."""
    self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
                        {}, None)
    stale_headers = {'Authorization': 'AWS test:tester:hmac',
                     'Date': 'Fri, 01 Apr 2014 12:00:00 GMT'}
    req = Request.blank('/nojunk',
                        environ={'REQUEST_METHOD': 'HEAD'},
                        headers=stale_headers)
    status, headers, body = self.call_s3api(req)
    self.assertEqual('403', status.split()[0])
    self.assertEqual(b'', body)
def test_date_header_with_x_amz_date_valid(self):
    """A current x-amz-date overrides an expired Date header."""
    self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
                        {}, None)
    mixed_headers = {'Authorization': 'AWS test:tester:hmac',
                     'Date': 'Fri, 01 Apr 2014 12:00:00 GMT',
                     'x-amz-date': self.get_date_header()}
    req = Request.blank('/nojunk',
                        environ={'REQUEST_METHOD': 'HEAD'},
                        headers=mixed_headers)
    status, headers, body = self.call_s3api(req)
    # Auth passes; the registered backend 404 comes through.
    self.assertEqual('404', status.split()[0])
    self.assertEqual(b'', body)
def test_date_header_with_x_amz_date_expired(self):
    """An expired x-amz-date is rejected even when Date is current."""
    self.swift.register('HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound,
                        {}, None)
    mixed_headers = {'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header(),
                     'x-amz-date': 'Fri, 01 Apr 2014 12:00:00 GMT'}
    req = Request.blank('/nojunk',
                        environ={'REQUEST_METHOD': 'HEAD'},
                        headers=mixed_headers)
    status, headers, body = self.call_s3api(req)
    self.assertEqual('403', status.split()[0])
    self.assertEqual(b'', body)
def _test_request_timestamp_sigv4(self, date_header):
    """Build a SigV4 request carrying *date_header* and check that the
    parsed request timestamp matches the header value.

    :param date_header: one-entry dict, either {'X-Amz-Date': ...} or
                        {'Date': ...}
    """
    # signature v4 here
    environ = {
        'REQUEST_METHOD': 'GET'}
    if 'X-Amz-Date' in date_header:
        included_header = 'x-amz-date'
        # The credential scope date must match the request date.
        scope_date = date_header['X-Amz-Date'].split('T', 1)[0]
    elif 'Date' in date_header:
        included_header = 'date'
        scope_date = self.get_v4_amz_date_header().split('T', 1)[0]
    else:
        self.fail('Invalid date header specified as test')
    headers = {
        'Authorization':
            'AWS4-HMAC-SHA256 '
            'Credential=test/%s/us-east-1/s3/aws4_request, '
            'SignedHeaders=%s,'
            'Signature=X' % (
                scope_date,
                ';'.join(sorted(['host', included_header]))),
        'X-Amz-Content-SHA256': '0123456789'}
    headers.update(date_header)
    req = Request.blank('/', environ=environ, headers=headers)
    sigv4_req = SigV4Request(req.environ, conf=self.s3api.conf)
    if 'X-Amz-Date' in date_header:
        timestamp = mktime(
            date_header['X-Amz-Date'], SIGV4_X_AMZ_DATE_FORMAT)
    elif 'Date' in date_header:
        timestamp = mktime(date_header['Date'])
    self.assertEqual(timestamp, int(sigv4_req.timestamp))
def test_request_timestamp_sigv4(self):
    """SigV4 timestamp handling for valid, mangled, ancient and
    clock-skewed X-Amz-Date / Date headers.

    Fix: three ``assertRaises(...) as cm`` context managers never
    inspected ``cm`` afterwards; the unused bindings are removed so the
    reader isn't misled into thinking the exception detail is checked.
    """
    access_denied_message = \
        b'AWS authentication requires a valid Date or x-amz-date header'
    # normal X-Amz-Date header
    date_header = {'X-Amz-Date': self.get_v4_amz_date_header()}
    self._test_request_timestamp_sigv4(date_header)
    # normal Date header
    date_header = {'Date': self.get_date_header()}
    self._test_request_timestamp_sigv4(date_header)
    # mangled X-Amz-Date header
    date_header = {'X-Amz-Date': self.get_v4_amz_date_header()[:-1]}
    with self.assertRaises(AccessDenied) as cm:
        self._test_request_timestamp_sigv4(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(access_denied_message, cm.exception.body)
    # mangled Date header
    date_header = {'Date': self.get_date_header()[20:]}
    with self.assertRaises(AccessDenied) as cm:
        self._test_request_timestamp_sigv4(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(access_denied_message, cm.exception.body)
    # Negative timestamp
    date_header = {'X-Amz-Date': '00160523T054055Z'}
    with self.assertRaises(AccessDenied) as cm:
        self._test_request_timestamp_sigv4(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(access_denied_message, cm.exception.body)
    # far-past Date header
    date_header = {'Date': 'Tue, 07 Jul 999 21:53:04 GMT'}
    with self.assertRaises(AccessDenied) as cm:
        self._test_request_timestamp_sigv4(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(access_denied_message, cm.exception.body)
    # near-past X-Amz-Date header: accepted with the default skew...
    date_header = {'X-Amz-Date': self.get_v4_amz_date_header(
        datetime.utcnow() - timedelta(minutes=10)
    )}
    self._test_request_timestamp_sigv4(date_header)
    # ...but rejected once allowable_clock_skew shrinks to 5 minutes
    date_header = {'X-Amz-Date': self.get_v4_amz_date_header(
        datetime.utcnow() - timedelta(minutes=10)
    )}
    with self.assertRaises(RequestTimeTooSkewed), \
            patch.object(self.s3api.conf, 'allowable_clock_skew', 300):
        self._test_request_timestamp_sigv4(date_header)
    # near-future X-Amz-Date headers behave symmetrically
    date_header = {'X-Amz-Date': self.get_v4_amz_date_header(
        datetime.utcnow() + timedelta(minutes=10)
    )}
    self._test_request_timestamp_sigv4(date_header)
    date_header = {'X-Amz-Date': self.get_v4_amz_date_header(
        datetime.utcnow() + timedelta(minutes=10)
    )}
    with self.assertRaises(RequestTimeTooSkewed), \
            patch.object(self.s3api.conf, 'allowable_clock_skew', 300):
        self._test_request_timestamp_sigv4(date_header)
    date_header = {'X-Amz-Date': self.get_v4_amz_date_header(
        datetime.utcnow() + timedelta(days=1)
    )}
    with self.assertRaises(RequestTimeTooSkewed):
        self._test_request_timestamp_sigv4(date_header)
    # far-future Date header
    date_header = {'Date': 'Tue, 07 Jul 9999 21:53:04 GMT'}
    with self.assertRaises(RequestTimeTooSkewed) as cm:
        self._test_request_timestamp_sigv4(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(b'The difference between the request time and the '
                  b'current time is too large.', cm.exception.body)
def _test_request_timestamp_sigv2(self, date_header):
    """Build a SigV2 request carrying *date_header* and check that the
    parsed request timestamp matches the header value.

    :param date_header: one-entry dict, either {'X-Amz-Date': ...} or
                        {'Date': ...}
    """
    # signature v2 here (the original comment wrongly said v4)
    environ = {
        'REQUEST_METHOD': 'GET'}
    headers = {'Authorization': 'AWS test:tester:hmac'}
    headers.update(date_header)
    req = Request.blank('/', environ=environ, headers=headers)
    sigv2_req = S3Request(req.environ)
    if 'X-Amz-Date' in date_header:
        timestamp = mktime(req.headers.get('X-Amz-Date'))
    elif 'Date' in date_header:
        timestamp = mktime(req.headers.get('Date'))
    else:
        self.fail('Invalid date header specified as test')
    self.assertEqual(timestamp, int(sigv2_req.timestamp))
def test_request_timestamp_sigv2(self):
    """SigV2 timestamp handling for valid, mangled and skewed headers."""
    access_denied_message = \
        b'AWS authentication requires a valid Date or x-amz-date header'
    # In v2 format, normal X-Amz-Date header is same
    date_header = {'X-Amz-Date': self.get_date_header()}
    self._test_request_timestamp_sigv2(date_header)
    # normal Date header
    date_header = {'Date': self.get_date_header()}
    self._test_request_timestamp_sigv2(date_header)
    # mangled X-Amz-Date header
    date_header = {'X-Amz-Date': self.get_date_header()[:-20]}
    with self.assertRaises(AccessDenied) as cm:
        self._test_request_timestamp_sigv2(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(access_denied_message, cm.exception.body)
    # mangled Date header
    date_header = {'Date': self.get_date_header()[:-20]}
    with self.assertRaises(AccessDenied) as cm:
        self._test_request_timestamp_sigv2(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(access_denied_message, cm.exception.body)
    # Negative timestamp
    date_header = {'X-Amz-Date': '00160523T054055Z'}
    with self.assertRaises(AccessDenied) as cm:
        self._test_request_timestamp_sigv2(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(access_denied_message, cm.exception.body)
    # far-past Date header
    date_header = {'Date': 'Tue, 07 Jul 999 21:53:04 GMT'}
    with self.assertRaises(AccessDenied) as cm:
        self._test_request_timestamp_sigv2(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(access_denied_message, cm.exception.body)
    # far-future Date header
    date_header = {'Date': 'Tue, 07 Jul 9999 21:53:04 GMT'}
    with self.assertRaises(RequestTimeTooSkewed) as cm:
        self._test_request_timestamp_sigv2(date_header)
    self.assertEqual('403 Forbidden', cm.exception.args[0])
    self.assertIn(b'The difference between the request time and the '
                  b'current time is too large.', cm.exception.body)
def test_headers_to_sign_sigv4(self):
    """_headers_to_sign returns exactly the SignedHeaders, in order.

    A SignedHeaders entry with no matching request header is a
    SignatureDoesNotMatch error.
    """
    environ = {
        'REQUEST_METHOD': 'GET'}
    # host and x-amz-date
    x_amz_date = self.get_v4_amz_date_header()
    headers = {
        'Authorization':
            'AWS4-HMAC-SHA256 '
            'Credential=test/%s/us-east-1/s3/aws4_request, '
            'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
            'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0],
        'X-Amz-Content-SHA256': '0123456789',
        'Date': self.get_date_header(),
        'X-Amz-Date': x_amz_date}
    req = Request.blank('/', environ=environ, headers=headers)
    sigv4_req = SigV4Request(req.environ)
    headers_to_sign = sigv4_req._headers_to_sign()
    self.assertEqual(headers_to_sign, [
        ('host', 'localhost:80'),
        ('x-amz-content-sha256', '0123456789'),
        ('x-amz-date', x_amz_date)])
    # no x-amz-date
    headers = {
        'Authorization':
            'AWS4-HMAC-SHA256 '
            'Credential=test/%s/us-east-1/s3/aws4_request, '
            'SignedHeaders=host;x-amz-content-sha256,'
            'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0],
        'X-Amz-Content-SHA256': '0123456789',
        'Date': self.get_date_header()}
    req = Request.blank('/', environ=environ, headers=headers)
    sigv4_req = SigV4Request(req.environ)
    headers_to_sign = sigv4_req._headers_to_sign()
    self.assertEqual(headers_to_sign, [
        ('host', 'localhost:80'),
        ('x-amz-content-sha256', '0123456789')])
    # SignedHeaders says, host and x-amz-date included but there is not
    # X-Amz-Date header
    headers = {
        'Authorization':
            'AWS4-HMAC-SHA256 '
            'Credential=test/%s/us-east-1/s3/aws4_request, '
            'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
            'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0],
        'X-Amz-Content-SHA256': '0123456789',
        'Date': self.get_date_header()}
    req = Request.blank('/', environ=environ, headers=headers)
    with self.assertRaises(SignatureDoesNotMatch):
        sigv4_req = SigV4Request(req.environ)
        sigv4_req._headers_to_sign()
    def test_canonical_uri_sigv2(self):
        """v2 _canonical_uri(): for virtual hosted-style requests the bucket
        taken from the Host header is prepended to the canonical URI while
        PATH_INFO is left untouched; path-style URIs pass through as-is."""
        environ = {
            'HTTP_HOST': 'bucket1.s3.test.com',
            'REQUEST_METHOD': 'GET'}
        headers = {'Authorization': 'AWS test:tester:hmac',
                   'X-Amz-Date': self.get_date_header()}

        # Virtual hosted-style
        req = Request.blank('/', environ=environ, headers=headers)
        sigv2_req = S3Request(
            req.environ, conf=Config({'storage_domains': ['s3.test.com']}))
        uri = sigv2_req._canonical_uri()
        self.assertEqual(uri, '/bucket1/')
        self.assertEqual(req.environ['PATH_INFO'], '/')

        req = Request.blank('/obj1', environ=environ, headers=headers)
        sigv2_req = S3Request(
            req.environ, conf=Config({'storage_domains': ['s3.test.com']}))
        uri = sigv2_req._canonical_uri()
        self.assertEqual(uri, '/bucket1/obj1')
        self.assertEqual(req.environ['PATH_INFO'], '/obj1')

        # The matching storage_domain may be anywhere in the configured list
        req = Request.blank('/obj2', environ=environ, headers=headers)
        sigv2_req = S3Request(
            req.environ, conf=Config({
                'storage_domains': ['alternate.domain', 's3.test.com']}))
        uri = sigv2_req._canonical_uri()
        self.assertEqual(uri, '/bucket1/obj2')
        self.assertEqual(req.environ['PATH_INFO'], '/obj2')

        # Now check the other storage_domain
        environ = {
            'HTTP_HOST': 'bucket1.alternate.domain',
            'REQUEST_METHOD': 'GET'}
        req = Request.blank('/obj2', environ=environ, headers=headers)
        sigv2_req = S3Request(
            req.environ, conf=Config({
                'storage_domains': ['alternate.domain', 's3.test.com']}))
        uri = sigv2_req._canonical_uri()
        self.assertEqual(uri, '/bucket1/obj2')
        self.assertEqual(req.environ['PATH_INFO'], '/obj2')

        # Non existent storage_domain means we can't find the container
        environ = {
            'HTTP_HOST': 'bucket1.incorrect.domain',
            'REQUEST_METHOD': 'GET'}
        req = Request.blank('/obj2', environ=environ, headers=headers)
        sigv2_req = S3Request(
            req.environ, conf=Config({
                'storage_domains': ['alternate.domain', 's3.test.com']}))
        uri = sigv2_req._canonical_uri()
        # uh-oh, no bucket: the first path segment is mistaken for the
        # container name
        self.assertEqual(uri, '/obj2')
        self.assertEqual(sigv2_req.container_name, 'obj2')

        environ = {
            'HTTP_HOST': 's3.test.com',
            'REQUEST_METHOD': 'GET'}
        # Path-style
        req = Request.blank('/', environ=environ, headers=headers)
        sigv2_req = S3Request(req.environ)
        uri = sigv2_req._canonical_uri()
        self.assertEqual(uri, '/')
        self.assertEqual(req.environ['PATH_INFO'], '/')

        req = Request.blank('/bucket1/obj1',
                            environ=environ,
                            headers=headers)
        sigv2_req = S3Request(req.environ)
        uri = sigv2_req._canonical_uri()
        self.assertEqual(uri, '/bucket1/obj1')
        self.assertEqual(req.environ['PATH_INFO'], '/bucket1/obj1')
    def test_canonical_uri_sigv4(self):
        """v4 _canonical_uri(): unlike v2 the bucket is never folded into
        the canonical URI, and the result is bytes; PATH_INFO is left
        untouched for both virtual hosted-style and path-style requests."""
        environ = {
            'HTTP_HOST': 'bucket.s3.test.com',
            'REQUEST_METHOD': 'GET'}

        # host and x-amz-date
        x_amz_date = self.get_v4_amz_date_header()
        headers = {
            'Authorization':
                'AWS4-HMAC-SHA256 '
                'Credential=test/%s/us-east-1/s3/aws4_request, '
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0],
            'X-Amz-Content-SHA256': '0123456789',
            'Date': self.get_date_header(),
            'X-Amz-Date': x_amz_date}

        # Virtual hosted-style
        self.s3api.conf.storage_domains = ['s3.test.com']
        req = Request.blank('/', environ=environ, headers=headers)
        sigv4_req = SigV4Request(req.environ)
        uri = sigv4_req._canonical_uri()
        self.assertEqual(uri, b'/')
        self.assertEqual(req.environ['PATH_INFO'], '/')

        req = Request.blank('/obj1', environ=environ, headers=headers)
        sigv4_req = SigV4Request(req.environ)
        uri = sigv4_req._canonical_uri()
        self.assertEqual(uri, b'/obj1')
        self.assertEqual(req.environ['PATH_INFO'], '/obj1')

        environ = {
            'HTTP_HOST': 's3.test.com',
            'REQUEST_METHOD': 'GET'}
        # Path-style
        self.s3api.conf.storage_domains = []
        req = Request.blank('/', environ=environ, headers=headers)
        sigv4_req = SigV4Request(req.environ)
        uri = sigv4_req._canonical_uri()
        self.assertEqual(uri, b'/')
        self.assertEqual(req.environ['PATH_INFO'], '/')

        req = Request.blank('/bucket/obj1',
                            environ=environ,
                            headers=headers)
        sigv4_req = SigV4Request(req.environ)
        uri = sigv4_req._canonical_uri()
        self.assertEqual(uri, b'/bucket/obj1')
        self.assertEqual(req.environ['PATH_INFO'], '/bucket/obj1')
    @patch.object(S3Request, '_validate_dates', lambda *a: None)
    def _test_check_signature_sigv2(self, secret):
        """Verify v2 string-to-sign construction and check_signature()
        against the worked examples from the AWS REST authentication docs.

        Date validation is patched out because the example requests carry
        dates from 2007.  The expected signatures are the published AWS
        values for the given example secret key.
        """
        # See https://web.archive.org/web/20151226025049/http://
        # docs.aws.amazon.com//AmazonS3/latest/dev/RESTAuthentication.html
        # Example: GET of an object, virtual hosted-style
        req = Request.blank('/photos/puppy.jpg', headers={
            'Host': 'johnsmith.s3.amazonaws.com',
            'Date': 'Tue, 27 Mar 2007 19:36:42 +0000',
            'Authorization': ('AWS AKIAIOSFODNN7EXAMPLE:'
                              'bWq2s1WEIj+Ydj0vQ697zp+IXMU='),
        })
        sigv2_req = S3Request(req.environ, conf=Config({
            'storage_domains': ['s3.amazonaws.com']}))
        expected_sts = b'\n'.join([
            b'GET',
            b'',
            b'',
            b'Tue, 27 Mar 2007 19:36:42 +0000',
            b'/johnsmith/photos/puppy.jpg',
        ])
        self.assertEqual(expected_sts, sigv2_req._string_to_sign())
        self.assertTrue(sigv2_req.check_signature(secret))

        # Example: PUT with a Content-Type header
        req = Request.blank('/photos/puppy.jpg', method='PUT', headers={
            'Content-Type': 'image/jpeg',
            'Content-Length': '94328',
            'Host': 'johnsmith.s3.amazonaws.com',
            'Date': 'Tue, 27 Mar 2007 21:15:45 +0000',
            'Authorization': ('AWS AKIAIOSFODNN7EXAMPLE:'
                              'MyyxeRY7whkBe+bq8fHCL/2kKUg='),
        })
        sigv2_req = S3Request(req.environ, conf=Config({
            'storage_domains': ['s3.amazonaws.com']}))
        expected_sts = b'\n'.join([
            b'PUT',
            b'',
            b'image/jpeg',
            b'Tue, 27 Mar 2007 21:15:45 +0000',
            b'/johnsmith/photos/puppy.jpg',
        ])
        self.assertEqual(expected_sts, sigv2_req._string_to_sign())
        self.assertTrue(sigv2_req.check_signature(secret))

        # Example: list with query parameters -- the query string is not
        # part of the v2 string-to-sign
        req = Request.blank(
            '/?prefix=photos&max-keys=50&marker=puppy',
            headers={
                'User-Agent': 'Mozilla/5.0',
                'Host': 'johnsmith.s3.amazonaws.com',
                'Date': 'Tue, 27 Mar 2007 19:42:41 +0000',
                'Authorization': ('AWS AKIAIOSFODNN7EXAMPLE:'
                                  'htDYFYduRNen8P9ZfE/s9SuKy0U='),
            })
        sigv2_req = S3Request(req.environ, conf=Config({
            'storage_domains': ['s3.amazonaws.com']}))
        expected_sts = b'\n'.join([
            b'GET',
            b'',
            b'',
            b'Tue, 27 Mar 2007 19:42:41 +0000',
            b'/johnsmith/',
        ])
        self.assertEqual(expected_sts, sigv2_req._string_to_sign())
        self.assertTrue(sigv2_req.check_signature(secret))

        # The signature comparison must go through the constant-time
        # equality helper
        with patch('swift.common.middleware.s3api.s3request.streq_const_time',
                   return_value=True) as mock_eq:
            self.assertTrue(sigv2_req.check_signature(secret))
        mock_eq.assert_called_once()
def test_check_signature_sigv2(self):
self._test_check_signature_sigv2(
'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY')
def test_check_signature_sigv2_unicode_string(self):
self._test_check_signature_sigv2(
u'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY')
    @patch.object(S3Request, '_validate_dates', lambda *a: None)
    def test_check_signature_multi_bytes_secret_failure(self):
        """A wrong secret containing non-ASCII multi-byte characters must
        make check_signature() return False, not raise, for both v2 and
        v4 requests."""
        # Test v2 check_signature with multi bytes invalid secret
        req = Request.blank('/photos/puppy.jpg', headers={
            'Host': 'johnsmith.s3.amazonaws.com',
            'Date': 'Tue, 27 Mar 2007 19:36:42 +0000',
            'Authorization': ('AWS AKIAIOSFODNN7EXAMPLE:'
                              'bWq2s1WEIj+Ydj0vQ697zp+IXMU='),
        })
        sigv2_req = S3Request(req.environ, Config({
            'storage_domains': ['s3.amazonaws.com']}))
        # This is a failure case with utf-8 non-ascii multi-bytes character
        # but we expect to return just False instead of exceptions
        self.assertFalse(sigv2_req.check_signature(
            u'\u30c9\u30e9\u30b4\u30f3'))

        # Test v4 check_signature with multi bytes invalid secret
        amz_date_header = self.get_v4_amz_date_header()
        req = Request.blank('/photos/puppy.jpg', headers={
            'Authorization':
                'AWS4-HMAC-SHA256 '
                'Credential=test/%s/us-east-1/s3/aws4_request, '
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=X' % amz_date_header.split('T', 1)[0],
            'X-Amz-Content-SHA256': '0123456789',
            'X-Amz-Date': amz_date_header
        })
        sigv4_req = SigV4Request(
            req.environ, Config({'storage_domains': ['s3.amazonaws.com']}))
        self.assertFalse(sigv4_req.check_signature(
            u'\u30c9\u30e9\u30b4\u30f3'))

        # The mismatch must be detected via the constant-time comparison
        with patch('swift.common.middleware.s3api.s3request.streq_const_time',
                   return_value=False) as mock_eq:
            self.assertFalse(sigv4_req.check_signature(
                u'\u30c9\u30e9\u30b4\u30f3'))
        mock_eq.assert_called_once()
    @patch.object(S3Request, '_validate_dates', lambda *a: None)
    def test_check_signature_sigv4_unsigned_payload(self):
        """UNSIGNED-PAYLOAD is accepted as the content sha256: it must
        appear verbatim at the end of the canonical request and the
        precomputed signature (for secret 'secret') must verify."""
        environ = {
            'HTTP_HOST': 'bucket.s3.test.com',
            'REQUEST_METHOD': 'GET'}
        headers = {
            'Authorization':
                'AWS4-HMAC-SHA256 '
                'Credential=test/20210104/us-east-1/s3/aws4_request, '
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=f721a7941d5b7710344bc62cc45f87e66f4bb1dd00d9075ee61'
                '5b1a5c72b0f8c',
            'X-Amz-Content-SHA256': 'UNSIGNED-PAYLOAD',
            'Date': 'Mon, 04 Jan 2021 10:26:23 -0000',
            'X-Amz-Date': '20210104T102623Z'}
        # Virtual hosted-style
        self.s3api.conf.storage_domains = ['s3.test.com']
        req = Request.blank('/', environ=environ, headers=headers)
        sigv4_req = SigV4Request(req.environ)
        self.assertTrue(
            sigv4_req._canonical_request().endswith(b'UNSIGNED-PAYLOAD'))
        self.assertTrue(sigv4_req.check_signature('secret'))
    @patch.object(S3Request, '_validate_dates', lambda *a: None)
    def test_check_signature_sigv4_url_encode(self):
        """Characters like ',' and ':' in the raw path must be
        percent-encoded in the v4 canonical request ('~' stays literal),
        and the precomputed signature for secret 'secret' must verify."""
        environ = {
            'HTTP_HOST': 'bucket.s3.test.com',
            'REQUEST_METHOD': 'PUT',
            'RAW_PATH_INFO': '/test/~/file,1_1:1-1'}
        headers = {
            'Authorization':
                'AWS4-HMAC-SHA256 '
                'Credential=test/20210104/us-east-1/s3/aws4_request, '
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=06559fbf839b7ceac19d69f510a2d3b7dcb569c8df310965cc1'
                '6a1dc55b3394a',
            'X-Amz-Content-SHA256': 'UNSIGNED-PAYLOAD',
            'Date': 'Mon, 04 Jan 2021 10:26:23 -0000',
            'X-Amz-Date': '20210104T102623Z'}
        # Virtual hosted-style
        # NOTE(review): this sets the singular 'storage_domain' whereas the
        # sibling tests set 'storage_domains' (a list); presumably stale --
        # the request is still validated via the raw path.  TODO: confirm.
        self.s3api.conf.storage_domain = 's3.test.com'
        req = Request.blank(
            environ['RAW_PATH_INFO'], environ=environ, headers=headers)
        sigv4_req = SigV4Request(req.environ)
        canonical_req = sigv4_req._canonical_request()
        self.assertIn(b'PUT\n/test/~/file%2C1_1%3A1-1\n', canonical_req)
        self.assertTrue(canonical_req.endswith(b'UNSIGNED-PAYLOAD'))
        self.assertTrue(sigv4_req.check_signature('secret'))
    @patch.object(S3Request, '_validate_dates', lambda *a: None)
    def test_check_sigv4_req_zero_content_length_sha256(self):
        """With Content-Length 0, X-Amz-Content-SHA256 must be a valid
        sha256 hex digest (either case is accepted); an invalid value
        raises BadDigest at request construction time."""
        # Virtual hosted-style
        self.s3api.conf.storage_domains = ['s3.test.com']

        # bad sha256
        environ = {
            'HTTP_HOST': 'bucket.s3.test.com',
            'REQUEST_METHOD': 'GET'}
        headers = {
            'Authorization':
                'AWS4-HMAC-SHA256 '
                'Credential=test/20210104/us-east-1/s3/aws4_request, '
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=f721a7941d5b7710344bc62cc45f87e66f4bb1dd00d9075ee61'
                '5b1a5c72b0f8c',
            'X-Amz-Content-SHA256': 'bad',
            'Date': 'Mon, 04 Jan 2021 10:26:23 -0000',
            'X-Amz-Date': '20210104T102623Z',
            'Content-Length': 0,
        }
        req = Request.blank('/', environ=environ, headers=headers)
        self.assertRaises(BadDigest, SigV4Request, req.environ)

        # lowercase sha256 of the empty body
        sha256_of_nothing = hashlib.sha256().hexdigest().encode('ascii')
        headers = {
            'Authorization':
                'AWS4-HMAC-SHA256 '
                'Credential=test/20210104/us-east-1/s3/aws4_request, '
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=d90542e8b4c0d2f803162040a948e8e51db00b62a59ffb16682'
                'ef433718fde12',
            'X-Amz-Content-SHA256': sha256_of_nothing,
            'Date': 'Mon, 04 Jan 2021 10:26:23 -0000',
            'X-Amz-Date': '20210104T102623Z',
            'Content-Length': 0,
        }
        req = Request.blank('/', environ=environ, headers=headers)
        sigv4_req = SigV4Request(req.environ)
        self.assertTrue(
            sigv4_req._canonical_request().endswith(sha256_of_nothing))
        self.assertTrue(sigv4_req.check_signature('secret'))

        # uppercase sha256 (note: the signature differs because the header
        # value is signed verbatim)
        headers = {
            'Authorization':
                'AWS4-HMAC-SHA256 '
                'Credential=test/20210104/us-east-1/s3/aws4_request, '
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=4aab5102e58e9e40f331417d322465c24cac68a7ce77260e9bf'
                '5ce9a6200862b',
            'X-Amz-Content-SHA256': sha256_of_nothing.upper(),
            'Date': 'Mon, 04 Jan 2021 10:26:23 -0000',
            'X-Amz-Date': '20210104T102623Z',
            'Content-Length': 0,
        }
        req = Request.blank('/', environ=environ, headers=headers)
        sigv4_req = SigV4Request(req.environ)
        self.assertTrue(
            sigv4_req._canonical_request().endswith(sha256_of_nothing.upper()))
        self.assertTrue(sigv4_req.check_signature('secret'))
class TestSigV4Request(S3ApiTestCase):
    """Tests for SigV4Request construction with the s3_acl code paths
    enabled: Authorization header parsing, presigned-URL query parsing,
    and controller selection."""

    def setUp(self):
        super(TestSigV4Request, self).setUp()
        # Exercise the s3_acl code paths in both the middleware and the
        # fake Swift backend.
        self.s3api.conf.s3_acl = True
        self.swift.s3_acl = True

    def test_init_header_authorization(self):
        """Parsing the v4 Authorization header: the Credential scope's
        region must match the configured location (case-insensitively),
        and malformed scope elements, signatures or SignedHeaders raise
        the appropriate error."""
        environ = {
            'HTTP_HOST': 'bucket.s3.test.com',
            'REQUEST_METHOD': 'GET'}

        def do_check_ok(conf, auth):
            # A valid header must yield the parsed signature/access key.
            x_amz_date = self.get_v4_amz_date_header()
            headers = {
                'Authorization': auth,
                'X-Amz-Content-SHA256': '0123456789',
                'Date': self.get_date_header(),
                'X-Amz-Date': x_amz_date}
            req = Request.blank('/', environ=environ, headers=headers)
            sigv4_req = SigV4Request(req.environ, conf=conf)
            self.assertEqual('X', sigv4_req.signature)
            self.assertEqual('test', sigv4_req.access_key)
            return sigv4_req

        auth = ('AWS4-HMAC-SHA256 '
                'Credential=test/%s/us-east-1/s3/aws4_request,'
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0])
        # location lowercase matches
        sigv4_req = do_check_ok(Config({'location': 'us-east-1'}), auth)
        self.assertEqual('us-east-1', sigv4_req.location)
        # location case mis-matches
        sigv4_req = do_check_ok(Config({'location': 'US-East-1'}), auth)
        self.assertEqual('us-east-1', sigv4_req.location)
        # location uppercase matches
        auth = ('AWS4-HMAC-SHA256 '
                'Credential=test/%s/US-East-1/s3/aws4_request,'
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0])
        sigv4_req = do_check_ok(Config({'location': 'US-East-1'}), auth)
        self.assertEqual('US-East-1', sigv4_req.location)

        def do_check_bad(conf, auth, exc):
            # A malformed header must raise `exc` at construction time.
            x_amz_date = self.get_v4_amz_date_header()
            headers = {
                'Authorization': auth,
                'X-Amz-Content-SHA256': '0123456789',
                'Date': self.get_date_header(),
                'X-Amz-Date': x_amz_date}
            req = Request.blank('/', environ=environ, headers=headers)
            self.assertRaises(exc, SigV4Request, req.environ, conf)

        # location case mismatch
        auth = ('AWS4-HMAC-SHA256 '
                'Credential=test/%s/US-East-1/s3/aws4_request,'
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), auth,
                     s3response.AuthorizationHeaderMalformed)
        # bad location
        auth = ('AWS4-HMAC-SHA256 '
                'Credential=test/%s/us-west-1/s3/aws4_request,'
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), auth,
                     s3response.AuthorizationHeaderMalformed)
        # bad service name
        auth = ('AWS4-HMAC-SHA256 '
                'Credential=test/%s/us-east-1/S3/aws4_request,'
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), auth,
                     s3response.AuthorizationHeaderMalformed)
        # bad terminal name
        auth = ('AWS4-HMAC-SHA256 '
                'Credential=test/%s/us-east-1/s3/AWS4_request,'
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), auth,
                     s3response.AuthorizationHeaderMalformed)
        # bad Signature
        auth = ('AWS4-HMAC-SHA256 '
                'Credential=test/%s/us-east-1/s3/aws4_request,'
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=' % self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), auth,
                     s3response.AccessDenied)
        # bad SignedHeaders
        auth = ('AWS4-HMAC-SHA256 '
                'Credential=test/%s/us-east-1/s3/aws4_request,'
                'SignedHeaders=,'
                'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), auth,
                     s3response.AuthorizationHeaderMalformed)

    def test_init_query_authorization(self):
        """Same checks as test_init_header_authorization but for
        presigned-URL style query parameters; malformed parameters raise
        AuthorizationQueryParametersError instead."""
        environ = {
            'HTTP_HOST': 'bucket.s3.test.com',
            'REQUEST_METHOD': 'GET'}

        def do_check_ok(conf, params):
            x_amz_date = self.get_v4_amz_date_header()
            params['X-Amz-Date'] = x_amz_date
            signed_headers = {
                'X-Amz-Content-SHA256': '0123456789',
                'Date': self.get_date_header(),
                'X-Amz-Date': x_amz_date}
            req = Request.blank('/', environ=environ, headers=signed_headers,
                                params=params)
            sigv4_req = SigV4Request(req.environ, conf=conf)
            self.assertEqual('X', sigv4_req.signature)
            self.assertEqual('test', sigv4_req.access_key)
            return sigv4_req

        ok_params = {
            'AWSAccessKeyId': 'test',
            'X-Amz-Expires': '3600',
            'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
            'X-Amz-Credential': 'test/%s/us-east-1/s3/aws4_request' %
            self.get_v4_amz_date_header().split('T', 1)[0],
            'X-Amz-SignedHeaders': 'host;x-amz-content-sha256;x-amz-date',
            'X-Amz-Signature': 'X'}
        # location lowercase matches
        sigv4_req = do_check_ok(Config({'location': 'us-east-1'}), ok_params)
        self.assertEqual('us-east-1', sigv4_req.location)
        # location case mis-matches
        sigv4_req = do_check_ok(Config({'location': 'US-East-1'}), ok_params)
        self.assertEqual('us-east-1', sigv4_req.location)
        # location uppercase matches
        ok_params['X-Amz-Credential'] = (
            'test/%s/US-East-1/s3/aws4_request' %
            self.get_v4_amz_date_header().split('T', 1)[0])
        sigv4_req = do_check_ok(Config({'location': 'US-East-1'}), ok_params)
        self.assertEqual('US-East-1', sigv4_req.location)

        def do_check_bad(conf, params, exc):
            x_amz_date = self.get_v4_amz_date_header()
            params['X-Amz-Date'] = x_amz_date
            signed_headers = {
                'X-Amz-Content-SHA256': '0123456789',
                'Date': self.get_date_header(),
                'X-Amz-Date': x_amz_date}
            req = Request.blank('/', environ=environ, headers=signed_headers,
                                params=params)
            self.assertRaises(exc, SigV4Request, req.environ, conf)

        # location case mismatch
        bad_params = dict(ok_params)
        bad_params['X-Amz-Credential'] = (
            'test/%s/US-East-1/s3/aws4_request' %
            self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), bad_params,
                     s3response.AuthorizationQueryParametersError)
        # bad location
        bad_params = dict(ok_params)
        bad_params['X-Amz-Credential'] = (
            'test/%s/us-west-1/s3/aws4_request' %
            self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), bad_params,
                     s3response.AuthorizationQueryParametersError)
        # bad service name
        bad_params = dict(ok_params)
        bad_params['X-Amz-Credential'] = (
            'test/%s/us-east-1/S3/aws4_request' %
            self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), bad_params,
                     s3response.AuthorizationQueryParametersError)
        # bad terminal name
        bad_params = dict(ok_params)
        bad_params['X-Amz-Credential'] = (
            'test/%s/us-east-1/s3/AWS4_request' %
            self.get_v4_amz_date_header().split('T', 1)[0])
        do_check_bad(Config({'location': 'us-east-1'}), bad_params,
                     s3response.AuthorizationQueryParametersError)
        # bad Signature
        bad_params = dict(ok_params)
        bad_params['X-Amz-Signature'] = ''
        do_check_bad(Config({'location': 'us-east-1'}), bad_params,
                     s3response.AccessDenied)
        # bad SignedHeaders
        bad_params = dict(ok_params)
        bad_params['X-Amz-SignedHeaders'] = ''
        do_check_bad(Config({'location': 'us-east-1'}), bad_params,
                     s3response.AuthorizationQueryParametersError)

    def test_controller_allow_multipart_uploads(self):
        """Multipart query parameters select the multi_upload controllers
        only when allow_multipart_uploads is enabled; service (account)
        requests ignore the switch."""
        environ = {
            'HTTP_HOST': 'bucket.s3.test.com',
            'REQUEST_METHOD': 'GET'}
        x_amz_date = self.get_v4_amz_date_header()
        auth = ('AWS4-HMAC-SHA256 '
                'Credential=test/%s/us-east-1/s3/aws4_request,'
                'SignedHeaders=host;x-amz-content-sha256;x-amz-date,'
                'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0])
        headers = {
            'Authorization': auth,
            'X-Amz-Content-SHA256': '0123456789',
            'Date': self.get_date_header(),
            'X-Amz-Date': x_amz_date}

        def make_s3req(config, path, params):
            req = Request.blank(path, environ=environ, headers=headers,
                                params=params)
            # second positional argument is the (unused here) wsgi app
            return SigV4Request(req.environ, None, config)

        s3req = make_s3req(Config(), '/bkt', {'partNumber': '3'})
        self.assertEqual(controllers.multi_upload.PartController,
                         s3req.controller)

        s3req = make_s3req(Config(), '/bkt', {'uploadId': '4'})
        self.assertEqual(controllers.multi_upload.UploadController,
                         s3req.controller)

        s3req = make_s3req(Config(), '/bkt', {'uploads': '99'})
        self.assertEqual(controllers.multi_upload.UploadsController,
                         s3req.controller)

        # multi part requests require allow_multipart_uploads
        def do_check_slo_not_enabled(params):
            s3req = make_s3req(Config({
                'allow_multipart_uploads': False}), '/bkt', params)
            self.assertRaises(s3response.S3NotImplemented,
                              lambda: s3req.controller)

        do_check_slo_not_enabled({'partNumber': '3'})
        do_check_slo_not_enabled({'uploadId': '4'})
        do_check_slo_not_enabled({'uploads': '99'})

        # service requests not dependent on allow_multipart_uploads
        s3req = make_s3req(Config(), '/', {'partNumber': '3'})
        self.assertEqual(controllers.ServiceController,
                         s3req.controller)
        s3req = make_s3req(Config({'allow_multipart_uploads': False}), '/',
                           {'partNumber': '3'})
        self.assertEqual(controllers.ServiceController,
                         s3req.controller)
class TestHashingInput(S3ApiTestCase):
    """Tests for HashingInput, the wsgi.input wrapper that verifies the
    declared content length and checksum while the body is read."""

    def test_good(self):
        raw = b'123456789'
        wrapped = HashingInput(
            BytesIO(raw), 9, lambda: md5(usedforsecurity=False),
            md5(raw, usedforsecurity=False).hexdigest())
        self.assertEqual(b'1234', wrapped.read(4))
        self.assertEqual(b'56', wrapped.read(2))
        # trying to read past the end gets us whatever's left
        self.assertEqual(b'789', wrapped.read(4))
        # can continue trying to read -- but it'll be empty
        self.assertEqual(b'', wrapped.read(2))
        # a clean read leaves the underlying input open until close()
        self.assertFalse(wrapped._input.closed)
        wrapped.close()
        self.assertTrue(wrapped._input.closed)

    def test_empty(self):
        wrapped = HashingInput(BytesIO(b''), 0, hashlib.sha256,
                               hashlib.sha256(b'').hexdigest())
        self.assertEqual(b'', wrapped.read(4))
        self.assertEqual(b'', wrapped.read(2))
        self.assertFalse(wrapped._input.closed)
        wrapped.close()
        self.assertTrue(wrapped._input.closed)

    def test_too_long(self):
        raw = b'123456789'
        # declared length (8) is one byte short of the actual input
        wrapped = HashingInput(
            BytesIO(raw), 8, lambda: md5(usedforsecurity=False),
            md5(raw, usedforsecurity=False).hexdigest())
        self.assertEqual(b'1234', wrapped.read(4))
        self.assertEqual(b'56', wrapped.read(2))
        # even though the hash matches, there was more data than we expected
        with self.assertRaises(swob.HTTPException) as raised:
            wrapped.read(3)
        self.assertEqual(raised.exception.status, '422 Unprocessable Entity')
        # the error causes us to close the input
        self.assertTrue(wrapped._input.closed)

    def test_too_short(self):
        raw = b'123456789'
        # declared length (10) exceeds the actual input by one byte
        wrapped = HashingInput(
            BytesIO(raw), 10, lambda: md5(usedforsecurity=False),
            md5(raw, usedforsecurity=False).hexdigest())
        self.assertEqual(b'1234', wrapped.read(4))
        self.assertEqual(b'56', wrapped.read(2))
        # even though the hash matches, there was less data than we expected
        with self.assertRaises(swob.HTTPException) as raised:
            wrapped.read(4)
        self.assertEqual(raised.exception.status, '422 Unprocessable Entity')
        self.assertTrue(wrapped._input.closed)

    def test_bad_hash(self):
        raw = b'123456789'
        # sha256 hasher but an md5 hexdigest as the expected value
        wrapped = HashingInput(
            BytesIO(raw), 9, hashlib.sha256,
            md5(raw, usedforsecurity=False).hexdigest())
        self.assertEqual(b'1234', wrapped.read(4))
        self.assertEqual(b'5678', wrapped.read(4))
        with self.assertRaises(swob.HTTPException) as raised:
            wrapped.read(4)
        self.assertEqual(raised.exception.status, '422 Unprocessable Entity')
        self.assertTrue(wrapped._input.closed)

    def test_empty_bad_hash(self):
        wrapped = HashingInput(BytesIO(b''), 0, hashlib.sha256, 'nope')
        with self.assertRaises(swob.HTTPException) as raised:
            wrapped.read(3)
        self.assertEqual(raised.exception.status, '422 Unprocessable Entity')
        # the error causes us to close the input
        self.assertTrue(wrapped._input.closed)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_s3request.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common import swob
from swift.common.swob import Request
from swift.common.utils import json
from test.unit.common.middleware.s3api.test_s3_acl import s3acl
from test.unit.common.middleware.s3api import S3ApiTestCase
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.subresource import ACL, Owner, encode_acl
def create_bucket_list_json(buckets):
    """
    Serialize a bucket listing to a JSON string.

    :param buckets: a list of tuples (or lists) whose elements are ordered
                    as name, count, bytes
    """
    listing = [
        dict(zip(('name', 'count', 'bytes'), (entry[0], entry[1], entry[2])))
        for entry in buckets]
    return json.dumps(listing)
class TestS3ApiService(S3ApiTestCase):
    """Tests for the GET Service (list-all-my-buckets) request path."""

    def setup_buckets(self):
        # Register a two-bucket account listing with the fake Swift.
        self.buckets = (('apple', 1, 200), ('orange', 3, 430))
        bucket_list = create_bucket_list_json(self.buckets)
        self.swift.register('GET', '/v1/AUTH_test', swob.HTTPOk, {},
                            bucket_list)

    def setUp(self):
        super(TestS3ApiService, self).setUp()
        self.setup_buckets()

    def test_service_GET_error(self):
        """Backend errors map to the corresponding S3 error codes."""
        code = self._test_method_error(
            'GET', '', swob.HTTPUnauthorized, expected_xml_tags=(
                'Code', 'Message', 'AWSAccessKeyId', 'StringToSign',
                'StringToSignBytes', 'SignatureProvided'))
        self.assertEqual(code, 'SignatureDoesNotMatch')
        code = self._test_method_error('GET', '', swob.HTTPForbidden)
        self.assertEqual(code, 'AccessDenied')
        code = self._test_method_error('GET', '', swob.HTTPServerError)
        self.assertEqual(code, 'InternalError')

    @s3acl
    def test_service_GET(self):
        """GET Service returns all buckets, each with two child elements
        (Name and CreationDate)."""
        req = Request.blank('/',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

        elem = fromstring(body, 'ListAllMyBucketsResult')

        all_buckets = elem.find('./Buckets')
        buckets = all_buckets.iterchildren('Bucket')
        listing = list(list(buckets)[0])
        # each Bucket element has exactly Name and CreationDate children
        self.assertEqual(len(listing), 2)

        names = []
        for b in all_buckets.iterchildren('Bucket'):
            names.append(b.find('./Name').text)

        self.assertEqual(len(names), len(self.buckets))
        for i in self.buckets:
            self.assertTrue(i[0] in names)

    @s3acl
    def test_service_GET_subresource(self):
        """An unknown subresource (?acl) on the service URL is ignored and
        behaves like a plain GET Service."""
        req = Request.blank('/?acl',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

        elem = fromstring(body, 'ListAllMyBucketsResult')

        all_buckets = elem.find('./Buckets')
        buckets = all_buckets.iterchildren('Bucket')
        listing = list(list(buckets)[0])
        self.assertEqual(len(listing), 2)

        names = []
        for b in all_buckets.iterchildren('Bucket'):
            names.append(b.find('./Name').text)

        self.assertEqual(len(names), len(self.buckets))
        for i in self.buckets:
            self.assertTrue(i[0] in names)

    def test_service_GET_with_blind_resource(self):
        """Containers with '+' in the name (e.g. MPU segment containers)
        are hidden from the listing."""
        buckets = (('apple', 1, 200), ('orange', 3, 430),
                   ('apple+segment', 1, 200))
        expected = buckets[:-1]
        bucket_list = create_bucket_list_json(buckets)
        self.swift.register('GET', '/v1/AUTH_test', swob.HTTPOk, {},
                            bucket_list)

        req = Request.blank('/',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

        elem = fromstring(body, 'ListAllMyBucketsResult')
        all_buckets = elem.find('./Buckets')
        buckets = all_buckets.iterchildren('Bucket')
        listing = list(list(buckets)[0])
        self.assertEqual(len(listing), 2)

        names = []
        for b in all_buckets.iterchildren('Bucket'):
            names.append(b.find('./Name').text)

        self.assertEqual(len(names), len(expected))
        for i in expected:
            self.assertIn(i[0], names)

    def _test_service_GET_for_check_bucket_owner(self, buckets):
        # Enable owner filtering, register the given listing and issue a
        # GET Service request; returns (status, headers, body).
        self.s3api.conf.check_bucket_owner = True
        bucket_list = create_bucket_list_json(buckets)
        self.swift.register('GET', '/v1/AUTH_test', swob.HTTPOk, {},
                            bucket_list)
        req = Request.blank('/',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        return self.call_s3api(req)

    @s3acl(s3acl_only=True)
    def test_service_GET_without_bucket(self):
        """With check_bucket_owner, buckets whose HEAD 404s are omitted."""
        bucket_list = []
        for var in range(0, 10):
            bucket = 'bucket%s' % var
            self.swift.register('HEAD', '/v1/AUTH_test/%s' % bucket,
                                swob.HTTPNotFound, {}, None)
            bucket_list.append((bucket, var, 300 + var))
        status, headers, body = \
            self._test_service_GET_for_check_bucket_owner(bucket_list)
        self.assertEqual(status.split()[0], '200')

        elem = fromstring(body, 'ListAllMyBucketsResult')
        resp_buckets = elem.find('./Buckets')
        buckets = resp_buckets.iterchildren('Bucket')
        self.assertEqual(len(list(buckets)), 0)

    @s3acl(s3acl_only=True)
    def test_service_GET_without_owner_bucket(self):
        """With check_bucket_owner, buckets owned by another user are
        omitted from the listing."""
        bucket_list = []
        for var in range(0, 10):
            user_id = 'test:other'
            bucket = 'bucket%s' % var
            owner = Owner(user_id, user_id)
            headers = encode_acl('container', ACL(owner, []))
            self.swift.register('HEAD', '/v1/AUTH_test/%s' % bucket,
                                swob.HTTPNoContent, headers, None)
            bucket_list.append((bucket, var, 300 + var))
        status, headers, body = \
            self._test_service_GET_for_check_bucket_owner(bucket_list)
        self.assertEqual(status.split()[0], '200')

        elem = fromstring(body, 'ListAllMyBucketsResult')
        resp_buckets = elem.find('./Buckets')
        buckets = resp_buckets.iterchildren('Bucket')
        self.assertEqual(len(list(buckets)), 0)

    @s3acl(s3acl_only=True)
    def test_service_GET_bucket_list(self):
        """With check_bucket_owner, only buckets owned by the requester
        appear; 404s and foreign-owned buckets are filtered out."""
        bucket_list = []
        for var in range(0, 10):
            if var % 3 == 0:
                user_id = 'test:tester'
            else:
                user_id = 'test:other'
            bucket = 'bucket%s' % var
            owner = Owner(user_id, user_id)
            headers = encode_acl('container', ACL(owner, []))
            # set register to get owner of buckets
            if var % 3 == 2:
                self.swift.register('HEAD', '/v1/AUTH_test/%s' % bucket,
                                    swob.HTTPNotFound, {}, None)
            else:
                self.swift.register('HEAD', '/v1/AUTH_test/%s' % bucket,
                                    swob.HTTPNoContent, headers, None)
            bucket_list.append((bucket, var, 300 + var))
        status, headers, body = \
            self._test_service_GET_for_check_bucket_owner(bucket_list)
        self.assertEqual(status.split()[0], '200')

        elem = fromstring(body, 'ListAllMyBucketsResult')
        resp_buckets = elem.find('./Buckets')
        buckets = resp_buckets.iterchildren('Bucket')
        listing = list(list(buckets)[0])
        self.assertEqual(len(listing), 2)

        names = []
        for b in resp_buckets.iterchildren('Bucket'):
            names.append(b.find('./Name').text)

        # Check whether getting bucket only locate in multiples of 3 in
        # bucket_list which mean requested user is owner.
        expected_buckets = [b for i, b in enumerate(bucket_list)
                            if i % 3 == 0]
        self.assertEqual(len(names), len(expected_buckets))
        for i in expected_buckets:
            self.assertTrue(i[0] in names)
        # one account GET plus one HEAD per bucket
        self.assertEqual(len(self.swift.calls_with_headers), 11)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_service.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import six
from swift.common.middleware.s3api import etree
class TestS3ApiEtree(unittest.TestCase):
    """Tests for the s3api etree wrapper: namespace normalization,
    comment tolerance and non-ASCII round-tripping."""

    def test_xml_namespace(self):
        def test_xml(ns, prefix):
            # Build '<A {ns}><{prefix}B>C</{prefix}B></A>'
            return '<A %(ns)s><%(prefix)sB>C</%(prefix)sB></A>' % \
                ({'ns': ns, 'prefix': prefix})

        # No namespace is same as having the S3 namespace.
        xml = test_xml('', '')
        elem = etree.fromstring(xml)
        self.assertEqual(elem.find('./B').text, 'C')

        # The S3 namespace is handled as no namespace.
        xml = test_xml('xmlns="%s"' % etree.XMLNS_S3, '')
        elem = etree.fromstring(xml)
        self.assertEqual(elem.find('./B').text, 'C')
        xml = test_xml('xmlns:s3="%s"' % etree.XMLNS_S3, 's3:')
        elem = etree.fromstring(xml)
        self.assertEqual(elem.find('./B').text, 'C')

        # Any namespaces without a prefix work as no namespace.
        xml = test_xml('xmlns="http://example.com/"', '')
        elem = etree.fromstring(xml)
        self.assertEqual(elem.find('./B').text, 'C')

        # ...but a prefixed foreign namespace does not match
        xml = test_xml('xmlns:s3="http://example.com/"', 's3:')
        elem = etree.fromstring(xml)
        self.assertIsNone(elem.find('./B'))

    def test_xml_with_comments(self):
        # XML comments must be skipped by the parser, not surfaced as nodes
        xml = '<A><!-- comment --><B>C</B></A>'
        elem = etree.fromstring(xml)
        self.assertEqual(elem.find('./B').text, 'C')

    def test_tostring_with_nonascii_text(self):
        elem = etree.Element('Test')
        sub = etree.SubElement(elem, 'FOO')
        # native str containing non-ASCII characters
        sub.text = '\xef\xbc\xa1'
        self.assertTrue(isinstance(sub.text, str))
        xml_string = etree.tostring(elem)
        # serialization always yields bytes
        self.assertIsInstance(xml_string, bytes)

    def test_fromstring_with_nonascii_text(self):
        input_str = b'<?xml version="1.0" encoding="UTF-8"?>\n' \
            b'<Test><FOO>\xef\xbc\xa1</FOO></Test>'
        elem = etree.fromstring(input_str)
        text = elem.find('FOO').text
        # py2 keeps the UTF-8 bytes; py3 decodes them to a unicode str
        if six.PY2:
            self.assertEqual(text, b'\xef\xbc\xa1')
        else:
            self.assertEqual(text, b'\xef\xbc\xa1'.decode('utf8'))
        self.assertIsInstance(text, str)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_etree.py |
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import base64
import json
import time
import unittest
import uuid
import mock
import requests
from requests_mock.contrib import fixture as rm_fixture
from six.moves import urllib
from swift.common.middleware.s3api import s3token
from swift.common.swob import Request, Response
from swift.common.wsgi import ConfigFileError
from test.debug_logger import debug_logger
# Canonical Keystone v2-format body for a successful POST to /s3tokens;
# tests deep-copy this and delete keys to simulate malformed replies.
GOOD_RESPONSE_V2 = {'access': {
    'user': {
        'username': 'S3_USER',
        'name': 'S3_USER',
        'id': 'USER_ID',
        'roles': [
            {'name': 'swift-user'},
            {'name': '_member_'},
        ],
    },
    'token': {
        'id': 'TOKEN_ID',
        'tenant': {
            'id': 'TENANT_ID',
            'name': 'TENANT_NAME'
        }
    }
}}
# Canonical Keystone v3-format body for a successful POST to /s3tokens;
# used by the V3 test case below.
GOOD_RESPONSE_V3 = {'token': {
    'user': {
        'domain': {
            'name': 'Default',
            'id': 'default',
        },
        'name': 'S3_USER',
        'id': 'USER_ID',
    },
    'project': {
        'domain': {
            'name': 'PROJECT_DOMAIN_NAME',
            'id': 'PROJECT_DOMAIN_ID',
        },
        'name': 'PROJECT_NAME',
        'id': 'PROJECT_ID',
    },
    'roles': [
        {'name': 'swift-user'},
        {'name': '_member_'},
    ],
}}
class FakeResponse(requests.Response):
    """Convenience wrapper around requests.Response.

    Accepts either a bare status code or a dict describing status code,
    headers and body text, so tests can build a response in one
    expression.
    """

    def __init__(self, data):
        self._text = None
        super(FakeResponse, self).__init__()
        if not isinstance(data, dict):
            # A plain status code was supplied.
            self.status_code = data
            return
        self.status_code = data.get('status_code', 200)
        headers = data.get('headers')
        if headers:
            self.headers.update(headers)
        # Fake the text attribute to streamline Response creation;
        # _content is defined by requests.Response.
        self._content = data.get('text')

    def __eq__(self, other):
        # Two fake responses compare equal when all attributes match.
        return self.__dict__ == other.__dict__

    @property
    def text(self):
        return self.content
class FakeApp(object):
    """This represents a WSGI app protected by the auth_token middleware."""

    # In the original, this docstring appeared *after* ``calls = 0``,
    # making it a dead string literal rather than the class docstring;
    # it must be the first statement in the class body.

    # Invocation counter; the first __call__ shadows this class
    # attribute with an instance attribute.
    calls = 0

    def __call__(self, env, start_response):
        self.calls += 1
        resp = Response()
        resp.environ = env
        return resp(env, start_response)
class S3TokenMiddlewareTestBase(unittest.TestCase):
    """Common fixture for the s3token middleware tests."""

    # Keystone endpoint / identity fixture constants shared by subclasses.
    TEST_AUTH_URI = 'https://fakehost/identity/v2.0'
    TEST_URL = '%s/s3tokens' % (TEST_AUTH_URI, )
    TEST_DOMAIN_ID = '1'
    TEST_DOMAIN_NAME = 'aDomain'
    TEST_GROUP_ID = uuid.uuid4().hex
    TEST_ROLE_ID = uuid.uuid4().hex
    TEST_TENANT_ID = '1'
    TEST_TENANT_NAME = 'aTenant'
    TEST_TOKEN = 'aToken'
    TEST_TRUST_ID = 'aTrust'
    TEST_USER = 'test'
    TEST_USER_ID = uuid.uuid4().hex

    TEST_ROOT_URL = 'http://127.0.0.1:5000/'

    def setUp(self):
        super(S3TokenMiddlewareTestBase, self).setUp()
        self.logger = debug_logger()
        # Freeze time so any cache-expiry arithmetic is deterministic.
        self.time_patcher = mock.patch.object(time, 'time', lambda: 1234)
        self.time_patcher.start()

        self.app = FakeApp()
        self.conf = {
            'auth_uri': self.TEST_AUTH_URI,
        }
        self.middleware = self.make_middleware(self.conf)

        # Intercept all outbound HTTP made via the requests library.
        self.requests_mock = rm_fixture.Fixture()
        self.requests_mock.setUp()

    def make_middleware(self, conf):
        # Build the middleware with its logger replaced by the debug logger.
        with mock.patch('swift.common.middleware.s3api.s3token.get_logger',
                        return_value=self.logger):
            return s3token.S3Token(self.app, conf)

    def tearDown(self):
        self.requests_mock.cleanUp()
        self.time_patcher.stop()
        super(S3TokenMiddlewareTestBase, self).tearDown()

    def start_fake_response(self, status, headers):
        # WSGI start_response stand-in that records status and headers
        # for assertions.
        self.response_status = int(status.split(' ', 1)[0])
        self.response_headers = dict(headers)
class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
    """Happy-path tests: Keystone validates the credentials (v2 format)."""

    def setUp(self):
        super(S3TokenMiddlewareTestGood, self).setUp()

        self.requests_mock.post(self.TEST_URL,
                                status_code=201,
                                json=GOOD_RESPONSE_V2)

    # Ignore the request and pass to the next middleware in the
    # pipeline if no path has been specified.
    def test_no_path_request(self):
        req = Request.blank('/')
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)

    # Ignore the request and pass to the next middleware in the
    # pipeline if no Authorization header has been specified
    def test_without_authorization(self):
        req = Request.blank('/v1/AUTH_cfa/c/o')
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)

    def test_nukes_auth_headers(self):
        # Client-supplied identity headers must be stripped, never trusted.
        client_env = {
            'HTTP_X_IDENTITY_STATUS': 'Confirmed',
            'HTTP_X_ROLES': 'admin,_member_,swift-user',
            'HTTP_X_TENANT_ID': 'cfa'
        }
        req = Request.blank('/v1/AUTH_cfa/c/o', environ=client_env)
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)
        for key in client_env:
            self.assertNotIn(key, req.environ)

    def test_without_auth_storage_token(self):
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.headers['Authorization'] = 'AWS badboy'
        self.middleware(req.environ, self.start_fake_response)
        self.assertEqual(self.response_status, 200)

    def _assert_authorized(self, req, account_path='/v1/AUTH_TENANT_ID/'):
        # Common assertions for a successfully authorized request.
        self.assertTrue(
            req.path.startswith(account_path),
            '%r does not start with %r' % (req.path, account_path))
        self.assertNotIn('X-Auth-Token', req.headers)
        expected_headers = {
            'X-Identity-Status': 'Confirmed',
            'X-Roles': 'swift-user,_member_',
            'X-User-Id': 'USER_ID',
            'X-User-Name': 'S3_USER',
            'X-Tenant-Id': 'TENANT_ID',
            'X-Tenant-Name': 'TENANT_NAME',
            'X-Project-Id': 'TENANT_ID',
            'X-Project-Name': 'TENANT_NAME',
        }
        for header, value in expected_headers.items():
            self.assertIn(header, req.headers)
            self.assertEqual(value, req.headers[header])
            # WSGI wants native strings for headers
            self.assertIsInstance(req.headers[header], str)
        self.assertEqual(1, self.middleware._app.calls)

        self.assertEqual(1, self.requests_mock.call_count)
        request_call = self.requests_mock.request_history[0]
        self.assertEqual(json.loads(request_call.body), {'credentials': {
            'access': 'access',
            'signature': 'signature',
            'token': base64.urlsafe_b64encode(b'token').decode('ascii')}})

    def test_authorized(self):
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_tolerate_missing_token_id(self):
        resp = copy.deepcopy(GOOD_RESPONSE_V2)
        del resp['access']['token']['id']
        self.requests_mock.post(self.TEST_URL,
                                status_code=201,
                                json=resp)
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorized_bytes(self):
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': b'access',
            'signature': b'signature',
            'string_to_sign': b'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorized_http(self):
        auth_uri = 'http://fakehost:35357/v2.0'
        self.requests_mock.post(
            '%s/s3tokens' % auth_uri,
            status_code=201, json=GOOD_RESPONSE_V2)

        self.middleware = self.make_middleware({
            'auth_uri': auth_uri})
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorized_v3(self):
        # Prior to https://github.com/openstack/keystone/commit/dd1e705
        # even v3 URLs would respond with a v2-format response
        auth_uri = 'http://fakehost:35357/v3'
        self.requests_mock.post(
            '%s/s3tokens' % auth_uri,
            status_code=201, json=GOOD_RESPONSE_V2)

        self.middleware = self.make_middleware({
            'auth_uri': auth_uri})
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorized_trailing_slash(self):
        self.middleware = self.make_middleware({
            'auth_uri': self.TEST_AUTH_URI + '/'})
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorization_nova_toconnect(self):
        req = Request.blank('/v1/AUTH_swiftint/c/o')
        # A tenant id forced via the access key overrides the URL account.
        req.environ['s3api.auth_details'] = {
            'access_key': u'access:FORCED_TENANT_ID',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req, account_path='/v1/AUTH_FORCED_TENANT_ID/')

    @mock.patch.object(requests, 'post')
    def test_insecure(self, MOCK_REQUEST):
        self.middleware = self.make_middleware({
            'insecure': 'True', 'auth_uri': 'http://example.com'})

        text_return_value = json.dumps(GOOD_RESPONSE_V2)
        MOCK_REQUEST.return_value = FakeResponse({
            'status_code': 201,
            'text': text_return_value})

        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)

        self.assertTrue(MOCK_REQUEST.called)
        mock_args, mock_kwargs = MOCK_REQUEST.call_args
        self.assertIs(mock_kwargs['verify'], False)

    def test_insecure_option(self):
        # insecure is passed as a string.

        # Some non-secure values.
        true_values = ['true', 'True', '1', 'yes']
        for val in true_values:
            config = {'insecure': val,
                      'certfile': 'false_ind',
                      'auth_uri': 'http://example.com'}
            middleware = s3token.filter_factory(config)(self.app)
            self.assertIs(False, middleware._verify)

        # Some "secure" values, including unexpected value.
        false_values = ['false', 'False', '0', 'no', 'someweirdvalue']
        for val in false_values:
            config = {'insecure': val,
                      'certfile': 'false_ind',
                      'auth_uri': 'http://example.com'}
            middleware = s3token.filter_factory(config)(self.app)
            self.assertEqual('false_ind', middleware._verify)

        # Default is secure.
        config = {'certfile': 'false_ind',
                  'auth_uri': 'http://example.com'}
        middleware = s3token.filter_factory(config)(self.app)
        # Compare by value, not identity: the original assertIs only
        # passed because CPython happens to intern the literal, and the
        # equivalent checks above already use assertEqual.
        self.assertEqual('false_ind', middleware._verify)

    def test_reseller_prefix(self):
        def do_test(conf, expected):
            conf.update(self.conf)
            middleware = s3token.filter_factory(conf)(self.app)
            self.assertEqual(expected, middleware._reseller_prefix)

        do_test({}, 'AUTH_')
        do_test({'reseller_prefix': 'KEY_'}, 'KEY_')
        # A missing trailing underscore is appended automatically.
        do_test({'reseller_prefix': 'KEY'}, 'KEY_')

    def test_auth_uris(self):
        for conf, expected in [
                ({'auth_uri': 'https://example.com/v2.0'},
                 'https://example.com/v2.0/s3tokens'),
                # Trailing slash doesn't interfere
                ({'auth_uri': 'https://example.com/v2.0/'},
                 'https://example.com/v2.0/s3tokens'),
                # keystone running under mod_wsgi often has a path prefix
                ({'auth_uri': 'https://example.com/identity/v2.0'},
                 'https://example.com/identity/v2.0/s3tokens'),
                ({'auth_uri': 'https://example.com/identity/v2.0/'},
                 'https://example.com/identity/v2.0/s3tokens'),
                # IPv4 addresses are fine
                ({'auth_uri': 'http://127.0.0.1:35357/v3'},
                 'http://127.0.0.1:35357/v3/s3tokens'),
                ({'auth_uri': 'http://127.0.0.1:35357/v3/'},
                 'http://127.0.0.1:35357/v3/s3tokens'),
                # IPv6 addresses need [brackets] per RFC 3986
                ({'auth_uri': 'https://[::FFFF:129.144.52.38]:5000/v3'},
                 'https://[::FFFF:129.144.52.38]:5000/v3/s3tokens'),
                ({'auth_uri': 'https://[::FFFF:129.144.52.38]:5000/v3/'},
                 'https://[::FFFF:129.144.52.38]:5000/v3/s3tokens'),
        ]:
            middleware = s3token.filter_factory(conf)(self.app)
            self.assertEqual(expected, middleware._request_uri)

    @mock.patch.object(requests, 'post')
    def test_http_timeout(self, MOCK_REQUEST):
        self.middleware = self.make_middleware({
            'http_timeout': '2',
            'auth_uri': 'http://example.com',
        })

        MOCK_REQUEST.return_value = FakeResponse({
            'status_code': 201,
            'text': json.dumps(GOOD_RESPONSE_V2)})

        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)

        self.assertTrue(MOCK_REQUEST.called)
        mock_args, mock_kwargs = MOCK_REQUEST.call_args
        self.assertEqual(mock_kwargs['timeout'], 2)

    def test_http_timeout_option(self):
        good_values = ['1', '5.3', '10', '.001']
        for val in good_values:
            middleware = s3token.filter_factory({
                'http_timeout': val,
                'auth_uri': 'http://example.com',
            })(self.app)
            self.assertEqual(float(val), middleware._timeout)

        bad_values = ['1, 4', '-3', '100', 'foo', '0']
        for val in bad_values:
            with self.assertRaises(ValueError) as ctx:
                s3token.filter_factory({
                    'http_timeout': val,
                    'auth_uri': 'http://example.com',
                })(self.app)
            self.assertTrue(ctx.exception.args[0].startswith((
                'invalid literal for float():',
                'could not convert string to float:',
                'http_timeout must be between 0 and 60 seconds',
            )), 'Unexpected error message: %s' % ctx.exception)

        # default is 10 seconds
        middleware = s3token.filter_factory({
            'auth_uri': 'http://example.com'})(self.app)
        self.assertEqual(10, middleware._timeout)

    def test_bad_auth_uris(self):
        for auth_uri in [
                '/not/a/uri',
                'http://',
                '//example.com/path']:
            with self.assertRaises(ConfigFileError) as cm:
                s3token.filter_factory({'auth_uri': auth_uri})(self.app)
            self.assertEqual('Invalid auth_uri; must include scheme and host',
                             cm.exception.args[0])
        with self.assertRaises(ConfigFileError) as cm:
            s3token.filter_factory({
                'auth_uri': 'nonhttp://example.com'})(self.app)
        self.assertEqual('Invalid auth_uri; scheme must be http or https',
                         cm.exception.args[0])
        for auth_uri in [
                'http://user@example.com/',
                'http://example.com/?with=query',
                'http://example.com/#with-fragment']:
            with self.assertRaises(ConfigFileError) as cm:
                s3token.filter_factory({'auth_uri': auth_uri})(self.app)
            self.assertEqual('Invalid auth_uri; must not include username, '
                             'query, or fragment', cm.exception.args[0])

    def test_unicode_path(self):
        url = u'/v1/AUTH_cfa/c/euro\u20ac'.encode('utf8')
        req = Request.blank(urllib.parse.quote(url))
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorize_with_access_key(self):
        # The access key in the path gets rewritten to the real account.
        req = Request.blank('/v1/accesskey/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req, account_path='/v1/')
        self.assertEqual(req.environ['PATH_INFO'], '/v1/AUTH_TENANT_ID/c/o')

    def test_authorize_with_access_key_and_unquote_chars(self):
        req = Request.blank('/v1/access%key=/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req, account_path='/v1/')
        self.assertEqual(req.environ['PATH_INFO'], '/v1/AUTH_TENANT_ID/c/o')

    @mock.patch('swift.common.middleware.s3api.s3token.cache_from_env')
    @mock.patch('keystoneclient.v3.client.Client')
    @mock.patch.object(requests, 'post')
    def test_secret_is_cached(self, MOCK_REQUEST, MOCK_KEYSTONE,
                              MOCK_CACHE_FROM_ENV):
        self.middleware = self.make_middleware({
            'auth_uri': 'http://example.com',
            'secret_cache_duration': '20',
            'auth_type': 'v3password',
            'auth_url': 'http://example.com:5000/v3',
            'username': 'swift',
            'password': 'secret',
            'project_name': 'service',
            'user_domain_name': 'default',
            'project_domain_name': 'default',
        })
        self.assertEqual(20, self.middleware._secret_cache_duration)
        self.assertIsNone(MOCK_KEYSTONE.mock_calls[0][2]['region_name'])

        cache = MOCK_CACHE_FROM_ENV.return_value
        fake_cache_response = ({}, {'id': 'tenant_id'}, 'secret')
        cache.get.return_value = fake_cache_response

        MOCK_REQUEST.return_value = FakeResponse({
            'status_code': 201,
            'text': json.dumps(GOOD_RESPONSE_V2)})

        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
            'check_signature': lambda x: True
        }
        req.get_response(self.middleware)
        # Ensure we don't request auth from keystone
        self.assertFalse(MOCK_REQUEST.called)

    @mock.patch('swift.common.middleware.s3api.s3token.cache_from_env')
    @mock.patch('keystoneclient.v3.client.Client')
    @mock.patch.object(requests, 'post')
    def test_secret_sets_cache(self, MOCK_REQUEST, MOCK_KEYSTONE,
                               MOCK_CACHE_FROM_ENV):
        self.middleware = self.make_middleware({
            'auth_uri': 'http://example.com',
            'secret_cache_duration': '20',
            'auth_type': 'v3password',
            'auth_url': 'http://example.com:5000/v3',
            'username': 'swift',
            'password': 'secret',
            'project_name': 'service',
            'user_domain_name': 'default',
            'project_domain_name': 'default',
            'region_name': 'some-other-region',
        })
        self.assertEqual(20, self.middleware._secret_cache_duration)
        self.assertEqual(MOCK_KEYSTONE.mock_calls[0][2]['region_name'],
                         'some-other-region')

        cache = MOCK_CACHE_FROM_ENV.return_value
        cache.get.return_value = None

        keystone_client = MOCK_KEYSTONE.return_value
        keystone_client.ec2.get.return_value = mock.Mock(secret='secret')

        MOCK_REQUEST.return_value = FakeResponse({
            'status_code': 201,
            'text': json.dumps(GOOD_RESPONSE_V2).encode('ascii')})

        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
            'check_signature': lambda x: True
        }
        req.get_response(self.middleware)
        expected_headers = {
            'X-Identity-Status': u'Confirmed',
            'X-Roles': u'swift-user,_member_',
            'X-User-Id': u'USER_ID',
            'X-User-Name': u'S3_USER',
            'X-Tenant-Id': u'TENANT_ID',
            'X-Tenant-Name': u'TENANT_NAME',
            'X-Project-Id': u'TENANT_ID',
            'X-Project-Name': u'TENANT_NAME',
        }
        self.assertTrue(MOCK_REQUEST.called)
        tenant = GOOD_RESPONSE_V2['access']['token']['tenant']
        expected_cache = (expected_headers, tenant, 'secret')
        cache.set.assert_called_once_with('s3secret/access', expected_cache,
                                          time=20)
class S3TokenMiddlewareTestBad(S3TokenMiddlewareTestBase):
    """Failure-path tests: Keystone rejects or returns malformed replies."""

    def test_unauthorized_token(self):
        ret = {"error":
               {"message": "EC2 access key not found.",
                "code": 401,
                "title": "Unauthorized"}}
        self.requests_mock.post(self.TEST_URL, status_code=403, json=ret)
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        resp = req.get_response(self.middleware)
        # The client should see the same AccessDenied body/status the
        # middleware would generate itself.
        s3_denied_req = self.middleware._deny_request('AccessDenied')
        self.assertEqual(resp.body, s3_denied_req.body)
        self.assertEqual(
            resp.status_int,  # pylint: disable-msg=E1101
            s3_denied_req.status_int)  # pylint: disable-msg=E1101
        self.assertEqual(0, self.middleware._app.calls)

        self.assertEqual(1, self.requests_mock.call_count)
        request_call = self.requests_mock.request_history[0]
        self.assertEqual(json.loads(request_call.body), {'credentials': {
            'access': 'access',
            'signature': 'signature',
            'token': base64.urlsafe_b64encode(b'token').decode('ascii')}})

    def test_no_s3_creds_defers_to_auth_middleware(self):
        # Without an Authorization header, we should just pass through to the
        # auth system to make a decision.
        req = Request.blank('/v1/AUTH_cfa/c/o')
        resp = req.get_response(self.middleware)
        self.assertEqual(resp.status_int, 200)  # pylint: disable-msg=E1101
        self.assertEqual(1, self.middleware._app.calls)

    def test_fail_to_connect_to_keystone(self):
        with mock.patch.object(self.middleware, '_json_request') as o:
            s3_invalid_resp = self.middleware._deny_request('InvalidURI')
            o.side_effect = s3_invalid_resp

            req = Request.blank('/v1/AUTH_cfa/c/o')
            req.environ['s3api.auth_details'] = {
                'access_key': u'access',
                'signature': u'signature',
                'string_to_sign': u'token',
            }
            resp = req.get_response(self.middleware)
            self.assertEqual(resp.body, s3_invalid_resp.body)
            self.assertEqual(
                resp.status_int,  # pylint: disable-msg=E1101
                s3_invalid_resp.status_int)  # pylint: disable-msg=E1101
            self.assertEqual(0, self.middleware._app.calls)

    def _test_bad_reply(self, response_body):
        # Any unparseable/invalid Keystone reply maps to InvalidURI.
        self.requests_mock.post(self.TEST_URL,
                                status_code=201,
                                text=response_body)

        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        resp = req.get_response(self.middleware)
        s3_invalid_resp = self.middleware._deny_request('InvalidURI')
        self.assertEqual(resp.body, s3_invalid_resp.body)
        self.assertEqual(
            resp.status_int,  # pylint: disable-msg=E1101
            s3_invalid_resp.status_int)  # pylint: disable-msg=E1101
        self.assertEqual(0, self.middleware._app.calls)

    def test_bad_reply_not_json(self):
        self._test_bad_reply('<badreply>')

    def _test_bad_reply_missing_parts(self, *parts):
        # Delete the nested key described by ``parts`` from a good
        # response and verify the middleware rejects it.
        resp = copy.deepcopy(GOOD_RESPONSE_V2)
        part_dict = resp
        for part in parts[:-1]:
            part_dict = part_dict[part]
        del part_dict[parts[-1]]
        self._test_bad_reply(json.dumps(resp))

    def test_bad_reply_missing_token_dict(self):
        self._test_bad_reply_missing_parts('access', 'token')

    def test_bad_reply_missing_user_dict(self):
        self._test_bad_reply_missing_parts('access', 'user')

    def test_bad_reply_missing_user_roles(self):
        self._test_bad_reply_missing_parts('access', 'user', 'roles')

    def test_bad_reply_missing_user_name(self):
        self._test_bad_reply_missing_parts('access', 'user', 'name')

    def test_bad_reply_missing_user_id(self):
        self._test_bad_reply_missing_parts('access', 'user', 'id')

    def test_bad_reply_missing_tenant_dict(self):
        self._test_bad_reply_missing_parts('access', 'token', 'tenant')

    def test_bad_reply_missing_tenant_id(self):
        self._test_bad_reply_missing_parts('access', 'token', 'tenant', 'id')

    def test_bad_reply_missing_tenant_name(self):
        self._test_bad_reply_missing_parts('access', 'token', 'tenant', 'name')

    def test_bad_reply_valid_but_bad_json(self):
        self._test_bad_reply('{}')
        self._test_bad_reply('[]')
        self._test_bad_reply('null')
        self._test_bad_reply('"foo"')
        self._test_bad_reply('1')
        self._test_bad_reply('true')
class S3TokenMiddlewareTestDeferredAuth(S3TokenMiddlewareTestBase):
    """With delay_auth_decision enabled, failures fall through to the
    rest of the pipeline instead of being rejected immediately."""

    def setUp(self):
        super(S3TokenMiddlewareTestDeferredAuth, self).setUp()
        self.conf['delay_auth_decision'] = 'yes'
        self.middleware = self.make_middleware(self.conf)

    def test_unauthorized_token(self):
        ret = {"error":
               {"message": "EC2 access key not found.",
                "code": 401,
                "title": "Unauthorized"}}
        self.requests_mock.post(self.TEST_URL, status_code=403, json=ret)
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        resp = req.get_response(self.middleware)
        # Request still reaches the app despite the Keystone rejection.
        self.assertEqual(
            resp.status_int,  # pylint: disable-msg=E1101
            200)
        self.assertNotIn('X-Auth-Token', req.headers)
        self.assertEqual(1, self.middleware._app.calls)

        self.assertEqual(1, self.requests_mock.call_count)
        request_call = self.requests_mock.request_history[0]
        self.assertEqual(json.loads(request_call.body), {'credentials': {
            'access': 'access',
            'signature': 'signature',
            'token': base64.urlsafe_b64encode(b'token').decode('ascii')}})

    def test_fail_to_connect_to_keystone(self):
        with mock.patch.object(self.middleware, '_json_request') as o:
            o.side_effect = self.middleware._deny_request('InvalidURI')

            req = Request.blank('/v1/AUTH_cfa/c/o')
            req.environ['s3api.auth_details'] = {
                'access_key': u'access',
                'signature': u'signature',
                'string_to_sign': u'token',
            }
            resp = req.get_response(self.middleware)
            self.assertEqual(
                resp.status_int,  # pylint: disable-msg=E1101
                200)
            self.assertNotIn('X-Auth-Token', req.headers)
            self.assertEqual(1, self.middleware._app.calls)

    def test_bad_reply(self):
        self.requests_mock.post(self.TEST_URL,
                                status_code=201,
                                text="<badreply>")

        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        resp = req.get_response(self.middleware)
        self.assertEqual(
            resp.status_int,  # pylint: disable-msg=E1101
            200)
        self.assertNotIn('X-Auth-Token', req.headers)
        self.assertEqual(1, self.middleware._app.calls)
class S3TokenMiddlewareTestV3(S3TokenMiddlewareTestBase):
    """Tests for Keystone replies in the v3 token format."""

    def setUp(self):
        super(S3TokenMiddlewareTestV3, self).setUp()

        self.requests_mock.post(self.TEST_URL,
                                status_code=200,
                                json=GOOD_RESPONSE_V3)

    def _assert_authorized(self, req,
                           account_path='/v1/AUTH_PROJECT_ID/'):
        # Common assertions for a successfully authorized v3 request.
        self.assertTrue(req.path.startswith(account_path))
        expected_headers = {
            'X-Identity-Status': 'Confirmed',
            'X-Roles': 'swift-user,_member_',
            'X-User-Id': 'USER_ID',
            'X-User-Name': 'S3_USER',
            'X-User-Domain-Id': 'default',
            'X-User-Domain-Name': 'Default',
            'X-Tenant-Id': 'PROJECT_ID',
            'X-Tenant-Name': 'PROJECT_NAME',
            'X-Project-Id': 'PROJECT_ID',
            'X-Project-Name': 'PROJECT_NAME',
            'X-Project-Domain-Id': 'PROJECT_DOMAIN_ID',
            'X-Project-Domain-Name': 'PROJECT_DOMAIN_NAME',
        }
        for header, value in expected_headers.items():
            self.assertIn(header, req.headers)
            self.assertEqual(value, req.headers[header])
            # WSGI wants native strings for headers
            self.assertIsInstance(req.headers[header], str)
        self.assertNotIn('X-Auth-Token', req.headers)
        self.assertEqual(1, self.middleware._app.calls)

    def test_authorized(self):
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorized_bytes(self):
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': b'access',
            'signature': b'signature',
            'string_to_sign': b'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorized_http(self):
        # Following https://github.com/openstack/keystone/commit/3ec1aa4
        # even v2 URLs would respond with a v3-format response
        auth_uri = 'http://fakehost:35357/v2.0/'
        self.requests_mock.post(
            auth_uri + 's3tokens',
            status_code=201, json=GOOD_RESPONSE_V3)

        self.middleware = self.make_middleware({
            'auth_uri': auth_uri})
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorized_v3(self):
        auth_uri = 'http://fakehost:35357/v3/'
        self.requests_mock.post(
            auth_uri + 's3tokens',
            status_code=201, json=GOOD_RESPONSE_V3)

        self.middleware = self.make_middleware({
            'auth_uri': auth_uri})
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorized_trailing_slash(self):
        self.middleware = self.make_middleware({
            'auth_uri': self.TEST_AUTH_URI + '/'})
        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req)

    def test_authorization_nova_toconnect(self):
        req = Request.blank('/v1/AUTH_swiftint/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access:FORCED_TENANT_ID',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req, account_path='/v1/AUTH_FORCED_TENANT_ID/')

    def _test_bad_reply_missing_parts(self, *parts):
        # Delete the nested key described by ``parts`` from a good v3
        # response and verify the middleware rejects it with InvalidURI.
        resp = copy.deepcopy(GOOD_RESPONSE_V3)
        part_dict = resp
        for part in parts[:-1]:
            part_dict = part_dict[part]
        del part_dict[parts[-1]]
        self.requests_mock.post(self.TEST_URL,
                                status_code=201,
                                text=json.dumps(resp))

        req = Request.blank('/v1/AUTH_cfa/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        resp = req.get_response(self.middleware)
        s3_invalid_resp = self.middleware._deny_request('InvalidURI')
        self.assertEqual(resp.body, s3_invalid_resp.body)
        self.assertEqual(
            resp.status_int,  # pylint: disable-msg=E1101
            s3_invalid_resp.status_int)  # pylint: disable-msg=E1101
        self.assertEqual(0, self.middleware._app.calls)

    def test_bad_reply_missing_parts(self):
        self._test_bad_reply_missing_parts('token', 'user', 'id')
        self._test_bad_reply_missing_parts('token', 'user', 'name')
        self._test_bad_reply_missing_parts('token', 'user', 'domain', 'id')
        self._test_bad_reply_missing_parts('token', 'user', 'domain', 'name')
        self._test_bad_reply_missing_parts('token', 'user', 'domain')
        self._test_bad_reply_missing_parts('token', 'user')
        self._test_bad_reply_missing_parts('token', 'project', 'id')
        self._test_bad_reply_missing_parts('token', 'project', 'name')
        self._test_bad_reply_missing_parts('token', 'project', 'domain', 'id')
        self._test_bad_reply_missing_parts('token', 'project', 'domain',
                                           'name')
        self._test_bad_reply_missing_parts('token', 'project', 'domain')
        self._test_bad_reply_missing_parts('token', 'project')
        self._test_bad_reply_missing_parts('token', 'roles')

    def test_authorize_with_access_key(self):
        req = Request.blank('/v1/accesskey/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req, account_path='/v1/')
        self.assertEqual(req.environ['PATH_INFO'], '/v1/AUTH_PROJECT_ID/c/o')

    def test_authorize_with_access_key_in_container(self):
        # Only the account segment is rewritten, not a container that
        # happens to contain the access key string.
        req = Request.blank('/v1/accesskey/accesskey.c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req, account_path='/v1/')
        self.assertEqual(req.environ['PATH_INFO'],
                         '/v1/AUTH_PROJECT_ID/accesskey.c/o')

    def test_authorize_with_access_key_and_unquote_chars(self):
        req = Request.blank('/v1/ab%c=/c/o')
        req.environ['s3api.auth_details'] = {
            'access_key': u'access',
            'signature': u'signature',
            'string_to_sign': u'token',
        }
        req.get_response(self.middleware)
        self._assert_authorized(req, account_path='/v1/')
        self.assertEqual(req.environ['PATH_INFO'], '/v1/AUTH_PROJECT_ID/c/o')
| swift-master | test/unit/common/middleware/s3api/test_s3token.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.middleware.s3api.utils import Config
class TestS3ApiCfg(unittest.TestCase):
    """Verify that Config.update coerces values to the original types."""

    def test_config(self):
        conf = Config({'a': 'str', 'b': 10, 'c': True})

        # String values supplied by an update are converted to match
        # the type of the corresponding initial value.
        conf.update({'a': 'str2', 'b': '100', 'c': 'false'})

        self.assertEqual('str2', conf['a'])
        self.assertEqual(100, conf['b'])
        self.assertEqual(False, conf['c'])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_cfg.py |
# Copyright (c) 2011-2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from datetime import datetime
import email
import mock
import time
from swift.common import swob
from swift.common.middleware.s3api.s3api import filter_factory
from swift.common.middleware.s3api.etree import fromstring
from test.debug_logger import debug_logger
from test.unit.common.middleware.s3api.helpers import FakeSwift
class FakeApp(object):
    """Minimal stand-in for the app at the end of the proxy pipeline.

    Wraps a FakeSwift instance and emulates the PATH_INFO rewriting that a
    real Swift auth middleware performs for S3 requests.
    """
    container_existence_skip_cache = 0.0
    account_existence_skip_cache = 0.0

    def __init__(self):
        self._pipeline_final_app = self
        self.swift = FakeSwift()
        self.logger = debug_logger()

    def _update_s3_path_info(self, env):
        """
        For S3 requests, Swift auth middleware replaces a user name in
        env['PATH_INFO'] with a valid tenant id.
        E.g. '/v1/test:tester/bucket/object' will become
        '/v1/AUTH_test/bucket/object'. This method emulates the behavior.
        """
        access_key = env['s3api.auth_details']['access_key']
        tenant = access_key.rsplit(':', 1)[0]
        env['PATH_INFO'] = env['PATH_INFO'].replace(
            access_key, 'AUTH_' + tenant)

    def __call__(self, env, start_response):
        if 's3api.auth_details' in env:
            self._update_s3_path_info(env)

        if env['REQUEST_METHOD'] == 'TEST':
            def deny(req):
                # Assume swift owner, if not yet set
                req.environ.setdefault('REMOTE_USER', 'authorized')
                req.environ.setdefault('swift_owner', True)
                # But then default to blocking authz, to ensure we've
                # replaced the default auth system
                return swob.HTTPForbidden(request=req)
            env['swift.authorize'] = deny

        return self.swift(env, start_response)
class S3ApiTestCase(unittest.TestCase):
    """Common base class for s3api middleware unit tests.

    Wires an s3api middleware instance in front of FakeApp/FakeSwift and
    pre-registers responses for the account/bucket/object paths most tests
    touch, plus helpers for issuing requests and parsing S3 error bodies.
    """

    def __init__(self, name):
        unittest.TestCase.__init__(self, name)

    def setUp(self):
        # setup default config dict
        self.conf = {
            'allow_no_owner': False,
            'location': 'us-east-1',
            'dns_compliant_bucket_names': True,
            'max_bucket_listing': 1000,
            'max_parts_listing': 1000,
            'max_multi_delete_objects': 1000,
            's3_acl': False,
            'storage_domain': 'localhost',
            'auth_pipeline_check': True,
            'max_upload_part_num': 10000,
            'check_bucket_owner': False,
            'force_swift_request_proxy_log': False,
            'allow_multipart_uploads': True,
            'min_segment_size': 5242880,
            'log_level': 'debug'
        }
        self.app = FakeApp()
        self.swift = self.app.swift
        # note: self.conf has no __file__ key so check_pipeline will be skipped
        # when constructing self.s3api
        self.s3api = filter_factory({}, **self.conf)(self.app)
        self.logger = self.s3api.logger = self.swift.logger = debug_logger()

        self.swift.register('HEAD', '/v1/AUTH_test',
                            swob.HTTPOk, {}, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('PUT', '/v1/AUTH_test/bucket',
                            swob.HTTPCreated, {}, None)
        self.swift.register('POST', '/v1/AUTH_test/bucket',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket',
                            swob.HTTPNoContent, {}, None)

        self.swift.register('GET', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk, {'etag': 'object etag'}, "")
        self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                            swob.HTTPCreated, {'etag': 'object etag'}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
                            swob.HTTPNoContent, {}, None)

        # Pretend the cluster supports object versioning so controllers that
        # check get_swift_info() take the versioning-enabled code paths.
        self.mock_get_swift_info_result = {'object_versioning': {}}
        for s3api_path in (
            'controllers.obj',
            'controllers.bucket',
            'controllers.multi_delete',
            'controllers.versioning',
        ):
            patcher = mock.patch(
                'swift.common.middleware.s3api.%s.get_swift_info' % s3api_path,
                return_value=self.mock_get_swift_info_result)
            patcher.start()
            self.addCleanup(patcher.stop)

    def _get_error_code(self, body):
        """Return the <Code> text from an S3 XML Error body."""
        elem = fromstring(body, 'Error')
        return elem.find('./Code').text

    def _get_error_message(self, body):
        """Return the <Message> text from an S3 XML Error body."""
        elem = fromstring(body, 'Error')
        return elem.find('./Message').text

    def _test_method_error(self, method, path, response_class, headers=None,
                           env=None, expected_xml_tags=None,
                           expected_status=None):
        """Register *response_class* for *path*, issue *method* with S3
        auth headers, optionally verify status and the set of XML tags in
        the error body, and return the S3 error code.

        *headers* and *env* default to None rather than {}: the previous
        mutable defaults were mutated in place below, so 'Authorization',
        'Date' and 'REQUEST_METHOD' leaked into every later call that
        relied on the shared default dicts.
        """
        headers = dict(headers) if headers else {}
        env = dict(env) if env else {}
        if not path.startswith('/'):
            path = '/' + path  # add a missing slash before the path
        uri = '/v1/AUTH_test'
        if path != '/':
            uri += path
        self.swift.register(method, uri, response_class, headers, None)
        headers.update({'Authorization': 'AWS test:tester:hmac',
                        'Date': self.get_date_header()})
        env.update({'REQUEST_METHOD': method})
        req = swob.Request.blank(path, environ=env, headers=headers)
        status, headers, body = self.call_s3api(req)
        if expected_status is not None:
            self.assertEqual(status, expected_status)
        if expected_xml_tags is not None:
            elem = fromstring(body, 'Error')
            self.assertEqual(set(expected_xml_tags),
                             {x.tag for x in elem})
        return self._get_error_code(body)

    def get_date_header(self, skew=0):
        """Return an RFC 2822 date string *skew* seconds from now."""
        # email.utils.formatdate returns utc timestamp in default
        return email.utils.formatdate(time.time() + skew)

    def get_v4_amz_date_header(self, when=None):
        """Return *when* (default: utcnow) as an AWS SigV4 amz-date."""
        if when is None:
            when = datetime.utcnow()
        return when.strftime('%Y%m%dT%H%M%SZ')

    def call_app(self, req, app=None, expect_exception=False):
        """Drive *req* through *app* (default self.app) and collect the
        response.

        Returns (status, headers, body), or with expect_exception=True
        (status, headers, body, caught_exc) where caught_exc is any
        exception raised while iterating the response body.
        """
        if app is None:
            app = self.app

        req.headers.setdefault("User-Agent", "Mozzarella Foxfire")

        status = [None]
        headers = [None]

        def start_response(s, h, ei=None):
            status[0] = s
            headers[0] = swob.HeaderKeyDict(h)

        body_iter = app(req.environ, start_response)
        body = b''
        caught_exc = None
        try:
            # accumulate chunks so partial bodies are visible even if the
            # iterator raises part-way through
            for chunk in body_iter:
                body += chunk
        except Exception as exc:
            if expect_exception:
                caught_exc = exc
            else:
                raise

        if expect_exception:
            return status[0], headers[0], body, caught_exc
        else:
            return status[0], headers[0], body

    def call_s3api(self, req, **kwargs):
        """Shortcut for call_app() against the s3api-wrapped pipeline."""
        return self.call_app(req, app=self.s3api, **kwargs)
| swift-master | test/unit/common/middleware/s3api/__init__.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import functools
import sys
import traceback
from mock import patch, MagicMock
from swift.common import swob
from swift.common.swob import Request
from swift.common.utils import json
from swift.common.middleware.s3api.etree import tostring, Element, SubElement
from swift.common.middleware.s3api.subresource import ACL, ACLPrivate, User, \
encode_acl, AuthenticatedUsers, AllUsers, Owner, Grant, PERMISSIONS
from test.unit.common.middleware.s3api.test_s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.exceptions import NotMethodException
from test.unit.common.middleware.s3api import FakeSwift
XMLNS_XSI = 'http://www.w3.org/2001/XMLSchema-instance'
def s3acl(func=None, s3acl_only=False, versioning_enabled=True):
    """
    NOTE: s3acl decorator needs an instance of s3api testing framework.
    (i.e. An instance for first argument is necessary)
    """
    # Support both bare use (@s3acl) and parameterized use
    # (@s3acl(s3acl_only=True)): when called with keyword arguments only,
    # return a partial that will receive the function on the next call.
    if func is None:
        return functools.partial(
            s3acl,
            s3acl_only=s3acl_only,
            versioning_enabled=versioning_enabled)

    @functools.wraps(func)
    def s3acl_decorator(*args, **kwargs):
        if not args and not kwargs:
            raise NotMethodException('Use s3acl decorator for a method')

        def call_func(failing_point=''):
            # Run the wrapped test once; *failing_point* is appended to the
            # assertion message so failures identify which pass
            # (plain vs s3_acl) they came from.
            try:
                # For maintainability, we patch 204 status for every
                # get_container_info. if you want, we can rewrite the
                # statement easily with nested decorator like as:
                #
                # @s3acl
                # @patch(xxx)
                # def test_xxxx(self)
                fake_info = {'status': 204}
                if versioning_enabled:
                    fake_info['sysmeta'] = {
                        'versions-container': '\x00versions\x00bucket',
                    }
                with patch('swift.common.middleware.s3api.s3request.'
                           'get_container_info', return_value=fake_info):
                    func(*args, **kwargs)
            except AssertionError:
                # Make traceback message to clarify the assertion
                exc_type, exc_instance, exc_traceback = sys.exc_info()
                formatted_traceback = ''.join(traceback.format_tb(
                    exc_traceback))
                message = '\n%s\n%s' % (formatted_traceback,
                                        exc_type.__name__)
                if exc_instance.args:
                    message += ':\n%s' % (exc_instance.args[0],)
                message += failing_point
                raise exc_type(message)

        # args[0] is the test-case instance (the decorator wraps methods).
        instance = args[0]
        if not s3acl_only:
            # First pass: run the test with s3_acl disabled, then clear the
            # recorded FakeSwift calls before the second pass.
            call_func()
            instance.swift._calls = []

        # Second pass: enable s3_acl, seed ACL sysmeta for the test
        # account, and run the same test body again.
        instance.s3api.conf.s3_acl = True
        instance.swift.s3_acl = True
        owner = Owner('test:tester', 'test:tester')
        generate_s3acl_environ('test', instance.swift, owner)
        call_func(' (fail at s3_acl)')

    return s3acl_decorator
def _gen_test_headers(owner, grants=None, resource='container'):
    """Return sysmeta headers encoding an ACL for *resource*.

    :param owner: Owner instance for the ACL
    :param grants: list of Grant objects; defaults to a single
                   FULL_CONTROL grant for test:tester.  Defaults to None
                   rather than a shared mutable list literal to avoid the
                   mutable-default-argument pitfall.
    :param resource: 'container' or 'object'
    """
    if not grants:
        grants = [Grant(User('test:tester'), 'FULL_CONTROL')]
    return encode_acl(resource, ACL(owner, grants))
def _make_xml(grantee):
    """Build an AccessControlPolicy XML document, owned by test:tester,
    containing a single Grant of READ permission to *grantee*."""
    acp = Element('AccessControlPolicy')
    owner_elem = SubElement(acp, 'Owner')
    SubElement(owner_elem, 'ID').text = 'test:tester'
    SubElement(owner_elem, 'DisplayName').text = 'test:tester'
    grant_elem = SubElement(SubElement(acp, 'AccessControlList'), 'Grant')
    grant_elem.append(grantee)
    SubElement(grant_elem, 'Permission').text = 'READ'
    return tostring(acp)
def generate_s3acl_environ(account, swift, owner):
    """Register FakeSwift responses carrying sysmeta ACL headers.

    For each permission in PERMISSIONS, the registered bucket/object ACLs
    grant that permission to a user named after it under *account*
    (e.g. 'test:read' holds READ), with everything owned by *owner*.
    Also registers 'public' and 'authenticated' buckets for canned-ACL
    tests.
    """

    def gen_grant(permission):
        # generate Grant with a grantee named by "permission"
        account_name = '%s:%s' % (account, permission.lower())
        return Grant(User(account_name), permission)

    grants = [gen_grant(perm) for perm in PERMISSIONS]
    container_headers = _gen_test_headers(owner, grants)
    object_headers = _gen_test_headers(owner, grants, 'object')
    object_body = 'hello'
    object_headers['Content-Length'] = len(object_body)

    # TEST method is used to resolve a tenant name
    swift.register('TEST', '/v1/AUTH_test', swob.HTTPMethodNotAllowed,
                   {}, None)
    swift.register('TEST', '/v1/AUTH_X', swob.HTTPMethodNotAllowed,
                   {}, None)

    # for bucket
    swift.register('HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
                   container_headers, None)
    swift.register('HEAD', '/v1/AUTH_test/bucket+segments', swob.HTTPNoContent,
                   container_headers, None)
    swift.register('PUT', '/v1/AUTH_test/bucket',
                   swob.HTTPCreated, {}, None)
    swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
                   container_headers, json.dumps([]))
    swift.register('POST', '/v1/AUTH_test/bucket',
                   swob.HTTPNoContent, {}, None)
    swift.register('DELETE', '/v1/AUTH_test/bucket',
                   swob.HTTPNoContent, {}, None)

    # necessary for canned-acl tests
    public_headers = _gen_test_headers(owner, [Grant(AllUsers(), 'READ')])
    swift.register('GET', '/v1/AUTH_test/public', swob.HTTPNoContent,
                   public_headers, json.dumps([]))
    authenticated_headers = _gen_test_headers(
        owner, [Grant(AuthenticatedUsers(), 'READ')], 'bucket')
    swift.register('GET', '/v1/AUTH_test/authenticated',
                   swob.HTTPNoContent, authenticated_headers,
                   json.dumps([]))

    # for object
    swift.register('HEAD', '/v1/AUTH_test/bucket/object', swob.HTTPOk,
                   object_headers, None)
class TestS3ApiS3Acl(S3ApiTestCase):
    """Tests for ACL enforcement with the s3_acl option enabled.

    The seven canned-ACL tests previously duplicated an identical
    request-building block; they now share the _test_canned_acl helper,
    mirroring the existing _test_grant_header pattern.
    """

    def setUp(self):
        super(TestS3ApiS3Acl, self).setUp()

        self.s3api.conf.s3_acl = True
        self.swift.s3_acl = True

        account = 'test'
        owner_name = '%s:tester' % account
        self.default_owner = Owner(owner_name, owner_name)
        generate_s3acl_environ(account, self.swift, self.default_owner)

    def tearDown(self):
        self.s3api.conf.s3_acl = False

    def test_bucket_acl_PUT_with_other_owner(self):
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=tostring(
                                ACLPrivate(
                                    Owner(id='test:other',
                                          name='test:other')).elem()))
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'AccessDenied')

    def test_object_acl_PUT_xml_error(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body="invalid xml")
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'MalformedACLError')

    def _test_canned_acl(self, canned_acl):
        """PUT an object ACL with the given x-amz-acl canned value.

        Returns (req, status, headers, body) so callers can make
        additional assertions on the request environ as well as the
        response.
        """
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-acl': canned_acl})
        status, headers, body = self.call_s3api(req)
        return req, status, headers, body

    def test_canned_acl_private(self):
        req, status, headers, body = self._test_canned_acl('private')
        self.assertEqual(status.split()[0], '200')
        # authorization must actually have been consulted
        self.assertIn('REMOTE_USER', req.environ)

    def test_canned_acl_public_read(self):
        req, status, headers, body = self._test_canned_acl('public-read')
        self.assertEqual(status.split()[0], '200')

    def test_canned_acl_public_read_write(self):
        req, status, headers, body = self._test_canned_acl(
            'public-read-write')
        self.assertEqual(status.split()[0], '200')

    def test_canned_acl_authenticated_read(self):
        req, status, headers, body = self._test_canned_acl(
            'authenticated-read')
        self.assertEqual(status.split()[0], '200')

    def test_canned_acl_bucket_owner_read(self):
        req, status, headers, body = self._test_canned_acl(
            'bucket-owner-read')
        self.assertEqual(status.split()[0], '200')

    def test_canned_acl_bucket_owner_full_control(self):
        req, status, headers, body = self._test_canned_acl(
            'bucket-owner-full-control')
        self.assertEqual(status.split()[0], '200')

    def test_invalid_canned_acl(self):
        req, status, headers, body = self._test_canned_acl('invalid')
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')

    def _test_grant_header(self, permission):
        """PUT an object ACL granting *permission* to test:tester via the
        corresponding x-amz-grant-* header; return (status, headers, body).
        """
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-grant-' + permission:
                                     'id=test:tester'})
        return self.call_s3api(req)

    def test_grant_read(self):
        status, headers, body = self._test_grant_header('read')
        self.assertEqual(status.split()[0], '200')

    def test_grant_write(self):
        status, headers, body = self._test_grant_header('write')
        self.assertEqual(status.split()[0], '200')

    def test_grant_read_acp(self):
        status, headers, body = self._test_grant_header('read-acp')
        self.assertEqual(status.split()[0], '200')

    def test_grant_write_acp(self):
        status, headers, body = self._test_grant_header('write-acp')
        self.assertEqual(status.split()[0], '200')

    def test_grant_full_control(self):
        status, headers, body = self._test_grant_header('full-control')
        self.assertEqual(status.split()[0], '200')

    def test_grant_invalid_permission(self):
        status, headers, body = self._test_grant_header('invalid')
        self.assertEqual(self._get_error_code(body), 'MissingSecurityHeader')

    def test_grant_with_both_header_and_xml(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-grant-full-control':
                                     'id=test:tester'},
                            body=tostring(
                                ACLPrivate(
                                    Owner(id='test:tester',
                                          name='test:tester')).elem()))
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'UnexpectedContent')

    def test_grant_with_both_header_and_canned_acl(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-grant-full-control':
                                     'id=test:tester',
                                     'x-amz-acl': 'public-read'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidRequest')

    def test_grant_email(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-grant-read': 'emailAddress=a@b.c'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')

    def test_grant_email_xml(self):
        grantee = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
        grantee.set('{%s}type' % XMLNS_XSI, 'AmazonCustomerByEmail')
        SubElement(grantee, 'EmailAddress').text = 'Grantees@email.com'
        xml = _make_xml(grantee=grantee)
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')

    def test_grant_invalid_group_xml(self):
        grantee = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
        grantee.set('{%s}type' % XMLNS_XSI, 'Invalid')
        xml = _make_xml(grantee=grantee)
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'MalformedACLError')

    def test_grant_authenticated_users(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-grant-read':
                                     'uri="http://acs.amazonaws.com/groups/'
                                     'global/AuthenticatedUsers"'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

    def test_grant_all_users(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-grant-read':
                                     'uri="http://acs.amazonaws.com/groups/'
                                     'global/AllUsers"'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

    def test_grant_all_users_with_uppercase_type(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-grant-read':
                                     'URI="http://acs.amazonaws.com/groups/'
                                     'global/AllUsers"'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

    def test_grant_invalid_uri(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-grant-read':
                                     'uri="http://localhost/"'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')

    def test_grant_invalid_uri_xml(self):
        grantee = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
        grantee.set('{%s}type' % XMLNS_XSI, 'Group')
        SubElement(grantee, 'URI').text = 'invalid'
        xml = _make_xml(grantee)
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')

    def test_grant_invalid_target(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'x-amz-grant-read': 'key=value'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'InvalidArgument')

    def _test_bucket_acl_GET(self, account):
        """GET the bucket ACL as *account*; return (status, headers, body)."""
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS %s:hmac' % account,
                                     'Date': self.get_date_header()})
        return self.call_s3api(req)

    def test_bucket_acl_GET_without_permission(self):
        status, headers, body = self._test_bucket_acl_GET('test:other')
        self.assertEqual(self._get_error_code(body), 'AccessDenied')

    def test_bucket_acl_GET_with_read_acp_permission(self):
        status, headers, body = self._test_bucket_acl_GET('test:read_acp')
        self.assertEqual(status.split()[0], '200')

    def test_bucket_acl_GET_with_fullcontrol_permission(self):
        status, headers, body = self._test_bucket_acl_GET('test:full_control')
        self.assertEqual(status.split()[0], '200')

    def test_bucket_acl_GET_with_owner_permission(self):
        status, headers, body = self._test_bucket_acl_GET('test:tester')
        self.assertEqual(status.split()[0], '200')

    def _test_bucket_acl_PUT(self, account, permission='FULL_CONTROL'):
        """PUT a bucket ACL granting *permission* to *account*, issued by
        *account*; return (status, headers, body)."""
        acl = ACL(self.default_owner, [Grant(User(account), permission)])
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS %s:hmac' % account,
                                     'Date': self.get_date_header()},
                            body=tostring(acl.elem()))
        return self.call_s3api(req)

    def test_bucket_acl_PUT_without_permission(self):
        status, headers, body = self._test_bucket_acl_PUT('test:other')
        self.assertEqual(self._get_error_code(body), 'AccessDenied')

    def test_bucket_acl_PUT_with_write_acp_permission(self):
        status, headers, body = self._test_bucket_acl_PUT('test:write_acp')
        self.assertEqual(status.split()[0], '200')

    def test_bucket_acl_PUT_with_fullcontrol_permission(self):
        status, headers, body = self._test_bucket_acl_PUT('test:full_control')
        self.assertEqual(status.split()[0], '200')

    def test_bucket_acl_PUT_with_owner_permission(self):
        status, headers, body = self._test_bucket_acl_PUT('test:tester')
        self.assertEqual(status.split()[0], '200')

    def _test_object_acl_GET(self, account):
        """GET the object ACL as *account*; return (status, headers, body)."""
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS %s:hmac' % account,
                                     'Date': self.get_date_header()})
        return self.call_s3api(req)

    def test_object_acl_GET_without_permission(self):
        status, headers, body = self._test_object_acl_GET('test:other')
        self.assertEqual(self._get_error_code(body), 'AccessDenied')

    def test_object_acl_GET_with_read_acp_permission(self):
        status, headers, body = self._test_object_acl_GET('test:read_acp')
        self.assertEqual(status.split()[0], '200')

    def test_object_acl_GET_with_fullcontrol_permission(self):
        status, headers, body = self._test_object_acl_GET('test:full_control')
        self.assertEqual(status.split()[0], '200')

    def test_object_acl_GET_with_owner_permission(self):
        status, headers, body = self._test_object_acl_GET('test:tester')
        self.assertEqual(status.split()[0], '200')

    def _test_object_acl_PUT(self, account, permission='FULL_CONTROL'):
        """PUT an object ACL granting *permission* to *account*, issued by
        *account*; return (status, headers, body)."""
        acl = ACL(self.default_owner, [Grant(User(account), permission)])
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS %s:hmac' % account,
                                     'Date': self.get_date_header()},
                            body=tostring(acl.elem()))
        return self.call_s3api(req)

    def test_object_acl_PUT_without_permission(self):
        status, headers, body = self._test_object_acl_PUT('test:other')
        self.assertEqual(self._get_error_code(body), 'AccessDenied')

    def test_object_acl_PUT_with_write_acp_permission(self):
        status, headers, body = self._test_object_acl_PUT('test:write_acp')
        self.assertEqual(status.split()[0], '200')

    def test_object_acl_PUT_with_fullcontrol_permission(self):
        status, headers, body = self._test_object_acl_PUT('test:full_control')
        self.assertEqual(status.split()[0], '200')

    def test_object_acl_PUT_with_owner_permission(self):
        status, headers, body = self._test_object_acl_PUT('test:tester')
        self.assertEqual(status.split()[0], '200')

    def test_s3acl_decorator(self):
        @s3acl
        def non_class_s3acl_error():
            raise TypeError()

        class FakeClass(object):
            def __init__(self):
                self.s3api = MagicMock()
                self.swift = FakeSwift()

            @s3acl
            def s3acl_error(self):
                raise TypeError()

            @s3acl
            def s3acl_assert_fail(self):
                assert False

            @s3acl(s3acl_only=True)
            def s3acl_s3only_error(self):
                if self.s3api.conf.s3_acl:
                    raise TypeError()

            @s3acl(s3acl_only=True)
            def s3acl_s3only_no_error(self):
                if not self.s3api.conf.s3_acl:
                    raise TypeError()

        fake_class = FakeClass()

        self.assertRaises(NotMethodException, non_class_s3acl_error)
        self.assertRaises(TypeError, fake_class.s3acl_error)
        self.assertRaises(AssertionError, fake_class.s3acl_assert_fail)
        self.assertRaises(TypeError, fake_class.s3acl_s3only_error)
        self.assertIsNone(fake_class.s3acl_s3only_no_error())


if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_s3_acl.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from mock import patch
from swift.common.swob import Request, HTTPNoContent
from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement
from test.unit.common.middleware.s3api import S3ApiTestCase
class TestS3ApiVersioning(S3ApiTestCase):
    """GET/PUT ?versioning tests, exercised for both bucket and object
    request paths via shared _versioning_* helpers."""

    def _versioning_GET(self, path):
        """Issue GET ?versioning against *path*; return the response."""
        req = Request.blank('%s?versioning' % path,
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        return status, headers, body

    def _versioning_GET_not_configured(self, path):
        self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                            HTTPNoContent, {}, None)

        status, headers, body = self._versioning_GET(path)
        self.assertEqual(status.split()[0], '200')

        # An empty VersioningConfiguration (no Status element) means
        # versioning was never configured on the bucket.
        elem = fromstring(body, 'VersioningConfiguration')
        self.assertEqual(list(elem), [])

    def _versioning_GET_enabled(self, path):
        self.swift.register('HEAD', '/v1/AUTH_test/bucket', HTTPNoContent, {
            'X-Container-Sysmeta-Versions-Enabled': 'True',
        }, None)

        status, headers, body = self._versioning_GET(path)
        self.assertEqual(status.split()[0], '200')

        elem = fromstring(body, 'VersioningConfiguration')
        status = elem.find('./Status').text
        self.assertEqual(status, 'Enabled')

    def _versioning_GET_suspended(self, path):
        self.swift.register('HEAD', '/v1/AUTH_test/bucket', HTTPNoContent, {
            'X-Container-Sysmeta-Versions-Enabled': 'False',
        }, None)

        # Bug fix: this previously issued the GET against the hard-coded
        # path '/bucket/object', ignoring the *path* argument, so the
        # bucket-path variant of this test never exercised its own path.
        status, headers, body = self._versioning_GET(path)
        self.assertEqual(status.split()[0], '200')

        elem = fromstring(body, 'VersioningConfiguration')
        status = elem.find('./Status').text
        self.assertEqual(status, 'Suspended')

    def _versioning_PUT_error(self, path):
        # Root tag is not VersioningConfiguration
        elem = Element('foo')
        SubElement(elem, 'Status').text = 'Enabled'
        xml = tostring(elem)

        req = Request.blank('%s?versioning' % path,
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '400')

        # Status is not "Enabled" or "Suspended"
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'enabled'
        xml = tostring(elem)

        req = Request.blank('%s?versioning' % path,
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '400')

    def _versioning_PUT_enabled(self, path):
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Enabled'
        xml = tostring(elem)

        self.swift.register('POST', '/v1/AUTH_test/bucket', HTTPNoContent,
                            {'X-Container-Sysmeta-Versions-Enabled': 'True'},
                            None)

        req = Request.blank('%s?versioning' % path,
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

        # the s3api request must have been translated to a container POST
        # setting X-Versions-Enabled
        calls = self.swift.calls_with_headers
        self.assertEqual(calls[-1][0], 'POST')
        self.assertIn(('X-Versions-Enabled', 'true'),
                      list(calls[-1][2].items()))

    def _versioning_PUT_suspended(self, path):
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Suspended'
        xml = tostring(elem)

        self.swift.register('POST', '/v1/AUTH_test/bucket', HTTPNoContent,
                            {'x-container-sysmeta-versions-enabled': 'False'},
                            None)

        req = Request.blank('%s?versioning' % path,
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

        calls = self.swift.calls_with_headers
        self.assertEqual(calls[-1][0], 'POST')
        self.assertIn(('X-Versions-Enabled', 'false'),
                      list(calls[-1][2].items()))

    def test_object_versioning_GET_not_configured(self):
        self._versioning_GET_not_configured('/bucket/object')

    def test_object_versioning_GET_enabled(self):
        self._versioning_GET_enabled('/bucket/object')

    def test_object_versioning_GET_suspended(self):
        self._versioning_GET_suspended('/bucket/object')

    def test_object_versioning_PUT_error(self):
        self._versioning_PUT_error('/bucket/object')

    def test_object_versioning_PUT_enabled(self):
        self._versioning_PUT_enabled('/bucket/object')

    def test_object_versioning_PUT_suspended(self):
        self._versioning_PUT_suspended('/bucket/object')

    def test_bucket_versioning_GET_not_configured(self):
        self._versioning_GET_not_configured('/bucket')

    def test_bucket_versioning_GET_enabled(self):
        self._versioning_GET_enabled('/bucket')

    def test_bucket_versioning_GET_suspended(self):
        self._versioning_GET_suspended('/bucket')

    def test_bucket_versioning_PUT_error(self):
        self._versioning_PUT_error('/bucket')

    def test_object_versioning_PUT_not_implemented(self):
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Enabled'
        xml = tostring(elem)

        req = Request.blank('/bucket?versioning',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        # cluster without object_versioning support must reply 501
        with patch('swift.common.middleware.s3api.controllers.versioning.'
                   'get_swift_info', return_value={}):
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '501', body)

    def test_bucket_versioning_PUT_enabled(self):
        self._versioning_PUT_enabled('/bucket')

    def test_bucket_versioning_PUT_suspended(self):
        self._versioning_PUT_suspended('/bucket')


if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_versioning.py |
# Copyright (c) 2011-2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import unittest
from mock import patch, MagicMock
import calendar
from datetime import datetime
import mock
import requests
import json
import six
from paste.deploy import loadwsgi
from six.moves.urllib.parse import unquote, quote
import swift.common.middleware.s3api
from swift.common.middleware.s3api.s3response import ErrorResponse, \
AccessDenied
from swift.common.middleware.s3api.utils import Config
from swift.common.middleware.keystoneauth import KeystoneAuth
from swift.common import swob, registry
from swift.common.swob import Request
from swift.common.utils import md5, get_logger
from keystonemiddleware.auth_token import AuthProtocol
from keystoneauth1.access import AccessInfoV2
from test.debug_logger import debug_logger, FakeStatsdClient
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.helpers import FakeSwift
from test.unit.common.middleware.s3api.test_s3token import \
GOOD_RESPONSE_V2, GOOD_RESPONSE_V3
from swift.common.middleware.s3api.s3request import SigV4Request, S3Request
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.s3api import filter_factory, \
S3ApiMiddleware
from swift.common.middleware.s3api.s3token import S3Token
class TestListingMiddleware(S3ApiTestCase):
    """Tests for the listing-translation part of the s3api pipeline.

    The middleware splits an ``s3_etag`` hint embedded in a Swift JSON
    container listing's ``hash`` field out into a separate ``s3_etag`` key;
    non-JSON (or non-JSON-content-typed) bodies must pass through untouched.
    """
    def test_s3_etag_in_json(self):
        # This translation happens all the time, even on normal swift requests
        body_data = json.dumps([
            {'name': 'obj1', 'hash': '0123456789abcdef0123456789abcdef'},
            {'name': 'obj2', 'hash': 'swiftetag; s3_etag=mu-etag'},
            {'name': 'obj2', 'hash': 'swiftetag; something=else'},
            {'subdir': 'path/'},
        ]).encode('ascii')
        self.swift.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {'Content-Type': 'application/json; charset=UTF-8'},
            body_data)
        req = Request.blank('/v1/a/c')
        status, headers, body = self.call_s3api(req)
        # Only the 's3_etag=' suffix is extracted (and quoted); other
        # parameters after the semicolon are left alone.
        self.assertEqual(json.loads(body), [
            {'name': 'obj1', 'hash': '0123456789abcdef0123456789abcdef'},
            {'name': 'obj2', 'hash': 'swiftetag', 's3_etag': '"mu-etag"'},
            {'name': 'obj2', 'hash': 'swiftetag; something=else'},
            {'subdir': 'path/'},
        ])
    def test_s3_etag_non_json(self):
        # A body that fails to parse as JSON is passed through unchanged.
        self.swift.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {'Content-Type': 'application/json; charset=UTF-8'},
            b'Not actually JSON')
        req = Request.blank('/v1/a/c')
        status, headers, body = self.call_s3api(req)
        self.assertEqual(body, b'Not actually JSON')
        # Yes JSON, but wrong content-type
        body_data = json.dumps([
            {'name': 'obj1', 'hash': '0123456789abcdef0123456789abcdef'},
            {'name': 'obj2', 'hash': 'swiftetag; s3_etag=mu-etag'},
            {'name': 'obj2', 'hash': 'swiftetag; something=else'},
            {'subdir': 'path/'},
        ]).encode('ascii')
        self.swift.register(
            'GET', '/v1/a/c', swob.HTTPOk,
            {'Content-Type': 'text/plain; charset=UTF-8'},
            body_data)
        req = Request.blank('/v1/a/c')
        status, headers, body = self.call_s3api(req)
        self.assertEqual(body, body_data)
class TestS3ApiMiddleware(S3ApiTestCase):
    """Tests for S3ApiMiddleware: config loading, auth parsing, errors."""
    def setUp(self):
        super(TestS3ApiMiddleware, self).setUp()
        # Plain (non-S3) Swift path, used to verify passthrough behaviour.
        self.swift.register('GET', '/something', swob.HTTPOk, {}, 'FAKE APP')
    def test_init_config(self):
        """Verify default, non-default and invalid config handling."""
        # verify config loading
        # note: test confs do not have __file__ attribute so check_pipeline
        # will be short-circuited
        # check all defaults
        expected = dict(Config())
        expected.update({
            'auth_pipeline_check': True,
            'check_bucket_owner': False,
            'max_bucket_listing': 1000,
            'max_multi_delete_objects': 1000,
            'max_parts_listing': 1000,
            'max_upload_part_num': 1000,
            'min_segment_size': 5242880,
            'multi_delete_concurrency': 2,
            's3_acl': False,
            'cors_preflight_allow_origin': [],
            'ratelimit_as_client_error': False,
        })
        s3api = S3ApiMiddleware(None, {})
        self.assertEqual(expected, s3api.conf)
        # check all non-defaults are loaded
        conf = {
            'storage_domain': 'somewhere,some.other.where',
            'location': 'us-west-1',
            'force_swift_request_proxy_log': True,
            'dns_compliant_bucket_names': False,
            'allow_multipart_uploads': False,
            'allow_no_owner': True,
            'allowable_clock_skew': 300,
            'auth_pipeline_check': False,
            'check_bucket_owner': True,
            'max_bucket_listing': 500,
            'max_multi_delete_objects': 600,
            'max_parts_listing': 70,
            'max_upload_part_num': 800,
            'min_segment_size': 1000000,
            'multi_delete_concurrency': 1,
            's3_acl': True,
            'cors_preflight_allow_origin': 'foo.example.com,bar.example.com',
            'ratelimit_as_client_error': True,
        }
        s3api = S3ApiMiddleware(None, conf)
        # comma-separated conf strings are parsed into lists, and
        # 'storage_domain' is renamed to plural 'storage_domains'
        conf['cors_preflight_allow_origin'] = \
            conf['cors_preflight_allow_origin'].split(',')
        conf['storage_domains'] = conf.pop('storage_domain').split(',')
        self.assertEqual(conf, s3api.conf)
        # test allow_origin list with a '*' fails.
        conf = {
            'storage_domain': 'somewhere',
            'location': 'us-west-1',
            'force_swift_request_proxy_log': True,
            'dns_compliant_bucket_names': False,
            'allow_multipart_uploads': False,
            'allow_no_owner': True,
            'allowable_clock_skew': 300,
            'auth_pipeline_check': False,
            'check_bucket_owner': True,
            'max_bucket_listing': 500,
            'max_multi_delete_objects': 600,
            'max_parts_listing': 70,
            'max_upload_part_num': 800,
            'min_segment_size': 1000000,
            'multi_delete_concurrency': 1,
            's3_acl': True,
            'cors_preflight_allow_origin': 'foo.example.com,bar.example.com,*',
        }
        with self.assertRaises(ValueError) as ex:
            S3ApiMiddleware(None, conf)
        self.assertIn("if cors_preflight_allow_origin should include all "
                      "domains, * must be the only entry", str(ex.exception))
        # each of these settings must be a strictly positive integer
        def check_bad_positive_ints(**kwargs):
            bad_conf = dict(conf, **kwargs)
            self.assertRaises(ValueError, S3ApiMiddleware, None, bad_conf)
        check_bad_positive_ints(allowable_clock_skew=-100)
        check_bad_positive_ints(allowable_clock_skew=0)
        check_bad_positive_ints(max_bucket_listing=-100)
        check_bad_positive_ints(max_bucket_listing=0)
        check_bad_positive_ints(max_multi_delete_objects=-100)
        check_bad_positive_ints(max_multi_delete_objects=0)
        check_bad_positive_ints(max_parts_listing=-100)
        check_bad_positive_ints(max_parts_listing=0)
        check_bad_positive_ints(max_upload_part_num=-100)
        check_bad_positive_ints(max_upload_part_num=0)
        check_bad_positive_ints(min_segment_size=-100)
        check_bad_positive_ints(min_segment_size=0)
        check_bad_positive_ints(multi_delete_concurrency=-100)
        check_bad_positive_ints(multi_delete_concurrency=0)
    def test_init_passes_wsgi_conf_file_to_check_pipeline(self):
        """When the conf has __file__, init loads the pipeline from it."""
        # verify that check_pipeline is called during init: add __file__ attr
        # to test config to make it more representative of middleware being
        # init'd by wgsi
        context = mock.Mock()
        with patch("swift.common.middleware.s3api.s3api.loadcontext",
                   return_value=context) as loader, \
                patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
                as pipeline:
            conf = dict(self.conf,
                        auth_pipeline_check=True,
                        __file__='proxy-conf-file')
            pipeline.return_value = 's3api tempauth proxy-server'
            self.s3api = S3ApiMiddleware(None, conf)
        # the wsgi conf file path must be handed to the context loader
        loader.assert_called_with(loadwsgi.APP, 'proxy-conf-file')
        pipeline.assert_called_with(context)
    def test_init_logger(self):
        """The middleware gets its own 's3api' logger and statsd prefix."""
        proxy_logger = get_logger({}, log_route='proxy-server').logger
        s3api = S3ApiMiddleware(None, {})
        self.assertEqual('s3api', s3api.logger.name)
        self.assertEqual('s3api', s3api.logger.logger.name)
        # must not share the proxy-server's underlying logger
        self.assertIsNot(s3api.logger.logger, proxy_logger)
        self.assertEqual('swift', s3api.logger.server)
        # no statsd client unless log_statsd_host is configured
        self.assertIsNone(s3api.logger.logger.statsd_client)
        with mock.patch('swift.common.utils.StatsdClient', FakeStatsdClient):
            s3api = S3ApiMiddleware(None, {'log_name': 'proxy-server',
                                           'log_statsd_host': '1.2.3.4'})
        s3api.logger.increment('test-metric')
        self.assertEqual('s3api', s3api.logger.name)
        self.assertEqual('s3api', s3api.logger.logger.name)
        self.assertIsNot(s3api.logger.logger, proxy_logger)
        # log_name is reflected in the server attribute...
        self.assertEqual('proxy-server', s3api.logger.server)
        # ...but statsd metrics are still emitted under the 's3api.' prefix
        self.assertEqual('s3api.', s3api.logger.logger.statsd_client._prefix)
        client = s3api.logger.logger.statsd_client
        self.assertEqual({'test-metric': 1}, client.get_increment_counts())
        self.assertEqual(1, len(client.sendto_calls))
        self.assertEqual(b's3api.test-metric:1|c', client.sendto_calls[0][0])
def test_non_s3_request_passthrough(self):
req = Request.blank('/something')
status, headers, body = self.call_s3api(req)
self.assertEqual(body, b'FAKE APP')
def test_bad_format_authorization(self):
req = Request.blank('/something',
headers={'Authorization': 'hoge',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
self.assertEqual(
{'403.AccessDenied.invalid_header_auth': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_bad_method(self):
req = Request.blank('/',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
self.assertEqual(
{'405.MethodNotAllowed': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_bad_method_but_method_exists_in_controller(self):
req = Request.blank(
'/bucket',
environ={'REQUEST_METHOD': '_delete_segments_bucket'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
self.assertEqual(
{'405.MethodNotAllowed': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
    def test_path_info_encode(self):
        """Percent-encoded path segments are decoded before hitting Swift."""
        bucket_name = 'b%75cket'        # %75 == 'u'
        object_name = 'ob%6aect:1'      # %6a == 'j'
        self.swift.register('GET', '/v1/AUTH_test/bucket/object:1',
                            swob.HTTPOk, {}, None)
        req = Request.blank('/%s/%s' % (bucket_name, object_name),
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        raw_path_info = "/%s/%s" % (bucket_name, object_name)
        # PATH_INFO in the environ must be the unquoted form
        path_info = req.environ['PATH_INFO']
        self.assertEqual(path_info, unquote(raw_path_info))
        self.assertEqual(req.path, quote(path_info))
        # and the backend path must use the decoded names
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual('/v1/AUTH_test/bucket/object:1',
                         req.environ['swift.backend_path'])
    def test_canonical_string_v2(self):
        """
        The hashes here were generated by running the same requests against
        boto.utils.canonical_string
        """
        def canonical_string(path, headers):
            # Build a minimal WSGI environ for the given path/headers and
            # return the v2 string-to-sign that S3Request computes from it.
            if '?' in path:
                path, query_string = path.split('?', 1)
            else:
                query_string = ''
            env = {
                'REQUEST_METHOD': 'GET',
                'PATH_INFO': path,
                'QUERY_STRING': query_string,
                'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
            }
            for header, value in headers.items():
                header = 'HTTP_' + header.replace('-', '_').upper()
                # Content-Type/Content-Length are non-HTTP_ CGI variables
                if header in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
                    header = header[5:]
                env[header] = value
            with patch('swift.common.middleware.s3api.s3request.'
                       'S3Request._validate_headers'), \
                    patch('swift.common.middleware.s3api.s3request.'
                          'S3Request._validate_dates'):
                req = S3Request(env)
            return req.environ['s3api.auth_details']['string_to_sign']
        def verify(hash, path, headers):
            # compare against the boto-generated md5 of the canonical string
            s = canonical_string(path, headers)
            self.assertEqual(hash, md5(s, usedforsecurity=False).hexdigest())
        verify('6dd08c75e42190a1ce9468d1fd2eb787', '/bucket/object',
               {'Content-Type': 'text/plain', 'X-Amz-Something': 'test',
                'Date': 'whatever'})
        verify('c8447135da232ae7517328f3429df481', '/bucket/object',
               {'Content-Type': 'text/plain', 'X-Amz-Something': 'test'})
        verify('bf49304103a4de5c325dce6384f2a4a2', '/bucket/object',
               {'content-type': 'text/plain'})
        verify('be01bd15d8d47f9fe5e2d9248cc6f180', '/bucket/object', {})
        verify('e9ec7dca45eef3e2c7276af23135e896', '/bucket/object',
               {'Content-MD5': 'somestuff'})
        verify('a822deb31213ad09af37b5a7fe59e55e', '/bucket/object?acl', {})
        verify('cce5dd1016595cb706c93f28d3eaa18f', '/bucket/object',
               {'Content-Type': 'text/plain', 'X-Amz-A': 'test',
                'X-Amz-Z': 'whatever', 'X-Amz-B': 'lalala',
                'X-Amz-Y': 'lalalalalalala'})
        verify('7506d97002c7d2de922cc0ec34af8846', '/bucket/object',
               {'Content-Type': None, 'X-Amz-Something': 'test'})
        verify('28f76d6162444a193b612cd6cb20e0be', '/bucket/object',
               {'Content-Type': None,
                'X-Amz-Date': 'Mon, 11 Jul 2011 10:52:57 +0000',
                'Date': 'Tue, 12 Jul 2011 10:52:57 +0000'})
        verify('ed6971e3eca5af4ee361f05d7c272e49', '/bucket/object',
               {'Content-Type': None,
                'Date': 'Tue, 12 Jul 2011 10:52:57 +0000'})
        verify('41ecd87e7329c33fea27826c1c9a6f91', '/bucket/object?cors', {})
        verify('d91b062f375d8fab407d6dab41fd154e', '/bucket/object?tagging',
               {})
        verify('ebab878a96814b30eb178e27efb3973f', '/bucket/object?restore',
               {})
        verify('f6bf1b2d92b054350d3679d28739fc69', '/bucket/object?'
               'response-cache-control&response-content-disposition&'
               'response-content-encoding&response-content-language&'
               'response-content-type&response-expires', {})
        # None, '' and an absent Content-Type must all canonicalize the same
        str1 = canonical_string('/', headers={'Content-Type': None,
                                              'X-Amz-Something': 'test'})
        str2 = canonical_string('/', headers={'Content-Type': '',
                                              'X-Amz-Something': 'test'})
        str3 = canonical_string('/', headers={'X-Amz-Something': 'test'})
        self.assertEqual(str1, str2)
        self.assertEqual(str2, str3)
        # Note that boto does not do proper stripping (as of 2.42.0).
        # These were determined by examining the StringToSignBytes element of
        # resulting SignatureDoesNotMatch errors from AWS.
        str1 = canonical_string('/', {'Content-Type': 'text/plain',
                                      'Content-MD5': '##'})
        str2 = canonical_string('/', {'Content-Type': '\x01\x02text/plain',
                                      'Content-MD5': '\x1f ##'})
        str3 = canonical_string('/', {'Content-Type': 'text/plain \x10',
                                      'Content-MD5': '##\x18'})
        self.assertEqual(str1, str2)
        self.assertEqual(str2, str3)
def test_signed_urls_expired(self):
expire = '1000000000'
req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
'AWSAccessKeyId=test:tester' % expire,
environ={'REQUEST_METHOD': 'GET'},
headers={'Date': self.get_date_header()})
req.headers['Date'] = datetime.utcnow()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
self.assertEqual(
{'403.AccessDenied.expired': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
    def test_signed_urls(self):
        """A presigned v2 URL with a future Expires is accepted."""
        # Set expire to last 32b timestamp value
        # This number can't be higher, because it breaks tests on 32b systems
        expire = '2147483647'  # 19 Jan 2038 03:14:07
        utc_date = datetime.utcnow()
        req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
                            'AWSAccessKeyId=test:tester&Timestamp=%s' %
                            (expire, utc_date.isoformat().rsplit('.')[0]),
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Date': self.get_date_header()})
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual('/v1/AUTH_test/bucket/object',
                         req.environ['swift.backend_path'])
        # presigned access must not leak an Authorization header to Swift
        for _, path, headers in self.swift.calls_with_headers:
            self.assertNotIn('Authorization', headers)
    def test_signed_urls_no_timestamp(self):
        """A presigned v2 URL works even without any Date/Timestamp."""
        expire = '2147483647'  # 19 Jan 2038 03:14:07
        req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
                            'AWSAccessKeyId=test:tester' % expire,
                            environ={'REQUEST_METHOD': 'GET'})
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        # Curious! But actually S3 doesn't verify any x-amz-date/date headers
        # for signed_url access and it also doesn't check timestamp
        self.assertEqual(status.split()[0], '200')
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual('/v1/AUTH_test/bucket/object',
                         req.environ['swift.backend_path'])
        for _, _, headers in self.swift.calls_with_headers:
            self.assertNotIn('Authorization', headers)
def test_signed_urls_invalid_expire(self):
expire = 'invalid'
req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
'AWSAccessKeyId=test:tester' % expire,
environ={'REQUEST_METHOD': 'GET'},
headers={'Date': self.get_date_header()})
req.headers['Date'] = datetime.utcnow()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
self.assertEqual(
{'403.AccessDenied.invalid_expires': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_signed_urls_no_sign(self):
expire = '2147483647' # 19 Jan 2038 03:14:07
req = Request.blank('/bucket/object?Expires=%s&'
'AWSAccessKeyId=test:tester' % expire,
environ={'REQUEST_METHOD': 'GET'},
headers={'Date': self.get_date_header()})
req.headers['Date'] = datetime.utcnow()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
self.assertEqual(
{'403.AccessDenied.invalid_query_auth': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_signed_urls_no_access(self):
expire = '2147483647' # 19 Jan 2038 03:14:07
req = Request.blank('/bucket/object?Expires=%s&'
'AWSAccessKeyId=' % expire,
environ={'REQUEST_METHOD': 'GET'})
req.headers['Date'] = datetime.utcnow()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
self.assertEqual(
{'403.AccessDenied.invalid_query_auth': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
    def test_signed_urls_v4(self):
        """A well-formed SigV4 presigned URL is accepted."""
        req = Request.blank(
            '/bucket/object'
            '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
            '&X-Amz-Credential=test:tester/%s/us-east-1/s3/aws4_request'
            '&X-Amz-Date=%s'
            '&X-Amz-Expires=1000'
            '&X-Amz-SignedHeaders=host'
            '&X-Amz-Signature=X' % (
                self.get_v4_amz_date_header().split('T', 1)[0],
                self.get_v4_amz_date_header()),
            headers={'Date': self.get_date_header()},
            environ={'REQUEST_METHOD': 'GET'})
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual('/v1/AUTH_test/bucket/object',
                         req.environ['swift.backend_path'])
        self.assertEqual(status.split()[0], '200', body)
        # neither the AWS auth nor a token may reach the Swift backend
        for _, _, headers in self.swift.calls_with_headers:
            self.assertNotIn('Authorization', headers)
            self.assertNotIn('X-Auth-Token', headers)
    def test_signed_urls_v4_bad_credential(self):
        """Malformed X-Amz-Credential scopes produce a 400 with the exact
        AWS error message (and optional extra XML payload)."""
        def test(credential, message, extra=b''):
            req = Request.blank(
                '/bucket/object'
                '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
                '&X-Amz-Credential=%s'
                '&X-Amz-Date=%s'
                '&X-Amz-Expires=1000'
                '&X-Amz-SignedHeaders=host'
                '&X-Amz-Signature=X' % (
                    credential,
                    self.get_v4_amz_date_header()),
                headers={'Date': self.get_date_header()},
                environ={'REQUEST_METHOD': 'GET'})
            req.content_type = 'text/plain'
            # reset counters so each sub-case can assert exactly one metric
            self.s3api.logger.logger.clear()
            status, headers, body = self.call_s3api(req)
            self.assertEqual(status.split()[0], '400', body)
            self.assertEqual(self._get_error_code(body),
                             'AuthorizationQueryParametersError')
            self.assertEqual(self._get_error_message(body), message)
            self.assertIn(extra, body)
            self.assertEqual(
                {'400.AuthorizationQueryParametersError': 1},
                self.s3api.logger.logger.statsd_client.get_increment_counts())
        dt = self.get_v4_amz_date_header().split('T', 1)[0]
        # bad credential-scope date
        test('test:tester/not-a-date/us-east-1/s3/aws4_request',
             'Invalid credential date "not-a-date". This date is not the same '
             'as X-Amz-Date: "%s".' % dt)
        # wrong region for this endpoint
        test('test:tester/%s/us-west-1/s3/aws4_request' % dt,
             "Error parsing the X-Amz-Credential parameter; the region "
             "'us-west-1' is wrong; expecting 'us-east-1'",
             b'<Region>us-east-1</Region>')
        # wrong service
        test('test:tester/%s/us-east-1/not-s3/aws4_request' % dt,
             'Error parsing the X-Amz-Credential parameter; incorrect service '
             '"not-s3". This endpoint belongs to "s3".')
        # wrong terminal string
        test('test:tester/%s/us-east-1/s3/not-aws4_request' % dt,
             'Error parsing the X-Amz-Credential parameter; incorrect '
             'terminal "not-aws4_request". This endpoint uses "aws4_request".')
def test_signed_urls_v4_missing_x_amz_date(self):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_request'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host'
'&X-Amz-Signature=X',
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
self.assertEqual(
{'403.AccessDenied.invalid_date': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_signed_urls_v4_invalid_algorithm(self):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=FAKE'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_request'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host'
'&X-Amz-Signature=X' %
self.get_v4_amz_date_header(),
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
self.assertEqual(
{'400.InvalidArgument': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_signed_urls_v4_missing_signed_headers(self):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_request'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-Signature=X' %
self.get_v4_amz_date_header(),
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body),
'AuthorizationHeaderMalformed')
self.assertEqual(
{'400.AuthorizationHeaderMalformed': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_signed_urls_v4_invalid_credentials(self):
req = Request.blank('/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host'
'&X-Amz-Signature=X' %
self.get_v4_amz_date_header(),
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
self.assertEqual(
{'403.AccessDenied.invalid_credential': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_signed_urls_v4_missing_signature(self):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_request'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host' %
self.get_v4_amz_date_header(),
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
self.assertEqual(
{'403.AccessDenied.invalid_query_auth': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_bucket_virtual_hosted_style(self):
req = Request.blank('/',
environ={'HTTP_HOST': 'bucket.localhost:80',
'REQUEST_METHOD': 'HEAD',
'HTTP_AUTHORIZATION':
'AWS test:tester:hmac'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_test/bucket',
req.environ['swift.backend_path'])
def test_object_virtual_hosted_style(self):
req = Request.blank('/object',
environ={'HTTP_HOST': 'bucket.localhost:80',
'REQUEST_METHOD': 'HEAD',
'HTTP_AUTHORIZATION':
'AWS test:tester:hmac'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_test/bucket/object',
req.environ['swift.backend_path'])
    def test_token_generation(self):
        """auth_details must carry the exact v2 string-to-sign for a
        multipart part upload, with query params sorted."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/'
                            'object/123456789abcdef',
                            swob.HTTPOk, {}, None)
        self.swift.register('PUT', '/v1/AUTH_test/bucket+segments/'
                            'object/123456789abcdef/1',
                            swob.HTTPCreated, {}, None)
        req = Request.blank('/bucket/object?uploadId=123456789abcdef'
                            '&partNumber=1',
                            environ={'REQUEST_METHOD': 'PUT'})
        req.headers['Authorization'] = 'AWS test:tester:hmac'
        date_header = self.get_date_header()
        req.headers['Date'] = date_header
        # bypass actual signature verification; we only inspect the details
        with mock.patch('swift.common.middleware.s3api.s3request.'
                        'S3Request.check_signature') as mock_cs:
            status, headers, body = self.call_s3api(req)
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual(
            '/v1/AUTH_test/bucket+segments/object/123456789abcdef/1',
            req.environ['swift.backend_path'])
        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertEqual(req.environ['s3api.auth_details'], {
            'access_key': 'test:tester',
            'signature': 'hmac',
            # note the query parameters are sorted in the string-to-sign
            'string_to_sign': b'\n'.join([
                b'PUT', b'', b'', date_header.encode('ascii'),
                b'/bucket/object?partNumber=1&uploadId=123456789abcdef']),
            'check_signature': mock_cs})
    def test_non_ascii_user(self):
        """Non-ASCII access keys survive WSGI-string handling intact."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/'
                            'object/123456789abcdef',
                            swob.HTTPOk, {}, None)
        self.swift.register('PUT', '/v1/AUTH_test/bucket+segments/'
                            'object/123456789abcdef/1',
                            swob.HTTPCreated, {}, None)
        req = Request.blank('/bucket/object?uploadId=123456789abcdef'
                            '&partNumber=1',
                            environ={'REQUEST_METHOD': 'PUT'})
        # NB: WSGI string for a snowman
        req.headers['Authorization'] = 'AWS test:\xe2\x98\x83:sig'
        date_header = self.get_date_header()
        req.headers['Date'] = date_header
        # bypass actual signature verification; we only inspect the details
        with mock.patch('swift.common.middleware.s3api.s3request.'
                        'S3Request.check_signature') as mock_cs:
            status, headers, body = self.call_s3api(req)
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual(
            '/v1/AUTH_test/bucket+segments/object/123456789abcdef/1',
            req.environ['swift.backend_path'])
        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertEqual(req.environ['s3api.auth_details'], {
            # py2 keeps the UTF-8 bytes; py3 gets the decoded text
            'access_key': (u'test:\N{SNOWMAN}'.encode('utf-8') if six.PY2
                           else u'test:\N{SNOWMAN}'),
            'signature': 'sig',
            'string_to_sign': b'\n'.join([
                b'PUT', b'', b'', date_header.encode('ascii'),
                b'/bucket/object?partNumber=1&uploadId=123456789abcdef']),
            'check_signature': mock_cs})
def test_invalid_uri(self):
req = Request.blank('/bucket/invalid\xffname',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidURI')
self.assertEqual(
{'400.InvalidURI': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_object_create_bad_md5_unreadable(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_CONTENT_MD5': '#'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidDigest')
self.assertEqual(
{'400.InvalidDigest': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_object_create_bad_md5_too_short(self):
too_short_digest = md5(b'hey', usedforsecurity=False).digest()[:-1]
md5_str = base64.b64encode(too_short_digest).strip()
if not six.PY2:
md5_str = md5_str.decode('ascii')
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_CONTENT_MD5': md5_str},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidDigest')
self.assertEqual(
{'400.InvalidDigest': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_object_create_bad_md5_bad_padding(self):
too_short_digest = md5(b'hey', usedforsecurity=False).digest()
md5_str = base64.b64encode(too_short_digest).strip(b'=\n')
if not six.PY2:
md5_str = md5_str.decode('ascii')
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_CONTENT_MD5': md5_str},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidDigest')
self.assertEqual(
{'400.InvalidDigest': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_object_create_bad_md5_too_long(self):
too_long_digest = md5(
b'hey', usedforsecurity=False).digest() + b'suffix'
md5_str = base64.b64encode(too_long_digest).strip()
if not six.PY2:
md5_str = md5_str.decode('ascii')
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_CONTENT_MD5': md5_str},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidDigest')
self.assertEqual(
{'400.InvalidDigest': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_invalid_metadata_directive(self):
req = Request.blank('/',
environ={'REQUEST_METHOD': 'GET',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_X_AMZ_METADATA_DIRECTIVE':
'invalid'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
self.assertEqual(
{'400.InvalidArgument': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_invalid_storage_class(self):
req = Request.blank('/',
environ={'REQUEST_METHOD': 'GET',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_X_AMZ_STORAGE_CLASS': 'INVALID'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidStorageClass')
self.assertEqual(
{'400.InvalidStorageClass': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_invalid_ssc(self):
req = Request.blank('/',
environ={'REQUEST_METHOD': 'GET',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
headers={'x-amz-server-side-encryption': 'invalid',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
self.assertEqual(
{'400.InvalidArgument': 1},
self.s3api.logger.logger.statsd_client.get_increment_counts())
    def _test_unsupported_header(self, header, value=None):
        """Helper: assert that sending ``header`` gets a 501 NotImplemented
        response and bumps exactly one statsd counter."""
        if value is None:
            value = 'value'
        req = Request.blank('/error',
                            environ={'REQUEST_METHOD': 'GET',
                                     'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
                            headers={header: value,
                                     'Date': self.get_date_header()})
        # clear counters so repeated calls in one test stay independent
        self.s3api.logger.logger.clear()
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')
        self.assertEqual(
            {'501.NotImplemented': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())
    def test_mfa(self):
        # x-amz-mfa is not implemented by s3api
        self._test_unsupported_header('x-amz-mfa')
    @mock.patch.object(registry, '_swift_admin_info', dict())
    def test_server_side_encryption(self):
        """SSE headers are 501 unless Swift's encryption is enabled; then
        AES256 is accepted but aws:kms still fails."""
        sse_header = 'x-amz-server-side-encryption'
        # no encryption info registered at all -> unsupported
        self._test_unsupported_header(sse_header, 'AES256')
        self._test_unsupported_header(sse_header, 'aws:kms')
        # registered but disabled -> still unsupported
        registry.register_swift_info('encryption', admin=True, enabled=False)
        self._test_unsupported_header(sse_header, 'AES256')
        self._test_unsupported_header(sse_header, 'aws:kms')
        registry.register_swift_info('encryption', admin=True, enabled=True)
        # AES256 now works
        self.swift.register('PUT', '/v1/AUTH_X/bucket/object',
                            swob.HTTPCreated, {}, None)
        req = Request.blank('/bucket/object',
                            environ={'REQUEST_METHOD': 'PUT',
                                     'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
                            headers={sse_header: 'AES256',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '200 OK')
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual('/v1/AUTH_X/bucket/object',
                         req.environ['swift.backend_path'])
        # ...but aws:kms continues to fail
        self._test_unsupported_header(sse_header, 'aws:kms')
    def test_website_redirect_location(self):
        # static-website redirects are not implemented
        self._test_unsupported_header('x-amz-website-redirect-location')
    def test_aws_chunked(self):
        """aws-chunked transfer encoding is not implemented, in any of the
        forms clients use to request it."""
        self._test_unsupported_header('content-encoding', 'aws-chunked')
        # https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
        # has a multi-encoding example:
        #
        # > Amazon S3 supports multiple content encodings. For example:
        # >
        # > Content-Encoding : aws-chunked,gzip
        # > That is, you can specify your custom content-encoding when using
        # > Signature Version 4 streaming API.
        self._test_unsupported_header('Content-Encoding', 'aws-chunked,gzip')
        # Some clients skip the content-encoding,
        # such as minio-go and aws-sdk-java
        self._test_unsupported_header('x-amz-content-sha256',
                                      'STREAMING-AWS4-HMAC-SHA256-PAYLOAD')
        self._test_unsupported_header('x-amz-decoded-content-length')
    def test_object_tagging(self):
        # per-object tagging via header is not implemented
        self._test_unsupported_header('x-amz-tagging')
    def _test_unsupported_resource(self, resource):
        """Helper: assert that GET ?<resource> gets a 501 NotImplemented
        response and bumps exactly one statsd counter."""
        req = Request.blank('/error?' + resource,
                            environ={'REQUEST_METHOD': 'GET',
                                     'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
                            headers={'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'NotImplemented')
        self.assertEqual(
            {'501.NotImplemented': 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())
    # Each of these bucket subresources is unimplemented in s3api and must
    # consistently report 501 NotImplemented.
    def test_notification(self):
        self._test_unsupported_resource('notification')
    def test_policy(self):
        self._test_unsupported_resource('policy')
    def test_request_payment(self):
        self._test_unsupported_resource('requestPayment')
    def test_torrent(self):
        self._test_unsupported_resource('torrent')
    def test_website(self):
        self._test_unsupported_resource('website')
    def test_cors(self):
        self._test_unsupported_resource('cors')
def test_tagging(self):
    """GET ?tagging is supported; PUT/DELETE ?tagging return 501.

    Each unsupported method also bumps the 501.NotImplemented counter
    (the logger is cleared between requests so each count is isolated).
    """
    req = Request.blank('/bucket?tagging',
                        environ={'REQUEST_METHOD': 'GET'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status.split()[0], '200')
    # the supported GET emits no error counters
    self.assertEqual(
        {},
        self.s3api.logger.logger.statsd_client.get_increment_counts())
    req = Request.blank('/bucket?tagging',
                        environ={'REQUEST_METHOD': 'PUT'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    self.s3api.logger.logger.clear()
    status, headers, body = self.call_s3api(req)
    self.assertEqual(self._get_error_code(body), 'NotImplemented')
    self.assertEqual(
        {'501.NotImplemented': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())
    req = Request.blank('/bucket?tagging',
                        environ={'REQUEST_METHOD': 'DELETE'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    self.s3api.logger.logger.clear()
    status, headers, body = self.call_s3api(req)
    self.assertEqual(self._get_error_code(body), 'NotImplemented')
    self.assertEqual(
        {'501.NotImplemented': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_restore(self):
    """?restore subresource is unsupported."""
    self._test_unsupported_resource('restore')
def test_unsupported_method(self):
    """POST to ?acl is rejected with MethodNotAllowed.

    The error body carries the offending Method and ResourceType, and a
    405.MethodNotAllowed counter is incremented.
    """
    req = Request.blank('/bucket?acl',
                        environ={'REQUEST_METHOD': 'POST'},
                        headers={'Authorization': 'AWS test:tester:hmac',
                                 'Date': self.get_date_header()})
    status, headers, body = self.call_s3api(req)
    elem = fromstring(body, 'Error')
    self.assertEqual(elem.find('./Code').text, 'MethodNotAllowed')
    self.assertEqual(elem.find('./Method').text, 'POST')
    self.assertEqual(elem.find('./ResourceType').text, 'ACL')
    self.assertEqual(
        {'405.MethodNotAllowed': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())
@mock.patch.object(registry, '_sensitive_headers', set())
@mock.patch.object(registry, '_sensitive_params', set())
def test_registered_sensitive_info(self):
    """filter_factory registers auth headers/params as sensitive.

    The registry sets are patched empty first so the registrations
    observed are attributable to filter_factory alone.
    """
    self.assertFalse(registry.get_sensitive_headers())
    self.assertFalse(registry.get_sensitive_params())
    filter_factory(self.conf)
    sensitive = registry.get_sensitive_headers()
    self.assertIn('authorization', sensitive)
    sensitive = registry.get_sensitive_params()
    self.assertIn('X-Amz-Signature', sensitive)
    self.assertIn('Signature', sensitive)
@mock.patch.object(registry, '_swift_info', dict())
def test_registered_defaults(self):
    """filter_factory publishes the expected s3api keys in swift_info."""
    # conf values are stringified to mimic what a real config file yields
    conf_from_file = {k: str(v) for k, v in self.conf.items()}
    filter_factory(conf_from_file)
    swift_info = registry.get_swift_info()
    self.assertTrue('s3api' in swift_info)
    registered_keys = [
        'max_bucket_listing', 'max_parts_listing', 'max_upload_part_num',
        'max_multi_delete_objects', 'allow_multipart_uploads',
        'min_segment_size', 's3_acl']
    expected = dict((k, self.conf[k]) for k in registered_keys)
    self.assertEqual(expected, swift_info['s3api'])
def test_check_pipeline(self):
    """check_pipeline accepts valid auth orderings and rejects bad ones.

    PipelineWrapper is mocked so its return value stands in for the
    proxy pipeline string being validated.
    """
    with patch("swift.common.middleware.s3api.s3api.loadcontext"), \
            patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
            as pipeline:
        # cause check_pipeline to not return early...
        self.conf['__file__'] = ''
        # ...and enable pipeline auth checking
        self.s3api.conf.auth_pipeline_check = True
        pipeline.return_value = 's3api tempauth proxy-server'
        self.s3api.check_pipeline(self.conf)
        # This *should* still work; authtoken will remove our auth details,
        # but the X-Auth-Token we drop in will remain
        # if we found one in the response
        pipeline.return_value = 's3api s3token authtoken keystoneauth ' \
            'proxy-server'
        self.s3api.check_pipeline(self.conf)
        # This should work now; no more doubled-up requests to keystone!
        pipeline.return_value = 's3api s3token keystoneauth proxy-server'
        self.s3api.check_pipeline(self.conf)
        # Note that authtoken would need to have delay_auth_decision=True
        pipeline.return_value = 's3api authtoken s3token keystoneauth ' \
            'proxy-server'
        self.s3api.check_pipeline(self.conf)
        # no auth middleware at all between s3api and proxy -> rejected
        pipeline.return_value = 's3api proxy-server'
        with self.assertRaises(ValueError) as cm:
            self.s3api.check_pipeline(self.conf)
        self.assertIn('expected auth between s3api and proxy-server',
                      cm.exception.args[0])
        # s3api itself missing from the pipeline -> rejected
        pipeline.return_value = 'proxy-server'
        with self.assertRaises(ValueError) as cm:
            self.s3api.check_pipeline(self.conf)
        self.assertIn("missing filters ['s3api']",
                      cm.exception.args[0])
def test_s3api_initialization_with_disabled_pipeline_check(self):
    """With auth_pipeline_check off, only a missing s3api filter fails.

    All auth orderings (even no auth at all) pass; the only remaining
    hard error is s3api being absent from the pipeline entirely.
    """
    with patch("swift.common.middleware.s3api.s3api.loadcontext"), \
            patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
            as pipeline:
        # cause check_pipeline to not return early...
        self.conf['__file__'] = ''
        # ...but disable pipeline auth checking
        self.s3api.conf.auth_pipeline_check = False
        pipeline.return_value = 's3api tempauth proxy-server'
        self.s3api.check_pipeline(self.conf)
        pipeline.return_value = 's3api s3token authtoken keystoneauth ' \
            'proxy-server'
        self.s3api.check_pipeline(self.conf)
        pipeline.return_value = 's3api authtoken s3token keystoneauth ' \
            'proxy-server'
        self.s3api.check_pipeline(self.conf)
        # no auth: allowed, since the auth check is disabled
        pipeline.return_value = 's3api proxy-server'
        self.s3api.check_pipeline(self.conf)
        # s3api missing is still an error
        pipeline.return_value = 'proxy-server'
        with self.assertRaises(ValueError):
            self.s3api.check_pipeline(self.conf)
def test_signature_v4(self):
    """A well-formed SigV4 header request succeeds.

    Also checks the backend path is exposed and that the Authorization
    header (not an X-Auth-Token) is forwarded to the backend.
    """
    environ = {
        'REQUEST_METHOD': 'GET'}
    authz_header = 'AWS4-HMAC-SHA256 ' + ', '.join([
        'Credential=test:tester/%s/us-east-1/s3/aws4_request' %
        self.get_v4_amz_date_header().split('T', 1)[0],
        'SignedHeaders=host;x-amz-date',
        'Signature=X',
    ])
    headers = {
        'Authorization': authz_header,
        'X-Amz-Date': self.get_v4_amz_date_header(),
        'X-Amz-Content-SHA256': '0123456789'}
    req = Request.blank('/bucket/object', environ=environ, headers=headers)
    req.content_type = 'text/plain'
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status.split()[0], '200', body)
    self.assertIn('swift.backend_path', req.environ)
    self.assertEqual('/v1/AUTH_test/bucket/object',
                     req.environ['swift.backend_path'])
    for _, _, headers in self.swift.calls_with_headers:
        self.assertEqual(authz_header, headers['Authorization'])
        self.assertNotIn('X-Auth-Token', headers)
def test_signature_v4_no_date(self):
    """SigV4 request lacking any date header fails with AccessDenied.

    Neither Date nor X-Amz-Date is supplied, so the request is rejected
    and counted as an invalid_date access denial.
    """
    environ = {
        'REQUEST_METHOD': 'GET'}
    headers = {
        'Authorization':
            'AWS4-HMAC-SHA256 '
            'Credential=test:tester/20130524/us-east-1/s3/aws4_request, '
            'SignedHeaders=host;range;x-amz-date,'
            'Signature=X',
        'X-Amz-Content-SHA256': '0123456789'}
    req = Request.blank('/bucket/object', environ=environ, headers=headers)
    req.content_type = 'text/plain'
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status.split()[0], '403')
    self.assertEqual(self._get_error_code(body), 'AccessDenied')
    self.assertEqual(
        {'403.AccessDenied.invalid_date': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_signature_v4_no_payload(self):
    """SigV4 request without x-amz-content-sha256 is an InvalidRequest."""
    environ = {
        'REQUEST_METHOD': 'GET'}
    headers = {
        'Authorization':
            'AWS4-HMAC-SHA256 '
            'Credential=test:tester/%s/us-east-1/s3/aws4_request, '
            'SignedHeaders=host;x-amz-date,'
            'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0],
        'X-Amz-Date': self.get_v4_amz_date_header()}
    req = Request.blank('/bucket/object', environ=environ, headers=headers)
    req.content_type = 'text/plain'
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status.split()[0], '400')
    self.assertEqual(self._get_error_code(body), 'InvalidRequest')
    self.assertEqual(
        self._get_error_message(body),
        'Missing required header for this request: x-amz-content-sha256')
    self.assertEqual(
        {'400.InvalidRequest': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_signature_v4_bad_authorization_string(self):
    """Malformed SigV4 Authorization headers map to specific errors.

    Each case checks the error code, its message, an optional body
    fragment, and the exact statsd counter that should be bumped.
    """
    def test(auth_str, error, msg, metric, extra=b''):
        # one round-trip with the given Authorization header; asserts
        # the resulting error code/message/metric
        environ = {
            'REQUEST_METHOD': 'GET'}
        headers = {
            'Authorization': auth_str,
            'X-Amz-Date': self.get_v4_amz_date_header(),
            'X-Amz-Content-SHA256': '0123456789'}
        req = Request.blank('/bucket/object', environ=environ,
                            headers=headers)
        req.content_type = 'text/plain'
        self.s3api.logger.logger.clear()
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), error)
        self.assertEqual(self._get_error_message(body), msg)
        self.assertIn(extra, body)
        self.assertEqual(
            {metric: 1},
            self.s3api.logger.logger.statsd_client.get_increment_counts())

    # missing Credential component
    auth_str = ('AWS4-HMAC-SHA256 '
                'SignedHeaders=host;x-amz-date,'
                'Signature=X')
    test(auth_str, 'AccessDenied', 'Access Denied.',
         '403.AccessDenied.invalid_credential')
    # missing SignedHeaders component
    auth_str = (
        'AWS4-HMAC-SHA256 '
        'Credential=test:tester/20130524/us-east-1/s3/aws4_request, '
        'Signature=X')
    test(auth_str, 'AuthorizationHeaderMalformed',
         'The authorization header is malformed; the authorization '
         'header requires three components: Credential, SignedHeaders, '
         'and Signature.', '400.AuthorizationHeaderMalformed')
    # wrong region in the credential scope
    auth_str = ('AWS4-HMAC-SHA256 '
                'Credential=test:tester/%s/us-west-2/s3/aws4_request, '
                'Signature=X, SignedHeaders=host;x-amz-date' %
                self.get_v4_amz_date_header().split('T', 1)[0])
    test(auth_str, 'AuthorizationHeaderMalformed',
         "The authorization header is malformed; "
         "the region 'us-west-2' is wrong; expecting 'us-east-1'",
         '400.AuthorizationHeaderMalformed', b'<Region>us-east-1</Region>')
    # wrong service in the credential scope
    auth_str = ('AWS4-HMAC-SHA256 '
                'Credential=test:tester/%s/us-east-1/not-s3/aws4_request, '
                'Signature=X, SignedHeaders=host;x-amz-date' %
                self.get_v4_amz_date_header().split('T', 1)[0])
    test(auth_str, 'AuthorizationHeaderMalformed',
         'The authorization header is malformed; '
         'incorrect service "not-s3". This endpoint belongs to "s3".',
         '400.AuthorizationHeaderMalformed')
    # wrong terminal string in the credential scope
    auth_str = ('AWS4-HMAC-SHA256 '
                'Credential=test:tester/%s/us-east-1/s3/not-aws4_request, '
                'Signature=X, SignedHeaders=host;x-amz-date' %
                self.get_v4_amz_date_header().split('T', 1)[0])
    test(auth_str, 'AuthorizationHeaderMalformed',
         'The authorization header is malformed; '
         'incorrect terminal "not-aws4_request". '
         'This endpoint uses "aws4_request".',
         '400.AuthorizationHeaderMalformed')
    # missing Signature component
    auth_str = (
        'AWS4-HMAC-SHA256 '
        'Credential=test:tester/20130524/us-east-1/s3/aws4_request, '
        'SignedHeaders=host;x-amz-date')
    test(auth_str, 'AccessDenied', 'Access Denied.',
         '403.AccessDenied.invalid_header_auth')
def test_canonical_string_v4(self):
    """Verify SigV4 canonical request / string-to-sign construction.

    Hash values and signing key come from Amazon's aws4_testsuite; each
    verify() call checks the hashed canonical request embedded in the
    string-to-sign and that the full signature validates.
    """
    def _get_req(path, environ):
        # build a SigV4Request with fixed time and header validation
        # patched out, so only canonicalization is exercised
        if '?' in path:
            path, query_string = path.split('?', 1)
        else:
            query_string = ''
        env = {
            'REQUEST_METHOD': 'GET',
            'PATH_INFO': path,
            'QUERY_STRING': query_string,
            'HTTP_DATE': 'Mon, 09 Sep 2011 23:36:00 GMT',
            'HTTP_X_AMZ_CONTENT_SHA256':
                'e3b0c44298fc1c149afbf4c8996fb924'
                '27ae41e4649b934ca495991b7852b855',
            'HTTP_AUTHORIZATION':
                'AWS4-HMAC-SHA256 '
                'Credential=X:Y/20110909/us-east-1/s3/aws4_request, '
                'SignedHeaders=content-md5;content-type;date, '
                'Signature=x',
        }
        fake_time = calendar.timegm((2011, 9, 9, 23, 36, 0))
        env.update(environ)
        with patch('swift.common.middleware.s3api.s3request.'
                   'S3Request._validate_headers'), \
                patch('swift.common.middleware.s3api.utils.time.time',
                      return_value=fake_time):
            req = SigV4Request(env, conf=self.s3api.conf)
        return req

    def canonical_string(path, environ):
        return _get_req(path, environ)._canonical_request()

    def verify(hash_val, path, environ):
        # See http://docs.aws.amazon.com/general/latest/gr
        # /signature-v4-test-suite.html for where location, service, and
        # signing key came from
        with patch.object(self.s3api.conf, 'location', 'us-east-1'), \
                patch.object(swift.common.middleware.s3api.s3request,
                             'SERVICE', 'host'):
            req = _get_req(path, environ)
            hash_in_sts = req._string_to_sign().split(b'\n')[3]
            self.assertEqual(hash_val, hash_in_sts.decode('ascii'))
            self.assertTrue(req.check_signature(
                'wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY'))

    # all next data got from aws4_testsuite from Amazon
    # http://docs.aws.amazon.com/general/latest/gr/samples
    # /aws4_testsuite.zip
    # Each *expected* hash value is the 4th line in <test-name>.sts in the
    # test suite.

    # get-vanilla
    env = {
        'HTTP_AUTHORIZATION': (
            'AWS4-HMAC-SHA256 '
            'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
            'SignedHeaders=date;host, '
            'Signature=b27ccfbfa7df52a200ff74193ca6e32d'
            '4b48b8856fab7ebf1c595d0670a7e470'),
        'HTTP_HOST': 'host.foo.com'}
    verify('366b91fb121d72a00f46bbe8d395f53a'
           '102b06dfb7e79636515208ed3fa606b1',
           '/', env)

    # get-header-value-trim
    env = {
        'REQUEST_METHOD': 'POST',
        'HTTP_AUTHORIZATION': (
            'AWS4-HMAC-SHA256 '
            'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
            'SignedHeaders=date;host;p, '
            'Signature=debf546796015d6f6ded8626f5ce9859'
            '7c33b47b9164cf6b17b4642036fcb592'),
        'HTTP_HOST': 'host.foo.com',
        'HTTP_P': 'phfft'}
    verify('dddd1902add08da1ac94782b05f9278c'
           '08dc7468db178a84f8950d93b30b1f35',
           '/', env)

    # get-utf8 (not exact)
    env = {
        'HTTP_AUTHORIZATION': (
            'AWS4-HMAC-SHA256 '
            'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
            'SignedHeaders=date;host, '
            'Signature=8d6634c189aa8c75c2e51e106b6b5121'
            'bed103fdb351f7d7d4381c738823af74'),
        'HTTP_HOST': 'host.foo.com',
        'RAW_PATH_INFO': '/%E1%88%B4'}
    # This might look weird because actually S3 doesn't care about utf-8
    # encoded multi-byte bucket name from bucket-in-host name constraint.
    # However, aws4_testsuite has only a sample hash with utf-8 *bucket*
    # name to make sure the correctness (probably it can be used in other
    # aws resource except s3) so, to test also utf-8, skip the bucket name
    # validation in the following test.

    # NOTE: eventlet's PATH_INFO is unquoted
    with patch('swift.common.middleware.s3api.s3request.'
               'validate_bucket_name'):
        verify('27ba31df5dbc6e063d8f87d62eb07143'
               'f7f271c5330a917840586ac1c85b6f6b',
               swob.wsgi_unquote('/%E1%88%B4'), env)

    # get-vanilla-query-order-key
    env = {
        'HTTP_AUTHORIZATION': (
            'AWS4-HMAC-SHA256 '
            'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
            'SignedHeaders=date;host, '
            'Signature=0dc122f3b28b831ab48ba65cb47300de'
            '53fbe91b577fe113edac383730254a3b'),
        'HTTP_HOST': 'host.foo.com'}
    verify('2f23d14fe13caebf6dfda346285c6d9c'
           '14f49eaca8f5ec55c627dd7404f7a727',
           '/?a=foo&b=foo', env)

    # post-header-value-case
    env = {
        'REQUEST_METHOD': 'POST',
        'HTTP_AUTHORIZATION': (
            'AWS4-HMAC-SHA256 '
            'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
            'SignedHeaders=date;host;zoo, '
            'Signature=273313af9d0c265c531e11db70bbd653'
            'f3ba074c1009239e8559d3987039cad7'),
        'HTTP_HOST': 'host.foo.com',
        'HTTP_ZOO': 'ZOOBAR'}
    verify('3aae6d8274b8c03e2cc96fc7d6bda4b9'
           'bd7a0a184309344470b2c96953e124aa',
           '/', env)

    # post-x-www-form-urlencoded-parameters
    env = {
        'REQUEST_METHOD': 'POST',
        'HTTP_AUTHORIZATION': (
            'AWS4-HMAC-SHA256 '
            'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
            'SignedHeaders=date;host;content-type, '
            'Signature=b105eb10c6d318d2294de9d49dd8b031'
            'b55e3c3fe139f2e637da70511e9e7b71'),
        'HTTP_HOST': 'host.foo.com',
        'HTTP_X_AMZ_CONTENT_SHA256':
            '3ba8907e7a252327488df390ed517c45'
            'b96dead033600219bdca7107d1d3f88a',
        'CONTENT_TYPE':
            'application/x-www-form-urlencoded; charset=utf8'}
    verify('c4115f9e54b5cecf192b1eaa23b8e88e'
           'd8dc5391bd4fde7b3fff3d9c9fe0af1f',
           '/', env)

    # post-x-www-form-urlencoded
    env = {
        'REQUEST_METHOD': 'POST',
        'HTTP_AUTHORIZATION': (
            'AWS4-HMAC-SHA256 '
            'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
            'SignedHeaders=date;host;content-type, '
            'Signature=5a15b22cf462f047318703b92e6f4f38'
            '884e4a7ab7b1d6426ca46a8bd1c26cbc'),
        'HTTP_HOST': 'host.foo.com',
        'HTTP_X_AMZ_CONTENT_SHA256':
            '3ba8907e7a252327488df390ed517c45'
            'b96dead033600219bdca7107d1d3f88a',
        'CONTENT_TYPE':
            'application/x-www-form-urlencoded'}
    verify('4c5c6e4b52fb5fb947a8733982a8a5a6'
           '1b14f04345cbfe6e739236c76dd48f74',
           '/', env)

    # Note that boto does not do proper stripping (as of 2.42.0).
    # These were determined by examining the StringToSignBytes element of
    # resulting SignatureDoesNotMatch errors from AWS.
    str1 = canonical_string('/', {'CONTENT_TYPE': 'text/plain',
                                  'HTTP_CONTENT_MD5': '##'})
    str2 = canonical_string('/', {'CONTENT_TYPE': '\x01\x02text/plain',
                                  'HTTP_CONTENT_MD5': '\x1f ##'})
    str3 = canonical_string('/', {'CONTENT_TYPE': 'text/plain \x10',
                                  'HTTP_CONTENT_MD5': '##\x18'})
    self.assertEqual(str1, str2)
    self.assertEqual(str2, str3)
def test_mixture_param_v4(self):
    """Mixing header auth with conflicting query auth is rejected (403)."""
    # now we have an Authorization header
    headers = {
        'Authorization':
            'AWS4-HMAC-SHA256 '
            'Credential=test/20130524/us-east-1/s3/aws4_request_A, '
            'SignedHeaders=hostA;rangeA;x-amz-dateA,'
            'Signature=X',
        'X-Amz-Date': self.get_v4_amz_date_header(),
        'X-Amz-Content-SHA256': '0123456789'}

    # and then, different auth info (Credential, SignedHeaders, Signature)
    # in query
    req = Request.blank(
        '/bucket/object'
        '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
        '&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_requestB'
        '&X-Amz-SignedHeaders=hostB'
        '&X-Amz-Signature=Y',
        environ={'REQUEST_METHOD': 'GET'},
        headers=headers)
    req.content_type = 'text/plain'
    status, headers, body = self.call_s3api(req)
    # FIXME: should this failed as 400 or pass via query auth?
    # for now, 403 forbidden for safety
    self.assertEqual(status.split()[0], '403', body)
    self.assertEqual(
        {'403.AccessDenied.invalid_expires': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())

    # But if we are missing Signature in query param
    req = Request.blank(
        '/bucket/object'
        '?X-Amz-Algorithm=AWS4-HMAC-SHA256'
        '&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_requestB'
        '&X-Amz-SignedHeaders=hostB',
        environ={'REQUEST_METHOD': 'GET'},
        headers=headers)
    req.content_type = 'text/plain'
    self.s3api.logger.logger.clear()
    status, headers, body = self.call_s3api(req)
    self.assertEqual(status.split()[0], '403', body)
    self.assertEqual(
        {'403.AccessDenied.invalid_expires': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_s3api_with_only_s3_token(self):
    """s3api + s3token + keystoneauth (v2 token) authorizes a PUT.

    The keystone round-trip is mocked; exactly one token request is
    expected and the backend path maps to the keystone tenant account.
    """
    self.swift = FakeSwift()
    self.keystone_auth = KeystoneAuth(
        self.swift, {'operator_roles': 'swift-user'})
    self.s3_token = S3Token(
        self.keystone_auth, {'auth_uri': 'https://fakehost/identity'})
    self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
    self.s3api.logger = debug_logger()
    req = Request.blank(
        '/bucket',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'Authorization': 'AWS access:signature',
                 'Date': self.get_date_header()})
    self.swift.register('PUT', '/v1/AUTH_TENANT_ID/bucket',
                        swob.HTTPCreated, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_TENANT_ID',
                        swob.HTTPOk, {}, None)
    with patch.object(self.s3_token, '_json_request') as mock_req:
        mock_resp = requests.Response()
        mock_resp._content = json.dumps(GOOD_RESPONSE_V2).encode('ascii')
        mock_resp.status_code = 201
        mock_req.return_value = mock_resp

        status, headers, body = self.call_s3api(req)
        self.assertEqual(body, b'')
        self.assertEqual(1, mock_req.call_count)
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual('/v1/AUTH_TENANT_ID/bucket',
                         req.environ['swift.backend_path'])
def test_s3api_with_only_s3_token_v3(self):
    """Same as test_s3api_with_only_s3_token but with a v3 keystone token.

    The v3 response maps to a project (AUTH_PROJECT_ID) account.
    """
    self.swift = FakeSwift()
    self.keystone_auth = KeystoneAuth(
        self.swift, {'operator_roles': 'swift-user'})
    self.s3_token = S3Token(
        self.keystone_auth, {'auth_uri': 'https://fakehost/identity'})
    self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
    self.s3api.logger = debug_logger()
    req = Request.blank(
        '/bucket',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'Authorization': 'AWS access:signature',
                 'Date': self.get_date_header()})
    self.swift.register('PUT', '/v1/AUTH_PROJECT_ID/bucket',
                        swob.HTTPCreated, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_PROJECT_ID',
                        swob.HTTPOk, {}, None)
    with patch.object(self.s3_token, '_json_request') as mock_req:
        mock_resp = requests.Response()
        mock_resp._content = json.dumps(GOOD_RESPONSE_V3).encode('ascii')
        mock_resp.status_code = 200
        mock_req.return_value = mock_resp

        status, headers, body = self.call_s3api(req)
        self.assertEqual(body, b'')
        self.assertEqual(1, mock_req.call_count)
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual('/v1/AUTH_PROJECT_ID/bucket',
                         req.environ['swift.backend_path'])
def test_s3api_with_s3_token_and_auth_token(self):
    """s3token followed by keystonemiddleware's auth_token fails (403).

    auth_token strips the identity headers s3token set, so keystoneauth
    sees an invalid identity; the request never reaches keystone again.
    """
    self.swift = FakeSwift()
    self.keystone_auth = KeystoneAuth(
        self.swift, {'operator_roles': 'swift-user'})
    self.auth_token = AuthProtocol(
        self.keystone_auth, {'delay_auth_decision': 'True'})
    self.s3_token = S3Token(
        self.auth_token, {'auth_uri': 'https://fakehost/identity'})
    self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
    self.s3api.logger = debug_logger()
    req = Request.blank(
        '/bucket',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'Authorization': 'AWS access:signature',
                 'Date': self.get_date_header()})
    self.swift.register('PUT', '/v1/AUTH_TENANT_ID/bucket',
                        swob.HTTPCreated, {}, None)
    self.swift.register('HEAD', '/v1/AUTH_TENANT_ID',
                        swob.HTTPOk, {}, None)
    with patch.object(self.s3_token, '_json_request') as mock_req:
        with patch.object(self.auth_token,
                          '_do_fetch_token') as mock_fetch:
            # sanity check
            self.assertIn('id', GOOD_RESPONSE_V2['access']['token'])
            mock_resp = requests.Response()
            mock_resp._content = json.dumps(
                GOOD_RESPONSE_V2).encode('ascii')
            mock_resp.status_code = 201
            mock_req.return_value = mock_resp

            mock_access_info = AccessInfoV2(GOOD_RESPONSE_V2)
            mock_access_info.will_expire_soon = \
                lambda stale_duration: False
            mock_fetch.return_value = (MagicMock(), mock_access_info)

            status, headers, body = self.call_s3api(req)
            # Even though s3token got a token back from keystone, we drop
            # it on the floor, resulting in a 401 Unauthorized at
            # `swift.common.middleware.keystoneauth` because
            # keystonemiddleware's auth_token strips out all auth headers,
            # significantly 'X-Identity-Status'. Without a token, it then
            # sets 'X-Identity-Status: Invalid' and never contacts
            # Keystone.
            self.assertEqual('403 Forbidden', status)
            self.assertIn('swift.backend_path', req.environ)
            self.assertEqual('/v1/AUTH_TENANT_ID/bucket',
                             req.environ['swift.backend_path'])
            self.assertEqual(1, mock_req.call_count)
            # it never even tries to contact keystone
            self.assertEqual(0, mock_fetch.call_count)
            statsd_client = self.s3api.logger.logger.statsd_client
            self.assertEqual(
                {'403.SignatureDoesNotMatch': 1},
                statsd_client.get_increment_counts())
def test_s3api_with_only_s3_token_in_s3acl(self):
    """s3api + s3token with s3_acl enabled still authorizes a PUT."""
    self.swift = FakeSwift()
    self.keystone_auth = KeystoneAuth(
        self.swift, {'operator_roles': 'swift-user'})
    self.s3_token = S3Token(
        self.keystone_auth, {'auth_uri': 'https://fakehost/identity'})

    self.conf['s3_acl'] = True
    self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
    self.s3api.logger = debug_logger()
    req = Request.blank(
        '/bucket',
        environ={'REQUEST_METHOD': 'PUT'},
        headers={'Authorization': 'AWS access:signature',
                 'Date': self.get_date_header()})
    self.swift.register('PUT', '/v1/AUTH_TENANT_ID/bucket',
                        swob.HTTPCreated, {}, None)
    # For now, s3 acl commits the bucket owner acl via POST
    # after PUT container so we need to register the response here
    self.swift.register('POST', '/v1/AUTH_TENANT_ID/bucket',
                        swob.HTTPNoContent, {}, None)
    self.swift.register('TEST', '/v1/AUTH_TENANT_ID',
                        swob.HTTPMethodNotAllowed, {}, None)
    with patch.object(self.s3_token, '_json_request') as mock_req:
        mock_resp = requests.Response()
        mock_resp._content = json.dumps(GOOD_RESPONSE_V2).encode('ascii')
        mock_resp.status_code = 201
        mock_req.return_value = mock_resp

        status, headers, body = self.call_s3api(req)
        self.assertEqual(body, b'')
        self.assertIn('swift.backend_path', req.environ)
        self.assertEqual('/v1/AUTH_TENANT_ID/bucket',
                         req.environ['swift.backend_path'])
        self.assertEqual(1, mock_req.call_count)
def test_s3api_with_time_skew(self):
    """Requests within allowable_clock_skew pass; outside it fail 403.

    Default skew allows +/-800s but not +/-1000s; lowering the allowed
    skew to 100s makes a previously acceptable 800s skew fail too.
    """
    def do_test(skew):
        # one request with the Date header shifted by `skew` seconds
        req = Request.blank(
            '/object',
            environ={'HTTP_HOST': 'bucket.localhost:80',
                     'REQUEST_METHOD': 'GET',
                     'HTTP_AUTHORIZATION':
                         'AWS test:tester:hmac'},
            headers={'Date': self.get_date_header(skew=skew)})
        self.s3api.logger.logger.clear()
        return self.call_s3api(req)

    status, _, body = do_test(800)
    self.assertEqual('200 OK', status)
    self.assertFalse(
        self.s3api.logger.logger.statsd_client.get_increment_counts())

    status, _, body = do_test(-800)
    self.assertEqual('200 OK', status)
    self.assertFalse(
        self.s3api.logger.logger.statsd_client.get_increment_counts())

    status, _, body = do_test(1000)
    self.assertEqual('403 Forbidden', status)
    self.assertEqual(self._get_error_code(body), 'RequestTimeTooSkewed')
    self.assertEqual(
        {'403.RequestTimeTooSkewed': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())

    status, _, body = do_test(-1000)
    self.assertEqual('403 Forbidden', status)
    self.assertEqual(self._get_error_code(body), 'RequestTimeTooSkewed')
    self.assertEqual(
        {'403.RequestTimeTooSkewed': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())

    # tighten the allowed skew; 800s is now too much
    self.s3api.conf.allowable_clock_skew = 100
    status, _, body = do_test(800)
    self.assertEqual('403 Forbidden', status)
    self.assertEqual(self._get_error_code(body), 'RequestTimeTooSkewed')
    self.assertEqual(
        {'403.RequestTimeTooSkewed': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())
def test_s3api_error_metric(self):
    """Error metrics are named '<status>.<code>.<reason>'.

    Whitespace in the code or reason is replaced with underscores.
    """
    class KaboomResponse(ErrorResponse):
        # code containing whitespace, to exercise metric-name munging
        _code = 'ka boom'

    def do_test(err_response):
        # drive one request whose handling raises `err_response`
        req = Request.blank(
            '/object',
            environ={'HTTP_HOST': 'bucket.localhost:80',
                     'REQUEST_METHOD': 'GET',
                     'HTTP_AUTHORIZATION':
                         'AWS test:tester:hmac'},
            headers={'Date': self.get_date_header()})
        self.s3api.logger.logger.clear()
        with mock.patch.object(
                self.s3api, 'handle_request', side_effect=err_response):
            self.call_s3api(req)

    do_test(ErrorResponse(status=403, msg='not good', reason='bad'))
    self.assertEqual(
        {'403.ErrorResponse.bad': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())

    do_test(AccessDenied(msg='no entry', reason='invalid_date'))
    self.assertEqual(
        {'403.AccessDenied.invalid_date': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())

    # check whitespace replaced with underscore
    do_test(KaboomResponse(status=400, msg='boom', reason='boom boom'))
    self.assertEqual(
        {'400.ka_boom.boom_boom': 1},
        self.s3api.logger.logger.statsd_client.get_increment_counts())
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_s3api.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Request
from swift.common.middleware.s3api import s3response
from swift.common.middleware.s3api.acl_utils import handle_acl_header
from test.unit.common.middleware.s3api import S3ApiTestCase
class TestS3ApiAclUtils(S3ApiTestCase):
    """Tests for translating X-Amz-Acl canned ACLs to Swift ACL headers."""

    def setUp(self):
        super(TestS3ApiAclUtils, self).setUp()

    def check_generated_acl_header(self, acl, expected):
        """Run handle_acl_header for `acl` and check the outcome.

        :param acl: the X-Amz-Acl canned ACL value to send
        :param expected: either an ErrorResponse subclass that should be
            raised, or a list of (header, value) pairs that should have
            been set on the request
        """
        req = Request.blank('/bucket',
                            headers={'X-Amz-Acl': acl})
        try:
            handle_acl_header(req)
        except s3response.ErrorResponse as e:
            if isinstance(e, expected):
                self.assertEqual(expected._status, e._status)
            else:
                raise
        else:
            # if an error was expected but nothing was raised, fail
            # cleanly instead of trying to iterate the exception class
            if isinstance(expected, type) and issubclass(
                    expected, s3response.ErrorResponse):
                self.fail('%s was not raised' % expected.__name__)
            for header, value in expected:
                self.assertIn(header, req.headers)
                self.assertEqual(req.headers[header], value)

    def test_canned_acl_header(self):
        # https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl
        self.check_generated_acl_header(
            'private',
            [('X-Container-Read', '.'), ('X-Container-Write', '.')])
        self.check_generated_acl_header(
            'public-read', [('X-Container-Read', '.r:*,.rlistings')])
        self.check_generated_acl_header(
            'public-read-write', [('X-Container-Read', '.r:*,.rlistings'),
                                  ('X-Container-Write', '.r:*')])
        self.check_generated_acl_header(
            'aws-exec-read', s3response.InvalidArgument)
        self.check_generated_acl_header(
            'authenticated-read', s3response.S3NotImplemented)
        self.check_generated_acl_header(
            'bucket-owner-read', [('X-Container-Read', '.'),
                                  ('X-Container-Write', '.')])
        self.check_generated_acl_header(
            'bucket-owner-full-control', [('X-Container-Read', '.'),
                                          ('X-Container-Write', '.')])
        self.check_generated_acl_header(
            'log-delivery-write', s3response.S3NotImplemented)
        # the 400 response is the catch all
        self.check_generated_acl_header(
            'some-non-sense', s3response.InvalidArgument)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_acl_utils.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
import six
from six.moves.urllib.parse import quote, parse_qsl
from swift.common import swob
from swift.common.middleware.versioned_writes.object_versioning import \
DELETE_MARKER_CONTENT_TYPE
from swift.common.swob import Request
from swift.common.utils import json
from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement
from swift.common.middleware.s3api.subresource import Owner, encode_acl, \
ACLPublicRead
from swift.common.middleware.s3api.s3request import MAX_32BIT_INT
from test.unit.common.middleware.helpers import normalize_path
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.test_s3_acl import s3acl
from test.unit.common.middleware.s3api.helpers import UnreadableInput
# Example etag from ProxyFS; note that it is already quote-wrapped
PFS_ETAG = '"pfsv2/AUTH_test/01234567/89abcdef-32"'
class TestS3ApiBucket(S3ApiTestCase):
def setup_objects(self):
    """Register the fake Swift fixtures used by the bucket tests.

    Builds a canned object listing (including unicode, SLO, ProxyFS-etag
    and whitespace names), some delete-marker/object versions, and wires
    the corresponding FakeSwift responses for listing, HEAD and DELETE.
    """
    # (name, last_modified, hash, bytes) source tuples for the listing
    self.objects = (('lily', '2011-01-05T02:19:14.275290', '0', '3909'),
                    (u'lily-\u062a', '2011-01-05T02:19:14.275290', 0, 390),
                    ('mu', '2011-01-05T02:19:14.275290',
                     'md5-of-the-manifest; s3_etag=0', '3909'),
                    ('pfs-obj', '2011-01-05T02:19:14.275290',
                     PFS_ETAG, '3909'),
                    ('rose', '2011-01-05T02:19:14.275290', 0, 303),
                    ('slo', '2011-01-05T02:19:14.275290',
                     'md5-of-the-manifest', '3909'),
                    ('viola', '2011-01-05T02:19:14.275290', '0', 3909),
                    ('with space', '2011-01-05T02:19:14.275290', 0, 390),
                    ('with%20space', '2011-01-05T02:19:14.275290', 0, 390))

    self.objects_list = [
        {'name': item[0], 'last_modified': str(item[1]),
         'content_type': 'application/octet-stream',
         'hash': str(item[2]), 'bytes': str(item[3])}
        for item in self.objects]
    # 'slo' gets an slo_etag like a real SLO listing entry
    self.objects_list[5]['slo_etag'] = '"0"'
    # older versions of 'rose': a delete marker and a regular version
    self.versioned_objects = [{
        'name': 'rose',
        'version_id': '2',
        'hash': '0',
        'bytes': '0',
        'last_modified': '2010-03-01T17:09:51.510928',
        'content_type': DELETE_MARKER_CONTENT_TYPE,
        'is_latest': False,
    }, {
        'name': 'rose',
        'version_id': '1',
        'hash': '1234',
        'bytes': '6',
        'last_modified': '2010-03-01T17:09:50.510928',
        'content_type': 'application/octet-stream',
        'is_latest': False,
    }]

    listing_body = json.dumps(self.objects_list)
    self.prefixes = ['rose', 'viola', 'lily']
    object_list_subdir = [{"subdir": p} for p in self.prefixes]

    # segments container: deletable, and each object's segments too
    self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments',
                        swob.HTTPNoContent, {}, json.dumps([]))
    for name, _, _, _ in self.objects:
        self.swift.register(
            'DELETE',
            '/v1/AUTH_test/bucket+segments/' +
            swob.bytes_to_wsgi(name.encode('utf-8')),
            swob.HTTPNoContent, {}, json.dumps([]))
    self.swift.register(
        'GET',
        '/v1/AUTH_test/bucket+segments?format=json&marker=with%2520space',
        swob.HTTPOk,
        {'Content-Type': 'application/json; charset=utf-8'},
        json.dumps([]))
    self.swift.register(
        'GET', '/v1/AUTH_test/bucket+segments?format=json&marker=',
        swob.HTTPOk, {'Content-Type': 'application/json'}, listing_body)
    # existing / missing / unavailable buckets for the HEAD tests
    self.swift.register(
        'HEAD', '/v1/AUTH_test/junk', swob.HTTPNoContent, {}, None)
    self.swift.register(
        'HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound, {}, None)
    self.swift.register(
        'HEAD', '/v1/AUTH_test/unavailable', swob.HTTPServiceUnavailable,
        {}, None)
    self.swift.register(
        'GET', '/v1/AUTH_test/junk', swob.HTTPOk,
        {'Content-Type': 'application/json'}, listing_body)
    self.swift.register(
        'GET', '/v1/AUTH_test/junk-subdir', swob.HTTPOk,
        {'Content-Type': 'application/json; charset=utf-8'},
        json.dumps(object_list_subdir))
    self.swift.register(
        'GET',
        '/v1/AUTH_test/subdirs?delimiter=/&limit=3',
        swob.HTTPOk, {}, json.dumps([
            {'subdir': 'nothing/'},
            {'subdir': u'but-\u062a/'},
            {'subdir': 'subdirs/'},
        ]))
    def setUp(self):
        # Run the shared s3api fixture setup, then register this class's
        # bucket/object stubs on the fake Swift backend.
        super(TestS3ApiBucket, self).setUp()
        self.setup_objects()
def test_bucket_HEAD(self):
req = Request.blank('/junk',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
def test_bucket_HEAD_error(self):
req = Request.blank('/nojunk',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '404')
self.assertEqual(body, b'') # sanity
def test_bucket_HEAD_503(self):
req = Request.blank('/unavailable',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '503')
self.assertEqual(body, b'') # sanity
def test_bucket_HEAD_slash(self):
req = Request.blank('/junk/',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
def test_bucket_HEAD_slash_error(self):
req = Request.blank('/nojunk/',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '404')
@s3acl
def test_bucket_GET_error(self):
code = self._test_method_error('GET', '/bucket', swob.HTTPUnauthorized)
self.assertEqual(code, 'SignatureDoesNotMatch')
code = self._test_method_error('GET', '/bucket', swob.HTTPForbidden)
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error('GET', '/bucket', swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchBucket')
code = self._test_method_error('GET', '/bucket',
swob.HTTPServiceUnavailable)
self.assertEqual(code, 'ServiceUnavailable')
code = self._test_method_error('GET', '/bucket', swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
    @s3acl
    def test_bucket_GET_non_json(self):
        """A non-JSON container listing yields InternalError and logs the body."""
        # Suppose some middleware accidentally makes it return txt instead
        resp_body = b'\n'.join([b'obj%d' % i for i in range(100)])
        self.swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPOk, {},
                            resp_body)
        # When we do our GET...
        req = Request.blank('/bucket',
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        # ...there isn't much choice but to error...
        self.assertEqual(self._get_error_code(body), 'InternalError')
        # ... but we should at least log the body to aid in debugging
        self.assertIn(
            'Got non-JSON response trying to list /bucket: %r'
            % (resp_body[:60] + b'...'),
            self.s3api.logger.get_lines_for_level('error'))
def test_bucket_GET(self):
bucket_name = 'junk'
req = Request.blank('/%s' % bucket_name,
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
elem = fromstring(body, 'ListBucketResult')
name = elem.find('./Name').text
self.assertEqual(name, bucket_name)
objects = elem.iterchildren('Contents')
items = []
for o in objects:
items.append((o.find('./Key').text, o.find('./ETag').text))
self.assertEqual('2011-01-05T02:19:15.000Z',
o.find('./LastModified').text)
expected = [
(i[0].encode('utf-8') if six.PY2 else i[0],
PFS_ETAG if i[0] == 'pfs-obj' else
'"0-N"' if i[0] == 'slo' else '"0"')
for i in self.objects
]
self.assertEqual(items, expected)
    def test_bucket_GET_last_modified_rounding(self):
        """Sub-second last_modified values round UP to the next whole second."""
        objects_list = [
            {'name': 'a', 'last_modified': '2011-01-05T02:19:59.275290',
             'content_type': 'application/octet-stream',
             'hash': 'ahash', 'bytes': '12345'},
            {'name': 'b', 'last_modified': '2011-01-05T02:19:59.000000',
             'content_type': 'application/octet-stream',
             'hash': 'ahash', 'bytes': '12345'},
        ]
        self.swift.register(
            'GET', '/v1/AUTH_test/junk',
            swob.HTTPOk, {'Content-Type': 'application/json'},
            json.dumps(objects_list))
        req = Request.blank('/junk',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListBucketResult')
        name = elem.find('./Name').text
        self.assertEqual(name, 'junk')
        objects = elem.iterchildren('Contents')
        actual = [(obj.find('./Key').text, obj.find('./LastModified').text)
                  for obj in objects]
        # 'a' (:59.275290) rounds up to :00 of the next minute;
        # 'b' (:59.000000) is already whole and stays put
        self.assertEqual(
            [('a', '2011-01-05T02:20:00.000Z'),
             ('b', '2011-01-05T02:19:59.000Z')],
            actual)
    def test_bucket_GET_url_encoded(self):
        """With encoding-type=url the listing's keys come back percent-encoded."""
        bucket_name = 'junk'
        req = Request.blank('/%s?encoding-type=url' % bucket_name,
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListBucketResult')
        name = elem.find('./Name').text
        self.assertEqual(name, bucket_name)
        objects = elem.iterchildren('Contents')
        items = []
        for o in objects:
            items.append((o.find('./Key').text, o.find('./ETag').text))
            self.assertEqual('2011-01-05T02:19:15.000Z',
                             o.find('./LastModified').text)
        # keys are quote()d, unlike the plain listing in test_bucket_GET
        self.assertEqual(items, [
            (quote(i[0].encode('utf-8')),
             PFS_ETAG if i[0] == 'pfs-obj' else
             '"0-N"' if i[0] == 'slo' else '"0"')
            for i in self.objects])
def test_bucket_GET_subdir(self):
bucket_name = 'junk-subdir'
req = Request.blank('/%s' % bucket_name,
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
elem = fromstring(body, 'ListBucketResult')
name = elem.find('./Name').text
self.assertEqual(name, bucket_name)
prefixes = elem.findall('CommonPrefixes')
self.assertEqual(len(prefixes), len(self.prefixes))
for p in prefixes:
self.assertTrue(p.find('./Prefix').text in self.prefixes)
    def test_bucket_GET_is_truncated(self):
        """IsTruncated/NextMarker reflect whether max-keys cut the listing."""
        bucket_name = 'junk'
        # max-keys equal to the object count: nothing is cut off
        req = Request.blank(
            '/%s?max-keys=%d' % (bucket_name, len(self.objects)),
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        # one fewer than the object count: listing is truncated
        req = Request.blank(
            '/%s?max-keys=%d' % (bucket_name, len(self.objects) - 1),
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'true')
        # truncated delimiter listing: NextMarker is the second subdir
        req = Request.blank('/subdirs?delimiter=/&max-keys=2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'true')
        # on py2 the element text is the utf-8-encoded bytes; py3 gets text
        if six.PY2:
            self.assertEqual(elem.find('./NextMarker').text,
                             u'but-\u062a/'.encode('utf-8'))
        else:
            self.assertEqual(elem.find('./NextMarker').text,
                             u'but-\u062a/')
    def test_bucket_GET_is_truncated_url_encoded(self):
        """Like test_bucket_GET_is_truncated, but with encoding-type=url."""
        bucket_name = 'junk'
        req = Request.blank(
            '/%s?encoding-type=url&max-keys=%d' % (
                bucket_name, len(self.objects)),
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        req = Request.blank(
            '/%s?encoding-type=url&max-keys=%d' % (
                bucket_name, len(self.objects) - 1),
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'true')
        req = Request.blank('/subdirs?encoding-type=url&delimiter=/&'
                            'max-keys=2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'true')
        # with url encoding, NextMarker is percent-encoded on both py2/py3
        self.assertEqual(elem.find('./NextMarker').text,
                         quote(u'but-\u062a/'.encode('utf-8')))
    def test_bucket_GET_v2_is_truncated(self):
        """ListObjectsV2 reports KeyCount/NextContinuationToken on truncation."""
        bucket_name = 'junk'
        # exact fit: not truncated, KeyCount == object count
        req = Request.blank(
            '/%s?list-type=2&max-keys=%d' % (bucket_name, len(self.objects)),
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./KeyCount').text, str(len(self.objects)))
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        # one fewer: truncated, continuation token present
        req = Request.blank(
            '/%s?list-type=2&max-keys=%d' % (bucket_name,
                                             len(self.objects) - 1),
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertIsNotNone(elem.find('./NextContinuationToken'))
        self.assertEqual(elem.find('./KeyCount').text,
                         str(len(self.objects) - 1))
        self.assertEqual(elem.find('./IsTruncated').text, 'true')
        # delimiter listing: common prefixes count toward KeyCount
        req = Request.blank('/subdirs?list-type=2&delimiter=/&max-keys=2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertIsNotNone(elem.find('./NextContinuationToken'))
        self.assertEqual(elem.find('./KeyCount').text, '2')
        self.assertEqual(elem.find('./IsTruncated').text, 'true')
def test_bucket_GET_max_keys(self):
bucket_name = 'junk'
req = Request.blank('/%s?max-keys=5' % bucket_name,
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
elem = fromstring(body, 'ListBucketResult')
self.assertEqual(elem.find('./MaxKeys').text, '5')
_, path = self.swift.calls[-1]
_, query_string = path.split('?')
args = dict(parse_qsl(query_string))
self.assertEqual(args['limit'], '6')
req = Request.blank('/%s?max-keys=5000' % bucket_name,
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
elem = fromstring(body, 'ListBucketResult')
self.assertEqual(elem.find('./MaxKeys').text, '5000')
_, path = self.swift.calls[-1]
_, query_string = path.split('?')
args = dict(parse_qsl(query_string))
self.assertEqual(args['limit'], '1001')
def test_bucket_GET_str_max_keys(self):
bucket_name = 'junk'
req = Request.blank('/%s?max-keys=invalid' % bucket_name,
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
def test_bucket_GET_negative_max_keys(self):
bucket_name = 'junk'
req = Request.blank('/%s?max-keys=-1' % bucket_name,
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
def test_bucket_GET_over_32bit_int_max_keys(self):
bucket_name = 'junk'
req = Request.blank('/%s?max-keys=%s' %
(bucket_name, MAX_32BIT_INT + 1),
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
def test_bucket_GET_passthroughs(self):
bucket_name = 'junk'
req = Request.blank('/%s?delimiter=a&marker=b&prefix=c' % bucket_name,
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
elem = fromstring(body, 'ListBucketResult')
self.assertEqual(elem.find('./Prefix').text, 'c')
self.assertEqual(elem.find('./Marker').text, 'b')
self.assertEqual(elem.find('./Delimiter').text, 'a')
_, path = self.swift.calls[-1]
_, query_string = path.split('?')
args = dict(parse_qsl(query_string))
self.assertEqual(args['delimiter'], 'a')
self.assertEqual(args['marker'], 'b')
self.assertEqual(args['prefix'], 'c')
    def test_bucket_GET_v2_passthroughs(self):
        """V2 delimiter/start-after/prefix are echoed and forwarded to Swift."""
        bucket_name = 'junk'
        req = Request.blank(
            '/%s?list-type=2&delimiter=a&start-after=b&prefix=c' % bucket_name,
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./Prefix').text, 'c')
        self.assertEqual(elem.find('./StartAfter').text, 'b')
        self.assertEqual(elem.find('./Delimiter').text, 'a')
        _, path = self.swift.calls[-1]
        _, query_string = path.split('?')
        args = dict(parse_qsl(query_string))
        self.assertEqual(args['delimiter'], 'a')
        # "start-after" is converted to "marker"
        self.assertEqual(args['marker'], 'b')
        self.assertEqual(args['prefix'], 'c')
    def test_bucket_GET_with_nonascii_queries(self):
        """Non-ASCII query params survive the round-trip to Swift.

        \xef\xbc\xa1/\xa2/\xa3 are the UTF-8 byte sequences for fullwidth
        A/B/C in WSGI-string form.
        """
        bucket_name = 'junk'
        req = Request.blank(
            '/%s?delimiter=\xef\xbc\xa1&marker=\xef\xbc\xa2&'
            'prefix=\xef\xbc\xa3' % bucket_name,
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        # without encoding-type, elements echo the decoded native strings
        self.assertEqual(elem.find('./Prefix').text,
                         swob.wsgi_to_str('\xef\xbc\xa3'))
        self.assertEqual(elem.find('./Marker').text,
                         swob.wsgi_to_str('\xef\xbc\xa2'))
        self.assertEqual(elem.find('./Delimiter').text,
                         swob.wsgi_to_str('\xef\xbc\xa1'))
        _, path = self.swift.calls[-1]
        _, query_string = path.split('?')
        args = [part.partition('=')[::2] for part in query_string.split('&')]
        # the backend request always carries percent-encoded values
        self.assertEqual(sorted(args), [
            ('delimiter', '%EF%BC%A1'),
            ('limit', '1001'),
            ('marker', '%EF%BC%A2'),
            ('prefix', '%EF%BC%A3'),
        ])
        req = Request.blank(
            '/%s?delimiter=\xef\xbc\xa1&marker=\xef\xbc\xa2&'
            'prefix=\xef\xbc\xa3&encoding-type=url' % bucket_name,
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        # with encoding-type=url, the echoed elements stay percent-encoded
        self.assertEqual(elem.find('./Prefix').text, '%EF%BC%A3')
        self.assertEqual(elem.find('./Marker').text, '%EF%BC%A2')
        self.assertEqual(elem.find('./Delimiter').text, '%EF%BC%A1')
        _, path = self.swift.calls[-1]
        _, query_string = path.split('?')
        args = [part.partition('=')[::2] for part in query_string.split('&')]
        self.assertEqual(sorted(args), [
            ('delimiter', '%EF%BC%A1'),
            ('limit', '1001'),
            ('marker', '%EF%BC%A2'),
            ('prefix', '%EF%BC%A3'),
        ])
    def test_bucket_GET_v2_with_nonascii_queries(self):
        """V2 analogue of test_bucket_GET_with_nonascii_queries."""
        bucket_name = 'junk'
        req = Request.blank(
            '/%s?list-type=2&delimiter=\xef\xbc\xa1&start-after=\xef\xbc\xa2&'
            'prefix=\xef\xbc\xa3' % bucket_name,
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./Prefix').text,
                         swob.wsgi_to_str('\xef\xbc\xa3'))
        self.assertEqual(elem.find('./StartAfter').text,
                         swob.wsgi_to_str('\xef\xbc\xa2'))
        self.assertEqual(elem.find('./Delimiter').text,
                         swob.wsgi_to_str('\xef\xbc\xa1'))
        _, path = self.swift.calls[-1]
        _, query_string = path.split('?')
        args = [part.partition('=')[::2] for part in query_string.split('&')]
        # start-after becomes the backend 'marker' param
        self.assertEqual(sorted(args), [
            ('delimiter', '%EF%BC%A1'),
            ('limit', '1001'),
            ('marker', '%EF%BC%A2'),
            ('prefix', '%EF%BC%A3'),
        ])
        req = Request.blank(
            '/%s?list-type=2&delimiter=\xef\xbc\xa1&start-after=\xef\xbc\xa2&'
            'prefix=\xef\xbc\xa3&encoding-type=url' % bucket_name,
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        elem = fromstring(body, 'ListBucketResult')
        self.assertEqual(elem.find('./Prefix').text, '%EF%BC%A3')
        self.assertEqual(elem.find('./StartAfter').text, '%EF%BC%A2')
        self.assertEqual(elem.find('./Delimiter').text, '%EF%BC%A1')
        _, path = self.swift.calls[-1]
        _, query_string = path.split('?')
        args = [part.partition('=')[::2] for part in query_string.split('&')]
        self.assertEqual(sorted(args), [
            ('delimiter', '%EF%BC%A1'),
            ('limit', '1001'),
            ('marker', '%EF%BC%A2'),
            ('prefix', '%EF%BC%A3'),
        ])
def test_bucket_GET_with_delimiter_max_keys(self):
bucket_name = 'junk'
req = Request.blank('/%s?delimiter=a&max-keys=4' % bucket_name,
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
elem = fromstring(body, 'ListBucketResult')
self.assertEqual(elem.find('./NextMarker').text,
self.objects_list[3]['name'])
self.assertEqual(elem.find('./MaxKeys').text, '4')
self.assertEqual(elem.find('./IsTruncated').text, 'true')
    def test_bucket_GET_v2_with_delimiter_max_keys(self):
        """NextContinuationToken from a V2 page resumes the listing."""
        bucket_name = 'junk'
        req = Request.blank(
            '/%s?list-type=2&delimiter=a&max-keys=2' % bucket_name,
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListBucketResult')
        next_token = elem.find('./NextContinuationToken')
        self.assertIsNotNone(next_token)
        self.assertEqual(elem.find('./MaxKeys').text, '2')
        self.assertEqual(elem.find('./IsTruncated').text, 'true')
        # resume with the token; the next page starts after the first two keys
        req = Request.blank(
            '/%s?list-type=2&delimiter=a&max-keys=2&continuation-token=%s' %
            (bucket_name, next_token.text),
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListBucketResult')
        names = [o.find('./Key').text for o in elem.iterchildren('Contents')]
        self.assertEqual(names[0], 'lily')
def test_bucket_GET_subdir_with_delimiter_max_keys(self):
bucket_name = 'junk-subdir'
req = Request.blank('/%s?delimiter=a&max-keys=1' % bucket_name,
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
elem = fromstring(body, 'ListBucketResult')
self.assertEqual(elem.find('./NextMarker').text, 'rose')
self.assertEqual(elem.find('./MaxKeys').text, '1')
self.assertEqual(elem.find('./IsTruncated').text, 'true')
    def test_bucket_GET_v2_fetch_owner(self):
        """V2 listings include Owner elements only when fetch-owner=true."""
        bucket_name = 'junk'
        # default: no Owner in Contents
        req = Request.blank('/%s?list-type=2' % bucket_name,
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListBucketResult')
        name = elem.find('./Name').text
        self.assertEqual(name, bucket_name)
        objects = elem.iterchildren('Contents')
        for o in objects:
            self.assertIsNone(o.find('./Owner'))
        # with fetch-owner=true: every Contents entry carries an Owner
        req = Request.blank('/%s?list-type=2&fetch-owner=true' % bucket_name,
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListBucketResult')
        name = elem.find('./Name').text
        self.assertEqual(name, bucket_name)
        objects = elem.iterchildren('Contents')
        for o in objects:
            self.assertIsNotNone(o.find('./Owner'))
    def test_bucket_GET_with_versions_versioning_not_configured(self):
        """?versions on an unversioned bucket lists everything as 'null'.

        Every current object appears as a Version with VersionId 'null',
        IsLatest true, and there are no DeleteMarkers.
        """
        for obj in self.objects:
            self.swift.register(
                'HEAD', '/v1/AUTH_test/junk/%s' % quote(obj[0].encode('utf8')),
                swob.HTTPOk, {}, None)
        self._add_versions_request(versioned_objects=[])
        req = Request.blank('/junk?versions',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./Name').text, 'junk')
        self.assertIsNone(elem.find('./Prefix').text)
        self.assertIsNone(elem.find('./KeyMarker').text)
        self.assertIsNone(elem.find('./VersionIdMarker').text)
        self.assertEqual(elem.find('./MaxKeys').text, '1000')
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        self.assertEqual(elem.findall('./DeleteMarker'), [])
        versions = elem.findall('./Version')
        objects = list(self.objects)
        if six.PY2:
            expected = [v[0].encode('utf-8') for v in objects]
        else:
            expected = [v[0] for v in objects]
        self.assertEqual([v.find('./Key').text for v in versions], expected)
        self.assertEqual([v.find('./IsLatest').text for v in versions],
                         ['true' for v in objects])
        self.assertEqual([v.find('./VersionId').text for v in versions],
                         ['null' for v in objects])
        # Last modified in self.objects is 2011-01-05T02:19:14.275290 but
        # the returned value is rounded up to 2011-01-05T02:19:15Z
        self.assertEqual([v.find('./LastModified').text for v in versions],
                         ['2011-01-05T02:19:15.000Z'] * len(objects))
        self.assertEqual([v.find('./ETag').text for v in versions],
                         [PFS_ETAG if v[0] == 'pfs-obj' else
                          '"0-N"' if v[0] == 'slo' else '"0"'
                          for v in objects])
        self.assertEqual([v.find('./Size').text for v in versions],
                         [str(v[3]) for v in objects])
        self.assertEqual([v.find('./Owner/ID').text for v in versions],
                         ['test:tester' for v in objects])
        self.assertEqual([v.find('./Owner/DisplayName').text
                          for v in versions],
                         ['test:tester' for v in objects])
        self.assertEqual([v.find('./StorageClass').text for v in versions],
                         ['STANDARD' for v in objects])
def _add_versions_request(self, orig_objects=None, versioned_objects=None,
bucket='junk'):
if orig_objects is None:
orig_objects = self.objects_list
if versioned_objects is None:
versioned_objects = self.versioned_objects
all_versions = versioned_objects + [
dict(i, version_id='null', is_latest=True)
for i in orig_objects]
all_versions.sort(key=lambda o: (
o['name'], '' if o['version_id'] == 'null' else o['version_id']))
self.swift.register(
'GET', '/v1/AUTH_test/%s' % bucket, swob.HTTPOk,
{'Content-Type': 'application/json'}, json.dumps(all_versions))
def _assert_delete_markers(self, elem):
delete_markers = elem.findall('./DeleteMarker')
self.assertEqual(len(delete_markers), 1)
self.assertEqual(delete_markers[0].find('./IsLatest').text, 'false')
self.assertEqual(delete_markers[0].find('./VersionId').text, '2')
self.assertEqual(delete_markers[0].find('./Key').text, 'rose')
    def test_bucket_GET_with_versions(self):
        """?versions lists every version: 'null' latest plus older ones."""
        self._add_versions_request()
        req = Request.blank('/junk?versions',
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./Name').text, 'junk')
        self._assert_delete_markers(elem)
        versions = elem.findall('./Version')
        # all current objects plus one old version of 'rose'
        self.assertEqual(len(versions), len(self.objects) + 1)
        expected = []
        for o in self.objects_list:
            name = o['name']
            if six.PY2:
                name = name.encode('utf8')
            expected.append((name, 'true', 'null'))
            if name == 'rose':
                # 'rose' also has an older, non-latest version '1'
                expected.append((name, 'false', '1'))
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in versions
        ]
        self.assertEqual(expected, discovered)
    def test_bucket_GET_with_versions_with_max_keys(self):
        """max-keys on a versions listing counts markers and versions alike."""
        self._add_versions_request()
        req = Request.blank('/junk?versions&max-keys=7',
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./MaxKeys').text, '7')
        self.assertEqual(elem.find('./IsTruncated').text, 'true')
        self._assert_delete_markers(elem)
        versions = elem.findall('./Version')
        # 7 keys = 1 delete marker + 6 versions
        self.assertEqual(len(versions), 6)
        expected = []
        for o in self.objects_list[:5]:
            name = o['name']
            if six.PY2:
                name = name.encode('utf8')
            expected.append((name, 'true', 'null'))
            if name == 'rose':
                expected.append((name, 'false', '1'))
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in versions
        ]
        self.assertEqual(expected, discovered)
    def test_bucket_GET_with_versions_with_max_keys_and_key_marker(self):
        """key-marker skips ahead; listing resumes at 'rose' versions."""
        self._add_versions_request(orig_objects=self.objects_list[4:])
        req = Request.blank('/junk?versions&max-keys=3&key-marker=ros',
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./MaxKeys').text, '3')
        self.assertEqual(elem.find('./IsTruncated').text, 'true')
        self._assert_delete_markers(elem)
        versions = elem.findall('./Version')
        # 3 keys = 1 delete marker + these 2 versions
        self.assertEqual(len(versions), 2)
        expected = [
            ('rose', 'true', 'null'),
            ('rose', 'false', '1'),
        ]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in versions
        ]
        self.assertEqual(expected, discovered)
    def test_bucket_GET_versions_with_key_marker_and_version_id_marker(self):
        """key-marker + version-id-marker resume mid-key in a versions list."""
        container_listing = [{
            "bytes": 8192,
            "content_type": "binary/octet-stream",
            "hash": "221994040b14294bdf7fbc128e66633c",
            "last_modified": "2019-08-16T19:39:53.152780",
            "name": "subdir/foo",
        }]
        versions_listing = [{
            'bytes': 0,
            'content_type': DELETE_MARKER_CONTENT_TYPE,
            'hash': '0',
            "last_modified": "2019-08-19T19:05:33.565940",
            'name': 'subdir/bar',
            "version_id": "1565241533.55320",
            'is_latest': True,
        }, {
            "bytes": 8192,
            "content_type": "binary/octet-stream",
            "hash": "221994040b14294bdf7fbc128e66633c",
            "last_modified": "2019-08-16T19:39:53.508510",
            "name": "subdir/bar",
            "version_id": "1564984393.68962",
            'is_latest': False,
        }, {
            "bytes": 8192,
            "content_type": "binary/octet-stream",
            "hash": "221994040b14294bdf7fbc128e66633c",
            "last_modified": "2019-08-16T19:39:42.673260",
            "name": "subdir/foo",
            "version_id": "1565984382.67326",
            'is_latest': False,
        }]
        # 1) version-id-marker newer than any 'subdir/bar' version: the
        # latest delete marker is still included
        self._add_versions_request(container_listing, versions_listing,
                                   bucket='mybucket')
        req = Request.blank(
            '/mybucket?versions&key-marker=subdir/bar&'
            'version-id-marker=1566589611.065522',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        delete_markers = elem.findall('./DeleteMarker')
        self.assertEqual(['subdir/bar'], [
            o.find('Key').text for o in delete_markers])
        expected = [
            ('subdir/bar', 'false', '1564984393.68962'),
            ('subdir/foo', 'true', 'null'),
            ('subdir/foo', 'false', '1565984382.67326'),
        ]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./Version')
        ]
        self.assertEqual(expected, discovered)
        # 2) marker equal to the delete marker's version: it is excluded
        self._add_versions_request(container_listing, versions_listing[1:],
                                   bucket='mybucket')
        req = Request.blank(
            '/mybucket?versions&key-marker=subdir/bar&'
            'version-id-marker=1565241533.55320',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        delete_markers = elem.findall('./DeleteMarker')
        self.assertEqual(0, len(delete_markers))
        expected = [
            ('subdir/bar', 'false', '1564984393.68962'),
            ('subdir/foo', 'true', 'null'),
            ('subdir/foo', 'false', '1565984382.67326'),
        ]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./Version')
        ]
        self.assertEqual(expected, discovered)
        # 3) marker 'null' on subdir/foo: only the older foo version remains
        self._add_versions_request([], versions_listing[-1:],
                                   bucket='mybucket')
        req = Request.blank(
            '/mybucket?versions&key-marker=subdir/foo&'
            'version-id-marker=null',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        delete_markers = elem.findall('./DeleteMarker')
        self.assertEqual(0, len(delete_markers))
        expected = [
            ('subdir/foo', 'false', '1565984382.67326'),
        ]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./Version')
        ]
        self.assertEqual(expected, discovered)
    def test_bucket_GET_versions_with_version_id_marker(self):
        """version-id-marker walks 'rose' versions newest-to-oldest."""
        self._add_versions_request()
        req = Request.blank(
            '/junk?versions',
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        # sanity
        elem = fromstring(body, 'ListVersionsResult')
        expected = [('rose', 'false', '2')]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./DeleteMarker')
        ]
        self.assertEqual(expected, discovered)
        expected = [
            ('lily', 'true', 'null'),
            (b'lily-\xd8\xaa', 'true', 'null'),
            ('mu', 'true', 'null'),
            ('pfs-obj', 'true', 'null'),
            ('rose', 'true', 'null'),
            ('rose', 'false', '1'),
            ('slo', 'true', 'null'),
            ('viola', 'true', 'null'),
            ('with space', 'true', 'null'),
            ('with%20space', 'true', 'null'),
        ]
        if not six.PY2:
            # on py3 the non-ascii key comes back as text, not bytes
            item = list(expected[1])
            item[0] = item[0].decode('utf8')
            expected[1] = tuple(item)
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./Version')
        ]
        self.assertEqual(expected, discovered)
        # marker 'null': resume after rose's current version
        self._add_versions_request(self.objects_list[5:])
        req = Request.blank(
            '/junk?versions&key-marker=rose&version-id-marker=null',
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        delete_markers = elem.findall('./DeleteMarker')
        self.assertEqual(len(delete_markers), 1)
        expected = [
            ('rose', 'false', '1'),
            ('slo', 'true', 'null'),
            ('viola', 'true', 'null'),
            ('with space', 'true', 'null'),
            ('with%20space', 'true', 'null'),
        ]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./Version')
        ]
        self.assertEqual(expected, discovered)
        # N.B. versions are sorted most recent to oldest
        # marker '2' (the delete marker): it drops out of the listing
        self._add_versions_request(self.objects_list[5:],
                                   self.versioned_objects[1:])
        req = Request.blank(
            '/junk?versions&key-marker=rose&version-id-marker=2',
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        delete_markers = elem.findall('./DeleteMarker')
        self.assertEqual(len(delete_markers), 0)
        expected = [
            ('rose', 'false', '1'),
            ('slo', 'true', 'null'),
            ('viola', 'true', 'null'),
            ('with space', 'true', 'null'),
            ('with%20space', 'true', 'null'),
        ]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./Version')
        ]
        self.assertEqual(expected, discovered)
        # marker '1' (oldest rose version): no rose entries remain
        self._add_versions_request(self.objects_list[5:],
                                   self.versioned_objects[2:])
        req = Request.blank(
            '/junk?versions&key-marker=rose&version-id-marker=1',
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./IsTruncated').text, 'false')
        delete_markers = elem.findall('./DeleteMarker')
        self.assertEqual(len(delete_markers), 0)
        expected = [
            ('slo', 'true', 'null'),
            ('viola', 'true', 'null'),
            ('with space', 'true', 'null'),
            ('with%20space', 'true', 'null'),
        ]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./Version')
        ]
        self.assertEqual(expected, discovered)
    def test_bucket_GET_versions_non_existent_version_id_marker(self):
        """A 'null' version-id-marker is passed through to the backend.

        The marker is forwarded verbatim as ``version_marker`` on the
        container listing request; the listing still includes the delete
        marker and all versions at/after the key marker.
        """
        self._add_versions_request(orig_objects=self.objects_list[5:])
        req = Request.blank(
            '/junk?versions&key-marker=rose&'
            'version-id-marker=null',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200', body)
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./Name').text, 'junk')
        delete_markers = elem.findall('./DeleteMarker')
        self.assertEqual(len(delete_markers), 1)
        expected = [
            ('rose', 'false', '1'),
            ('slo', 'true', 'null'),
            ('with space', 'true', 'null'),
            ('with%20space', 'true', 'null'),
            ('viola', 'true', 'null'),
        ]
        expected = [
            ('rose', 'false', '1'),
            ('slo', 'true', 'null'),
            ('viola', 'true', 'null'),
            ('with space', 'true', 'null'),
            ('with%20space', 'true', 'null'),
        ]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./Version')
        ]
        self.assertEqual(expected, discovered)
        # exactly one backend listing call, with the marker forwarded
        self.assertEqual(self.swift.calls, [
            ('GET', normalize_path('/v1/AUTH_test/junk?'
             'limit=1001&marker=rose&version_marker=null&versions=')),
        ])
    def test_bucket_GET_versions_prefix(self):
        """Versions listing with a prefix merges current and old versions.

        The current object ('subdir/foo'), an old version of it, a
        latest version of 'subdir/bar' and a non-latest delete marker
        are interleaved; the delete marker is reported separately from
        the <Version> elements.
        """
        container_listing = [{
            "bytes": 8192,
            "content_type": "binary/octet-stream",
            "hash": "221994040b14294bdf7fbc128e66633c",
            "last_modified": "2019-08-16T19:39:53.152780",
            "name": "subdir/foo",
        }]
        versions_listing = [{
            "bytes": 8192,
            "content_type": "binary/octet-stream",
            "hash": "221994040b14294bdf7fbc128e66633c",
            "last_modified": "2019-08-16T19:39:53.508510",
            "name": "subdir/bar",
            "version_id": "1565984393.68962",
            "is_latest": True,
        }, {
            'bytes': 0,
            'content_type': DELETE_MARKER_CONTENT_TYPE,
            'hash': '0',
            "last_modified": "2019-08-19T19:05:33.565940",
            'name': 'subdir/bar',
            'version_id': '1566241533.55320',
            'is_latest': False,
        }, {
            "bytes": 8192,
            "content_type": "binary/octet-stream",
            "hash": "221994040b14294bdf7fbc128e66633c",
            "last_modified": "2019-08-16T19:39:42.673260",
            "name": "subdir/foo",
            "version_id": "1565984382.67326",
            'is_latest': False,
        }]
        self._add_versions_request(container_listing, versions_listing)
        req = Request.blank(
            '/junk?versions&prefix=subdir/',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'ListVersionsResult')
        self.assertEqual(elem.find('./Name').text, 'junk')
        delete_markers = elem.findall('./DeleteMarker')
        self.assertEqual(len(delete_markers), 1)
        expected = [
            ('subdir/bar', 'true', '1565984393.68962'),
            ('subdir/foo', 'true', 'null'),
            ('subdir/foo', 'false', '1565984382.67326'),
        ]
        discovered = [
            tuple(e.find('./%s' % key).text for key in (
                'Key', 'IsLatest', 'VersionId'))
            for e in elem.findall('./Version')
        ]
        self.assertEqual(expected, discovered)
        # the prefix is forwarded to the single backend listing request
        self.assertEqual(self.swift.calls, [
            ('GET', normalize_path('/v1/AUTH_test/junk'
             '?limit=1001&prefix=subdir/&versions=')),
        ])
    @s3acl
    def test_bucket_PUT_error(self):
        """Map backend/validation failures on bucket PUT to S3 errors.

        Covers bad Content-Length values, auth failures, conflicting
        ownership, backend 5xx responses and a range of invalid bucket
        names (S3 naming rules: no '+', not an IP, no '.-'/'-.', no '*',
        length between 3 and 63 characters).
        """
        code = self._test_method_error('PUT', '/bucket', swob.HTTPCreated,
                                       headers={'Content-Length': 'a'})
        self.assertEqual(code, 'InvalidArgument')
        code = self._test_method_error('PUT', '/bucket', swob.HTTPCreated,
                                       headers={'Content-Length': '-1'})
        self.assertEqual(code, 'InvalidArgument')
        code = self._test_method_error('PUT', '/bucket', swob.HTTPUnauthorized)
        self.assertEqual(code, 'SignatureDoesNotMatch')
        code = self._test_method_error('PUT', '/bucket', swob.HTTPForbidden)
        self.assertEqual(code, 'AccessDenied')
        code = self._test_method_error('PUT', '/bucket', swob.HTTPAccepted)
        self.assertEqual(code, 'BucketAlreadyOwnedByYou')
        # a 202 with someone else's ACL owner means the bucket exists
        # but is not ours
        with mock.patch(
                'swift.common.middleware.s3api.s3request.get_container_info',
                return_value={'sysmeta': {'s3api-acl': '{"Owner": "nope"}'}}):
            code = self._test_method_error(
                'PUT', '/bucket', swob.HTTPAccepted)
        self.assertEqual(code, 'BucketAlreadyExists')
        code = self._test_method_error('PUT', '/bucket', swob.HTTPServerError)
        self.assertEqual(code, 'InternalError')
        code = self._test_method_error(
            'PUT', '/bucket', swob.HTTPServiceUnavailable)
        self.assertEqual(code, 'ServiceUnavailable')
        code = self._test_method_error(
            'PUT', '/bucket+bucket', swob.HTTPCreated)
        self.assertEqual(code, 'InvalidBucketName')
        code = self._test_method_error(
            'PUT', '/192.168.11.1', swob.HTTPCreated)
        self.assertEqual(code, 'InvalidBucketName')
        code = self._test_method_error(
            'PUT', '/bucket.-bucket', swob.HTTPCreated)
        self.assertEqual(code, 'InvalidBucketName')
        code = self._test_method_error(
            'PUT', '/bucket-.bucket', swob.HTTPCreated)
        self.assertEqual(code, 'InvalidBucketName')
        code = self._test_method_error('PUT', '/bucket*', swob.HTTPCreated)
        self.assertEqual(code, 'InvalidBucketName')
        code = self._test_method_error('PUT', '/b', swob.HTTPCreated)
        self.assertEqual(code, 'InvalidBucketName')
        code = self._test_method_error(
            'PUT', '/%s' % ''.join(['b' for x in range(64)]),
            swob.HTTPCreated)
        self.assertEqual(code, 'InvalidBucketName')
    @s3acl(s3acl_only=True)
    def test_bucket_PUT_error_non_swift_owner(self):
        """Non-swift-owner accounts may not create buckets under s3_acl."""
        code = self._test_method_error('PUT', '/bucket', swob.HTTPAccepted,
                                       env={'swift_owner': False})
        self.assertEqual(code, 'AccessDenied')
    @s3acl
    def test_bucket_PUT_bucket_already_owned_by_you(self):
        """A 202 Accepted from Swift means the bucket already exists.

        When the existing container belongs to the requester, s3api
        reports 409 BucketAlreadyOwnedByYou.
        """
        self.swift.register(
            'PUT', '/v1/AUTH_test/bucket', swob.HTTPAccepted,
            {'X-Container-Object-Count': 0}, None)
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '409 Conflict')
        self.assertIn(b'BucketAlreadyOwnedByYou', body)
    @s3acl
    def test_bucket_PUT_first_put_fail(self):
        """A failed container PUT stops processing before any ACL POST."""
        self.swift.register(
            'PUT', '/v1/AUTH_test/bucket',
            swob.HTTPServiceUnavailable,
            {'X-Container-Object-Count': 0}, None)
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '503 Service Unavailable')
        # The last call was PUT not POST for acl set
        self.assertEqual(self.swift.calls, [
            ('PUT', '/v1/AUTH_test/bucket'),
        ])
    @s3acl
    def test_bucket_PUT(self):
        """Successful bucket creation returns 200 with a Location header.

        Also verifies creation succeeds with a chunked Transfer-Encoding
        and with an unreadable (never-touched) request body.
        """
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(body, b'')
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(headers['Location'], '/bucket')

        # Apparently some clients will include a chunked transfer-encoding
        # even with no body
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Transfer-Encoding': 'chunked'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(body, b'')
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(headers['Location'], '/bucket')

        # the request body must not be read at all for a bare bucket PUT
        with UnreadableInput(self) as fake_input:
            req = Request.blank(
                '/bucket',
                environ={'REQUEST_METHOD': 'PUT',
                         'wsgi.input': fake_input},
                headers={'Authorization': 'AWS test:tester:hmac',
                         'Date': self.get_date_header()})
            status, headers, body = self.call_s3api(req)
        self.assertEqual(body, b'')
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(headers['Location'], '/bucket')
    def _test_bucket_PUT_with_location(self, root_element):
        """Create a bucket with a LocationConstraint under *root_element*.

        Asserts the PUT succeeds; callers vary the XML root element name.
        """
        elem = Element(root_element)
        SubElement(elem, 'LocationConstraint').text = 'us-east-1'
        xml = tostring(elem)

        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
    @s3acl
    def test_bucket_PUT_with_location(self):
        """Standard CreateBucketConfiguration root element is accepted."""
        self._test_bucket_PUT_with_location('CreateBucketConfiguration')
    @s3acl
    def test_bucket_PUT_with_ami_location(self):
        # ec2-ami-tools apparently uses CreateBucketConstraint instead?
        self._test_bucket_PUT_with_location('CreateBucketConstraint')
    @s3acl
    def test_bucket_PUT_with_strange_location(self):
        # Even crazier: it doesn't seem to matter
        self._test_bucket_PUT_with_location('foo')
    def test_bucket_PUT_with_mixed_case_location(self):
        """Location constraint comparison is case-insensitive.

        Uses a v4 signature whose credential scope region is also
        lower-cased relative to the configured location.
        """
        self.s3api.conf.location = 'RegionOne'
        elem = Element('CreateBucketConfiguration')
        # We've observed some clients (like aws-sdk-net) shift regions
        # to lower case
        SubElement(elem, 'LocationConstraint').text = 'regionone'
        headers = {
            'Authorization': 'AWS4-HMAC-SHA256 ' + ', '.join([
                'Credential=test:tester/%s/regionone/s3/aws4_request' %
                self.get_v4_amz_date_header().split('T', 1)[0],
                'SignedHeaders=host',
                'Signature=X',
            ]),
            'Date': self.get_date_header(),
            'x-amz-content-sha256': 'UNSIGNED-PAYLOAD',
        }
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers=headers,
                            body=tostring(elem))
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200', body)
def test_bucket_PUT_with_canned_acl(self):
req = Request.blank('/bucket',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header(),
'X-Amz-Acl': 'public-read'})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue('X-Container-Read' in headers)
self.assertEqual(headers.get('X-Container-Read'), '.r:*,.rlistings')
self.assertNotIn('X-Container-Sysmeta-S3api-Acl', headers)
    @s3acl(s3acl_only=True)
    def test_bucket_PUT_with_canned_s3acl(self):
        """With s3_acl, a canned ACL is stored as JSON sysmeta instead.

        No Swift-native X-Container-Read header is set; the encoded ACL
        lands in X-Container-Sysmeta-S3api-Acl.
        """
        account = 'test:tester'
        acl = \
            encode_acl('container', ACLPublicRead(Owner(account, account)))
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'X-Amz-Acl': 'public-read'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertNotIn('X-Container-Read', headers)
        self.assertIn('X-Container-Sysmeta-S3api-Acl', headers)
        self.assertEqual(headers.get('X-Container-Sysmeta-S3api-Acl'),
                         acl['x-container-sysmeta-s3api-acl'])
    @s3acl
    def test_bucket_PUT_with_location_error(self):
        """An unknown LocationConstraint yields InvalidLocationConstraint."""
        elem = Element('CreateBucketConfiguration')
        SubElement(elem, 'LocationConstraint').text = 'XXX'
        xml = tostring(elem)

        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body),
                         'InvalidLocationConstraint')
    @s3acl
    def test_bucket_PUT_with_location_invalid_xml(self):
        """A non-XML request body yields MalformedXML."""
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body='invalid_xml')
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'MalformedXML')
    def _test_method_error_delete(self, path, sw_resp):
        """DELETE *path* with the backend HEAD also returning *sw_resp*."""
        self.swift.register('HEAD', '/v1/AUTH_test' + path, sw_resp, {}, None)
        return self._test_method_error('DELETE', path, sw_resp)
@s3acl
def test_bucket_DELETE_error(self):
code = self._test_method_error_delete('/bucket', swob.HTTPUnauthorized)
self.assertEqual(code, 'SignatureDoesNotMatch')
code = self._test_method_error_delete('/bucket', swob.HTTPForbidden)
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error_delete('/bucket', swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchBucket')
code = self._test_method_error_delete('/bucket', swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
# bucket not empty is now validated at s3api
self.swift._responses.get(('HEAD', '/v1/AUTH_test/bucket'))
self.swift.register('HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
{'X-Container-Object-Count': '1'}, None)
req = Request.blank('/bucket',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, _headers, body = self.call_s3api(req)
self.assertEqual('409 Conflict', status)
self.assertEqual('BucketNotEmpty', self._get_error_code(body))
self.assertNotIn('You must delete all versions in the bucket',
self._get_error_message(body))
    @s3acl
    def test_bucket_DELETE_error_with_enabled_versioning(self):
        """Non-empty bucket with versioning enabled: 409 with version hint."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
                            {'X-Container-Object-Count': '1',
                             'X-Container-Sysmeta-Versions-Enabled': 'True'},
                            None)
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, _headers, body = self.call_s3api(req)
        self.assertEqual('409 Conflict', status)
        self.assertEqual('BucketNotEmpty', self._get_error_code(body))
        self.assertIn('You must delete all versions in the bucket',
                      self._get_error_message(body))
    @s3acl
    def test_bucket_DELETE_error_with_suspended_versioning(self):
        """Suspended versioning still triggers the versioned 409 message.

        The hint applies whenever the versioning sysmeta is present,
        regardless of its True/False value.
        """
        self.swift.register('HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
                            {'X-Container-Object-Count': '1',
                             'X-Container-Sysmeta-Versions-Enabled': 'False'},
                            None)
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, _headers, body = self.call_s3api(req)
        self.assertEqual('409 Conflict', status)
        self.assertEqual('BucketNotEmpty', self._get_error_code(body))
        self.assertIn('You must delete all versions in the bucket',
                      self._get_error_message(body))
    @s3acl
    def test_bucket_DELETE(self):
        """Deleting an empty bucket returns 204."""
        # overwrite default HEAD to return x-container-object-count
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
            {'X-Container-Object-Count': 0}, None)

        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')
    @s3acl
    def test_bucket_DELETE_with_empty_versioning(self):
        """An empty '+versioning' companion container does not block DELETE."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket+versioning',
                            swob.HTTPNoContent, {}, None)
        self.swift.register('DELETE', '/v1/AUTH_test/bucket+versioning',
                            swob.HTTPNoContent, {}, None)
        # overwrite default HEAD to return x-container-object-count
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
            {'X-Container-Object-Count': 0}, None)

        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')
    @s3acl
    def test_bucket_DELETE_error_while_segment_bucket_delete(self):
        """A 503 from the segments container aborts the bucket DELETE.

        The original bucket must not be deleted if cleaning up its
        '+segments' container fails.
        """
        # An error occurred while deleting segment objects
        self.swift.register('DELETE', '/v1/AUTH_test/bucket+segments/lily',
                            swob.HTTPServiceUnavailable, {}, json.dumps([]))
        # overwrite default HEAD to return x-container-object-count
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket', swob.HTTPNoContent,
            {'X-Container-Object-Count': 0}, None)

        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': 'DELETE'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '503')
        called = [(method, path) for method, path, _ in
                  self.swift.calls_with_headers]
        # Don't delete original bucket when error occurred in segment container
        self.assertNotIn(('DELETE', '/v1/AUTH_test/bucket'), called)
    def _test_bucket_for_s3acl(self, method, account):
        """Issue *method* on /bucket as *account*; return (status, hdrs, body)."""
        req = Request.blank('/bucket',
                            environ={'REQUEST_METHOD': method},
                            headers={'Authorization': 'AWS %s:hmac' % account,
                                     'Date': self.get_date_header()})

        return self.call_s3api(req)
    @s3acl(s3acl_only=True)
    def test_bucket_GET_without_permission(self):
        """An unrelated account gets AccessDenied on bucket GET."""
        status, headers, body = self._test_bucket_for_s3acl('GET',
                                                            'test:other')
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
    @s3acl(s3acl_only=True)
    def test_bucket_GET_with_read_permission(self):
        """READ grant is sufficient for bucket GET."""
        status, headers, body = self._test_bucket_for_s3acl('GET',
                                                            'test:read')
        self.assertEqual(status.split()[0], '200')
    @s3acl(s3acl_only=True)
    def test_bucket_GET_with_fullcontrol_permission(self):
        """FULL_CONTROL grant allows bucket GET."""
        status, headers, body = \
            self._test_bucket_for_s3acl('GET', 'test:full_control')
        self.assertEqual(status.split()[0], '200')
    @s3acl(s3acl_only=True)
    def test_bucket_GET_with_owner_permission(self):
        """The bucket owner can always GET the bucket."""
        status, headers, body = self._test_bucket_for_s3acl('GET',
                                                            'test:tester')
        self.assertEqual(status.split()[0], '200')
    def _test_bucket_GET_canned_acl(self, bucket):
        """GET the named canned-ACL test bucket as the default tester."""
        req = Request.blank('/%s' % bucket,
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})

        return self.call_s3api(req)
    @s3acl(s3acl_only=True)
    def test_bucket_GET_authenticated_users(self):
        """AuthenticatedUsers group grant allows any signed-in account."""
        status, headers, body = \
            self._test_bucket_GET_canned_acl('authenticated')
        self.assertEqual(status.split()[0], '200')
    @s3acl(s3acl_only=True)
    def test_bucket_GET_all_users(self):
        """AllUsers group grant allows the public bucket GET."""
        status, headers, body = self._test_bucket_GET_canned_acl('public')
        self.assertEqual(status.split()[0], '200')
    @s3acl(s3acl_only=True)
    def test_bucket_DELETE_without_permission(self):
        """Unrelated account: DELETE denied and no backend DELETE issued."""
        status, headers, body = self._test_bucket_for_s3acl('DELETE',
                                                            'test:other')
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        # Don't delete anything in backend Swift
        called = [method for method, _, _ in self.swift.calls_with_headers]
        self.assertNotIn('DELETE', called)
    @s3acl(s3acl_only=True)
    def test_bucket_DELETE_with_write_permission(self):
        """WRITE grant does not permit bucket deletion (owner-only in S3)."""
        status, headers, body = self._test_bucket_for_s3acl('DELETE',
                                                            'test:write')
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        # Don't delete anything in backend Swift
        called = [method for method, _, _ in self.swift.calls_with_headers]
        self.assertNotIn('DELETE', called)
    @s3acl(s3acl_only=True)
    def test_bucket_DELETE_with_fullcontrol_permission(self):
        """Even FULL_CONTROL does not permit deletion by a non-owner."""
        status, headers, body = \
            self._test_bucket_for_s3acl('DELETE', 'test:full_control')
        self.assertEqual(self._get_error_code(body), 'AccessDenied')
        # Don't delete anything in backend Swift
        called = [method for method, _, _ in self.swift.calls_with_headers]
        self.assertNotIn('DELETE', called)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_bucket.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.utils import json
from swift.common.middleware.s3api.s3response import AccessDenied, \
InvalidArgument, S3NotImplemented
from swift.common.middleware.s3api.subresource import User, \
AuthenticatedUsers, AllUsers, \
ACLPrivate, ACLPublicRead, ACLPublicReadWrite, ACLAuthenticatedRead, \
ACLBucketOwnerRead, ACLBucketOwnerFullControl, Owner, ACL, encode_acl, \
decode_acl, canned_acl_grantees, Grantee
from swift.common.middleware.s3api.utils import sysmeta_header
from swift.common.middleware.s3api.exception import InvalidSubresource
class TestS3ApiSubresource(unittest.TestCase):
    """Tests for s3api ACL subresource handling.

    Covers grantee membership semantics, the permission matrices of the
    canned ACLs, XML round-tripping via elem()/from_elem(), and the
    JSON sysmeta-header encoding used to persist ACLs on Swift
    containers and objects.
    """

    def setUp(self):
        # Defaults mirror an s3_acl-enabled deployment that requires an
        # explicit owner on stored ACLs.
        self.s3_acl = True
        self.allow_no_owner = False

    def test_acl_canonical_user(self):
        """A User grantee matches only its own canonical id."""
        grantee = User('test:tester')
        self.assertIn('test:tester', grantee)
        self.assertNotIn('test:tester2', grantee)
        self.assertEqual(str(grantee), 'test:tester')
        self.assertEqual(grantee.elem().find('./ID').text, 'test:tester')

    def test_acl_authenticated_users(self):
        """AuthenticatedUsers matches any user id."""
        grantee = AuthenticatedUsers()
        self.assertIn('test:tester', grantee)
        self.assertIn('test:tester2', grantee)
        uri = 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'
        self.assertEqual(grantee.elem().find('./URI').text, uri)

    def test_acl_all_users(self):
        """AllUsers matches any user id."""
        grantee = AllUsers()
        self.assertIn('test:tester', grantee)
        self.assertIn('test:tester2', grantee)
        uri = 'http://acs.amazonaws.com/groups/global/AllUsers'
        self.assertEqual(grantee.elem().find('./URI').text, uri)

    def check_permission(self, acl, user_id, permission):
        """Return True if *user_id* is granted *permission* under *acl*."""
        try:
            acl.check_permission(user_id, permission)
            return True
        except AccessDenied:
            return False

    def test_acl_private(self):
        """'private': only the owner has any permission."""
        acl = ACLPrivate(Owner(id='test:tester',
                               name='test:tester'),
                         s3_acl=self.s3_acl,
                         allow_no_owner=self.allow_no_owner)
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2', 'READ'))
        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'READ_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'WRITE_ACP'))

    def test_acl_public_read(self):
        """'public-read': everyone may READ; owner keeps full control."""
        acl = ACLPublicRead(Owner(id='test:tester',
                                  name='test:tester'),
                            s3_acl=self.s3_acl,
                            allow_no_owner=self.allow_no_owner)
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'READ_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'WRITE_ACP'))

    def test_acl_public_read_write(self):
        """'public-read-write': everyone may READ and WRITE, not ACPs."""
        acl = ACLPublicReadWrite(Owner(id='test:tester',
                                       name='test:tester'),
                                 s3_acl=self.s3_acl,
                                 allow_no_owner=self.allow_no_owner)
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester2', 'WRITE'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'READ_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'WRITE_ACP'))

    def test_acl_authenticated_read(self):
        """'authenticated-read': any authenticated user may READ."""
        acl = ACLAuthenticatedRead(Owner(id='test:tester',
                                         name='test:tester'),
                                   s3_acl=self.s3_acl,
                                   allow_no_owner=self.allow_no_owner)
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'READ_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'WRITE_ACP'))

    def test_acl_bucket_owner_read(self):
        """'bucket-owner-read': bucket owner gets READ on the object."""
        acl = ACLBucketOwnerRead(
            bucket_owner=Owner('test:tester2', 'test:tester2'),
            object_owner=Owner('test:tester', 'test:tester'),
            s3_acl=self.s3_acl,
            allow_no_owner=self.allow_no_owner)
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'READ_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'WRITE_ACP'))

    def test_acl_bucket_owner_full_control(self):
        """'bucket-owner-full-control': bucket owner gets everything."""
        acl = ACLBucketOwnerFullControl(
            bucket_owner=Owner('test:tester2', 'test:tester2'),
            object_owner=Owner('test:tester', 'test:tester'),
            s3_acl=self.s3_acl,
            allow_no_owner=self.allow_no_owner)
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester2', 'WRITE'))
        self.assertTrue(self.check_permission(acl, 'test:tester2', 'READ_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester2',
                                              'WRITE_ACP'))

    def test_acl_elem(self):
        """elem() emits Owner plus a single FULL_CONTROL grant."""
        acl = ACLPrivate(Owner(id='test:tester',
                               name='test:tester'),
                         s3_acl=self.s3_acl,
                         allow_no_owner=self.allow_no_owner)
        elem = acl.elem()
        self.assertIsNotNone(elem.find('./Owner'))
        self.assertIsNotNone(elem.find('./AccessControlList'))
        grants = [e for e in elem.findall('./AccessControlList/Grant')]
        self.assertEqual(len(grants), 1)
        self.assertEqual(grants[0].find('./Grantee/ID').text, 'test:tester')
        self.assertEqual(
            grants[0].find('./Grantee/DisplayName').text, 'test:tester')

    def test_acl_from_elem(self):
        # check translation from element
        acl = ACLPrivate(Owner(id='test:tester',
                               name='test:tester'),
                         s3_acl=self.s3_acl,
                         allow_no_owner=self.allow_no_owner)
        elem = acl.elem()
        acl = ACL.from_elem(elem, self.s3_acl, self.allow_no_owner)
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2', 'READ'))
        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'READ_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'WRITE_ACP'))

    def test_acl_from_elem_by_id_only(self):
        """from_elem() works when the Owner has no DisplayName element."""
        elem = ACLPrivate(Owner(id='test:tester',
                                name='test:tester'),
                          s3_acl=self.s3_acl,
                          allow_no_owner=self.allow_no_owner).elem()
        elem.find('./Owner').remove(elem.find('./Owner/DisplayName'))
        acl = ACL.from_elem(elem, self.s3_acl, self.allow_no_owner)
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'READ_ACP'))
        self.assertTrue(self.check_permission(acl, 'test:tester', 'WRITE_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2', 'READ'))
        self.assertFalse(self.check_permission(acl, 'test:tester2', 'WRITE'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'READ_ACP'))
        self.assertFalse(self.check_permission(acl, 'test:tester2',
                                               'WRITE_ACP'))

    def test_decode_acl_container(self):
        """Container ACL sysmeta JSON decodes to an ACL object."""
        access_control_policy = \
            {'Owner': 'test:tester',
             'Grant': [{'Permission': 'FULL_CONTROL',
                        'Grantee': 'test:tester'}]}
        headers = {sysmeta_header('container', 'acl'):
                   json.dumps(access_control_policy)}
        acl = decode_acl('container', headers, self.allow_no_owner)

        self.assertEqual(type(acl), ACL)
        self.assertEqual(acl.owner.id, 'test:tester')
        self.assertEqual(len(acl.grants), 1)
        self.assertEqual(str(acl.grants[0].grantee), 'test:tester')
        self.assertEqual(acl.grants[0].permission, 'FULL_CONTROL')

    def test_decode_acl_object(self):
        """Object ACL sysmeta JSON decodes to an ACL object."""
        access_control_policy = \
            {'Owner': 'test:tester',
             'Grant': [{'Permission': 'FULL_CONTROL',
                        'Grantee': 'test:tester'}]}
        headers = {sysmeta_header('object', 'acl'):
                   json.dumps(access_control_policy)}
        acl = decode_acl('object', headers, self.allow_no_owner)

        self.assertEqual(type(acl), ACL)
        self.assertEqual(acl.owner.id, 'test:tester')
        self.assertEqual(len(acl.grants), 1)
        self.assertEqual(str(acl.grants[0].grantee), 'test:tester')
        self.assertEqual(acl.grants[0].permission, 'FULL_CONTROL')

    def test_decode_acl_undefined(self):
        """Missing sysmeta yields an ownerless, grantless ACL."""
        headers = {}
        acl = decode_acl('container', headers, self.allow_no_owner)

        self.assertEqual(type(acl), ACL)
        self.assertIsNone(acl.owner.id)
        self.assertEqual(len(acl.grants), 0)

    def test_decode_acl_empty_list(self):
        """A JSON list (legacy/empty value) yields an empty ACL."""
        headers = {sysmeta_header('container', 'acl'): '[]'}
        acl = decode_acl('container', headers, self.allow_no_owner)
        self.assertEqual(type(acl), ACL)
        self.assertIsNone(acl.owner.id)
        self.assertEqual(len(acl.grants), 0)

    def test_decode_acl_with_invalid_json(self):
        """Malformed JSON in the sysmeta raises InvalidSubresource."""
        headers = {sysmeta_header('container', 'acl'): '['}
        self.assertRaises(
            InvalidSubresource, decode_acl, 'container',
            headers, self.allow_no_owner)

    def test_encode_acl_container(self):
        """encode_acl('container', ...) stores Owner and Grant in JSON."""
        acl = ACLPrivate(Owner(id='test:tester',
                               name='test:tester'))
        acp = encode_acl('container', acl)
        header_value = json.loads(acp[sysmeta_header('container', 'acl')])

        self.assertIn('Owner', header_value)
        self.assertIn('Grant', header_value)
        self.assertEqual('test:tester', header_value['Owner'])
        self.assertEqual(len(header_value['Grant']), 1)

    def test_encode_acl_object(self):
        """encode_acl('object', ...) stores Owner and Grant in JSON."""
        acl = ACLPrivate(Owner(id='test:tester',
                               name='test:tester'))
        acp = encode_acl('object', acl)
        header_value = json.loads(acp[sysmeta_header('object', 'acl')])

        self.assertIn('Owner', header_value)
        self.assertIn('Grant', header_value)
        self.assertEqual('test:tester', header_value['Owner'])
        self.assertEqual(len(header_value['Grant']), 1)

    def test_encode_acl_many_grant(self):
        """encode_acl preserves every grantee in a large grant list."""
        headers = {}
        users = []
        for i in range(99):
            users.append('id=test:tester%s' % str(i))
        users = ','.join(users)
        headers['x-amz-grant-read'] = users
        acl = ACL.from_headers(headers, Owner('test:tester', 'test:tester'))
        acp = encode_acl('container', acl)

        header_value = acp[sysmeta_header('container', 'acl')]
        header_value = json.loads(header_value)

        self.assertIn('Owner', header_value)
        self.assertIn('Grant', header_value)
        self.assertEqual('test:tester', header_value['Owner'])
        self.assertEqual(len(header_value['Grant']), 99)

    def test_from_headers_x_amz_acl(self):
        """Each canned x-amz-acl expands to its documented grantee set."""
        canned_acls = ['public-read', 'public-read-write',
                       'authenticated-read', 'bucket-owner-read',
                       'bucket-owner-full-control', 'log-delivery-write']
        owner = Owner('test:tester', 'test:tester')
        grantee_map = canned_acl_grantees(owner)
        for acl_str in canned_acls:
            acl = ACL.from_headers({'x-amz-acl': acl_str}, owner)
            expected = grantee_map[acl_str]

            self.assertEqual(len(acl.grants), len(expected))  # sanity

            # parse Grant object to permission and grantee
            actual_grants = [(grant.permission, grant.grantee)
                             for grant in acl.grants]

            assertions = zip(sorted(expected), sorted(actual_grants))

            for (expected_permission, expected_grantee), \
                    (permission, grantee) in assertions:
                self.assertEqual(expected_permission, permission)
                self.assertIsInstance(grantee, expected_grantee.__class__)
                if isinstance(grantee, User):
                    self.assertEqual(expected_grantee.id, grantee.id)
                    self.assertEqual(expected_grantee.display_name,
                                     grantee.display_name)

    def test_from_headers_x_amz_acl_invalid(self):
        """An unknown canned ACL raises InvalidArgument with details."""
        with self.assertRaises(InvalidArgument) as cm:
            ACL.from_headers({'x-amz-acl': 'invalid'},
                             Owner('test:tester', 'test:tester'))
        self.assertIn('argument_name', cm.exception.info)
        self.assertEqual(cm.exception.info['argument_name'], 'x-amz-acl')
        self.assertIn('argument_value', cm.exception.info)
        self.assertEqual(cm.exception.info['argument_value'], 'invalid')

    def test_canned_acl_grantees(self):
        """canned_acl_grantees covers exactly the known canned ACLs."""
        grantee_map = canned_acl_grantees(Owner('test:tester', 'test:tester'))
        canned_acls = ['private', 'public-read', 'public-read-write',
                       'authenticated-read', 'bucket-owner-read',
                       'bucket-owner-full-control', 'log-delivery-write']
        for canned_acl in canned_acls:
            self.assertIn(canned_acl, grantee_map)
        self.assertEqual(len(canned_acls), len(grantee_map))  # sanity

    def test_base_grantee(self):
        """Membership test on the abstract Grantee base is unimplemented."""
        grantee = Grantee()
        # assertRaises context manager instead of assigning a lambda
        # to a name (PEP 8 E731)
        with self.assertRaises(S3NotImplemented):
            '' in grantee
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_subresource.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Response
from swift.common.utils import HeaderKeyDict
from swift.common.middleware.s3api.s3response import S3Response
from swift.common.middleware.s3api.utils import sysmeta_prefix
class TestResponse(unittest.TestCase):
    """Unit tests for S3Response translation of backend Swift responses."""
    def test_from_swift_resp_slo(self):
        """A truthy X-Static-Large-Object gives is_slo and an '"etag-N"' ETag."""
        for expected, header_vals in \
                ((True, ('true', '1')), (False, ('false', 'ugahhh', None))):
            for val in header_vals:
                resp = Response(headers={'X-Static-Large-Object': val,
                                         'Etag': 'theetag'})
                s3resp = S3Response.from_swift_resp(resp)
                self.assertEqual(expected, s3resp.is_slo)
                if s3resp.is_slo:
                    self.assertEqual('"theetag-N"', s3resp.headers['ETag'])
                else:
                    self.assertEqual('"theetag"', s3resp.headers['ETag'])
    def test_response_s3api_user_meta_headers(self):
        """x-object-meta-* maps to x-amz-meta-*; '=5F' decodes to '_'.

        Also: the ETag gets quoted, sysmeta is dropped from the user-facing
        headers, and standard headers keep canonical capitalization.
        """
        resp = Response(headers={
            'X-Object-Meta-Foo': 'Bar',
            'X-Object-Meta-Non-\xdcnicode-Value': '\xff',
            'X-Object-Meta-With=5FUnderscore': 'underscored',
            'X-Object-Sysmeta-Baz': 'quux',
            'Etag': 'unquoted',
            'Content-type': 'text/plain',
            'content-length': '0',
        })
        s3resp = S3Response.from_swift_resp(resp)
        self.assertEqual(dict(s3resp.headers), {
            'x-amz-meta-foo': 'Bar',
            'x-amz-meta-non-\xdcnicode-value': '\xff',
            'x-amz-meta-with_underscore': 'underscored',
            'ETag': '"unquoted"',
            'Content-Type': 'text/plain',
            'Content-Length': '0',
        })
    def test_response_s3api_sysmeta_headers(self):
        """s3api sysmeta headers are collected into sysmeta_headers."""
        for _server_type in ('object', 'container'):
            swift_headers = HeaderKeyDict(
                {sysmeta_prefix(_server_type) + 'test': 'ok'})
            resp = Response(headers=swift_headers)
            s3resp = S3Response.from_swift_resp(resp)
            self.assertEqual(swift_headers, s3resp.sysmeta_headers)
    def test_response_s3api_sysmeta_headers_ignore_other_sysmeta(self):
        """Non-s3api sysmeta stays in sw_headers, not in sysmeta_headers."""
        for _server_type in ('object', 'container'):
            swift_headers = HeaderKeyDict(
                # sysmeta not leading sysmeta_prefix even including s3api word
                {'x-%s-sysmeta-test-s3api' % _server_type: 'ok',
                 sysmeta_prefix(_server_type) + 'test': 'ok'})
            resp = Response(headers=swift_headers)
            s3resp = S3Response.from_swift_resp(resp)
            expected_headers = HeaderKeyDict(
                {sysmeta_prefix(_server_type) + 'test': 'ok'})
            self.assertEqual(expected_headers, s3resp.sysmeta_headers)
            self.assertIn('x-%s-sysmeta-test-s3api' % _server_type,
                          s3resp.sw_headers)
    def test_response_s3api_sysmeta_from_swift3_sysmeta(self):
        """Legacy swift3 sysmeta is translated into s3api sysmeta."""
        for _server_type in ('object', 'container'):
            # swift could return older swift3 sysmeta
            swift_headers = HeaderKeyDict(
                {('x-%s-sysmeta-swift3-' % _server_type) + 'test': 'ok'})
            resp = Response(headers=swift_headers)
            s3resp = S3Response.from_swift_resp(resp)
            expected_headers = HeaderKeyDict(
                {sysmeta_prefix(_server_type) + 'test': 'ok'})
            # but the Response class should translate it as s3api sysmeta
            self.assertEqual(expected_headers, s3resp.sysmeta_headers)
    def test_response_swift3_sysmeta_does_not_overwrite_s3api_sysmeta(self):
        """s3api sysmeta wins when legacy swift3 sysmeta shares its name."""
        for _server_type in ('object', 'container'):
            # same key name except sysmeta prefix
            swift_headers = HeaderKeyDict(
                {('x-%s-sysmeta-swift3-' % _server_type) + 'test': 'ng',
                 sysmeta_prefix(_server_type) + 'test': 'ok'})
            resp = Response(headers=swift_headers)
            s3resp = S3Response.from_swift_resp(resp)
            expected_headers = HeaderKeyDict(
                {sysmeta_prefix(_server_type) + 'test': 'ok'})
            # but only s3api sysmeta remains in the response sysmeta_headers
            self.assertEqual(expected_headers, s3resp.sysmeta_headers)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_s3response.py |
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class NotMethodException(Exception):
    """Marker exception used by the s3api test helpers."""
| swift-master | test/unit/common/middleware/s3api/exceptions.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import binascii
import unittest
from datetime import datetime
import functools
from hashlib import sha256
import os
from os.path import join
import time
from mock import patch
import six
import json
from swift.common import swob
from swift.common.swob import Request
from swift.common.middleware.proxy_logging import ProxyLoggingMiddleware
from test.unit import mock_timestamp_now
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.test_s3_acl import s3acl
from swift.common.middleware.s3api.s3request import SigV4Request
from swift.common.middleware.s3api.subresource import ACL, User, encode_acl, \
Owner, Grant
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import mktime, S3Timestamp
from swift.common.middleware.versioned_writes.object_versioning import \
DELETE_MARKER_CONTENT_TYPE
from swift.common.utils import md5
class TestS3ApiObj(S3ApiTestCase):
    def setUp(self):
        """Register canned GET/PUT object responses with the fake backend."""
        super(TestS3ApiObj, self).setUp()
        self.object_body = b'hello'
        self.etag = md5(self.object_body, usedforsecurity=False).hexdigest()
        self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT'
        # Backend headers returned for GET/HEAD on /bucket/object; the
        # tests assert these are passed through (or translated) by s3api.
        self.response_headers = {'Content-Type': 'text/html',
                                 'Content-Length': len(self.object_body),
                                 'Content-Disposition': 'inline',
                                 'Content-Language': 'en',
                                 'x-object-meta-test': 'swift',
                                 'etag': self.etag,
                                 'last-modified': self.last_modified,
                                 'expires': 'Mon, 21 Sep 2015 12:00:00 GMT',
                                 'x-robots-tag': 'nofollow',
                                 'cache-control': 'private'}
        self.swift.register('GET', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk, self.response_headers,
                            self.object_body)
        self.swift.register('GET', '/v1/AUTH_test/bucket/object?symlink=get',
                            swob.HTTPOk, self.response_headers,
                            self.object_body)
        self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                            swob.HTTPCreated,
                            {'etag': self.etag,
                             'last-modified': self.last_modified,
                             'x-object-meta-something': 'oh hai'},
                            None)
    def _test_object_GETorHEAD(self, method):
        """Issue GET or HEAD on /bucket/object and verify the response.

        Checks the status, the storage policy index recorded on the
        request, pass-through of standard headers, ETag quoting,
        x-object-meta-* -> x-amz-meta-* translation, and (GET only) the
        body. Fails if the backend returned any header not accounted for.
        """
        req = Request.blank('/bucket/object',
                            environ={'REQUEST_METHOD': method},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        # we'll want this for logging
        self.assertEqual(req.headers['X-Backend-Storage-Policy-Index'], '2')
        unexpected_headers = []
        for key, val in self.response_headers.items():
            if key in ('Content-Length', 'Content-Type', 'content-encoding',
                       'last-modified', 'cache-control', 'Content-Disposition',
                       'Content-Language', 'expires', 'x-robots-tag'):
                self.assertIn(key, headers)
                self.assertEqual(headers[key], str(val))
            elif key == 'etag':
                # the ETag is returned quoted
                self.assertEqual(headers[key], '"%s"' % val)
            elif key.startswith('x-object-meta-'):
                self.assertIn('x-amz-meta-' + key[14:], headers)
                self.assertEqual(headers['x-amz-meta-' + key[14:]], val)
            else:
                unexpected_headers.append((key, val))
        if unexpected_headers:
            self.fail('unexpected headers: %r' % unexpected_headers)
        self.assertEqual(headers['etag'],
                         '"%s"' % self.response_headers['etag'])
        if method == 'GET':
            self.assertEqual(body, self.object_body)
@s3acl
def test_object_HEAD_error(self):
# HEAD does not return the body even an error response in the
# specifications of the REST API.
# So, check the response code for error test of HEAD.
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPUnauthorized, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '403')
self.assertEqual(body, b'') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPForbidden, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '403')
self.assertEqual(body, b'') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPNotFound, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '404')
self.assertEqual(body, b'') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPPreconditionFailed, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '412')
self.assertEqual(body, b'') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPServerError, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '500')
self.assertEqual(body, b'') # sanity
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPServiceUnavailable, {}, None)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '503')
self.assertEqual(body, b'') # sanity
    def test_object_HEAD(self):
        """HEAD behaves like GET minus the body (shared helper)."""
        self._test_object_GETorHEAD('HEAD')
    def test_object_policy_index_logging(self):
        """The proxy access log line ends with the storage policy index."""
        req = Request.blank('/bucket/object',
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        # wrap s3api in proxy_logging so an access line gets emitted
        self.s3api = ProxyLoggingMiddleware(self.s3api, {}, logger=self.logger)
        status, headers, body = self.call_s3api(req)
        access_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(1, len(access_lines))
        parts = access_lines[0].split()
        self.assertEqual(' '.join(parts[3:7]),
                         'GET /bucket/object HTTP/1.0 200')
        # last field of the access line is the policy index ('2')
        self.assertEqual(parts[-1], '2')
def _test_object_HEAD_Range(self, range_value):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'HEAD'},
headers={'Authorization': 'AWS test:tester:hmac',
'Range': range_value,
'Date': self.get_date_header()})
return self.call_s3api(req)
@s3acl
def test_object_HEAD_Range_with_invalid_value(self):
range_value = ''
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'hoge'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes='
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes=1'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes=5-1'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '5')
self.assertTrue('content-range' not in headers)
range_value = 'bytes=5-10'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '416')
@s3acl
def test_object_HEAD_Range(self):
# update response headers
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, self.response_headers,
self.object_body)
range_value = 'bytes=0-3'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '4')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 0-3'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
range_value = 'bytes=3-3'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '1')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 3-3'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
range_value = 'bytes=1-'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '4')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 1-4'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
range_value = 'bytes=-3'
status, headers, body = self._test_object_HEAD_Range(range_value)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-length' in headers)
self.assertEqual(headers['content-length'], '3')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 2-4'))
self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test'])
    @s3acl
    def test_object_GET_error(self):
        """Backend error statuses on GET map to the expected S3 codes."""
        code = self._test_method_error('GET', '/bucket/object',
                                       swob.HTTPUnauthorized)
        self.assertEqual(code, 'SignatureDoesNotMatch')
        code = self._test_method_error('GET', '/bucket/object',
                                       swob.HTTPForbidden)
        self.assertEqual(code, 'AccessDenied')
        code = self._test_method_error('GET', '/bucket/object',
                                       swob.HTTPNotFound)
        self.assertEqual(code, 'NoSuchKey')
        code = self._test_method_error('GET', '/bucket/object',
                                       swob.HTTPServerError)
        self.assertEqual(code, 'InternalError')
        code = self._test_method_error('GET', '/bucket/object',
                                       swob.HTTPPreconditionFailed)
        self.assertEqual(code, 'PreconditionFailed')
        code = self._test_method_error('GET', '/bucket/object',
                                       swob.HTTPServiceUnavailable)
        self.assertEqual(code, 'ServiceUnavailable')
        code = self._test_method_error('GET', '/bucket/object',
                                       swob.HTTPConflict)
        self.assertEqual(code, 'BrokenMPU')
        # 498 Rate Limited maps to SlowDown as a 503 by default...
        code = self._test_method_error(
            'GET', '/bucket/object',
            functools.partial(swob.Response, status='498 Rate Limited'),
            expected_status='503 Slow Down')
        self.assertEqual(code, 'SlowDown')
        # ...or as a 429 when ratelimit_as_client_error is configured
        with patch.object(self.s3api.conf, 'ratelimit_as_client_error', True):
            code = self._test_method_error(
                'GET', '/bucket/object',
                functools.partial(swob.Response, status='498 Rate Limited'),
                expected_status='429 Slow Down')
            self.assertEqual(code, 'SlowDown')
    @s3acl
    def test_object_GET(self):
        """GET returns the object's data and headers (shared helper)."""
        self._test_object_GETorHEAD('GET')
    @s3acl(s3acl_only=True)
    def test_object_GET_with_s3acl_and_unknown_user(self):
        """With s3_acl enabled, an unauthenticatable user gets 403."""
        # no remote user -> auth middleware cannot identify the requester
        self.swift.remote_user = None
        req = Request.blank('/bucket/object',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '403 Forbidden')
        self.assertEqual(self._get_error_code(body), 'SignatureDoesNotMatch')
    @s3acl(s3acl_only=True)
    def test_object_GET_with_s3acl_and_keystone(self):
        """s3api.auth_details is only present on the initial TEST request."""
        # for passing keystone authentication root
        orig_auth = self.swift._fake_auth_middleware
        calls = []
        def wrapped_auth(env):
            # record the request method and whether auth details were sent
            calls.append((env['REQUEST_METHOD'], 's3api.auth_details' in env))
            orig_auth(env)
        with patch.object(self.swift, '_fake_auth_middleware', wrapped_auth):
            self._test_object_GETorHEAD('GET')
        self.assertEqual(calls, [
            ('TEST', True),
            ('HEAD', False),
            ('GET', False),
        ])
@s3acl
def test_object_GET_Range(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Range': 'bytes=0-3',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '206')
self.assertTrue('content-range' in headers)
self.assertTrue(headers['content-range'].startswith('bytes 0-3'))
    @s3acl
    def test_object_GET_Range_error(self):
        """An unsatisfiable Range maps to S3's InvalidRange error."""
        code = self._test_method_error('GET', '/bucket/object',
                                       swob.HTTPRequestedRangeNotSatisfiable)
        self.assertEqual(code, 'InvalidRange')
@s3acl
def test_object_GET_Response(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'GET',
'QUERY_STRING':
'response-content-type=%s&'
'response-content-language=%s&'
'response-expires=%s&'
'response-cache-control=%s&'
'response-content-disposition=%s&'
'response-content-encoding=%s&'
% ('text/plain', 'en',
'Fri, 01 Apr 2014 12:00:00 GMT',
'no-cache',
'attachment',
'gzip')},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
self.assertTrue('content-type' in headers)
self.assertEqual(headers['content-type'], 'text/plain')
self.assertTrue('content-language' in headers)
self.assertEqual(headers['content-language'], 'en')
self.assertTrue('expires' in headers)
self.assertEqual(headers['expires'], 'Fri, 01 Apr 2014 12:00:00 GMT')
self.assertTrue('cache-control' in headers)
self.assertEqual(headers['cache-control'], 'no-cache')
self.assertTrue('content-disposition' in headers)
self.assertEqual(headers['content-disposition'],
'attachment')
self.assertTrue('content-encoding' in headers)
self.assertEqual(headers['content-encoding'], 'gzip')
    @s3acl
    def test_object_GET_version_id_not_implemented(self):
        """Without versioning support, only versionId=null is serviceable."""
        # GET version that is not null
        req = Request.blank('/bucket/object?versionId=2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        # empty get_swift_info simulates a cluster without object versioning
        with patch('swift.common.middleware.s3api.controllers.obj.'
                   'get_swift_info', return_value={}):
            status, headers, body = self.call_s3api(req)
            self.assertEqual(status.split()[0], '501', body)
        # GET current version
        req = Request.blank('/bucket/object?versionId=null',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        with patch('swift.common.middleware.s3api.controllers.obj.'
                   'get_swift_info', return_value={}):
            status, headers, body = self.call_s3api(req)
            self.assertEqual(status.split()[0], '200', body)
        self.assertEqual(body, self.object_body)
    @s3acl
    def test_object_GET_version_id(self):
        """GET with versionId fetches the requested object version.

        versionId is translated to the backend's version-id query
        parameter; a missing version yields 404.
        """
        # GET current version
        req = Request.blank('/bucket/object?versionId=null',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200', body)
        self.assertEqual(body, self.object_body)
        # GET current version that is not null
        req = Request.blank('/bucket/object?versionId=2',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200', body)
        self.assertEqual(body, self.object_body)
        # GET version in archive
        headers = self.response_headers.copy()
        headers['Content-Length'] = 6
        account = 'test:tester'
        grants = [Grant(User(account), 'FULL_CONTROL')]
        headers.update(
            encode_acl('object', ACL(Owner(account, account), grants)))
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket/object?version-id=1', swob.HTTPOk,
            headers, None)
        self.swift.register(
            'GET', '/v1/AUTH_test/bucket/object?version-id=1', swob.HTTPOk,
            headers, 'hello1')
        req = Request.blank('/bucket/object?versionId=1',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200', body)
        self.assertEqual(body, b'hello1')
        # Version not found
        self.swift.register(
            'GET', '/v1/AUTH_test/bucket/object?version-id=A',
            swob.HTTPNotFound, {}, None)
        req = Request.blank('/bucket/object?versionId=A',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '404')
    @s3acl(versioning_enabled=False)
    def test_object_GET_with_version_id_but_not_enabled(self):
        """versionId on an unversioned bucket gives NoSuchVersion, no GET."""
        # Version not found
        self.swift.register(
            'HEAD', '/v1/AUTH_test/bucket',
            swob.HTTPNoContent, {}, None)
        req = Request.blank('/bucket/object?versionId=A',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '404')
        elem = fromstring(body, 'Error')
        self.assertEqual(elem.find('Code').text, 'NoSuchVersion')
        self.assertEqual(elem.find('Key').text, 'object')
        self.assertEqual(elem.find('VersionId').text, 'A')
        expected_calls = []
        # NB: No actual backend GET!
        self.assertEqual(expected_calls, self.swift.calls)
    @s3acl
    def test_object_PUT_error(self):
        """PUT error mapping, including X-Amz-Copy-Source validation.

        Backend error statuses map to S3 error codes, and malformed
        X-Amz-Copy-Source values (or a Copy-Source-Range on a plain copy)
        are rejected with InvalidArgument before reaching the backend.
        """
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPUnauthorized)
        self.assertEqual(code, 'SignatureDoesNotMatch')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPForbidden)
        self.assertEqual(code, 'AccessDenied')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPNotFound)
        self.assertEqual(code, 'NoSuchBucket')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPRequestEntityTooLarge)
        self.assertEqual(code, 'EntityTooLarge')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPServerError)
        self.assertEqual(code, 'InternalError')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPUnprocessableEntity)
        self.assertEqual(code, 'BadDigest')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPLengthRequired)
        self.assertEqual(code, 'MissingContentLength')
        # Swift can 412 if the versions container is missing
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPPreconditionFailed)
        self.assertEqual(code, 'PreconditionFailed')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPServiceUnavailable)
        self.assertEqual(code, 'ServiceUnavailable')
        # malformed X-Amz-Copy-Source values are rejected up front
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPCreated,
                                       {'X-Amz-Copy-Source': ''})
        self.assertEqual(code, 'InvalidArgument')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPCreated,
                                       {'X-Amz-Copy-Source': '/'})
        self.assertEqual(code, 'InvalidArgument')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPCreated,
                                       {'X-Amz-Copy-Source': '/bucket'})
        self.assertEqual(code, 'InvalidArgument')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPCreated,
                                       {'X-Amz-Copy-Source': '/bucket/'})
        self.assertEqual(code, 'InvalidArgument')
        code = self._test_method_error(
            'PUT', '/bucket/object',
            swob.HTTPCreated,
            {'X-Amz-Copy-Source': '/bucket/src_obj?foo=bar'})
        self.assertEqual(code, 'InvalidArgument')
        # adding other query parameters will cause an error
        code = self._test_method_error(
            'PUT', '/bucket/object',
            swob.HTTPCreated,
            {'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo&bar=baz'})
        self.assertEqual(code, 'InvalidArgument')
        # ...even when versionId appears last
        code = self._test_method_error(
            'PUT', '/bucket/object',
            swob.HTTPCreated,
            {'X-Amz-Copy-Source': '/bucket/src_obj?bar=baz&versionId=foo'})
        self.assertEqual(code, 'InvalidArgument')
        code = self._test_method_error(
            'PUT', '/bucket/object',
            swob.HTTPCreated,
            {'X-Amz-Copy-Source': '/src_bucket/src_object',
             'X-Amz-Copy-Source-Range': 'bytes=0-0'})
        self.assertEqual(code, 'InvalidArgument')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPRequestTimeout)
        self.assertEqual(code, 'RequestTimeout')
        code = self._test_method_error('PUT', '/bucket/object',
                                       swob.HTTPClientDisconnect,
                                       {})
        self.assertEqual(code, 'RequestTimeout')
    def test_object_PUT_with_version(self):
        """Copy from a versioned source passes version-id to the backend."""
        self.swift.register('GET',
                            '/v1/AUTH_test/bucket/src_obj?version-id=foo',
                            swob.HTTPOk, self.response_headers,
                            self.object_body)
        self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
                            swob.HTTPCreated, {
                                'etag': self.etag,
                                'last-modified': self.last_modified,
                            }, None)
        req = Request.blank('/bucket/object', method='PUT', body='', headers={
            'Authorization': 'AWS test:tester:hmac',
            'Date': self.get_date_header(),
            'X-Amz-Copy-Source': '/bucket/src_obj?versionId=foo',
        })
        status, headers, body = self.call_s3api(req)
        self.assertEqual('200 OK', status)
        elem = fromstring(body, 'CopyObjectResult')
        self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
        # versionId is translated to version-id on both backend calls
        self.assertEqual(self.swift.calls, [
            ('HEAD', '/v1/AUTH_test/bucket/src_obj?version-id=foo'),
            ('PUT', '/v1/AUTH_test/bucket/object?version-id=foo'),
        ])
        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertEqual(headers['x-copy-from'], '/bucket/src_obj')
    @s3acl
    def test_object_PUT(self):
        """PUT with Content-MD5 translates it into a backend ETag check."""
        etag = self.response_headers['etag']
        # Content-MD5 is the base64 of the raw digest bytes
        content_md5 = binascii.b2a_base64(binascii.a2b_hex(etag)).strip()
        if not six.PY2:
            content_md5 = content_md5.decode('ascii')
        req = Request.blank(
            '/bucket/object',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'x-amz-storage-class': 'STANDARD',
                     'Content-MD5': content_md5,
                     'Date': self.get_date_header()},
            body=self.object_body)
        req.date = datetime.now()
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        # Check that s3api returns an etag header.
        self.assertEqual(headers['etag'], '"%s"' % etag)
        _, _, headers = self.swift.calls_with_headers[-1]
        # Check that s3api converts a Content-MD5 header into an etag.
        self.assertEqual(headers['etag'], etag)
@s3acl
def test_object_PUT_quota_exceeded(self):
etag = self.response_headers['etag']
content_md5 = binascii.b2a_base64(binascii.a2b_hex(etag)).strip()
if not six.PY2:
content_md5 = content_md5.decode('ascii')
self.swift.register(
'PUT', '/v1/AUTH_test/bucket/object',
swob.HTTPRequestEntityTooLarge, {}, 'Upload exceeds quota.')
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'x-amz-storage-class': 'STANDARD',
'Content-MD5': content_md5,
'Date': self.get_date_header()},
body=self.object_body)
req.date = datetime.now()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '400')
self.assertIn(b'<Code>EntityTooLarge</Code>', body)
self.assertIn(b'<Message>Upload exceeds quota.</Message', body)
    @s3acl
    def test_object_PUT_v4(self):
        """SigV4 PUT with a signed payload hash succeeds; no ETag is sent."""
        body_sha = sha256(self.object_body).hexdigest()
        req = Request.blank(
            '/bucket/object',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={
                'Authorization':
                    'AWS4-HMAC-SHA256 '
                    'Credential=test:tester/%s/us-east-1/s3/aws4_request, '
                    'SignedHeaders=host;x-amz-date, '
                    'Signature=hmac' % (
                        self.get_v4_amz_date_header().split('T', 1)[0]),
                'x-amz-date': self.get_v4_amz_date_header(),
                'x-amz-storage-class': 'STANDARD',
                'x-amz-content-sha256': body_sha,
                'Date': self.get_date_header()},
            body=self.object_body)
        req.date = datetime.now()
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        # Check that s3api returns an etag header.
        self.assertEqual(headers['etag'],
                         '"%s"' % self.response_headers['etag'])
        _, _, headers = self.swift.calls_with_headers[-1]
        # No way to determine ETag to send
        self.assertNotIn('etag', headers)
        self.assertEqual('/v1/AUTH_test/bucket/object',
                         req.environ.get('swift.backend_path'))
    @s3acl
    def test_object_PUT_v4_bad_hash(self):
        """A wrong x-amz-content-sha256 is rejected with BadDigest."""
        req = Request.blank(
            '/bucket/object',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={
                'Authorization':
                    'AWS4-HMAC-SHA256 '
                    'Credential=test:tester/%s/us-east-1/s3/aws4_request, '
                    'SignedHeaders=host;x-amz-date, '
                    'Signature=hmac' % (
                        self.get_v4_amz_date_header().split('T', 1)[0]),
                'x-amz-date': self.get_v4_amz_date_header(),
                'x-amz-storage-class': 'STANDARD',
                'x-amz-content-sha256': 'not the hash',
                'Date': self.get_date_header()},
            body=self.object_body)
        req.date = datetime.now()
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '400')
        self.assertEqual(self._get_error_code(body), 'BadDigest')
        self.assertEqual('/v1/AUTH_test/bucket/object',
                         req.environ.get('swift.backend_path'))
    @s3acl
    def test_object_PUT_v4_unsigned_payload(self):
        """UNSIGNED-PAYLOAD skips body hashing but appears in the canonical
        request."""
        req = Request.blank(
            '/bucket/object',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={
                'Authorization':
                    'AWS4-HMAC-SHA256 '
                    'Credential=test:tester/%s/us-east-1/s3/aws4_request, '
                    'SignedHeaders=host;x-amz-date, '
                    'Signature=hmac' % (
                        self.get_v4_amz_date_header().split('T', 1)[0]),
                'x-amz-date': self.get_v4_amz_date_header(),
                'x-amz-storage-class': 'STANDARD',
                'x-amz-content-sha256': 'UNSIGNED-PAYLOAD',
                'Date': self.get_date_header()},
            body=self.object_body)
        req.date = datetime.now()
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        # Check that s3api returns an etag header.
        self.assertEqual(headers['etag'],
                         '"%s"' % self.response_headers['etag'])
        _, _, headers = self.swift.calls_with_headers[-1]
        # No way to determine ETag to send
        self.assertNotIn('etag', headers)
        self.assertIn(b'UNSIGNED-PAYLOAD', SigV4Request(
            req.environ, self.s3api.conf)._canonical_request())
    def test_object_PUT_headers(self):
        """Copy PUT ignores x-amz-meta-* when no metadata directive is given."""
        # Content-MD5 is the base64 of the raw digest bytes
        content_md5 = binascii.b2a_base64(binascii.a2b_hex(self.etag)).strip()
        if not six.PY2:
            content_md5 = content_md5.decode('ascii')
        self.swift.register('HEAD', '/v1/AUTH_test/some/source',
                            swob.HTTPOk, {'last-modified': self.last_modified},
                            None)
        req = Request.blank(
            '/bucket/object',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'X-Amz-Storage-Class': 'STANDARD',
                     'X-Amz-Meta-Something': 'oh hai',
                     'X-Amz-Meta-Unreadable-Prefix': '\x04w',
                     'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
                     'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
                     'X-Amz-Copy-Source': '/some/source',
                     'Content-MD5': content_md5,
                     'Date': self.get_date_header()},
            body=self.object_body)
        req.date = datetime.now()
        req.content_type = 'text/plain'
        status, headers, body = self.call_s3api(req)
        self.assertEqual('200 ', status[:4], body)
        # Check that s3api does not return an etag header,
        # specified copy source.
        self.assertNotIn('etag', headers)
        # Check that s3api does not return custom metadata in response
        self.assertNotIn('x-amz-meta-something', headers)
        _, _, headers = self.swift.calls_with_headers[-1]
        # Check that s3api converts a Content-MD5 header into an etag.
        self.assertEqual(headers['ETag'], self.etag)
        # Check that metadata is omitted if no directive is specified
        self.assertIsNone(headers.get('X-Object-Meta-Something'))
        self.assertIsNone(headers.get('X-Object-Meta-Unreadable-Prefix'))
        self.assertIsNone(headers.get('X-Object-Meta-Unreadable-Suffix'))
        self.assertIsNone(headers.get('X-Object-Meta-Lots-Of-Unprintable'))
        self.assertEqual(headers['X-Copy-From'], '/some/source')
        self.assertEqual(headers['Content-Length'], '0')
def _test_object_PUT_copy(self, head_resp, put_header=None,
src_path='/some/source', timestamp=None):
account = 'test:tester'
grants = [Grant(User(account), 'FULL_CONTROL')]
head_headers = \
encode_acl('object',
ACL(Owner(account, account), grants))
head_headers.update({'last-modified': self.last_modified})
self.swift.register('HEAD', '/v1/AUTH_test/some/source',
head_resp, head_headers, None)
put_header = put_header or {}
return self._call_object_copy(src_path, put_header, timestamp)
def _test_object_PUT_copy_self(self, head_resp,
put_header=None, timestamp=None):
account = 'test:tester'
grants = [Grant(User(account), 'FULL_CONTROL')]
head_headers = \
encode_acl('object',
ACL(Owner(account, account), grants))
head_headers.update({'last-modified': self.last_modified})
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
head_resp, head_headers, None)
put_header = put_header or {}
return self._call_object_copy('/bucket/object', put_header, timestamp)
def _call_object_copy(self, src_path, put_header, timestamp=None):
put_headers = {'Authorization': 'AWS test:tester:hmac',
'X-Amz-Copy-Source': src_path,
'Date': self.get_date_header()}
put_headers.update(put_header)
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'PUT'},
headers=put_headers)
req.date = datetime.now()
req.content_type = 'text/plain'
timestamp = timestamp or time.time()
with patch('swift.common.middleware.s3api.utils.time.time',
return_value=timestamp):
return self.call_s3api(req)
def test_simple_object_copy(self):
self.swift.register('HEAD', '/v1/AUTH_test/some/source',
swob.HTTPOk, {
'x-backend-storage-policy-index': '1',
}, None)
req = Request.blank(
'/bucket/object', method='PUT',
headers={
'Authorization': 'AWS test:tester:hmac',
'X-Amz-Copy-Source': '/some/source',
'Date': self.get_date_header(),
},
)
timestamp = time.time()
with patch('swift.common.middleware.s3api.utils.time.time',
return_value=timestamp):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
head_call, put_call = self.swift.calls_with_headers
self.assertEqual(
head_call.headers['x-backend-storage-policy-index'], '1')
self.assertEqual(put_call.headers['x-copy-from'], '/some/source')
self.assertNotIn('x-backend-storage-policy-index', put_call.headers)
@s3acl
def test_object_PUT_copy(self):
def do_test(src_path):
date_header = self.get_date_header()
timestamp = mktime(date_header)
allowed_last_modified = [S3Timestamp(timestamp).s3xmlformat]
status, headers, body = self._test_object_PUT_copy(
swob.HTTPOk, put_header={'Date': date_header},
timestamp=timestamp, src_path=src_path)
# may have gotten unlucky and had the clock roll over
date_header = self.get_date_header()
timestamp = mktime(date_header)
allowed_last_modified.append(S3Timestamp(timestamp).s3xmlformat)
self.assertEqual(status.split()[0], '200')
self.assertEqual(headers['Content-Type'], 'application/xml')
self.assertTrue(headers.get('etag') is None)
self.assertTrue(headers.get('x-amz-meta-something') is None)
elem = fromstring(body, 'CopyObjectResult')
self.assertIn(elem.find('LastModified').text,
allowed_last_modified)
self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(headers['X-Copy-From'], '/some/source')
self.assertTrue(headers.get('X-Fresh-Metadata') is None)
self.assertEqual(headers['Content-Length'], '0')
do_test('/some/source')
do_test('/some/source?')
do_test('/some/source?versionId=null')
# Some clients (like Boto) don't include the leading slash;
# AWS seems to tolerate this so we should, too
do_test('some/source')
@s3acl
def test_object_PUT_copy_metadata_replace(self):
with mock_timestamp_now(klass=S3Timestamp) as now:
status, headers, body = \
self._test_object_PUT_copy(
swob.HTTPOk,
{'X-Amz-Metadata-Directive': 'REPLACE',
'X-Amz-Meta-Something': 'oh hai',
'X-Amz-Meta-Unreadable-Prefix': '\x04w',
'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
'Cache-Control': 'hello',
'content-disposition': 'how are you',
'content-encoding': 'good and you',
'content-language': 'great',
'content-type': 'so',
'expires': 'yeah',
'x-robots-tag': 'bye'})
self.assertEqual(status.split()[0], '200')
self.assertEqual(headers['Content-Type'], 'application/xml')
self.assertIsNone(headers.get('etag'))
elem = fromstring(body, 'CopyObjectResult')
self.assertEqual(S3Timestamp(now.ceil()).s3xmlformat,
elem.find('LastModified').text)
self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(headers['X-Copy-From'], '/some/source')
# Check that metadata is included if replace directive is specified
# and that Fresh Metadata is set
self.assertTrue(headers.get('X-Fresh-Metadata') == 'True')
self.assertEqual(headers['X-Object-Meta-Something'], 'oh hai')
self.assertEqual(headers['X-Object-Meta-Unreadable-Prefix'],
'=?UTF-8?Q?=04w?=')
self.assertEqual(headers['X-Object-Meta-Unreadable-Suffix'],
'=?UTF-8?Q?h=04?=')
self.assertEqual(headers['X-Object-Meta-Lots-Of-Unprintable'],
'=?UTF-8?B?BAQEBAQ=?=')
# Check other metadata is set
self.assertEqual(headers['Cache-Control'], 'hello')
self.assertEqual(headers['Content-Disposition'], 'how are you')
self.assertEqual(headers['Content-Encoding'], 'good and you')
self.assertEqual(headers['Content-Language'], 'great')
self.assertEqual(headers['Content-Type'], 'so')
self.assertEqual(headers['Expires'], 'yeah')
self.assertEqual(headers['X-Robots-Tag'], 'bye')
self.assertEqual(headers['Content-Length'], '0')
    @s3acl
    def test_object_PUT_copy_metadata_copy(self):
        """Copy with X-Amz-Metadata-Directive: COPY.

        The request's own metadata and content headers must NOT be
        forwarded to the backend PUT; the source object's metadata is kept.
        """
        with mock_timestamp_now(klass=S3Timestamp) as now:
            status, headers, body = \
                self._test_object_PUT_copy(
                    swob.HTTPOk,
                    {'X-Amz-Metadata-Directive': 'COPY',
                     'X-Amz-Meta-Something': 'oh hai',
                     'X-Amz-Meta-Unreadable-Prefix': '\x04w',
                     'X-Amz-Meta-Unreadable-Suffix': 'h\x04',
                     'X-Amz-Meta-Lots-Of-Unprintable': 5 * '\x04',
                     'Cache-Control': 'hello',
                     'content-disposition': 'how are you',
                     'content-encoding': 'good and you',
                     'content-language': 'great',
                     'content-type': 'so',
                     'expires': 'yeah',
                     'x-robots-tag': 'bye'})
        self.assertEqual(status.split()[0], '200')
        self.assertEqual(headers['Content-Type'], 'application/xml')
        self.assertIsNone(headers.get('etag'))
        elem = fromstring(body, 'CopyObjectResult')
        self.assertEqual(S3Timestamp(now.ceil()).s3xmlformat,
                         elem.find('LastModified').text)
        self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)

        _, _, headers = self.swift.calls_with_headers[-1]
        self.assertEqual(headers['X-Copy-From'], '/some/source')
        # Check that metadata is omitted if COPY directive is specified
        self.assertIsNone(headers.get('X-Fresh-Metadata'))
        self.assertIsNone(headers.get('X-Object-Meta-Something'))
        self.assertIsNone(headers.get('X-Object-Meta-Unreadable-Prefix'))
        self.assertIsNone(headers.get('X-Object-Meta-Unreadable-Suffix'))
        self.assertIsNone(headers.get('X-Object-Meta-Lots-Of-Unprintable'))
        self.assertIsNone(headers.get('Cache-Control'))
        self.assertIsNone(headers.get('Content-Disposition'))
        self.assertIsNone(headers.get('Content-Encoding'))
        self.assertIsNone(headers.get('Content-Language'))
        self.assertIsNone(headers.get('Content-Type'))
        self.assertIsNone(headers.get('Expires'))
        self.assertIsNone(headers.get('X-Robots-Tag'))
        self.assertEqual(headers['Content-Length'], '0')
@s3acl
def test_object_PUT_copy_self(self):
status, headers, body = \
self._test_object_PUT_copy_self(swob.HTTPOk)
self.assertEqual(status.split()[0], '400')
elem = fromstring(body, 'Error')
err_msg = ("This copy request is illegal because it is trying to copy "
"an object to itself without changing the object's "
"metadata, storage class, website redirect location or "
"encryption attributes.")
self.assertEqual(elem.find('Code').text, 'InvalidRequest')
self.assertEqual(elem.find('Message').text, err_msg)
@s3acl
def test_object_PUT_copy_self_metadata_copy(self):
header = {'x-amz-metadata-directive': 'COPY'}
status, headers, body = \
self._test_object_PUT_copy_self(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '400')
elem = fromstring(body, 'Error')
err_msg = ("This copy request is illegal because it is trying to copy "
"an object to itself without changing the object's "
"metadata, storage class, website redirect location or "
"encryption attributes.")
self.assertEqual(elem.find('Code').text, 'InvalidRequest')
self.assertEqual(elem.find('Message').text, err_msg)
@s3acl
def test_object_PUT_copy_self_metadata_replace(self):
date_header = self.get_date_header()
timestamp = mktime(date_header)
allowed_last_modified = [S3Timestamp(timestamp).s3xmlformat]
header = {'x-amz-metadata-directive': 'REPLACE',
'Date': date_header}
status, headers, body = self._test_object_PUT_copy_self(
swob.HTTPOk, header, timestamp=timestamp)
date_header = self.get_date_header()
timestamp = mktime(date_header)
allowed_last_modified.append(S3Timestamp(timestamp).s3xmlformat)
self.assertEqual(status.split()[0], '200')
self.assertEqual(headers['Content-Type'], 'application/xml')
self.assertTrue(headers.get('etag') is None)
elem = fromstring(body, 'CopyObjectResult')
self.assertIn(elem.find('LastModified').text, allowed_last_modified)
self.assertEqual(elem.find('ETag').text, '"%s"' % self.etag)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(headers['X-Copy-From'], '/bucket/object')
self.assertEqual(headers['Content-Length'], '0')
@s3acl
def test_object_PUT_copy_headers_error(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPPreconditionFailed,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = {'X-Amz-Copy-Source-If-None-Match': etag}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPNotModified,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = {'X-Amz-Copy-Source-If-Modified-Since': last_modified_since}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPNotModified,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
header = \
{'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPPreconditionFailed,
header)
self.assertEqual(self._get_error_code(body), 'PreconditionFailed')
def test_object_PUT_copy_headers_with_match(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag,
'X-Amz-Copy-Source-If-Modified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 2)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-Match'], etag)
self.assertEqual(headers['If-Modified-Since'], last_modified_since)
@s3acl(s3acl_only=True)
def test_object_PUT_copy_headers_with_match_and_s3acl(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 11:00:00 GMT'
header = {'X-Amz-Copy-Source-If-Match': etag,
'X-Amz-Copy-Source-If-Modified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 3)
# After the check of the copy source in the case of s3acl is valid,
# s3api check the bucket write permissions of the destination.
_, _, headers = self.swift.calls_with_headers[-2]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-Match') is None)
self.assertTrue(headers.get('If-Modified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-Match'], etag)
self.assertEqual(headers['If-Modified-Since'], last_modified_since)
def test_object_PUT_copy_headers_with_not_match(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-None-Match': etag,
'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
self.assertEqual(len(self.swift.calls_with_headers), 2)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-None-Match') is None)
self.assertTrue(headers.get('If-Unmodified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-None-Match'], etag)
self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
@s3acl(s3acl_only=True)
def test_object_PUT_copy_headers_with_not_match_and_s3acl(self):
etag = '7dfa07a8e59ddbcd1dc84d4c4f82aea1'
last_modified_since = 'Fri, 01 Apr 2014 12:00:00 GMT'
header = {'X-Amz-Copy-Source-If-None-Match': etag,
'X-Amz-Copy-Source-If-Unmodified-Since': last_modified_since,
'Date': self.get_date_header()}
status, header, body = \
self._test_object_PUT_copy(swob.HTTPOk, header)
self.assertEqual(status.split()[0], '200')
# After the check of the copy source in the case of s3acl is valid,
# s3api check the bucket write permissions of the destination.
self.assertEqual(len(self.swift.calls_with_headers), 3)
_, _, headers = self.swift.calls_with_headers[-1]
self.assertTrue(headers.get('If-None-Match') is None)
self.assertTrue(headers.get('If-Unmodified-Since') is None)
_, _, headers = self.swift.calls_with_headers[0]
self.assertEqual(headers['If-None-Match'], etag)
self.assertEqual(headers['If-Unmodified-Since'], last_modified_since)
@s3acl
def test_object_POST_error(self):
code = self._test_method_error('POST', '/bucket/object', None)
self.assertEqual(code, 'NotImplemented')
@s3acl
def test_object_DELETE_error(self):
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPUnauthorized)
self.assertEqual(code, 'SignatureDoesNotMatch')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPForbidden)
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPServiceUnavailable)
self.assertEqual(code, 'ServiceUnavailable')
with patch(
'swift.common.middleware.s3api.s3request.get_container_info',
return_value={'status': 404}):
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchBucket')
@s3acl
def test_object_DELETE_no_multipart(self):
self.s3api.conf.allow_multipart_uploads = False
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertNotIn(('HEAD', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'),
self.swift.calls)
_, path = self.swift.calls[-1]
self.assertEqual(path.count('?'), 0)
    def test_object_DELETE_old_version_id(self):
        """Delete a non-current version in a versioned bucket.

        The backend DELETE already reports a remaining current version,
        so no restore PUT is needed afterwards.
        """
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk, self.response_headers, None)
        # Non-null current version => nothing to promote after the delete.
        resp_headers = {'X-Object-Current-Version-Id': '1574360804.34906'}
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
                            '?symlink=get&version-id=1574358170.12293',
                            swob.HTTPNoContent, resp_headers, None)
        req = Request.blank('/bucket/object?versionId=1574358170.12293',
                            method='DELETE', headers={
                                'Authorization': 'AWS test:tester:hmac',
                                'Date': self.get_date_header()})
        fake_info = {
            'status': 204,
            'sysmeta': {
                'versions-container': '\x00versions\x00bucket',
            }
        }
        with patch('swift.common.middleware.s3api.s3request.'
                   'get_container_info', return_value=fake_info):
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('DELETE', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293')
        ], self.swift.calls)
    def test_object_DELETE_current_version_id(self):
        """Delete the current version; the newest prior version is promoted.

        The backend DELETE reports no current version ('null'), so the
        controller lists prior versions and restores the newest via PUT.
        """
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk, self.response_headers, None)
        resp_headers = {'X-Object-Current-Version-Id': 'null'}
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
                            '?symlink=get&version-id=1574358170.12293',
                            swob.HTTPNoContent, resp_headers, None)
        # Listing is newest-first; 1574341899.21751 should be restored.
        old_versions = [{
            'name': 'object',
            'version_id': '1574341899.21751',
            'content_type': 'application/found',
        }, {
            'name': 'object',
            'version_id': '1574333192.15190',
            'content_type': 'application/older',
        }]
        self.swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPOk, {},
                            json.dumps(old_versions))
        req = Request.blank('/bucket/object?versionId=1574358170.12293',
                            method='DELETE', headers={
                                'Authorization': 'AWS test:tester:hmac',
                                'Date': self.get_date_header()})
        fake_info = {
            'status': 204,
            'sysmeta': {
                'versions-container': '\x00versions\x00bucket',
            }
        }
        with patch('swift.common.middleware.s3api.s3request.'
                   'get_container_info', return_value=fake_info):
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('DELETE', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('GET', '/v1/AUTH_test/bucket'
             '?prefix=object&versions=True'),
            ('PUT', '/v1/AUTH_test/bucket/object'
             '?version-id=1574341899.21751'),
        ], self.swift.calls)
@s3acl(versioning_enabled=False)
def test_object_DELETE_with_version_id_but_not_enabled(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket',
swob.HTTPNoContent, {}, None)
req = Request.blank('/bucket/object?versionId=1574358170.12293',
method='DELETE', headers={
'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
expected_calls = []
# NB: No actual backend DELETE!
self.assertEqual(expected_calls, self.swift.calls)
def test_object_DELETE_version_id_not_implemented(self):
req = Request.blank('/bucket/object?versionId=1574358170.12293',
method='DELETE', headers={
'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
with patch('swift.common.middleware.s3api.controllers.obj.'
'get_swift_info', return_value={}):
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '501', body)
    def test_object_DELETE_current_version_id_is_delete_marker(self):
        """Delete the current version when the only prior one is a delete
        marker: no restore PUT is issued.
        """
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk, self.response_headers, None)
        resp_headers = {'X-Object-Current-Version-Id': 'null'}
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
                            '?symlink=get&version-id=1574358170.12293',
                            swob.HTTPNoContent, resp_headers, None)
        # The sole prior version is a delete marker, so nothing is restored.
        old_versions = [{
            'name': 'object',
            'version_id': '1574341899.21751',
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
        }]
        self.swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPOk, {},
                            json.dumps(old_versions))
        req = Request.blank('/bucket/object?versionId=1574358170.12293',
                            method='DELETE', headers={
                                'Authorization': 'AWS test:tester:hmac',
                                'Date': self.get_date_header()})
        fake_info = {
            'status': 204,
            'sysmeta': {
                'versions-container': '\x00versions\x00bucket',
            }
        }
        with patch('swift.common.middleware.s3api.s3request.'
                   'get_container_info', return_value=fake_info):
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('DELETE', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('GET', '/v1/AUTH_test/bucket'
             '?prefix=object&versions=True'),
        ], self.swift.calls)
    def test_object_DELETE_current_version_id_is_missing(self):
        """Restore falls back to the next version if the newest is gone.

        The first restore PUT fails with 412 (the version vanished), so
        the controller retries with the next-oldest version.
        """
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk, self.response_headers, None)
        resp_headers = {'X-Object-Current-Version-Id': 'null'}
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
                            '?symlink=get&version-id=1574358170.12293',
                            swob.HTTPNoContent, resp_headers, None)
        old_versions = [{
            'name': 'object',
            'version_id': '1574341899.21751',
            'content_type': 'application/missing',
        }, {
            'name': 'object',
            'version_id': '1574333192.15190',
            'content_type': 'application/found',
        }]
        self.swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPOk, {},
                            json.dumps(old_versions))
        # Newest restore target 412s; the older one succeeds.
        self.swift.register('PUT', '/v1/AUTH_test/bucket/object'
                            '?version-id=1574341899.21751',
                            swob.HTTPPreconditionFailed, {}, None)
        self.swift.register('PUT', '/v1/AUTH_test/bucket/object'
                            '?version-id=1574333192.15190',
                            swob.HTTPCreated, {}, None)
        req = Request.blank('/bucket/object?versionId=1574358170.12293',
                            method='DELETE', headers={
                                'Authorization': 'AWS test:tester:hmac',
                                'Date': self.get_date_header()})
        fake_info = {
            'status': 204,
            'sysmeta': {
                'versions-container': '\x00versions\x00bucket',
            }
        }
        with patch('swift.common.middleware.s3api.s3request.'
                   'get_container_info', return_value=fake_info):
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('DELETE', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('GET', '/v1/AUTH_test/bucket'
             '?prefix=object&versions=True'),
            ('PUT', '/v1/AUTH_test/bucket/object'
             '?version-id=1574341899.21751'),
            ('PUT', '/v1/AUTH_test/bucket/object'
             '?version-id=1574333192.15190'),
        ], self.swift.calls)
    def test_object_DELETE_current_version_id_GET_error(self):
        """A failure listing prior versions surfaces as a 500 response."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk, self.response_headers, None)
        resp_headers = {'X-Object-Current-Version-Id': 'null'}
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
                            '?symlink=get&version-id=1574358170.12293',
                            swob.HTTPNoContent, resp_headers, None)
        # Version listing fails => the overall request fails.
        self.swift.register('GET', '/v1/AUTH_test/bucket',
                            swob.HTTPServerError, {}, '')
        req = Request.blank('/bucket/object?versionId=1574358170.12293',
                            method='DELETE', headers={
                                'Authorization': 'AWS test:tester:hmac',
                                'Date': self.get_date_header()})
        fake_info = {
            'status': 204,
            'sysmeta': {
                'versions-container': '\x00versions\x00bucket',
            }
        }
        with patch('swift.common.middleware.s3api.s3request.'
                   'get_container_info', return_value=fake_info):
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '500')
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('DELETE', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('GET', '/v1/AUTH_test/bucket'
             '?prefix=object&versions=True'),
        ], self.swift.calls)
    def test_object_DELETE_current_version_id_PUT_error(self):
        """A failure restoring the prior version surfaces as a 500."""
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPOk, self.response_headers, None)
        resp_headers = {'X-Object-Current-Version-Id': 'null'}
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object'
                            '?symlink=get&version-id=1574358170.12293',
                            swob.HTTPNoContent, resp_headers, None)
        old_versions = [{
            'name': 'object',
            'version_id': '1574341899.21751',
            'content_type': 'application/foo',
        }]
        self.swift.register('GET', '/v1/AUTH_test/bucket', swob.HTTPOk, {},
                            json.dumps(old_versions))
        # The restore PUT fails => the overall request fails.
        self.swift.register('PUT', '/v1/AUTH_test/bucket/object'
                            '?version-id=1574341899.21751',
                            swob.HTTPServerError, {}, None)
        req = Request.blank('/bucket/object?versionId=1574358170.12293',
                            method='DELETE', headers={
                                'Authorization': 'AWS test:tester:hmac',
                                'Date': self.get_date_header()})
        fake_info = {
            'status': 204,
            'sysmeta': {
                'versions-container': '\x00versions\x00bucket',
            }
        }
        with patch('swift.common.middleware.s3api.s3request.'
                   'get_container_info', return_value=fake_info):
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '500')
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('DELETE', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574358170.12293'),
            ('GET', '/v1/AUTH_test/bucket'
             '?prefix=object&versions=True'),
            ('PUT', '/v1/AUTH_test/bucket/object'
             '?version-id=1574341899.21751'),
        ], self.swift.calls)
    def test_object_DELETE_in_versioned_container_without_version(self):
        """DELETE with no versionId in a versioned bucket.

        The response should expose the created delete marker via the
        x-amz-version-id and x-amz-delete-marker headers.
        """
        resp_headers = {
            'X-Object-Version-Id': '1574360804.34906',
            'X-Backend-Content-Type': DELETE_MARKER_CONTENT_TYPE}
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
                            swob.HTTPNoContent, resp_headers, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket',
                            swob.HTTPNoContent, {
                                'X-Container-Sysmeta-Versions-Enabled': True},
                            None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPNotFound, self.response_headers, None)
        req = Request.blank('/bucket/object', method='DELETE', headers={
            'Authorization': 'AWS test:tester:hmac',
            'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/bucket/object?symlink=get'),
            ('HEAD', '/v1/AUTH_test'),
            ('HEAD', '/v1/AUTH_test/bucket'),
            ('DELETE', '/v1/AUTH_test/bucket/object'),
        ], self.swift.calls)
        self.assertEqual('1574360804.34906', headers.get('x-amz-version-id'))
        self.assertEqual('true', headers.get('x-amz-delete-marker'))
    def test_object_DELETE_in_versioned_container_with_version_id(self):
        """DELETE of a specific version echoes it in x-amz-version-id."""
        resp_headers = {
            'X-Object-Version-Id': '1574701081.61553'}
        self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
                            swob.HTTPNoContent, resp_headers, None)
        self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
                            swob.HTTPNotFound, self.response_headers, None)
        req = Request.blank('/bucket/object?versionId=1574701081.61553',
                            method='DELETE', headers={
                                'Authorization': 'AWS test:tester:hmac',
                                'Date': self.get_date_header()})
        fake_info = {
            'status': 204,
            'sysmeta': {
                'versions-container': '\x00versions\x00bucket',
            }
        }
        with patch('swift.common.middleware.s3api.s3request.'
                   'get_container_info', return_value=fake_info):
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '204')
        self.assertEqual([
            ('HEAD', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574701081.61553'),
            ('DELETE', '/v1/AUTH_test/bucket/object'
             '?symlink=get&version-id=1574701081.61553'),
        ], self.swift.calls)
        self.assertEqual('1574701081.61553', headers.get('x-amz-version-id'))
@s3acl
def test_object_DELETE_multipart(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object?symlink=get'),
self.swift.calls)
self.assertEqual(('DELETE', '/v1/AUTH_test/bucket/object'),
self.swift.calls[-1])
_, path = self.swift.calls[-1]
self.assertEqual(path.count('?'), 0)
@s3acl
def test_object_DELETE_missing(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPNotFound, {}, None)
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual(('HEAD', '/v1/AUTH_test/bucket/object?symlink=get'),
self.swift.calls[0])
# the s3acl retests w/ a get_container_info HEAD @ self.swift.calls[1]
self.assertEqual(('DELETE', '/v1/AUTH_test/bucket/object'),
self.swift.calls[-1])
@s3acl
def test_slo_object_DELETE(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk,
{'x-static-large-object': 'True'},
None)
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
swob.HTTPOk, {}, '<SLO delete results>')
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header(),
'Content-Type': 'foo/bar'})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual(body, b'')
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object?symlink=get'),
self.swift.calls)
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'
'?multipart-manifest=delete'),
self.swift.calls)
_, path, headers = self.swift.calls_with_headers[-1]
path, query_string = path.split('?', 1)
query = {}
for q in query_string.split('&'):
key, arg = q.split('=')
query[key] = arg
self.assertEqual(query['multipart-manifest'], 'delete')
# HEAD did not indicate that it was an S3 MPU, so no async delete
self.assertNotIn('async', query)
self.assertNotIn('Content-Type', headers)
@s3acl
def test_slo_object_async_DELETE(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPOk,
{'x-static-large-object': 'True',
'x-object-sysmeta-s3api-etag': 's3-style-etag'},
None)
self.swift.register('DELETE', '/v1/AUTH_test/bucket/object',
swob.HTTPNoContent, {}, '')
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header(),
'Content-Type': 'foo/bar'})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '204')
self.assertEqual(body, b'')
self.assertIn(('HEAD', '/v1/AUTH_test/bucket/object?symlink=get'),
self.swift.calls)
self.assertIn(('DELETE', '/v1/AUTH_test/bucket/object'
'?async=on&multipart-manifest=delete'),
self.swift.calls)
_, path, headers = self.swift.calls_with_headers[-1]
path, query_string = path.split('?', 1)
query = {}
for q in query_string.split('&'):
key, arg = q.split('=')
query[key] = arg
self.assertEqual(query['multipart-manifest'], 'delete')
self.assertEqual(query['async'], 'on')
self.assertNotIn('Content-Type', headers)
def _test_object_for_s3acl(self, method, account):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': method},
headers={'Authorization': 'AWS %s:hmac' % account,
'Date': self.get_date_header()})
return self.call_s3api(req)
def _test_set_container_permission(self, account, permission):
grants = [Grant(User(account), permission)]
headers = \
encode_acl('container',
ACL(Owner('test:tester', 'test:tester'), grants))
self.swift.register('HEAD', '/v1/AUTH_test/bucket',
swob.HTTPNoContent, headers, None)
@s3acl(s3acl_only=True)
def test_object_GET_without_permission(self):
status, headers, body = self._test_object_for_s3acl('GET',
'test:other')
self.assertEqual(self._get_error_code(body), 'AccessDenied')
@s3acl(s3acl_only=True)
def test_object_GET_with_read_permission(self):
status, headers, body = self._test_object_for_s3acl('GET',
'test:read')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_GET_with_fullcontrol_permission(self):
status, headers, body = \
self._test_object_for_s3acl('GET', 'test:full_control')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_without_permission(self):
status, headers, body = self._test_object_for_s3acl('PUT',
'test:other')
self.assertEqual(self._get_error_code(body), 'AccessDenied')
@s3acl(s3acl_only=True)
def test_object_PUT_with_owner_permission(self):
status, headers, body = self._test_object_for_s3acl('PUT',
'test:tester')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_with_write_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'WRITE')
status, headers, body = self._test_object_for_s3acl('PUT', account)
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_with_fullcontrol_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'FULL_CONTROL')
status, headers, body = \
self._test_object_for_s3acl('PUT', account)
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_DELETE_without_permission(self):
account = 'test:other'
status, headers, body = self._test_object_for_s3acl('DELETE',
account)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
@s3acl(s3acl_only=True)
def test_object_DELETE_with_owner_permission(self):
status, headers, body = self._test_object_for_s3acl('DELETE',
'test:tester')
self.assertEqual(status.split()[0], '204')
@s3acl(s3acl_only=True)
def test_object_DELETE_with_write_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'WRITE')
status, headers, body = self._test_object_for_s3acl('DELETE',
account)
self.assertEqual(status.split()[0], '204')
@s3acl(s3acl_only=True)
def test_object_DELETE_with_fullcontrol_permission(self):
account = 'test:other'
self._test_set_container_permission(account, 'FULL_CONTROL')
status, headers, body = self._test_object_for_s3acl('DELETE', account)
self.assertEqual(status.split()[0], '204')
    def _test_object_copy_for_s3acl(self, account, src_permission=None,
                                    src_path='/src_bucket/src_obj'):
        """PUT /bucket/object copying from *src_path*, signed as *account*.

        The source object is registered with an ACL granting
        *src_permission* to *account*; when no source permission is given
        the ACL instead grants FULL_CONTROL to the owner (test:tester).
        Returns the (status, headers, body) tuple from call_s3api.
        """
        owner = 'test:tester'
        grants = [Grant(User(account), src_permission)] \
            if src_permission else [Grant(User(owner), 'FULL_CONTROL')]
        src_o_headers = \
            encode_acl('object', ACL(Owner(owner, owner), grants))
        src_o_headers.update({'last-modified': self.last_modified})
        # The copy source is HEADed by the middleware before the PUT.
        self.swift.register(
            'HEAD', join('/v1/AUTH_test', src_path.lstrip('/')),
            swob.HTTPOk, src_o_headers, None)
        req = Request.blank(
            '/bucket/object',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'Authorization': 'AWS %s:hmac' % account,
                     'X-Amz-Copy-Source': src_path,
                     'Date': self.get_date_header()})
        return self.call_s3api(req)
@s3acl(s3acl_only=True)
def test_object_PUT_copy_with_owner_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:tester')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_with_fullcontrol_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:full_control',
'FULL_CONTROL')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_with_grantee_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:write', 'READ')
self.assertEqual(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_without_src_obj_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:write')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_without_dst_container_permission(self):
status, headers, body = \
self._test_object_copy_for_s3acl('test:other', 'READ')
self.assertEqual(status.split()[0], '403')
@s3acl(s3acl_only=True)
def test_object_PUT_copy_empty_src_path(self):
self.swift.register('PUT', '/v1/AUTH_test/bucket/object',
swob.HTTPPreconditionFailed, {}, None)
status, headers, body = self._test_object_copy_for_s3acl(
'test:write', 'READ', src_path='')
self.assertEqual(status.split()[0], '400')
    def test_cors_preflight(self):
        """Exercise OPTIONS preflight handling across allow_origin configs
        and both v2- and v4-style presigned URLs."""
        req = Request.blank(
            '/bucket/cors-object',
            environ={'REQUEST_METHOD': 'OPTIONS'},
            headers={'Origin': 'http://example.com',
                     'Access-Control-Request-Method': 'GET',
                     'Access-Control-Request-Headers': 'authorization'})
        self.s3api.conf.cors_preflight_allow_origin = ['*']
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '200 OK')
        self.assertDictEqual(headers, {
            'Allow': 'GET, HEAD, PUT, POST, DELETE, OPTIONS',
            'Access-Control-Allow-Origin': 'http://example.com',
            'Access-Control-Allow-Methods': ('GET, HEAD, PUT, POST, DELETE, '
                                             'OPTIONS'),
            'Access-Control-Allow-Headers': 'authorization',
            'Vary': 'Origin, Access-Control-Request-Headers',
        })
        # test more allow_origins
        self.s3api.conf.cors_preflight_allow_origin = ['http://example.com',
                                                       'http://other.com']
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '200 OK')
        self.assertDictEqual(headers, {
            'Allow': 'GET, HEAD, PUT, POST, DELETE, OPTIONS',
            'Access-Control-Allow-Origin': 'http://example.com',
            'Access-Control-Allow-Methods': ('GET, HEAD, PUT, POST, DELETE, '
                                             'OPTIONS'),
            'Access-Control-Allow-Headers': 'authorization',
            'Vary': 'Origin, Access-Control-Request-Headers',
        })
        # test presigned urls
        req = Request.blank(
            '/bucket/cors-object?AWSAccessKeyId=test%3Atester&'
            'Expires=1621558415&Signature=MKMdW3FpYcoFEJlTLF3EhP7AJgc%3D',
            environ={'REQUEST_METHOD': 'OPTIONS'},
            headers={'Origin': 'http://example.com',
                     'Access-Control-Request-Method': 'PUT'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '200 OK')
        self.assertDictEqual(headers, {
            'Allow': 'GET, HEAD, PUT, POST, DELETE, OPTIONS',
            'Access-Control-Allow-Origin': 'http://example.com',
            'Access-Control-Allow-Methods': ('GET, HEAD, PUT, POST, DELETE, '
                                             'OPTIONS'),
            'Vary': 'Origin, Access-Control-Request-Headers',
        })
        # v4-signed presigned URL
        req = Request.blank(
            '/bucket/cors-object?X-Amz-Algorithm=AWS4-HMAC-SHA256&'
            'X-Amz-Credential=test%3Atester%2F20210521%2Fus-east-1%2Fs3%2F'
            'aws4_request&X-Amz-Date=20210521T003835Z&X-Amz-Expires=900&'
            'X-Amz-Signature=e413549f2cbeddb457c5fddb2d28820ce58de514bb900'
            '5d588800d7ebb1a6a2d&X-Amz-SignedHeaders=host',
            environ={'REQUEST_METHOD': 'OPTIONS'},
            headers={'Origin': 'http://example.com',
                     'Access-Control-Request-Method': 'DELETE'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '200 OK')
        self.assertDictEqual(headers, {
            'Allow': 'GET, HEAD, PUT, POST, DELETE, OPTIONS',
            'Access-Control-Allow-Origin': 'http://example.com',
            'Access-Control-Allow-Methods': ('GET, HEAD, PUT, POST, DELETE, '
                                             'OPTIONS'),
            'Vary': 'Origin, Access-Control-Request-Headers',
        })
        # Wrong protocol
        self.s3api.conf.cors_preflight_allow_origin = ['https://example.com']
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '401 Unauthorized')
        self.assertEqual(headers, {
            'Allow': 'GET, HEAD, PUT, POST, DELETE, OPTIONS',
        })
    def test_cors_headers(self):
        """CORS headers set on the backend response are passed through (and
        header names remapped, e.g. =5f -> _) by S3Response."""
        # note: Access-Control-Allow-Methods would normally be expected in
        # response to an OPTIONS request but its included here in GET/PUT tests
        # to check that it is always passed back in S3Response
        cors_headers = {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Methods': ('GET, PUT, POST, COPY, '
                                             'DELETE, PUT, OPTIONS'),
            'Access-Control-Expose-Headers':
                'x-object-meta-test, x-object-meta-test=5funderscore, etag',
        }
        get_resp_headers = self.response_headers
        get_resp_headers['x-object-meta-test=5funderscore'] = 'underscored'
        self.swift.register(
            'GET', '/v1/AUTH_test/bucket/cors-object', swob.HTTPOk,
            dict(get_resp_headers, **cors_headers),
            self.object_body)
        self.swift.register(
            'PUT', '/v1/AUTH_test/bucket/cors-object', swob.HTTPCreated,
            dict({'etag': self.etag,
                  'last-modified': self.last_modified,
                  'x-object-meta-something': 'oh hai',
                  'x-object-meta-test=5funderscore': 'underscored'},
                 **cors_headers),
            None)
        req = Request.blank(
            '/bucket/cors-object',
            environ={'REQUEST_METHOD': 'GET'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header(),
                     'Origin': 'http://example.com',
                     'Access-Control-Request-Method': 'GET',
                     'Access-Control-Request-Headers': 'authorization'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '200 OK')
        self.assertIn('Access-Control-Allow-Origin', headers)
        self.assertEqual(headers['Access-Control-Allow-Origin'], '*')
        self.assertIn('Access-Control-Expose-Headers', headers)
        self.assertEqual(
            headers['Access-Control-Expose-Headers'],
            'x-amz-meta-test, x-amz-meta-test_underscore, etag, '
            'x-amz-request-id, x-amz-id-2')
        self.assertIn('Access-Control-Allow-Methods', headers)
        # COPY is not an S3 verb, so it gets dropped from the methods list
        self.assertEqual(
            headers['Access-Control-Allow-Methods'],
            'GET, PUT, POST, DELETE, PUT, OPTIONS')
        self.assertIn('x-amz-meta-test_underscore', headers)
        self.assertEqual('underscored', headers['x-amz-meta-test_underscore'])
        req = Request.blank(
            '/bucket/cors-object',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'Authorization': 'AWS test:tester:hmac',
                     'Date': self.get_date_header(),
                     'Origin': 'http://example.com',
                     'Access-Control-Request-Method': 'PUT',
                     'Access-Control-Request-Headers': 'authorization'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '200 OK')
        self.assertIn('Access-Control-Allow-Origin', headers)
        self.assertEqual(headers['Access-Control-Allow-Origin'], '*')
        self.assertIn('Access-Control-Expose-Headers', headers)
        self.assertEqual(
            headers['Access-Control-Expose-Headers'],
            'x-amz-meta-test, x-amz-meta-test_underscore, etag, '
            'x-amz-request-id, x-amz-id-2')
        self.assertIn('Access-Control-Allow-Methods', headers)
        self.assertEqual(
            headers['Access-Control-Allow-Methods'],
            'GET, PUT, POST, DELETE, PUT, OPTIONS')
        self.assertEqual('underscored', headers['x-amz-meta-test_underscore'])
class TestS3ApiObjNonUTC(TestS3ApiObj):
    """Re-run TestS3ApiObj with the process timezone set to US Eastern."""

    def setUp(self):
        # Remember the original TZ value, keeping None when it was unset so
        # that tearDown can restore the environment exactly.  The previous
        # implementation defaulted to '' and then wrote TZ='' back, which is
        # not the same as leaving TZ unset.
        self.orig_tz = os.environ.get('TZ')
        os.environ['TZ'] = 'EST+05EDT,M4.1.0,M10.5.0'
        time.tzset()
        super(TestS3ApiObjNonUTC, self).setUp()

    def tearDown(self):
        super(TestS3ApiObjNonUTC, self).tearDown()
        if self.orig_tz is None:
            del os.environ['TZ']
        else:
            os.environ['TZ'] = self.orig_tz
        time.tzset()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_obj.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Request
from test.unit.common.middleware.s3api import S3ApiTestCase
from swift.common.middleware.s3api.etree import fromstring
class TestS3ApiLocation(S3ApiTestCase):
    """Tests for the GET bucket?location S3 API."""

    def _get_location(self):
        """GET /bucket?location and return the LocationConstraint text.

        Asserts the response status is 200 before parsing the body.
        """
        req = Request.blank('/bucket?location',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')
        elem = fromstring(body, 'LocationConstraint')
        return elem.text

    def test_object_location(self):
        # The default location (us-east-1) is reported as an empty element.
        self.assertIsNone(self._get_location())

    def test_object_location_setting_as_us_west_1(self):
        self.s3api.conf.location = 'us-west-1'
        self.assertEqual(self._get_location(), 'us-west-1')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_location.py |
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This stuff can't live in test/unit/__init__.py due to its swob dependency.
import unittest
from test.unit.common.middleware.s3api.helpers import FakeSwift
from swift.common.middleware.s3api.utils import sysmeta_header
from swift.common.swob import HeaderKeyDict
from mock import MagicMock
class S3ApiHelperTestCase(unittest.TestCase):
    """Sanity checks for the FakeSwift test double's sysmeta handling."""

    def setUp(self):
        self.method = 'HEAD'
        self.path = '/v1/AUTH_test/bucket'

    def _check_headers(self, swift, method, path, headers):
        """Assert the headers registered for (method, path) equal *headers*."""
        _, response_headers, _ = swift._responses[(method, path)]
        self.assertEqual(headers, response_headers)

    def test_fake_swift_sysmeta(self):
        # Re-registering without a sysmeta header must keep the old sysmeta.
        swift = FakeSwift()
        orig_headers = HeaderKeyDict()
        orig_headers.update({sysmeta_header('container', 'acl'): 'test',
                             'x-container-meta-foo': 'bar'})
        swift.register(self.method, self.path, MagicMock(), orig_headers, None)
        self._check_headers(swift, self.method, self.path, orig_headers)

        new_headers = orig_headers.copy()
        del new_headers[sysmeta_header('container', 'acl').title()]
        swift.register(self.method, self.path, MagicMock(), new_headers, None)
        # The sysmeta removed above is preserved from the first registration.
        self._check_headers(swift, self.method, self.path, orig_headers)

    def test_fake_swift_sysmeta_overwrite(self):
        swift = FakeSwift()
        orig_headers = HeaderKeyDict()
        orig_headers.update({sysmeta_header('container', 'acl'): 'test',
                             'x-container-meta-foo': 'bar'})
        swift.register(self.method, self.path, MagicMock(), orig_headers, None)
        self._check_headers(swift, self.method, self.path, orig_headers)

        new_headers = orig_headers.copy()
        new_headers[sysmeta_header('container', 'acl').title()] = 'bar'
        swift.register(self.method, self.path, MagicMock(), new_headers, None)
        # An explicitly provided sysmeta value must win over the old one.
        # (assertNotEqual instead of assertFalse(a == b) for a useful
        # failure message.)
        self.assertNotEqual(orig_headers, new_headers)
        self._check_headers(swift, self.method, self.path, new_headers)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_helpers.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import unittest
import mock
from io import BytesIO
from swift.common.swob import Request, HTTPAccepted
from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement, XMLNS_XSI
from swift.common.middleware.s3api.s3response import InvalidArgument
from swift.common.middleware.s3api.acl_utils import handle_acl_header
from swift.common.utils import md5
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.helpers import UnreadableInput
from test.unit.common.middleware.s3api.test_s3_acl import s3acl
class TestS3ApiAcl(S3ApiTestCase):
    """Tests for the ?acl sub-resource of the s3api middleware."""

    def setUp(self):
        super(TestS3ApiAcl, self).setUp()
        # All ACL API should be called against an existing bucket.
        self.swift.register('PUT', '/v1/AUTH_test/bucket',
                            HTTPAccepted, {}, None)

    def _check_acl(self, owner, body):
        """Assert *body* is an ACL granting FULL_CONTROL to *owner*."""
        elem = fromstring(body, 'AccessControlPolicy')
        permission = elem.find('./AccessControlList/Grant/Permission').text
        self.assertEqual(permission, 'FULL_CONTROL')
        name = elem.find('./AccessControlList/Grant/Grantee/ID').text
        self.assertEqual(name, owner)

    def _public_read_acl_xml(self):
        """Build AccessControlPolicy XML granting READ to AllUsers.

        Extracted because the same XML was previously built inline (and
        duplicated) by several PUT tests below.
        """
        elem = Element('AccessControlPolicy')
        owner = SubElement(elem, 'Owner')
        SubElement(owner, 'ID').text = 'id'
        acl = SubElement(elem, 'AccessControlList')
        grant = SubElement(acl, 'Grant')
        grantee = SubElement(grant, 'Grantee', nsmap={'xsi': XMLNS_XSI})
        grantee.set('{%s}type' % XMLNS_XSI, 'Group')
        SubElement(grantee, 'URI').text = \
            'http://acs.amazonaws.com/groups/global/AllUsers'
        SubElement(grant, 'Permission').text = 'READ'
        return tostring(elem)

    @s3acl
    def test_bucket_acl_GET(self):
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        if not self.s3api.conf.s3_acl:
            self._check_acl('test:tester', body)
        self.assertSetEqual(set((('HEAD', '/v1/AUTH_test/bucket'),)),
                            set(self.swift.calls))

    def test_bucket_acl_PUT(self):
        xml = self._public_read_acl_xml()
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

        # The same body, but chunk-encoded with no Content-Length.
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'PUT',
                                     'wsgi.input': BytesIO(xml)},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'Transfer-Encoding': 'chunked'})
        self.assertIsNone(req.content_length)
        self.assertIsNone(req.message_length())
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

    def test_bucket_canned_acl_PUT(self):
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'X-AMZ-ACL': 'public-read'})
        status, headers, body = self.call_s3api(req)
        self.assertEqual(status.split()[0], '200')

    @s3acl(s3acl_only=True)
    def test_bucket_canned_acl_PUT_with_s3acl(self):
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'X-AMZ-ACL': 'public-read'})
        # With s3_acl enabled, the legacy header translation must be skipped.
        with mock.patch('swift.common.middleware.s3api.s3request.'
                        'handle_acl_header') as mock_handler:
            status, headers, body = self.call_s3api(req)
            self.assertEqual(status.split()[0], '200')
            self.assertEqual(mock_handler.call_count, 0)

    def test_bucket_fails_with_both_acl_header_and_xml_PUT(self):
        xml = self._public_read_acl_xml()
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header(),
                                     'X-AMZ-ACL': 'public-read'},
                            body=xml)
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body),
                         'UnexpectedContent')

    def _test_put_no_body(self, use_content_length=False,
                          use_transfer_encoding=False, string_to_md5=b''):
        """PUT ?acl with neither body nor X-Amz-Acl: expect a 400."""
        content_md5 = base64.b64encode(
            md5(string_to_md5, usedforsecurity=False).digest()).strip()
        with UnreadableInput(self) as fake_input:
            req = Request.blank(
                '/bucket?acl',
                environ={
                    'REQUEST_METHOD': 'PUT',
                    'wsgi.input': fake_input},
                headers={
                    'Authorization': 'AWS test:tester:hmac',
                    'Date': self.get_date_header(),
                    'Content-MD5': content_md5},
                body='')
            if not use_content_length:
                req.environ.pop('CONTENT_LENGTH')
            if use_transfer_encoding:
                req.environ['HTTP_TRANSFER_ENCODING'] = 'chunked'
            status, headers, body = self.call_s3api(req)
        self.assertEqual(status, '400 Bad Request')
        self.assertEqual(self._get_error_code(body), 'MissingSecurityHeader')
        self.assertEqual(self._get_error_message(body),
                         'Your request was missing a required header.')
        self.assertIn(b'<MissingHeaderName>x-amz-acl</MissingHeaderName>',
                      body)

    @s3acl
    def test_bucket_fails_with_neither_acl_header_nor_xml_PUT(self):
        self._test_put_no_body()
        self._test_put_no_body(string_to_md5=b'test')
        self._test_put_no_body(use_content_length=True)
        self._test_put_no_body(use_content_length=True, string_to_md5=b'test')
        self._test_put_no_body(use_transfer_encoding=True)
        self._test_put_no_body(use_transfer_encoding=True, string_to_md5=b'zz')

    @s3acl
    def test_object_acl_GET(self):
        req = Request.blank('/bucket/object?acl',
                            environ={'REQUEST_METHOD': 'GET'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()})
        status, headers, body = self.call_s3api(req)
        if not self.s3api.conf.s3_acl:
            self._check_acl('test:tester', body)
        self.assertSetEqual(set((('HEAD', '/v1/AUTH_test/bucket/object'),)),
                            set(self.swift.calls))

    def test_invalid_xml(self):
        req = Request.blank('/bucket?acl',
                            environ={'REQUEST_METHOD': 'PUT'},
                            headers={'Authorization': 'AWS test:tester:hmac',
                                     'Date': self.get_date_header()},
                            body='invalid')
        status, headers, body = self.call_s3api(req)
        self.assertEqual(self._get_error_code(body), 'MalformedACLError')

    def test_handle_acl_header(self):
        def check_generated_acl_header(acl, targets):
            req = Request.blank('/bucket',
                                headers={'X-Amz-Acl': acl})
            handle_acl_header(req)
            for target in targets:
                self.assertIn(target[0], req.headers)
                self.assertEqual(req.headers[target[0]], target[1])

        check_generated_acl_header('public-read',
                                   [('X-Container-Read', '.r:*,.rlistings')])
        check_generated_acl_header('public-read-write',
                                   [('X-Container-Read', '.r:*,.rlistings'),
                                    ('X-Container-Write', '.r:*')])
        check_generated_acl_header('private',
                                   [('X-Container-Read', '.'),
                                    ('X-Container-Write', '.')])

    @s3acl(s3acl_only=True)
    def test_handle_acl_header_with_s3acl(self):
        def check_generated_acl_header(acl, targets):
            req = Request.blank('/bucket',
                                headers={'X-Amz-Acl': acl})
            for target in targets:
                self.assertNotIn(target, req.headers)
            self.assertIn('HTTP_X_AMZ_ACL', req.environ)
        # TODO: add translation and assertion for s3acl

        check_generated_acl_header('public-read',
                                   ['X-Container-Read'])
        check_generated_acl_header('public-read-write',
                                   ['X-Container-Read', 'X-Container-Write'])
        check_generated_acl_header('private',
                                   ['X-Container-Read', 'X-Container-Write'])

    def test_handle_acl_with_invalid_header_string(self):
        req = Request.blank('/bucket', headers={'X-Amz-Acl': 'invalid'})
        with self.assertRaises(InvalidArgument) as cm:
            handle_acl_header(req)
        self.assertIn('argument_name', cm.exception.info)
        self.assertEqual(cm.exception.info['argument_name'], 'x-amz-acl')
        self.assertIn('argument_value', cm.exception.info)
        self.assertEqual(cm.exception.info['argument_value'], 'invalid')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/s3api/test_acl.py |
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This stuff can't live in test/unit/__init__.py due to its swob dependency.
from swift.common import swob
from swift.common.utils import split_path
from swift.common.request_helpers import is_sys_meta
from test.unit.common.middleware.helpers import FakeSwift as BaseFakeSwift
class FakeSwift(BaseFakeSwift):
    """
    A good-enough fake Swift proxy server to use in testing middleware.
    """
    # s3api's ACL tests issue a synthetic TEST verb; allow it here.
    ALLOWED_METHODS = BaseFakeSwift.ALLOWED_METHODS + ['TEST']
    def __init__(self, s3_acl=False):
        # s3_acl: when True, emulate the auth middleware (path rewriting and
        # a swift.authorize callback) for s3api ACL tests.
        super(FakeSwift, self).__init__()
        self.s3_acl = s3_acl
        self.remote_user = 'authorized'
    def _fake_auth_middleware(self, env):
        """Mimic just enough auth middleware: rewrite the access-key path to
        an AUTH_-prefixed account and install swift.authorize."""
        if 'swift.authorize_override' in env:
            return
        if 's3api.auth_details' not in env:
            return
        tenant_user = env['s3api.auth_details']['access_key']
        tenant, user = tenant_user.rsplit(':', 1)
        path = env['PATH_INFO']
        env['PATH_INFO'] = path.replace(tenant_user, 'AUTH_' + tenant)
        if self.remote_user:
            env['REMOTE_USER'] = self.remote_user
        if env['REQUEST_METHOD'] == 'TEST':
            def authorize_cb(req):
                # Assume swift owner, if not yet set
                req.environ.setdefault('swift_owner', True)
                # But then default to blocking authz, to ensure we've replaced
                # the default auth system
                return swob.HTTPForbidden(request=req)
            env['swift.authorize'] = authorize_cb
        else:
            env['swift.authorize'] = lambda req: None
    def __call__(self, env, start_response):
        if self.s3_acl:
            self._fake_auth_middleware(env)
        return super(FakeSwift, self).__call__(env, start_response)
    def register(self, method, path, response_class, headers, body):
        """Register a canned response, preserving previously registered
        sysmeta headers that the new registration does not override."""
        # assuming the path format like /v1/account/container/object
        resource_map = ['account', 'container', 'object']
        index = len(list(filter(None, split_path(path, 0, 4, True)[1:]))) - 1
        resource = resource_map[index]
        if (method, path) in self._responses:
            old_headers = self._responses[(method, path)][1]
            headers = headers.copy()
            for key, value in old_headers.items():
                if is_sys_meta(resource, key) and key not in headers:
                    # keep old sysmeta for s3acl
                    headers.update({key: value})
        if body is not None and not isinstance(body, (bytes, list)):
            body = body.encode('utf8')
        return super(FakeSwift, self).register(
            method, path, response_class, headers, body)
    def register_unconditionally(self, method, path, response_class, headers,
                                 body):
        # register() keeps old sysmeta around, but
        # register_unconditionally() keeps nothing.
        if body is not None and not isinstance(body, bytes):
            body = body.encode('utf8')
        self._responses[(method, path)] = (response_class, headers, body)
    def clear_calls(self):
        # Drop the recorded request history in place.
        del self._calls[:]
class UnreadableInput(object):
    """Simulate a wsgi.input that blows up on any read().

    Some clients send neither a Content-Length nor a Transfer-Encoding
    header, which will cause (some versions of?) eventlet to bomb out on
    reads.  Used as a context manager, this object also verifies on exit
    that the code under test never attempted a read.
    """

    def __init__(self, test_case):
        self.calls = 0
        self.test_case = test_case

    def read(self, *args, **kwargs):
        # Record the attempt, then emulate eventlet's failure mode: reading
        # with neither Content-Length nor Transfer-Encoding raises TypeError
        # (see https://bugs.launchpad.net/swift3/+bug/1593870 in detail).
        self.calls += 1
        raise TypeError

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.test_case.assertEqual(0, self.calls)
| swift-master | test/unit/common/middleware/s3api/helpers.py |
# Copyright (c) 2015-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import json
import os
import unittest
import mock
from swift.common.request_helpers import is_object_transient_sysmeta
from swift.common.utils import MD5_OF_EMPTY_STRING
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.middleware.crypto import decrypter
from swift.common.middleware.crypto.crypto_utils import CRYPTO_KEY_CALLBACK, \
dump_crypto_meta, Crypto, load_crypto_meta
from swift.common.swob import Request, HTTPException, HTTPOk, \
HTTPPreconditionFailed, HTTPNotFound, HTTPPartialContent, bytes_to_wsgi
from test.debug_logger import debug_logger
from test.unit.common.middleware.crypto.crypto_helpers import md5hex, \
fetch_crypto_keys, FAKE_IV, encrypt, fake_get_crypto_meta
from test.unit.common.middleware.helpers import FakeSwift, FakeAppThatExcepts
def get_crypto_meta_header(crypto_meta=None):
    """Serialize *crypto_meta* (default: a fake one) into header form."""
    meta = fake_get_crypto_meta() if crypto_meta is None else crypto_meta
    return dump_crypto_meta(meta)
def encrypt_and_append_meta(value, key, crypto_meta=None):
    """Encrypt *value* with *key* and append serialized crypto-meta,
    producing a '<b64 ciphertext>; swift_meta=<meta>' header value."""
    if isinstance(value, bytes):
        raw = value
    else:
        raw = value.encode('ascii')
    ciphertext = base64.b64encode(encrypt(raw, key, FAKE_IV)).decode('ascii')
    return '%s; swift_meta=%s' % (ciphertext,
                                  get_crypto_meta_header(crypto_meta))
class TestDecrypterObjectRequests(unittest.TestCase):
    def setUp(self):
        # Wire the decrypter directly in front of a FakeSwift and capture
        # its logging for inspection.
        self.app = FakeSwift()
        self.decrypter = decrypter.Decrypter(self.app, {})
        self.decrypter.logger = debug_logger()
    def _make_response_headers(self, content_length, plaintext_etag, keys,
                               body_key, key_id=None):
        # helper method to make a typical set of response headers for a GET or
        # HEAD request
        cont_key = keys['container']
        object_key = keys['object']
        # The body key is itself stored wrapped (encrypted with the object
        # key) inside the body crypto-meta.
        body_key_meta = {'key': encrypt(body_key, object_key, FAKE_IV),
                         'iv': FAKE_IV}
        body_crypto_meta = fake_get_crypto_meta(body_key=body_key_meta)
        other_crypto_meta = fake_get_crypto_meta()
        if key_id:
            body_crypto_meta['key_id'] = key_id
            other_crypto_meta['key_id'] = key_id
        return HeaderKeyDict({
            'Etag': 'hashOfCiphertext',
            'content-type': 'text/plain',
            'content-length': content_length,
            'X-Object-Sysmeta-Crypto-Etag': '%s; swift_meta=%s' % (
                bytes_to_wsgi(base64.b64encode(encrypt(
                    plaintext_etag.encode('ascii'), object_key, FAKE_IV))),
                get_crypto_meta_header(other_crypto_meta)),
            'X-Object-Sysmeta-Crypto-Body-Meta':
                get_crypto_meta_header(body_crypto_meta),
            'X-Object-Transient-Sysmeta-Crypto-Meta':
                get_crypto_meta_header(other_crypto_meta),
            'x-object-transient-sysmeta-crypto-meta-test':
                bytes_to_wsgi(base64.b64encode(encrypt(
                    b'encrypt me', object_key, FAKE_IV))) +
                ';swift_meta=' + get_crypto_meta_header(other_crypto_meta),
            'x-object-sysmeta-container-update-override-etag':
                encrypt_and_append_meta('encrypt me, too', cont_key),
            'x-object-sysmeta-test': 'do not encrypt me',
        })
    def _test_request_success(self, method, body, key_id=None):
        """Issue *method* for an encrypted object and verify decryption.

        Registers an encrypted backend response for *body*, then asserts
        that etag, content-type, user metadata and sysmeta come back
        decrypted and that no crypto sysmeta leaks to the client.
        Returns the response for further assertions.
        """
        env = {'REQUEST_METHOD': method,
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        plaintext_etag = md5hex(body)
        body_key = os.urandom(32)
        enc_body = encrypt(body, body_key, FAKE_IV)
        hdrs = self._make_response_headers(
            len(enc_body), plaintext_etag, fetch_crypto_keys(key_id=key_id),
            body_key, key_id=key_id)
        if key_id:
            crypto_meta = load_crypto_meta(
                hdrs['X-Object-Sysmeta-Crypto-Body-Meta'])
            # sanity check that the test setup used provided key_id
            self.assertEqual(key_id, crypto_meta['key_id'])
        # there shouldn't be any x-object-meta- headers, but if there are
        # then the decrypted header will win where there is a name clash...
        hdrs.update({
            'x-object-meta-test': 'unexpected, overwritten by decrypted value',
            'x-object-meta-distinct': 'unexpected but distinct from encrypted'
        })
        self.app.register(
            method, '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual('text/plain', resp.headers['Content-Type'])
        self.assertEqual('encrypt me', resp.headers['x-object-meta-test'])
        self.assertEqual('unexpected but distinct from encrypted',
                         resp.headers['x-object-meta-distinct'])
        self.assertEqual('do not encrypt me',
                         resp.headers['x-object-sysmeta-test'])
        self.assertEqual(
            'encrypt me, too',
            resp.headers['X-Object-Sysmeta-Container-Update-Override-Etag'])
        self.assertNotIn('X-Object-Sysmeta-Crypto-Body-Meta', resp.headers)
        self.assertNotIn('X-Object-Sysmeta-Crypto-Etag', resp.headers)
        self.assertNotIn('Access-Control-Expose-Headers', resp.headers)
        return resp
    def test_GET_success(self):
        # GET decrypts the body for the default, named and empty secret ids.
        body = b'FAKE APP'
        resp = self._test_request_success('GET', body)
        self.assertEqual(body, resp.body)

        key_id_val = {'secret_id': 'myid'}
        resp = self._test_request_success('GET', body, key_id=key_id_val)
        self.assertEqual(body, resp.body)

        key_id_val = {'secret_id': ''}
        resp = self._test_request_success('GET', body, key_id=key_id_val)
        self.assertEqual(body, resp.body)
    def test_HEAD_success(self):
        # HEAD decrypts headers but must return an empty body.
        body = b'FAKE APP'
        resp = self._test_request_success('HEAD', body)
        self.assertEqual(b'', resp.body)

        key_id_val = {'secret_id': 'myid'}
        resp = self._test_request_success('HEAD', body, key_id=key_id_val)
        self.assertEqual(b'', resp.body)

        key_id_val = {'secret_id': ''}
        resp = self._test_request_success('HEAD', body, key_id=key_id_val)
        self.assertEqual(b'', resp.body)
def _check_different_keys_for_data_and_metadata(self, method):
env = {'REQUEST_METHOD': method,
CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
req = Request.blank('/v1/a/c/o', environ=env)
data_key_id = {}
metadata_key_id = {'secret_id': 'myid'}
body = b'object data'
plaintext_etag = md5hex(body)
body_key = os.urandom(32)
enc_body = encrypt(body, body_key, FAKE_IV)
data_key = fetch_crypto_keys(data_key_id)
metadata_key = fetch_crypto_keys(metadata_key_id)
# synthesise response headers to mimic different key used for data PUT
# vs metadata POST
hdrs = self._make_response_headers(
len(enc_body), plaintext_etag, data_key, body_key,
key_id=data_key_id)
metadata_hdrs = self._make_response_headers(
len(enc_body), plaintext_etag, metadata_key, body_key,
key_id=metadata_key_id)
for k, v in metadata_hdrs.items():
if is_object_transient_sysmeta(k):
self.assertNotEqual(hdrs[k], v) # sanity check
hdrs[k] = v
self.app.register(
method, '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
resp = req.get_response(self.decrypter)
self.assertEqual('200 OK', resp.status)
self.assertEqual(plaintext_etag, resp.headers['Etag'])
self.assertEqual('text/plain', resp.headers['Content-Type'])
self.assertEqual('encrypt me', resp.headers['x-object-meta-test'])
self.assertEqual(
'encrypt me, too',
resp.headers['X-Object-Sysmeta-Container-Update-Override-Etag'])
return resp
def test_GET_different_keys_for_data_and_metadata(self):
resp = self._check_different_keys_for_data_and_metadata('GET')
self.assertEqual(b'object data', resp.body)
def test_HEAD_different_keys_for_data_and_metadata(self):
resp = self._check_different_keys_for_data_and_metadata('HEAD')
self.assertEqual(b'', resp.body)
    def _check_unencrypted_data_and_encrypted_metadata(self, method):
        """Verify a request on an object whose body was stored unencrypted
        but whose user metadata was encrypted by a later POST.

        :param method: the request method, 'GET' or 'HEAD'
        :returns: the response from the decrypter
        """
        env = {'REQUEST_METHOD': method,
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        body = b'object data'
        plaintext_etag = md5hex(body)
        metadata_key = fetch_crypto_keys()
        # synthesise headers for unencrypted PUT + headers for encrypted POST
        hdrs = HeaderKeyDict({
            'Etag': plaintext_etag,
            'content-type': 'text/plain',
            'content-length': len(body)})
        # we don't need the data related headers but need a body key to keep
        # the helper function happy
        body_key = os.urandom(32)
        metadata_hdrs = self._make_response_headers(
            len(body), plaintext_etag, metadata_key, body_key)
        # only take the transient sysmeta (user metadata) from the synthesised
        # encrypted headers
        for k, v in metadata_hdrs.items():
            if is_object_transient_sysmeta(k):
                hdrs[k] = v
        self.app.register(
            method, '/v1/a/c/o', HTTPOk, body=body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual('text/plain', resp.headers['Content-Type'])
        self.assertEqual('encrypt me', resp.headers['x-object-meta-test'])
        self.assertNotIn('Access-Control-Expose-Headers', resp.headers)
        return resp

    def test_GET_unencrypted_data_and_encrypted_metadata(self):
        resp = self._check_unencrypted_data_and_encrypted_metadata('GET')
        self.assertEqual(b'object data', resp.body)

    def test_HEAD_unencrypted_data_and_encrypted_metadata(self):
        resp = self._check_unencrypted_data_and_encrypted_metadata('HEAD')
        self.assertEqual(b'', resp.body)
    def _check_encrypted_data_and_unencrypted_metadata(self, method):
        """Verify a request on an object with an encrypted body but
        unencrypted user metadata (e.g. a POST made while encryption was
        disabled).

        :param method: the request method, 'GET' or 'HEAD'
        :returns: the response from the decrypter
        """
        env = {'REQUEST_METHOD': method,
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        body = b'object data'
        plaintext_etag = md5hex(body)
        body_key = os.urandom(32)
        enc_body = encrypt(body, body_key, FAKE_IV)
        data_key = fetch_crypto_keys()
        hdrs = self._make_response_headers(
            len(enc_body), plaintext_etag, data_key, body_key)
        # strip the encrypted user metadata and replace with plaintext meta
        to_remove = [k for k in hdrs if is_object_transient_sysmeta(k)]
        for k in to_remove:
            hdrs.pop(k)
        hdrs['x-object-meta-test'] = 'unencrypted'
        self.app.register(
            method, '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual('text/plain', resp.headers['Content-Type'])
        self.assertEqual('unencrypted', resp.headers['x-object-meta-test'])
        self.assertNotIn('Access-Control-Expose-Headers', resp.headers)
        return resp

    def test_GET_encrypted_data_and_unencrypted_metadata(self):
        resp = self._check_encrypted_data_and_unencrypted_metadata('GET')
        self.assertEqual(b'object data', resp.body)

    def test_HEAD_encrypted_data_and_unencrypted_metadata(self):
        resp = self._check_encrypted_data_and_unencrypted_metadata('HEAD')
        self.assertEqual(b'', resp.body)
    def test_headers_case(self):
        """Header decryption and the CORS Access-Control-Expose-Headers
        handling must be case-insensitive: decrypted meta names are appended
        to the expose list regardless of the backend's header casing."""
        body = b'fAkE ApP'
        req = Request.blank('/v1/a/c/o', body='FaKe', headers={
            'Origin': 'http://example.com'})
        req.environ[CRYPTO_KEY_CALLBACK] = fetch_crypto_keys
        plaintext_etag = md5hex(body)
        body_key = os.urandom(32)
        enc_body = encrypt(body, body_key, FAKE_IV)
        hdrs = self._make_response_headers(
            len(enc_body), plaintext_etag, fetch_crypto_keys(), body_key)
        # mixed-case plaintext meta plus CORS headers from the backend
        hdrs.update({
            'x-Object-mEta-ignoRes-caSe': 'thIs pArt WilL bE cOol',
            'access-control-Expose-Headers': 'x-object-meta-ignores-case',
            'access-control-allow-origin': '*',
        })
        self.assertNotIn('x-object-meta-test', [k.lower() for k in hdrs])
        self.app.register(
            'GET', '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
        status, headers, app_iter = req.call_application(self.decrypter)
        self.assertEqual(status, '200 OK')
        expected = {
            'Etag': '7f7837924188f7b511a9e3881a9f77a8',
            'X-Object-Sysmeta-Container-Update-Override-Etag':
                'encrypt me, too',
            'X-Object-Meta-Test': 'encrypt me',
            'Content-Length': '8',
            'X-Object-Meta-Ignores-Case': 'thIs pArt WilL bE cOol',
            'X-Object-Sysmeta-Test': 'do not encrypt me',
            'Content-Type': 'text/plain',
            'Access-Control-Expose-Headers': ', '.join([
                'x-object-meta-ignores-case',
                'x-object-meta-test',
            ]),
            'Access-Control-Allow-Origin': '*',
        }
        self.assertEqual(dict(headers), expected)
        self.assertEqual(b'fAkE ApP', b''.join(app_iter))
    def _test_412_response(self, method):
        """A 412 to a conditional request has a plaintext error body that
        must not be decrypted, while its Etag and metadata headers are still
        decrypted.

        :param method: the request method, 'GET' or 'HEAD'
        """
        # simulate a 412 response to a conditional GET which has an Etag
        # header
        data = b'the object content'
        env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env, method=method)
        resp_body = b'I am sorry, you have failed to meet a precondition'
        hdrs = self._make_response_headers(
            len(resp_body), md5hex(data), fetch_crypto_keys(), b'not used')
        self.app.register(method, '/v1/a/c/o', HTTPPreconditionFailed,
                          body=resp_body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('412 Precondition Failed', resp.status)
        # the response body should not be decrypted, it is already plaintext
        self.assertEqual(resp_body if method == 'GET' else b'', resp.body)
        # whereas the Etag and other headers should be decrypted
        self.assertEqual(md5hex(data), resp.headers['Etag'])
        self.assertEqual('text/plain', resp.headers['Content-Type'])
        self.assertEqual('encrypt me', resp.headers['x-object-meta-test'])
        self.assertEqual('do not encrypt me',
                         resp.headers['x-object-sysmeta-test'])

    def test_GET_412_response(self):
        self._test_412_response('GET')

    def test_HEAD_412_response(self):
        self._test_412_response('HEAD')
def _test_404_response(self, method):
# simulate a 404 response, sanity check response headers
env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
req = Request.blank('/v1/a/c/o', environ=env, method=method)
resp_body = b'You still have not found what you are looking for'
hdrs = {'content-type': 'text/plain',
'content-length': len(resp_body)}
self.app.register(method, '/v1/a/c/o', HTTPNotFound,
body=resp_body, headers=hdrs)
resp = req.get_response(self.decrypter)
self.assertEqual('404 Not Found', resp.status)
# the response body should not be decrypted, it is already plaintext
self.assertEqual(resp_body if method == 'GET' else b'', resp.body)
# there should be no etag header inserted by decrypter
self.assertNotIn('Etag', resp.headers)
self.assertEqual('text/plain', resp.headers['Content-Type'])
def test_GET_404_response(self):
self._test_404_response('GET')
def test_HEAD_404_response(self):
self._test_404_response('HEAD')
    def test_GET_missing_etag_crypto_meta(self):
        """An encrypted Etag header without its crypto meta annotation cannot
        be decrypted: expect a 500 and a logged header-decrypt error."""
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        body = b'FAKE APP'
        key = fetch_crypto_keys()['object']
        enc_body = encrypt(body, key, FAKE_IV)
        hdrs = self._make_response_headers(
            len(body), md5hex(body), fetch_crypto_keys(), b'not used')
        # simulate missing crypto meta from encrypted etag
        hdrs['X-Object-Sysmeta-Crypto-Etag'] = bytes_to_wsgi(base64.b64encode(
            encrypt(md5hex(body).encode('ascii'), key, FAKE_IV)))
        self.app.register('GET', '/v1/a/c/o', HTTPOk, body=enc_body,
                          headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('500 Internal Error', resp.status)
        self.assertIn(b'Error decrypting header', resp.body)
        self.assertIn('Error decrypting header X-Object-Sysmeta-Crypto-Etag',
                      self.decrypter.logger.get_lines_for_level('error')[0])
    def _test_override_etag_bad_meta(self, method, bad_crypto_meta):
        """Expect a 500 and a logged error when the container update override
        etag header carries bad crypto meta.

        :param method: the request method, 'GET' or 'HEAD'
        :param bad_crypto_meta: a dict of invalid crypto meta to append
        :returns: the response from the decrypter
        """
        env = {'REQUEST_METHOD': method,
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        body = b'FAKE APP'
        key = fetch_crypto_keys()['object']
        enc_body = encrypt(body, key, FAKE_IV)
        hdrs = self._make_response_headers(
            len(body), md5hex(body), fetch_crypto_keys(), b'not used')
        # simulate missing crypto meta from encrypted override etag
        hdrs['X-Object-Sysmeta-Container-Update-Override-Etag'] = \
            encrypt_and_append_meta(
                md5hex(body), key, crypto_meta=bad_crypto_meta)
        self.app.register(method, '/v1/a/c/o', HTTPOk, body=enc_body,
                          headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('500 Internal Error', resp.status)
        self.assertIn('Error decrypting header '
                      'X-Object-Sysmeta-Container-Update-Override-Etag',
                      self.decrypter.logger.get_lines_for_level('error')[0])
        return resp

    def test_GET_override_etag_bad_iv(self):
        bad_crypto_meta = fake_get_crypto_meta()
        bad_crypto_meta['iv'] = b'bad_iv'
        resp = self._test_override_etag_bad_meta('GET', bad_crypto_meta)
        self.assertIn(b'Error decrypting header', resp.body)

    def test_HEAD_override_etag_bad_iv(self):
        bad_crypto_meta = fake_get_crypto_meta()
        bad_crypto_meta['iv'] = b'bad_iv'
        resp = self._test_override_etag_bad_meta('HEAD', bad_crypto_meta)
        self.assertEqual(b'', resp.body)

    def test_GET_override_etag_bad_cipher(self):
        bad_crypto_meta = fake_get_crypto_meta()
        bad_crypto_meta['cipher'] = 'unknown cipher'
        resp = self._test_override_etag_bad_meta('GET', bad_crypto_meta)
        self.assertIn(b'Error decrypting header', resp.body)

    def test_HEAD_override_etag_bad_cipher(self):
        bad_crypto_meta = fake_get_crypto_meta()
        bad_crypto_meta['cipher'] = 'unknown cipher'
        resp = self._test_override_etag_bad_meta('HEAD', bad_crypto_meta)
        self.assertEqual(b'', resp.body)
    def _test_bad_key(self, method):
        """Issue a request whose key callback returns an invalid object key.

        :param method: the request method, 'GET' or 'HEAD'
        :returns: the response from the decrypter
        """
        # use bad key
        def bad_fetch_crypto_keys(**kwargs):
            keys = fetch_crypto_keys()
            keys['object'] = b'bad key'
            return keys

        env = {'REQUEST_METHOD': method,
               CRYPTO_KEY_CALLBACK: bad_fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        body = b'FAKE APP'
        # note: the response is encrypted with a *valid* key; only the
        # callback handed to the decrypter returns the bad one
        key = fetch_crypto_keys()['object']
        enc_body = encrypt(body, key, FAKE_IV)
        hdrs = self._make_response_headers(
            len(body), md5hex(body), fetch_crypto_keys(), b'not used')
        self.app.register(method, '/v1/a/c/o', HTTPOk, body=enc_body,
                          headers=hdrs)
        return req.get_response(self.decrypter)

    def test_HEAD_with_bad_key(self):
        resp = self._test_bad_key('HEAD')
        self.assertEqual('500 Internal Error', resp.status)
        self.assertIn("Bad key for 'object'",
                      self.decrypter.logger.get_lines_for_level('error')[0])

    def test_GET_with_bad_key(self):
        resp = self._test_bad_key('GET')
        self.assertEqual('500 Internal Error', resp.status)
        self.assertEqual(b'Unable to retrieve encryption keys.',
                         resp.body)
        self.assertIn("Bad key for 'object'",
                      self.decrypter.logger.get_lines_for_level('error')[0])
def _test_bad_crypto_meta_for_user_metadata(self, method, bad_crypto_meta):
# use bad iv for metadata headers
env = {'REQUEST_METHOD': method,
CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
req = Request.blank('/v1/a/c/o', environ=env)
body = b'FAKE APP'
key = fetch_crypto_keys()['object']
enc_body = encrypt(body, key, FAKE_IV)
hdrs = self._make_response_headers(
len(body), md5hex(body),
fetch_crypto_keys(), b'not used')
enc_val = base64.b64encode(encrypt(
b'encrypt me', key, FAKE_IV)).decode('ascii')
if bad_crypto_meta:
enc_val += ';swift_meta=' + get_crypto_meta_header(
crypto_meta=bad_crypto_meta)
hdrs['x-object-transient-sysmeta-crypto-meta-test'] = enc_val
self.app.register(method, '/v1/a/c/o', HTTPOk, body=enc_body,
headers=hdrs)
resp = req.get_response(self.decrypter)
self.assertEqual('500 Internal Error', resp.status)
self.assertIn(
'Error decrypting header X-Object-Transient-Sysmeta-Crypto-Meta-'
'Test', self.decrypter.logger.get_lines_for_level('error')[0])
return resp
def test_HEAD_with_missing_crypto_meta_for_user_metadata(self):
self._test_bad_crypto_meta_for_user_metadata('HEAD', None)
self.assertIn('Missing crypto meta in value',
self.decrypter.logger.get_lines_for_level('error')[0])
def test_GET_with_missing_crypto_meta_for_user_metadata(self):
self._test_bad_crypto_meta_for_user_metadata('GET', None)
self.assertIn('Missing crypto meta in value',
self.decrypter.logger.get_lines_for_level('error')[0])
def test_HEAD_with_bad_iv_for_user_metadata(self):
bad_crypto_meta = fake_get_crypto_meta()
bad_crypto_meta['iv'] = b'bad_iv'
self._test_bad_crypto_meta_for_user_metadata('HEAD', bad_crypto_meta)
self.assertIn('IV must be length 16',
self.decrypter.logger.get_lines_for_level('error')[0])
def test_HEAD_with_missing_iv_for_user_metadata(self):
bad_crypto_meta = fake_get_crypto_meta()
bad_crypto_meta.pop('iv')
self._test_bad_crypto_meta_for_user_metadata('HEAD', bad_crypto_meta)
self.assertIn(
'iv', self.decrypter.logger.get_lines_for_level('error')[0])
def test_GET_with_bad_iv_for_user_metadata(self):
bad_crypto_meta = fake_get_crypto_meta()
bad_crypto_meta['iv'] = b'bad_iv'
resp = self._test_bad_crypto_meta_for_user_metadata(
'GET', bad_crypto_meta)
self.assertEqual(b'Error decrypting header', resp.body)
self.assertIn('IV must be length 16',
self.decrypter.logger.get_lines_for_level('error')[0])
def test_GET_with_missing_iv_for_user_metadata(self):
bad_crypto_meta = fake_get_crypto_meta()
bad_crypto_meta.pop('iv')
resp = self._test_bad_crypto_meta_for_user_metadata(
'GET', bad_crypto_meta)
self.assertEqual(b'Error decrypting header', resp.body)
self.assertIn(
'iv', self.decrypter.logger.get_lines_for_level('error')[0])
def _test_GET_with_bad_crypto_meta_for_object_body(self, bad_crypto_meta):
# use bad iv for object body
env = {'REQUEST_METHOD': 'GET',
CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
req = Request.blank('/v1/a/c/o', environ=env)
body = b'FAKE APP'
key = fetch_crypto_keys()['object']
enc_body = encrypt(body, key, FAKE_IV)
hdrs = self._make_response_headers(
len(body), md5hex(body), fetch_crypto_keys(), b'not used')
hdrs['X-Object-Sysmeta-Crypto-Body-Meta'] = \
get_crypto_meta_header(crypto_meta=bad_crypto_meta)
self.app.register('GET', '/v1/a/c/o', HTTPOk, body=enc_body,
headers=hdrs)
resp = req.get_response(self.decrypter)
self.assertEqual('500 Internal Error', resp.status)
self.assertEqual(b'Error decrypting object', resp.body)
self.assertIn('Error decrypting object',
self.decrypter.logger.get_lines_for_level('error')[0])
def test_GET_with_bad_iv_for_object_body(self):
bad_crypto_meta = fake_get_crypto_meta(key=os.urandom(32))
bad_crypto_meta['iv'] = b'bad_iv'
self._test_GET_with_bad_crypto_meta_for_object_body(bad_crypto_meta)
self.assertIn('IV must be length 16',
self.decrypter.logger.get_lines_for_level('error')[0])
def test_GET_with_missing_iv_for_object_body(self):
bad_crypto_meta = fake_get_crypto_meta(key=os.urandom(32))
bad_crypto_meta.pop('iv')
self._test_GET_with_bad_crypto_meta_for_object_body(bad_crypto_meta)
self.assertIn("Missing 'iv'",
self.decrypter.logger.get_lines_for_level('error')[0])
def test_GET_with_bad_body_key_for_object_body(self):
body_key_meta = {'key': b'wrapped too short key', 'iv': FAKE_IV}
bad_crypto_meta = fake_get_crypto_meta(body_key=body_key_meta)
self._test_GET_with_bad_crypto_meta_for_object_body(bad_crypto_meta)
self.assertIn('Key must be length 32',
self.decrypter.logger.get_lines_for_level('error')[0])
def test_GET_with_missing_body_key_for_object_body(self):
bad_crypto_meta = fake_get_crypto_meta() # no key by default
self._test_GET_with_bad_crypto_meta_for_object_body(bad_crypto_meta)
self.assertIn("Missing 'body_key'",
self.decrypter.logger.get_lines_for_level('error')[0])
    def _test_req_metadata_not_encrypted(self, method):
        """Metadata without crypto meta must not be decrypted; covers an
        unencrypted POST to an otherwise encrypted object.

        :param method: the request method, 'GET' or 'HEAD'
        """
        # check that metadata is not decrypted if it does not have crypto
        # meta; testing for case of an unencrypted POST to an object.
        env = {'REQUEST_METHOD': method,
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        body = b'FAKE APP'
        plaintext_etag = md5hex(body)
        body_key = os.urandom(32)
        enc_body = encrypt(body, body_key, FAKE_IV)
        hdrs = self._make_response_headers(
            len(body), plaintext_etag, fetch_crypto_keys(), body_key)
        # swap the encrypted user meta for a plaintext header with no
        # crypto meta annotation
        hdrs.pop('x-object-transient-sysmeta-crypto-meta-test')
        hdrs['x-object-meta-test'] = 'plaintext not encrypted'
        self.app.register(
            method, '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual('text/plain', resp.headers['Content-Type'])
        self.assertEqual('plaintext not encrypted',
                         resp.headers['x-object-meta-test'])

    def test_HEAD_metadata_not_encrypted(self):
        self._test_req_metadata_not_encrypted('HEAD')

    def test_GET_metadata_not_encrypted(self):
        self._test_req_metadata_not_encrypted('GET')
def test_GET_unencrypted_data(self):
# testing case of an unencrypted object with encrypted metadata from
# a later POST
env = {'REQUEST_METHOD': 'GET',
CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
req = Request.blank('/v1/a/c/o', environ=env)
body = b'FAKE APP'
obj_key = fetch_crypto_keys()['object']
hdrs = {'Etag': md5hex(body),
'content-type': 'text/plain',
'content-length': len(body),
'x-object-transient-sysmeta-crypto-meta-test':
bytes_to_wsgi(base64.b64encode(encrypt(
b'encrypt me', obj_key, FAKE_IV))) +
';swift_meta=' + get_crypto_meta_header(),
'x-object-sysmeta-test': 'do not encrypt me'}
self.app.register('GET', '/v1/a/c/o', HTTPOk, body=body, headers=hdrs)
resp = req.get_response(self.decrypter)
self.assertEqual(body, resp.body)
self.assertEqual('200 OK', resp.status)
self.assertEqual(md5hex(body), resp.headers['Etag'])
self.assertEqual('text/plain', resp.headers['Content-Type'])
# POSTed user meta was encrypted
self.assertEqual('encrypt me', resp.headers['x-object-meta-test'])
# PUT sysmeta was not encrypted
self.assertEqual('do not encrypt me',
resp.headers['x-object-sysmeta-test'])
    def test_GET_multiseg(self):
        """The decrypter must handle a response body delivered in multiple
        chunks, all encrypted with a single continuing cipher context."""
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        chunks = [b'some', b'chunks', b'of data']
        body = b''.join(chunks)
        plaintext_etag = md5hex(body)
        body_key = os.urandom(32)
        # a single encryption context spans all chunks
        ctxt = Crypto().create_encryption_ctxt(body_key, FAKE_IV)
        enc_body = [encrypt(chunk, ctxt=ctxt) for chunk in chunks]
        hdrs = self._make_response_headers(
            sum(map(len, enc_body)), plaintext_etag, fetch_crypto_keys(),
            body_key)
        self.app.register(
            'GET', '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual(body, resp.body)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual('text/plain', resp.headers['Content-Type'])
    def test_GET_multiseg_with_range(self):
        """A ranged GET of a chunked encrypted body: the decrypter must seek
        the cipher context to the range's first byte offset."""
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        req.headers['Content-Range'] = 'bytes 3-10/17'
        chunks = [b'0123', b'45678', b'9abcdef']
        body = b''.join(chunks)
        plaintext_etag = md5hex(body)
        body_key = os.urandom(32)
        ctxt = Crypto().create_encryption_ctxt(body_key, FAKE_IV)
        enc_body = [encrypt(chunk, ctxt=ctxt) for chunk in chunks]
        # trim the encrypted chunks to the byte range 3-10
        enc_body = [enc_body[0][3:], enc_body[1], enc_body[2][:2]]
        hdrs = self._make_response_headers(
            sum(map(len, enc_body)), plaintext_etag, fetch_crypto_keys(),
            body_key)
        hdrs['content-range'] = req.headers['Content-Range']
        self.app.register(
            'GET', '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual(b'3456789a', resp.body)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual('text/plain', resp.headers['Content-Type'])
    # Force the decrypter context updates to be less than one of our range
    # sizes to check that the decrypt context offset is setup correctly with
    # offset to first byte of range for first update and then re-used.
    # Do mocking here to have the mocked value have effect in the generator
    # function.
    @mock.patch.object(decrypter, 'DECRYPT_CHUNK_SIZE', 4)
    def test_GET_multipart_ciphertext(self):
        """A multi-range (multipart/byteranges) GET: each MIME doc part's
        ciphertext must be decrypted at that part's byte offset."""
        # build fake multipart response body
        body_key = os.urandom(32)
        plaintext = b'Cwm fjord veg balks nth pyx quiz'
        plaintext_etag = md5hex(plaintext)
        ciphertext = encrypt(plaintext, body_key, FAKE_IV)
        parts = ((0, 3, 'text/plain'),
                 (4, 9, 'text/plain; charset=us-ascii'),
                 (24, 32, 'text/plain'))
        length = len(ciphertext)
        body = b''
        for start, end, ctype in parts:
            body += b'--multipartboundary\r\n'
            body += b'Content-Type: %s\r\n' % ctype.encode('utf-8')
            body += b'Content-Range: bytes %d-%d/%d' % (start, end - 1, length)
            body += b'\r\n\r\n' + ciphertext[start:end] + b'\r\n'
        body += b'--multipartboundary--'
        # register request with fake swift
        hdrs = self._make_response_headers(
            len(body), plaintext_etag, fetch_crypto_keys(), body_key)
        hdrs['content-type'] = \
            'multipart/byteranges;boundary=multipartboundary'
        self.app.register('GET', '/v1/a/c/o', HTTPPartialContent, body=body,
                          headers=hdrs)
        # issue request
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        resp = req.get_response(self.decrypter)
        self.assertEqual('206 Partial Content', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual(len(body), int(resp.headers['Content-Length']))
        self.assertEqual('multipart/byteranges;boundary=multipartboundary',
                         resp.headers['Content-Type'])
        # the multipart headers could be re-ordered, so parse response body to
        # verify expected content
        resp_lines = resp.body.split(b'\r\n')
        resp_lines.reverse()
        for start, end, ctype in parts:
            self.assertEqual(b'--multipartboundary', resp_lines.pop())
            expected_header_lines = {
                b'Content-Type: %s' % ctype.encode('utf8'),
                b'Content-Range: bytes %d-%d/%d' % (start, end - 1, length)}
            resp_header_lines = {resp_lines.pop(), resp_lines.pop()}
            self.assertEqual(expected_header_lines, resp_header_lines)
            self.assertEqual(b'', resp_lines.pop())
            self.assertEqual(plaintext[start:end], resp_lines.pop())
        self.assertEqual(b'--multipartboundary--', resp_lines.pop())
        # we should have consumed the whole response body
        self.assertFalse(resp_lines)
    def test_GET_multipart_content_type(self):
        """A plain 200 whose content type merely *looks* multipart must be
        decrypted as a single body, not parsed as MIME documents."""
        # *just* having multipart content type shouldn't trigger the mime doc
        # code path
        body_key = os.urandom(32)
        plaintext = b'Cwm fjord veg balks nth pyx quiz'
        plaintext_etag = md5hex(plaintext)
        ciphertext = encrypt(plaintext, body_key, FAKE_IV)
        # register request with fake swift
        hdrs = self._make_response_headers(
            len(ciphertext), plaintext_etag, fetch_crypto_keys(), body_key)
        hdrs['content-type'] = \
            'multipart/byteranges;boundary=multipartboundary'
        self.app.register('GET', '/v1/a/c/o', HTTPOk, body=ciphertext,
                          headers=hdrs)
        # issue request
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        resp = req.get_response(self.decrypter)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual(len(plaintext), int(resp.headers['Content-Length']))
        self.assertEqual('multipart/byteranges;boundary=multipartboundary',
                         resp.headers['Content-Type'])
        self.assertEqual(plaintext, resp.body)
    def test_GET_multipart_no_body_crypto_meta(self):
        """A multipart response with no body crypto meta (unencrypted object)
        must pass through the decrypter unchanged."""
        # build fake multipart response body
        plaintext = b'Cwm fjord veg balks nth pyx quiz'
        plaintext_etag = md5hex(plaintext)
        parts = ((0, 3, 'text/plain'),
                 (4, 9, 'text/plain; charset=us-ascii'),
                 (24, 32, 'text/plain'))
        length = len(plaintext)
        body = b''
        for start, end, ctype in parts:
            body += b'--multipartboundary\r\n'
            body += b'Content-Type: %s\r\n' % ctype.encode('utf-8')
            body += b'Content-Range: bytes %d-%d/%d' % (start, end - 1, length)
            body += b'\r\n\r\n' + plaintext[start:end] + b'\r\n'
        body += b'--multipartboundary--'
        # register request with fake swift
        hdrs = {
            'Etag': plaintext_etag,
            'content-type': 'multipart/byteranges;boundary=multipartboundary',
            'content-length': len(body)}
        self.app.register('GET', '/v1/a/c/o', HTTPPartialContent, body=body,
                          headers=hdrs)
        # issue request
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        resp = req.get_response(self.decrypter)
        self.assertEqual('206 Partial Content', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual(len(body), int(resp.headers['Content-Length']))
        self.assertEqual('multipart/byteranges;boundary=multipartboundary',
                         resp.headers['Content-Type'])
        # the multipart response body should be unchanged
        self.assertEqual(body, resp.body)
    def _test_GET_multipart_bad_body_crypto_meta(self, bad_crypto_meta):
        """Expect a 500 when a multipart response's body crypto meta is
        invalid.

        :param bad_crypto_meta: a dict of invalid body crypto meta
        """
        # build fake multipart response body
        key = fetch_crypto_keys()['object']
        ctxt = Crypto().create_encryption_ctxt(key, FAKE_IV)
        plaintext = b'Cwm fjord veg balks nth pyx quiz'
        plaintext_etag = md5hex(plaintext)
        ciphertext = encrypt(plaintext, ctxt=ctxt)
        parts = ((0, 3, 'text/plain'),
                 (4, 9, 'text/plain; charset=us-ascii'),
                 (24, 32, 'text/plain'))
        length = len(ciphertext)
        body = b''
        for start, end, ctype in parts:
            body += b'--multipartboundary\r\n'
            body += b'Content-Type: %s\r\n' % ctype.encode('utf-8')
            body += b'Content-Range: bytes %d-%d/%d' % (start, end - 1, length)
            body += b'\r\n\r\n' + ciphertext[start:end] + b'\r\n'
        body += b'--multipartboundary--'
        # register request with fake swift
        hdrs = self._make_response_headers(
            len(body), plaintext_etag, fetch_crypto_keys(), b'not used')
        hdrs['content-type'] = \
            'multipart/byteranges;boundary=multipartboundary'
        hdrs['X-Object-Sysmeta-Crypto-Body-Meta'] = \
            get_crypto_meta_header(bad_crypto_meta)
        self.app.register('GET', '/v1/a/c/o', HTTPOk, body=body, headers=hdrs)
        # issue request
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        resp = req.get_response(self.decrypter)
        self.assertEqual('500 Internal Error', resp.status)
        self.assertEqual(b'Error decrypting object', resp.body)
        self.assertIn('Error decrypting object',
                      self.decrypter.logger.get_lines_for_level('error')[0])

    def test_GET_multipart_bad_body_cipher(self):
        self._test_GET_multipart_bad_body_crypto_meta(
            {'cipher': 'Mystery cipher', 'iv': b'1234567887654321'})
        self.assertIn('Cipher must be AES_CTR_256',
                      self.decrypter.logger.get_lines_for_level('error')[0])

    def test_GET_multipart_missing_body_cipher(self):
        self._test_GET_multipart_bad_body_crypto_meta(
            {'iv': b'1234567887654321'})
        self.assertIn('cipher',
                      self.decrypter.logger.get_lines_for_level('error')[0])

    def test_GET_multipart_too_short_body_iv(self):
        self._test_GET_multipart_bad_body_crypto_meta(
            {'cipher': 'AES_CTR_256', 'iv': b'too short'})
        self.assertIn('IV must be length 16',
                      self.decrypter.logger.get_lines_for_level('error')[0])

    def test_GET_multipart_too_long_body_iv(self):
        self._test_GET_multipart_bad_body_crypto_meta(
            {'cipher': 'AES_CTR_256', 'iv': b'a little too long'})
        self.assertIn('IV must be length 16',
                      self.decrypter.logger.get_lines_for_level('error')[0])

    def test_GET_multipart_missing_body_iv(self):
        self._test_GET_multipart_bad_body_crypto_meta(
            {'cipher': 'AES_CTR_256'})
        self.assertIn('iv',
                      self.decrypter.logger.get_lines_for_level('error')[0])
def test_GET_missing_key_callback(self):
# Do not provide keys, and do not set override flag
env = {'REQUEST_METHOD': 'GET'}
req = Request.blank('/v1/a/c/o', environ=env)
body = b'FAKE APP'
enc_body = encrypt(body, fetch_crypto_keys()['object'], FAKE_IV)
hdrs = self._make_response_headers(
len(body), md5hex(b'not the body'),
fetch_crypto_keys(), b'not used')
self.app.register(
'GET', '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
resp = req.get_response(self.decrypter)
self.assertEqual('500 Internal Error', resp.status)
self.assertEqual(b'Unable to retrieve encryption keys.',
resp.body)
self.assertIn('missing callback',
self.decrypter.logger.get_lines_for_level('error')[0])
    def test_GET_error_in_key_callback(self):
        """If the key callback raises, the decrypter must return a 500 and
        log the callback's exception message."""
        def raise_exc(**kwargs):
            raise Exception('Testing')

        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: raise_exc}
        req = Request.blank('/v1/a/c/o', environ=env)
        body = b'FAKE APP'
        enc_body = encrypt(body, fetch_crypto_keys()['object'], FAKE_IV)
        hdrs = self._make_response_headers(
            len(body), md5hex(body), fetch_crypto_keys(), b'not used')
        self.app.register(
            'GET', '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('500 Internal Error', resp.status)
        self.assertEqual(b'Unable to retrieve encryption keys.',
                         resp.body)
        self.assertIn('from callback: Testing',
                      self.decrypter.logger.get_lines_for_level('error')[0])
    def test_GET_cipher_mismatch_for_body(self):
        """An unknown cipher in the body crypto meta must produce a 500 and
        a logged bad-crypto-meta error."""
        # Cipher does not match
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        body = b'FAKE APP'
        enc_body = encrypt(body, fetch_crypto_keys()['object'], FAKE_IV)
        bad_crypto_meta = fake_get_crypto_meta()
        bad_crypto_meta['cipher'] = 'unknown_cipher'
        hdrs = self._make_response_headers(
            len(enc_body), md5hex(body), fetch_crypto_keys(), b'not used')
        hdrs['X-Object-Sysmeta-Crypto-Body-Meta'] = \
            get_crypto_meta_header(crypto_meta=bad_crypto_meta)
        self.app.register(
            'GET', '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('500 Internal Error', resp.status)
        self.assertEqual(b'Error decrypting object', resp.body)
        self.assertIn('Error decrypting object',
                      self.decrypter.logger.get_lines_for_level('error')[0])
        self.assertIn('Bad crypto meta: Cipher',
                      self.decrypter.logger.get_lines_for_level('error')[0])
    def test_GET_cipher_mismatch_for_metadata(self):
        """An unknown cipher in a user metadata header's crypto meta must
        produce a 500 and a logged header-decrypt error."""
        # Cipher does not match
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank('/v1/a/c/o', environ=env)
        body = b'FAKE APP'
        key = fetch_crypto_keys()['object']
        enc_body = encrypt(body, key, FAKE_IV)
        bad_crypto_meta = fake_get_crypto_meta()
        bad_crypto_meta['cipher'] = 'unknown_cipher'
        hdrs = self._make_response_headers(
            len(enc_body), md5hex(body), fetch_crypto_keys(), b'not used')
        enc_val = bytes_to_wsgi(base64.b64encode(
            encrypt(b'encrypt me', key, FAKE_IV)))
        hdrs.update({'x-object-transient-sysmeta-crypto-meta-test':
                     enc_val + ';swift_meta=' +
                     get_crypto_meta_header(crypto_meta=bad_crypto_meta)})
        self.app.register(
            'GET', '/v1/a/c/o', HTTPOk, body=enc_body, headers=hdrs)
        resp = req.get_response(self.decrypter)
        self.assertEqual('500 Internal Error', resp.status)
        self.assertEqual(b'Error decrypting header', resp.body)
        self.assertIn(
            'Error decrypting header X-Object-Transient-Sysmeta-Crypto-Meta-'
            'Test', self.decrypter.logger.get_lines_for_level('error')[0])
def test_GET_decryption_override(self):
# This covers the case of an old un-encrypted object
env = {'REQUEST_METHOD': 'GET',
CRYPTO_KEY_CALLBACK: fetch_crypto_keys,
'swift.crypto.override': True}
req = Request.blank('/v1/a/c/o', environ=env)
body = b'FAKE APP'
hdrs = {'Etag': md5hex(body),
'content-type': 'text/plain',
'content-length': len(body),
'x-object-meta-test': 'do not encrypt me',
'x-object-sysmeta-test': 'do not encrypt me'}
self.app.register('GET', '/v1/a/c/o', HTTPOk, body=body, headers=hdrs)
resp = req.get_response(self.decrypter)
self.assertEqual(body, resp.body)
self.assertEqual('200 OK', resp.status)
self.assertEqual(md5hex(body), resp.headers['Etag'])
self.assertEqual('text/plain', resp.headers['Content-Type'])
self.assertEqual('do not encrypt me',
resp.headers['x-object-meta-test'])
self.assertEqual('do not encrypt me',
resp.headers['x-object-sysmeta-test'])
class TestDecrypterContainerRequests(unittest.TestCase):
    """Tests for decryption of container GET listings."""

    def setUp(self):
        self.app = FakeSwift()
        self.decrypter = decrypter.Decrypter(self.app, {})
        self.decrypter.logger = debug_logger()

    def _make_cont_get_req(self, resp_body, format, override=False,
                           callback=fetch_crypto_keys):
        """Register a fake container GET and return the decrypted response.

        :param resp_body: the body the fake backend returns
        :param format: listing format ('json', or None for plain names)
        :param override: if True, set 'swift.crypto.override' in the environ
        :param callback: keys callback placed in the WSGI environ
        """
        path = '/v1/a/c'
        content_type = 'text/plain'
        if format:
            path = '%s/?format=%s' % (path, format)
            content_type = 'application/' + format
        env = {'REQUEST_METHOD': 'GET',
               CRYPTO_KEY_CALLBACK: callback}
        if override:
            env['swift.crypto.override'] = True
        req = Request.blank(path, environ=env)
        hdrs = {'content-type': content_type}
        self.app.register('GET', path, HTTPOk, body=resp_body, headers=hdrs)
        return req.get_response(self.decrypter)

    def test_GET_container_success(self):
        """A names-only listing passes through; keys are never fetched."""
        # no format requested, listing has names only
        fake_body = b'testfile1\ntestfile2\n'
        calls = [0]

        def wrapped_fetch_crypto_keys():
            calls[0] += 1
            return fetch_crypto_keys()

        resp = self._make_cont_get_req(fake_body, None,
                                       callback=wrapped_fetch_crypto_keys)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(resp.body.split(b'\n'), [
            b'testfile1',
            b'testfile2',
            b'',
        ])
        # nothing to decrypt, so the keys callback must not have been used
        self.assertEqual(0, calls[0])

    def test_GET_container_json(self):
        """Encrypted etags in a json listing are replaced by plaintext."""
        content_type_1 = u'\uF10F\uD20D\uB30B\u9409'
        content_type_2 = 'text/plain; param=foo'
        pt_etag1 = 'c6e8196d7f0fff6444b90861fe8d609d'
        pt_etag2 = 'ac0374ed4d43635f803c82469d0b5a10'
        key = fetch_crypto_keys()['container']

        subdir = {"subdir": "pseudo-dir/"}

        obj_dict_1 = {"bytes": 16,
                      "last_modified": "2015-04-14T23:33:06.439040",
                      "hash": encrypt_and_append_meta(
                          pt_etag1.encode('utf-8'), key),
                      "name": "testfile",
                      "content_type": content_type_1}

        obj_dict_2 = {"bytes": 24,
                      "last_modified": "2015-04-14T23:33:06.519020",
                      "hash": encrypt_and_append_meta(
                          pt_etag2.encode('utf-8'), key),
                      "name": "testfile2",
                      "content_type": content_type_2}

        listing = [subdir, obj_dict_1, obj_dict_2]
        fake_body = json.dumps(listing).encode('ascii')
        resp = self._make_cont_get_req(fake_body, 'json')
        self.assertEqual('200 OK', resp.status)
        body = resp.body
        self.assertEqual(len(body), int(resp.headers['Content-Length']))
        body_json = json.loads(body)
        self.assertEqual(3, len(body_json))
        # subdir entries have no etag and are passed through unchanged
        self.assertDictEqual(subdir, body_json[0])
        obj_dict_1['hash'] = pt_etag1
        self.assertDictEqual(obj_dict_1, body_json[1])
        obj_dict_2['hash'] = pt_etag2
        self.assertDictEqual(obj_dict_2, body_json[2])

    def test_GET_container_json_with_crypto_override(self):
        """With the override flag set the listing is returned untouched."""
        content_type_1 = 'image/jpeg'
        content_type_2 = 'text/plain; param=foo'
        pt_etag1 = 'c6e8196d7f0fff6444b90861fe8d609d'
        pt_etag2 = 'ac0374ed4d43635f803c82469d0b5a10'

        obj_dict_1 = {"bytes": 16,
                      "last_modified": "2015-04-14T23:33:06.439040",
                      "hash": pt_etag1,
                      "name": "testfile",
                      "content_type": content_type_1}

        obj_dict_2 = {"bytes": 24,
                      "last_modified": "2015-04-14T23:33:06.519020",
                      "hash": pt_etag2,
                      "name": "testfile2",
                      "content_type": content_type_2}

        listing = [obj_dict_1, obj_dict_2]
        fake_body = json.dumps(listing).encode('ascii')
        resp = self._make_cont_get_req(fake_body, 'json', override=True)
        self.assertEqual('200 OK', resp.status)
        body = resp.body
        self.assertEqual(len(body), int(resp.headers['Content-Length']))
        body_json = json.loads(body)
        self.assertEqual(2, len(body_json))
        self.assertDictEqual(obj_dict_1, body_json[0])
        self.assertDictEqual(obj_dict_2, body_json[1])

    def test_cont_get_json_req_with_cipher_mismatch(self):
        """An unknown cipher in etag crypto-meta yields '<unknown>' hashes
        but the listing request itself still succeeds."""
        bad_crypto_meta = fake_get_crypto_meta()
        bad_crypto_meta['cipher'] = 'unknown_cipher'
        key = fetch_crypto_keys()['container']
        pt_etag = 'c6e8196d7f0fff6444b90861fe8d609d'
        ct_etag = encrypt_and_append_meta(pt_etag, key,
                                          crypto_meta=bad_crypto_meta)

        obj_dict_1 = {"bytes": 16,
                      "last_modified": "2015-04-14T23:33:06.439040",
                      "hash": ct_etag,
                      "name": "testfile",
                      "content_type": "image/jpeg"}

        listing = [obj_dict_1]
        fake_body = json.dumps(listing).encode('ascii')
        resp = self._make_cont_get_req(fake_body, 'json')
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(
            ['<unknown>'],
            [x['hash'] for x in json.loads(resp.body)])
        self.assertIn("Cipher must be AES_CTR_256",
                      self.decrypter.logger.get_lines_for_level('error')[0])
        self.assertIn('Error decrypting container listing',
                      self.decrypter.logger.get_lines_for_level('error')[0])

    def test_cont_get_json_req_with_unknown_secret_id(self):
        """An unknown secret id in etag crypto-meta yields '<unknown>'
        hashes and logs both the key-fetch and listing-decrypt errors."""
        bad_crypto_meta = fake_get_crypto_meta()
        bad_crypto_meta['key_id'] = {'secret_id': 'unknown_key'}
        key = fetch_crypto_keys()['container']
        pt_etag = 'c6e8196d7f0fff6444b90861fe8d609d'
        ct_etag = encrypt_and_append_meta(pt_etag, key,
                                          crypto_meta=bad_crypto_meta)

        obj_dict_1 = {"bytes": 16,
                      "last_modified": "2015-04-14T23:33:06.439040",
                      "hash": ct_etag,
                      "name": "testfile",
                      "content_type": "image/jpeg"}

        listing = [obj_dict_1]
        fake_body = json.dumps(listing).encode('ascii')
        resp = self._make_cont_get_req(fake_body, 'json')
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(
            ['<unknown>'],
            [x['hash'] for x in json.loads(resp.body)])
        self.assertEqual(self.decrypter.logger.get_lines_for_level('error'), [
            'get_keys(): unknown key id: unknown_key',
            'Error decrypting container listing: unknown_key',
        ])

    def test_GET_container_json_not_encrypted_obj(self):
        """An etag with no crypto-meta (e.g. a symlink's) is left as-is."""
        pt_etag = '%s; symlink_path=/a/c/o' % MD5_OF_EMPTY_STRING

        obj_dict = {"bytes": 0,
                    "last_modified": "2015-04-14T23:33:06.439040",
                    "hash": pt_etag,
                    "name": "symlink",
                    "content_type": 'application/symlink'}

        listing = [obj_dict]
        fake_body = json.dumps(listing).encode('ascii')
        resp = self._make_cont_get_req(fake_body, 'json')
        self.assertEqual('200 OK', resp.status)
        body = resp.body
        self.assertEqual(len(body), int(resp.headers['Content-Length']))
        body_json = json.loads(body)
        self.assertEqual(1, len(body_json))
        self.assertEqual(pt_etag, body_json[0]['hash'])
class TestModuleMethods(unittest.TestCase):
    """Tests for module-level helpers of the decrypter module."""

    def test_purge_crypto_sysmeta_headers(self):
        """Only crypto-specific sysmeta headers are removed; all other
        headers survive with their values intact."""
        retained_headers = {'x-object-sysmeta-test1': 'keep',
                            'x-object-meta-test2': 'retain',
                            'x-object-transient-sysmeta-test3': 'leave intact',
                            'etag': 'hold onto',
                            'x-other': 'cherish',
                            'x-object-not-meta': 'do not remove'}
        purged_headers = {'x-object-sysmeta-crypto-test1': 'remove',
                          'x-object-transient-sysmeta-crypto-test2': 'purge'}
        test_headers = retained_headers.copy()
        test_headers.update(purged_headers)
        actual = decrypter.purge_crypto_sysmeta_headers(test_headers.items())
        for k, v in actual:
            k = k.lower()
            self.assertNotIn(k, purged_headers)
            self.assertEqual(retained_headers[k], v)
            retained_headers.pop(k)
        # every retained header should have been seen exactly once
        self.assertFalse(retained_headers)
class TestDecrypter(unittest.TestCase):
    """Checks behaviour of the decrypter filter as a whole."""

    def test_app_exception(self):
        """An HTTPException raised by the wrapped app propagates out."""
        failing_app = FakeAppThatExcepts(HTTPException)
        filtered = decrypter.Decrypter(failing_app, {})
        request = Request.blank('/', environ={'REQUEST_METHOD': 'GET'})
        with self.assertRaises(HTTPException) as caught:
            request.get_response(filtered)
        self.assertEqual(caught.exception.body, FakeAppThatExcepts.MESSAGE)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/crypto/test_decrypter.py |
# Copyright (c) 2015-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
import mock
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from swift.common.exceptions import EncryptionException
from swift.common.middleware.crypto import crypto_utils
from swift.common.middleware.crypto.crypto_utils import (
CRYPTO_KEY_CALLBACK, Crypto, CryptoWSGIContext)
from swift.common.swob import HTTPException
from test.debug_logger import debug_logger
from test.unit.common.middleware.crypto.crypto_helpers import fetch_crypto_keys
class TestCryptoWsgiContext(unittest.TestCase):
    """Tests for CryptoWSGIContext key fetching and validation."""

    def setUp(self):
        class FakeFilter(object):
            # minimal stand-in for a crypto filter: just needs app + crypto
            app = None
            crypto = Crypto({})

        self.fake_logger = debug_logger()
        self.crypto_context = CryptoWSGIContext(
            FakeFilter(), 'object', self.fake_logger)

    def test_get_keys(self):
        """get_keys() returns the callback's dict, checking only the
        required key names (default or explicitly specified)."""
        # ok
        env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        keys = self.crypto_context.get_keys(env)
        self.assertDictEqual(fetch_crypto_keys(), keys)
        # only default required keys are checked
        subset_keys = {'object': fetch_crypto_keys()['object']}
        env = {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: subset_keys}
        keys = self.crypto_context.get_keys(env)
        self.assertDictEqual(subset_keys, keys)
        # only specified required keys are checked
        subset_keys = {'container': fetch_crypto_keys()['container']}
        env = {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: subset_keys}
        keys = self.crypto_context.get_keys(env, required=['container'])
        self.assertDictEqual(subset_keys, keys)
        subset_keys = {'object': fetch_crypto_keys()['object'],
                       'container': fetch_crypto_keys()['container']}
        env = {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: subset_keys}
        keys = self.crypto_context.get_keys(
            env, required=['object', 'container'])
        self.assertDictEqual(subset_keys, keys)

    def test_get_keys_with_crypto_meta(self):
        # verify that key_id from crypto_meta is passed to fetch_crypto_keys
        keys = fetch_crypto_keys()
        mock_fetch_crypto_keys = mock.MagicMock(return_value=keys)
        env = {CRYPTO_KEY_CALLBACK: mock_fetch_crypto_keys}
        key_id = {'secret_id': '123'}
        keys = self.crypto_context.get_keys(env, key_id=key_id)
        self.assertDictEqual(fetch_crypto_keys(), keys)
        mock_fetch_crypto_keys.assert_called_with(key_id={'secret_id': '123'})
        # but it's ok for there to be no crypto_meta
        keys = self.crypto_context.get_keys(env, key_id={})
        self.assertDictEqual(fetch_crypto_keys(), keys)
        mock_fetch_crypto_keys.assert_called_with(key_id={})
        keys = self.crypto_context.get_keys(env)
        self.assertDictEqual(fetch_crypto_keys(), keys)
        mock_fetch_crypto_keys.assert_called_with(key_id=None)

    def test_get_keys_missing_callback(self):
        """No callback in the environ -> 500 with a logged error."""
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys({})
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn('missing callback',
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_callback_exception(self):
        """A callback that raises -> 500 with the cause logged."""
        def callback(*args, **kwargs):
            raise Exception('boom')
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys({CRYPTO_KEY_CALLBACK: callback})
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn('from callback: boom',
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_missing_key_for_default_required_list(self):
        """Missing 'object' key (default required list) -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys.pop('object')
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys})
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Missing key for 'object'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_missing_object_key_for_specified_required_list(self):
        """Missing 'object' key when explicitly required -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys.pop('object')
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys},
                required=['object', 'container'])
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Missing key for 'object'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_missing_container_key_for_specified_required_list(self):
        """Missing 'container' key when explicitly required -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys.pop('container')
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys},
                required=['object', 'container'])
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Missing key for 'container'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_bad_object_key_for_default_required_list(self):
        """An 'object' key of the wrong length -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys['object'] = b'the minor key'
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys})
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Bad key for 'object'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_bad_container_key_for_default_required_list(self):
        """A 'container' key of the wrong length -> 500."""
        bad_keys = dict(fetch_crypto_keys())
        bad_keys['container'] = b'the major key'
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK: lambda *args, **kwargs: bad_keys},
                required=['object', 'container'])
        self.assertIn('500 Internal Error', cm.exception.status)
        self.assertIn("Bad key for 'container'",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_keys_not_a_dict(self):
        """A callback returning a non-dict -> 500."""
        with self.assertRaises(HTTPException) as cm:
            self.crypto_context.get_keys(
                {CRYPTO_KEY_CALLBACK:
                 lambda *args, **kwargs: ['key', 'quay', 'qui']})
        self.assertEqual('500 Internal Error', cm.exception.status)
        self.assertIn("Did not get a keys dict",
                      self.fake_logger.get_lines_for_level('error')[0])
        self.assertIn(b'Unable to retrieve encryption keys.',
                      cm.exception.body)

    def test_get_multiple_keys(self):
        """get_multiple_keys() returns keys for each available secret id."""
        env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        # NOTE(review): 'mutliple_keys' looks like a typo for 'multiple_keys'
        mutliple_keys = self.crypto_context.get_multiple_keys(env)
        self.assertEqual(
            [fetch_crypto_keys(),
             fetch_crypto_keys(key_id={'secret_id': 'myid'})],
            mutliple_keys)
class TestModuleMethods(unittest.TestCase):
    """Tests for the crypto meta (de)serialization helpers in crypto_utils."""

    # sample crypto meta and its expected URL-quoted JSON serializations
    meta = {'iv': b'0123456789abcdef', 'cipher': 'AES_CTR_256'}
    serialized_meta = '%7B%22cipher%22%3A+%22AES_CTR_256%22%2C+%22' \
                      'iv%22%3A+%22MDEyMzQ1Njc4OWFiY2RlZg%3D%3D%22%7D'

    meta_with_key = {'iv': b'0123456789abcdef', 'cipher': 'AES_CTR_256',
                     'body_key': {'key': b'fedcba9876543210fedcba9876543210',
                                  'iv': b'fedcba9876543210'}}
    serialized_meta_with_key = '%7B%22body_key%22%3A+%7B%22iv%22%3A+%22ZmVkY' \
                               '2JhOTg3NjU0MzIxMA%3D%3D%22%2C+%22key%22%3A+%' \
                               '22ZmVkY2JhOTg3NjU0MzIxMGZlZGNiYTk4NzY1NDMyMT' \
                               'A%3D%22%7D%2C+%22cipher%22%3A+%22AES_CTR_256' \
                               '%22%2C+%22iv%22%3A+%22MDEyMzQ1Njc4OWFiY2RlZg' \
                               '%3D%3D%22%7D'

    def test_dump_crypto_meta(self):
        """dump_crypto_meta serializes to quoted, sorted-key JSON."""
        actual = crypto_utils.dump_crypto_meta(self.meta)
        self.assertEqual(self.serialized_meta, actual)

        actual = crypto_utils.dump_crypto_meta(self.meta_with_key)
        self.assertEqual(self.serialized_meta_with_key, actual)

    def test_load_crypto_meta(self):
        """load_crypto_meta round-trips and rejects malformed input."""
        actual = crypto_utils.load_crypto_meta(self.serialized_meta)
        self.assertEqual(self.meta, actual)

        actual = crypto_utils.load_crypto_meta(self.serialized_meta_with_key)
        self.assertEqual(self.meta_with_key, actual)

        def assert_raises(value, message):
            # 'message' may be a single expected substring or a list of
            # acceptable alternatives (error text differs across py versions)
            with self.assertRaises(EncryptionException) as cm:
                crypto_utils.load_crypto_meta(value)
            self.assertIn('Bad crypto meta %r' % value, cm.exception.args[0])
            if isinstance(message, (tuple, list)):
                for opt in message:
                    if opt in cm.exception.args[0]:
                        break
                else:
                    self.fail('Expected to find one of %r in %r' % (
                        message, cm.exception.args[0]))
            else:
                self.assertIn(message, cm.exception.args[0])

        assert_raises(None, 'crypto meta not a string')
        assert_raises(99, 'crypto meta not a string')
        assert_raises('', ('No JSON object could be decoded',
                           'Expecting value: line 1 column 1'))
        assert_raises('abc', ('No JSON object could be decoded',
                              'Expecting value: line 1 column 1'))
        assert_raises('[]', 'crypto meta not a Mapping')

        bad_type_messages = [
            'must be string or buffer',
            'argument should be a bytes-like object or ASCII string',
        ]
        assert_raises('{"iv": "abcdef"}', 'Incorrect padding')
        assert_raises('{"iv": []}', bad_type_messages)
        assert_raises('{"iv": {}}', bad_type_messages)
        assert_raises('{"iv": 99}', bad_type_messages)
        assert_raises('{"key": "abcdef"}', 'Incorrect padding')
        assert_raises('{"key": []}', bad_type_messages)
        assert_raises('{"key": {}}', bad_type_messages)
        assert_raises('{"key": 99}', bad_type_messages)
        assert_raises('{"body_key": {"iv": "abcdef"}}', 'Incorrect padding')
        assert_raises('{"body_key": {"iv": []}}', bad_type_messages)
        assert_raises('{"body_key": {"iv": {}}}', bad_type_messages)
        assert_raises('{"body_key": {"iv": 99}}', bad_type_messages)
        assert_raises('{"body_key": {"key": "abcdef"}}', 'Incorrect padding')
        assert_raises('{"body_key": {"key": []}}', bad_type_messages)
        assert_raises('{"body_key": {"key": {}}}', bad_type_messages)
        assert_raises('{"body_key": {"key": 99}}', bad_type_messages)

    def test_dump_then_load_crypto_meta(self):
        """dump followed by load is an exact round trip."""
        actual = crypto_utils.load_crypto_meta(
            crypto_utils.dump_crypto_meta(self.meta))
        self.assertEqual(self.meta, actual)

        actual = crypto_utils.load_crypto_meta(
            crypto_utils.dump_crypto_meta(self.meta_with_key))
        self.assertEqual(self.meta_with_key, actual)

    def test_append_crypto_meta(self):
        """append_crypto_meta appends '; swift_meta=<serialized>'."""
        actual = crypto_utils.append_crypto_meta('abc', self.meta)
        expected = 'abc; swift_meta=%s' % self.serialized_meta
        self.assertEqual(actual, expected)

        actual = crypto_utils.append_crypto_meta('abc', self.meta_with_key)
        expected = 'abc; swift_meta=%s' % self.serialized_meta_with_key
        self.assertEqual(actual, expected)

        def check_bad_value(value):
            with self.assertRaises(ValueError):
                crypto_utils.append_crypto_meta(value, self.meta)

        check_bad_value(None)
        check_bad_value({})
        check_bad_value(1)

    def test_extract_crypto_meta(self):
        """extract_crypto_meta splits the value and parses swift_meta."""
        val, meta = crypto_utils.extract_crypto_meta(
            'abc; swift_meta=%s' % self.serialized_meta)
        self.assertEqual('abc', val)
        self.assertDictEqual(self.meta, meta)

        val, meta = crypto_utils.extract_crypto_meta(
            'abc; swift_meta=%s' % self.serialized_meta_with_key)
        self.assertEqual('abc', val)
        self.assertDictEqual(self.meta_with_key, meta)

        val, meta = crypto_utils.extract_crypto_meta('abc')
        self.assertEqual('abc', val)
        self.assertIsNone(meta)

        # other param names will be ignored
        val, meta = crypto_utils.extract_crypto_meta('abc; foo=bar')
        self.assertEqual('abc', val)
        self.assertIsNone(meta)

        val, meta = crypto_utils.extract_crypto_meta(
            'abc; swift_meta=%s; foo=bar' % self.serialized_meta_with_key)
        self.assertEqual('abc', val)
        self.assertDictEqual(self.meta_with_key, meta)

    def test_append_then_extract_crypto_meta(self):
        """append followed by extract is an exact round trip."""
        val = 'abc'
        actual = crypto_utils.extract_crypto_meta(
            crypto_utils.append_crypto_meta(val, self.meta))
        self.assertEqual((val, self.meta), actual)
class TestCrypto(unittest.TestCase):
    """Tests for the Crypto class: AES-CTR contexts, IVs and key wrapping."""

    def setUp(self):
        self.crypto = Crypto({})

    def test_create_encryption_context(self):
        """An encryption context matches a directly-built AES-CTR cipher;
        bad IV/key lengths are rejected."""
        value = b'encrypt me' * 100  # more than one cipher block
        key = os.urandom(32)
        iv = os.urandom(16)
        ctxt = self.crypto.create_encryption_ctxt(key, iv)
        expected = Cipher(
            algorithms.AES(key), modes.CTR(iv),
            backend=default_backend()).encryptor().update(value)
        self.assertEqual(expected, ctxt.update(value))

        for bad_iv in (b'a little too long', b'too short'):
            self.assertRaises(
                ValueError, self.crypto.create_encryption_ctxt, key, bad_iv)

        for bad_key in (b'objKey', b'a' * 31, b'a' * 33, b'a' * 16, b'a' * 24):
            self.assertRaises(
                ValueError, self.crypto.create_encryption_ctxt, bad_key, iv)

    def test_create_decryption_context(self):
        """A decryption context matches a directly-built AES-CTR cipher;
        bad IV/key lengths and negative offsets are rejected."""
        value = b'decrypt me' * 100  # more than one cipher block
        key = os.urandom(32)
        iv = os.urandom(16)
        ctxt = self.crypto.create_decryption_ctxt(key, iv, 0)
        expected = Cipher(
            algorithms.AES(key), modes.CTR(iv),
            backend=default_backend()).decryptor().update(value)
        self.assertEqual(expected, ctxt.update(value))

        for bad_iv in (b'a little too long', b'too short'):
            self.assertRaises(
                ValueError, self.crypto.create_decryption_ctxt, key, bad_iv, 0)

        for bad_key in (b'objKey', b'a' * 31, b'a' * 33, b'a' * 16, b'a' * 24):
            self.assertRaises(
                ValueError, self.crypto.create_decryption_ctxt, bad_key, iv, 0)

        with self.assertRaises(ValueError) as cm:
            self.crypto.create_decryption_ctxt(key, iv, -1)
        self.assertEqual("Offset must not be negative", cm.exception.args[0])

    def test_enc_dec_small_chunks(self):
        self.enc_dec_chunks([b'encrypt me', b'because I', b'am sensitive'])

    def test_enc_dec_large_chunks(self):
        self.enc_dec_chunks([os.urandom(65536), os.urandom(65536)])

    def enc_dec_chunks(self, chunks):
        """Encrypt then decrypt a sequence of chunks and verify round trip."""
        key = b'objL7wjV6L79Sfs4y7dy41273l0k6Wki'
        iv = self.crypto.create_iv()
        enc_ctxt = self.crypto.create_encryption_ctxt(key, iv)
        enc_val = [enc_ctxt.update(chunk) for chunk in chunks]
        self.assertTrue(b''.join(enc_val) != chunks)
        dec_ctxt = self.crypto.create_decryption_ctxt(key, iv, 0)
        dec_val = [dec_ctxt.update(chunk) for chunk in enc_val]
        self.assertEqual(b''.join(chunks), b''.join(dec_val),
                         'Expected value {%s} but got {%s}' %
                         (b''.join(chunks), b''.join(dec_val)))

    def test_decrypt_range(self):
        """Decryption at a non-zero offset recovers the ranged plaintext."""
        chunks = [b'0123456789abcdef', b'ghijklmnopqrstuv']
        key = b'objL7wjV6L79Sfs4y7dy41273l0k6Wki'
        iv = self.crypto.create_iv()
        enc_ctxt = self.crypto.create_encryption_ctxt(key, iv)
        enc_val = [enc_ctxt.update(chunk) for chunk in chunks]

        # Simulate a ranged GET from byte 19 to 32 : 'jklmnopqrstuv'
        dec_ctxt = self.crypto.create_decryption_ctxt(key, iv, 19)
        ranged_chunks = [enc_val[1][3:]]
        dec_val = [dec_ctxt.update(chunk) for chunk in ranged_chunks]
        self.assertEqual(b'jklmnopqrstuv', b''.join(dec_val),
                         'Expected value {%s} but got {%s}' %
                         (b'jklmnopqrstuv', b''.join(dec_val)))

    def test_create_decryption_context_non_zero_offset(self):
        # Verify that iv increments for each 16 bytes of offset.
        # For a ranged GET we pass a non-zero offset so that the decrypter
        # counter is incremented to the correct value to start decrypting at
        # that offset into the object body. The counter should increment by one
        # from the starting IV value for every 16 bytes offset into the object
        # body, until it reaches 2^128 -1 when it should wrap to zero. We check
        # that is happening by verifying a decrypted value using various
        # offsets.
        key = b'objL7wjV6L79Sfs4y7dy41273l0k6Wki'

        def do_test():
            for offset, exp_iv in mappings.items():
                dec_ctxt = self.crypto.create_decryption_ctxt(key, iv, offset)
                offset_in_block = offset % 16
                cipher = Cipher(algorithms.AES(key),
                                modes.CTR(exp_iv),
                                backend=default_backend())
                expected = cipher.decryptor().update(
                    b'p' * offset_in_block + b'ciphertext')
                actual = dec_ctxt.update(b'ciphertext')
                expected = expected[offset % 16:]
                self.assertEqual(expected, actual,
                                 'Expected %r but got %r, iv=%s and offset=%s'
                                 % (expected, actual, iv, offset))

        iv = b'0000000010000000'
        mappings = {
            2: b'0000000010000000',
            16: b'0000000010000001',
            19: b'0000000010000001',
            48: b'0000000010000003',
            1024: b'000000001000000p',
            5119: b'000000001000001o'
        }
        do_test()

        # choose max iv value and test that it wraps to zero
        iv = b'\xff' * 16
        mappings = {
            2: iv,
            16: bytes(bytearray.fromhex('00' * 16)),  # iv wraps to 0
            19: bytes(bytearray.fromhex('00' * 16)),
            48: bytes(bytearray.fromhex('00' * 15 + '02')),
            1024: bytes(bytearray.fromhex('00' * 15 + '3f')),
            5119: bytes(bytearray.fromhex('00' * 14 + '013E'))
        }
        do_test()

        iv = b'\x00' * 16
        mappings = {
            2: iv,
            16: bytes(bytearray.fromhex('00' * 15 + '01')),
            19: bytes(bytearray.fromhex('00' * 15 + '01')),
            48: bytes(bytearray.fromhex('00' * 15 + '03')),
            1024: bytes(bytearray.fromhex('00' * 15 + '40')),
            5119: bytes(bytearray.fromhex('00' * 14 + '013F'))
        }
        do_test()

        iv = b'\x00' * 8 + b'\xff' * 8
        mappings = {
            2: iv,
            16: bytes(bytearray.fromhex('00' * 7 + '01' + '00' * 8)),
            19: bytes(bytearray.fromhex('00' * 7 + '01' + '00' * 8)),
            48: bytes(bytearray.fromhex('00' * 7 + '01' + '00' * 7 + '02')),
            1024: bytes(bytearray.fromhex('00' * 7 + '01' + '00' * 7 + '3F')),
            5119: bytes(bytearray.fromhex('00' * 7 + '01' + '00' * 6 + '013E'))
        }
        do_test()

    def test_check_key(self):
        """check_key rejects any key that is not exactly 32 bytes."""
        for key in ('objKey', 'a' * 31, 'a' * 33, 'a' * 16, 'a' * 24):
            with self.assertRaises(ValueError) as cm:
                self.crypto.check_key(key)
            self.assertEqual("Key must be length 32 bytes",
                             cm.exception.args[0])

    def test_check_crypto_meta(self):
        """check_crypto_meta requires a 16-byte iv and the AES_CTR_256
        cipher."""
        meta = {'cipher': 'AES_CTR_256'}
        with self.assertRaises(EncryptionException) as cm:
            self.crypto.check_crypto_meta(meta)
        self.assertEqual("Bad crypto meta: Missing 'iv'",
                         cm.exception.args[0])

        for bad_iv in ('a little too long', 'too short'):
            meta['iv'] = bad_iv
            with self.assertRaises(EncryptionException) as cm:
                self.crypto.check_crypto_meta(meta)
            self.assertEqual("Bad crypto meta: IV must be length 16 bytes",
                             cm.exception.args[0])

        meta = {'iv': os.urandom(16)}
        with self.assertRaises(EncryptionException) as cm:
            self.crypto.check_crypto_meta(meta)
        self.assertEqual("Bad crypto meta: Missing 'cipher'",
                         cm.exception.args[0])

        meta['cipher'] = 'Mystery cipher'
        with self.assertRaises(EncryptionException) as cm:
            self.crypto.check_crypto_meta(meta)
        self.assertEqual("Bad crypto meta: Cipher must be AES_CTR_256",
                         cm.exception.args[0])

    def test_create_iv(self):
        self.assertEqual(16, len(self.crypto.create_iv()))
        # crude check that we get back different values on each call
        self.assertNotEqual(self.crypto.create_iv(), self.crypto.create_iv())

    def test_get_crypto_meta(self):
        """create_crypto_meta yields exactly {'iv', 'cipher'} with a fresh
        16-byte iv per call."""
        meta = self.crypto.create_crypto_meta()
        self.assertIsInstance(meta, dict)
        # this is deliberately brittle so that if new items are added then the
        # test will need to be updated
        self.assertEqual(2, len(meta))
        self.assertIn('iv', meta)
        self.assertEqual(16, len(meta['iv']))
        self.assertIn('cipher', meta)
        self.assertEqual('AES_CTR_256', meta['cipher'])
        self.crypto.check_crypto_meta(meta)  # sanity check
        meta2 = self.crypto.create_crypto_meta()
        self.assertNotEqual(meta['iv'], meta2['iv'])  # crude sanity check

    def test_create_random_key(self):
        # crude check that we get unique keys on each call
        keys = set()
        for i in range(10):
            key = self.crypto.create_random_key()
            self.assertEqual(32, len(key))
            keys.add(key)
        self.assertEqual(10, len(keys))

    def test_wrap_unwrap_key(self):
        """wrap_key AES-CTR-encrypts the key with a fresh iv; unwrap_key
        inverts it."""
        wrapping_key = os.urandom(32)
        key_to_wrap = os.urandom(32)
        iv = os.urandom(16)
        with mock.patch(
                'swift.common.middleware.crypto.crypto_utils.Crypto.create_iv',
                return_value=iv):
            wrapped = self.crypto.wrap_key(wrapping_key, key_to_wrap)
        cipher = Cipher(algorithms.AES(wrapping_key), modes.CTR(iv),
                        backend=default_backend())
        expected = {'key': cipher.encryptor().update(key_to_wrap),
                    'iv': iv}
        self.assertEqual(expected, wrapped)

        unwrapped = self.crypto.unwrap_key(wrapping_key, wrapped)
        self.assertEqual(key_to_wrap, unwrapped)

    def test_unwrap_bad_key(self):
        # verify that ValueError is raised if unwrapped key is invalid
        wrapping_key = os.urandom(32)
        for length in (0, 16, 24, 31, 33):
            key_to_wrap = os.urandom(length)
            wrapped = self.crypto.wrap_key(wrapping_key, key_to_wrap)
            with self.assertRaises(ValueError) as cm:
                self.crypto.unwrap_key(wrapping_key, wrapped)
            self.assertEqual(
                cm.exception.args[0], 'Key must be length 32 bytes')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/crypto/test_crypto_utils.py |
# Copyright (c) 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
from swift.common import registry
from swift.common.middleware import crypto
class TestCrypto(unittest.TestCase):
    """Tests for the crypto filter factory and its swift_info registration."""

    def test_filter_factory(self):
        """The factory registers 'encryption' in admin swift info (never in
        the public info) and builds a Decrypter-wrapping-Encrypter pipeline."""
        def do_test(conf, expect_enabled):
            fake_app = object()
            with mock.patch.dict('swift.common.registry._swift_admin_info',
                                 clear=True):
                # we're not expecting utils._swift_info to be modified but mock
                # it anyway just in case it is
                with mock.patch.dict('swift.common.registry._swift_info',
                                     clear=True):
                    # Sanity checks...
                    self.assertNotIn('encryption', registry._swift_admin_info)
                    self.assertNotIn('encryption',
                                     registry.get_swift_info(admin=True))
                    self.assertNotIn(
                        'encryption',
                        registry.get_swift_info(admin=True)['admin'])

                    factory = crypto.filter_factory(conf)
                    self.assertTrue(callable(factory))
                    filtered_app = factory(fake_app)

                    self.assertNotIn('encryption', registry._swift_info)
                    self.assertNotIn('encryption', registry.get_swift_info())
                    self.assertNotIn('encryption',
                                     registry.get_swift_info(admin=True))

                    self.assertIn('encryption', registry._swift_admin_info)
                    self.assertDictEqual(
                        {'enabled': expect_enabled},
                        registry._swift_admin_info['encryption'])
                    self.assertIn('encryption',
                                  registry.get_swift_info(admin=True)['admin'])
                    self.assertDictEqual(
                        {'enabled': expect_enabled},
                        registry.get_swift_info(
                            admin=True)['admin']['encryption'])

            # pipeline: decrypter -> encrypter -> app
            self.assertIsInstance(filtered_app, crypto.decrypter.Decrypter)
            self.assertIsInstance(filtered_app.app, crypto.encrypter.Encrypter)
            self.assertIs(filtered_app.app.app, fake_app)

        # default enabled
        do_test({}, True)
        # explicitly enabled
        do_test({'disable_encryption': False}, True)
        # explicitly disabled
        do_test({'disable_encryption': True}, False)
| swift-master | test/unit/common/middleware/crypto/test_crypto.py |
# -*- coding: utf-8 -*-
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import mock
import unittest
import sys

# Stub out castellan before kms_keymaster is imported below: the real
# castellan package is an optional dependency and need not be installed
# for these tests to run.
sys.modules['castellan'] = mock.Mock()
sys.modules['castellan.common'] = mock.Mock()
sys.modules['castellan.common.credentials'] = mock.Mock()
from keystoneauth1.exceptions.connection import ConnectFailure
from keystoneauth1.exceptions.http import Unauthorized
from keystoneclient.exceptions import DiscoveryFailure
from swift.common.middleware.crypto import kms_keymaster
from swift.common.swob import Request
from test.unit.common.middleware.helpers import FakeSwift, FakeAppThatExcepts
# Key ids understood by the fake KMS below; each triggers a distinct
# failure mode (or odd key shape) in MockBarbicanKeyManager.get().
TEST_KMS_INVALID_KEY_ID = 'invalid-kms-key-id'
TEST_KMS_NONEXISTENT_KEY_ID = '11111111-1111-1111-1111-ffffffffffff'
TEST_KMS_OPAQUE_KEY_ID = '22222222-2222-2222-2222-aaaaaaaaaaaa'
TEST_KMS_SHORT_KEY_ID = '22222222-2222-2222-2222-bbbbbbbbbbbb'
TEST_KMS_DES_KEY_ID = '22222222-2222-2222-2222-cccccccccccc'
TEST_KMS_NONE_KEY_ID = '22222222-2222-2222-2222-dddddddddddd'
# Invalid / unreachable endpoint variants used to exercise error paths.
TEST_KMS_INVALID_API_VERSION = 'vBadVersion'
TEST_KMS_INVALID_USER_DOMAIN_NAME = "baduserdomainname"
TEST_KMS_CONNECT_FAILURE_URL = 'http://endpoint_url_connect_error:45621'
TEST_KMS_NON_BARBICAN_URL = 'http://endpoint_url_nonbarbican:45621'
# Proxy-server conf that points at an external keymaster config file.
TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF = {
    'keymaster_config_path': 'PATH_TO_KEYMASTER_CONFIG_FILE',
}
# A complete, valid kms_keymaster configuration used as the baseline;
# MockBarbicanKeyManager authenticates requests against these values.
TEST_KMS_KEYMASTER_CONF = {
    'auth_endpoint': 'kmsauthurlv3',
    'password': 'kmspass',
    'username': 'kmsuser',
    'user_domain_id': None,
    'user_domain_name': 'default',
    'project_id': None,
    'project_name': 'kmsproject',
    'project_domain_id': None,
    'project_domain_name': 'default',
    'key_id': 'valid_kms_key_id-abcdefg-123456'
}
def capture_start_response():
    """Return a WSGI ``start_response`` stub and the list recording its
    call arguments, so tests can inspect what the app emitted."""
    recorded = []

    def start_response(*args):
        # remember each invocation's positional args as a tuple
        recorded.append(args)

    return start_response, recorded
def mock_castellan_api_side_effect(*args, **kwargs):
    """Stand-in for castellan's key-manager factory: wrap the conf (first
    positional arg) in a MockBarbicanKeyManager."""
    conf = args[0]
    return MockBarbicanKeyManager(conf)
def mock_options_set_defaults_side_effect(*args, **kwargs):
    """Merge every keyword option into the conf dict passed as the first
    positional argument (mimics oslo.config's set_defaults)."""
    conf = args[0]
    conf.update(kwargs)
def mock_config_opts_side_effect(*args, **kwargs):
    """Pretend there are no registered config opts: always a fresh dict."""
    return {}
# Stand-in for keystone_password.KeystonePassword: simply records the
# credential arguments in a MockPassword so that MockBarbicanKeyManager.get()
# can later validate username/password/user_domain_name against the expected
# test configuration.
def mock_keystone_password_side_effect(auth_url, username, password,
                                       project_name, user_domain_name,
                                       project_domain_name, user_id,
                                       user_domain_id, trust_id,
                                       domain_id, domain_name, project_id,
                                       project_domain_id, reauthenticate):
    return MockPassword(auth_url, username, password, project_name,
                        user_domain_name, project_domain_name, user_id,
                        user_domain_id, trust_id, domain_id, domain_name,
                        project_id, project_domain_id, reauthenticate)
# Error message fragments raised by the mocks and matched by the tests below.
ERR_MESSAGE_SECRET_INCORRECTLY_SPECIFIED = 'Secret incorrectly specified.'
ERR_MESSAGE_KEY_UUID_NOT_FOUND = 'Key not found, uuid: '
class MockBarbicanKeyManager(object):
    """Stand-in for castellan's Barbican-backed key manager."""

    def __init__(self, conf):
        self.conf = conf

    def get(self, ctxt, key_id):
        """Mimic fetching *key_id*: raise the same exceptions a real
        Barbican deployment would for bad credentials, bad endpoints or
        problem key ids, otherwise return a MockBarbicanKey whose material
        is derived from the first character of the key id."""
        expected = TEST_KMS_KEYMASTER_CONF
        # Credentials must match the canonical test configuration.
        creds_ok = (expected['username'] == ctxt.username and
                    expected['password'] == ctxt.password and
                    expected['user_domain_name'] == ctxt.user_domain_name)
        if not creds_ok:
            raise Unauthorized(
                message='The request you have made requires authentication.',
                http_status=401)
        endpoint = self.conf['auth_endpoint']
        if endpoint == TEST_KMS_CONNECT_FAILURE_URL:
            raise ConnectFailure('Unable to establish connection')
        if endpoint == TEST_KMS_NON_BARBICAN_URL:
            raise DiscoveryFailure(
                'Could not determine a suitable URL for the plugin')
        if endpoint != expected['auth_endpoint']:
            raise Unauthorized(
                message='Cannot authorize API client.')
        if key_id == TEST_KMS_NONEXISTENT_KEY_ID:
            # A real client would raise ManagedObjectNotFoundError; raising a
            # plain Exception avoids importing castellan.common.exception.
            raise Exception(ERR_MESSAGE_KEY_UUID_NOT_FOUND + key_id)
        if key_id == TEST_KMS_INVALID_KEY_ID:
            raise ValueError(ERR_MESSAGE_SECRET_INCORRECTLY_SPECIFIED)
        if key_id == TEST_KMS_NONE_KEY_ID:
            return None
        # 'unicode' ids keep text key material; everything else gets bytes.
        if 'unicode' in key_id:
            material = key_id[0] * 32
        else:
            material = (str(key_id[0]) * 32).encode('utf8')
        return MockBarbicanKey(material, key_id)
class MockBarbicanKey(object):
    """Minimal stand-in for a castellan key object.

    The key id selects which keymaster validation failure the tests hit:
    a non-RAW ('Opaque' or 'des') format, or a too-short (128 bit) key.
    """

    def __init__(self, key_material, key_id):
        self.key_material = key_material
        self.bit_length = len(key_material) * 8
        if key_id == TEST_KMS_OPAQUE_KEY_ID:
            self.format = 'Opaque'
        else:
            self.format = 'RAW'
        self.algorithm = "aes"
        if key_id == TEST_KMS_DES_KEY_ID:
            self.format = 'des'
        if key_id == TEST_KMS_SHORT_KEY_ID:
            # Present a 128-bit key. The original code computed
            # `self.key_material[:128]` and discarded the result (a no-op);
            # actually truncate the material to 16 bytes so it is consistent
            # with bit_length.
            self.bit_length = 128
            self.key_material = self.key_material[:16]

    def get_encoded(self):
        """Return the raw key bytes/str, as castellan keys do."""
        return self.key_material

    # NOTE: the original also defined a `format()` method returning
    # `self.format`; it was dead code because the instance attribute set in
    # __init__ always shadows it, so it has been removed.
class MockPassword(object):
    """Minimal stand-in for keystoneauth1's KeystonePassword credentials.

    Simply records the constructor arguments as attributes so that
    MockBarbicanKeyManager.get() can validate the credentials used.
    """

    def __init__(self, auth_url, username, password, project_name,
                 user_domain_name, project_domain_name, user_id,
                 user_domain_id, trust_id, domain_id, domain_name, project_id,
                 project_domain_id, reauthenticate):
        self.auth_url = auth_url
        self.password = password
        self.username = username
        self.user_domain_name = user_domain_name
        self.project_name = project_name
        self.project_domain_name = project_domain_name
        # The original assignments below carried stray trailing commas,
        # which silently turned each attribute into a 1-tuple; store the
        # plain values instead.
        self.user_id = user_id
        self.user_domain_id = user_domain_id
        self.trust_id = trust_id
        self.domain_id = domain_id
        self.domain_name = domain_name
        self.project_id = project_id
        self.project_domain_id = project_domain_id
        self.reauthenticate = reauthenticate
class TestKmsKeymaster(unittest.TestCase):
    """
    Unit tests for storing the encryption root secret in a Barbican external
    key management system accessed using Castellan.

    All calls to Barbican and Keystone are mocked; the tests exercise the
    v3 Identity API code paths only. The previous version repeated an
    identical ~10-line mock-wiring preamble in every test and signalled
    expected failures with a hand-rolled try/raise/except pattern; that
    boilerplate now lives in _install_mocks()/_make_conf() and the tests
    use assertRaises.
    """

    def setUp(self):
        super(TestKmsKeymaster, self).setUp()
        self.swift = FakeSwift()

    def _install_mocks(self, mock_castellan_key_manager,
                       mock_castellan_options, mock_oslo_config,
                       mock_keystone_password=None):
        """Wire the shared side-effect functions into the patched modules."""
        if mock_keystone_password is not None:
            mock_keystone_password.side_effect = (
                mock_keystone_password_side_effect)
        mock_castellan_key_manager.API.side_effect = (
            mock_castellan_api_side_effect)
        mock_castellan_options.set_defaults.side_effect = (
            mock_options_set_defaults_side_effect)
        mock_oslo_config.ConfigOpts.side_effect = (
            mock_config_opts_side_effect)

    @staticmethod
    def _make_conf(**overrides):
        """Return a copy of the valid keymaster conf with overrides applied."""
        conf = dict(TEST_KMS_KEYMASTER_CONF)
        conf.update(overrides)
        return conf

    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch.object(kms_keymaster.KmsKeyMaster,
                       '_get_root_secret')
    def test_filter_v3(self, mock_get_root_secret_from_kms,
                       mock_readconf):
        mock_get_root_secret_from_kms.return_value = (
            base64.b64encode(b'x' * 32))
        mock_readconf.return_value = TEST_KMS_KEYMASTER_CONF
        factory = kms_keymaster.filter_factory(TEST_KMS_KEYMASTER_CONF)
        self.assertTrue(callable(factory))
        self.assertTrue(callable(factory(self.swift)))

    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch.object(kms_keymaster.KmsKeyMaster,
                       '_get_root_secret')
    def test_app_exception_v3(self, mock_get_root_secret_from_kms,
                              mock_readconf):
        mock_get_root_secret_from_kms.return_value = (
            base64.b64encode(b'x' * 32))
        mock_readconf.return_value = TEST_KMS_KEYMASTER_CONF
        app = kms_keymaster.KmsKeyMaster(
            FakeAppThatExcepts(), TEST_KMS_KEYMASTER_CONF)
        req = Request.blank('/', environ={'REQUEST_METHOD': 'PUT'})
        start_response, _ = capture_start_response()
        # The wrapped app's exception must propagate through the middleware.
        self.assertRaises(Exception, app, req.environ, start_response)

    @mock.patch.object(kms_keymaster.KmsKeyMaster, '_get_root_secret')
    def test_get_root_secret(
            self, mock_get_root_secret_from_kms):
        # Successful call with coarse _get_root_secret() mock and valid
        # Barbican configuration parameters in the proxy-server config.
        mock_get_root_secret_from_kms.return_value = (
            base64.b64encode(b'x' * 32))
        self.app = kms_keymaster.KmsKeyMaster(self.swift,
                                              TEST_KMS_KEYMASTER_CONF)
        # Verify that _get_root_secret() was called with the right conf.
        mock_get_root_secret_from_kms.assert_called_with(
            TEST_KMS_KEYMASTER_CONF
        )

    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch.object(kms_keymaster.KmsKeyMaster, '_get_root_secret')
    def test_get_root_secret_from_external_file(
            self, mock_get_root_secret_from_kms, mock_readconf):
        # Valid Barbican configuration comes from the external file.
        mock_readconf.return_value = TEST_KMS_KEYMASTER_CONF
        mock_get_root_secret_from_kms.return_value = (
            base64.b64encode(b'x' * 32))
        # Point to external config in proxy-server config.
        self.app = kms_keymaster.KmsKeyMaster(
            self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
        mock_get_root_secret_from_kms.assert_called_with(
            TEST_KMS_KEYMASTER_CONF
        )
        self.assertEqual(mock_readconf.mock_calls, [
            mock.call('PATH_TO_KEYMASTER_CONFIG_FILE', 'kms_keymaster')])

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config, mock_keystone_password):
        # Successful construction with finer grained mocks.
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = TEST_KMS_KEYMASTER_CONF
        # Must not raise.
        self.app = kms_keymaster.KmsKeyMaster(self.swift,
                                              TEST_KMS_KEYMASTER_CONF)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_invalid_key_id(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            key_id=TEST_KMS_INVALID_KEY_ID)
        with self.assertRaises(ValueError) as cm:
            kms_keymaster.KmsKeyMaster(
                self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
        self.assertEqual(cm.exception.args[0],
                         ERR_MESSAGE_SECRET_INCORRECTLY_SPECIFIED)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_nonexistent_key_id(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            key_id=TEST_KMS_NONEXISTENT_KEY_ID)
        # The mock raises a plain Exception in place of castellan's
        # ManagedObjectNotFoundError; match on the message.
        with self.assertRaises(Exception) as cm:
            kms_keymaster.KmsKeyMaster(
                self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
        self.assertEqual(cm.exception.args[0],
                         ERR_MESSAGE_KEY_UUID_NOT_FOUND +
                         TEST_KMS_NONEXISTENT_KEY_ID)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_invalid_key_format(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        # An 'Opaque' (non-RAW) key must be rejected.
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            key_id=TEST_KMS_OPAQUE_KEY_ID)
        self.assertRaises(
            ValueError, kms_keymaster.KmsKeyMaster,
            self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_config_file_and_params(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        # Specifying both an external config file and inline config
        # parameters is an error.
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        kms_conf = self._make_conf(
            keymaster_config_path='PATH_TO_KEYMASTER_CONFIG_FILE')
        mock_readconf.return_value = kms_conf
        with self.assertRaises(Exception) as cm:
            kms_keymaster.KmsKeyMaster(self.swift, kms_conf)
        expected_message = ('keymaster_config_path is set, but there are '
                            'other config options specified:')
        self.assertTrue(cm.exception.args[0].startswith(expected_message),
                        "Error message does not start with '%s'" %
                        expected_message)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_invalid_username(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(username='invaliduser')
        with self.assertRaises(Unauthorized) as cm:
            kms_keymaster.KmsKeyMaster(
                self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
        self.assertEqual(cm.exception.http_status, 401)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_invalid_password(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            password='invalidpassword')
        with self.assertRaises(Unauthorized) as cm:
            kms_keymaster.KmsKeyMaster(
                self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
        self.assertEqual(cm.exception.http_status, 401)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_connect_failure_auth_url(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config, mock_keystone_password):
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            auth_endpoint=TEST_KMS_CONNECT_FAILURE_URL)
        self.assertRaises(
            ConnectFailure, kms_keymaster.KmsKeyMaster,
            self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_bad_auth_url(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            auth_endpoint=TEST_KMS_NON_BARBICAN_URL)
        self.assertRaises(
            DiscoveryFailure, kms_keymaster.KmsKeyMaster,
            self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_bad_user_domain_name(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config, mock_keystone_password):
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            user_domain_name=TEST_KMS_INVALID_USER_DOMAIN_NAME)
        with self.assertRaises(Unauthorized) as cm:
            kms_keymaster.KmsKeyMaster(
                self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)
        self.assertEqual(cm.exception.http_status, 401)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_invalid_key_algorithm(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        # A 'des' format key must be rejected.
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            key_id=TEST_KMS_DES_KEY_ID)
        self.assertRaises(
            ValueError, kms_keymaster.KmsKeyMaster,
            self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_invalid_key_length(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        # A 128-bit key is too short and must be rejected.
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            key_id=TEST_KMS_SHORT_KEY_ID)
        self.assertRaises(
            ValueError, kms_keymaster.KmsKeyMaster,
            self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_mocked_castellan_keymanager_none_key(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config,
            mock_keystone_password):
        # The key manager returning None must be rejected.
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config,
                            mock_keystone_password)
        mock_readconf.return_value = self._make_conf(
            key_id=TEST_KMS_NONE_KEY_ID)
        self.assertRaises(
            ValueError, kms_keymaster.KmsKeyMaster,
            self.swift, TEST_PROXYSERVER_CONF_EXTERNAL_KEYMASTER_CONF)

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword', MockPassword)
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_get_root_secret_multiple_keys(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config):
        config = self._make_conf(
            key_id_foo='foo-valid_kms_key_id-123456',
            key_id_bar='bar-valid_kms_key_id-123456',
            key_id_baz='zz-valid_unicode_kms_key_id-123456',
            key_id_non_ascii=u'\N{SNOWMAN}_unicode_key_id',
            active_root_secret_id='foo')
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config)
        mock_readconf.return_value = config
        self.app = kms_keymaster.KmsKeyMaster(self.swift,
                                              config)
        # Each secret is 32 repetitions of the key id's first character
        # (see MockBarbicanKeyManager.get).
        expected_secrets = {
            None: b'vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv',
            'foo': b'ffffffffffffffffffffffffffffffff',
            'bar': b'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
            'baz': b'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz',
            'non_ascii': b'\xe2\x98\x83' * 32}
        self.assertDictEqual(self.app._root_secrets, expected_secrets)
        self.assertEqual(self.app.active_secret_id, 'foo')

    @mock.patch('swift.common.middleware.crypto.kms_keymaster.'
                'keystone_password.KeystonePassword', MockPassword)
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.cfg')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.options')
    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    @mock.patch('swift.common.middleware.crypto.kms_keymaster.key_manager')
    def test_get_root_secret_legacy_key_id(
            self, mock_castellan_key_manager, mock_readconf,
            mock_castellan_options, mock_oslo_config):
        self._install_mocks(mock_castellan_key_manager,
                            mock_castellan_options, mock_oslo_config)
        mock_readconf.return_value = TEST_KMS_KEYMASTER_CONF
        self.app = kms_keymaster.KmsKeyMaster(self.swift,
                                              TEST_KMS_KEYMASTER_CONF)
        expected_secrets = {None: b'vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv'}
        self.assertDictEqual(self.app._root_secrets, expected_secrets)
        self.assertIsNone(self.app.active_secret_id)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/crypto/test_kms_keymaster.py |
swift-master | test/unit/common/middleware/crypto/__init__.py |
|
# Copyright (c) 2015-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
from swift.common.exceptions import UnknownSecretIdError
from swift.common.middleware.crypto.crypto_utils import Crypto
from swift.common.utils import md5
def fetch_crypto_keys(key_id=None):
    """Return a dict of fake per-resource encryption keys for tests.

    key_id may be a dict whose optional 'secret_id' selects which
    generation of keys is returned; an unknown secret_id raises
    UnknownSecretIdError.
    """
    id_to_keys = {None: {'account': b'This is an account key 012345678',
                         'container': b'This is a container key 01234567',
                         'object': b'This is an object key 0123456789'},
                  'myid': {'account': b'This is an account key 123456789',
                           'container': b'This is a container key 12345678',
                           'object': b'This is an object key 1234567890'}}
    secret_id = (key_id or {}).get('secret_id') or None
    if secret_id not in id_to_keys:
        raise UnknownSecretIdError(secret_id)
    keys = dict(id_to_keys[secret_id])
    keys['id'] = {'v': 'fake', 'path': '/a/c/fake'}
    if secret_id:
        keys['id']['secret_id'] = secret_id
    keys['all_ids'] = [{'v': 'fake', 'path': '/a/c/fake'},
                       {'v': 'fake', 'path': '/a/c/fake',
                        'secret_id': 'myid'}]
    return keys
def md5hex(s):
    """Return the hex MD5 digest of *s* (test fixture hashing, not
    security-sensitive)."""
    digest = md5(s, usedforsecurity=False)
    return digest.hexdigest()
def encrypt(val, key=None, iv=None, ctxt=None):
    """Return *val* encrypted with *ctxt*, building a new encryption
    context from *key* and *iv* when no context is supplied."""
    encryption_ctxt = (Crypto({}).create_encryption_ctxt(key, iv)
                       if ctxt is None else ctxt)
    return encryption_ctxt.update(val)
def decrypt(key, iv, enc_val):
    """Return the plaintext for *enc_val* using *key* and *iv*."""
    return Crypto({}).create_decryption_ctxt(key, iv, 0).update(enc_val)
# Fixed IV used throughout the crypto unit tests so that ciphertexts are
# deterministic and can be asserted against.
FAKE_IV = b"This is an IV123"
# do not use this example encryption_root_secret in production, use a randomly
# generated value with high entropy
TEST_KEYMASTER_CONF = {
    'encryption_root_secret': base64.b64encode(b'x' * 32),
    'encryption_root_secret_1': base64.b64encode(b'y' * 32),
    'encryption_root_secret_2': base64.b64encode(b'z' * 32)
}
def fake_get_crypto_meta(**kwargs):
    """Return crypto-meta with the fixed test IV and cipher; any keyword
    arguments override or extend the defaults."""
    return dict({'iv': FAKE_IV, 'cipher': Crypto.cipher}, **kwargs)
| swift-master | test/unit/common/middleware/crypto/crypto_helpers.py |
# Copyright (c) 2015-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import hashlib
import hmac
import json
import os
import unittest
import mock
from six.moves.urllib import parse as urlparse
from swift.common.middleware.crypto import encrypter
from swift.common.middleware.crypto.crypto_utils import (
CRYPTO_KEY_CALLBACK, Crypto)
from swift.common.swob import (
Request, HTTPException, HTTPCreated, HTTPAccepted, HTTPOk, HTTPBadRequest,
wsgi_to_bytes, bytes_to_wsgi)
from swift.common.utils import FileLikeIter
from test.debug_logger import debug_logger
from test.unit import EMPTY_ETAG
from test.unit.common.middleware.crypto.crypto_helpers import (
fetch_crypto_keys, md5hex, FAKE_IV, encrypt)
from test.unit.common.middleware.helpers import FakeSwift, FakeAppThatExcepts
@mock.patch('swift.common.middleware.crypto.crypto_utils.Crypto.create_iv',
lambda *args: FAKE_IV)
class TestEncrypter(unittest.TestCase):
    def setUp(self):
        # Wrap a FakeSwift backend in the Encrypter middleware under test,
        # and capture its log output for assertions.
        self.app = FakeSwift()
        self.encrypter = encrypter.Encrypter(self.app, {})
        self.encrypter.logger = debug_logger()
    def _verify_user_metadata(self, req_hdrs, name, value, key):
        """Assert that user metadata item ``name`` was encrypted with ``key``.

        Checks that the plaintext X-Object-Meta-<name> header is gone, that
        the transient-sysmeta replacement carries the base64 ciphertext plus
        a ``swift_meta`` parameter (cipher and IV), and that the common
        X-Object-Transient-Sysmeta-Crypto-Meta header is present.
        """
        # verify encrypted version of user metadata
        self.assertNotIn('X-Object-Meta-' + name, req_hdrs)
        expected_hdr = 'X-Object-Transient-Sysmeta-Crypto-Meta-' + name
        self.assertIn(expected_hdr, req_hdrs)
        enc_val, param = req_hdrs[expected_hdr].split(';')
        param = param.strip()
        self.assertTrue(param.startswith('swift_meta='))
        actual_meta = json.loads(
            urlparse.unquote_plus(param[len('swift_meta='):]))
        self.assertEqual(Crypto.cipher, actual_meta['cipher'])
        meta_iv = base64.b64decode(actual_meta['iv'])
        self.assertEqual(FAKE_IV, meta_iv)
        self.assertEqual(
            base64.b64encode(encrypt(wsgi_to_bytes(value), key, meta_iv)),
            wsgi_to_bytes(enc_val))
        # if there is any encrypted user metadata then this header should exist
        self.assertIn('X-Object-Transient-Sysmeta-Crypto-Meta', req_hdrs)
        common_meta = json.loads(urlparse.unquote_plus(
            req_hdrs['X-Object-Transient-Sysmeta-Crypto-Meta']))
        self.assertDictEqual({'cipher': Crypto.cipher,
                              'key_id': {'v': 'fake', 'path': '/a/c/fake'}},
                             common_meta)
    def test_PUT_req(self):
        """End-to-end check of an object PUT through the encrypter.

        Verifies: the body is encrypted with a (mocked) random body key that
        is itself wrapped with the object key; the backend Etag is the
        ciphertext md5 while the client sees the plaintext etag; the
        plaintext etag is stored encrypted plus an HMAC for conditional
        requests; the container-update override etag is encrypted with the
        container key; user metadata is encrypted; content-type and sysmeta
        are passed through unencrypted.
        """
        body_key = os.urandom(32)
        object_key = fetch_crypto_keys()['object']
        plaintext = b'FAKE APP'
        plaintext_etag = md5hex(plaintext)
        ciphertext = encrypt(plaintext, body_key, FAKE_IV)
        ciphertext_etag = md5hex(ciphertext)
        env = {'REQUEST_METHOD': 'PUT',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        hdrs = {'etag': plaintext_etag,
                'content-type': 'text/plain',
                'content-length': str(len(plaintext)),
                'x-object-meta-etag': 'not to be confused with the Etag!',
                'x-object-meta-test': 'encrypt me',
                'x-object-sysmeta-test': 'do not encrypt me'}
        req = Request.blank(
            '/v1/a/c/o', environ=env, body=plaintext, headers=hdrs)
        self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
        # pin the body key so the expected ciphertext is deterministic
        with mock.patch(
                'swift.common.middleware.crypto.crypto_utils.'
                'Crypto.create_random_key',
                return_value=body_key):
            resp = req.get_response(self.encrypter)
        self.assertEqual('201 Created', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        # verify metadata items
        self.assertEqual(1, len(self.app.calls), self.app.calls)
        self.assertEqual('PUT', self.app.calls[0][0])
        req_hdrs = self.app.headers[0]
        # verify body crypto meta
        actual = req_hdrs['X-Object-Sysmeta-Crypto-Body-Meta']
        actual = json.loads(urlparse.unquote_plus(actual))
        self.assertEqual(Crypto().cipher, actual['cipher'])
        self.assertEqual(FAKE_IV, base64.b64decode(actual['iv']))
        # verify wrapped body key
        expected_wrapped_key = encrypt(body_key, object_key, FAKE_IV)
        self.assertEqual(expected_wrapped_key,
                         base64.b64decode(actual['body_key']['key']))
        self.assertEqual(FAKE_IV,
                         base64.b64decode(actual['body_key']['iv']))
        self.assertEqual(fetch_crypto_keys()['id'], actual['key_id'])
        # verify etag
        self.assertEqual(ciphertext_etag, req_hdrs['Etag'])
        encrypted_etag, _junk, etag_meta = \
            req_hdrs['X-Object-Sysmeta-Crypto-Etag'].partition('; swift_meta=')
        # verify crypto_meta was appended to this etag
        self.assertTrue(etag_meta)
        actual_meta = json.loads(urlparse.unquote_plus(etag_meta))
        self.assertEqual(Crypto().cipher, actual_meta['cipher'])
        # verify encrypted version of plaintext etag
        actual = base64.b64decode(encrypted_etag)
        etag_iv = base64.b64decode(actual_meta['iv'])
        enc_etag = encrypt(plaintext_etag.encode('ascii'), object_key, etag_iv)
        self.assertEqual(enc_etag, actual)
        # verify etag MAC for conditional requests
        actual_hmac = base64.b64decode(
            req_hdrs['X-Object-Sysmeta-Crypto-Etag-Mac'])
        exp_hmac = hmac.new(
            object_key,
            plaintext_etag.encode('ascii'),
            hashlib.sha256).digest()
        self.assertEqual(actual_hmac, exp_hmac)
        # verify encrypted etag for container update
        self.assertIn(
            'X-Object-Sysmeta-Container-Update-Override-Etag', req_hdrs)
        parts = req_hdrs[
            'X-Object-Sysmeta-Container-Update-Override-Etag'].rsplit(';', 1)
        self.assertEqual(2, len(parts))
        # extract crypto_meta from end of etag for container update
        param = parts[1].strip()
        crypto_meta_tag = 'swift_meta='
        self.assertTrue(param.startswith(crypto_meta_tag), param)
        actual_meta = json.loads(
            urlparse.unquote_plus(param[len(crypto_meta_tag):]))
        self.assertEqual(Crypto().cipher, actual_meta['cipher'])
        self.assertEqual(fetch_crypto_keys()['id'], actual_meta['key_id'])
        cont_key = fetch_crypto_keys()['container']
        cont_etag_iv = base64.b64decode(actual_meta['iv'])
        self.assertEqual(FAKE_IV, cont_etag_iv)
        exp_etag = encrypt(plaintext_etag.encode('ascii'),
                           cont_key, cont_etag_iv)
        self.assertEqual(exp_etag, base64.b64decode(parts[0]))
        # content-type is not encrypted
        self.assertEqual('text/plain', req_hdrs['Content-Type'])
        # user meta is encrypted
        self._verify_user_metadata(req_hdrs, 'Test', 'encrypt me', object_key)
        self._verify_user_metadata(
            req_hdrs, 'Etag', 'not to be confused with the Etag!', object_key)
        # sysmeta is not encrypted
        self.assertEqual('do not encrypt me',
                         req_hdrs['X-Object-Sysmeta-Test'])
        # verify object is encrypted by getting direct from the app
        get_req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        resp = get_req.get_response(self.app)
        self.assertEqual(ciphertext, resp.body)
        self.assertEqual(ciphertext_etag, resp.headers['Etag'])
def test_PUT_zero_size_object(self):
# object body encryption should be skipped for zero sized object body
object_key = fetch_crypto_keys()['object']
plaintext_etag = EMPTY_ETAG
env = {'REQUEST_METHOD': 'PUT',
CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
hdrs = {'etag': EMPTY_ETAG,
'content-type': 'text/plain',
'content-length': '0',
'x-object-meta-etag': 'not to be confused with the Etag!',
'x-object-meta-test': 'encrypt me',
'x-object-sysmeta-test': 'do not encrypt me'}
req = Request.blank(
'/v1/a/c/o', environ=env, body='', headers=hdrs)
self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
resp = req.get_response(self.encrypter)
self.assertEqual('201 Created', resp.status)
self.assertEqual(plaintext_etag, resp.headers['Etag'])
self.assertEqual(1, len(self.app.calls), self.app.calls)
self.assertEqual('PUT', self.app.calls[0][0])
req_hdrs = self.app.headers[0]
# verify that there is no body crypto meta
self.assertNotIn('X-Object-Sysmeta-Crypto-Meta', req_hdrs)
# verify etag is md5 of plaintext
self.assertEqual(EMPTY_ETAG, req_hdrs['Etag'])
# verify there is no etag crypto meta
self.assertNotIn('X-Object-Sysmeta-Crypto-Etag', req_hdrs)
# verify there is no container update override for etag
self.assertNotIn(
'X-Object-Sysmeta-Container-Update-Override-Etag', req_hdrs)
# user meta is still encrypted
self._verify_user_metadata(req_hdrs, 'Test', 'encrypt me', object_key)
self._verify_user_metadata(
req_hdrs, 'Etag', 'not to be confused with the Etag!', object_key)
# sysmeta is not encrypted
self.assertEqual('do not encrypt me',
req_hdrs['X-Object-Sysmeta-Test'])
# verify object is empty by getting direct from the app
get_req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = get_req.get_response(self.app)
self.assertEqual(b'', resp.body)
self.assertEqual(EMPTY_ETAG, resp.headers['Etag'])
    def _test_PUT_with_other_footers(self, override_etag):
        """Verify handling of another middleware's footer callback.

        The encrypter must merge its own crypto footers with footers supplied
        by an upstream ``swift.callback.update_footers`` callback: the
        upstream Etag footer wins for plaintext-etag validation, non-etag
        footers pass through unchanged, and the upstream container-update
        override etag gets encrypted with the container key.
        """
        body_key = os.urandom(32)
        object_key = fetch_crypto_keys()['object']
        plaintext = b'FAKE APP'
        plaintext_etag = md5hex(plaintext)
        ciphertext = encrypt(plaintext, body_key, FAKE_IV)
        ciphertext_etag = md5hex(ciphertext)
        other_footers = {
            'Etag': plaintext_etag,
            'X-Object-Sysmeta-Other': 'other sysmeta',
            'X-Object-Sysmeta-Container-Update-Override-Size':
                'other override',
            'X-Object-Sysmeta-Container-Update-Override-Etag':
                override_etag}
        env = {'REQUEST_METHOD': 'PUT',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys,
               'swift.callback.update_footers':
                   lambda footers: footers.update(other_footers)}
        hdrs = {'content-type': 'text/plain',
                'content-length': str(len(plaintext)),
                'Etag': 'correct etag is in footers'}
        req = Request.blank(
            '/v1/a/c/o', environ=env, body=plaintext, headers=hdrs)
        self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
        # pin the body key so the expected ciphertext is deterministic
        with mock.patch(
                'swift.common.middleware.crypto.crypto_utils.'
                'Crypto.create_random_key',
                lambda *args: body_key):
            resp = req.get_response(self.encrypter)
        self.assertEqual('201 Created', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        # verify metadata items
        self.assertEqual(1, len(self.app.calls), self.app.calls)
        self.assertEqual('PUT', self.app.calls[0][0])
        req_hdrs = self.app.headers[0]
        # verify that other middleware's footers made it to app, including any
        # container update overrides but nothing Etag-related
        other_footers.pop('Etag')
        other_footers.pop('X-Object-Sysmeta-Container-Update-Override-Etag')
        for k, v in other_footers.items():
            self.assertEqual(v, req_hdrs[k])
        # verify encryption footers are ok
        encrypted_etag, _junk, etag_meta = \
            req_hdrs['X-Object-Sysmeta-Crypto-Etag'].partition('; swift_meta=')
        self.assertTrue(etag_meta)
        actual_meta = json.loads(urlparse.unquote_plus(etag_meta))
        self.assertEqual(Crypto().cipher, actual_meta['cipher'])
        self.assertEqual(ciphertext_etag, req_hdrs['Etag'])
        actual = base64.b64decode(encrypted_etag)
        etag_iv = base64.b64decode(actual_meta['iv'])
        exp_etag = encrypt(plaintext_etag.encode('ascii'), object_key, etag_iv)
        self.assertEqual(exp_etag, actual)
        # verify encrypted etag for container update
        self.assertIn(
            'X-Object-Sysmeta-Container-Update-Override-Etag', req_hdrs)
        parts = req_hdrs[
            'X-Object-Sysmeta-Container-Update-Override-Etag'].rsplit(';', 1)
        self.assertEqual(2, len(parts))
        # extract crypto_meta from end of etag for container update
        param = parts[1].strip()
        crypto_meta_tag = 'swift_meta='
        self.assertTrue(param.startswith(crypto_meta_tag), param)
        actual_meta = json.loads(
            urlparse.unquote_plus(param[len(crypto_meta_tag):]))
        self.assertEqual(Crypto().cipher, actual_meta['cipher'])
        cont_key = fetch_crypto_keys()['container']
        cont_etag_iv = base64.b64decode(actual_meta['iv'])
        self.assertEqual(FAKE_IV, cont_etag_iv)
        exp_etag = encrypt(override_etag.encode('ascii'),
                           cont_key, cont_etag_iv)
        self.assertEqual(exp_etag, base64.b64decode(parts[0]))
        # verify body crypto meta
        actual = req_hdrs['X-Object-Sysmeta-Crypto-Body-Meta']
        actual = json.loads(urlparse.unquote_plus(actual))
        self.assertEqual(Crypto().cipher, actual['cipher'])
        self.assertEqual(FAKE_IV, base64.b64decode(actual['iv']))
        # verify wrapped body key
        expected_wrapped_key = encrypt(body_key, object_key, FAKE_IV)
        self.assertEqual(expected_wrapped_key,
                         base64.b64decode(actual['body_key']['key']))
        self.assertEqual(FAKE_IV,
                         base64.b64decode(actual['body_key']['iv']))
        self.assertEqual(fetch_crypto_keys()['id'], actual['key_id'])
    def test_PUT_with_other_footers(self):
        """Exercise footer handling with a regular override etag value."""
        self._test_PUT_with_other_footers('override etag')

    def test_PUT_with_other_footers_and_etag_of_empty_body(self):
        # verify that an override etag value of EMPTY_ETAG will be encrypted
        # when there was a non-zero body length
        self._test_PUT_with_other_footers(EMPTY_ETAG)
    def _test_PUT_with_etag_override_in_headers(self, override_etag):
        """Verify handling of another middleware's
        container-update-override-etag supplied in request headers: the
        override value must be encrypted with the container key and carry a
        swift_meta crypto-meta parameter.
        """
        plaintext = b'FAKE APP'
        plaintext_etag = md5hex(plaintext)
        env = {'REQUEST_METHOD': 'PUT',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        hdrs = {'content-type': 'text/plain',
                'content-length': str(len(plaintext)),
                'Etag': plaintext_etag,
                'X-Object-Sysmeta-Container-Update-Override-Etag':
                    override_etag}
        req = Request.blank(
            '/v1/a/c/o', environ=env, body=plaintext, headers=hdrs)
        self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
        resp = req.get_response(self.encrypter)
        self.assertEqual('201 Created', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        # verify metadata items
        self.assertEqual(1, len(self.app.calls), self.app.calls)
        self.assertEqual(('PUT', '/v1/a/c/o'), self.app.calls[0])
        req_hdrs = self.app.headers[0]
        # verify encrypted etag for container update
        self.assertIn(
            'X-Object-Sysmeta-Container-Update-Override-Etag', req_hdrs)
        parts = req_hdrs[
            'X-Object-Sysmeta-Container-Update-Override-Etag'].rsplit(';', 1)
        self.assertEqual(2, len(parts))
        cont_key = fetch_crypto_keys()['container']
        # extract crypto_meta from end of etag for container update
        param = parts[1].strip()
        crypto_meta_tag = 'swift_meta='
        self.assertTrue(param.startswith(crypto_meta_tag), param)
        actual_meta = json.loads(
            urlparse.unquote_plus(param[len(crypto_meta_tag):]))
        self.assertEqual(Crypto().cipher, actual_meta['cipher'])
        self.assertEqual(fetch_crypto_keys()['id'], actual_meta['key_id'])
        cont_etag_iv = base64.b64decode(actual_meta['iv'])
        self.assertEqual(FAKE_IV, cont_etag_iv)
        exp_etag = encrypt(override_etag.encode('ascii'),
                           cont_key, cont_etag_iv)
        self.assertEqual(exp_etag, base64.b64decode(parts[0]))
    def test_PUT_with_etag_override_in_headers(self):
        """Exercise header override handling with a regular etag value."""
        self._test_PUT_with_etag_override_in_headers('override_etag')

    def test_PUT_with_etag_of_empty_body_override_in_headers(self):
        # verify that an override etag value of EMPTY_ETAG will be encrypted
        # when there was a non-zero body length
        self._test_PUT_with_etag_override_in_headers(EMPTY_ETAG)
def _test_PUT_with_empty_etag_override_in_headers(self, plaintext):
# verify that an override etag value of '' from other middleware is
# passed through unencrypted
plaintext_etag = md5hex(plaintext)
override_etag = ''
env = {'REQUEST_METHOD': 'PUT',
CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
hdrs = {'content-type': 'text/plain',
'content-length': str(len(plaintext)),
'Etag': plaintext_etag,
'X-Object-Sysmeta-Container-Update-Override-Etag':
override_etag}
req = Request.blank(
'/v1/a/c/o', environ=env, body=plaintext, headers=hdrs)
self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
resp = req.get_response(self.encrypter)
self.assertEqual('201 Created', resp.status)
self.assertEqual(plaintext_etag, resp.headers['Etag'])
self.assertEqual(1, len(self.app.calls), self.app.calls)
self.assertEqual(('PUT', '/v1/a/c/o'), self.app.calls[0])
req_hdrs = self.app.headers[0]
self.assertIn(
'X-Object-Sysmeta-Container-Update-Override-Etag', req_hdrs)
self.assertEqual(
override_etag,
req_hdrs['X-Object-Sysmeta-Container-Update-Override-Etag'])
    def test_PUT_with_empty_etag_override_in_headers(self):
        """Empty header override etag passes through for a non-empty body."""
        self._test_PUT_with_empty_etag_override_in_headers(b'body')

    def test_PUT_with_empty_etag_override_in_headers_no_body(self):
        """Empty header override etag passes through for an empty body."""
        self._test_PUT_with_empty_etag_override_in_headers(b'')
    def _test_PUT_with_empty_etag_override_in_footers(self, plaintext):
        """An override etag of '' supplied via another middleware's footer
        callback must be passed through to the backend unencrypted.
        """
        plaintext_etag = md5hex(plaintext)
        override_etag = ''
        other_footers = {
            'X-Object-Sysmeta-Container-Update-Override-Etag': override_etag}
        env = {'REQUEST_METHOD': 'PUT',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys,
               'swift.callback.update_footers':
                   lambda footers: footers.update(other_footers)}
        hdrs = {'content-type': 'text/plain',
                'content-length': str(len(plaintext)),
                'Etag': plaintext_etag}
        req = Request.blank(
            '/v1/a/c/o', environ=env, body=plaintext, headers=hdrs)
        self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
        resp = req.get_response(self.encrypter)
        self.assertEqual('201 Created', resp.status)
        self.assertEqual(plaintext_etag, resp.headers['Etag'])
        self.assertEqual(1, len(self.app.calls), self.app.calls)
        self.assertEqual(('PUT', '/v1/a/c/o'), self.app.calls[0])
        req_hdrs = self.app.headers[0]
        # the empty override must reach the backend verbatim
        self.assertIn(
            'X-Object-Sysmeta-Container-Update-Override-Etag', req_hdrs)
        self.assertEqual(
            override_etag,
            req_hdrs['X-Object-Sysmeta-Container-Update-Override-Etag'])
    def test_PUT_with_empty_etag_override_in_footers(self):
        """Empty footer override etag passes through for a non-empty body."""
        self._test_PUT_with_empty_etag_override_in_footers(b'body')

    def test_PUT_with_empty_etag_override_in_footers_no_body(self):
        """Empty footer override etag passes through for an empty body."""
        self._test_PUT_with_empty_etag_override_in_footers(b'')
    def test_PUT_with_bad_etag_in_other_footers(self):
        """An etag supplied in footers from other middleware overrides the
        header etag when validating the inbound plaintext etag; a mismatch
        must produce 422 with no Etag in the response.
        """
        plaintext = b'FAKE APP'
        plaintext_etag = md5hex(plaintext)
        other_footers = {
            'Etag': 'bad etag',
            'X-Object-Sysmeta-Other': 'other sysmeta',
            'X-Object-Sysmeta-Container-Update-Override-Etag':
                'other override'}
        env = {'REQUEST_METHOD': 'PUT',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys,
               'swift.callback.update_footers':
                   lambda footers: footers.update(other_footers)}
        # the header etag is correct; the footer etag is the one that counts
        hdrs = {'content-type': 'text/plain',
                'content-length': str(len(plaintext)),
                'Etag': plaintext_etag}
        req = Request.blank(
            '/v1/a/c/o', environ=env, body=plaintext, headers=hdrs)
        self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
        resp = req.get_response(self.encrypter)
        self.assertEqual('422 Unprocessable Entity', resp.status)
        self.assertNotIn('Etag', resp.headers)
def test_PUT_with_bad_etag_in_headers_and_other_footers(self):
# verify that etag supplied in headers from other middleware is used if
# none is supplied in footers when validating inbound plaintext etags
plaintext = 'FAKE APP'
other_footers = {
'X-Object-Sysmeta-Other': 'other sysmeta',
'X-Object-Sysmeta-Container-Update-Override-Etag':
'other override'}
env = {'REQUEST_METHOD': 'PUT',
CRYPTO_KEY_CALLBACK: fetch_crypto_keys,
'swift.callback.update_footers':
lambda footers: footers.update(other_footers)}
hdrs = {'content-type': 'text/plain',
'content-length': str(len(plaintext)),
'Etag': 'bad etag'}
req = Request.blank(
'/v1/a/c/o', environ=env, body=plaintext, headers=hdrs)
self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
resp = req.get_response(self.encrypter)
self.assertEqual('422 Unprocessable Entity', resp.status)
self.assertNotIn('Etag', resp.headers)
    def test_PUT_nothing_read(self):
        """Simulate a downstream filter/app that never reads the request
        body from the encrypter.

        With nothing read there is nothing encrypted, so no encryption
        footers may be emitted; any upstream footer callback must still be
        invoked, and a non-empty upstream container-update override etag
        must still be encrypted (but EMPTY_ETAG or '' overrides must not).
        """
        class NonReadingApp(object):
            def __call__(self, env, start_response):
                # note: no read from wsgi.input
                req = Request(env)
                env['swift.callback.update_footers'](req.headers)
                call_headers.append(req.headers)
                resp = HTTPCreated(req=req, headers={'Etag': 'response etag'})
                return resp(env, start_response)

        env = {'REQUEST_METHOD': 'PUT',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        # NOTE(review): content-length is an int here, unlike the str used
        # elsewhere in this file -- swob seems to tolerate it; confirm.
        hdrs = {'content-type': 'text/plain',
                'content-length': 0,
                'etag': 'etag from client'}
        req = Request.blank('/v1/a/c/o', environ=env, body='', headers=hdrs)
        call_headers = []
        resp = req.get_response(encrypter.Encrypter(NonReadingApp(), {}))
        self.assertEqual('201 Created', resp.status)
        self.assertEqual('response etag', resp.headers['Etag'])
        self.assertEqual(1, len(call_headers))
        self.assertEqual('etag from client', call_headers[0]['etag'])
        # verify no encryption footers
        for k in call_headers[0]:
            self.assertFalse(k.lower().startswith('x-object-sysmeta-crypto-'))
        # check that an upstream footer callback gets called
        other_footers = {
            'Etag': EMPTY_ETAG,
            'X-Object-Sysmeta-Other': 'other sysmeta',
            'X-Object-Sysmeta-Container-Update-Override-Etag':
                'other override'}
        env.update({'swift.callback.update_footers':
                    lambda footers: footers.update(other_footers)})
        req = Request.blank('/v1/a/c/o', environ=env, body='', headers=hdrs)
        call_headers = []
        resp = req.get_response(encrypter.Encrypter(NonReadingApp(), {}))
        self.assertEqual('201 Created', resp.status)
        self.assertEqual('response etag', resp.headers['Etag'])
        self.assertEqual(1, len(call_headers))
        # verify encrypted override etag for container update.
        self.assertIn(
            'X-Object-Sysmeta-Container-Update-Override-Etag', call_headers[0])
        parts = call_headers[0][
            'X-Object-Sysmeta-Container-Update-Override-Etag'].rsplit(';', 1)
        self.assertEqual(2, len(parts))
        cont_key = fetch_crypto_keys()['container']
        param = parts[1].strip()
        crypto_meta_tag = 'swift_meta='
        self.assertTrue(param.startswith(crypto_meta_tag), param)
        actual_meta = json.loads(
            urlparse.unquote_plus(param[len(crypto_meta_tag):]))
        self.assertEqual(Crypto().cipher, actual_meta['cipher'])
        self.assertEqual(fetch_crypto_keys()['id'], actual_meta['key_id'])
        cont_etag_iv = base64.b64decode(actual_meta['iv'])
        self.assertEqual(FAKE_IV, cont_etag_iv)
        self.assertEqual(encrypt(b'other override', cont_key, cont_etag_iv),
                         base64.b64decode(parts[0]))
        # verify that other middleware's footers made it to app
        other_footers.pop('X-Object-Sysmeta-Container-Update-Override-Etag')
        for k, v in other_footers.items():
            self.assertEqual(v, call_headers[0][k])
        # verify no encryption footers
        for k in call_headers[0]:
            self.assertFalse(k.lower().startswith('x-object-sysmeta-crypto-'))
        # if upstream footer override etag is for an empty body then check that
        # it is not encrypted
        other_footers = {
            'Etag': EMPTY_ETAG,
            'X-Object-Sysmeta-Container-Update-Override-Etag': EMPTY_ETAG}
        env.update({'swift.callback.update_footers':
                    lambda footers: footers.update(other_footers)})
        req = Request.blank('/v1/a/c/o', environ=env, body='', headers=hdrs)
        call_headers = []
        resp = req.get_response(encrypter.Encrypter(NonReadingApp(), {}))
        self.assertEqual('201 Created', resp.status)
        self.assertEqual('response etag', resp.headers['Etag'])
        self.assertEqual(1, len(call_headers))
        # verify that other middleware's footers made it to app
        for k, v in other_footers.items():
            self.assertEqual(v, call_headers[0][k])
        # verify no encryption footers
        for k in call_headers[0]:
            self.assertFalse(k.lower().startswith('x-object-sysmeta-crypto-'))
        # if upstream footer override etag is an empty string then check that
        # it is not encrypted
        other_footers = {
            'Etag': EMPTY_ETAG,
            'X-Object-Sysmeta-Container-Update-Override-Etag': ''}
        env.update({'swift.callback.update_footers':
                    lambda footers: footers.update(other_footers)})
        req = Request.blank('/v1/a/c/o', environ=env, body='', headers=hdrs)
        call_headers = []
        resp = req.get_response(encrypter.Encrypter(NonReadingApp(), {}))
        self.assertEqual('201 Created', resp.status)
        self.assertEqual('response etag', resp.headers['Etag'])
        self.assertEqual(1, len(call_headers))
        # verify that other middleware's footers made it to app
        for k, v in other_footers.items():
            self.assertEqual(v, call_headers[0][k])
        # verify no encryption footers
        for k in call_headers[0]:
            self.assertFalse(k.lower().startswith('x-object-sysmeta-crypto-'))
    def test_POST_req(self):
        """A POST encrypts user metadata (except empty-valued items) and
        leaves sysmeta untouched; no Etag is returned."""
        body = b'FAKE APP'
        env = {'REQUEST_METHOD': 'POST',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        hdrs = {'x-object-meta-test': 'encrypt me',
                'x-object-meta-test2': '',
                'x-object-sysmeta-test': 'do not encrypt me'}
        req = Request.blank('/v1/a/c/o', environ=env, body=body, headers=hdrs)
        key = fetch_crypto_keys()['object']
        self.app.register('POST', '/v1/a/c/o', HTTPAccepted, {})
        resp = req.get_response(self.encrypter)
        self.assertEqual('202 Accepted', resp.status)
        self.assertNotIn('Etag', resp.headers)
        # verify metadata items
        self.assertEqual(1, len(self.app.calls), self.app.calls)
        self.assertEqual('POST', self.app.calls[0][0])
        req_hdrs = self.app.headers[0]
        # user meta is encrypted
        self._verify_user_metadata(req_hdrs, 'Test', 'encrypt me', key)
        # unless it had no value
        self.assertEqual('', req_hdrs['X-Object-Meta-Test2'])
        # sysmeta is not encrypted
        self.assertEqual('do not encrypt me',
                         req_hdrs['X-Object-Sysmeta-Test'])
def _test_no_user_metadata(self, method):
# verify that x-object-transient-sysmeta-crypto-meta is not set when
# there is no user metadata
env = {'REQUEST_METHOD': method,
CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
req = Request.blank('/v1/a/c/o', environ=env, body='body')
self.app.register(method, '/v1/a/c/o', HTTPAccepted, {})
resp = req.get_response(self.encrypter)
self.assertEqual('202 Accepted', resp.status)
self.assertEqual(1, len(self.app.calls), self.app.calls)
self.assertEqual(method, self.app.calls[0][0])
self.assertNotIn('x-object-transient-sysmeta-crypto-meta',
self.app.headers[0])
    def test_PUT_no_user_metadata(self):
        """PUT without user metadata emits no transient crypto meta."""
        self._test_no_user_metadata('PUT')

    def test_POST_no_user_metadata(self):
        """POST without user metadata emits no transient crypto meta."""
        self._test_no_user_metadata('POST')
    def _test_if_match(self, method, match_header_name):
        """Verify conditional-request handling for If-(None-)Match.

        The encrypter must point X-Backend-Etag-Is-At at the etag HMAC
        sysmeta and supplement the client's plaintext etags with HMAC-masked
        values for every known key secret_id, then restore the original
        request headers afterwards.
        """
        def do_test(method, plain_etags, expected_plain_etags=None):
            env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
            match_header_value = ', '.join(plain_etags)
            req = Request.blank(
                '/v1/a/c/o', environ=env, method=method,
                headers={match_header_name: match_header_value})
            app = FakeSwift()
            app.register(method, '/v1/a/c/o', HTTPOk, {})
            resp = req.get_response(encrypter.Encrypter(app, {}))
            self.assertEqual('200 OK', resp.status)
            self.assertEqual(1, len(app.calls), app.calls)
            self.assertEqual(method, app.calls[0][0])
            actual_headers = app.headers[0]
            # verify the alternate etag location has been specified
            if match_header_value and match_header_value != '*':
                self.assertIn('X-Backend-Etag-Is-At', actual_headers)
                self.assertEqual('X-Object-Sysmeta-Crypto-Etag-Mac',
                                 actual_headers['X-Backend-Etag-Is-At'])
            # verify etags have been supplemented with masked values
            self.assertIn(match_header_name, actual_headers)
            actual_etags = set(actual_headers[match_header_name].split(', '))
            # masked values for secret_id None
            key = fetch_crypto_keys()['object']
            masked_etags = [
                '"%s"' % bytes_to_wsgi(base64.b64encode(hmac.new(
                    key, wsgi_to_bytes(etag.strip('"')),
                    hashlib.sha256).digest()))
                for etag in plain_etags if etag not in ('*', '')]
            # masked values for secret_id myid
            key = fetch_crypto_keys(key_id={'secret_id': 'myid'})['object']
            masked_etags_myid = [
                '"%s"' % bytes_to_wsgi(base64.b64encode(hmac.new(
                    key, wsgi_to_bytes(etag.strip('"')),
                    hashlib.sha256).digest()))
                for etag in plain_etags if etag not in ('*', '')]
            expected_etags = set((expected_plain_etags or plain_etags) +
                                 masked_etags + masked_etags_myid)
            self.assertEqual(expected_etags, actual_etags)
            # check that the request environ was returned to original state
            self.assertEqual(set(plain_etags),
                             set(req.headers[match_header_name].split(', ')))

        do_test(method, [''])
        do_test(method, ['"an etag"'])
        do_test(method, ['"an etag"', '"another_etag"'])
        do_test(method, ['*'])
        # rfc2616 does not allow wildcard *and* etag but test it anyway
        do_test(method, ['*', '"an etag"'])
        # etags should be quoted but check we can cope if they are not
        do_test(
            method, ['*', 'an etag', 'another_etag'],
            expected_plain_etags=['*', '"an etag"', '"another_etag"'])
    def test_GET_if_match(self):
        """If-Match etags are masked on GET."""
        self._test_if_match('GET', 'If-Match')

    def test_HEAD_if_match(self):
        """If-Match etags are masked on HEAD."""
        self._test_if_match('HEAD', 'If-Match')

    def test_GET_if_none_match(self):
        """If-None-Match etags are masked on GET."""
        self._test_if_match('GET', 'If-None-Match')

    def test_HEAD_if_none_match(self):
        """If-None-Match etags are masked on HEAD."""
        self._test_if_match('HEAD', 'If-None-Match')
    def _test_existing_etag_is_at_header(self, method, match_header_name):
        """If another middleware has already set X-Backend-Etag-Is-At then
        the encrypter must append its own location rather than override it.
        """
        env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank(
            '/v1/a/c/o', environ=env, method=method,
            headers={match_header_name: "an etag",
                     'X-Backend-Etag-Is-At': 'X-Object-Sysmeta-Other-Etag'})
        self.app.register(method, '/v1/a/c/o', HTTPOk, {})
        resp = req.get_response(self.encrypter)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(1, len(self.app.calls), self.app.calls)
        self.assertEqual(method, self.app.calls[0][0])
        actual_headers = self.app.headers[0]
        self.assertIn('X-Backend-Etag-Is-At', actual_headers)
        self.assertEqual(
            'X-Object-Sysmeta-Other-Etag,X-Object-Sysmeta-Crypto-Etag-Mac',
            actual_headers['X-Backend-Etag-Is-At'])
        actual_etags = set(actual_headers[match_header_name].split(', '))
        self.assertIn('"an etag"', actual_etags)
    def test_GET_if_match_with_existing_etag_is_at_header(self):
        """Existing etag-is-at header is preserved for GET If-Match."""
        self._test_existing_etag_is_at_header('GET', 'If-Match')

    def test_HEAD_if_match_with_existing_etag_is_at_header(self):
        """Existing etag-is-at header is preserved for HEAD If-Match."""
        self._test_existing_etag_is_at_header('HEAD', 'If-Match')

    def test_GET_if_none_match_with_existing_etag_is_at_header(self):
        """Existing etag-is-at header is preserved for GET If-None-Match."""
        self._test_existing_etag_is_at_header('GET', 'If-None-Match')

    def test_HEAD_if_none_match_with_existing_etag_is_at_header(self):
        """Existing etag-is-at header is preserved for HEAD If-None-Match."""
        self._test_existing_etag_is_at_header('HEAD', 'If-None-Match')
    def _test_etag_is_at_not_duplicated(self, method):
        """Verify only one occurrence of X-Object-Sysmeta-Crypto-Etag-Mac in
        X-Backend-Etag-Is-At even when both If-Match and If-None-Match are
        present, and that both headers get masked etags appended.
        """
        key = fetch_crypto_keys()['object']
        env = {CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
        req = Request.blank(
            '/v1/a/c/o', environ=env, method=method,
            headers={'If-Match': '"an etag"',
                     'If-None-Match': '"another etag"'})
        self.app.register(method, '/v1/a/c/o', HTTPOk, {})
        resp = req.get_response(self.encrypter)
        self.assertEqual('200 OK', resp.status)
        self.assertEqual(1, len(self.app.calls), self.app.calls)
        self.assertEqual(method, self.app.calls[0][0])
        actual_headers = self.app.headers[0]
        self.assertIn('X-Backend-Etag-Is-At', actual_headers)
        self.assertEqual('X-Object-Sysmeta-Crypto-Etag-Mac',
                         actual_headers['X-Backend-Etag-Is-At'])
        self.assertIn('"%s"' % bytes_to_wsgi(base64.b64encode(
            hmac.new(key, b'an etag', hashlib.sha256).digest())),
            actual_headers['If-Match'])
        self.assertIn('"another etag"', actual_headers['If-None-Match'])
        self.assertIn('"%s"' % bytes_to_wsgi(base64.b64encode(
            hmac.new(key, b'another etag', hashlib.sha256).digest())),
            actual_headers['If-None-Match'])
    def test_GET_etag_is_at_not_duplicated(self):
        """etag-is-at value is not duplicated on GET."""
        self._test_etag_is_at_not_duplicated('GET')

    def test_HEAD_etag_is_at_not_duplicated(self):
        """etag-is-at value is not duplicated on HEAD."""
        self._test_etag_is_at_not_duplicated('HEAD')
def test_PUT_response_inconsistent_etag_is_not_replaced(self):
# if response is success but etag does not match the ciphertext md5
# then verify that we do *not* replace it with the plaintext etag
body = 'FAKE APP'
env = {'REQUEST_METHOD': 'PUT',
CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
hdrs = {'content-type': 'text/plain',
'content-length': str(len(body))}
req = Request.blank('/v1/a/c/o', environ=env, body=body, headers=hdrs)
self.app.register('PUT', '/v1/a/c/o', HTTPCreated,
{'Etag': 'not the ciphertext etag'})
resp = req.get_response(self.encrypter)
self.assertEqual('201 Created', resp.status)
self.assertEqual('not the ciphertext etag', resp.headers['Etag'])
    def test_PUT_multiseg_no_client_etag(self):
        """A chunked (multi-segment) body with no client etag is encrypted
        as a single stream."""
        body_key = os.urandom(32)
        chunks = [b'some', b'chunks', b'of data']
        body = b''.join(chunks)
        env = {'REQUEST_METHOD': 'PUT',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys,
               'wsgi.input': FileLikeIter(chunks)}
        hdrs = {'content-type': 'text/plain',
                'content-length': str(len(body))}
        req = Request.blank('/v1/a/c/o', environ=env, headers=hdrs)
        self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
        # pin the body key so the expected ciphertext is deterministic
        with mock.patch(
                'swift.common.middleware.crypto.crypto_utils.'
                'Crypto.create_random_key',
                lambda *args: body_key):
            resp = req.get_response(self.encrypter)
        self.assertEqual('201 Created', resp.status)
        # verify object is encrypted by getting direct from the app
        get_req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        self.assertEqual(encrypt(body, body_key, FAKE_IV),
                         get_req.get_response(self.app).body)
    def test_PUT_multiseg_good_client_etag(self):
        """A chunked (multi-segment) body with a matching client etag is
        accepted and encrypted."""
        body_key = os.urandom(32)
        chunks = [b'some', b'chunks', b'of data']
        body = b''.join(chunks)
        env = {'REQUEST_METHOD': 'PUT',
               CRYPTO_KEY_CALLBACK: fetch_crypto_keys,
               'wsgi.input': FileLikeIter(chunks)}
        hdrs = {'content-type': 'text/plain',
                'content-length': str(len(body)),
                'Etag': md5hex(body)}
        req = Request.blank('/v1/a/c/o', environ=env, headers=hdrs)
        self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
        # pin the body key so the expected ciphertext is deterministic
        with mock.patch(
                'swift.common.middleware.crypto.crypto_utils.'
                'Crypto.create_random_key',
                lambda *args: body_key):
            resp = req.get_response(self.encrypter)
        self.assertEqual('201 Created', resp.status)
        # verify object is encrypted by getting direct from the app
        get_req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
        self.assertEqual(encrypt(body, body_key, FAKE_IV),
                         get_req.get_response(self.app).body)
def test_PUT_multiseg_bad_client_etag(self):
chunks = [b'some', b'chunks', b'of data']
body = b''.join(chunks)
env = {'REQUEST_METHOD': 'PUT',
CRYPTO_KEY_CALLBACK: fetch_crypto_keys,
'wsgi.input': FileLikeIter(chunks)}
hdrs = {'content-type': 'text/plain',
'content-length': str(len(body)),
'Etag': 'badclientetag'}
req = Request.blank('/v1/a/c/o', environ=env, headers=hdrs)
self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
resp = req.get_response(self.encrypter)
self.assertEqual('422 Unprocessable Entity', resp.status)
def test_PUT_missing_key_callback(self):
body = b'FAKE APP'
env = {'REQUEST_METHOD': 'PUT'}
hdrs = {'content-type': 'text/plain',
'content-length': str(len(body))}
req = Request.blank('/v1/a/c/o', environ=env, body=body, headers=hdrs)
resp = req.get_response(self.encrypter)
self.assertEqual('500 Internal Error', resp.status)
self.assertIn('missing callback',
self.encrypter.logger.get_lines_for_level('error')[0])
self.assertEqual(b'Unable to retrieve encryption keys.', resp.body)
def test_PUT_error_in_key_callback(self):
    """A key callback that raises makes the PUT fail with a logged 500."""
    def explode(*args, **kwargs):
        raise Exception('Testing')

    payload = b'FAKE APP'
    headers = {'content-type': 'text/plain',
               'content-length': str(len(payload))}
    environ = {'REQUEST_METHOD': 'PUT',
               CRYPTO_KEY_CALLBACK: explode}
    request = Request.blank('/v1/a/c/o', environ=environ, body=payload,
                            headers=headers)
    response = request.get_response(self.encrypter)
    self.assertEqual('500 Internal Error', response.status)
    self.assertIn('from callback: Testing',
                  self.encrypter.logger.get_lines_for_level('error')[0])
    self.assertEqual(b'Unable to retrieve encryption keys.', response.body)
def test_PUT_encryption_override(self):
    """With the crypto override set, the body is stored unencrypted.

    Also verifies that footers installed by another middleware are
    passed through to the backing app untouched.
    """
    # set crypto override to disable encryption.
    # simulate another middleware wanting to set footers
    other_footers = {
        'Etag': 'other etag',
        'X-Object-Sysmeta-Other': 'other sysmeta',
        'X-Object-Sysmeta-Container-Update-Override-Etag':
        'other override'}
    body = b'FAKE APP'
    env = {'REQUEST_METHOD': 'PUT',
           'swift.crypto.override': True,
           'swift.callback.update_footers':
           lambda footers: footers.update(other_footers)}
    hdrs = {'content-type': 'text/plain',
            'content-length': str(len(body))}
    req = Request.blank('/v1/a/c/o', environ=env, body=body, headers=hdrs)
    self.app.register('PUT', '/v1/a/c/o', HTTPCreated, {})
    resp = req.get_response(self.encrypter)
    self.assertEqual('201 Created', resp.status)
    # verify that other middleware's footers made it to app
    req_hdrs = self.app.headers[0]
    for k, v in other_footers.items():
        self.assertEqual(v, req_hdrs[k])
    # verify object is NOT encrypted by getting direct from the app
    get_req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
    self.assertEqual(body, get_req.get_response(self.app).body)
def _test_constraints_checking(self, method):
    """Helper: assert check_metadata is invoked once for ``method``.

    The check is mocked to return a 400, which must be surfaced to the
    client, and the request headers must be what the check received.
    """
    # verify that the check_metadata function is called on PUT and POST
    body = b'FAKE APP'
    env = {'REQUEST_METHOD': method,
           CRYPTO_KEY_CALLBACK: fetch_crypto_keys}
    hdrs = {'content-type': 'text/plain',
            'content-length': str(len(body))}
    req = Request.blank('/v1/a/c/o', environ=env, body=body, headers=hdrs)
    mocked_func = 'swift.common.middleware.crypto.encrypter.check_metadata'
    with mock.patch(mocked_func) as mocked:
        mocked.side_effect = [HTTPBadRequest(b'testing')]
        resp = req.get_response(self.encrypter)
        self.assertEqual('400 Bad Request', resp.status)
        self.assertEqual(1, mocked.call_count)
        mocked.assert_called_once_with(mock.ANY, 'object')
        self.assertEqual(req.headers,
                         mocked.call_args_list[0][0][0].headers)
def test_PUT_constraints_checking(self):
    """check_metadata is enforced on PUT."""
    self._test_constraints_checking('PUT')
def test_POST_constraints_checking(self):
    """check_metadata is enforced on POST."""
    self._test_constraints_checking('POST')
def test_config_true_value_on_disable_encryption(self):
    """Every accepted truthy config value enables disable_encryption.

    Fix: the original loop rebound ``app`` to each newly constructed
    Encrypter, so iterations after the first wrapped the previous
    Encrypter rather than the base FakeSwift app. Keep the base app
    fixed so every Encrypter under test is built identically.
    """
    base_app = FakeSwift()
    # Default: encryption is enabled (disable_encryption is falsy).
    self.assertFalse(encrypter.Encrypter(base_app, {}).disable_encryption)
    for val in ('true', '1', 'yes', 'on', 't', 'y'):
        app = encrypter.Encrypter(base_app,
                                  {'disable_encryption': val})
        self.assertTrue(app.disable_encryption)
def test_PUT_app_exception(self):
    """An HTTPException raised by the wrapped app propagates unchanged."""
    app = encrypter.Encrypter(FakeAppThatExcepts(HTTPException), {})
    req = Request.blank('/', environ={'REQUEST_METHOD': 'PUT'})
    with self.assertRaises(HTTPException) as catcher:
        req.get_response(app)
    self.assertEqual(FakeAppThatExcepts.MESSAGE, catcher.exception.body)
def test_encrypt_header_val(self):
    """encrypt_header_val encrypts non-empty values and rejects empty ones.

    Returns a (b64-ciphertext, crypt_info) pair; empty string and None
    both raise ValueError.
    """
    # Prepare key and Crypto instance
    object_key = fetch_crypto_keys()['object']

    # - Normal string can be crypted
    encrypted = encrypter.encrypt_header_val(Crypto(), 'aaa', object_key)
    # sanity: return value is 2 item tuple
    self.assertEqual(2, len(encrypted))
    crypted_val, crypt_info = encrypted
    expected_crypt_val = base64.b64encode(
        encrypt(b'aaa', object_key, FAKE_IV))
    expected_crypt_info = {
        'cipher': 'AES_CTR_256', 'iv': b'This is an IV123'}
    self.assertEqual(expected_crypt_val, wsgi_to_bytes(crypted_val))
    self.assertEqual(expected_crypt_info, crypt_info)

    # - Empty string raises a ValueError for safety
    with self.assertRaises(ValueError) as cm:
        encrypter.encrypt_header_val(Crypto(), '', object_key)
    self.assertEqual('empty value is not acceptable',
                     cm.exception.args[0])

    # - None also raises a ValueError for safety
    with self.assertRaises(ValueError) as cm:
        encrypter.encrypt_header_val(Crypto(), None, object_key)
    self.assertEqual('empty value is not acceptable',
                     cm.exception.args[0])
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/crypto/test_encrypter.py |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import copy
import hashlib
import hmac
import os
import mock
import six
import unittest
from getpass import getuser
from swift.common import swob
from swift.common.middleware.crypto import keymaster
from swift.common.middleware.crypto.crypto_utils import CRYPTO_KEY_CALLBACK
from swift.common.swob import Request
from test.unit.common.middleware.helpers import FakeSwift, FakeAppThatExcepts
from test.unit.common.middleware.crypto.crypto_helpers import (
TEST_KEYMASTER_CONF)
from test.unit import tmpfile
def capture_start_response():
    """Return a WSGI ``start_response`` stub plus the list it records into.

    Each invocation of the returned callable appends its positional
    argument tuple (status, headers[, exc_info]) to the shared list.
    """
    recorded = []

    def start_response(*args):
        recorded.append(args)

    return start_response, recorded
class TestKeymaster(unittest.TestCase):
    """Tests for the keymaster middleware: key derivation, key-id versioning
    (v1/v2/v3), multiple/chained root secrets, caching, and config parsing.
    """

    def setUp(self):
        super(TestKeymaster, self).setUp()
        self.swift = FakeSwift()
        self.app = keymaster.KeyMaster(self.swift, TEST_KEYMASTER_CONF)

    def test_object_path(self):
        """Object paths yield object and container keys for every meta
        version."""
        self.verify_v3_keys_for_path(
            '/a/c/o', expected_keys=('object', 'container'))
        self.verify_v3_keys_for_path(
            '/a/c//o', expected_keys=('object', 'container'))
        self.verify_keys_for_path(
            '/a/c//o', expected_keys=('object', 'container'))
        self.verify_v1_keys_for_path(
            '/a/c//o', expected_keys=('object', 'container'))

    def test_container_path(self):
        """Container paths yield only a container key."""
        self.verify_v3_keys_for_path(
            '/a/c', expected_keys=('container',))

    def test_unicode_object_path(self):
        """Non-ASCII (WSGI-encoded) paths still yield consistent keys."""
        # NB: path is WSGI
        self.verify_v3_keys_for_path(
            '/\xe2\x98\x83/\xf0\x9f\x8c\xb4/\xf0\x9f\x8c\x8a',
            expected_keys=('object', 'container'))
        self.verify_keys_for_path(
            '/\xe2\x98\x83/\xf0\x9f\x8c\xb4/\xf0\x9f\x8c\x8a',
            expected_keys=('object', 'container'))
        self.verify_v1_keys_for_path(
            '/\xe2\x98\x83/\xf0\x9f\x8c\xb4/\xf0\x9f\x8c\x8a',
            expected_keys=('object', 'container'))

        # Double-whammy: *also* hit the os.path.join issue
        self.verify_v3_keys_for_path(
            '/\xe2\x98\x83/\xf0\x9f\x8c\xb4//\xf0\x9f\x8c\x8a',
            expected_keys=('object', 'container'))
        self.verify_keys_for_path(
            '/\xe2\x98\x83/\xf0\x9f\x8c\xb4//\xf0\x9f\x8c\x8a',
            expected_keys=('object', 'container'))
        self.verify_v1_keys_for_path(
            '/\xe2\x98\x83/\xf0\x9f\x8c\xb4//\xf0\x9f\x8c\x8a',
            expected_keys=('object', 'container'))

    def verify_v3_keys_for_path(self, wsgi_path, expected_keys, key_id=None):
        """Helper: drive PUT/POST/GET/HEAD through the app with
        meta_version_to_write='3' and check the key callback's output.

        Returns the keys from the first (PUT) request so callers can
        compare key sets across paths.
        """
        put_keys = None
        self.app.meta_version_to_write = '3'
        for method, resp_class, status in (
                ('PUT', swob.HTTPCreated, '201'),
                ('POST', swob.HTTPAccepted, '202'),
                ('GET', swob.HTTPOk, '200'),
                ('HEAD', swob.HTTPNoContent, '204')):
            resp_headers = {}
            self.swift.register(
                method, '/v1' + wsgi_path, resp_class, resp_headers, b'')
            req = Request.blank(
                '/v1' + wsgi_path, environ={'REQUEST_METHOD': method})
            start_response, calls = capture_start_response()
            self.app(req.environ, start_response)
            self.assertEqual(1, len(calls))
            self.assertTrue(calls[0][0].startswith(status))
            self.assertNotIn('swift.crypto.override', req.environ)
            self.assertIn(CRYPTO_KEY_CALLBACK, req.environ,
                          '%s not set in env' % CRYPTO_KEY_CALLBACK)
            keys = req.environ.get(CRYPTO_KEY_CALLBACK)(key_id=key_id)
            self.assertIn('id', keys)
            id = keys.pop('id')
            path = swob.wsgi_to_str(wsgi_path)
            # v3 ids always carry the native-string path
            self.assertEqual(path, id['path'])
            self.assertEqual('3', id['v'])
            keys.pop('all_ids')
            self.assertListEqual(sorted(expected_keys), sorted(keys.keys()),
                                 '%s %s got keys %r, but expected %r'
                                 % (method, path, keys.keys(), expected_keys))
            if put_keys is not None:
                # check all key sets were consistent for this path
                self.assertDictEqual(put_keys, keys)
            else:
                put_keys = keys
        self.app.meta_version_to_write = '2'  # Clean up after ourselves
        return put_keys

    def verify_keys_for_path(self, wsgi_path, expected_keys, key_id=None):
        """Helper: same as verify_v3_keys_for_path but for the default
        ('2') meta version; v2 ids carry the WSGI-string path on py3."""
        put_keys = None
        for method, resp_class, status in (
                ('PUT', swob.HTTPCreated, '201'),
                ('POST', swob.HTTPAccepted, '202'),
                ('GET', swob.HTTPOk, '200'),
                ('HEAD', swob.HTTPNoContent, '204')):
            resp_headers = {}
            self.swift.register(
                method, '/v1' + wsgi_path, resp_class, resp_headers, b'')
            req = Request.blank(
                '/v1' + wsgi_path, environ={'REQUEST_METHOD': method})
            start_response, calls = capture_start_response()
            self.app(req.environ, start_response)
            self.assertEqual(1, len(calls))
            self.assertTrue(calls[0][0].startswith(status))
            self.assertNotIn('swift.crypto.override', req.environ)
            self.assertIn(CRYPTO_KEY_CALLBACK, req.environ,
                          '%s not set in env' % CRYPTO_KEY_CALLBACK)
            keys = req.environ.get(CRYPTO_KEY_CALLBACK)(key_id=key_id)
            self.assertIn('id', keys)
            id = keys.pop('id')
            path = swob.wsgi_to_str(wsgi_path)
            if six.PY2:
                self.assertEqual(path, id['path'])
            else:
                self.assertEqual(swob.str_to_wsgi(path), id['path'])
            self.assertEqual('2', id['v'])
            keys.pop('all_ids')
            self.assertListEqual(sorted(expected_keys), sorted(keys.keys()),
                                 '%s %s got keys %r, but expected %r'
                                 % (method, path, keys.keys(), expected_keys))
            if put_keys is not None:
                # check all key sets were consistent for this path
                self.assertDictEqual(put_keys, keys)
            else:
                put_keys = keys
        return put_keys

    def verify_v1_keys_for_path(self, wsgi_path, expected_keys, key_id=None):
        """Helper: same checks for meta version '1', whose ids truncate the
        path at a double slash (the historic os.path.join bug)."""
        put_keys = None
        self.app.meta_version_to_write = '1'
        for method, resp_class, status in (
                ('PUT', swob.HTTPCreated, '201'),
                ('POST', swob.HTTPAccepted, '202'),
                ('GET', swob.HTTPOk, '200'),
                ('HEAD', swob.HTTPNoContent, '204')):
            resp_headers = {}
            self.swift.register(
                method, '/v1' + wsgi_path, resp_class, resp_headers, b'')
            req = Request.blank(
                '/v1' + wsgi_path, environ={'REQUEST_METHOD': method})
            start_response, calls = capture_start_response()
            self.app(req.environ, start_response)
            self.assertEqual(1, len(calls))
            self.assertTrue(calls[0][0].startswith(status))
            self.assertNotIn('swift.crypto.override', req.environ)
            self.assertIn(CRYPTO_KEY_CALLBACK, req.environ,
                          '%s not set in env' % CRYPTO_KEY_CALLBACK)
            keys = req.environ.get(CRYPTO_KEY_CALLBACK)(key_id=key_id)
            self.assertIn('id', keys)
            id = keys.pop('id')
            path = swob.wsgi_to_str(wsgi_path)
            if '//' in path:
                # v1 ids kept only the part after the double slash
                path = path[path.index('//') + 1:]
            if six.PY2:
                self.assertEqual(path, id['path'])
            else:
                self.assertEqual(swob.str_to_wsgi(path), id['path'])
            self.assertEqual('1', id['v'])
            keys.pop('all_ids')
            self.assertListEqual(sorted(expected_keys), sorted(keys.keys()),
                                 '%s %s got keys %r, but expected %r'
                                 % (method, path, keys.keys(), expected_keys))
            if put_keys is not None:
                # check all key sets were consistent for this path
                self.assertDictEqual(put_keys, keys)
            else:
                put_keys = keys
        self.app.meta_version_to_write = '2'  # Clean up after ourselves
        return put_keys

    def test_key_uniqueness(self):
        # a rudimentary check that different keys are made for different paths
        ref_path_parts = ('a1', 'c1', 'o1')
        path = '/' + '/'.join(ref_path_parts)
        ref_keys = self.verify_keys_for_path(
            path, expected_keys=('object', 'container'))

        # for same path and for each differing path check that keys are unique
        # when path to object or container is unique and vice-versa
        for path_parts in [(a, c, o) for a in ('a1', 'a2')
                           for c in ('c1', 'c2')
                           for o in ('o1', 'o2')]:
            path = '/' + '/'.join(path_parts)
            keys = self.verify_keys_for_path(
                path, expected_keys=('object', 'container'))
            # object keys should only be equal when complete paths are equal
            self.assertEqual(path_parts == ref_path_parts,
                             keys['object'] == ref_keys['object'],
                             'Path %s keys:\n%s\npath %s keys\n%s' %
                             (ref_path_parts, ref_keys, path_parts, keys))
            # container keys should only be equal when paths to container are
            # equal
            self.assertEqual(path_parts[:2] == ref_path_parts[:2],
                             keys['container'] == ref_keys['container'],
                             'Path %s keys:\n%s\npath %s keys\n%s' %
                             (ref_path_parts, ref_keys, path_parts, keys))

    def test_filter(self):
        """filter_factory returns a callable factory producing a callable
        app."""
        factory = keymaster.filter_factory(TEST_KEYMASTER_CONF)
        self.assertTrue(callable(factory))
        self.assertTrue(callable(factory(self.swift)))

    def test_app_exception(self):
        """Exceptions from the wrapped app propagate through the
        keymaster."""
        app = keymaster.KeyMaster(
            FakeAppThatExcepts(), TEST_KEYMASTER_CONF)
        req = Request.blank('/', environ={'REQUEST_METHOD': 'PUT'})
        start_response, _ = capture_start_response()
        self.assertRaises(Exception, app, req.environ, start_response)

    def test_missing_conf_section(self):
        """A keymaster_config_path without a [keymaster] section raises."""
        sample_conf = "[default]\nuser = %s\n" % getuser()
        with tmpfile(sample_conf) as conf_file:
            self.assertRaisesRegex(
                ValueError, 'Unable to find keymaster config section in.*',
                keymaster.KeyMaster, self.swift, {
                    'keymaster_config_path': conf_file})

    def test_root_secret(self):
        """Valid b64 secrets (bytes/str, with embedded newlines) decode to
        the configured raw secret, with or without a default secret id."""
        def do_test(dflt_id):
            for secret in (os.urandom(32), os.urandom(33), os.urandom(50)):
                encoded_secret = base64.b64encode(secret)
                self.assertIsInstance(encoded_secret, bytes)
                for conf_val in (
                        encoded_secret,
                        encoded_secret.decode('ascii'),
                        encoded_secret[:30] + b'\n' + encoded_secret[30:],
                        (encoded_secret[:30] + b'\n' +
                         encoded_secret[30:]).decode('ascii')):
                    try:
                        app = keymaster.KeyMaster(
                            self.swift, {'encryption_root_secret': conf_val,
                                         'active_root_secret_id': dflt_id,
                                         'keymaster_config_path': ''})
                        self.assertEqual(secret, app.root_secret)
                    except AssertionError as err:
                        self.fail(str(err) + ' for secret %r' % conf_val)
        do_test(None)
        do_test('')

    def test_no_root_secret(self):
        """An empty conf has no secret to load and raises ValueError."""
        with self.assertRaises(ValueError) as cm:
            keymaster.KeyMaster(self.swift, {})
        self.assertEqual('No secret loaded for active_root_secret_id None',
                         str(cm.exception))

    def test_multiple_root_secrets(self):
        """Suffixed encryption_root_secret_* options are all loaded."""
        secrets = {None: os.urandom(32),
                   '22': os.urandom(33),
                   'my_secret_id': os.urandom(50)}
        conf = {}
        for secret_id, secret in secrets.items():
            opt = ('encryption_root_secret%s' %
                   (('_%s' % secret_id) if secret_id else ''))
            conf[opt] = base64.b64encode(secret)
        app = keymaster.KeyMaster(self.swift, conf)
        self.assertEqual(secrets, app._root_secrets)
        self.assertEqual([None, '22', 'my_secret_id'], app.root_secret_ids)

    def test_chained_keymasters(self):
        """When keymasters are stacked, the inner-most active root secret
        wins, and keys for every known id remain retrievable."""
        conf_inner = {'active_root_secret_id': '22'}
        conf_inner.update(
            ('encryption_root_secret_%s' % secret_id, base64.b64encode(secret))
            for secret_id, secret in [('22', os.urandom(33)),
                                      ('my_secret_id', os.urandom(50))])
        conf_outer = {'encryption_root_secret': base64.b64encode(
            os.urandom(32))}
        app = keymaster.KeyMaster(
            keymaster.KeyMaster(self.swift, conf_inner),
            conf_outer)

        self.swift.register('GET', '/v1/a/c', swob.HTTPOk, {}, b'')
        req = Request.blank('/v1/a/c')
        start_response, calls = capture_start_response()
        app(req.environ, start_response)
        self.assertEqual(1, len(calls))
        self.assertNotIn('swift.crypto.override', req.environ)
        self.assertIn(CRYPTO_KEY_CALLBACK, req.environ,
                      '%s not set in env' % CRYPTO_KEY_CALLBACK)
        keys = copy.deepcopy(req.environ[CRYPTO_KEY_CALLBACK](key_id=None))
        self.assertIn('id', keys)
        self.assertEqual(keys.pop('id'), {
            'v': '2',
            'path': '/a/c',
            'secret_id': '22',
        })
        # Inner-most active root secret wins
        root_key = base64.b64decode(conf_inner['encryption_root_secret_22'])
        self.assertIn('container', keys)
        self.assertEqual(keys.pop('container'),
                         hmac.new(root_key, b'/a/c',
                                  digestmod=hashlib.sha256).digest())
        self.assertIn('all_ids', keys)
        all_keys = set()
        at_least_one_old_style_id = False
        for key_id in keys.pop('all_ids'):
            # Can get key material for each key_id
            all_keys.add(req.environ[CRYPTO_KEY_CALLBACK](
                key_id=key_id)['container'])

            if 'secret_id' in key_id:
                self.assertIn(key_id.pop('secret_id'), {'22', 'my_secret_id'})
            else:
                at_least_one_old_style_id = True
            self.assertEqual(key_id, {
                'path': '/a/c',
                'v': '2',
            })
        self.assertTrue(at_least_one_old_style_id)
        self.assertEqual(len(all_keys), 3)
        self.assertFalse(keys)

        # Also all works for objects
        self.swift.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, b'')
        req = Request.blank('/v1/a/c/o')
        start_response, calls = capture_start_response()
        app(req.environ, start_response)
        self.assertEqual(1, len(calls))
        self.assertNotIn('swift.crypto.override', req.environ)
        self.assertIn(CRYPTO_KEY_CALLBACK, req.environ,
                      '%s not set in env' % CRYPTO_KEY_CALLBACK)
        keys = req.environ.get(CRYPTO_KEY_CALLBACK)(key_id=None)
        self.assertIn('id', keys)
        self.assertEqual(keys.pop('id'), {
            'v': '2',
            'path': '/a/c/o',
            'secret_id': '22',
        })
        root_key = base64.b64decode(conf_inner['encryption_root_secret_22'])
        self.assertIn('container', keys)
        self.assertEqual(keys.pop('container'),
                         hmac.new(root_key, b'/a/c',
                                  digestmod=hashlib.sha256).digest())
        self.assertIn('object', keys)
        self.assertEqual(keys.pop('object'),
                         hmac.new(root_key, b'/a/c/o',
                                  digestmod=hashlib.sha256).digest())
        self.assertIn('all_ids', keys)
        at_least_one_old_style_id = False
        for key_id in keys.pop('all_ids'):
            if 'secret_id' not in key_id:
                at_least_one_old_style_id = True
            else:
                self.assertIn(key_id.pop('secret_id'), {'22', 'my_secret_id'})
            self.assertEqual(key_id, {
                'path': '/a/c/o',
                'v': '2',
            })
        self.assertTrue(at_least_one_old_style_id)
        # NOTE(review): all_keys still holds the values gathered for the
        # container request above; this re-checks that same set rather than
        # one rebuilt for the object request -- confirm whether the object
        # loop was meant to repopulate it.
        self.assertEqual(len(all_keys), 3)
        self.assertFalse(keys)

    def test_multiple_root_secrets_with_invalid_secret(self):
        """A suffixed secret that is too short raises a descriptive error."""
        conf = {'encryption_root_secret': base64.b64encode(os.urandom(32)),
                # too short...
                'encryption_root_secret_22': base64.b64encode(os.urandom(31))}
        with self.assertRaises(ValueError) as err:
            keymaster.KeyMaster(self.swift, conf)
        self.assertEqual(
            'encryption_root_secret_22 option in proxy-server.conf '
            'must be a base64 encoding of at least 32 raw bytes',
            str(err.exception))

    def test_multiple_root_secrets_with_invalid_id(self):
        """Malformed secret option names are rejected."""
        def do_test(bad_option):
            conf = {'encryption_root_secret': base64.b64encode(os.urandom(32)),
                    bad_option: base64.b64encode(os.urandom(32))}
            with self.assertRaises(ValueError) as err:
                keymaster.KeyMaster(self.swift, conf)
            self.assertEqual(
                'Malformed root secret option name %s' % bad_option,
                str(err.exception))
        do_test('encryption_root_secret1')
        do_test('encryption_root_secret123')
        do_test('encryption_root_secret_')

    def test_multiple_root_secrets_missing_active_root_secret_id(self):
        """The active_root_secret_id must name a loaded secret."""
        conf = {'encryption_root_secret_22': base64.b64encode(os.urandom(32))}
        with self.assertRaises(ValueError) as err:
            keymaster.KeyMaster(self.swift, conf)
        self.assertEqual(
            'No secret loaded for active_root_secret_id None',
            str(err.exception))

        conf = {'encryption_root_secret_22': base64.b64encode(os.urandom(32)),
                'active_root_secret_id': 'missing'}
        with self.assertRaises(ValueError) as err:
            keymaster.KeyMaster(self.swift, conf)
        self.assertEqual(
            'No secret loaded for active_root_secret_id missing',
            str(err.exception))

    def test_correct_root_secret_used(self):
        """Keys are derived from the configured/requested root secret."""
        secrets = {None: os.urandom(32),
                   '22': os.urandom(33),
                   'my_secret_id': os.urandom(50)}

        # no active_root_secret_id configured
        conf = {}
        for secret_id, secret in secrets.items():
            opt = ('encryption_root_secret%s' %
                   (('_%s' % secret_id) if secret_id else ''))
            conf[opt] = base64.b64encode(secret)
        self.app = keymaster.KeyMaster(self.swift, conf)
        keys = self.verify_keys_for_path('/a/c/o', ('container', 'object'))
        expected_keys = {
            'container': hmac.new(secrets[None], b'/a/c',
                                  digestmod=hashlib.sha256).digest(),
            'object': hmac.new(secrets[None], b'/a/c/o',
                               digestmod=hashlib.sha256).digest()}
        self.assertEqual(expected_keys, keys)

        # active_root_secret_id configured
        conf['active_root_secret_id'] = '22'
        self.app = keymaster.KeyMaster(self.swift, conf)
        keys = self.verify_keys_for_path('/a/c/o', ('container', 'object'))
        expected_keys = {
            'container': hmac.new(secrets['22'], b'/a/c',
                                  digestmod=hashlib.sha256).digest(),
            'object': hmac.new(secrets['22'], b'/a/c/o',
                               digestmod=hashlib.sha256).digest()}
        self.assertEqual(expected_keys, keys)

        # secret_id passed to fetch_crypto_keys callback
        for secret_id in ('my_secret_id', None):
            keys = self.verify_keys_for_path(
                '/a/c/o', ('container', 'object'),
                key_id={'secret_id': secret_id, 'v': '2', 'path': '/a/c/o'})
            expected_keys = {
                'container': hmac.new(secrets[secret_id], b'/a/c',
                                      digestmod=hashlib.sha256).digest(),
                'object': hmac.new(secrets[secret_id], b'/a/c/o',
                                   digestmod=hashlib.sha256).digest()}
            self.assertEqual(expected_keys, keys)

    def test_keys_cached(self):
        """fetch_crypto_keys caches derived keys per (path, secret_id)."""
        secrets = {None: os.urandom(32),
                   '22': os.urandom(33),
                   'my_secret_id': os.urandom(50)}
        conf = {}
        for secret_id, secret in secrets.items():
            opt = ('encryption_root_secret%s' %
                   (('_%s' % secret_id) if secret_id else ''))
            conf[opt] = base64.b64encode(secret)
        conf['active_root_secret_id'] = '22'
        self.app = keymaster.KeyMaster(self.swift, conf)
        orig_create_key = self.app.create_key
        calls = []

        def mock_create_key(path, secret_id=None):
            calls.append((path, secret_id))
            return orig_create_key(path, secret_id)

        context = keymaster.KeyMasterContext(self.app, 'a', 'c', 'o')
        with mock.patch.object(self.app, 'create_key', mock_create_key):
            keys = context.fetch_crypto_keys()
        expected_keys = {
            'container': hmac.new(secrets['22'], b'/a/c',
                                  digestmod=hashlib.sha256).digest(),
            'object': hmac.new(secrets['22'], b'/a/c/o',
                               digestmod=hashlib.sha256).digest(),
            'id': {'path': '/a/c/o', 'secret_id': '22', 'v': '2'},
            'all_ids': [
                {'path': '/a/c/o', 'v': '2'},
                {'path': '/a/c/o', 'secret_id': '22', 'v': '2'},
                {'path': '/a/c/o', 'secret_id': 'my_secret_id', 'v': '2'}]}
        self.assertEqual(expected_keys, keys)
        self.assertEqual([('/a/c', '22'), ('/a/c/o', '22')], calls)

        with mock.patch.object(self.app, 'create_key', mock_create_key):
            keys = context.fetch_crypto_keys()
        # no more calls to create_key
        self.assertEqual([('/a/c', '22'), ('/a/c/o', '22')], calls)
        self.assertEqual(expected_keys, keys)

        with mock.patch.object(self.app, 'create_key', mock_create_key):
            keys = context.fetch_crypto_keys(key_id={
                'secret_id': None, 'v': '2', 'path': '/a/c/o'})
        expected_keys = {
            'container': hmac.new(secrets[None], b'/a/c',
                                  digestmod=hashlib.sha256).digest(),
            'object': hmac.new(secrets[None], b'/a/c/o',
                               digestmod=hashlib.sha256).digest(),
            'id': {'path': '/a/c/o', 'v': '2'},
            'all_ids': [
                {'path': '/a/c/o', 'v': '2'},
                {'path': '/a/c/o', 'secret_id': '22', 'v': '2'},
                {'path': '/a/c/o', 'secret_id': 'my_secret_id', 'v': '2'}]}
        self.assertEqual(expected_keys, keys)
        self.assertEqual([('/a/c', '22'), ('/a/c/o', '22'),
                          ('/a/c', None), ('/a/c/o', None)],
                         calls)

    def test_v1_keys(self):
        """v1 key ids derive keys from the stored (possibly truncated)
        path and always use the None secret."""
        secrets = {None: os.urandom(32),
                   '22': os.urandom(33)}
        conf = {}
        for secret_id, secret in secrets.items():
            opt = ('encryption_root_secret%s' %
                   (('_%s' % secret_id) if secret_id else ''))
            conf[opt] = base64.b64encode(secret)
        conf['active_root_secret_id'] = '22'
        self.app = keymaster.KeyMaster(self.swift, conf)
        orig_create_key = self.app.create_key
        calls = []

        def mock_create_key(path, secret_id=None):
            calls.append((path, secret_id))
            return orig_create_key(path, secret_id)

        context = keymaster.KeyMasterContext(self.app, 'a', 'c', 'o')
        for version in ('1', '2', '3'):
            with mock.patch.object(self.app, 'create_key', mock_create_key):
                keys = context.fetch_crypto_keys(key_id={
                    'v': version, 'path': '/a/c/o'})
            expected_keys = {
                'container': hmac.new(secrets[None], b'/a/c',
                                      digestmod=hashlib.sha256).digest(),
                'object': hmac.new(secrets[None], b'/a/c/o',
                                   digestmod=hashlib.sha256).digest(),
                'id': {'path': '/a/c/o', 'v': version},
                'all_ids': [
                    {'path': '/a/c/o', 'v': version},
                    {'path': '/a/c/o', 'secret_id': '22', 'v': version}]}
            self.assertEqual(expected_keys, keys)
            self.assertEqual([('/a/c', None), ('/a/c/o', None)], calls)
            del calls[:]

        context = keymaster.KeyMasterContext(self.app, 'a', 'c', '/o')
        with mock.patch.object(self.app, 'create_key', mock_create_key):
            keys = context.fetch_crypto_keys(key_id={
                'v': '1', 'path': '/o'})
        expected_keys = {
            'container': hmac.new(secrets[None], b'/a/c',
                                  digestmod=hashlib.sha256).digest(),
            'object': hmac.new(secrets[None], b'/o',
                               digestmod=hashlib.sha256).digest(),
            'id': {'path': '/o', 'v': '1'},
            'all_ids': [
                {'path': '/o', 'v': '1'},
                {'path': '/o', 'secret_id': '22', 'v': '1'}]}
        self.assertEqual(expected_keys, keys)
        self.assertEqual([('/a/c', None), ('/o', None)], calls)
        del calls[:]

        context = keymaster.KeyMasterContext(self.app, 'a', 'c', '/o')
        with mock.patch.object(self.app, 'create_key', mock_create_key):
            keys = context.fetch_crypto_keys(key_id={
                'v': '2', 'path': '/a/c//o'})
        expected_keys = {
            'container': hmac.new(secrets[None], b'/a/c',
                                  digestmod=hashlib.sha256).digest(),
            'object': hmac.new(secrets[None], b'/a/c//o',
                               digestmod=hashlib.sha256).digest(),
            'id': {'path': '/a/c//o', 'v': '2'},
            'all_ids': [
                {'path': '/a/c//o', 'v': '2'},
                {'path': '/a/c//o', 'secret_id': '22', 'v': '2'}]}
        self.assertEqual(expected_keys, keys)
        self.assertEqual([('/a/c', None), ('/a/c//o', None)], calls)

    def test_v1_keys_with_weird_paths(self):
        """Stored key-id paths win over the request path when they differ."""
        secrets = {None: os.urandom(32),
                   '22': os.urandom(33)}
        conf = {}
        for secret_id, secret in secrets.items():
            opt = ('encryption_root_secret%s' %
                   (('_%s' % secret_id) if secret_id else ''))
            conf[opt] = base64.b64encode(secret)
        conf['active_root_secret_id'] = '22'
        self.app = keymaster.KeyMaster(self.swift, conf)
        orig_create_key = self.app.create_key
        calls = []

        def mock_create_key(path, secret_id=None):
            calls.append((path, secret_id))
            return orig_create_key(path, secret_id)

        # request path doesn't match stored path -- this could happen if you
        # misconfigured your proxy to have copy right of encryption
        context = keymaster.KeyMasterContext(self.app, 'a', 'not-c', 'not-o')
        for version in ('1', '2', '3'):
            with mock.patch.object(self.app, 'create_key', mock_create_key):
                keys = context.fetch_crypto_keys(key_id={
                    'v': version, 'path': '/a/c/o'})
            expected_keys = {
                'container': hmac.new(secrets[None], b'/a/c',
                                      digestmod=hashlib.sha256).digest(),
                'object': hmac.new(secrets[None], b'/a/c/o',
                                   digestmod=hashlib.sha256).digest(),
                'id': {'path': '/a/c/o', 'v': version},
                'all_ids': [
                    {'path': '/a/c/o', 'v': version},
                    {'path': '/a/c/o', 'secret_id': '22', 'v': version}]}
            self.assertEqual(expected_keys, keys)
            self.assertEqual([('/a/c', None), ('/a/c/o', None)], calls)
            del calls[:]

        context = keymaster.KeyMasterContext(
            self.app, 'not-a', 'not-c', '/not-o')
        with mock.patch.object(self.app, 'create_key', mock_create_key):
            keys = context.fetch_crypto_keys(key_id={
                'v': '1', 'path': '/o'})
        expected_keys = {
            'container': hmac.new(secrets[None], b'/not-a/not-c',
                                  digestmod=hashlib.sha256).digest(),
            'object': hmac.new(secrets[None], b'/o',
                               digestmod=hashlib.sha256).digest(),
            'id': {'path': '/o', 'v': '1'},
            'all_ids': [
                {'path': '/o', 'v': '1'},
                {'path': '/o', 'secret_id': '22', 'v': '1'}]}
        self.assertEqual(expected_keys, keys)
        self.assertEqual([('/not-a/not-c', None), ('/o', None)], calls)
        del calls[:]

        context = keymaster.KeyMasterContext(
            self.app, 'not-a', 'not-c', '/not-o')
        with mock.patch.object(self.app, 'create_key', mock_create_key):
            keys = context.fetch_crypto_keys(key_id={
                'v': '2', 'path': '/a/c//o'})
        expected_keys = {
            'container': hmac.new(secrets[None], b'/a/c',
                                  digestmod=hashlib.sha256).digest(),
            'object': hmac.new(secrets[None], b'/a/c//o',
                               digestmod=hashlib.sha256).digest(),
            'id': {'path': '/a/c//o', 'v': '2'},
            'all_ids': [
                {'path': '/a/c//o', 'v': '2'},
                {'path': '/a/c//o', 'secret_id': '22', 'v': '2'}]}
        self.assertEqual(expected_keys, keys)
        self.assertEqual([('/a/c', None), ('/a/c//o', None)], calls)

    def test_v2_keys(self):
        """Non-ASCII paths: v2 ids may carry a mojibake (latin-1) path on
        py3; v3 ids always carry the true path and are trusted as-is."""
        secrets = {None: os.urandom(32),
                   '22': os.urandom(33)}
        conf = {}
        for secret_id, secret in secrets.items():
            opt = ('encryption_root_secret%s' %
                   (('_%s' % secret_id) if secret_id else ''))
            conf[opt] = base64.b64encode(secret)
        conf['active_root_secret_id'] = '22'
        self.app = keymaster.KeyMaster(self.swift, conf)
        orig_create_key = self.app.create_key
        calls = []

        def mock_create_key(path, secret_id=None):
            calls.append((path, secret_id))
            return orig_create_key(path, secret_id)

        container = u'\N{SNOWMAN}'
        obj = u'\N{SNOWFLAKE}'
        if six.PY2:
            container = container.encode('utf-8')
            obj = obj.encode('utf-8')
        good_con_path = '/a/%s' % container
        good_path = '/a/%s/%s' % (container, obj)
        if six.PY2:
            mangled_con_path = ('/a/%s' % container).decode(
                'latin-1').encode('utf-8')
            mangled_path = ('/a/%s/%s' % (
                container, obj)).decode('latin-1').encode('utf-8')
        else:
            mangled_con_path = ('/a/%s' % container).encode(
                'utf-8').decode('latin-1')
            mangled_path = ('/a/%s/%s' % (
                container, obj)).encode('utf-8').decode('latin-1')

        context = keymaster.KeyMasterContext(self.app, 'a', container, obj)
        for version in ('1', '2', '3'):
            with mock.patch.object(self.app, 'create_key', mock_create_key):
                keys = context.fetch_crypto_keys(key_id={
                    'v': version, 'path': good_path})
            key_id_path = (good_path if version == '3' or six.PY2
                           else mangled_path)
            expected_keys = {
                'container': hmac.new(secrets[None], b'/a/\xe2\x98\x83',
                                      digestmod=hashlib.sha256).digest(),
                'object': hmac.new(
                    secrets[None], b'/a/\xe2\x98\x83/\xe2\x9d\x84',
                    digestmod=hashlib.sha256).digest(),
                'id': {'path': key_id_path, 'v': version},
                'all_ids': [
                    {'path': key_id_path, 'v': version},
                    {'path': key_id_path, 'secret_id': '22', 'v': version}]}
            self.assertEqual(expected_keys, keys)
            self.assertEqual([(good_con_path, None), (good_path, None)], calls)
            del calls[:]

        context = keymaster.KeyMasterContext(self.app, 'a', container, obj)
        for version in ('1', '2'):
            with mock.patch.object(self.app, 'create_key', mock_create_key):
                keys = context.fetch_crypto_keys(key_id={
                    'v': version, 'path': mangled_path})
            key_id_path = (good_path if six.PY2 else mangled_path)
            expected_keys = {
                'container': hmac.new(secrets[None], b'/a/\xe2\x98\x83',
                                      digestmod=hashlib.sha256).digest(),
                'object': hmac.new(
                    secrets[None], b'/a/\xe2\x98\x83/\xe2\x9d\x84',
                    digestmod=hashlib.sha256).digest(),
                'id': {'path': key_id_path, 'v': version},
                'all_ids': [
                    {'path': key_id_path, 'v': version},
                    {'path': key_id_path, 'secret_id': '22', 'v': version}]}
            self.assertEqual(expected_keys, keys)
            self.assertEqual([(good_con_path, None), (good_path, None)], calls)
            del calls[:]

        # If v3, we know to trust the meta -- presumably, data was PUT with
        # the mojibake path then COPYed to the right path (but with bad
        # pipeline placement for copy)
        with mock.patch.object(self.app, 'create_key', mock_create_key):
            keys = context.fetch_crypto_keys(key_id={
                'v': '3', 'path': mangled_path})
        expected_keys = {
            'container': hmac.new(
                secrets[None], b'/a/\xc3\xa2\xc2\x98\xc2\x83',
                digestmod=hashlib.sha256).digest(),
            'object': hmac.new(
                secrets[None],
                b'/a/\xc3\xa2\xc2\x98\xc2\x83/\xc3\xa2\xc2\x9d\xc2\x84',
                digestmod=hashlib.sha256).digest(),
            'id': {'path': mangled_path, 'v': '3'},
            'all_ids': [
                {'path': mangled_path, 'v': '3'},
                {'path': mangled_path, 'secret_id': '22', 'v': '3'}]}
        self.assertEqual(expected_keys, keys)
        self.assertEqual([(mangled_con_path, None), (mangled_path, None)],
                         calls)
        del calls[:]

    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    def test_keymaster_config_path(self, mock_readconf):
        """Secrets loaded via keymaster_config_path accept the same b64
        forms (bytes/str, embedded newlines/CRLF) as inline config."""
        for secret in (os.urandom(32), os.urandom(33), os.urandom(50)):
            enc_secret = base64.b64encode(secret)
            self.assertIsInstance(enc_secret, bytes)
            for conf_val in (enc_secret, enc_secret.decode('ascii'),
                             enc_secret[:30] + b'\n' + enc_secret[30:],
                             enc_secret[:30] + b'\r\n' + enc_secret[30:],
                             (enc_secret[:30] + b'\n' +
                              enc_secret[30:]).decode('ascii'),
                             (enc_secret[:30] + b'\r\n' +
                              enc_secret[30:]).decode('ascii')):
                mock_readconf.reset_mock()
                mock_readconf.return_value = {
                    'encryption_root_secret': conf_val}
                app = keymaster.KeyMaster(self.swift, {
                    'keymaster_config_path': '/some/path'})
                try:
                    self.assertEqual(secret, app.root_secret)
                    self.assertEqual(mock_readconf.mock_calls, [
                        mock.call('/some/path', 'keymaster')])
                except AssertionError as err:
                    self.fail(str(err) + ' for secret %r' % secret)

    def test_invalid_root_secret(self):
        """Short, non-b64, badly padded, or non-string secrets all raise."""
        for secret in (base64.b64encode(os.urandom(31)),  # too short
                       base64.b64encode(os.urandom(31)).decode('ascii'),
                       u'a' * 44 + u'????', b'a' * 44 + b'????',  # not base64
                       u'a' * 45, b'a' * 45,  # bad padding
                       99, None):
            conf = {'encryption_root_secret': secret}
            try:
                with self.assertRaises(ValueError) as err:
                    keymaster.KeyMaster(self.swift, conf)
                self.assertEqual(
                    'encryption_root_secret option in proxy-server.conf '
                    'must be a base64 encoding of at least 32 raw bytes',
                    str(err.exception))
            except AssertionError as err:
                self.fail(str(err) + ' for conf %s' % str(conf))

    @mock.patch('swift.common.middleware.crypto.keymaster.readconf')
    def test_root_secret_path_invalid_secret(self, mock_readconf):
        """Invalid secrets from an external config file name that file in
        the error message."""
        for secret in (base64.b64encode(os.urandom(31)),  # too short
                       base64.b64encode(os.urandom(31)).decode('ascii'),
                       u'a' * 44 + u'????', b'a' * 44 + b'????',  # not base64
                       u'a' * 45, b'a' * 45,  # bad padding
                       99, None):
            mock_readconf.reset_mock()
            mock_readconf.return_value = {'encryption_root_secret': secret}

            try:
                with self.assertRaises(ValueError) as err:
                    keymaster.KeyMaster(self.swift, {
                        'keymaster_config_path': '/some/other/path'})
                self.assertEqual(
                    'encryption_root_secret option in /some/other/path '
                    'must be a base64 encoding of at least 32 raw bytes',
                    str(err.exception))
                self.assertEqual(mock_readconf.mock_calls, [
                    mock.call('/some/other/path', 'keymaster')])
            except AssertionError as err:
                self.fail(str(err) + ' for secret %r' % secret)

    def test_can_only_configure_secret_in_one_place(self):
        """keymaster_config_path is mutually exclusive with inline secret
        options."""
        def do_test(conf):
            with self.assertRaises(ValueError) as err:
                keymaster.KeyMaster(self.swift, conf)
            expected_message = ('keymaster_config_path is set, but there are '
                                'other config options specified:')
            self.assertTrue(str(err.exception).startswith(expected_message),
                            "Error message does not start with '%s'" %
                            expected_message)

        conf = {'encryption_root_secret': 'a' * 44,
                'keymaster_config_path': '/etc/swift/keymaster.conf'}
        do_test(conf)
        conf = {'encryption_root_secret_1': 'a' * 44,
                'keymaster_config_path': '/etc/swift/keymaster.conf'}
        do_test(conf)
        conf = {'encryption_root_secret_': 'a' * 44,
                'keymaster_config_path': '/etc/swift/keymaster.conf'}
        do_test(conf)
        conf = {'active_root_secret_id': '1',
                'keymaster_config_path': '/etc/swift/keymaster.conf'}
        do_test(conf)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/crypto/test_keymaster.py |
# -*- coding: utf-8 -*-
# Copyright (c) 2018 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import mock
import os
import unittest
from tempfile import mkdtemp
from textwrap import dedent
from shutil import rmtree
import sys
# Stub out the optional kmip package (and submodules) before importing
# kmip_keymaster, so these tests do not require PyKMIP to be installed.
sys.modules['kmip'] = mock.Mock()
sys.modules['kmip.pie'] = mock.Mock()
sys.modules['kmip.pie.client'] = mock.Mock()
from swift.common.middleware.crypto import kmip_keymaster
# Patch target: the client class as referenced from the keymaster module.
KMIP_CLIENT_CLASS = \
    'swift.common.middleware.crypto.kmip_keymaster.ProxyKmipClient'
class MockProxyKmipClient(object):
    """Minimal stand-in for kmip.pie.client.ProxyKmipClient.

    Serves secrets from the supplied mapping and records the constructor
    kwargs plus every ``get`` request in the shared ``calls`` list.
    """

    def __init__(self, secrets, calls, kwargs):
        self.secrets = secrets
        self.calls = calls
        self._record('__init__', kwargs)

    def _record(self, name, arg):
        # append to the caller-shared list so tests can assert call order
        self.calls.append((name, arg))

    def get(self, uid):
        self._record('get', uid)
        return self.secrets[uid]

    def __enter__(self):
        # usable as a context manager, like the real client
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # nothing to clean up
        pass
def create_secret(algorithm_name, length, value):
    """Build a mock KMIP secret exposing the given algorithm name,
    cryptographic length and raw value."""
    algo = mock.MagicMock()
    algo.name = algorithm_name
    return mock.MagicMock(cryptographic_algorithm=algo,
                          cryptographic_length=length,
                          value=value)
def create_mock_client(secrets, calls):
    """Return a factory producing MockProxyKmipClient instances.

    The factory rejects positional arguments, mirroring how the real
    client is expected to be constructed with keyword arguments only.
    """
    def factory(*args, **kwargs):
        if args:
            raise Exception('unexpected args provided: %r' % (args,))
        return MockProxyKmipClient(secrets, calls, kwargs)

    return factory
class InMemoryHandler(logging.Handler):
    """Logging handler that simply accumulates record messages in a list."""

    def __init__(self):
        # collect raw record.msg values for later inspection
        self.messages = []
        super(InMemoryHandler, self).__init__()

    def handle(self, record):
        self.messages.append(record.msg)
class TestKmipKeymaster(unittest.TestCase):
    """Exercise KmipKeyMaster configuration loading and validation against
    a mocked ProxyKmipClient (the real kmip package is stubbed out at
    module level), asserting the exact sequence of client calls made.
    """

    def setUp(self):
        # scratch dir for on-disk keymaster config files
        self.tempdir = mkdtemp()

    def tearDown(self):
        rmtree(self.tempdir)

    def test_config_in_filter_section(self):
        """A single key_id in the proxy conf filter section is loaded as
        the sole (default) root secret."""
        conf = {'__file__': '/etc/swift/proxy-server.conf',
                '__name__': 'kmip_keymaster',
                'key_id': '1234'}
        secrets = {'1234': create_secret('AES', 256, b'x' * 32)}
        calls = []
        with mock.patch(KMIP_CLIENT_CLASS, create_mock_client(secrets, calls)):
            km = kmip_keymaster.filter_factory(conf)(None)
        self.assertEqual({None: b'x' * 32}, km._root_secrets)
        self.assertEqual(None, km.active_secret_id)
        self.assertIsNone(km.keymaster_config_path)
        # the client is configured from the proxy conf's filter section
        self.assertEqual(calls, [
            ('__init__', {'config_file': '/etc/swift/proxy-server.conf',
                          'config': 'filter:kmip_keymaster'}),
            ('get', '1234'),
        ])

    def test_multikey_config_in_filter_section(self):
        """Multiple key_id_<secret_id> options load multiple root secrets;
        active_root_secret_id selects the active one."""
        conf = {'__file__': '/etc/swift/proxy-server.conf',
                '__name__': 'kmip-keymaster',
                'key_id': '1234',
                'key_id_xyzzy': 'foobar',
                'key_id_alt_secret_id': 'foobar',
                'active_root_secret_id': 'xyzzy'}
        secrets = {'1234': create_secret('AES', 256, b'x' * 32),
                   'foobar': create_secret('AES', 256, b'y' * 32)}
        calls = []
        with mock.patch(KMIP_CLIENT_CLASS, create_mock_client(secrets, calls)):
            km = kmip_keymaster.KmipKeyMaster(None, conf)
        self.assertEqual({None: b'x' * 32, 'xyzzy': b'y' * 32,
                          'alt_secret_id': b'y' * 32},
                         km._root_secrets)
        self.assertEqual('xyzzy', km.active_secret_id)
        self.assertIsNone(km.keymaster_config_path)
        # note: two secret ids share one KMIP uid, but it is fetched twice
        self.assertEqual(calls, [
            ('__init__', {'config_file': '/etc/swift/proxy-server.conf',
                          'config': 'filter:kmip-keymaster'}),
            ('get', '1234'),
            ('get', 'foobar'),
        ])

    def test_bad_active_key(self):
        """An active_root_secret_id with no matching key_id_* is an error."""
        conf = {'__file__': '/etc/swift/proxy-server.conf',
                '__name__': 'kmip_keymaster',
                'key_id': '1234',
                'key_id_xyzzy': 'foobar',
                'active_root_secret_id': 'unknown'}
        secrets = {'1234': create_secret('AES', 256, b'x' * 32),
                   'foobar': create_secret('AES', 256, b'y' * 32)}
        calls = []
        with mock.patch(KMIP_CLIENT_CLASS,
                        create_mock_client(secrets, calls)), \
                self.assertRaises(ValueError) as raised:
            kmip_keymaster.KmipKeyMaster(None, conf)
        self.assertEqual('No secret loaded for active_root_secret_id unknown',
                         str(raised.exception))

    def test_config_in_separate_file(self):
        """keymaster_config_path redirects config loading to a standalone
        keymaster conf file."""
        km_conf = """
        [kmip_keymaster]
        key_id = 4321
        """
        km_config_file = os.path.join(self.tempdir, 'km.conf')
        with open(km_config_file, 'wt') as fd:
            fd.write(dedent(km_conf))
        conf = {'__file__': '/etc/swift/proxy-server.conf',
                '__name__': 'keymaster-kmip',
                'keymaster_config_path': km_config_file}
        secrets = {'4321': create_secret('AES', 256, b'x' * 32)}
        calls = []
        with mock.patch(KMIP_CLIENT_CLASS, create_mock_client(secrets, calls)):
            km = kmip_keymaster.KmipKeyMaster(None, conf)
        self.assertEqual({None: b'x' * 32}, km._root_secrets)
        self.assertEqual(None, km.active_secret_id)
        self.assertEqual(km_config_file, km.keymaster_config_path)
        # client must be configured from the separate file's section
        self.assertEqual(calls, [
            ('__init__', {'config_file': km_config_file,
                          'config': 'kmip_keymaster'}),
            ('get', '4321')])

    def test_multikey_config_in_separate_file(self):
        """Multiple key ids and active_root_secret_id also work from a
        standalone keymaster conf file."""
        km_conf = """
        [kmip_keymaster]
        key_id = 4321
        key_id_secret_id = another id
        active_root_secret_id = secret_id
        """
        km_config_file = os.path.join(self.tempdir, 'km.conf')
        with open(km_config_file, 'wt') as fd:
            fd.write(dedent(km_conf))
        conf = {'__file__': '/etc/swift/proxy-server.conf',
                '__name__': 'kmip_keymaster',
                'keymaster_config_path': km_config_file}
        secrets = {'4321': create_secret('AES', 256, b'x' * 32),
                   'another id': create_secret('AES', 256, b'y' * 32)}
        calls = []
        with mock.patch(KMIP_CLIENT_CLASS, create_mock_client(secrets, calls)):
            km = kmip_keymaster.KmipKeyMaster(None, conf)
        self.assertEqual({None: b'x' * 32, 'secret_id': b'y' * 32},
                         km._root_secrets)
        self.assertEqual('secret_id', km.active_secret_id)
        self.assertEqual(km_config_file, km.keymaster_config_path)
        self.assertEqual(calls, [
            ('__init__', {'config_file': km_config_file,
                          'config': 'kmip_keymaster'}),
            ('get', '4321'),
            ('get', 'another id')])

    def test_proxy_server_conf_dir(self):
        """Loading directly from a conf.d directory is rejected, but a conf
        file inside the dir may serve as keymaster_config_path."""
        proxy_server_conf_dir = os.path.join(self.tempdir, 'proxy_server.d')
        os.mkdir(proxy_server_conf_dir)
        # KmipClient can't read conf from a dir, so check that is caught early
        conf = {'__file__': proxy_server_conf_dir,
                '__name__': 'kmip_keymaster',
                'key_id': '789'}
        with self.assertRaises(ValueError) as cm:
            kmip_keymaster.KmipKeyMaster(None, conf)
        self.assertIn('config cannot be read from conf dir', str(cm.exception))
        # ...but a conf file in a conf dir could point back to itself for the
        # KmipClient config
        km_config_file = os.path.join(proxy_server_conf_dir, '40.conf')
        km_conf = """
        [filter:kmip_keymaster]
        keymaster_config_file = %s

        [kmip_keymaster]
        key_id = 789
        """ % km_config_file
        with open(km_config_file, 'wt') as fd:
            fd.write(dedent(km_conf))
        conf = {'__file__': proxy_server_conf_dir,
                '__name__': 'kmip_keymaster',
                'keymaster_config_path': km_config_file}
        secrets = {'789': create_secret('AES', 256, b'x' * 32)}
        calls = []
        with mock.patch(KMIP_CLIENT_CLASS, create_mock_client(secrets, calls)):
            km = kmip_keymaster.KmipKeyMaster(None, conf)
        self.assertEqual({None: b'x' * 32}, km._root_secrets)
        self.assertEqual(None, km.active_secret_id)
        self.assertEqual(km_config_file, km.keymaster_config_path)
        self.assertEqual(calls, [
            ('__init__', {'config_file': km_config_file,
                          # NB: no "filter:"
                          'config': 'kmip_keymaster'}),
            ('get', '789')])

    def test_bad_key_length(self):
        """Keys that are not 256 bits long are rejected."""
        conf = {'__file__': '/etc/swift/proxy-server.conf',
                '__name__': 'kmip_keymaster',
                'key_id': '1234'}
        secrets = {'1234': create_secret('AES', 128, b'x' * 16)}
        calls = []
        with mock.patch(KMIP_CLIENT_CLASS,
                        create_mock_client(secrets, calls)), \
                self.assertRaises(ValueError) as cm:
            kmip_keymaster.KmipKeyMaster(None, conf)
        self.assertIn('Expected key 1234 to be an AES-256 key',
                      str(cm.exception))
        self.assertEqual(calls, [
            ('__init__', {'config_file': '/etc/swift/proxy-server.conf',
                          'config': 'filter:kmip_keymaster'}),
            ('get', '1234')])

    def test_bad_key_algorithm(self):
        """Keys with a non-AES algorithm are rejected."""
        conf = {'__file__': '/etc/swift/proxy-server.conf',
                '__name__': 'kmip_keymaster',
                'key_id': '1234'}
        secrets = {'1234': create_secret('notAES', 256, b'x' * 32)}
        calls = []
        with mock.patch(KMIP_CLIENT_CLASS,
                        create_mock_client(secrets, calls)), \
                self.assertRaises(ValueError) as cm:
            kmip_keymaster.KmipKeyMaster(None, conf)
        self.assertIn('Expected key 1234 to be an AES-256 key',
                      str(cm.exception))
        self.assertEqual(calls, [
            ('__init__', {'config_file': '/etc/swift/proxy-server.conf',
                          'config': 'filter:kmip_keymaster'}),
            ('get', '1234')])

    def test_missing_key_id(self):
        """With no key_id configured, construction fails after the client
        has been created but before any secret is fetched."""
        conf = {'__file__': '/etc/swift/proxy-server.conf',
                '__name__': 'kmip_keymaster'}
        secrets = {}
        calls = []
        with mock.patch(KMIP_CLIENT_CLASS,
                        create_mock_client(secrets, calls)), \
                self.assertRaises(ValueError) as cm:
            kmip_keymaster.KmipKeyMaster(None, conf)
        self.assertEqual('No secret loaded for active_root_secret_id None',
                         str(cm.exception))
        # We make the client, but never use it
        self.assertEqual(calls, [
            ('__init__', {'config_file': '/etc/swift/proxy-server.conf',
                          'config': 'filter:kmip_keymaster'})])

    def test_logger_manipulations(self):
        """Constructing the keymaster quiets noisy kmip loggers: DEBUG from
        the protocol/config loggers is suppressed, other levels and other
        kmip loggers still get through."""
        root_logger = logging.getLogger()
        old_level = root_logger.getEffectiveLevel()
        handler = InMemoryHandler()
        try:
            root_logger.setLevel(logging.DEBUG)
            root_logger.addHandler(handler)
            conf = {'__file__': '/etc/swift/proxy-server.conf',
                    '__name__': 'kmip_keymaster'}
            secrets = {}
            calls = []
            with mock.patch(KMIP_CLIENT_CLASS,
                            create_mock_client(secrets, calls)), \
                    self.assertRaises(ValueError):
                # missing key_id, as above, but that's not the interesting bit
                kmip_keymaster.KmipKeyMaster(None, conf)
            self.assertEqual(handler.messages, [])

            logger = logging.getLogger('kmip.services.server.kmip_protocol')
            logger.debug('Something secret!')
            logger.info('Something useful')
            self.assertNotIn('Something secret!', handler.messages)
            self.assertIn('Something useful', handler.messages)

            logger = logging.getLogger('kmip.core.config_helper')
            logger.debug('Also secret')
            logger.warning('Also useful')
            self.assertNotIn('Also secret', handler.messages)
            self.assertIn('Also useful', handler.messages)

            logger = logging.getLogger('kmip')
            logger.debug('Boring, but not secret')
            self.assertIn('Boring, but not secret', handler.messages)
        finally:
            root_logger.setLevel(old_level)
            root_logger.removeHandler(handler)
| swift-master | test/unit/common/middleware/crypto/test_kmip_keymaster.py |
# Copyright (c) 2015-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import hashlib
import hmac
import json
import unittest
import uuid
from swift.common import storage_policy, constraints
from swift.common.middleware import copy
from swift.common.middleware import crypto
from swift.common.middleware.crypto import keymaster
from swift.common.middleware.crypto.crypto_utils import (
load_crypto_meta, Crypto)
from swift.common.ring import Ring
from swift.common.swob import Request, str_to_wsgi
from swift.obj import diskfile
from test.debug_logger import debug_logger
from test.unit import skip_if_no_xattrs
from test.unit.common.middleware.crypto.crypto_helpers import (
md5hex, encrypt, TEST_KEYMASTER_CONF)
from test.unit.helpers import setup_servers, teardown_servers
class TestCryptoPipelineChanges(unittest.TestCase):
# Tests the consequences of crypto middleware being in/out of the pipeline
# or having encryption disabled for PUT/GET requests on same object. Uses
# real backend servers so that the handling of headers and sysmeta is
# verified to diskfile and back.
_test_context = None
    @classmethod
    def setUpClass(cls):
        """Spin up the in-process backend servers once for the whole class."""
        cls._test_context = setup_servers()
        cls.proxy_app = cls._test_context["test_servers"][0]
    @classmethod
    def tearDownClass(cls):
        """Tear down the shared backend servers, if they were created."""
        if cls._test_context is not None:
            teardown_servers(cls._test_context)
            cls._test_context = None
    def setUp(self):
        """Per-test fixture: plaintext payload plus a fresh crypto pipeline."""
        skip_if_no_xattrs()
        self.plaintext = b'unencrypted body content'
        self.plaintext_etag = md5hex(self.plaintext)
        self._setup_crypto_app()
def _setup_crypto_app(self, disable_encryption=False, root_secret_id=None):
# Set up a pipeline of crypto middleware ending in the proxy app so
# that tests can make requests to either the proxy server directly or
# via the crypto middleware. Make a fresh instance for each test to
# avoid any state coupling.
conf = {'disable_encryption': disable_encryption}
self.encryption = crypto.filter_factory(conf)(self.proxy_app)
self.encryption.logger = self.proxy_app.logger
km_conf = dict(TEST_KEYMASTER_CONF)
if root_secret_id is not None:
km_conf['active_root_secret_id'] = root_secret_id
self.km = keymaster.KeyMaster(self.encryption, km_conf)
self.crypto_app = self.km # for clarity
self.crypto_app.logger = self.encryption.logger
def _create_container(self, app, policy_name='one', container_path=None):
if not container_path:
# choose new container name so that the policy can be specified
self.container_name = uuid.uuid4().hex
self.container_path = 'http://foo:8080/v1/a/' + self.container_name
self.object_name = 'o'
self.object_path = self.container_path + '/' + self.object_name
container_path = self.container_path
req = Request.blank(
str_to_wsgi(container_path), method='PUT',
headers={'X-Storage-Policy': policy_name})
resp = req.get_response(app)
self.assertEqual('201 Created', resp.status)
# sanity check
req = Request.blank(
str_to_wsgi(container_path), method='HEAD',
headers={'X-Storage-Policy': policy_name})
resp = req.get_response(app)
self.assertEqual(policy_name, resp.headers['X-Storage-Policy'])
def _put_object(self, app, body):
req = Request.blank(
str_to_wsgi(self.object_path), method='PUT', body=body,
headers={'Content-Type': 'application/test'})
resp = req.get_response(app)
self.assertEqual('201 Created', resp.status)
self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
return resp
def _post_object(self, app):
req = Request.blank(str_to_wsgi(self.object_path), method='POST',
headers={'Content-Type': 'application/test',
'X-Object-Meta-Fruit': 'Kiwi'})
resp = req.get_response(app)
self.assertEqual('202 Accepted', resp.status)
return resp
def _copy_object(self, app, destination):
req = Request.blank(str_to_wsgi(self.object_path), method='COPY',
headers={'Destination': destination})
resp = req.get_response(app)
self.assertEqual('201 Created', resp.status)
self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
return resp
def _check_GET_and_HEAD(self, app, object_path=None):
object_path = str_to_wsgi(object_path or self.object_path)
req = Request.blank(object_path, method='GET')
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self.assertEqual(self.plaintext, resp.body)
self.assertEqual('Kiwi', resp.headers['X-Object-Meta-Fruit'])
req = Request.blank(object_path, method='HEAD')
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self.assertEqual(b'', resp.body)
self.assertEqual('Kiwi', resp.headers['X-Object-Meta-Fruit'])
def _check_match_requests(self, method, app, object_path=None):
object_path = str_to_wsgi(object_path or self.object_path)
# verify conditional match requests
expected_body = self.plaintext if method == 'GET' else b''
# If-Match matches
req = Request.blank(object_path, method=method,
headers={'If-Match': '"%s"' % self.plaintext_etag})
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self.assertEqual(expected_body, resp.body)
self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
self.assertEqual('Kiwi', resp.headers['X-Object-Meta-Fruit'])
# If-Match wildcard
req = Request.blank(object_path, method=method,
headers={'If-Match': '*'})
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self.assertEqual(expected_body, resp.body)
self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
self.assertEqual('Kiwi', resp.headers['X-Object-Meta-Fruit'])
# If-Match does not match
req = Request.blank(object_path, method=method,
headers={'If-Match': '"not the etag"'})
resp = req.get_response(app)
self.assertEqual('412 Precondition Failed', resp.status)
self.assertEqual(b'', resp.body)
self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
# If-None-Match matches
req = Request.blank(
object_path, method=method,
headers={'If-None-Match': '"%s"' % self.plaintext_etag})
resp = req.get_response(app)
self.assertEqual('304 Not Modified', resp.status)
self.assertEqual(b'', resp.body)
self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
# If-None-Match wildcard
req = Request.blank(object_path, method=method,
headers={'If-None-Match': '*'})
resp = req.get_response(app)
self.assertEqual('304 Not Modified', resp.status)
self.assertEqual(b'', resp.body)
self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
# If-None-Match does not match
req = Request.blank(object_path, method=method,
headers={'If-None-Match': '"not the etag"'})
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self.assertEqual(expected_body, resp.body)
self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
self.assertEqual('Kiwi', resp.headers['X-Object-Meta-Fruit'])
def _check_listing(self, app, expect_mismatch=False, container_path=None):
container_path = str_to_wsgi(container_path or self.container_path)
req = Request.blank(
container_path, method='GET', query_string='format=json')
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
listing = json.loads(resp.body)
self.assertEqual(1, len(listing))
self.assertEqual(self.object_name, listing[0]['name'])
self.assertEqual(len(self.plaintext), listing[0]['bytes'])
if expect_mismatch:
self.assertNotEqual(self.plaintext_etag, listing[0]['hash'])
else:
self.assertEqual(self.plaintext_etag, listing[0]['hash'])
    def test_write_with_crypto_and_override_headers(self):
        """Container-update override etag/size values, supplied either as
        request headers or as footers, must survive encryption and surface
        unchanged on object GET and in container listings."""
        self._create_container(self.proxy_app, policy_name='one')

        def verify_overrides():
            # verify object sysmeta
            req = Request.blank(
                self.object_path, method='GET')
            resp = req.get_response(self.crypto_app)
            for k, v in overrides.items():
                self.assertIn(k, resp.headers)
                self.assertEqual(overrides[k], resp.headers[k])

            # check container listing
            req = Request.blank(
                self.container_path, method='GET', query_string='format=json')
            resp = req.get_response(self.crypto_app)
            self.assertEqual('200 OK', resp.status)
            listing = json.loads(resp.body)
            self.assertEqual(1, len(listing))
            self.assertEqual('o', listing[0]['name'])
            self.assertEqual(
                overrides['x-object-sysmeta-container-update-override-size'],
                str(listing[0]['bytes']))
            self.assertEqual(
                overrides['x-object-sysmeta-container-update-override-etag'],
                listing[0]['hash'])

        # include overrides in headers
        overrides = {'x-object-sysmeta-container-update-override-etag': 'foo',
                     'x-object-sysmeta-container-update-override-size':
                     str(len(self.plaintext) + 1)}
        req = Request.blank(self.object_path, method='PUT',
                            body=self.plaintext, headers=overrides.copy())
        resp = req.get_response(self.crypto_app)
        self.assertEqual('201 Created', resp.status)
        self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
        verify_overrides()

        # include overrides in footers
        overrides = {'x-object-sysmeta-container-update-override-etag': 'bar',
                     'x-object-sysmeta-container-update-override-size':
                     str(len(self.plaintext) + 2)}

        def callback(footers):
            footers.update(overrides)

        req = Request.blank(
            self.object_path, method='PUT', body=self.plaintext)
        req.environ['swift.callback.update_footers'] = callback
        resp = req.get_response(self.crypto_app)
        self.assertEqual('201 Created', resp.status)
        self.assertEqual(self.plaintext_etag, resp.headers['Etag'])
        verify_overrides()
    def test_write_with_crypto_read_with_crypto(self):
        """Round trip: write and read back through the crypto pipeline."""
        self._create_container(self.proxy_app, policy_name='one')
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
    def test_write_with_crypto_read_with_crypto_different_root_secrets(self):
        """Objects written under one root secret remain readable, POSTable
        and re-writable after the active root secret is rotated."""
        root_secret = self.crypto_app.root_secret
        self._create_container(self.proxy_app, policy_name='one')
        self._put_object(self.crypto_app, self.plaintext)
        # change root secret
        self._setup_crypto_app(root_secret_id='1')
        root_secret_1 = self.crypto_app.root_secret
        self.assertNotEqual(root_secret, root_secret_1)  # sanity check
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
        # change root secret
        self._setup_crypto_app(root_secret_id='2')
        root_secret_2 = self.crypto_app.root_secret
        self.assertNotEqual(root_secret_2, root_secret_1)  # sanity check
        self.assertNotEqual(root_secret_2, root_secret)  # sanity check
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
        # write object again
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
    def test_write_with_crypto_read_with_crypto_ec(self):
        """Round trip through the crypto pipeline with an EC policy."""
        self._create_container(self.proxy_app, policy_name='ec')
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
    def test_put_without_crypto_post_with_crypto_read_with_crypto(self):
        """An unencrypted PUT followed by an encrypted metadata POST must
        still read back correctly through the crypto pipeline."""
        self._create_container(self.proxy_app, policy_name='one')
        self._put_object(self.proxy_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
    def test_write_without_crypto_read_with_crypto(self):
        """Objects written without crypto remain readable when crypto is
        subsequently added to the pipeline."""
        self._create_container(self.proxy_app, policy_name='one')
        self._put_object(self.proxy_app, self.plaintext)
        self._post_object(self.proxy_app)
        self._check_GET_and_HEAD(self.proxy_app)  # sanity check
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.proxy_app)  # sanity check
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.proxy_app)  # sanity check
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
    def test_write_without_crypto_read_with_crypto_ec(self):
        """Same as above, but with an EC storage policy."""
        self._create_container(self.proxy_app, policy_name='ec')
        self._put_object(self.proxy_app, self.plaintext)
        self._post_object(self.proxy_app)
        self._check_GET_and_HEAD(self.proxy_app)  # sanity check
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.proxy_app)  # sanity check
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.proxy_app)  # sanity check
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
def _check_GET_and_HEAD_not_decrypted(self, app):
req = Request.blank(self.object_path, method='GET')
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self.assertNotEqual(self.plaintext, resp.body)
self.assertEqual('%s' % len(self.plaintext),
resp.headers['Content-Length'])
self.assertNotEqual('Kiwi', resp.headers['X-Object-Meta-Fruit'])
req = Request.blank(self.object_path, method='HEAD')
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self.assertEqual(b'', resp.body)
self.assertNotEqual('Kiwi', resp.headers['X-Object-Meta-Fruit'])
    def test_write_with_crypto_read_without_crypto(self):
        """Without crypto in the read pipeline, encrypted objects come
        back as ciphertext and listing etags do not match the plaintext."""
        self._create_container(self.proxy_app, policy_name='one')
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)  # sanity check
        # without crypto middleware, GET and HEAD returns ciphertext
        self._check_GET_and_HEAD_not_decrypted(self.proxy_app)
        self._check_listing(self.proxy_app, expect_mismatch=True)
    def test_write_with_crypto_read_without_crypto_ec(self):
        """Same as above, but with an EC storage policy."""
        self._create_container(self.proxy_app, policy_name='ec')
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)  # sanity check
        # without crypto middleware, GET and HEAD returns ciphertext
        self._check_GET_and_HEAD_not_decrypted(self.proxy_app)
        self._check_listing(self.proxy_app, expect_mismatch=True)
    def test_disable_encryption_config_option(self):
        """With disable_encryption=true new objects are stored in clear,
        so they read back correctly with or without crypto in the
        pipeline."""
        # check that on disable_encryption = true, object is not encrypted
        self._setup_crypto_app(disable_encryption=True)
        self._create_container(self.proxy_app, policy_name='one')
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)
        # check as if no crypto middleware exists
        self._check_GET_and_HEAD(self.proxy_app)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_match_requests('GET', self.proxy_app)
        self._check_match_requests('HEAD', self.proxy_app)
    def test_write_with_crypto_read_with_disable_encryption_conf(self):
        """disable_encryption stops encrypting new writes, but previously
        encrypted objects must still decrypt on read."""
        self._create_container(self.proxy_app, policy_name='one')
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)  # sanity check
        # turn on disable_encryption config option
        self._setup_crypto_app(disable_encryption=True)
        # GET and HEAD of encrypted objects should still work
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_listing(self.crypto_app, expect_mismatch=False)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
    def _test_ondisk_data_after_write_with_crypto(self, policy_name):
        """Write via the crypto pipeline, then inspect backend container
        and object server state directly to verify that listing etags,
        object body, user metadata and etag-related sysmeta are all stored
        encrypted.

        :returns: a tuple (expected encrypted body,
            list of (node, on-disk contents)) for policy-specific body
            verification by the caller.
        """
        policy = storage_policy.POLICIES.get_by_name(policy_name)
        self._create_container(self.proxy_app, policy_name=policy_name)
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)

        # Verify container listing etag is encrypted by direct GET to container
        # server. We can use any server for all nodes since they all share same
        # devices dir.
        cont_server = self._test_context['test_servers'][3]
        cont_ring = Ring(self._test_context['testdir'], ring_name='container')
        part, nodes = cont_ring.get_nodes('a', self.container_name)
        for node in nodes:
            req = Request.blank('/%s/%s/a/%s'
                                % (node['device'], part, self.container_name),
                                method='GET', query_string='format=json')
            resp = req.get_response(cont_server)
            listing = json.loads(resp.body)
            # sanity checks...
            self.assertEqual(1, len(listing))
            self.assertEqual('o', listing[0]['name'])
            self.assertEqual('application/test', listing[0]['content_type'])
            # verify encrypted etag value
            parts = listing[0]['hash'].rsplit(';', 1)
            crypto_meta_param = parts[1].strip()
            crypto_meta = crypto_meta_param[len('swift_meta='):]
            listing_etag_iv = load_crypto_meta(crypto_meta)['iv']
            exp_enc_listing_etag = base64.b64encode(
                encrypt(self.plaintext_etag.encode('ascii'),
                        self.km.create_key('/a/%s' % self.container_name),
                        listing_etag_iv)).decode('ascii')
            self.assertEqual(exp_enc_listing_etag, parts[0])

        # Verify diskfile data and metadata is encrypted
        ring_object = self.proxy_app.get_object_ring(int(policy))
        partition, nodes = ring_object.get_nodes('a', self.container_name, 'o')
        conf = {'devices': self._test_context["testdir"],
                'mount_check': 'false'}
        df_mgr = diskfile.DiskFileRouter(conf, debug_logger())[policy]
        ondisk_data = []
        exp_enc_body = None
        for node_index, node in enumerate(nodes):
            df = df_mgr.get_diskfile(node['device'], partition,
                                     'a', self.container_name, 'o',
                                     policy=policy)
            with df.open():
                meta = df.get_metadata()
                contents = b''.join(df.reader())
            metadata = dict((k.lower(), v) for k, v in meta.items())
            # verify on disk data - body
            body_iv = load_crypto_meta(
                metadata['x-object-sysmeta-crypto-body-meta'])['iv']
            body_key_meta = load_crypto_meta(
                metadata['x-object-sysmeta-crypto-body-meta'])['body_key']
            obj_key = self.km.create_key('/a/%s/o' % self.container_name)
            body_key = Crypto().unwrap_key(obj_key, body_key_meta)
            exp_enc_body = encrypt(self.plaintext, body_key, body_iv)
            ondisk_data.append((node, contents))
            # verify on disk user metadata
            enc_val, meta = metadata[
                'x-object-transient-sysmeta-crypto-meta-fruit'].split(';')
            meta = meta.strip()[len('swift_meta='):]
            metadata_iv = load_crypto_meta(meta)['iv']
            exp_enc_meta = base64.b64encode(encrypt(
                b'Kiwi', obj_key, metadata_iv)).decode('ascii')
            self.assertEqual(exp_enc_meta, enc_val)
            self.assertNotIn('x-object-meta-fruit', metadata)
            self.assertIn(
                'x-object-transient-sysmeta-crypto-meta', metadata)
            meta = load_crypto_meta(
                metadata['x-object-transient-sysmeta-crypto-meta'])
            self.assertIn('key_id', meta)
            self.assertIn('path', meta['key_id'])
            self.assertEqual(
                '/a/%s/%s' % (self.container_name, self.object_name),
                meta['key_id']['path'])
            self.assertIn('v', meta['key_id'])
            self.assertEqual('2', meta['key_id']['v'])
            self.assertIn('cipher', meta)
            self.assertEqual(Crypto.cipher, meta['cipher'])
            # verify etag
            actual_enc_etag, _junk, actual_etag_meta = metadata[
                'x-object-sysmeta-crypto-etag'].partition('; swift_meta=')
            etag_iv = load_crypto_meta(actual_etag_meta)['iv']
            exp_enc_etag = base64.b64encode(encrypt(
                self.plaintext_etag.encode('ascii'),
                obj_key, etag_iv)).decode('ascii')
            self.assertEqual(exp_enc_etag, actual_enc_etag)
            # verify etag hmac
            exp_etag_mac = hmac.new(
                obj_key, self.plaintext_etag.encode('ascii'),
                digestmod=hashlib.sha256).digest()
            exp_etag_mac = base64.b64encode(exp_etag_mac).decode('ascii')
            self.assertEqual(exp_etag_mac,
                             metadata['x-object-sysmeta-crypto-etag-mac'])
            # verify etag override for container updates
            override = 'x-object-sysmeta-container-update-override-etag'
            parts = metadata[override].rsplit(';', 1)
            crypto_meta_param = parts[1].strip()
            crypto_meta = crypto_meta_param[len('swift_meta='):]
            listing_etag_iv = load_crypto_meta(crypto_meta)['iv']
            cont_key = self.km.create_key('/a/%s' % self.container_name)
            exp_enc_listing_etag = base64.b64encode(
                encrypt(self.plaintext_etag.encode('ascii'), cont_key,
                        listing_etag_iv)).decode('ascii')
            self.assertEqual(exp_enc_listing_etag, parts[0])
        self._check_GET_and_HEAD(self.crypto_app)
        return exp_enc_body, ondisk_data
def test_ondisk_data_after_write_with_crypto(self):
exp_body, ondisk_data = self._test_ondisk_data_after_write_with_crypto(
policy_name='one')
for node, body in ondisk_data:
self.assertEqual(exp_body, body)
def test_ondisk_data_after_write_with_crypto_ec(self):
exp_body, ondisk_data = self._test_ondisk_data_after_write_with_crypto(
policy_name='ec')
policy = storage_policy.POLICIES.get_by_name('ec')
for frag_selection in (ondisk_data[:2], ondisk_data[1:]):
frags = [frag for node, frag in frag_selection]
self.assertEqual(exp_body, policy.pyeclib_driver.decode(frags))
    def _test_copy_encrypted_to_encrypted(
            self, src_policy_name, dest_policy_name):
        """Copy an encrypted object to a fresh container, also via crypto.

        Writes and POSTs an object through the encrypting pipeline, then
        server-side copies it into a new container and verifies reads,
        listings and conditional (If-Match style) requests on the copy.
        """
        self._create_container(self.proxy_app, policy_name=src_policy_name)
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        # wrap the encrypting pipeline with the server-side-copy middleware
        copy_crypto_app = copy.ServerSideCopyMiddleware(self.crypto_app, {})
        dest_container = uuid.uuid4().hex
        dest_container_path = 'http://localhost:8080/v1/a/' + dest_container
        self._create_container(copy_crypto_app, policy_name=dest_policy_name,
                               container_path=dest_container_path)
        dest_obj_path = dest_container_path + '/o'
        dest = '/%s/%s' % (dest_container, 'o')
        self._copy_object(copy_crypto_app, dest)
        # the copy must be readable and listable through the crypto pipeline
        self._check_GET_and_HEAD(copy_crypto_app, object_path=dest_obj_path)
        self._check_listing(
            copy_crypto_app, container_path=dest_container_path)
        self._check_match_requests(
            'GET', copy_crypto_app, object_path=dest_obj_path)
        self._check_match_requests(
            'HEAD', copy_crypto_app, object_path=dest_obj_path)
def test_copy_encrypted_to_encrypted(self):
self._test_copy_encrypted_to_encrypted('ec', 'ec')
self._test_copy_encrypted_to_encrypted('one', 'ec')
self._test_copy_encrypted_to_encrypted('ec', 'one')
self._test_copy_encrypted_to_encrypted('one', 'one')
    def _test_copy_encrypted_to_unencrypted(
            self, src_policy_name, dest_policy_name):
        """Copy an encrypted object using a pipeline with encryption disabled.

        The copy should be stored decrypt-able by, and readable through,
        both the no-crypto proxy pipeline and the copy pipeline.
        """
        self._create_container(self.proxy_app, policy_name=src_policy_name)
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        # make a pipeline with encryption disabled, use it to copy object
        self._setup_crypto_app(disable_encryption=True)
        copy_app = copy.ServerSideCopyMiddleware(self.crypto_app, {})
        dest_container = uuid.uuid4().hex
        dest_container_path = 'http://localhost:8080/v1/a/' + dest_container
        self._create_container(self.crypto_app, policy_name=dest_policy_name,
                               container_path=dest_container_path)
        dest_obj_path = dest_container_path + '/o'
        dest = '/%s/%s' % (dest_container, 'o')
        self._copy_object(copy_app, dest)
        # copy must be readable both with and without the crypto middleware
        self._check_GET_and_HEAD(copy_app, object_path=dest_obj_path)
        self._check_GET_and_HEAD(self.proxy_app, object_path=dest_obj_path)
        self._check_listing(copy_app, container_path=dest_container_path)
        self._check_listing(self.proxy_app, container_path=dest_container_path)
        self._check_match_requests(
            'GET', self.proxy_app, object_path=dest_obj_path)
        self._check_match_requests(
            'HEAD', self.proxy_app, object_path=dest_obj_path)
def test_copy_encrypted_to_unencrypted(self):
self._test_copy_encrypted_to_unencrypted('ec', 'ec')
self._test_copy_encrypted_to_unencrypted('one', 'ec')
self._test_copy_encrypted_to_unencrypted('ec', 'one')
self._test_copy_encrypted_to_unencrypted('one', 'one')
    def _test_copy_unencrypted_to_encrypted(
            self, src_policy_name, dest_policy_name):
        """Copy a plaintext object through an encrypting copy pipeline.

        The source object is written via the plain proxy pipeline; the copy
        is made (and then verified) through the crypto-enabled pipeline.
        """
        self._create_container(self.proxy_app, policy_name=src_policy_name)
        self._put_object(self.proxy_app, self.plaintext)
        self._post_object(self.proxy_app)
        copy_crypto_app = copy.ServerSideCopyMiddleware(self.crypto_app, {})
        dest_container = uuid.uuid4().hex
        dest_container_path = 'http://localhost:8080/v1/a/' + dest_container
        self._create_container(copy_crypto_app, policy_name=dest_policy_name,
                               container_path=dest_container_path)
        dest_obj_path = dest_container_path + '/o'
        dest = '/%s/%s' % (dest_container, 'o')
        self._copy_object(copy_crypto_app, dest)
        # the copy must be readable and listable through the crypto pipeline
        self._check_GET_and_HEAD(copy_crypto_app, object_path=dest_obj_path)
        self._check_listing(
            copy_crypto_app, container_path=dest_container_path)
        self._check_match_requests(
            'GET', copy_crypto_app, object_path=dest_obj_path)
        self._check_match_requests(
            'HEAD', copy_crypto_app, object_path=dest_obj_path)
def test_copy_unencrypted_to_encrypted(self):
self._test_copy_unencrypted_to_encrypted('ec', 'ec')
self._test_copy_unencrypted_to_encrypted('one', 'ec')
self._test_copy_unencrypted_to_encrypted('ec', 'one')
self._test_copy_unencrypted_to_encrypted('one', 'one')
    def test_crypto_max_length_path(self):
        """Encrypt/decrypt round-trip with maximum-length container and
        object names."""
        # the path is stashed in the key_id in crypto meta; check that a long
        # path is ok
        self.container_name = 'c' * constraints.MAX_CONTAINER_NAME_LENGTH
        self.object_name = 'o' * constraints.MAX_OBJECT_NAME_LENGTH
        self.container_path = 'http://foo:8080/v1/a/' + self.container_name
        self.object_path = '%s/%s' % (self.container_path, self.object_name)
        self._create_container(self.proxy_app, policy_name='one',
                               container_path=self.container_path)
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
    def test_crypto_UTF8_path(self):
        """Encrypt/decrypt round-trip with non-ASCII (UTF-8) names."""
        # check that UTF8 path is ok
        self.container_name = self.object_name = u'\u010brypto'
        self.container_path = 'http://foo:8080/v1/a/' + self.container_name
        self.object_path = '%s/%s' % (self.container_path, self.object_name)
        self._create_container(self.proxy_app, policy_name='one',
                               container_path=self.container_path)
        self._put_object(self.crypto_app, self.plaintext)
        self._post_object(self.crypto_app)
        self._check_GET_and_HEAD(self.crypto_app)
        self._check_match_requests('GET', self.crypto_app)
        self._check_match_requests('HEAD', self.crypto_app)
        self._check_listing(self.crypto_app)
# Allow this test module to be run directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/middleware/crypto/test_encryption.py |
# Copyright (c) 2010-2023 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for swift.common.utils.libc"""
import ctypes
import os
import platform
import tempfile
import unittest
import mock
from swift.common.utils import libc
from test.debug_logger import debug_logger
class Test_LibcWrapper(unittest.TestCase):
    """Exercise libc._LibcWrapper against real libc symbols."""

    def test_available_function(self):
        # getpid exists in essentially every libc
        wrapper = libc._LibcWrapper('getpid')
        self.assertTrue(wrapper.available)
        self.assertEqual(os.getpid(), wrapper())

    def test_unavailable_function(self):
        # a nonsense symbol name cannot resolve
        wrapper = libc._LibcWrapper('diffractively_protectorship')
        self.assertFalse(wrapper.available)
        self.assertRaises(NotImplementedError, wrapper)

    def test_argument_plumbing(self):
        # arguments should pass through to the underlying C call
        wrapper = libc._LibcWrapper('lseek')
        with tempfile.TemporaryFile() as fp:
            fp.write(b"abcdefgh")
            fp.flush()
            # 0 is SEEK_SET: move the fd's offset to byte 3
            wrapper(fp.fileno(), ctypes.c_uint64(3), 0)
            self.assertEqual(b"defgh", fp.read(100))
class TestModifyPriority(unittest.TestCase):
    """Tests for libc.modify_priority and libc.NR_ioprio_set."""

    def test_modify_priority(self):
        """modify_priority should translate config into setpriority() and
        ioprio_set syscalls; capture the raw call args via fakes."""
        pid = os.getpid()
        logger = debug_logger()
        called = {}

        def _fake_setpriority(*args):
            called['setpriority'] = args

        def _fake_syscall(*args):
            called['syscall'] = args

        # Test if current architecture supports changing of priority
        try:
            libc.NR_ioprio_set()
        except OSError as e:
            raise unittest.SkipTest(e)

        with mock.patch('swift.common.utils.libc._libc_setpriority',
                        _fake_setpriority), \
                mock.patch('swift.common.utils.libc._posix_syscall',
                           _fake_syscall):
            called = {}
            # not set / default
            libc.modify_priority({}, logger)
            self.assertEqual(called, {})
            called = {}
            # just nice
            libc.modify_priority({'nice_priority': '1'}, logger)
            self.assertEqual(called, {'setpriority': (0, pid, 1)})
            called = {}
            # just ionice class uses default priority 0
            libc.modify_priority({'ionice_class': 'IOPRIO_CLASS_RT'}, logger)
            architecture = os.uname()[4]
            arch_bits = platform.architecture()[0]
            # the ioprio_set syscall number differs per architecture;
            # class is packed in the top 3 bits (<< 13), priority below
            if architecture == 'x86_64' and arch_bits == '64bit':
                self.assertEqual(called, {'syscall': (251, 1, pid, 1 << 13)})
            elif architecture == 'aarch64' and arch_bits == '64bit':
                self.assertEqual(called, {'syscall': (30, 1, pid, 1 << 13)})
            else:
                self.fail("Unexpected call: %r" % called)
            called = {}
            # just ionice priority is ignored
            libc.modify_priority({'ionice_priority': '4'}, logger)
            self.assertEqual(called, {})
            called = {}
            # bad ionice class
            libc.modify_priority({'ionice_class': 'class_foo'}, logger)
            self.assertEqual(called, {})
            called = {}
            # ionice class & priority
            libc.modify_priority({
                'ionice_class': 'IOPRIO_CLASS_BE',
                'ionice_priority': '4',
            }, logger)
            if architecture == 'x86_64' and arch_bits == '64bit':
                self.assertEqual(called, {
                    'syscall': (251, 1, pid, 2 << 13 | 4)
                })
            elif architecture == 'aarch64' and arch_bits == '64bit':
                self.assertEqual(called, {
                    'syscall': (30, 1, pid, 2 << 13 | 4)
                })
            else:
                self.fail("Unexpected call: %r" % called)
            called = {}
            # all
            libc.modify_priority({
                'nice_priority': '-15',
                'ionice_class': 'IOPRIO_CLASS_IDLE',
                'ionice_priority': '6',
            }, logger)
            if architecture == 'x86_64' and arch_bits == '64bit':
                self.assertEqual(called, {
                    'setpriority': (0, pid, -15),
                    'syscall': (251, 1, pid, 3 << 13 | 6),
                })
            elif architecture == 'aarch64' and arch_bits == '64bit':
                self.assertEqual(called, {
                    'setpriority': (0, pid, -15),
                    'syscall': (30, 1, pid, 3 << 13 | 6),
                })
            else:
                self.fail("Unexpected call: %r" % called)

    def test__NR_ioprio_set(self):
        """NR_ioprio_set returns the syscall number for supported 64-bit
        architectures and raises OSError for unsupported ones."""
        with mock.patch('os.uname', return_value=('', '', '', '', 'x86_64')), \
                mock.patch('platform.architecture',
                           return_value=('64bit', '')):
            self.assertEqual(251, libc.NR_ioprio_set())

        with mock.patch('os.uname', return_value=('', '', '', '', 'x86_64')), \
                mock.patch('platform.architecture',
                           return_value=('32bit', '')):
            self.assertRaises(OSError, libc.NR_ioprio_set)

        with mock.patch('os.uname',
                        return_value=('', '', '', '', 'aarch64')), \
                mock.patch('platform.architecture',
                           return_value=('64bit', '')):
            self.assertEqual(30, libc.NR_ioprio_set())

        with mock.patch('os.uname',
                        return_value=('', '', '', '', 'aarch64')), \
                mock.patch('platform.architecture',
                           return_value=('32bit', '')):
            self.assertRaises(OSError, libc.NR_ioprio_set)

        with mock.patch('os.uname', return_value=('', '', '', '', 'alpha')), \
                mock.patch('platform.architecture',
                           return_value=('64bit', '')):
            self.assertRaises(OSError, libc.NR_ioprio_set)
| swift-master | test/unit/common/utils/test_libc.py |
# Copyright (c) 2010-2023 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for swift.common.utils.timestamp"""
import random
import time
import unittest
import mock
from swift.common.utils import timestamp
class TestTimestamp(unittest.TestCase):
"""Tests for swift.common.utils.timestamp.Timestamp"""
def test_invalid_input(self):
with self.assertRaises(ValueError):
timestamp.Timestamp(time.time(), offset=-1)
with self.assertRaises(ValueError):
timestamp.Timestamp('123.456_78_90')
def test_invalid_string_conversion(self):
t = timestamp.Timestamp.now()
self.assertRaises(TypeError, str, t)
def test_offset_limit(self):
t = 1417462430.78693
# can't have a offset above MAX_OFFSET
with self.assertRaises(ValueError):
timestamp.Timestamp(t, offset=timestamp.MAX_OFFSET + 1)
# exactly max offset is fine
ts = timestamp.Timestamp(t, offset=timestamp.MAX_OFFSET)
self.assertEqual(ts.internal, '1417462430.78693_ffffffffffffffff')
# but you can't offset it further
with self.assertRaises(ValueError):
timestamp.Timestamp(ts.internal, offset=1)
# unless you start below it
ts = timestamp.Timestamp(t, offset=timestamp.MAX_OFFSET - 1)
self.assertEqual(timestamp.Timestamp(ts.internal, offset=1),
'1417462430.78693_ffffffffffffffff')
    def test_normal_format_no_offset(self):
        """Equivalent inputs all normalize to the same 5-decimal form."""
        expected = '1402436408.91203'
        test_values = (
            '1402436408.91203',
            '1402436408.91203_00000000',
            '1402436408.912030000',
            '1402436408.912030000_0000000000000',
            '000001402436408.912030000',
            '000001402436408.912030000_0000000000',
            1402436408.91203,
            1402436408.912029,
            1402436408.9120300000000000,
            1402436408.91202999999999999,
            timestamp.Timestamp(1402436408.91203),
            timestamp.Timestamp(1402436408.91203, offset=0),
            timestamp.Timestamp(1402436408.912029),
            timestamp.Timestamp(1402436408.912029, offset=0),
            timestamp.Timestamp('1402436408.91203'),
            timestamp.Timestamp('1402436408.91203', offset=0),
            timestamp.Timestamp('1402436408.91203_00000000'),
            timestamp.Timestamp('1402436408.91203_00000000', offset=0),
        )
        for value in test_values:
            ts = timestamp.Timestamp(value)
            self.assertEqual(ts.normal, expected)
            # timestamp instance can also compare to string or float
            self.assertEqual(ts, expected)
            self.assertEqual(ts, float(expected))
            self.assertEqual(ts, timestamp.normalize_timestamp(expected))
    def test_isoformat(self):
        """isoformat renders microsecond precision; offsets are ignored."""
        expected = '2014-06-10T22:47:32.054580'
        test_values = (
            '1402440452.05458',
            '1402440452.054579',
            '1402440452.05458_00000000',
            '1402440452.054579_00000000',
            '1402440452.054580000',
            '1402440452.054579999',
            '1402440452.054580000_0000000000000',
            '1402440452.054579999_0000ff00',
            '000001402440452.054580000',
            '000001402440452.0545799',
            '000001402440452.054580000_0000000000',
            '000001402440452.054579999999_00000fffff',
            1402440452.05458,
            1402440452.054579,
            1402440452.0545800000000000,
            1402440452.054579999,
            timestamp.Timestamp(1402440452.05458),
            timestamp.Timestamp(1402440452.0545799),
            timestamp.Timestamp(1402440452.05458, offset=0),
            timestamp.Timestamp(1402440452.05457999999, offset=0),
            timestamp.Timestamp(1402440452.05458, offset=100),
            timestamp.Timestamp(1402440452.054579, offset=100),
            timestamp.Timestamp('1402440452.05458'),
            timestamp.Timestamp('1402440452.054579999'),
            timestamp.Timestamp('1402440452.05458', offset=0),
            timestamp.Timestamp('1402440452.054579', offset=0),
            timestamp.Timestamp('1402440452.05458', offset=300),
            timestamp.Timestamp('1402440452.05457999', offset=300),
            timestamp.Timestamp('1402440452.05458_00000000'),
            timestamp.Timestamp('1402440452.05457999_00000000'),
            timestamp.Timestamp('1402440452.05458_00000000', offset=0),
            timestamp.Timestamp('1402440452.05457999_00000aaa', offset=0),
            timestamp.Timestamp('1402440452.05458_00000000', offset=400),
            timestamp.Timestamp('1402440452.054579_0a', offset=400),
        )
        for value in test_values:
            self.assertEqual(timestamp.Timestamp(value).isoformat, expected)
        # zero-valued timestamps map to the epoch
        expected = '1970-01-01T00:00:00.000000'
        test_values = (
            '0',
            '0000000000.00000',
            '0000000000.00000_ffffffffffff',
            0,
            0.0,
        )
        for value in test_values:
            self.assertEqual(timestamp.Timestamp(value).isoformat, expected)
def test_from_isoformat(self):
ts = timestamp.Timestamp.from_isoformat('2014-06-10T22:47:32.054580')
self.assertIsInstance(ts, timestamp.Timestamp)
self.assertEqual(1402440452.05458, float(ts))
self.assertEqual('2014-06-10T22:47:32.054580', ts.isoformat)
ts = timestamp.Timestamp.from_isoformat('1970-01-01T00:00:00.000000')
self.assertIsInstance(ts, timestamp.Timestamp)
self.assertEqual(0.0, float(ts))
self.assertEqual('1970-01-01T00:00:00.000000', ts.isoformat)
ts = timestamp.Timestamp(1402440452.05458)
self.assertIsInstance(ts, timestamp.Timestamp)
self.assertEqual(ts, timestamp.Timestamp.from_isoformat(ts.isoformat))
def test_ceil(self):
self.assertEqual(0.0, timestamp.Timestamp(0).ceil())
self.assertEqual(1.0, timestamp.Timestamp(0.00001).ceil())
self.assertEqual(1.0, timestamp.Timestamp(0.000001).ceil())
self.assertEqual(12345678.0, timestamp.Timestamp(12345678.0).ceil())
self.assertEqual(12345679.0,
timestamp.Timestamp(12345678.000001).ceil())
    def test_not_equal(self):
        """!= compares both the time and the offset portions."""
        ts = '1402436408.91203_0000000000000001'
        test_values = (
            timestamp.Timestamp('1402436408.91203_0000000000000002'),
            timestamp.Timestamp('1402436408.91203'),
            timestamp.Timestamp(1402436408.91203),
            timestamp.Timestamp(1402436408.91204),
            timestamp.Timestamp(1402436408.91203, offset=0),
            timestamp.Timestamp(1402436408.91203, offset=2),
        )
        for value in test_values:
            self.assertTrue(value != ts)

        self.assertIs(True, timestamp.Timestamp(ts) == ts)  # sanity
        self.assertIs(False,
                      timestamp.Timestamp(ts) != timestamp.Timestamp(ts))
        self.assertIs(False, timestamp.Timestamp(ts) != ts)
        # identity checks against None must behave normally
        self.assertIs(False, timestamp.Timestamp(ts) is None)
        self.assertIs(True, timestamp.Timestamp(ts) is not None)
    def test_no_force_internal_no_offset(self):
        """Test that internal is the same as normal with no offset"""
        with mock.patch('swift.common.utils.timestamp.FORCE_INTERNAL',
                        new=False):
            self.assertEqual(timestamp.Timestamp(0).internal,
                             '0000000000.00000')
            self.assertEqual(timestamp.Timestamp(1402437380.58186).internal,
                             '1402437380.58186')
            self.assertEqual(timestamp.Timestamp(1402437380.581859).internal,
                             '1402437380.58186')
            self.assertEqual(timestamp.Timestamp(0).internal,
                             timestamp.normalize_timestamp(0))
    def test_no_force_internal_with_offset(self):
        """Test that internal always includes the offset if significant"""
        with mock.patch('swift.common.utils.timestamp.FORCE_INTERNAL',
                        new=False):
            self.assertEqual(timestamp.Timestamp(0, offset=1).internal,
                             '0000000000.00000_0000000000000001')
            self.assertEqual(
                timestamp.Timestamp(1402437380.58186, offset=16).internal,
                '1402437380.58186_0000000000000010')
            self.assertEqual(
                timestamp.Timestamp(1402437380.581859, offset=240).internal,
                '1402437380.58186_00000000000000f0')
            # offsets given to the constructor add to any existing offset
            self.assertEqual(
                timestamp.Timestamp('1402437380.581859_00000001',
                                    offset=240).internal,
                '1402437380.58186_00000000000000f1')
    def test_force_internal(self):
        """Test that internal always includes the offset if forced"""
        with mock.patch('swift.common.utils.timestamp.FORCE_INTERNAL',
                        new=True):
            self.assertEqual(timestamp.Timestamp(0).internal,
                             '0000000000.00000_0000000000000000')
            self.assertEqual(timestamp.Timestamp(1402437380.58186).internal,
                             '1402437380.58186_0000000000000000')
            self.assertEqual(timestamp.Timestamp(1402437380.581859).internal,
                             '1402437380.58186_0000000000000000')
            self.assertEqual(timestamp.Timestamp(0, offset=1).internal,
                             '0000000000.00000_0000000000000001')
            self.assertEqual(
                timestamp.Timestamp(1402437380.58186, offset=16).internal,
                '1402437380.58186_0000000000000010')
            self.assertEqual(
                timestamp.Timestamp(1402437380.581859, offset=16).internal,
                '1402437380.58186_0000000000000010')
    def test_internal_format_no_offset(self):
        """internal format for offset-less values depends on FORCE_INTERNAL."""
        expected = '1402436408.91203_0000000000000000'
        test_values = (
            '1402436408.91203',
            '1402436408.91203_00000000',
            '1402436408.912030000',
            '1402436408.912030000_0000000000000',
            '000001402436408.912030000',
            '000001402436408.912030000_0000000000',
            1402436408.91203,
            1402436408.9120300000000000,
            1402436408.912029,
            1402436408.912029999999999999,
            timestamp.Timestamp(1402436408.91203),
            timestamp.Timestamp(1402436408.91203, offset=0),
            timestamp.Timestamp(1402436408.912029),
            timestamp.Timestamp(1402436408.91202999999999999, offset=0),
            timestamp.Timestamp('1402436408.91203'),
            timestamp.Timestamp('1402436408.91203', offset=0),
            timestamp.Timestamp('1402436408.912029'),
            timestamp.Timestamp('1402436408.912029', offset=0),
            timestamp.Timestamp('1402436408.912029999999999'),
            timestamp.Timestamp('1402436408.912029999999999', offset=0),
        )
        for value in test_values:
            # timestamp instance is always equivalent
            self.assertEqual(timestamp.Timestamp(value), expected)
            if timestamp.FORCE_INTERNAL:
                # the FORCE_INTERNAL flag makes the internal format always
                # include the offset portion of the timestamp even when it's
                # not significant and would be bad during upgrades
                self.assertEqual(timestamp.Timestamp(value).internal, expected)
            else:
                # unless we FORCE_INTERNAL, when there's no offset the
                # internal format is equivalent to the normalized format
                self.assertEqual(timestamp.Timestamp(value).internal,
                                 '1402436408.91203')
    def test_internal_format_with_offset(self):
        """internal format appends a 16-hex-digit offset; offsets add up."""
        expected = '1402436408.91203_00000000000000f0'
        test_values = (
            '1402436408.91203_000000f0',
            u'1402436408.91203_000000f0',
            b'1402436408.91203_000000f0',
            '1402436408.912030000_0000000000f0',
            '1402436408.912029_000000f0',
            '1402436408.91202999999_0000000000f0',
            '000001402436408.912030000_000000000f0',
            '000001402436408.9120299999_000000000f0',
            timestamp.Timestamp(1402436408.91203, offset=240),
            timestamp.Timestamp(1402436408.912029, offset=240),
            timestamp.Timestamp('1402436408.91203', offset=240),
            timestamp.Timestamp('1402436408.91203_00000000', offset=240),
            timestamp.Timestamp('1402436408.91203_0000000f', offset=225),
            timestamp.Timestamp('1402436408.9120299999', offset=240),
            timestamp.Timestamp('1402436408.9120299999_00000000', offset=240),
            timestamp.Timestamp('1402436408.9120299999_00000010', offset=224),
        )
        for value in test_values:
            ts = timestamp.Timestamp(value)
            self.assertEqual(ts.internal, expected)
            # can compare with offset if the string is internalized
            self.assertEqual(ts, expected)
            # if comparison value only includes the normalized portion and the
            # timestamp includes an offset, it is considered greater
            normal = timestamp.Timestamp(expected).normal
            self.assertTrue(ts > normal,
                            '%r is not bigger than %r given %r' % (
                                ts, normal, value))
            self.assertTrue(ts > float(normal),
                            '%r is not bigger than %f given %r' % (
                                ts, float(normal), value))
def test_short_format_with_offset(self):
expected = '1402436408.91203_f0'
ts = timestamp.Timestamp(1402436408.91203, 0xf0)
self.assertEqual(expected, ts.short)
expected = '1402436408.91203'
ts = timestamp.Timestamp(1402436408.91203)
self.assertEqual(expected, ts.short)
def test_raw(self):
expected = 140243640891203
ts = timestamp.Timestamp(1402436408.91203)
self.assertEqual(expected, ts.raw)
# 'raw' does not include offset
ts = timestamp.Timestamp(1402436408.91203, 0xf0)
self.assertEqual(expected, ts.raw)
def test_delta(self):
def _assertWithinBounds(expected, timestamp):
tolerance = 0.00001
minimum = expected - tolerance
maximum = expected + tolerance
self.assertTrue(float(timestamp) > minimum)
self.assertTrue(float(timestamp) < maximum)
ts = timestamp.Timestamp(1402436408.91203, delta=100)
_assertWithinBounds(1402436408.91303, ts)
self.assertEqual(140243640891303, ts.raw)
ts = timestamp.Timestamp(1402436408.91203, delta=-100)
_assertWithinBounds(1402436408.91103, ts)
self.assertEqual(140243640891103, ts.raw)
ts = timestamp.Timestamp(1402436408.91203, delta=0)
_assertWithinBounds(1402436408.91203, ts)
self.assertEqual(140243640891203, ts.raw)
# delta is independent of offset
ts = timestamp.Timestamp(1402436408.91203, offset=42, delta=100)
self.assertEqual(140243640891303, ts.raw)
self.assertEqual(42, ts.offset)
# cannot go negative
self.assertRaises(ValueError, timestamp.Timestamp, 1402436408.91203,
delta=-140243640891203)
    def test_int(self):
        """int() truncates to whole seconds and drops any offset."""
        expected = 1402437965
        test_values = (
            '1402437965.91203',
            '1402437965.91203_00000000',
            '1402437965.912030000',
            '1402437965.912030000_0000000000000',
            '000001402437965.912030000',
            '000001402437965.912030000_0000000000',
            1402437965.91203,
            1402437965.9120300000000000,
            1402437965.912029,
            1402437965.912029999999999999,
            timestamp.Timestamp(1402437965.91203),
            timestamp.Timestamp(1402437965.91203, offset=0),
            timestamp.Timestamp(1402437965.91203, offset=500),
            timestamp.Timestamp(1402437965.912029),
            timestamp.Timestamp(1402437965.91202999999999999, offset=0),
            timestamp.Timestamp(1402437965.91202999999999999, offset=300),
            timestamp.Timestamp('1402437965.91203'),
            timestamp.Timestamp('1402437965.91203', offset=0),
            timestamp.Timestamp('1402437965.91203', offset=400),
            timestamp.Timestamp('1402437965.912029'),
            timestamp.Timestamp('1402437965.912029', offset=0),
            timestamp.Timestamp('1402437965.912029', offset=200),
            timestamp.Timestamp('1402437965.912029999999999'),
            timestamp.Timestamp('1402437965.912029999999999', offset=0),
            timestamp.Timestamp('1402437965.912029999999999', offset=100),
        )
        for value in test_values:
            ts = timestamp.Timestamp(value)
            self.assertEqual(int(ts), expected)
            # truncation means the full timestamp stays greater
            self.assertTrue(ts > expected)
    def test_float(self):
        """float() is within tolerance; comparisons work against floats
        and numeric strings too."""
        expected = 1402438115.91203
        test_values = (
            '1402438115.91203',
            '1402438115.91203_00000000',
            '1402438115.912030000',
            '1402438115.912030000_0000000000000',
            '000001402438115.912030000',
            '000001402438115.912030000_0000000000',
            1402438115.91203,
            1402438115.9120300000000000,
            1402438115.912029,
            1402438115.912029999999999999,
            timestamp.Timestamp(1402438115.91203),
            timestamp.Timestamp(1402438115.91203, offset=0),
            timestamp.Timestamp(1402438115.91203, offset=500),
            timestamp.Timestamp(1402438115.912029),
            timestamp.Timestamp(1402438115.91202999999999999, offset=0),
            timestamp.Timestamp(1402438115.91202999999999999, offset=300),
            timestamp.Timestamp('1402438115.91203'),
            timestamp.Timestamp('1402438115.91203', offset=0),
            timestamp.Timestamp('1402438115.91203', offset=400),
            timestamp.Timestamp('1402438115.912029'),
            timestamp.Timestamp('1402438115.912029', offset=0),
            timestamp.Timestamp('1402438115.912029', offset=200),
            timestamp.Timestamp('1402438115.912029999999999'),
            timestamp.Timestamp('1402438115.912029999999999', offset=0),
            timestamp.Timestamp('1402438115.912029999999999', offset=100),
        )
        tolerance = 0.00001
        minimum = expected - tolerance
        maximum = expected + tolerance
        for value in test_values:
            ts = timestamp.Timestamp(value)
            self.assertTrue(float(ts) > minimum,
                            '%f is not bigger than %f given %r' % (
                                ts, minimum, value))
            self.assertTrue(float(ts) < maximum,
                            '%f is not smaller than %f given %r' % (
                                ts, maximum, value))
            # direct comparison of timestamp works too
            self.assertTrue(ts > minimum,
                            '%s is not bigger than %f given %r' % (
                                ts.normal, minimum, value))
            self.assertTrue(ts < maximum,
                            '%s is not smaller than %f given %r' % (
                                ts.normal, maximum, value))
            # ... even against strings
            self.assertTrue(ts > '%f' % minimum,
                            '%s is not bigger than %s given %r' % (
                                ts.normal, minimum, value))
            self.assertTrue(ts < '%f' % maximum,
                            '%s is not smaller than %s given %r' % (
                                ts.normal, maximum, value))
def test_false(self):
self.assertFalse(timestamp.Timestamp(0))
self.assertFalse(timestamp.Timestamp(0, offset=0))
self.assertFalse(timestamp.Timestamp('0'))
self.assertFalse(timestamp.Timestamp('0', offset=0))
self.assertFalse(timestamp.Timestamp(0.0))
self.assertFalse(timestamp.Timestamp(0.0, offset=0))
self.assertFalse(timestamp.Timestamp('0.0'))
self.assertFalse(timestamp.Timestamp('0.0', offset=0))
self.assertFalse(timestamp.Timestamp(00000000.00000000))
self.assertFalse(timestamp.Timestamp(00000000.00000000, offset=0))
self.assertFalse(timestamp.Timestamp('00000000.00000000'))
self.assertFalse(timestamp.Timestamp('00000000.00000000', offset=0))
def test_true(self):
self.assertTrue(timestamp.Timestamp(1))
self.assertTrue(timestamp.Timestamp(1, offset=1))
self.assertTrue(timestamp.Timestamp(0, offset=1))
self.assertTrue(timestamp.Timestamp('1'))
self.assertTrue(timestamp.Timestamp('1', offset=1))
self.assertTrue(timestamp.Timestamp('0', offset=1))
self.assertTrue(timestamp.Timestamp(1.1))
self.assertTrue(timestamp.Timestamp(1.1, offset=1))
self.assertTrue(timestamp.Timestamp(0.0, offset=1))
self.assertTrue(timestamp.Timestamp('1.1'))
self.assertTrue(timestamp.Timestamp('1.1', offset=1))
self.assertTrue(timestamp.Timestamp('0.0', offset=1))
self.assertTrue(timestamp.Timestamp(11111111.11111111))
self.assertTrue(timestamp.Timestamp(11111111.11111111, offset=1))
self.assertTrue(timestamp.Timestamp(00000000.00000000, offset=1))
self.assertTrue(timestamp.Timestamp('11111111.11111111'))
self.assertTrue(timestamp.Timestamp('11111111.11111111', offset=1))
self.assertTrue(timestamp.Timestamp('00000000.00000000', offset=1))
    def test_greater_no_offset(self):
        """'now' compares greater than assorted older representations."""
        now = time.time()
        older = now - 1
        ts = timestamp.Timestamp(now)
        test_values = (
            0, '0', 0.0, '0.0', '0000.0000', '000.000_000',
            1, '1', 1.1, '1.1', '1111.1111', '111.111_111',
            1402443112.213252, '1402443112.213252', '1402443112.213252_ffff',
            older, '%f' % older, '%f_0000ffff' % older,
        )
        for value in test_values:
            other = timestamp.Timestamp(value)
            self.assertNotEqual(ts, other)  # sanity
            # greater-than holds against every representation of 'other'
            self.assertTrue(ts > value,
                            '%r is not greater than %r given %r' % (
                                ts, value, value))
            self.assertTrue(ts > other,
                            '%r is not greater than %r given %r' % (
                                ts, other, value))
            self.assertTrue(ts > other.normal,
                            '%r is not greater than %r given %r' % (
                                ts, other.normal, value))
            self.assertTrue(ts > other.internal,
                            '%r is not greater than %r given %r' % (
                                ts, other.internal, value))
            self.assertTrue(ts > float(other),
                            '%r is not greater than %r given %r' % (
                                ts, float(other), value))
            self.assertTrue(ts > int(other),
                            '%r is not greater than %r given %r' % (
                                ts, int(other), value))
    def _test_greater_with_offset(self, now, test_values):
        """Helper: Timestamp(now, offset) must compare greater than every
        value in test_values, across a range of offsets."""
        for offset in range(1, 1000, 100):
            ts = timestamp.Timestamp(now, offset=offset)
            for value in test_values:
                other = timestamp.Timestamp(value)
                self.assertNotEqual(ts, other)  # sanity
                self.assertTrue(ts > value,
                                '%r is not greater than %r given %r' % (
                                    ts, value, value))
                self.assertTrue(ts > other,
                                '%r is not greater than %r given %r' % (
                                    ts, other, value))
                self.assertTrue(ts > other.normal,
                                '%r is not greater than %r given %r' % (
                                    ts, other.normal, value))
                self.assertTrue(ts > other.internal,
                                '%r is not greater than %r given %r' % (
                                    ts, other.internal, value))
                self.assertTrue(ts > float(other),
                                '%r is not greater than %r given %r' % (
                                    ts, float(other), value))
                self.assertTrue(ts > int(other),
                                '%r is not greater than %r given %r' % (
                                    ts, int(other), value))
    def test_greater_with_offset(self):
        """Drive _test_greater_with_offset with live, fixed and
        float-round-tripped time values."""
        # Part 1: use the natural time of the Python. This is deliciously
        # unpredictable, but completely legitimate and realistic. Finds bugs!
        now = time.time()
        older = now - 1
        test_values = (
            0, '0', 0.0, '0.0', '0000.0000', '000.000_000',
            1, '1', 1.1, '1.1', '1111.1111', '111.111_111',
            1402443346.935174, '1402443346.93517', '1402443346.935169_ffff',
            older, now,
        )
        self._test_greater_with_offset(now, test_values)
        # Part 2: Same as above, but with fixed time values that reproduce
        # specific corner cases.
        now = 1519830570.6949348
        older = now - 1
        test_values = (
            0, '0', 0.0, '0.0', '0000.0000', '000.000_000',
            1, '1', 1.1, '1.1', '1111.1111', '111.111_111',
            1402443346.935174, '1402443346.93517', '1402443346.935169_ffff',
            older, now,
        )
        self._test_greater_with_offset(now, test_values)
        # Part 3: The '%f' problem. Timestamps cannot be converted to %f
        # strings, then back to timestamps, then compared with originals.
        # You can only "import" a floating point representation once.
        now = 1519830570.6949348
        now = float('%f' % now)
        older = now - 1
        test_values = (
            0, '0', 0.0, '0.0', '0000.0000', '000.000_000',
            1, '1', 1.1, '1.1', '1111.1111', '111.111_111',
            older, '%f' % older, '%f_0000ffff' % older,
            now, '%f' % now, '%s_00000000' % now,
        )
        self._test_greater_with_offset(now, test_values)
    def test_smaller_no_offset(self):
        """'now' compares smaller than assorted newer representations."""
        now = time.time()
        newer = now + 1
        ts = timestamp.Timestamp(now)
        test_values = (
            9999999999.99999, '9999999999.99999', '9999999999.99999_ffff',
            newer, '%f' % newer, '%f_0000ffff' % newer,
        )
        for value in test_values:
            other = timestamp.Timestamp(value)
            self.assertNotEqual(ts, other)  # sanity
            # smaller-than holds against every representation of 'other'
            self.assertTrue(ts < value,
                            '%r is not smaller than %r given %r' % (
                                ts, value, value))
            self.assertTrue(ts < other,
                            '%r is not smaller than %r given %r' % (
                                ts, other, value))
            self.assertTrue(ts < other.normal,
                            '%r is not smaller than %r given %r' % (
                                ts, other.normal, value))
            self.assertTrue(ts < other.internal,
                            '%r is not smaller than %r given %r' % (
                                ts, other.internal, value))
            self.assertTrue(ts < float(other),
                            '%r is not smaller than %r given %r' % (
                                ts, float(other), value))
            self.assertTrue(ts < int(other),
                            '%r is not smaller than %r given %r' % (
                                ts, int(other), value))
def test_smaller_with_offset(self):
now = time.time()
newer = now + 1
test_values = (
9999999999.99999, '9999999999.99999', '9999999999.99999_ffff',
newer, '%f' % newer, '%f_0000ffff' % newer,
)
for offset in range(1, 1000, 100):
ts = timestamp.Timestamp(now, offset=offset)
for value in test_values:
other = timestamp.Timestamp(value)
self.assertNotEqual(ts, other) # sanity
self.assertTrue(ts < value,
'%r is not smaller than %r given %r' % (
ts, value, value))
self.assertTrue(ts < other,
'%r is not smaller than %r given %r' % (
ts, other, value))
self.assertTrue(ts < other.normal,
'%r is not smaller than %r given %r' % (
ts, other.normal, value))
self.assertTrue(ts < other.internal,
'%r is not smaller than %r given %r' % (
ts, other.internal, value))
self.assertTrue(ts < float(other),
'%r is not smaller than %r given %r' % (
ts, float(other), value))
self.assertTrue(ts < int(other),
'%r is not smaller than %r given %r' % (
ts, int(other), value))
def test_cmp_with_none(self):
self.assertGreater(timestamp.Timestamp(0), None)
self.assertGreater(timestamp.Timestamp(1.0), None)
self.assertGreater(timestamp.Timestamp(1.0, 42), None)
    def test_ordering(self):
        """Timestamps sort chronologically, with offsets breaking ties.

        ``given`` is a shuffled set of internal-form strings (base times
        with offsets 1, 2, 4, 10, plus offset-less entries); ``expected``
        is the same set in sorted order, without insignificant offsets.
        """
        given = [
            '1402444820.62590_000000000000000a',
            '1402444820.62589_0000000000000001',
            '1402444821.52589_0000000000000004',
            '1402444920.62589_0000000000000004',
            '1402444821.62589_000000000000000a',
            '1402444821.72589_000000000000000a',
            '1402444920.62589_0000000000000002',
            '1402444820.62589_0000000000000002',
            '1402444820.62589_000000000000000a',
            '1402444820.62590_0000000000000004',
            '1402444920.62589_000000000000000a',
            '1402444820.62590_0000000000000002',
            '1402444821.52589_0000000000000002',
            '1402444821.52589_0000000000000000',
            '1402444920.62589',
            '1402444821.62589_0000000000000004',
            '1402444821.72589_0000000000000001',
            '1402444820.62590',
            '1402444820.62590_0000000000000001',
            '1402444820.62589_0000000000000004',
            '1402444821.72589_0000000000000000',
            '1402444821.52589_000000000000000a',
            '1402444821.72589_0000000000000004',
            '1402444821.62589',
            '1402444821.52589_0000000000000001',
            '1402444821.62589_0000000000000001',
            '1402444821.62589_0000000000000002',
            '1402444821.72589_0000000000000002',
            '1402444820.62589',
            '1402444920.62589_0000000000000001']
        expected = [
            '1402444820.62589',
            '1402444820.62589_0000000000000001',
            '1402444820.62589_0000000000000002',
            '1402444820.62589_0000000000000004',
            '1402444820.62589_000000000000000a',
            '1402444820.62590',
            '1402444820.62590_0000000000000001',
            '1402444820.62590_0000000000000002',
            '1402444820.62590_0000000000000004',
            '1402444820.62590_000000000000000a',
            '1402444821.52589',
            '1402444821.52589_0000000000000001',
            '1402444821.52589_0000000000000002',
            '1402444821.52589_0000000000000004',
            '1402444821.52589_000000000000000a',
            '1402444821.62589',
            '1402444821.62589_0000000000000001',
            '1402444821.62589_0000000000000002',
            '1402444821.62589_0000000000000004',
            '1402444821.62589_000000000000000a',
            '1402444821.72589',
            '1402444821.72589_0000000000000001',
            '1402444821.72589_0000000000000002',
            '1402444821.72589_0000000000000004',
            '1402444821.72589_000000000000000a',
            '1402444920.62589',
            '1402444920.62589_0000000000000001',
            '1402444920.62589_0000000000000002',
            '1402444920.62589_0000000000000004',
            '1402444920.62589_000000000000000a',
        ]
        # less visual version
        """
        now = time.time()
        given = [
            timestamp.Timestamp(now + i, offset=offset).internal
            for i in (0, 0.00001, 0.9, 1.0, 1.1, 100.0)
            for offset in (0, 1, 2, 4, 10)
        ]
        expected = [t for t in given]
        random.shuffle(given)
        """
        self.assertEqual(len(given), len(expected))  # sanity
        timestamps = [timestamp.Timestamp(t) for t in given]
        # our expected values don't include insignificant offsets
        with mock.patch('swift.common.utils.timestamp.FORCE_INTERNAL',
                        new=False):
            self.assertEqual(
                [t.internal for t in sorted(timestamps)], expected)
            # string sorting works as well
            self.assertEqual(
                sorted([t.internal for t in timestamps]), expected)
def test_hashable(self):
ts_0 = timestamp.Timestamp('1402444821.72589')
ts_0_also = timestamp.Timestamp('1402444821.72589')
self.assertEqual(ts_0, ts_0_also) # sanity
self.assertEqual(hash(ts_0), hash(ts_0_also))
d = {ts_0: 'whatever'}
self.assertIn(ts_0, d) # sanity
self.assertIn(ts_0_also, d)
def test_out_of_range_comparisons(self):
now = timestamp.Timestamp.now()
def check_is_later(val):
self.assertTrue(now != val)
self.assertFalse(now == val)
self.assertTrue(now <= val)
self.assertTrue(now < val)
self.assertTrue(val > now)
self.assertTrue(val >= now)
check_is_later(1e30)
check_is_later(1579753284000) # someone gave us ms instead of s!
check_is_later('1579753284000')
check_is_later(b'1e15')
check_is_later(u'1.e+10_f')
def check_is_earlier(val):
self.assertTrue(now != val)
self.assertFalse(now == val)
self.assertTrue(now >= val)
self.assertTrue(now > val)
self.assertTrue(val < now)
self.assertTrue(val <= now)
check_is_earlier(-1)
check_is_earlier(-0.1)
check_is_earlier('-9999999')
check_is_earlier(b'-9999.999')
check_is_earlier(u'-1234_5678')
    def test_inversion(self):
        """~ts mirrors a timestamp across the representable range, so that
        inverted timestamps sort in the reverse order of the originals."""
        ts = timestamp.Timestamp(0)
        self.assertIsInstance(~ts, timestamp.Timestamp)
        self.assertEqual((~ts).internal, '9999999999.99999')

        # inversion is a digit-wise complement of the internal form
        ts = timestamp.Timestamp(123456.789)
        self.assertIsInstance(~ts, timestamp.Timestamp)
        self.assertEqual(ts.internal, '0000123456.78900')
        self.assertEqual((~ts).internal, '9999876543.21099')

        # sorting inverted timestamps reverses the original order
        timestamps = sorted(timestamp.Timestamp(random.random() * 1e10)
                            for _ in range(20))
        self.assertEqual([x.internal for x in timestamps],
                         sorted(x.internal for x in timestamps))
        self.assertEqual([(~x).internal for x in reversed(timestamps)],
                         sorted((~x).internal for x in timestamps))

        ts = timestamp.Timestamp.now()
        self.assertGreater(~ts, ts)  # NB: will break around 2128

        # timestamps carrying an offset refuse to be inverted
        ts = timestamp.Timestamp.now(offset=1)
        with self.assertRaises(ValueError) as caught:
            ~ts
        self.assertEqual(caught.exception.args[0],
                         'Cannot invert timestamps with offsets')
class TestTimestampEncoding(unittest.TestCase):
    """Tests for encode_timestamps()/decode_timestamps().

    The fixtures show a triple of timestamps (t_data, t_ctype, t_meta)
    packed into a single string: the first timestamp's internal form,
    followed by signed hex deltas for the other two.  ``explicit``
    controls whether zero deltas are spelled out ('+0') or elided.
    """

    def setUp(self):
        # fixture timestamps; t3 carries offset 24 (the '_18' suffix) to
        # prove offsets survive a round trip
        t0 = timestamp.Timestamp(0.0)
        t1 = timestamp.Timestamp(997.9996)
        t2 = timestamp.Timestamp(999)
        t3 = timestamp.Timestamp(1000, 24)
        t4 = timestamp.Timestamp(1001)
        t5 = timestamp.Timestamp(1002.00040)

        # encodings that are expected when explicit = False
        self.non_explicit_encodings = (
            ('0000001000.00000_18', (t3, t3, t3)),
            ('0000001000.00000_18', (t3, t3, None)),
        )
        # mappings that are expected when explicit = True
        self.explicit_encodings = (
            ('0000001000.00000_18+0+0', (t3, t3, t3)),
            ('0000001000.00000_18+0', (t3, t3, None)),
        )
        # mappings that are expected when explicit = True or False
        self.encodings = (
            ('0000001000.00000_18+0+186a0', (t3, t3, t4)),
            ('0000001000.00000_18+186a0+186c8', (t3, t4, t5)),
            ('0000001000.00000_18-186a0+0', (t3, t2, t2)),
            ('0000001000.00000_18+0-186a0', (t3, t3, t2)),
            ('0000001000.00000_18-186a0-186c8', (t3, t2, t1)),
            ('0000001000.00000_18', (t3, None, None)),
            ('0000001000.00000_18+186a0', (t3, t4, None)),
            ('0000001000.00000_18-186a0', (t3, t2, None)),
            ('0000001000.00000_18', (t3, None, t1)),
            ('0000001000.00000_18-5f5e100', (t3, t0, None)),
            ('0000001000.00000_18+0-5f5e100', (t3, t3, t0)),
            ('0000001000.00000_18-5f5e100+5f45a60', (t3, t0, t2)),
        )
        # decodings that are expected when explicit = False
        self.non_explicit_decodings = (
            ('0000001000.00000_18', (t3, t3, t3)),
            ('0000001000.00000_18+186a0', (t3, t4, t4)),
            ('0000001000.00000_18-186a0', (t3, t2, t2)),
            ('0000001000.00000_18+186a0', (t3, t4, t4)),
            ('0000001000.00000_18-186a0', (t3, t2, t2)),
            ('0000001000.00000_18-5f5e100', (t3, t0, t0)),
        )
        # decodings that are expected when explicit = True
        self.explicit_decodings = (
            ('0000001000.00000_18+0+0', (t3, t3, t3)),
            ('0000001000.00000_18+0', (t3, t3, None)),
            ('0000001000.00000_18', (t3, None, None)),
            ('0000001000.00000_18+186a0', (t3, t4, None)),
            ('0000001000.00000_18-186a0', (t3, t2, None)),
            ('0000001000.00000_18-5f5e100', (t3, t0, None)),
        )
        # decodings that are expected when explicit = True or False
        self.decodings = (
            ('0000001000.00000_18+0+186a0', (t3, t3, t4)),
            ('0000001000.00000_18+186a0+186c8', (t3, t4, t5)),
            ('0000001000.00000_18-186a0+0', (t3, t2, t2)),
            ('0000001000.00000_18+0-186a0', (t3, t3, t2)),
            ('0000001000.00000_18-186a0-186c8', (t3, t2, t1)),
            ('0000001000.00000_18-5f5e100+5f45a60', (t3, t0, t2)),
        )

    def _assertEqual(self, expected, actual, test):
        # assertEqual with a message that names the offending fixture
        self.assertEqual(expected, actual,
                         'Got %s but expected %s for parameters %s'
                         % (actual, expected, test))

    def test_encoding(self):
        """Each fixture triple encodes to its expected string."""
        for test in self.explicit_encodings:
            actual = timestamp.encode_timestamps(test[1][0], test[1][1],
                                                 test[1][2], True)
            self._assertEqual(test[0], actual, test[1])
        for test in self.non_explicit_encodings:
            actual = timestamp.encode_timestamps(test[1][0], test[1][1],
                                                 test[1][2], False)
            self._assertEqual(test[0], actual, test[1])
        # these fixtures encode the same way regardless of explicitness
        for explicit in (True, False):
            for test in self.encodings:
                actual = timestamp.encode_timestamps(test[1][0], test[1][1],
                                                     test[1][2], explicit)
                self._assertEqual(test[0], actual, test[1])

    def test_decoding(self):
        """Each encoded string decodes to its expected timestamp triple."""
        for test in self.explicit_decodings:
            actual = timestamp.decode_timestamps(test[0], True)
            self._assertEqual(test[1], actual, test[0])
        for test in self.non_explicit_decodings:
            actual = timestamp.decode_timestamps(test[0], False)
            self._assertEqual(test[1], actual, test[0])
        # these fixtures decode the same way regardless of explicitness
        for explicit in (True, False):
            for test in self.decodings:
                actual = timestamp.decode_timestamps(test[0], explicit)
                self._assertEqual(test[1], actual, test[0])
| swift-master | test/unit/common/utils/test_timestamp.py |
swift-master | test/unit/common/utils/__init__.py |
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ctypes
from mock import patch
import socket
import unittest
# Continue importing from utils, as 3rd parties may depend on those imports
from swift.common import utils
from swift.common.utils import ipaddrs as utils_ipaddrs
class TestIsValidIP(unittest.TestCase):
    """Exercise utils.is_valid_ip / is_valid_ipv4 / is_valid_ipv6 against
    shared vectors of good IPv4, good IPv6, and malformed addresses."""

    # well-formed IPv4 addresses
    _GOOD_V4 = ("127.0.0.1", "10.0.0.1")
    # well-formed IPv6 addresses in various spellings (full, zero-elided,
    # '::'-compressed, IPv4-mapped tails, and bare '::'/'::1')
    _GOOD_V6 = (
        "fe80:0000:0000:0000:0204:61ff:fe9d:f156",
        "fe80:0:0:0:204:61ff:fe9d:f156",
        "fe80::204:61ff:fe9d:f156",
        "fe80:0000:0000:0000:0204:61ff:254.157.241.86",
        "fe80:0:0:0:0204:61ff:254.157.241.86",
        "fe80::204:61ff:254.157.241.86",
        "fe80::",
        "::1",
    )
    # malformed addresses: too few groups, and '::' plus too many groups
    _BAD = ("3ffe:0b00:0000:0001:0000:0000:000a", "1:2:3:4:5:6::7:8")

    def test_is_valid_ip(self):
        # accepts both families, rejects malformed input
        for addr in self._GOOD_V4 + self._GOOD_V6:
            self.assertTrue(utils.is_valid_ip(addr))
        for addr in self._BAD:
            self.assertFalse(utils.is_valid_ip(addr))

    def test_is_valid_ipv4(self):
        # accepts only IPv4; IPv6 and malformed input are rejected
        for addr in self._GOOD_V4:
            self.assertTrue(utils.is_valid_ipv4(addr))
        for addr in self._GOOD_V6 + self._BAD:
            self.assertFalse(utils.is_valid_ipv4(addr))

    def test_is_valid_ipv6(self):
        # accepts only IPv6; IPv4 and malformed input are rejected
        for addr in self._GOOD_V4:
            self.assertFalse(utils.is_valid_ipv6(addr))
        for addr in self._GOOD_V6:
            self.assertTrue(utils.is_valid_ipv6(addr))
        for addr in self._BAD:
            self.assertFalse(utils.is_valid_ipv6(addr))
class TestExpandIPv6(unittest.TestCase):
    """utils.expand_ipv6 should canonicalize equivalent IPv6 spellings."""

    def test_expand_ipv6(self):
        canonical = "fe80::204:61ff:fe9d:f156"
        # fully written out, '::'-elided, and zero-trimmed spellings all
        # normalize to the same compressed form
        for variant in ("fe80:0000:0000:0000:0204:61ff:fe9d:f156",
                        "fe80:0000:0000::0204:61ff:fe9d:f156",
                        "fe80:0:00:000:0204:61ff:fe9d:f156"):
            self.assertEqual(canonical, utils.expand_ipv6(variant))
class TestWhatAreMyIPs(unittest.TestCase):
    """Tests for utils.whataremyips() interface-address discovery, covering
    the real system path, the getifaddrs (ctypes) path, and the netifaces
    fallback path."""

    def test_whataremyips(self):
        # at minimum, loopback should always be reported
        myips = utils.whataremyips()
        self.assertTrue(len(myips) > 1)
        self.assertIn('127.0.0.1', myips)

    def test_whataremyips_bind_to_all(self):
        # any "bind to everything" spelling returns all local IPs
        for any_addr in ('0.0.0.0', '0000:0000:0000:0000:0000:0000:0000:0000',
                         '::0', '::0000', '::',
                         # Wacky parse-error input produces all IPs
                         'I am a bear'):
            myips = utils.whataremyips(any_addr)
            self.assertTrue(len(myips) > 1)
            self.assertIn('127.0.0.1', myips)

    def test_whataremyips_bind_ip_specific(self):
        # a specific bind IP is returned as-is, no interface discovery
        self.assertEqual(['1.2.3.4'], utils.whataremyips('1.2.3.4'))

    def test_whataremyips_getifaddrs(self):
        # fake getifaddrs() fills the out-pointer with a linked list of
        # ifaddrs structs: IPv4 and IPv6 entries, a link-layer entry that
        # must be skipped, and an entry with no address at all
        def mock_getifaddrs(ptr):
            addrs = [
                utils_ipaddrs.ifaddrs(None, b'lo', 0, ctypes.pointer(
                    utils_ipaddrs.sockaddr_in4(
                        sin_family=socket.AF_INET,
                        sin_addr=(127, 0, 0, 1)))),
                utils_ipaddrs.ifaddrs(None, b'lo', 0, ctypes.cast(
                    ctypes.pointer(utils_ipaddrs.sockaddr_in6(
                        sin6_family=socket.AF_INET6,
                        sin6_addr=(
                            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1))),
                    ctypes.POINTER(utils_ipaddrs.sockaddr_in4))),
                utils_ipaddrs.ifaddrs(None, b'eth0', 0, ctypes.pointer(
                    utils_ipaddrs.sockaddr_in4(
                        sin_family=socket.AF_INET,
                        sin_addr=(192, 168, 50, 63)))),
                utils_ipaddrs.ifaddrs(None, b'eth0', 0, ctypes.cast(
                    ctypes.pointer(utils_ipaddrs.sockaddr_in6(
                        sin6_family=socket.AF_INET6,
                        sin6_addr=(
                            254, 128, 0, 0, 0, 0, 0, 0,
                            106, 191, 199, 168, 109, 243, 41, 35))),
                    ctypes.POINTER(utils_ipaddrs.sockaddr_in4))),
                # MAC address will be ignored
                utils_ipaddrs.ifaddrs(None, b'eth0', 0, ctypes.cast(
                    ctypes.pointer(utils_ipaddrs.sockaddr_in6(
                        sin6_family=getattr(socket, 'AF_PACKET', 17),
                        sin6_port=0,
                        sin6_flowinfo=2,
                        sin6_addr=(
                            1, 0, 0, 6, 172, 116, 177, 85,
                            64, 146, 0, 0, 0, 0, 0, 0))),
                    ctypes.POINTER(utils_ipaddrs.sockaddr_in4))),
                # Seen in the wild: no addresses at all
                utils_ipaddrs.ifaddrs(None, b'cscotun0', 69841),
            ]
            # chain the structs via ifa_next, then hand back the head
            # through the output pointer argument
            for cur, nxt in zip(addrs, addrs[1:]):
                cur.ifa_next = ctypes.pointer(nxt)
            ptr._obj.contents = addrs[0]

        with patch.object(utils_ipaddrs, 'getifaddrs', mock_getifaddrs), \
                patch('swift.common.utils.ipaddrs.freeifaddrs') as mock_free:
            self.assertEqual(utils.whataremyips(), [
                '127.0.0.1',
                '::1',
                '192.168.50.63',
                'fe80::6abf:c7a8:6df3:2923',
            ])
            # the list handed out by getifaddrs must be freed exactly once
            self.assertEqual(len(mock_free.mock_calls), 1)

    def test_whataremyips_netifaces_error(self):
        # if netifaces blows up, report no addresses rather than raising
        class FakeNetifaces(object):
            @staticmethod
            def interfaces():
                return ['eth0']

            @staticmethod
            def ifaddresses(interface):
                raise ValueError

        with patch.object(utils_ipaddrs, 'getifaddrs', None), \
                patch.object(utils_ipaddrs, 'netifaces', FakeNetifaces):
            self.assertEqual(utils.whataremyips(), [])

    def test_whataremyips_netifaces_ipv6(self):
        # scoped IPv6 addresses ('addr%iface') come back without the scope
        test_ipv6_address = '2001:6b0:dead:beef:2::32'
        test_interface = 'eth0'

        class FakeNetifaces(object):
            AF_INET = int(socket.AF_INET)
            AF_INET6 = int(socket.AF_INET6)

            @staticmethod
            def interfaces():
                return ['eth0']

            @staticmethod
            def ifaddresses(interface):
                return {int(socket.AF_INET6): [
                    {'netmask': 'ffff:ffff:ffff:ffff::',
                     'addr': '%s%%%s' % (test_ipv6_address, test_interface)}]}

        with patch.object(utils_ipaddrs, 'getifaddrs', None), \
                patch.object(utils_ipaddrs, 'netifaces', FakeNetifaces):
            myips = utils.whataremyips()
            self.assertEqual(len(myips), 1)
            self.assertEqual(myips[0], test_ipv6_address)
| swift-master | test/unit/common/utils/test_ipaddrs.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from collections import defaultdict
from swift.common import exceptions
from swift.common import ring
from swift.common.ring.utils import (tiers_for_dev, build_tier_tree,
validate_and_normalize_ip,
validate_and_normalize_address,
is_valid_hostname,
is_local_device, parse_search_value,
parse_search_values_from_opts,
parse_change_values_from_opts,
validate_args, parse_args,
parse_builder_ring_filename_args,
build_dev_from_opts, dispersion_report,
parse_address, get_tier_name, pretty_dev,
validate_replicas_by_tier)
class TestUtils(unittest.TestCase):
def setUp(self):
self.test_dev = {'region': 1, 'zone': 1, 'ip': '192.168.1.1',
'port': '6200', 'id': 0}
def get_test_devs():
dev0 = {'region': 1, 'zone': 1, 'ip': '192.168.1.1',
'port': '6200', 'id': 0}
dev1 = {'region': 1, 'zone': 1, 'ip': '192.168.1.1',
'port': '6200', 'id': 1}
dev2 = {'region': 1, 'zone': 1, 'ip': '192.168.1.1',
'port': '6200', 'id': 2}
dev3 = {'region': 1, 'zone': 1, 'ip': '192.168.1.2',
'port': '6200', 'id': 3}
dev4 = {'region': 1, 'zone': 1, 'ip': '192.168.1.2',
'port': '6200', 'id': 4}
dev5 = {'region': 1, 'zone': 1, 'ip': '192.168.1.2',
'port': '6200', 'id': 5}
dev6 = {'region': 1, 'zone': 2, 'ip': '192.168.2.1',
'port': '6200', 'id': 6}
dev7 = {'region': 1, 'zone': 2, 'ip': '192.168.2.1',
'port': '6200', 'id': 7}
dev8 = {'region': 1, 'zone': 2, 'ip': '192.168.2.1',
'port': '6200', 'id': 8}
dev9 = {'region': 1, 'zone': 2, 'ip': '192.168.2.2',
'port': '6200', 'id': 9}
dev10 = {'region': 1, 'zone': 2, 'ip': '192.168.2.2',
'port': '6200', 'id': 10}
dev11 = {'region': 1, 'zone': 2, 'ip': '192.168.2.2',
'port': '6200', 'id': 11}
return [dev0, dev1, dev2, dev3, dev4, dev5,
dev6, dev7, dev8, dev9, dev10, dev11]
self.test_devs = get_test_devs()
def test_tiers_for_dev(self):
self.assertEqual(
tiers_for_dev(self.test_dev),
((1,),
(1, 1),
(1, 1, '192.168.1.1'),
(1, 1, '192.168.1.1', 0)))
def test_build_tier_tree(self):
ret = build_tier_tree(self.test_devs)
self.assertEqual(len(ret), 8)
self.assertEqual(ret[()], set([(1,)]))
self.assertEqual(ret[(1,)], set([(1, 1), (1, 2)]))
self.assertEqual(ret[(1, 1)],
set([(1, 1, '192.168.1.2'),
(1, 1, '192.168.1.1')]))
self.assertEqual(ret[(1, 2)],
set([(1, 2, '192.168.2.2'),
(1, 2, '192.168.2.1')]))
self.assertEqual(ret[(1, 1, '192.168.1.1')],
set([(1, 1, '192.168.1.1', 0),
(1, 1, '192.168.1.1', 1),
(1, 1, '192.168.1.1', 2)]))
self.assertEqual(ret[(1, 1, '192.168.1.2')],
set([(1, 1, '192.168.1.2', 3),
(1, 1, '192.168.1.2', 4),
(1, 1, '192.168.1.2', 5)]))
self.assertEqual(ret[(1, 2, '192.168.2.1')],
set([(1, 2, '192.168.2.1', 6),
(1, 2, '192.168.2.1', 7),
(1, 2, '192.168.2.1', 8)]))
self.assertEqual(ret[(1, 2, '192.168.2.2')],
set([(1, 2, '192.168.2.2', 9),
(1, 2, '192.168.2.2', 10),
(1, 2, '192.168.2.2', 11)]))
def test_is_valid_hostname(self):
self.assertTrue(is_valid_hostname("local"))
self.assertTrue(is_valid_hostname("test.test.com"))
hostname = "test." * 51
self.assertTrue(is_valid_hostname(hostname))
hostname = hostname.rstrip('.')
self.assertTrue(is_valid_hostname(hostname))
hostname = hostname + "00"
self.assertFalse(is_valid_hostname(hostname))
self.assertFalse(is_valid_hostname("$blah#"))
def test_is_local_device(self):
# localhost shows up in whataremyips() output as "::1" for IPv6
my_ips = ["127.0.0.1", "::1"]
my_port = 6200
self.assertTrue(is_local_device(my_ips, my_port,
"127.0.0.1", my_port))
self.assertTrue(is_local_device(my_ips, my_port,
"::1", my_port))
self.assertTrue(is_local_device(
my_ips, my_port,
"0000:0000:0000:0000:0000:0000:0000:0001", my_port))
self.assertTrue(is_local_device(my_ips, my_port,
"localhost", my_port))
self.assertFalse(is_local_device(my_ips, my_port,
"localhost", my_port + 1))
self.assertFalse(is_local_device(my_ips, my_port,
"127.0.0.2", my_port))
# for those that don't have a local port
self.assertTrue(is_local_device(my_ips, None,
my_ips[0], None))
# When servers_per_port is active, the "my_port" passed in is None
# which means "don't include port in the determination of locality
# because it's not reliable in this deployment scenario"
self.assertTrue(is_local_device(my_ips, None,
"127.0.0.1", 6666))
self.assertTrue(is_local_device(my_ips, None,
"::1", 6666))
self.assertTrue(is_local_device(
my_ips, None,
"0000:0000:0000:0000:0000:0000:0000:0001", 6666))
self.assertTrue(is_local_device(my_ips, None,
"localhost", 6666))
self.assertFalse(is_local_device(my_ips, None,
"127.0.0.2", my_port))
def test_validate_and_normalize_ip(self):
ipv4 = "10.0.0.1"
self.assertEqual(ipv4, validate_and_normalize_ip(ipv4))
ipv6 = "fe80::204:61ff:fe9d:f156"
self.assertEqual(ipv6, validate_and_normalize_ip(ipv6.upper()))
hostname = "test.test.com"
self.assertRaises(ValueError,
validate_and_normalize_ip, hostname)
hostname = "$blah#"
self.assertRaises(ValueError,
validate_and_normalize_ip, hostname)
def test_validate_and_normalize_address(self):
ipv4 = "10.0.0.1"
self.assertEqual(ipv4, validate_and_normalize_address(ipv4))
ipv6 = "fe80::204:61ff:fe9d:f156"
self.assertEqual(ipv6, validate_and_normalize_address(ipv6.upper()))
hostname = "test.test.com"
self.assertEqual(hostname,
validate_and_normalize_address(hostname.upper()))
hostname = "$blah#"
self.assertRaises(ValueError,
validate_and_normalize_address, hostname)
def test_validate_replicas_by_tier_close(self):
one_ip_six_devices = \
defaultdict(float,
{(): 4.0,
(0,): 4.0,
(0, 0): 4.0,
(0, 0, '127.0.0.1'): 4.0,
(0, 0, '127.0.0.1', 0): 0.6666666670,
(0, 0, '127.0.0.1', 1): 0.6666666668,
(0, 0, '127.0.0.1', 2): 0.6666666667,
(0, 0, '127.0.0.1', 3): 0.6666666666,
(0, 0, '127.0.0.1', 4): 0.6666666665,
(0, 0, '127.0.0.1', 5): 0.6666666664,
})
try:
validate_replicas_by_tier(4, one_ip_six_devices)
except Exception as e:
self.fail('one_ip_six_devices is invalid for %s' % e)
def test_validate_replicas_by_tier_exact(self):
three_regions_three_devices = \
defaultdict(float,
{(): 3.0,
(0,): 1.0,
(0, 0): 1.0,
(0, 0, '127.0.0.1'): 1.0,
(0, 0, '127.0.0.1', 0): 1.0,
(1,): 1.0,
(1, 1): 1.0,
(1, 1, '127.0.0.1'): 1.0,
(1, 1, '127.0.0.1', 1): 1.0,
(2,): 1.0,
(2, 2): 1.0,
(2, 2, '127.0.0.1'): 1.0,
(2, 2, '127.0.0.1', 2): 1.0,
})
try:
validate_replicas_by_tier(3, three_regions_three_devices)
except Exception as e:
self.fail('three_regions_three_devices is invalid for %s' % e)
def test_validate_replicas_by_tier_errors(self):
pseudo_replicas = \
defaultdict(float,
{(): 3.0,
(0,): 1.0,
(0, 0): 1.0,
(0, 0, '127.0.0.1'): 1.0,
(0, 0, '127.0.0.1', 0): 1.0,
(1,): 1.0,
(1, 1): 1.0,
(1, 1, '127.0.0.1'): 1.0,
(1, 1, '127.0.0.1', 1): 1.0,
(2,): 1.0,
(2, 2): 1.0,
(2, 2, '127.0.0.1'): 1.0,
(2, 2, '127.0.0.1', 2): 1.0,
})
def do_test(bad_tier_key, bad_tier_name):
# invalidate a copy of pseudo_replicas at given key and check for
# an exception to be raised
test_replicas = dict(pseudo_replicas)
test_replicas[bad_tier_key] += 0.1 # <- this is not fair!
with self.assertRaises(exceptions.RingValidationError) as ctx:
validate_replicas_by_tier(3, test_replicas)
self.assertEqual(
'3.1 != 3 at tier %s' % bad_tier_name, str(ctx.exception))
do_test((), 'cluster')
do_test((1,), 'regions')
do_test((0, 0), 'zones')
do_test((2, 2, '127.0.0.1'), 'servers')
do_test((1, 1, '127.0.0.1', 1), 'devices')
def test_parse_search_value(self):
res = parse_search_value('r0')
self.assertEqual(res, {'region': 0})
res = parse_search_value('r1')
self.assertEqual(res, {'region': 1})
res = parse_search_value('r1z2')
self.assertEqual(res, {'region': 1, 'zone': 2})
res = parse_search_value('d1')
self.assertEqual(res, {'id': 1})
res = parse_search_value('z1')
self.assertEqual(res, {'zone': 1})
res = parse_search_value('-127.0.0.1')
self.assertEqual(res, {'ip': '127.0.0.1'})
res = parse_search_value('127.0.0.1')
self.assertEqual(res, {'ip': '127.0.0.1'})
res = parse_search_value('-[127.0.0.1]:10001')
self.assertEqual(res, {'ip': '127.0.0.1', 'port': 10001})
res = parse_search_value(':10001')
self.assertEqual(res, {'port': 10001})
res = parse_search_value('R127.0.0.10')
self.assertEqual(res, {'replication_ip': '127.0.0.10'})
res = parse_search_value('R[127.0.0.10]:20000')
self.assertEqual(res, {'replication_ip': '127.0.0.10',
'replication_port': 20000})
res = parse_search_value('R:20000')
self.assertEqual(res, {'replication_port': 20000})
res = parse_search_value('/sdb1')
self.assertEqual(res, {'device': 'sdb1'})
res = parse_search_value('_meta1')
self.assertEqual(res, {'meta': 'meta1'})
self.assertRaises(ValueError, parse_search_value, 'OMGPONIES')
    def test_parse_search_values_from_opts(self):
        """Only the search-side options (not the --change-* options) are
        collected into the search-values dict, for hostnames, bare IPs,
        and bracketed IPs alike."""
        # case 1: hostnames pass through verbatim
        argv = \
            ["--id", "1", "--region", "2", "--zone", "3",
             "--ip", "test.test.com",
             "--port", "6200",
             "--replication-ip", "r.test.com",
             "--replication-port", "7000",
             "--device", "sda3",
             "--meta", "some meta data",
             "--weight", "3.14159265359",
             "--change-ip", "change.test.test.com",
             "--change-port", "6201",
             "--change-replication-ip", "change.r.test.com",
             "--change-replication-port", "7001",
             "--change-device", "sdb3",
             "--change-meta", "some meta data for change"]
        expected = {
            'id': 1,
            'region': 2,
            'zone': 3,
            'ip': "test.test.com",
            'port': 6200,
            'replication_ip': "r.test.com",
            'replication_port': 7000,
            'device': "sda3",
            'meta': "some meta data",
            'weight': 3.14159265359,
        }
        new_cmd_format, opts, args = validate_args(argv)
        search_values = parse_search_values_from_opts(opts)
        self.assertEqual(search_values, expected)

        # case 2: plain IP addresses
        argv = \
            ["--id", "1", "--region", "2", "--zone", "3",
             "--ip", "127.0.0.1",
             "--port", "6200",
             "--replication-ip", "127.0.0.10",
             "--replication-port", "7000",
             "--device", "sda3",
             "--meta", "some meta data",
             "--weight", "3.14159265359",
             "--change-ip", "127.0.0.2",
             "--change-port", "6201",
             "--change-replication-ip", "127.0.0.20",
             "--change-replication-port", "7001",
             "--change-device", "sdb3",
             "--change-meta", "some meta data for change"]
        expected = {
            'id': 1,
            'region': 2,
            'zone': 3,
            'ip': "127.0.0.1",
            'port': 6200,
            'replication_ip': "127.0.0.10",
            'replication_port': 7000,
            'device': "sda3",
            'meta': "some meta data",
            'weight': 3.14159265359,
        }
        new_cmd_format, opts, args = validate_args(argv)
        search_values = parse_search_values_from_opts(opts)
        self.assertEqual(search_values, expected)

        # case 3: bracketed IPs normalize to the same expected values
        argv = \
            ["--id", "1", "--region", "2", "--zone", "3",
             "--ip", "[127.0.0.1]",
             "--port", "6200",
             "--replication-ip", "[127.0.0.10]",
             "--replication-port", "7000",
             "--device", "sda3",
             "--meta", "some meta data",
             "--weight", "3.14159265359",
             "--change-ip", "[127.0.0.2]",
             "--change-port", "6201",
             "--change-replication-ip", "[127.0.0.20]",
             "--change-replication-port", "7001",
             "--change-device", "sdb3",
             "--change-meta", "some meta data for change"]
        new_cmd_format, opts, args = validate_args(argv)
        search_values = parse_search_values_from_opts(opts)
        self.assertEqual(search_values, expected)
    def test_parse_change_values_from_opts(self):
        """Only the --change-* options are collected into the change-values
        dict, for hostnames, bare IPs, and bracketed IPs alike."""
        # case 1: hostnames pass through verbatim
        argv = \
            ["--id", "1", "--region", "2", "--zone", "3",
             "--ip", "test.test.com",
             "--port", "6200",
             "--replication-ip", "r.test.com",
             "--replication-port", "7000",
             "--device", "sda3",
             "--meta", "some meta data",
             "--weight", "3.14159265359",
             "--change-ip", "change.test.test.com",
             "--change-port", "6201",
             "--change-replication-ip", "change.r.test.com",
             "--change-replication-port", "7001",
             "--change-device", "sdb3",
             "--change-meta", "some meta data for change"]
        expected = {
            'ip': "change.test.test.com",
            'port': 6201,
            'replication_ip': "change.r.test.com",
            'replication_port': 7001,
            'device': "sdb3",
            'meta': "some meta data for change",
        }
        new_cmd_format, opts, args = validate_args(argv)
        search_values = parse_change_values_from_opts(opts)
        self.assertEqual(search_values, expected)

        # case 2: plain IP addresses
        argv = \
            ["--id", "1", "--region", "2", "--zone", "3",
             "--ip", "127.0.0.1",
             "--port", "6200",
             "--replication-ip", "127.0.0.10",
             "--replication-port", "7000",
             "--device", "sda3",
             "--meta", "some meta data",
             "--weight", "3.14159265359",
             "--change-ip", "127.0.0.2",
             "--change-port", "6201",
             "--change-replication-ip", "127.0.0.20",
             "--change-replication-port", "7001",
             "--change-device", "sdb3",
             "--change-meta", "some meta data for change"]
        expected = {
            'ip': "127.0.0.2",
            'port': 6201,
            'replication_ip': "127.0.0.20",
            'replication_port': 7001,
            'device': "sdb3",
            'meta': "some meta data for change",
        }
        new_cmd_format, opts, args = validate_args(argv)
        search_values = parse_change_values_from_opts(opts)
        self.assertEqual(search_values, expected)

        # case 3: bracketed IPs normalize to the same expected values
        argv = \
            ["--id", "1", "--region", "2", "--zone", "3",
             "--ip", "[127.0.0.1]",
             "--port", "6200",
             "--replication-ip", "[127.0.0.10]",
             "--replication-port", "7000",
             "--device", "sda3",
             "--meta", "some meta data",
             "--weight", "3.14159265359",
             "--change-ip", "[127.0.0.2]",
             "--change-port", "6201",
             "--change-replication-ip", "[127.0.0.20]",
             "--change-replication-port", "7001",
             "--change-device", "sdb3",
             "--change-meta", "some meta data for change"]
        new_cmd_format, opts, args = validate_args(argv)
        search_values = parse_change_values_from_opts(opts)
        self.assertEqual(search_values, expected)
def test_validate_args(self):
argv = \
["--id", "1", "--region", "2", "--zone", "3",
"--ip", "test.test.com",
"--port", "6200",
"--replication-ip", "r.test.com",
"--replication-port", "7000",
"--device", "sda3",
"--meta", "some meta data",
"--weight", "3.14159265359",
"--change-ip", "change.test.test.com",
"--change-port", "6201",
"--change-replication-ip", "change.r.test.com",
"--change-replication-port", "7001",
"--change-device", "sdb3",
"--change-meta", "some meta data for change"]
new_cmd_format, opts, args = validate_args(argv)
self.assertTrue(new_cmd_format)
self.assertEqual(opts.id, 1)
self.assertEqual(opts.region, 2)
self.assertEqual(opts.zone, 3)
self.assertEqual(opts.ip, "test.test.com")
self.assertEqual(opts.port, 6200)
self.assertEqual(opts.replication_ip, "r.test.com")
self.assertEqual(opts.replication_port, 7000)
self.assertEqual(opts.device, "sda3")
self.assertEqual(opts.meta, "some meta data")
self.assertEqual(opts.weight, 3.14159265359)
self.assertEqual(opts.change_ip, "change.test.test.com")
self.assertEqual(opts.change_port, 6201)
self.assertEqual(opts.change_replication_ip, "change.r.test.com")
self.assertEqual(opts.change_replication_port, 7001)
self.assertEqual(opts.change_device, "sdb3")
self.assertEqual(opts.change_meta, "some meta data for change")
def test_validate_args_new_cmd_format(self):
argv = \
["--id", "0", "--region", "0", "--zone", "0",
"--ip", "",
"--port", "0",
"--replication-ip", "",
"--replication-port", "0",
"--device", "",
"--meta", "",
"--weight", "0",
"--change-ip", "",
"--change-port", "0",
"--change-replication-ip", "",
"--change-replication-port", "0",
"--change-device", "",
"--change-meta", ""]
new_cmd_format, opts, args = validate_args(argv)
self.assertTrue(new_cmd_format)
argv = \
["--id", None, "--region", None, "--zone", None,
"--ip", "",
"--port", "0",
"--replication-ip", "",
"--replication-port", "0",
"--device", "",
"--meta", "",
"--weight", None,
"--change-ip", "change.test.test.com",
"--change-port", "6201",
"--change-replication-ip", "change.r.test.com",
"--change-replication-port", "7001",
"--change-device", "sdb3",
"--change-meta", "some meta data for change"]
new_cmd_format, opts, args = validate_args(argv)
self.assertFalse(new_cmd_format)
argv = \
["--id", "0"]
new_cmd_format, opts, args = validate_args(argv)
self.assertTrue(new_cmd_format)
argv = \
["--region", "0"]
new_cmd_format, opts, args = validate_args(argv)
self.assertTrue(new_cmd_format)
argv = \
["--zone", "0"]
new_cmd_format, opts, args = validate_args(argv)
self.assertTrue(new_cmd_format)
argv = \
["--weight", "0"]
new_cmd_format, opts, args = validate_args(argv)
self.assertTrue(new_cmd_format)
def test_parse_args(self):
argv = \
["--id", "1", "--region", "2", "--zone", "3",
"--ip", "test.test.com",
"--port", "6200",
"--replication-ip", "r.test.com",
"--replication-port", "7000",
"--device", "sda3",
"--meta", "some meta data",
"--weight", "3.14159265359",
"--change-ip", "change.test.test.com",
"--change-port", "6201",
"--change-replication-ip", "change.r.test.com",
"--change-replication-port", "7001",
"--change-device", "sdb3",
"--change-meta", "some meta data for change"]
opts, args = parse_args(argv)
self.assertEqual(opts.id, 1)
self.assertEqual(opts.region, 2)
self.assertEqual(opts.zone, 3)
self.assertEqual(opts.ip, "test.test.com")
self.assertEqual(opts.port, 6200)
self.assertEqual(opts.replication_ip, "r.test.com")
self.assertEqual(opts.replication_port, 7000)
self.assertEqual(opts.device, "sda3")
self.assertEqual(opts.meta, "some meta data")
self.assertEqual(opts.weight, 3.14159265359)
self.assertEqual(opts.change_ip, "change.test.test.com")
self.assertEqual(opts.change_port, 6201)
self.assertEqual(opts.change_replication_ip, "change.r.test.com")
self.assertEqual(opts.change_replication_port, 7001)
self.assertEqual(opts.change_device, "sdb3")
self.assertEqual(opts.change_meta, "some meta data for change")
self.assertEqual(len(args), 0)
def test_parse_builder_ring_filename_args(self):
args = 'swift-ring-builder object.builder write_ring'
self.assertEqual((
'object.builder', 'object.ring.gz'
), parse_builder_ring_filename_args(args.split()))
args = 'swift-ring-builder container.ring.gz write_builder'
self.assertEqual((
'container.builder', 'container.ring.gz'
), parse_builder_ring_filename_args(args.split()))
# builder name arg should always fall through
args = 'swift-ring-builder test create'
self.assertEqual((
'test', 'test.ring.gz'
), parse_builder_ring_filename_args(args.split()))
args = 'swift-ring-builder my.file.name create'
self.assertEqual((
'my.file.name', 'my.file.name.ring.gz'
), parse_builder_ring_filename_args(args.split()))
def test_build_dev_from_opts(self):
argv = \
["--region", "0", "--zone", "3",
"--ip", "test.test.com",
"--port", "6200",
"--replication-ip", "r.test.com",
"--replication-port", "7000",
"--device", "sda3",
"--meta", "some meta data",
"--weight", "3.14159265359"]
expected = {
'region': 0,
'zone': 3,
'ip': "test.test.com",
'port': 6200,
'replication_ip': "r.test.com",
'replication_port': 7000,
'device': "sda3",
'meta': "some meta data",
'weight': 3.14159265359,
}
opts, args = parse_args(argv)
device = build_dev_from_opts(opts)
self.assertEqual(device, expected)
argv = \
["--region", "2", "--zone", "3",
"--ip", "[test.test.com]",
"--port", "6200",
"--replication-ip", "[r.test.com]",
"--replication-port", "7000",
"--device", "sda3",
"--meta", "some meta data",
"--weight", "3.14159265359"]
opts, args = parse_args(argv)
self.assertRaises(ValueError, build_dev_from_opts, opts)
argv = \
["--region", "2", "--zone", "3",
"--ip", "[test.test.com]",
"--port", "6200",
"--replication-ip", "[r.test.com]",
"--replication-port", "7000",
"--meta", "some meta data",
"--weight", "3.14159265359"]
opts, args = parse_args(argv)
self.assertRaises(ValueError, build_dev_from_opts, opts)
def test_replication_defaults(self):
args = '-r 1 -z 1 -i 127.0.0.1 -p 6010 -d d1 -w 100'.split()
opts, _ = parse_args(args)
device = build_dev_from_opts(opts)
expected = {
'device': 'd1',
'ip': '127.0.0.1',
'meta': '',
'port': 6010,
'region': 1,
'replication_ip': '127.0.0.1',
'replication_port': 6010,
'weight': 100.0,
'zone': 1,
}
self.assertEqual(device, expected)
args = '-r 1 -z 1 -i test.com -p 6010 -d d1 -w 100'.split()
opts, _ = parse_args(args)
device = build_dev_from_opts(opts)
expected = {
'device': 'd1',
'ip': 'test.com',
'meta': '',
'port': 6010,
'region': 1,
'replication_ip': 'test.com',
'replication_port': 6010,
'weight': 100.0,
'zone': 1,
}
self.assertEqual(device, expected)
    @unittest.skipIf(sys.version_info < (3,),
                     "Seed-specific tests don't work well between python "
                     "versions. This test is now PY3 only")
    def test_dispersion_report(self):
        """Check dispersion_report() output against a seeded rebalance."""
        # two zones: zone 0 with four weight-100 devices on one server,
        # zone 1 with eight weight-200 devices across two servers
        rb = ring.RingBuilder(8, 3, 0)
        rb.add_dev({'id': 0, 'region': 1, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sda1'})
        rb.add_dev({'id': 3, 'region': 1, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdb1'})
        rb.add_dev({'id': 4, 'region': 1, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdc1'})
        rb.add_dev({'id': 5, 'region': 1, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdd1'})
        rb.add_dev({'id': 1, 'region': 1, 'zone': 1, 'weight': 200,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
        rb.add_dev({'id': 6, 'region': 1, 'zone': 1, 'weight': 200,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb1'})
        rb.add_dev({'id': 7, 'region': 1, 'zone': 1, 'weight': 200,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdc1'})
        rb.add_dev({'id': 8, 'region': 1, 'zone': 1, 'weight': 200,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdd1'})
        rb.add_dev({'id': 2, 'region': 1, 'zone': 1, 'weight': 200,
                    'ip': '127.0.0.2', 'port': 10002, 'device': 'sda1'})
        rb.add_dev({'id': 9, 'region': 1, 'zone': 1, 'weight': 200,
                    'ip': '127.0.0.2', 'port': 10002, 'device': 'sdb1'})
        rb.add_dev({'id': 10, 'region': 1, 'zone': 1, 'weight': 200,
                    'ip': '127.0.0.2', 'port': 10002, 'device': 'sdc1'})
        rb.add_dev({'id': 11, 'region': 1, 'zone': 1, 'weight': 200,
                    'ip': '127.0.0.2', 'port': 10002, 'device': 'sdd1'})

        # this ring is pretty volatile and the assertions are pretty brittle
        # so we use a specific seed
        rb.rebalance(seed=100)
        rb.validate()

        self.assertEqual(rb.dispersion, 16.796875)
        report = dispersion_report(rb)
        self.assertEqual(report['worst_tier'], 'r1z1-127.0.0.1')
        self.assertEqual(report['max_dispersion'], 20.967741935483872)

        def build_tier_report(max_replicas, placed_parts, dispersion,
                              replicas):
            # small helper to keep the expected-report literals compact
            return {
                'max_replicas': max_replicas,
                'placed_parts': placed_parts,
                'dispersion': dispersion,
                'replicas': replicas,
            }

        # every partition has at least two replicas in this zone, unfortunately
        # sometimes they're both on the same server.
        expected = [
            ['r1z1', build_tier_report(
                2, 621, 17.55233494363929, [0, 0, 147, 109])],
            ['r1z1-127.0.0.1', build_tier_report(
                1, 310, 20.967741935483872, [11, 180, 65, 0])],
            ['r1z1-127.0.0.2', build_tier_report(
                1, 311, 20.578778135048232, [9, 183, 64, 0])],
        ]
        report = dispersion_report(rb, 'r1z1[^/]*$', verbose=True)
        graph = report['graph']
        for i, (expected_key, expected_report) in enumerate(expected):
            key, report = graph[i]
            self.assertEqual(
                (key, report),
                (expected_key, expected_report)
            )

        # overcompensate in r1z0
        rb.add_dev({'id': 12, 'region': 1, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.3', 'port': 10003, 'device': 'sda1'})
        rb.add_dev({'id': 13, 'region': 1, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.3', 'port': 10003, 'device': 'sdb1'})
        rb.add_dev({'id': 14, 'region': 1, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.3', 'port': 10003, 'device': 'sdc1'})
        rb.add_dev({'id': 15, 'region': 1, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.3', 'port': 10003, 'device': 'sdd1'})

        # when the biggest tier has the smallest devices things get ugly
        # can't move all the part-replicas in one rebalance
        rb.rebalance(seed=100)
        report = dispersion_report(rb, verbose=True)
        self.assertEqual(rb.dispersion, 2.8645833333333335)
        self.assertEqual(report['worst_tier'], 'r1z1-127.0.0.1')
        self.assertEqual(report['max_dispersion'], 6.593406593406593)

        # do a second rebalance
        rb.rebalance(seed=100)
        report = dispersion_report(rb, verbose=True)
        self.assertEqual(rb.dispersion, 16.666666666666668)
        self.assertEqual(report['worst_tier'], 'r1z0-127.0.0.3')
        self.assertEqual(report['max_dispersion'], 33.333333333333336)

        # ... but overload can square it
        rb.set_overload(rb.get_required_overload())
        rb.rebalance()
        self.assertEqual(rb.dispersion, 0.0)
def test_parse_address_old_format(self):
# Test old format
argv = "127.0.0.1:6200R127.0.0.1:6200/sda1_some meta data"
ip, port, rest = parse_address(argv)
self.assertEqual(ip, '127.0.0.1')
self.assertEqual(port, 6200)
self.assertEqual(rest, 'R127.0.0.1:6200/sda1_some meta data')
def test_normalized_device_tier_names(self):
rb = ring.RingBuilder(8, 3, 0)
rb.add_dev({
'region': 1,
'zone': 1,
'ip': '127.0.0.1',
'port': 6011,
'device': 'd1',
'weight': 0.0,
})
dev = rb.devs[0]
expected = 'r1z1-127.0.0.1/d1'
self.assertEqual(expected, get_tier_name(tiers_for_dev(dev)[-1], rb))
self.assertEqual(expected, pretty_dev(dev))
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/ring/test_utils.py |
swift-master | test/unit/common/ring/__init__.py |
|
# Copyright (c) 2010-2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from contextlib import contextmanager
import mock
import os
import random
import tempfile
import unittest
import shutil
import copy
import time
from collections import defaultdict, Counter
from swift.common.exceptions import RingBuilderError
from swift.common.ring import RingBuilder, Ring
from swift.common.ring.composite_builder import (
compose_rings, CompositeRingBuilder, CooperativeRingBuilder)
def make_device_iter():
    """Yield an endless sequence of unique sample device dicts.

    Each yielded device has a distinct ip and port; the region is 0 and
    is typically overwritten by the tests.
    """
    base_port = 6000
    index = 0
    while True:
        yield {
            'region': 0,  # Note that region may be replaced on the tests
            'zone': 0,
            'ip': '10.0.0.%s' % index,
            'replication_ip': '10.0.0.%s' % index,
            'port': base_port + index,
            'replication_port': base_port + index,
            'device': 'sda',
            'weight': 100.0,
        }
        index += 1
class BaseTestCompositeBuilder(unittest.TestCase):
    """Common fixtures and assertion helpers for composite ring tests."""

    def setUp(self):
        # fresh temp dir and device generator per test
        self.tmpdir = tempfile.mkdtemp()
        self.device_iter = make_device_iter()
        self.output_ring = os.path.join(self.tmpdir, 'composite.ring.gz')

    def pop_region_device(self, region):
        # take the next unique sample device and pin it to the given region
        dev = next(self.device_iter)
        dev.update({'region': region})
        return dev

    def tearDown(self):
        try:
            shutil.rmtree(self.tmpdir, True)
        except OSError:
            pass

    def save_builder_with_no_id(self, builder, fname):
        # save a builder file whose serialized dict lacks the 'id' key
        orig_to_dict = builder.to_dict

        def fake_to_dict():
            res = orig_to_dict()
            res.pop('id')
            return res

        with mock.patch.object(builder, 'to_dict', fake_to_dict):
            builder.save(fname)

    def save_builders(self, builders, missing_ids=None, prefix='builder'):
        # persist each builder to its own file; builders whose index is in
        # missing_ids are saved without an 'id' key
        missing_ids = missing_ids or []
        builder_files = []
        for i, builder in enumerate(builders):
            fname = os.path.join(self.tmpdir, '%s_%s.builder' % (prefix, i))
            if i in missing_ids:
                self.save_builder_with_no_id(builder, fname)
            else:
                builder.save(fname)
            builder_files.append(fname)
        return builder_files

    def create_sample_ringbuilders(self, num_builders=2, rebalance=True):
        """
        Create sample rings with four devices

        :returns: a list of ring builder instances
        """
        builders = []
        for region in range(num_builders):
            fname = os.path.join(self.tmpdir, 'builder_%s.builder' % region)
            builder = RingBuilder(6, 3, 0)
            for _ in range(5):
                dev = self.pop_region_device(region)
                builder.add_dev(dev)
            # remove last dev to simulate a ring with some history
            builder.remove_dev(dev['id'])
            # add a dev that won't be assigned any parts
            new_dev = self.pop_region_device(region)
            new_dev['weight'] = 0
            builder.add_dev(new_dev)
            if rebalance:
                builder.rebalance()
            builder.save(fname)
            self.assertTrue(os.path.exists(fname))
            builders.append(builder)
        return builders

    def add_dev(self, builder, weight=None, region=None):
        # add a sample device to builder, defaulting to the region of the
        # builder's first device
        if region is None:
            dev = next(builder._iter_devs())
            region = dev['region']
        new_dev = self.pop_region_device(region)
        if weight is not None:
            new_dev['weight'] = weight
        builder.add_dev(new_dev)

    def add_dev_and_rebalance(self, builder, weight=None):
        self.add_dev(builder, weight)
        builder.rebalance()

    def assertDevices(self, composite_ring, builders):
        """
        :param composite_ring: a Ring instance
        :param builders: a list of RingBuilder instances for assertion
        """
        # assert all component devices are in composite device table
        builder_devs = []
        for builder in builders:
            builder_devs.extend([
                (dev['ip'], dev['port'], dev['device'])
                for dev in builder._iter_devs()])

        got_devices = [
            (dev['ip'], dev['port'], dev['device'])
            for dev in composite_ring.devs if dev]
        self.assertEqual(sorted(builder_devs), sorted(got_devices),
                         "composite_ring mismatched with part of the rings")

        # assert composite device ids correctly index into the dev list
        dev_ids = []
        for i, dev in enumerate(composite_ring.devs):
            if dev:
                self.assertEqual(i, dev['id'])
                dev_ids.append(dev['id'])
        self.assertEqual(len(builder_devs), len(dev_ids))

        def uniqueness(dev):
            return (dev['ip'], dev['port'], dev['device'])

        # assert part assignment is ordered by ring order
        part_count = composite_ring.partition_count
        for part in range(part_count):
            primaries = [uniqueness(primary) for primary in
                         composite_ring.get_part_nodes(part)]
            offset = 0
            for builder in builders:
                sub_primaries = [uniqueness(primary) for primary in
                                 builder.get_part_devices(part)]
                self.assertEqual(
                    primaries[offset:offset + builder.replicas],
                    sub_primaries,
                    "composite ring is not ordered by ring order, %s, %s"
                    % (primaries, sub_primaries))
                offset += builder.replicas

    def check_composite_ring(self, ring_file, builders):
        # verify the ring at ring_file against its component builders
        got_ring = Ring(ring_file)
        self.assertEqual(got_ring.partition_count, builders[0].parts)
        self.assertEqual(got_ring.replica_count,
                         sum(b.replicas for b in builders))
        self.assertEqual(got_ring._part_shift, builders[0].part_shift)
        self.assertDevices(got_ring, builders)

    def check_composite_meta(self, cb_file, builder_files, version=1):
        # verify the composite builder metadata persisted at cb_file
        with open(cb_file) as fd:
            actual = json.load(fd)
        builders = [RingBuilder.load(fname) for fname in builder_files]
        expected_metadata = {
            'saved_path': os.path.abspath(cb_file),
            'serialization_version': 1,
            'version': version,
            'components': [
                {'id': builder.id,
                 'version': builder.version,
                 'replicas': builder.replicas,
                 }
                for builder in builders
            ],
            'component_builder_files':
                dict((builder.id, os.path.abspath(builder_files[i]))
                     for i, builder in enumerate(builders))
        }
        self.assertEqual(expected_metadata, actual)

    def _make_composite_builder(self, builders):
        # helper to compose a ring, save it and sanity check it
        builder_files = self.save_builders(builders)
        cb = CompositeRingBuilder(builder_files)
        cb.compose().save(self.output_ring)
        self.check_composite_ring(self.output_ring, builders)
        return cb, builder_files
class TestCompositeBuilder(BaseTestCompositeBuilder):
    """Tests for the compose_rings() function."""

    def test_compose_rings(self):
        def do_test(builder_count):
            builders = self.create_sample_ringbuilders(builder_count)
            rd = compose_rings(builders)
            rd.save(self.output_ring)
            self.check_composite_ring(self.output_ring, builders)

        do_test(2)
        do_test(3)
        do_test(4)

    def test_composite_same_region_in_the_different_rings_error(self):
        builder_1 = self.create_sample_ringbuilders(1)
        builder_2 = self.create_sample_ringbuilders(1)
        builders = builder_1 + builder_2
        with self.assertRaises(ValueError) as cm:
            compose_rings(builders)
        self.assertIn('Same region found in different rings',
                      cm.exception.args[0])

    def test_composite_only_one_ring_in_the_args_error(self):
        builders = self.create_sample_ringbuilders(1)
        with self.assertRaises(ValueError) as cm:
            compose_rings(builders)
        self.assertIn(
            'Two or more component builders are required.',
            cm.exception.args[0])

    def test_composite_same_device_in_the_different_rings_error(self):
        builders = self.create_sample_ringbuilders(2)
        same_device = copy.deepcopy(builders[0].devs[0])

        # create one more ring which duplicates a device in the first ring
        builder = RingBuilder(6, 3, 1)
        _, fname = tempfile.mkstemp(dir=self.tmpdir)
        # add info to feed to add_dev
        same_device.update({'region': 2, 'weight': 100})
        builder.add_dev(same_device)

        # add rest of the devices, which are unique
        for _ in range(3):
            dev = self.pop_region_device(2)
            builder.add_dev(dev)

        builder.rebalance()
        builder.save(fname)
        # sanity
        self.assertTrue(os.path.exists(fname))
        builders.append(builder)

        with self.assertRaises(ValueError) as cm:
            compose_rings(builders)
        self.assertIn(
            'Duplicate ip/port/device combination %(ip)s/%(port)s/%(device)s '
            'found in builders at indexes 0 and 2' %
            same_device, cm.exception.args[0])

    def test_different_part_power_error(self):
        # create a ring builder
        # (default, part power is 6 with create_sample_ringbuilders)
        builders = self.create_sample_ringbuilders(1)

        # prepare another ring which has different part power
        incorrect_builder = RingBuilder(4, 3, 1)
        _, fname = tempfile.mkstemp(dir=self.tmpdir)
        for _ in range(4):
            dev = self.pop_region_device(1)
            incorrect_builder.add_dev(dev)
        incorrect_builder.rebalance()
        incorrect_builder.save(fname)
        # sanity
        self.assertTrue(os.path.exists(fname))

        # sanity
        correct_builder = builders[0]
        self.assertNotEqual(correct_builder.part_shift,
                            incorrect_builder.part_shift)
        self.assertNotEqual(correct_builder.part_power,
                            incorrect_builder.part_power)

        builders.append(incorrect_builder)
        with self.assertRaises(ValueError) as cm:
            compose_rings(builders)
        self.assertIn("All builders must have same value for 'part_power'",
                      cm.exception.args[0])

    def test_compose_rings_float_replica_count_builder_error(self):
        builders = self.create_sample_ringbuilders(1)

        # prepare another ring which has float replica count
        incorrect_builder = RingBuilder(6, 1.5, 1)
        _, fname = tempfile.mkstemp(dir=self.tmpdir)
        for _ in range(4):
            dev = self.pop_region_device(1)
            incorrect_builder.add_dev(dev)
        incorrect_builder.rebalance()
        incorrect_builder.save(fname)
        # sanity
        self.assertTrue(os.path.exists(fname))
        self.assertEqual(1.5, incorrect_builder.replicas)
        # the first replica has 2 ** 6 partitions
        self.assertEqual(
            2 ** 6, len(incorrect_builder._replica2part2dev[0]))
        # but the second replica has the half of the first partitions
        self.assertEqual(
            2 ** 5, len(incorrect_builder._replica2part2dev[1]))

        builders.append(incorrect_builder)
        with self.assertRaises(ValueError) as cm:
            compose_rings(builders)
        self.assertIn("Problem with builders", cm.exception.args[0])
        self.assertIn("Non integer replica count", cm.exception.args[0])

    def test_compose_rings_rebalance_needed(self):
        builders = self.create_sample_ringbuilders(2)

        # add a new device to builder 1 but no rebalance
        dev = self.pop_region_device(1)
        builders[1].add_dev(dev)
        self.assertTrue(builders[1].devs_changed)  # sanity check
        with self.assertRaises(ValueError) as cm:
            compose_rings(builders)
        self.assertIn("Problem with builders", cm.exception.args[0])
        self.assertIn("Builder needs rebalance", cm.exception.args[0])
        # after rebalance, that works (sanity)
        builders[1].rebalance()
        compose_rings(builders)

    def test_different_replica_count_works(self):
        # create a ring builder
        # (default, part power is 6 with create_sample_ringbuilders)
        builders = self.create_sample_ringbuilders(1)

        # prepare another ring which has different replica count
        builder = RingBuilder(6, 1, 1)
        _, fname = tempfile.mkstemp(dir=self.tmpdir)
        for _ in range(4):
            dev = self.pop_region_device(1)
            builder.add_dev(dev)
        builder.rebalance()
        builder.save(fname)
        # sanity
        self.assertTrue(os.path.exists(fname))
        builders.append(builder)

        rd = compose_rings(builders)
        rd.save(self.output_ring)
        got_ring = Ring(self.output_ring)
        self.assertEqual(got_ring.partition_count, 2 ** 6)
        self.assertEqual(got_ring.replica_count, 4)  # 3 + 1
        self.assertEqual(got_ring._part_shift, 26)
        self.assertDevices(got_ring, builders)

    def test_ring_swap(self):
        # sanity
        builders = self.create_sample_ringbuilders(2)
        rd = compose_rings(builders)
        rd.save(self.output_ring)
        got_ring = Ring(self.output_ring)
        self.assertEqual(got_ring.partition_count, 2 ** 6)
        self.assertEqual(got_ring.replica_count, 6)
        self.assertEqual(got_ring._part_shift, 26)
        self.assertDevices(got_ring, builders)

        # even if swapped, it works
        reverse_builders = builders[::-1]
        self.assertNotEqual(reverse_builders, builders)
        rd = compose_rings(reverse_builders)
        rd.save(self.output_ring)
        got_ring = Ring(self.output_ring)
        self.assertEqual(got_ring.partition_count, 2 ** 6)
        self.assertEqual(got_ring.replica_count, 6)
        self.assertEqual(got_ring._part_shift, 26)
        self.assertDevices(got_ring, reverse_builders)

        # but if the composite rings are different order, the composite ring
        # *will* be different. Note that the CompositeRingBuilder class will
        # check builder order against the existing ring and fail if the order
        # is different (actually checking the metadata). See also
        # test_compose_different_builder_order
        with self.assertRaises(AssertionError) as cm:
            self.assertDevices(got_ring, builders)
        self.assertIn("composite ring is not ordered by ring order",
                      cm.exception.args[0])
class TestCompositeRingBuilder(BaseTestCompositeBuilder):
    """Tests for the CompositeRingBuilder class."""

    def test_compose_with_builder_files(self):
        cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json')
        builders = self.create_sample_ringbuilders(2)
        cb, _ = self._make_composite_builder(builders)
        cb.save(cb_file)

        # modify components, save them, then reload the composite builder
        # and compose again
        for i, b in enumerate(builders):
            self.add_dev_and_rebalance(b)
        self.save_builders(builders)
        cb = CompositeRingBuilder.load(cb_file)
        cb.compose().save(self.output_ring)
        self.check_composite_ring(self.output_ring, builders)

    def test_compose_ok(self):
        cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json')
        builders = self.create_sample_ringbuilders(2)
        # make first version of composite ring
        cb, builder_files = self._make_composite_builder(builders)
        # check composite builder persists ok
        cb.save(cb_file)
        self.assertTrue(os.path.exists(cb_file))
        self.check_composite_meta(cb_file, builder_files)
        # and reloads ok
        cb = CompositeRingBuilder.load(cb_file)
        self.assertEqual(1, cb.version)
        # compose detects if no component builder changes, if we ask it to...
        with self.assertRaises(ValueError) as cm:
            cb.compose(require_modified=True)
        self.assertIn('None of the component builders has been modified',
                      cm.exception.args[0])
        self.assertEqual(1, cb.version)
        # ...but by default will compose again despite no changes to components
        cb.compose(force=True).save(self.output_ring)
        self.check_composite_ring(self.output_ring, builders)
        self.assertEqual(2, cb.version)
        # check composite builder persists ok again
        cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json2')
        cb.save(cb_file)
        self.assertTrue(os.path.exists(cb_file))
        self.check_composite_meta(cb_file, builder_files, version=2)

    def test_compose_modified_component_builders(self):
        # check it's ok to compose again with same but modified builders
        cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json')
        builders = self.create_sample_ringbuilders(2)
        cb, builder_files = self._make_composite_builder(builders)
        ring = Ring(self.output_ring)
        orig_devs = [dev for dev in ring.devs if dev]
        self.assertEqual(10, len(orig_devs))  # sanity check
        self.add_dev_and_rebalance(builders[1])
        builder_files = self.save_builders(builders)
        cb.compose().save(self.output_ring)
        self.check_composite_ring(self.output_ring, builders)
        ring = Ring(self.output_ring)
        modified_devs = [dev for dev in ring.devs if dev]
        self.assertEqual(len(orig_devs) + 1, len(modified_devs))
        # check composite builder persists ok
        cb.save(cb_file)
        self.assertTrue(os.path.exists(cb_file))
        self.check_composite_meta(cb_file, builder_files, version=2)
        # and reloads ok
        cb = CompositeRingBuilder.load(cb_file)
        # and composes ok after reload
        cb.compose(force=True).save(self.output_ring)
        self.check_composite_ring(self.output_ring, builders)
        # check composite builder persists ok again
        cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json2')
        cb.save(cb_file)
        self.assertTrue(os.path.exists(cb_file))
        self.check_composite_meta(cb_file, builder_files, version=3)

    def test_compose_override_component_builders(self):
        # check passing different builder files to the compose() method
        # overrides loaded builder files
        cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json')
        builders = self.create_sample_ringbuilders(2)
        cb, builder_files = self._make_composite_builder(builders)
        # modify builders and save in different files
        self.add_dev_and_rebalance(builders[1])
        with self.assertRaises(ValueError):
            # sanity check - originals are unchanged
            cb.compose(builder_files, require_modified=True)
        other_files = self.save_builders(builders, prefix='other')
        cb.compose(other_files, require_modified=True).save(self.output_ring)
        self.check_composite_ring(self.output_ring, builders)
        # check composite builder persists ok
        cb.save(cb_file)
        self.assertTrue(os.path.exists(cb_file))
        self.check_composite_meta(cb_file, other_files, version=2)
        # and reloads ok
        cb = CompositeRingBuilder.load(cb_file)
        # and composes ok after reload
        cb.compose(force=True).save(self.output_ring)
        self.check_composite_ring(self.output_ring, builders)
        # check composite builder persists ok again
        cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json2')
        cb.save(cb_file)
        self.assertTrue(os.path.exists(cb_file))
        self.check_composite_meta(cb_file, other_files, version=3)

    def test_abs_paths_persisted(self):
        # relative paths given to the builder are stored as absolute paths
        cwd = os.getcwd()
        try:
            os.chdir(self.tmpdir)
            builders = self.create_sample_ringbuilders(2)
            builder_files = self.save_builders(builders)
            rel_builder_files = [os.path.basename(bf) for bf in builder_files]
            cb = CompositeRingBuilder(rel_builder_files)
            cb.compose().save(self.output_ring)
            self.check_composite_ring(self.output_ring, builders)
            cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json')
            rel_cb_file = os.path.basename(cb_file)
            cb.save(rel_cb_file)
            self.check_composite_meta(rel_cb_file, rel_builder_files)
        finally:
            os.chdir(cwd)

    def test_load_errors(self):
        bad_file = os.path.join(self.tmpdir, 'bad_file.json')
        with self.assertRaises(IOError):
            CompositeRingBuilder.load(bad_file)

        def check_bad_content(content):
            with open(bad_file, 'wb') as fp:
                fp.write(content)
            try:
                with self.assertRaises(ValueError) as cm:
                    CompositeRingBuilder.load(bad_file)
                self.assertIn(
                    "File does not contain valid composite ring data",
                    cm.exception.args[0])
            except AssertionError as err:
                raise AssertionError('With content %r: %s' % (content, err))

        for content in ('', 'not json', json.dumps({}), json.dumps([])):
            check_bad_content(content.encode('ascii'))

        # each required top-level key must be present
        good_content = {
            'components': [
                {'version': 1, 'id': 'uuid_x', 'replicas': 12},
                {'version': 2, 'id': 'uuid_y', 'replicas': 12}
            ],
            'builder_files': {'uuid_x': '/path/to/file_x',
                              'uuid_y': '/path/to/file_y'},
            'version': 99}
        for missing in good_content:
            bad_content = dict(good_content)
            bad_content.pop(missing)
            check_bad_content(json.dumps(bad_content).encode('ascii'))

    def test_save_errors(self):
        cb_file = os.path.join(self.tmpdir, 'test-composite-ring.json')

        def do_test(cb):
            with self.assertRaises(ValueError) as cm:
                cb.save(cb_file)
            self.assertIn("No composed ring to save", cm.exception.args[0])

        do_test(CompositeRingBuilder())
        do_test(CompositeRingBuilder([]))
        do_test(CompositeRingBuilder(['file1', 'file2']))

    def test_rebalance(self):
        @contextmanager
        def mock_rebalance():
            # captures component builder rebalance call results, yields a dict
            # that maps builder -> results
            calls = defaultdict(list)
            orig_func = RingBuilder.rebalance

            def func(builder, **kwargs):
                result = orig_func(builder, **kwargs)
                calls[builder].append(result)
                return result

            with mock.patch('swift.common.ring.RingBuilder.rebalance', func):
                yield calls

        def check_results():
            self.assertEqual(2, len(rebalance_calls))  # 2 builders called
            for calls in rebalance_calls.values():
                self.assertFalse(calls[1:])  # 1 call to each builder

            self.assertEqual(sorted(expected_ids),
                             sorted([b.id for b in rebalance_calls]))
            self.assertEqual(sorted(expected_versions),
                             sorted([b.version for b in rebalance_calls]))
            for b in rebalance_calls:
                self.assertEqual(set(rebalance_calls.keys()),
                                 set(b.parent_builder._builders))

            # check the rebalanced builders were saved
            written_builders = [RingBuilder.load(f) for f in builder_files]
            self.assertEqual(expected_ids,
                             [b.id for b in written_builders])
            self.assertEqual(expected_versions,
                             [b.version for b in written_builders])

            # check returned results, should be in component order
            self.assertEqual(2, len(results))
            self.assertEqual(builder_files,
                             [r['builder_file'] for r in results])
            self.assertEqual(expected_versions,
                             [r['builder'].version for r in results])
            self.assertEqual(expected_ids, [r['builder'].id for r in results])
            self.assertEqual(
                [rebalance_calls[r['builder']][0] for r in results],
                [r['result'] for r in results])

        # N.B. the sample builders have zero min_part_hours
        builders = self.create_sample_ringbuilders(2)
        expected_versions = [b.version + 1 for b in builders]
        expected_ids = [b.id for b in builders]

        # test rebalance loads component builders
        builder_files = self.save_builders(builders)
        cb = CompositeRingBuilder(builder_files)
        with mock_rebalance() as rebalance_calls:
            results = cb.rebalance()
        check_results()

        # test loading builder files via load_components
        # revert builder files to original builder state
        builder_files = self.save_builders(builders)
        cb = CompositeRingBuilder()
        cb.load_components(builder_files)
        with mock_rebalance() as rebalance_calls:
            results = cb.rebalance()
        check_results()

    def test_rebalance_errors(self):
        cb = CompositeRingBuilder()
        with self.assertRaises(ValueError) as cm:
            cb.rebalance()
        self.assertIn('Two or more component builders are required',
                      cm.exception.args[0])

        builders = self.create_sample_ringbuilders(2)
        cb, builder_files = self._make_composite_builder(builders)
        # a component rebalance failure surfaces with the failing file name;
        # shuffle is neutered so the failing component is deterministic
        with mock.patch('swift.common.ring.RingBuilder.rebalance',
                        side_effect=RingBuilderError('test')):
            with mock.patch('swift.common.ring.composite_builder.shuffle',
                            lambda x: x):
                with self.assertRaises(RingBuilderError) as cm:
                    cb.rebalance()
        self.assertIn('An error occurred while rebalancing component %s' %
                      builder_files[0], str(cm.exception))
        self.assertIsNone(cb._builders)

        with mock.patch('swift.common.ring.RingBuilder.validate',
                        side_effect=RingBuilderError('test')):
            with mock.patch('swift.common.ring.composite_builder.shuffle',
                            lambda x: x):
                with self.assertRaises(RingBuilderError) as cm:
                    cb.rebalance()
        self.assertIn('An error occurred while rebalancing component %s' %
                      builder_files[0], str(cm.exception))
        self.assertIsNone(cb._builders)

    def test_rebalance_with_unrebalanced_builders(self):
        # create 2 non-rebalanced rings
        builders = self.create_sample_ringbuilders(rebalance=False)
        # save builders
        builder_files = self.save_builders(builders)
        cb = CompositeRingBuilder(builder_files)
        # sanity, it is impossible to compose un-rebalanced component rings
        with self.assertRaises(ValueError) as cm:
            cb.compose()
        self.assertIn("Builder needs rebalance", cm.exception.args[0])
        # but ok to compose after rebalance
        cb.rebalance()
        rd = cb.compose()
        rd.save(self.output_ring)
        rebalanced_builders = [RingBuilder.load(f) for f in builder_files]
        self.check_composite_ring(self.output_ring, rebalanced_builders)
class TestLoadComponents(BaseTestCompositeBuilder):
    # Tests for the loading of component builders. Subclasses may override
    # _call_method_under_test to exercise the same checks via other entry
    # points (see TestComposeLoadComponents).
    def _call_method_under_test(self, cb, *args, **kwargs):
        # Component builder loading is triggered by the load_components method
        # and the compose method. This method provides a hook for subclasses to
        # configure a different method to repeat the component loading tests.
        cb.load_components(*args, **kwargs)

    def test_load_components(self):
        # Happy path: components load lazily, can be reloaded from different
        # files, and can be reloaded after modification.
        builders = self.create_sample_ringbuilders(2)
        builder_files = self.save_builders(builders)
        cb = CompositeRingBuilder(builder_files)
        # check lazy loading
        self.assertEqual(builder_files, cb._builder_files)
        self.assertFalse(cb._builders)  # none loaded yet
        # check loading configured files
        self._call_method_under_test(cb)
        self.assertEqual(builder_files, cb._builder_files)
        for i, builder in enumerate(cb._builders):
            self.assertEqual(builders[i].id, builder.id)
            self.assertEqual(builders[i].devs, builder.devs)
        # modify builders and save in different files
        self.add_dev_and_rebalance(builders[0])
        other_files = self.save_builders(builders, prefix='other')
        # reload from other files
        self._call_method_under_test(cb, other_files)
        self.assertEqual(other_files, cb._builder_files)
        for i, builder in enumerate(cb._builders):
            self.assertEqual(builders[i].id, builder.id)
            self.assertEqual(builders[i].devs, builder.devs)
        # modify builders again and save in same files
        self.add_dev_and_rebalance(builders[1])
        self.save_builders(builders, prefix='other')
        # reload from same files
        self._call_method_under_test(cb)
        self.assertEqual(other_files, cb._builder_files)
        for i, builder in enumerate(cb._builders):
            self.assertEqual(builders[i].id, builder.id)
            self.assertEqual(builders[i].devs, builder.devs)

    def test_load_components_insufficient_builders(self):
        # Fewer than two component builders is always an error; the force
        # flag does not suppress it.
        def do_test(builder_files, force):
            cb = CompositeRingBuilder(builder_files)
            with self.assertRaises(ValueError) as cm:
                self._call_method_under_test(cb, builder_files,
                                             force=force)
            self.assertIn('Two or more component builders are required',
                          cm.exception.args[0])
            cb = CompositeRingBuilder()
            with self.assertRaises(ValueError) as cm:
                self._call_method_under_test(cb, builder_files,
                                             force=force)
            self.assertIn('Two or more component builders are required',
                          cm.exception.args[0])

        builders = self.create_sample_ringbuilders(3)
        builder_files = self.save_builders(builders)
        do_test([], force=False)
        do_test([], force=True)  # this error is never ignored
        do_test(builder_files[:1], force=False)
        do_test(builder_files[:1], force=True)  # this error is never ignored

    def test_load_components_missing_builder_id(self):
        # A component builder without an id can never be loaded, whether or
        # not the composite builder already has component metadata, and a
        # failed load must leave the composite version unchanged.
        def check_missing_id(cb, builders):
            # not ok to load builder_files that have no id assigned
            orig_version = cb.version
            no_id = random.randint(0, len(builders) - 1)
            # rewrite the builder files so that one has missing id
            builder_files = self.save_builders(builders, missing_ids=[no_id])

            def do_check(force):
                with self.assertRaises(ValueError) as cm:
                    self._call_method_under_test(cb, builder_files,
                                                 force=force)
                error_lines = cm.exception.args[0].split('\n')
                self.assertIn("Problem with builder at index %s" % no_id,
                              error_lines[0])
                self.assertIn("id attribute has not been initialised",
                              error_lines[0])
                self.assertFalse(error_lines[1:])
                self.assertEqual(orig_version, cb.version)

            do_check(False)
            do_check(True)  # we never ignore this error

        # check with compose not previously called, cb has no existing metadata
        builders = self.create_sample_ringbuilders(3)
        cb = CompositeRingBuilder()
        check_missing_id(cb, builders)
        # now save good copies of builders and compose so this cb has
        # existing component metadata
        builder_files = self.save_builders(builders)
        cb = CompositeRingBuilder(builder_files)
        cb.compose()  # cb now has component metadata
        check_missing_id(cb, builders)

    def test_load_components_duplicate_builder_ids(self):
        # Two components sharing an id are always rejected, even with force.
        builders = self.create_sample_ringbuilders(3)
        builders[2]._id = builders[0]._id
        cb = CompositeRingBuilder(self.save_builders(builders))

        def do_check(force):
            with self.assertRaises(ValueError) as cm:
                self._call_method_under_test(cb, force=force)
            error_lines = cm.exception.args[0].split('\n')
            self.assertIn("Builder id %r used at indexes 0, 2" %
                          builders[0].id, error_lines[0])
            self.assertFalse(error_lines[1:])
            self.assertEqual(0, cb.version)

        do_check(False)
        do_check(True)

    def test_load_components_unchanged_builders(self):
        # require_modified rejects unchanged builders no matter where the
        # files live; force overrides require_modified.
        def do_test(cb, builder_files, **kwargs):
            orig_version = cb.version
            with self.assertRaises(ValueError) as cm:
                self._call_method_under_test(cb, builder_files, **kwargs)
            error_lines = cm.exception.args[0].split('\n')
            self.assertIn("None of the component builders has been modified",
                          error_lines[0])
            self.assertFalse(error_lines[1:])
            self.assertEqual(orig_version, cb.version)

        builders = self.create_sample_ringbuilders(2)
        cb, builder_files = self._make_composite_builder(builders)
        # ok to load same *unchanged* builders
        self._call_method_under_test(cb, builder_files)
        # unless require_modified is set
        do_test(cb, builder_files, require_modified=True)
        # even if we rewrite the files
        builder_files = self.save_builders(builders)
        do_test(cb, builder_files, require_modified=True)
        # even if we rename the files
        builder_files = self.save_builders(builders, prefix='other')
        do_test(cb, builder_files, require_modified=True)
        # force trumps require_modified
        self._call_method_under_test(cb, builder_files, force=True,
                                     require_modified=True)

    def test_load_components_older_builder(self):
        # Reloading an older version of a known component is rejected unless
        # forced, even when other components have advanced.
        # make first version of composite ring
        builders = self.create_sample_ringbuilders(2)
        cb, builder_files = self._make_composite_builder(builders)
        old_builders = [copy.deepcopy(b) for b in builders]
        # update components and reload
        for i, b in enumerate(builders):
            self.add_dev_and_rebalance(b)
            self.assertLess(old_builders[i].version, b.version)
        self.save_builders(builders)
        self._call_method_under_test(cb)
        orig_version = cb.version
        cb.compose()  # compose with newer builder versions
        self.assertEqual(orig_version + 1, cb.version)  # sanity check
        # not ok to use old versions of same builders
        self.save_builders([old_builders[0], builders[1]])
        with self.assertRaises(ValueError) as cm:
            self._call_method_under_test(cb)
        error_lines = cm.exception.args[0].split('\n')
        self.assertIn("Invalid builder change at index 0", error_lines[0])
        self.assertIn("Older builder version", error_lines[0])
        self.assertFalse(error_lines[1:])
        self.assertEqual(orig_version + 1, cb.version)
        # not even if one component ring has changed
        self.add_dev_and_rebalance(builders[1])
        self.save_builders([old_builders[0], builders[1]])
        with self.assertRaises(ValueError) as cm:
            self._call_method_under_test(cb)
        error_lines = cm.exception.args[0].split('\n')
        self.assertIn("Invalid builder change at index 0", error_lines[0])
        self.assertIn("Older builder version", error_lines[0])
        self.assertFalse(error_lines[1:])
        self.assertEqual(orig_version + 1, cb.version)
        self.assertIsNone(cb._builders)
        # unless we ignore errors
        self._call_method_under_test(cb, force=True)
        self.assertEqual(old_builders[0].version, cb._builders[0].version)

    def test_load_components_different_number_builders(self):
        # not ok to use a different number of component rings
        builders = self.create_sample_ringbuilders(4)

        def do_test(bad_builders):
            cb, builder_files = self._make_composite_builder(builders[:3])
            # expect an error
            with self.assertRaises(ValueError) as cm:
                self._call_method_under_test(
                    cb, self.save_builders(bad_builders))
            error_lines = cm.exception.args[0].split('\n')
            self.assertFalse(error_lines[1:])
            self.assertEqual(1, cb.version)
            # unless we ignore errors
            self._call_method_under_test(cb, self.save_builders(bad_builders),
                                         force=True)
            self.assertEqual(len(bad_builders), len(cb._builders))
            return error_lines

        error_lines = do_test(builders[:2])  # too few
        self.assertIn("Missing builder at index 2", error_lines[0])
        error_lines = do_test(builders)  # too many
        self.assertIn("Unexpected extra builder at index 3", error_lines[0])

    def test_load_components_different_builders(self):
        # not ok to change component rings
        builders = self.create_sample_ringbuilders(3)
        cb, builder_files = self._make_composite_builder(builders[:2])
        # ensure builder[0] is newer version so that's not the problem
        self.add_dev_and_rebalance(builders[0])
        different_files = self.save_builders([builders[0], builders[2]])
        with self.assertRaises(ValueError) as cm:
            self._call_method_under_test(cb, different_files)
        error_lines = cm.exception.args[0].split('\n')
        self.assertIn("Invalid builder change at index 1", error_lines[0])
        self.assertIn("Attribute mismatch for id", error_lines[0])
        self.assertFalse(error_lines[1:])
        self.assertEqual(1, cb.version)
        # ok if we force
        self._call_method_under_test(cb, different_files, force=True)
        self.assertEqual(different_files, cb._builder_files)

    def test_load_component_different_builder_order(self):
        # not ok to change order of component rings
        builders = self.create_sample_ringbuilders(4)
        cb, builder_files = self._make_composite_builder(builders)
        builder_files.reverse()
        with self.assertRaises(ValueError) as cm:
            self._call_method_under_test(cb, builder_files)
        error_lines = cm.exception.args[0].split('\n')
        for i, line in enumerate(error_lines):
            self.assertIn("Invalid builder change at index %s" % i, line)
            self.assertIn("Attribute mismatch for id", line)
        self.assertEqual(1, cb.version)
        # ok if we force
        self._call_method_under_test(cb, builder_files, force=True)
        self.assertEqual(builder_files, cb._builder_files)

    def test_load_components_replica_count_changed(self):
        # not ok to change the number of replicas in a ring
        builders = self.create_sample_ringbuilders(3)
        cb, builder_files = self._make_composite_builder(builders)
        builders[0].set_replicas(4)
        self.save_builders(builders)
        with self.assertRaises(ValueError) as cm:
            self._call_method_under_test(cb)
        error_lines = cm.exception.args[0].split('\n')
        for i, line in enumerate(error_lines):
            self.assertIn("Invalid builder change at index 0", line)
            self.assertIn("Attribute mismatch for replicas", line)
        self.assertEqual(1, cb.version)
        # ok if we force
        self._call_method_under_test(cb, force=True)
class TestComposeLoadComponents(TestLoadComponents):
    # Repeat all TestLoadComponents tests, but trigger component loading
    # through compose() instead of load_components().
    def _call_method_under_test(self, cb, *args, **kwargs):
        cb.compose(*args, **kwargs)

    def test_load_components_replica_count_changed(self):
        # For compose method this test differs from superclass when the force
        # flag is used, because although the force flag causes load_components
        # to skip checks, the actual ring composition fails.

        # not ok to change the number of replicas in a ring
        builders = self.create_sample_ringbuilders(3)
        cb, builder_files = self._make_composite_builder(builders)
        builders[0].set_replicas(4)
        self.save_builders(builders)
        with self.assertRaises(ValueError) as cm:
            self._call_method_under_test(cb)
        error_lines = cm.exception.args[0].split('\n')
        for i, line in enumerate(error_lines):
            self.assertIn("Invalid builder change at index 0", line)
            self.assertIn("Attribute mismatch for replicas", line)
        self.assertEqual(1, cb.version)
        # if we force, then load_components succeeds but the compose pre
        # validate will fail because the builder needs rebalancing
        with self.assertRaises(ValueError) as cm:
            self._call_method_under_test(cb, force=True)
        error_lines = cm.exception.args[0].split('\n')
        self.assertIn("Problem with builders", error_lines[0])
        self.assertIn("Builder needs rebalance", error_lines[1])
        self.assertFalse(error_lines[2:])
        self.assertEqual(1, cb.version)
class TestCooperativeRingBuilder(BaseTestCompositeBuilder):
    # Tests for CooperativeRingBuilder: component builders that consult their
    # parent CompositeRingBuilder's shared part-move state so that a given
    # partition is not moved in more than one component within min_part_hours.
    def _make_coop_builder(self, region, composite_builder, rebalance=False,
                           min_part_hours=1):
        # Build a three-device CooperativeRingBuilder in the given region
        # attached to composite_builder, optionally rebalanced.
        rb = CooperativeRingBuilder(8, 3, min_part_hours, composite_builder)
        if composite_builder._builders is None:
            composite_builder._builders = [rb]
        for i in range(3):
            self.add_dev(rb, region=region)
        if rebalance:
            rb.rebalance()
            self.assertEqual(self._partition_counts(rb),
                             [256, 256, 256])  # sanity check
        return rb

    def _partition_counts(self, builder):
        """
        Returns an array mapping device id's to (number of
        partitions assigned to that device).
        """
        c = Counter(builder.devs[dev_id]['id']
                    for part2dev_id in builder._replica2part2dev
                    for dev_id in part2dev_id)
        return [c[d['id']] for d in builder.devs]

    def get_moved_parts(self, after, before):
        # Return the set of partitions whose device set differs between the
        # 'before' and 'after' builder states.
        def uniqueness(dev):
            return dev['ip'], dev['port'], dev['device']
        moved_parts = set()
        for p in range(before.parts):
            if ({uniqueness(dev) for dev in before._devs_for_part(p)} !=
                    {uniqueness(dev) for dev in after._devs_for_part(p)}):
                moved_parts.add(p)
        return moved_parts

    def num_parts_can_move(self, builder):
        # note that can_part_move() gives consideration to the
        # _part_moved_bitmap which is only reset when a rebalance starts
        return len(
            [p for p in range(builder.parts)
             if super(CooperativeRingBuilder, builder)._can_part_move(p)])

    @mock.patch('swift.common.ring.builder.time')
    def _check_rebalance_respects_cobuilder_part_moves(
            self, min_part_hours, mock_time):
        # Exercise a composite rebalance over three cobuilders and verify
        # that parts moved by one component are not moved again by another
        # within the (mocked) min_part_hours window.
        mock_time.return_value = now = int(time.time())
        builder_files = []
        cb = CompositeRingBuilder()
        for i in (1, 2, 3):
            b = self._make_coop_builder(i, cb, min_part_hours=min_part_hours)
            fname = os.path.join(self.tmpdir, 'builder_%s.builder' % i)
            b.save(fname)
            builder_files.append(fname)
        builder_files, builders = cb.load_components(builder_files)
        # all cobuilders can perform initial rebalance
        cb.rebalance()
        exp = [256, 256, 256]
        self.assertEqual(exp, self._partition_counts(builders[0]))
        self.assertEqual(exp, self._partition_counts(builders[1]))
        self.assertEqual(exp, self._partition_counts(builders[2]))
        exp = min_part_hours * 3600
        self.assertEqual(exp, builders[0].min_part_seconds_left)
        self.assertEqual(exp, builders[1].min_part_seconds_left)
        self.assertEqual(exp, builders[2].min_part_seconds_left)
        # jump forwards min_part_hours
        now += min_part_hours * 3600
        mock_time.return_value = now
        old_builders = []
        for builder in builders:
            old_builder = CooperativeRingBuilder(8, 3, min_part_hours, None)
            old_builder.copy_from(copy.deepcopy(builder.to_dict()))
            old_builders.append(old_builder)
        for builder in builders:
            self.add_dev(builder)
        # sanity checks: all builders are ready for rebalance
        self.assertEqual(0, builders[0].min_part_seconds_left)
        self.assertEqual(0, builders[1].min_part_seconds_left)
        self.assertEqual(0, builders[2].min_part_seconds_left)
        # ... but last_part_moves not yet updated to current epoch
        if min_part_hours > 0:
            self.assertEqual(0, self.num_parts_can_move(builders[0]))
            self.assertEqual(0, self.num_parts_can_move(builders[1]))
            self.assertEqual(0, self.num_parts_can_move(builders[2]))
        # shuffle is patched to the identity so the component rebalance
        # order (rb1, rb2, rb3) is deterministic
        with mock.patch('swift.common.ring.composite_builder.shuffle',
                        lambda x: x):
            cb.rebalance()
        rb1_parts_moved = self.get_moved_parts(builders[0], old_builders[0])
        self.assertEqual(192, len(rb1_parts_moved))
        self.assertEqual(self._partition_counts(builders[0]),
                         [192, 192, 192, 192])
        rb2_parts_moved = self.get_moved_parts(builders[1], old_builders[1])
        self.assertEqual(64, len(rb2_parts_moved))
        counts = self._partition_counts(builders[1])
        self.assertEqual(counts[3], 64)
        self.assertEqual([234, 235, 235], sorted(counts[:3]))
        self.assertFalse(rb2_parts_moved.intersection(rb1_parts_moved))
        # rb3 can't rebalance - all parts moved while rebalancing rb1 and rb2
        self.assertEqual(
            0, len(self.get_moved_parts(builders[2], old_builders[2])))
        # jump forwards min_part_hours, all builders can move all parts again,
        # so now rb2 should be able to further rebalance
        now += min_part_hours * 3600
        mock_time.return_value = now
        old_builders = []
        for builder in builders:
            old_builder = CooperativeRingBuilder(8, 3, min_part_hours, None)
            old_builder.copy_from(copy.deepcopy(builder.to_dict()))
            old_builders.append(old_builder)
        with mock.patch('swift.common.ring.composite_builder.shuffle',
                        lambda x: x):
            cb.rebalance()
        rb2_parts_moved = self.get_moved_parts(builders[1], old_builders[1])
        self.assertGreater(len(rb2_parts_moved), 64)
        self.assertGreater(self._partition_counts(builders[1])[3], 64)
        self.assertLess(self.num_parts_can_move(builders[2]), 256)
        self.assertEqual(256, self.num_parts_can_move(builders[0]))
        # and rb3 should also have been able to move some parts
        rb3_parts_moved = self.get_moved_parts(builders[2], old_builders[2])
        self.assertGreater(len(rb3_parts_moved), 0)
        self.assertFalse(rb3_parts_moved.intersection(rb2_parts_moved))
        # but cobuilders will not prevent a new rb rebalancing for first time
        rb4 = self._make_coop_builder(4, cb, rebalance=False,
                                      min_part_hours=min_part_hours)
        builders.append(rb4)
        builder_files = []
        for i, builder in enumerate(builders):
            fname = os.path.join(self.tmpdir, 'builder_%s.builder' % i)
            builder.save(fname)
            builder_files.append(fname)
        cb = CompositeRingBuilder()
        builder_files, builders = cb.load_components(builder_files)
        cb.rebalance()
        self.assertEqual(256, len(self.get_moved_parts(builders[3], rb4)))

    def test_rebalance_respects_cobuilder_part_moves(self):
        self._check_rebalance_respects_cobuilder_part_moves(1)
        self._check_rebalance_respects_cobuilder_part_moves(0)

    @mock.patch('swift.common.ring.builder.time')
    def _check_rebalance_cobuilder_states(
            self, min_part_hours, mock_time):
        # Capture each cobuilder's state before and after every component
        # rebalance and verify the intermediate states show the expected
        # part-move constraints.
        @contextmanager
        def mock_rebalance():
            # wrap rebalance() in order to capture builder states before and
            # after each component rebalance
            orig_rebalance = RingBuilder.rebalance
            # a dict mapping builder -> (list of captured builder states)
            captured_builder_states = defaultdict(list)

            def update_states():
                for b in cb._builders:
                    rb = CooperativeRingBuilder(8, 3, min_part_hours, None)
                    rb.copy_from(copy.deepcopy(b.to_dict()))
                    rb._part_moved_bitmap = bytearray(b._part_moved_bitmap)
                    captured_builder_states[b].append(rb)

            def wrap_rebalance(builder_instance):
                update_states()
                results = orig_rebalance(builder_instance)
                update_states()
                return results

            with mock.patch('swift.common.ring.RingBuilder.rebalance',
                            wrap_rebalance):
                yield captured_builder_states

        mock_time.return_value = now = int(time.time())
        builder_files = []
        cb = CompositeRingBuilder()
        for i in (1, 2, 3):
            b = self._make_coop_builder(i, cb, min_part_hours=min_part_hours)
            fname = os.path.join(self.tmpdir, 'builder_%s.builder' % i)
            b.save(fname)
            builder_files.append(fname)
        builder_files, builders = cb.load_components(builder_files)
        # all cobuilders can perform initial rebalance
        cb.rebalance()
        # jump forwards min_part_hours
        now += min_part_hours * 3600
        mock_time.return_value = now
        for builder in builders:
            self.add_dev(builder)
        with mock.patch('swift.common.ring.composite_builder.shuffle',
                        lambda x: x):
            with mock_rebalance() as captured_states:
                cb.rebalance()
        # sanity - state captured before and after each component rebalance
        self.assertEqual(len(builders), len(captured_states))
        for states in captured_states.values():
            self.assertEqual(2 * len(builders), len(states))
        # for each component we have a list of it's builder states
        rb1s = captured_states[builders[0]]
        rb2s = captured_states[builders[1]]
        rb3s = captured_states[builders[2]]
        # rebalancing will update epoch for all builders' last_part_moves
        self.assertEqual(now, rb1s[0]._last_part_moves_epoch)
        self.assertEqual(now, rb2s[0]._last_part_moves_epoch)
        self.assertEqual(now, rb3s[0]._last_part_moves_epoch)
        # so, in state before any component rebalance, all can now move parts
        # N.B. num_parts_can_move gathers super class's (i.e. RingBuilder)
        # _can_part_move so that it doesn't refer to cobuilders state.
        self.assertEqual(256, self.num_parts_can_move(rb1s[0]))
        self.assertEqual(256, self.num_parts_can_move(rb2s[0]))
        self.assertEqual(256, self.num_parts_can_move(rb3s[0]))
        # after first component has been rebalanced it has moved parts
        self.assertEqual(64, self.num_parts_can_move(rb1s[1]))
        self.assertEqual(256, self.num_parts_can_move(rb2s[2]))
        self.assertEqual(256, self.num_parts_can_move(rb3s[2]))
        rb1_parts_moved = self.get_moved_parts(rb1s[1], rb1s[0])
        self.assertEqual(192, len(rb1_parts_moved))
        self.assertEqual(self._partition_counts(rb1s[1]),
                         [192, 192, 192, 192])
        # rebalancing rb2 - rb2 in isolation could potentially move all parts
        # so would move 192 parts to new device, but it is constrained by rb1
        # only having 64 parts that can move
        rb2_parts_moved = self.get_moved_parts(rb2s[3], rb2s[2])
        self.assertEqual(64, len(rb2_parts_moved))
        counts = self._partition_counts(rb2s[3])
        self.assertEqual(counts[3], 64)
        self.assertEqual([234, 235, 235], sorted(counts[:3]))
        self.assertFalse(rb2_parts_moved.intersection(rb1_parts_moved))
        self.assertEqual(192, self.num_parts_can_move(rb2s[3]))
        self.assertEqual(64, self.num_parts_can_move(rb1s[3]))
        # rb3 can't rebalance - all parts moved while rebalancing rb1 and rb2
        self.assertEqual(0, len(self.get_moved_parts(rb3s[5], rb3s[0])))

    def test_rebalance_cobuilder_states(self):
        self._check_rebalance_cobuilder_states(1)
        self._check_rebalance_cobuilder_states(0)

    def _check_rebalance_cobuilders_calls(self, min_part_hours):
        # verify that co-builder methods are called during one builder's
        # rebalance
        @contextmanager
        def mock_update_last_part_moves():
            # intercept calls to RingBuilder._update_last_part_moves (yes, the
            # superclass method) and populate a dict mapping builder instance
            # to a list of that builder's parent builder when method was called
            calls = []
            orig_func = RingBuilder._update_last_part_moves

            def fake_update(builder):
                calls.append(builder)
                return orig_func(builder)

            with mock.patch(
                    'swift.common.ring.RingBuilder._update_last_part_moves',
                    fake_update):
                yield calls

        @contextmanager
        def mock_can_part_move():
            # intercept calls to RingBuilder._can_part_move (yes, the
            # superclass method) and populate a dict mapping builder instance
            # to a list of that builder's parent builder when method was called
            calls = defaultdict(list)
            orig_func = RingBuilder._can_part_move

            def fake_can_part_move(builder, part):
                calls[builder].append(part)
                return orig_func(builder, part)

            with mock.patch('swift.common.ring.RingBuilder._can_part_move',
                            fake_can_part_move):
                yield calls

        cb = CompositeRingBuilder()
        rb1 = self._make_coop_builder(1, cb, min_part_hours=min_part_hours)
        rb2 = self._make_coop_builder(2, cb, min_part_hours=min_part_hours)
        cb._builders = [rb1, rb2]
        # composite rebalance updates last_part_moves before any component
        # rebalance - after that expect no more updates
        with mock_update_last_part_moves() as update_calls:
            cb.update_last_part_moves()
        self.assertEqual({rb1, rb2}, set(update_calls))
        with mock_update_last_part_moves() as update_calls:
            with mock_can_part_move() as can_part_move_calls:
                rb2.rebalance()
        self.assertFalse(update_calls)
        # rb1 has never been rebalanced so no calls propagate from its
        # can_part_move method to its superclass _can_part_move method
        self.assertEqual({rb2}, set(can_part_move_calls))
        with mock_update_last_part_moves() as update_calls:
            with mock_can_part_move() as can_part_move_calls:
                rb1.rebalance()
        self.assertFalse(update_calls)
        # rb1 is being rebalanced so gets checked, and rb2 also gets checked
        self.assertEqual({rb1, rb2}, set(can_part_move_calls))
        self.assertEqual(768, len(can_part_move_calls[rb1]))
        self.assertEqual(768, len(can_part_move_calls[rb2]))

    def test_rebalance_cobuilders_calls(self):
        self._check_rebalance_cobuilders_calls(1)
        self._check_rebalance_cobuilders_calls(0)

    def test_save_then_load(self):
        # A CooperativeRingBuilder round-trips through save/load, can be
        # loaded as a plain RingBuilder, and a saved plain RingBuilder can
        # be loaded as a CooperativeRingBuilder.
        cb = CompositeRingBuilder()
        coop_rb = self._make_coop_builder(1, cb, rebalance=True)
        builder_file = os.path.join(self.tmpdir, 'test.builder')
        coop_rb.save(builder_file)
        cb = CompositeRingBuilder()
        loaded_coop_rb = CooperativeRingBuilder.load(builder_file,
                                                     parent_builder=cb)
        self.assertIs(cb, loaded_coop_rb.parent_builder)
        self.assertEqual(coop_rb.to_dict(), loaded_coop_rb.to_dict())
        # check can be loaded as superclass
        loaded_rb = RingBuilder.load(builder_file)
        self.assertEqual(coop_rb.to_dict(), loaded_rb.to_dict())
        # check can load a saved superclass
        rb = RingBuilder(6, 3, 0)
        for _ in range(3):
            self.add_dev(rb, region=1)
        rb.save(builder_file)
        cb = CompositeRingBuilder()
        loaded_coop_rb = CooperativeRingBuilder.load(builder_file,
                                                     parent_builder=cb)
        self.assertIs(cb, loaded_coop_rb.parent_builder)
        self.assertEqual(rb.to_dict(), loaded_coop_rb.to_dict())
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/ring/test_composite_builder.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import errno
import mock
import operator
import os
import unittest
import six.moves.cPickle as pickle
from array import array
from collections import Counter, defaultdict
from math import ceil
from tempfile import mkdtemp
from shutil import rmtree
import sys
import random
import uuid
import itertools
from six.moves import range
from swift.common import exceptions
from swift.common import ring
from swift.common.ring import utils
from swift.common.ring.builder import MAX_BALANCE
def _partition_counts(builder, key='id'):
"""
Returns a dictionary mapping the given device key to (number of
partitions assigned to that key).
"""
return Counter(builder.devs[dev_id][key]
for part2dev_id in builder._replica2part2dev
for dev_id in part2dev_id)
class TestRingBuilder(unittest.TestCase):
    def setUp(self):
        # Per-test scratch directory for builder files; removed in tearDown.
        self.testdir = mkdtemp()
def tearDown(self):
rmtree(self.testdir, ignore_errors=1)
    def _get_population_by_region(self, builder):
        """
        Returns a dictionary mapping region to number of partitions in that
        region.

        :param builder: the RingBuilder whose assignments are counted
        :returns: a Counter mapping region -> partition-replica count
        """
        return _partition_counts(builder, key='region')
def test_init(self):
rb = ring.RingBuilder(8, 3, 1)
self.assertEqual(rb.part_power, 8)
self.assertEqual(rb.replicas, 3)
self.assertEqual(rb.min_part_hours, 1)
self.assertEqual(rb.parts, 2 ** 8)
self.assertEqual(rb.devs, [])
self.assertFalse(rb.devs_changed)
self.assertEqual(rb.version, 0)
self.assertIsNotNone(rb._last_part_moves)
rd = rb.get_ring()
self.assertEqual(rd.devs, [])
self.assertEqual(rd.version, 0)
self.assertIsNone(rd.next_part_power)
self.assertEqual(rd.replica_count, 0)
def test_overlarge_part_powers(self):
expected_msg = 'part_power must be at most 32 (was 33)'
with self.assertRaises(ValueError) as ctx:
ring.RingBuilder(33, 3, 1)
self.assertEqual(str(ctx.exception), expected_msg)
def test_oversmall_part_powers(self):
expected_msg = 'part_power must be at least 0 (was -1)'
with self.assertRaises(ValueError) as ctx:
ring.RingBuilder(-1, 3, 1)
self.assertEqual(str(ctx.exception), expected_msg)
def test_insufficient_replicas(self):
expected_msg = 'replicas must be at least 1 (was 0.999000)'
with self.assertRaises(ValueError) as ctx:
ring.RingBuilder(8, 0.999, 1)
self.assertEqual(str(ctx.exception), expected_msg)
def test_negative_min_part_hours(self):
expected_msg = 'min_part_hours must be non-negative (was -1)'
with self.assertRaises(ValueError) as ctx:
ring.RingBuilder(8, 3, -1)
self.assertEqual(str(ctx.exception), expected_msg)
def test_deepcopy(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 5, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
rb.add_dev({'id': 6, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sdb1'})
# more devices in zone #1
rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10004, 'device': 'sdc1'})
rb.add_dev({'id': 7, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10004, 'device': 'sdd1'})
rb.rebalance()
rb_copy = copy.deepcopy(rb)
self.assertEqual(rb.to_dict(), rb_copy.to_dict())
self.assertIsNot(rb.devs, rb_copy.devs)
self.assertIsNot(rb._replica2part2dev, rb_copy._replica2part2dev)
self.assertIsNot(rb._last_part_moves, rb_copy._last_part_moves)
self.assertIsNot(rb._remove_devs, rb_copy._remove_devs)
self.assertIsNot(rb._dispersion_graph, rb_copy._dispersion_graph)
def test_get_ring(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10004, 'device': 'sda1'})
rb.remove_dev(1)
rb.rebalance()
r = rb.get_ring()
self.assertIsInstance(r, ring.RingData)
r2 = rb.get_ring()
self.assertIs(r, r2)
rb.rebalance()
r3 = rb.get_ring()
self.assertIsNot(r3, r2)
r4 = rb.get_ring()
self.assertIs(r3, r4)
    def test_rebalance_with_seed(self):
        # Seeded rebalances must be deterministic: two identical builders
        # rebalanced with the same seed end up identical, while an unseeded
        # rebalance is free to differ. A rebalance must also restore the
        # module-level random state, even if it raises part-way through.
        devs = [(0, 10000), (1, 10001), (2, 10002), (1, 10003)]
        ring_builders = []
        for n in range(3):
            rb = ring.RingBuilder(8, 3, 1)
            idx = 0
            for zone, port in devs:
                for d in ('sda1', 'sdb1'):
                    rb.add_dev({'id': idx, 'region': 0, 'zone': zone,
                                'ip': '127.0.0.1', 'port': port,
                                'device': d, 'weight': 1})
                    idx += 1
            ring_builders.append(rb)

        rb0 = ring_builders[0]
        rb1 = ring_builders[1]
        rb2 = ring_builders[2]

        r0 = rb0.get_ring()
        self.assertIs(rb0.get_ring(), r0)

        rb0.rebalance()  # NO SEED
        rb1.rebalance(seed=10)
        rb2.rebalance(seed=10)

        r1 = rb1.get_ring()
        r2 = rb2.get_ring()

        # the rebalance invalidated rb0's cached ring; the unseeded result
        # differs from the seeded one, but equal seeds give equal rings
        self.assertIsNot(rb0.get_ring(), r0)
        self.assertNotEqual(r0.to_dict(), r1.to_dict())
        self.assertEqual(r1.to_dict(), r2.to_dict())

        # check that random state is reset
        pre_state = random.getstate()
        rb2.rebalance(seed=10)
        self.assertEqual(pre_state, random.getstate(),
                         "Random state was not reset")

        # ... even when the rebalance raises mid-way
        pre_state = random.getstate()
        with mock.patch.object(rb2, "_build_replica_plan",
                               side_effect=Exception()):
            self.assertRaises(Exception, rb2.rebalance, seed=10)
        self.assertEqual(pre_state, random.getstate(),
                         "Random state was not reset")
def test_rebalance_part_on_deleted_other_part_on_drained(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 1, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 1, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 1, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
rb.add_dev({'id': 3, 'region': 1, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
rb.add_dev({'id': 4, 'region': 1, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10004, 'device': 'sda1'})
rb.add_dev({'id': 5, 'region': 1, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10005, 'device': 'sda1'})
rb.rebalance(seed=1)
# We want a partition where 1 replica is on a removed device, 1
# replica is on a 0-weight device, and 1 on a normal device. To
# guarantee we have one, we see where partition 123 is, then
# manipulate its devices accordingly.
zero_weight_dev_id = rb._replica2part2dev[1][123]
delete_dev_id = rb._replica2part2dev[2][123]
rb.set_dev_weight(zero_weight_dev_id, 0.0)
rb.remove_dev(delete_dev_id)
rb.rebalance()
def test_set_replicas(self):
rb = ring.RingBuilder(8, 3.2, 1)
rb.devs_changed = False
rb.set_replicas(3.25)
self.assertTrue(rb.devs_changed)
rb.devs_changed = False
rb.set_replicas(3.2500001)
self.assertFalse(rb.devs_changed)
def test_add_dev(self):
rb = ring.RingBuilder(8, 3, 1)
dev = {'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'}
dev_id = rb.add_dev(dev)
self.assertRaises(exceptions.DuplicateDeviceError, rb.add_dev, dev)
self.assertEqual(dev_id, 0)
rb = ring.RingBuilder(8, 3, 1)
# test add new dev with no id
dev_id = rb.add_dev({'zone': 0, 'region': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 6200,
'device': 'sda2'})
self.assertEqual(rb.devs[0]['id'], 0)
self.assertEqual(dev_id, 0)
# test add another dev with no id
dev_id = rb.add_dev({'zone': 3, 'region': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 6200,
'device': 'sda3'})
self.assertEqual(rb.devs[1]['id'], 1)
self.assertEqual(dev_id, 1)
# some keys are required
self.assertRaises(ValueError, rb.add_dev, {})
stub_dev = {'weight': 1, 'ip': '127.0.0.1', 'port': 7000}
for key in (stub_dev.keys()):
dev = stub_dev.copy()
dev.pop(key)
self.assertRaises(ValueError, rb.add_dev, dev)
def test_set_dev_weight(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
rb.rebalance()
r = rb.get_ring()
counts = {}
for part2dev_id in r._replica2part2dev_id:
for dev_id in part2dev_id:
counts[dev_id] = counts.get(dev_id, 0) + 1
self.assertEqual(counts, {0: 128, 1: 128, 2: 256, 3: 256})
rb.set_dev_weight(0, 0.75)
rb.set_dev_weight(1, 0.25)
rb.pretend_min_part_hours_passed()
rb.rebalance()
r = rb.get_ring()
counts = {}
for part2dev_id in r._replica2part2dev_id:
for dev_id in part2dev_id:
counts[dev_id] = counts.get(dev_id, 0) + 1
self.assertEqual(counts, {0: 192, 1: 64, 2: 256, 3: 256})
def test_remove_dev(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 3, 'weight': 1,
'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
rb.rebalance()
r = rb.get_ring()
counts = {}
for part2dev_id in r._replica2part2dev_id:
for dev_id in part2dev_id:
counts[dev_id] = counts.get(dev_id, 0) + 1
self.assertEqual(counts, {0: 192, 1: 192, 2: 192, 3: 192})
rb.remove_dev(1)
rb.pretend_min_part_hours_passed()
rb.rebalance()
r = rb.get_ring()
counts = {}
for part2dev_id in r._replica2part2dev_id:
for dev_id in part2dev_id:
counts[dev_id] = counts.get(dev_id, 0) + 1
self.assertEqual(counts, {0: 256, 2: 256, 3: 256})
    def test_round_off_error(self):
        """Rebalance a layout whose replicanths don't divide evenly and
        check both per-zone partition counts and parts_wanted bookkeeping.
        """
        # 3 nodes with 11 disks each is particularly problematic. Probably has
        # to do with the binary repr. of 1/33? Those ones look suspicious...
        #
        # >>> bin(int(struct.pack('!f', 1.0/(33)).encode('hex'), 16))
        # '0b111100111110000011111000010000'
        rb = ring.RingBuilder(8, 3, 1)
        # 33 devices: 11 in each of (region 0, zone 0), (1, 10), (1, 11)
        for dev_id, (region, zone) in enumerate(
                11 * [(0, 0), (1, 10), (1, 11)]):
            rb.add_dev({'id': dev_id, 'region': region, 'zone': zone,
                        'weight': 1, 'ip': '127.0.0.1',
                        'port': 10000 + region * 100 + zone,
                        'device': 'sda%d' % dev_id})
        rb.rebalance()
        # each zone ends up with exactly one replica's worth of parts
        self.assertEqual(_partition_counts(rb, 'zone'),
                         {0: 256, 10: 256, 11: 256})
        # histogram of parts_wanted values per zone
        wanted_by_zone = defaultdict(lambda: defaultdict(int))
        for dev in rb._iter_devs():
            wanted_by_zone[dev['zone']][dev['parts_wanted']] += 1
        # We're nicely balanced, but parts_wanted is slightly lumpy
        # because reasons.
        self.assertEqual(wanted_by_zone, {
            0: {0: 10, 1: 1},
            10: {0: 11},
            11: {0: 10, -1: 1}})
def test_remove_a_lot(self):
rb = ring.RingBuilder(3, 3, 1)
rb.add_dev({'id': 0, 'device': 'd0', 'ip': '10.0.0.1',
'port': 6202, 'weight': 1000.0, 'region': 0, 'zone': 1})
rb.add_dev({'id': 1, 'device': 'd1', 'ip': '10.0.0.2',
'port': 6202, 'weight': 1000.0, 'region': 0, 'zone': 2})
rb.add_dev({'id': 2, 'device': 'd2', 'ip': '10.0.0.3',
'port': 6202, 'weight': 1000.0, 'region': 0, 'zone': 3})
rb.add_dev({'id': 3, 'device': 'd3', 'ip': '10.0.0.1',
'port': 6202, 'weight': 1000.0, 'region': 0, 'zone': 1})
rb.add_dev({'id': 4, 'device': 'd4', 'ip': '10.0.0.2',
'port': 6202, 'weight': 1000.0, 'region': 0, 'zone': 2})
rb.add_dev({'id': 5, 'device': 'd5', 'ip': '10.0.0.3',
'port': 6202, 'weight': 1000.0, 'region': 0, 'zone': 3})
rb.rebalance()
rb.validate()
# this has to put more than 1/3 of the partitions in the
# cluster on removed devices in order to ensure that at least
# one partition has multiple replicas that need to move.
#
# (for an N-replica ring, it's more than 1/N of the
# partitions, of course)
rb.remove_dev(3)
rb.remove_dev(4)
rb.remove_dev(5)
rb.rebalance()
rb.validate()
def test_remove_zero_weighted(self):
rb = ring.RingBuilder(8, 3, 0)
rb.add_dev({'id': 0, 'device': 'd0', 'ip': '10.0.0.1',
'port': 6202, 'weight': 1000.0, 'region': 0, 'zone': 1})
rb.add_dev({'id': 1, 'device': 'd1', 'ip': '10.0.0.2',
'port': 6202, 'weight': 0.0, 'region': 0, 'zone': 2})
rb.add_dev({'id': 2, 'device': 'd2', 'ip': '10.0.0.3',
'port': 6202, 'weight': 1000.0, 'region': 0, 'zone': 3})
rb.add_dev({'id': 3, 'device': 'd3', 'ip': '10.0.0.1',
'port': 6202, 'weight': 1000.0, 'region': 0, 'zone': 1})
rb.rebalance()
rb.remove_dev(1)
parts, balance, removed = rb.rebalance()
self.assertEqual(removed, 1)
def test_shuffled_gather(self):
if self._shuffled_gather_helper() and \
self._shuffled_gather_helper():
raise AssertionError('It is highly likely the ring is no '
'longer shuffling the set of partitions '
'to reassign on a rebalance.')
def _shuffled_gather_helper(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
rb.rebalance()
rb.add_dev({'id': 3, 'region': 0, 'zone': 3, 'weight': 1,
'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
replica_plan = rb._build_replica_plan()
rb._set_parts_wanted(replica_plan)
for dev in rb._iter_devs():
dev['tiers'] = utils.tiers_for_dev(dev)
assign_parts = defaultdict(list)
rb._gather_parts_for_balance(assign_parts, replica_plan, False)
max_run = 0
run = 0
last_part = 0
for part, _ in assign_parts.items():
if part > last_part:
run += 1
else:
if run > max_run:
max_run = run
run = 0
last_part = part
if run > max_run:
max_run = run
return max_run > len(assign_parts) / 2
def test_initial_balance(self):
# 2 boxes, 2 drives each in zone 1
# 1 box, 2 drives in zone 2
#
# This is balanceable, but there used to be some nondeterminism in
# rebalance() that would sometimes give you an imbalanced ring.
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'region': 1, 'zone': 1, 'weight': 4000.0,
'ip': '10.1.1.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'region': 1, 'zone': 1, 'weight': 4000.0,
'ip': '10.1.1.1', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'region': 1, 'zone': 1, 'weight': 4000.0,
'ip': '10.1.1.2', 'port': 10000, 'device': 'sda'})
rb.add_dev({'region': 1, 'zone': 1, 'weight': 4000.0,
'ip': '10.1.1.2', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'region': 1, 'zone': 2, 'weight': 4000.0,
'ip': '10.1.1.3', 'port': 10000, 'device': 'sda'})
rb.add_dev({'region': 1, 'zone': 2, 'weight': 4000.0,
'ip': '10.1.1.3', 'port': 10000, 'device': 'sdb'})
_, balance, _ = rb.rebalance(seed=2)
# maybe not *perfect*, but should be close
self.assertLessEqual(balance, 1)
    def test_multitier_partial(self):
        """Replica placement across tiers when no tier is saturated.

        Scenario 1: 3 replicas over 4 devices in 4 distinct regions and
        zones -- no region or zone may hold two replicas of a part.
        Scenario 2: 6 replicas over 3 zones of 3 devices each -- each zone
        holds exactly 2 replicas and no device holds a part twice.
        """
        # Multitier test, nothing full
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 1, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 2, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 3, 'zone': 3, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.rebalance()
        rb.validate()
        for part in range(rb.parts):
            counts = defaultdict(lambda: defaultdict(int))
            for replica in range(rb.replicas):
                dev = rb.devs[rb._replica2part2dev[replica][part]]
                counts['region'][dev['region']] += 1
                counts['zone'][dev['zone']] += 1
            # no region may hold more than one replica of a part
            if any(c > 1 for c in counts['region'].values()):
                raise AssertionError(
                    "Partition %d not evenly region-distributed (got %r)" %
                    (part, counts['region']))
            # likewise for zones
            if any(c > 1 for c in counts['zone'].values()):
                raise AssertionError(
                    "Partition %d not evenly zone-distributed (got %r)" %
                    (part, counts['zone']))
        # Multitier test, zones full, nodes not full
        rb = ring.RingBuilder(8, 6, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdd'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sde'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdf'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10002, 'device': 'sdg'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10002, 'device': 'sdh'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10002, 'device': 'sdi'})
        rb.rebalance()
        rb.validate()
        for part in range(rb.parts):
            counts = defaultdict(lambda: defaultdict(int))
            for replica in range(rb.replicas):
                dev = rb.devs[rb._replica2part2dev[replica][part]]
                counts['zone'][dev['zone']] += 1
                counts['dev_id'][dev['id']] += 1
            # 6 replicas over 3 zones: exactly 2 per zone
            if counts['zone'] != {0: 2, 1: 2, 2: 2}:
                raise AssertionError(
                    "Partition %d not evenly distributed (got %r)" %
                    (part, counts['zone']))
            # and never two replicas on the same device
            for dev_id, replica_count in counts['dev_id'].items():
                if replica_count > 1:
                    raise AssertionError(
                        "Partition %d is on device %d more than once (%r)" %
                        (part, dev_id, counts['dev_id']))
    def test_multitier_full(self):
        """With #replicas == #devs every device must hold exactly one
        replica of every partition, two per zone."""
        # Multitier test, #replicas == #devs
        rb = ring.RingBuilder(8, 6, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdd'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sde'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdf'})
        rb.rebalance()
        rb.validate()
        for part in range(rb.parts):
            counts = defaultdict(lambda: defaultdict(int))
            for replica in range(rb.replicas):
                dev = rb.devs[rb._replica2part2dev[replica][part]]
                counts['zone'][dev['zone']] += 1
                counts['dev_id'][dev['id']] += 1
            # exactly two replicas per zone...
            if counts['zone'] != {0: 2, 1: 2, 2: 2}:
                raise AssertionError(
                    "Partition %d not evenly distributed (got %r)" %
                    (part, counts['zone']))
            # ...and exactly one replica per device
            for dev_id, replica_count in counts['dev_id'].items():
                if replica_count != 1:
                    raise AssertionError(
                        "Partition %d is on device %d %d times, not 1 (%r)" %
                        (part, dev_id, replica_count, counts['dev_id']))
    def test_multitier_overfull(self):
        """With more replicas (8) than zones (3), replicas must still be
        spread as evenly as possible: 2-3 per zone and 1-2 per device."""
        # Multitier test, #replicas > #zones (to prove even distribution)
        rb = ring.RingBuilder(8, 8, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdg'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdd'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdh'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sde'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdf'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdi'})
        rb.rebalance()
        rb.validate()
        for part in range(rb.parts):
            counts = defaultdict(lambda: defaultdict(int))
            for replica in range(rb.replicas):
                dev = rb.devs[rb._replica2part2dev[replica][part]]
                counts['zone'][dev['zone']] += 1
                counts['dev_id'][dev['id']] += 1
            # all 8 replicas are assigned somewhere
            self.assertEqual(8, sum(counts['zone'].values()))
            # 8 replicas / 3 zones: each zone holds 2 or 3
            for zone, replica_count in counts['zone'].items():
                if replica_count not in (2, 3):
                    raise AssertionError(
                        "Partition %d not evenly distributed (got %r)" %
                        (part, counts['zone']))
            # 8 replicas / 9 devices: each device holds 1 or 2
            for dev_id, replica_count in counts['dev_id'].items():
                if replica_count not in (1, 2):
                    raise AssertionError(
                        "Partition %d is on device %d %d times, "
                        "not 1 or 2 (%r)" %
                        (part, dev_id, replica_count, counts['dev_id']))
    def test_multitier_expansion_more_devices(self):
        """Doubling the devices in every zone and rebalancing repeatedly
        should converge to 2 replicas per zone on 6 unique devices."""
        rb = ring.RingBuilder(8, 6, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        # NOTE(review): ids 6-8 reuse the device names of ids 0-2 on the
        # same ip/port -- presumably irrelevant to this test; confirm.
        rb.add_dev({'id': 6, 'region': 0, 'zone': 0, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 1, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 2, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.rebalance()
        rb.validate()
        # second wave: two more devices per zone
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sde'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdf'})
        rb.add_dev({'id': 9, 'region': 0, 'zone': 0, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 10, 'region': 0, 'zone': 1, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sde'})
        rb.add_dev({'id': 11, 'region': 0, 'zone': 2, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdf'})
        # several rounds let replicas drift onto the new devices despite
        # min_part_hours
        for _ in range(5):
            rb.pretend_min_part_hours_passed()
            rb.rebalance()
        rb.validate()
        for part in range(rb.parts):
            counts = dict(zone=defaultdict(int),
                          dev_id=defaultdict(int))
            for replica in range(rb.replicas):
                dev = rb.devs[rb._replica2part2dev[replica][part]]
                counts['zone'][dev['zone']] += 1
                counts['dev_id'][dev['id']] += 1
            self.assertEqual({0: 2, 1: 2, 2: 2}, dict(counts['zone']))
            # each part is assigned once to six unique devices
            self.assertEqual(list(counts['dev_id'].values()), [1] * 6)
            self.assertEqual(len(set(counts['dev_id'].keys())), 6)
def test_multitier_part_moves_with_0_min_part_hours(self):
rb = ring.RingBuilder(8, 3, 0)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd1'})
rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sde1'})
rb.rebalance()
rb.validate()
# min_part_hours is 0, so we're clear to move 2 replicas to
# new devs
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc1'})
rb.rebalance()
rb.validate()
for part in range(rb.parts):
devs = set()
for replica in range(rb.replicas):
devs.add(rb._replica2part2dev[replica][part])
if len(devs) != 3:
raise AssertionError(
"Partition %d not on 3 devs (got %r)" % (part, devs))
def test_multitier_part_moves_with_positive_min_part_hours(self):
rb = ring.RingBuilder(8, 3, 99)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd1'})
rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sde1'})
rb.rebalance()
rb.validate()
# min_part_hours is >0, so we'll only be able to move 1
# replica to a new home
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc1'})
rb.pretend_min_part_hours_passed()
rb.rebalance()
rb.validate()
for part in range(rb.parts):
devs = set()
for replica in range(rb.replicas):
devs.add(rb._replica2part2dev[replica][part])
if not any(rb.devs[dev_id]['zone'] == 1 for dev_id in devs):
raise AssertionError(
"Partition %d did not move (got %r)" % (part, devs))
def test_multitier_dont_move_too_many_replicas(self):
rb = ring.RingBuilder(8, 3, 1)
# there'll be at least one replica in z0 and z1
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb1'})
rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 6, 'region': 0, 'zone': 1, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb1'})
rb.rebalance()
rb.validate()
# only 1 replica should move
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd1'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 3, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sde1'})
rb.add_dev({'id': 4, 'region': 0, 'zone': 4, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdf1'})
rb.pretend_min_part_hours_passed()
rb.rebalance()
rb.validate()
for part in range(rb.parts):
zones = set()
for replica in range(rb.replicas):
zones.add(rb.devs[rb._replica2part2dev[replica][part]]['zone'])
if len(zones) != 3:
raise AssertionError(
"Partition %d not in 3 zones (got %r)" % (part, zones))
if 0 not in zones or 1 not in zones:
raise AssertionError(
"Partition %d not in zones 0 and 1 (got %r)" %
(part, zones))
def test_min_part_hours_zero_will_move_one_replica(self):
rb = ring.RingBuilder(8, 3, 0)
# there'll be at least one replica in z0 and z1
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb1'})
rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 6, 'region': 0, 'zone': 1, 'weight': 0.5,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb1'})
rb.rebalance(seed=1)
rb.validate()
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd1'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 3, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sde1'})
rb.add_dev({'id': 4, 'region': 0, 'zone': 4, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sdf1'})
rb.rebalance(seed=3)
rb.validate()
self.assertEqual(0, rb.dispersion)
# Only one replica could move, so some zones are quite unbalanced
self.assertAlmostEqual(rb.get_balance(), 66.66, delta=0.5)
# There was only zone 0 and 1 before adding more devices. Only one
# replica should have been moved, therefore we expect 256 parts in zone
# 0 and 1, and a total of 256 in zone 2,3, and 4
expected = defaultdict(int, {0: 256, 1: 256, 2: 86, 3: 85, 4: 85})
self.assertEqual(expected, _partition_counts(rb, key='zone'))
zone_histogram = defaultdict(int)
for part in range(rb.parts):
zones = [
rb.devs[rb._replica2part2dev[replica][part]]['zone']
for replica in range(rb.replicas)]
zone_histogram[tuple(sorted(zones))] += 1
# We expect that every partition moved exactly one replica
expected = {
(0, 1, 2): 86,
(0, 1, 3): 85,
(0, 1, 4): 85,
}
self.assertEqual(zone_histogram, expected)
# After rebalancing one more times, we expect that everything is in a
# good state
rb.rebalance(seed=3)
self.assertEqual(0, rb.dispersion)
# a balance of w/i a 1% isn't too bad for 3 replicas on 7
# devices when part power is only 8
self.assertAlmostEqual(rb.get_balance(), 0, delta=0.5)
# every zone has either 153 or 154 parts
for zone, count in _partition_counts(
rb, key='zone').items():
self.assertAlmostEqual(153.5, count, delta=1)
parts_with_moved_count = defaultdict(int)
for part in range(rb.parts):
zones = set()
for replica in range(rb.replicas):
zones.add(rb.devs[rb._replica2part2dev[replica][part]]['zone'])
moved_replicas = len(zones - {0, 1})
parts_with_moved_count[moved_replicas] += 1
# as usual, the real numbers depend on the seed, but we want to
# validate a few things here:
#
# 1) every part had to move one replica to hit dispersion (so no
# one can have a moved count 0)
#
# 2) it's quite reasonable that some small percent of parts will
# have a replica in {0, 1, X} (meaning only one replica of the
# part moved)
#
# 3) when min_part_hours is 0, more than one replica of a part
# can move in a rebalance, and since that movement would get to
# better dispersion faster we expect to observe most parts in
# {[0,1], X, X} (meaning *two* replicas of the part moved)
#
# 4) there's plenty of weight in z0 & z1 to hold a whole
# replicanth, so there is no reason for any part to have to move
# all three replicas out of those zones (meaning no one can have
# a moved count 3)
#
expected = {
1: 52,
2: 204,
}
self.assertEqual(parts_with_moved_count, expected)
def test_ever_rebalanced(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
self.assertFalse(rb.ever_rebalanced)
builder_file = os.path.join(self.testdir, 'test.buider')
rb.save(builder_file)
rb = ring.RingBuilder.load(builder_file)
self.assertFalse(rb.ever_rebalanced)
rb.rebalance()
self.assertTrue(rb.ever_rebalanced)
rb.save(builder_file)
rb = ring.RingBuilder.load(builder_file)
self.assertTrue(rb.ever_rebalanced)
def test_rerebalance(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
self.assertFalse(rb.ever_rebalanced)
rb.rebalance()
self.assertTrue(rb.ever_rebalanced)
counts = _partition_counts(rb)
self.assertEqual(counts, {0: 256, 1: 256, 2: 256})
rb.add_dev({'id': 3, 'region': 0, 'zone': 3, 'weight': 1,
'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
rb.pretend_min_part_hours_passed()
rb.rebalance()
self.assertTrue(rb.ever_rebalanced)
counts = _partition_counts(rb)
self.assertEqual(counts, {0: 192, 1: 192, 2: 192, 3: 192})
rb.set_dev_weight(3, 100)
rb.rebalance()
counts = _partition_counts(rb)
self.assertEqual(counts[3], 256)
def test_add_rebalance_add_rebalance_delete_rebalance(self):
# Test for https://bugs.launchpad.net/swift/+bug/845952
# min_part of 0 to allow for rapid rebalancing
rb = ring.RingBuilder(8, 3, 0)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
rb.rebalance()
rb.validate()
rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10004, 'device': 'sda1'})
rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10005, 'device': 'sda1'})
rb.rebalance()
rb.validate()
rb.remove_dev(1)
# well now we have only one device in z0
rb.set_overload(0.5)
rb.rebalance()
rb.validate()
    def test_remove_last_partition_from_zero_weight(self):
        """Reducing a device to zero weight must drain its last replica,
        and a subsequent rebalance must then be a no-op."""
        rb = ring.RingBuilder(4, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 1, 'weight': 1.0,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 2, 'weight': 1.0,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'weight': 1.0,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 3, 'weight': 1.0,
                    'ip': '127.0.0.3', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 3, 'weight': 1.0,
                    'ip': '127.0.0.3', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 3, 'weight': 1.0,
                    'ip': '127.0.0.3', 'port': 10000, 'device': 'sdc'})
        # the soon-to-be-drained device, deliberately underweighted
        rb.add_dev({'id': 3, 'region': 0, 'zone': 3, 'weight': 0.4,
                    'ip': '127.0.0.3', 'port': 10001, 'device': 'zero'})
        zero_weight_dev = 3
        rb.rebalance(seed=1)
        # We want at least one partition with replicas only in zone 2 and 3
        # due to device weights. It would *like* to spread out into zone 1,
        # but can't, due to device weight.
        #
        # Also, we want such a partition to have a replica on device 3,
        # which we will then reduce to zero weight. This should cause the
        # removal of the replica from device 3.
        #
        # Getting this to happen by chance is hard, so let's just set up a
        # builder so that it's in the state we want. This is a synthetic
        # example; while the bug has happened on a real cluster, that
        # builder file had a part_power of 16, so its contents are much too
        # big to include here.
        rb._replica2part2dev = [
            # these are the relevant ones
            #                |  |        |
            #                v  v        v
            array('H', [2, 5, 6, 2, 5, 6, 2, 5, 6, 2, 5, 6, 2, 5, 6, 2]),
            array('H', [1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4]),
            array('H', [0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 5, 6, 2, 5, 6])]
        # fix up bookkeeping
        new_dev_parts = defaultdict(int)
        for part2dev_id in rb._replica2part2dev:
            for dev_id in part2dev_id:
                new_dev_parts[dev_id] += 1
        for dev in rb._iter_devs():
            dev['parts'] = new_dev_parts[dev['id']]
        rb.set_dev_weight(zero_weight_dev, 0.0)
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=1)
        # the drained device must hold no replicas at all now
        node_counts = defaultdict(int)
        for part2dev_id in rb._replica2part2dev:
            for dev_id in part2dev_id:
                node_counts[dev_id] += 1
        self.assertEqual(node_counts[zero_weight_dev], 0)
        # it's as balanced as it gets, so nothing moves anymore
        rb.pretend_min_part_hours_passed()
        parts_moved, _balance, _removed = rb.rebalance(seed=1)
        new_node_counts = defaultdict(int)
        for part2dev_id in rb._replica2part2dev:
            for dev_id in part2dev_id:
                new_node_counts[dev_id] += 1
        del node_counts[zero_weight_dev]
        self.assertEqual(node_counts, new_node_counts)
        self.assertEqual(parts_moved, 0)
    def test_part_swapping_problem(self):
        """Regression test for bug 1724356: a lopsided (but legal) replica
        layout on one ip must rebalance back to the expected per-device
        balance instead of endlessly swapping parts."""
        rb = ring.RingBuilder(4, 3, 1)
        # 127.0.0.1 (2 devs)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        # 127.0.0.2 (3 devs)
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdc'})
        expected = {
            '127.0.0.1': 1.2,
            '127.0.0.2': 1.7999999999999998,
        }
        # weighted, wanted and target replica plans all agree per ip
        for wr in (rb._build_weighted_replicas_by_tier(),
                   rb._build_wanted_replicas_by_tier(),
                   rb._build_target_replicas_by_tier()):
            self.assertEqual(expected, {t[-1]: r for (t, r) in
                                        wr.items() if len(t) == 3})
        self.assertEqual(rb.get_required_overload(), 0)
        rb.rebalance(seed=3)
        # so 127.0.0.1 ended up with...
        tier = (0, 0, '127.0.0.1')
        # ... 12 parts with 1 replica
        self.assertEqual(rb._dispersion_graph[tier][1], 12)
        # ... 4 parts with 2 replicas
        self.assertEqual(rb._dispersion_graph[tier][2], 4)
        # but since we only have two tiers, this is *totally* dispersed
        self.assertEqual(0, rb.dispersion)
        # small rings are hard to balance...
        expected = {0: 10, 1: 10, 2: 10, 3: 9, 4: 9}
        self.assertEqual(expected, {d['id']: d['parts']
                                    for d in rb._iter_devs()})
        # everyone wants 9.6 parts
        expected = {
            0: 4.166666666666671,
            1: 4.166666666666671,
            2: 4.166666666666671,
            3: -6.25,
            4: -6.25,
        }
        self.assertEqual(expected, rb._build_balance_per_dev())
        # original sorted _replica2part2dev
        """
        rb._replica2part2dev = [
            array('H', [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1]),
            array('H', [1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 2, 2, 2, 3, 3, 3]),
            array('H', [2, 2, 2, 2, 3, 3, 4, 4, 4, 4, 3, 4, 4, 4, 4, 4])]
        """
        # now imagine if we came along this _replica2part2dev through no
        # fault of our own; if instead of the 12 parts with only one
        # replica on 127.0.0.1 being split evenly (6 and 6) on device's
        # 0 and 1 - device 1 inexplicitly had 3 extra parts
        rb._replica2part2dev = [
            # these are the relevant one's here
            #                       |  |  |
            #                       v  v  v
            array('H', [0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1]),
            array('H', [1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 2, 2, 2, 3, 3, 3]),
            array('H', [2, 2, 2, 2, 3, 3, 4, 4, 4, 4, 3, 4, 4, 4, 4, 4])]
        # fix up bookkeeping
        new_dev_parts = defaultdict(int)
        for part2dev_id in rb._replica2part2dev:
            for dev_id in part2dev_id:
                new_dev_parts[dev_id] += 1
        for dev in rb._iter_devs():
            dev['parts'] = new_dev_parts[dev['id']]
        # reset the _last_part_gather_start otherwise
        # there is a chance it'll unluckly wrap and try and
        # move one of the device 1's from replica 2
        # causing the intermitant failure in bug 1724356
        rb._last_part_gather_start = 0
        rb.pretend_min_part_hours_passed()
        rb.rebalance()
        # the rebalance restores the same per-dev balance as before
        expected = {
            0: 4.166666666666671,
            1: 4.166666666666671,
            2: 4.166666666666671,
            3: -6.25,
            4: -6.25,
        }
        self.assertEqual(expected, rb._build_balance_per_dev())
        self.assertEqual(rb.get_balance(), 6.25)
    def test_wrong_tier_with_no_where_to_go(self):
        """A device that must shed parts can do so even when the only
        devices with room are in the other failure domain (ip tier).
        """
        rb = ring.RingBuilder(4, 3, 1)
        # 127.0.0.1 (even devices)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 900,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 900,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 0, 'weight': 900,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        # 127.0.0.2 (odd devices)
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdd'})
        # every replica-planning pass should want 1.75 replicas on the
        # heavier server (2800/4400 of weight) and 1.25 on the lighter one
        expected = {
            '127.0.0.1': 1.75,
            '127.0.0.2': 1.25,
        }
        for wr in (rb._build_weighted_replicas_by_tier(),
                   rb._build_wanted_replicas_by_tier(),
                   rb._build_target_replicas_by_tier()):
            self.assertEqual(expected, {t[-1]: r for (t, r) in
                                        wr.items() if len(t) == 3})
        self.assertEqual(rb.get_required_overload(), 0)
        rb.rebalance(seed=3)
        # so 127.0.0.1 ended up with...
        tier = (0, 0, '127.0.0.1')
        # ... 4 parts with 1 replica
        self.assertEqual(rb._dispersion_graph[tier][1], 4)
        # ... 12 parts with 2 replicas
        self.assertEqual(rb._dispersion_graph[tier][2], 12)
        # ... and of course 0 parts with 3 replicas
        self.assertEqual(rb._dispersion_graph[tier][3], 0)
        # but since we only have two tiers, this is *totally* dispersed
        self.assertEqual(0, rb.dispersion)
        # small rings are hard to balance, but it's possible when
        # part-replicas (3 * 2 ** 4) can go evenly into device weights
        # (4800) like we've done here
        expected = {
            0: 1,
            2: 9,
            4: 9,
            6: 9,
            1: 5,
            3: 5,
            5: 5,
            7: 5,
        }
        self.assertEqual(expected, {d['id']: d['parts']
                                    for d in rb._iter_devs()})
        expected = {
            0: 0.0,
            1: 0.0,
            2: 0.0,
            3: 0.0,
            4: 0.0,
            5: 0.0,
            6: 0.0,
            7: 0.0,
        }
        self.assertEqual(expected, rb._build_balance_per_dev())
        # all devices have exactly the # of parts they want
        expected = {
            0: 0,
            2: 0,
            4: 0,
            6: 0,
            1: 0,
            3: 0,
            5: 0,
            7: 0,
        }
        self.assertEqual(expected, {d['id']: d['parts_wanted']
                                    for d in rb._iter_devs()})
        # original sorted _replica2part2dev
        """
        rb._replica2part2dev = [
            array('H', [0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 4, 4, 4, 4, 4, ]),
            array('H', [4, 4, 4, 6, 6, 6, 6, 6, 6, 6, 6, 6, 1, 1, 1, 1, ]),
            array('H', [1, 3, 3, 3, 3, 3, 5, 5, 5, 5, 5, 7, 7, 7, 7, 7, ])]
        """
        # now imagine if we came along this _replica2part2dev through no
        # fault of our own; and device 0 had extra parts, but both
        # copies of the other replicas were already in the other tier!
        rb._replica2part2dev = [
            # these are the relevant ones here (parts 14 and 15 landed on
            # device 0, which only deserves one part)
            array('H', [2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 4, 4, 4, 4, 0, 0]),
            array('H', [4, 4, 4, 4, 6, 6, 6, 6, 6, 6, 6, 6, 6, 1, 1, 1]),
            array('H', [1, 1, 3, 3, 3, 3, 5, 5, 5, 5, 5, 7, 7, 7, 7, 7])]
        # fix up bookkeeping so dev['parts'] matches the hand-built table
        new_dev_parts = defaultdict(int)
        for part2dev_id in rb._replica2part2dev:
            for dev_id in part2dev_id:
                new_dev_parts[dev_id] += 1
        for dev in rb._iter_devs():
            dev['parts'] = new_dev_parts[dev['id']]
        replica_plan = rb._build_replica_plan()
        rb._set_parts_wanted(replica_plan)
        expected = {
            0: -1,  # this device wants to shed
            2: 0,
            4: 0,
            6: 0,
            1: 0,
            3: 1,  # there's devices with room on the other server
            5: 0,
            7: 0,
        }
        self.assertEqual(expected, {d['id']: d['parts_wanted']
                                    for d in rb._iter_devs()})
        self.assertEqual(rb.get_balance(), 100)
        rb.pretend_min_part_hours_passed()
        # There's something like a 11% chance that we won't be able to get to
        # a balance of 0 (and a 6% chance that we won't change anything at all)
        # Pick a seed to make this pass.
        rb.rebalance(seed=123)
        self.assertEqual(rb.get_balance(), 0)
    def test_multiple_duplicate_device_assignment(self):
        """A hand-crafted assignment table where the same device appears
        in several replicas of the same part must still rebalance and
        validate without crashing.
        """
        rb = ring.RingBuilder(4, 4, 1)
        devs = [
            'r1z1-127.0.0.1:6200/d1',
            'r1z1-127.0.0.1:6201/d2',
            'r1z1-127.0.0.1:6202/d3',
            'r1z1-127.0.0.1:33443/d4',
            'r1z1-127.0.0.2:6200/d5',
            'r1z1-127.0.0.2:6201/d6',
            'r1z1-127.0.0.2:6202/d7',
            'r1z1-127.0.0.2:6202/d8',
        ]
        for add_value in devs:
            dev = utils.parse_add_value(add_value)
            dev['weight'] = 1.0
            rb.add_dev(dev)
        rb.rebalance()
        # overwrite the assignment with a table that repeats a device id
        # across *multiple* replicas of the same part
        rb._replica2part2dev = [
            # these are the relevant ones here
            #          |  |  |                 |
            #          v  v  v                 v
            array('H', [0, 1, 2, 3, 3, 0, 0, 0, 4, 6, 4, 4, 4, 4, 4, 4]),
            array('H', [0, 1, 3, 1, 1, 1, 1, 1, 5, 7, 5, 5, 5, 5, 5, 5]),
            array('H', [0, 1, 2, 2, 2, 2, 2, 2, 4, 6, 6, 6, 6, 6, 6, 6]),
            array('H', [0, 3, 2, 3, 3, 3, 3, 3, 5, 7, 7, 7, 7, 7, 7, 7])
            #            ^
            #            |
            #            this sort of thing worked already
        ]
        # fix up bookkeeping so dev['parts'] matches the hand-built table
        new_dev_parts = defaultdict(int)
        for part2dev_id in rb._replica2part2dev:
            for dev_id in part2dev_id:
                new_dev_parts[dev_id] += 1
        for dev in rb._iter_devs():
            dev['parts'] = new_dev_parts[dev['id']]
        rb.pretend_min_part_hours_passed()
        rb.rebalance()
        rb.validate()
def test_region_fullness_with_balanceable_ring(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 1, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
rb.add_dev({'id': 3, 'region': 1, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10004, 'device': 'sda1'})
rb.add_dev({'id': 4, 'region': 2, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10005, 'device': 'sda1'})
rb.add_dev({'id': 5, 'region': 2, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10006, 'device': 'sda1'})
rb.add_dev({'id': 6, 'region': 3, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10007, 'device': 'sda1'})
rb.add_dev({'id': 7, 'region': 3, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10008, 'device': 'sda1'})
rb.rebalance(seed=2)
population_by_region = self._get_population_by_region(rb)
self.assertEqual(population_by_region,
{0: 192, 1: 192, 2: 192, 3: 192})
def test_region_fullness_with_unbalanceable_ring(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 2,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 2,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 1, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
rb.add_dev({'id': 3, 'region': 1, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10004, 'device': 'sda1'})
rb.rebalance(seed=2)
population_by_region = self._get_population_by_region(rb)
self.assertEqual(population_by_region, {0: 512, 1: 256})
    def test_adding_region_slowly_with_unbalanceable_ring(self):
        """Gradually weighting up a new, too-small region only moves as
        many parts as the region can actually hold, and no more.
        """
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb1'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc1'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 0, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd1'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 1, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb1'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 1, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdc1'})
        rb.add_dev({'id': 9, 'region': 0, 'zone': 1, 'weight': 0.5,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdd1'})
        rb.rebalance(seed=2)
        # add a new region at 1/4 of the device weight used in region 0
        rb.add_dev({'id': 2, 'region': 1, 'zone': 0, 'weight': 0.25,
                    'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
        rb.add_dev({'id': 3, 'region': 1, 'zone': 1, 'weight': 0.25,
                    'ip': '127.0.0.1', 'port': 10004, 'device': 'sda1'})
        rb.pretend_min_part_hours_passed()
        changed_parts, _balance, _removed = rb.rebalance(seed=2)
        # there's not enough room in r1 for every partition to have a replica
        # in it, so only 86 assignments occur in r1 (that's ~1/5 of the total,
        # since r1 has 1/5 of the weight).
        population_by_region = self._get_population_by_region(rb)
        self.assertEqual(population_by_region, {0: 682, 1: 86})
        # really 86 parts *should* move (to the new region) but to avoid
        # accidentally picking up too many and causing some parts to randomly
        # flop around devices in the original region - our gather algorithm
        # is conservative when picking up only from devices that are for sure
        # holding more parts than they want (math.ceil() of the replica_plan)
        # which guarantees any parts picked up will have new homes in a better
        # tier or failure_domain.
        self.assertEqual(86, changed_parts)
        # and since there's not enough room, subsequent rebalances will not
        # cause additional assignments to r1
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=2)
        rb.validate()
        population_by_region = self._get_population_by_region(rb)
        self.assertEqual(population_by_region, {0: 682, 1: 86})
        # after you add more weight, more partition assignments move
        rb.set_dev_weight(2, 0.5)
        rb.set_dev_weight(3, 0.5)
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=2)
        rb.validate()
        population_by_region = self._get_population_by_region(rb)
        self.assertEqual(population_by_region, {0: 614, 1: 154})
        rb.set_dev_weight(2, 1.0)
        rb.set_dev_weight(3, 1.0)
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=2)
        rb.validate()
        population_by_region = self._get_population_by_region(rb)
        self.assertEqual(population_by_region, {0: 512, 1: 256})
    def test_avoid_tier_change_new_region(self):
        """Ramping up a new region's weight in steps should move only the
        minimum number of partitions needed for its proportional share.
        """
        rb = ring.RingBuilder(8, 3, 1)
        for i in range(5):
            rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'weight': 100,
                        'ip': '127.0.0.1', 'port': i, 'device': 'sda1'})
        rb.rebalance(seed=2)
        # Add a new device in new region to a balanced ring
        rb.add_dev({'id': 5, 'region': 1, 'zone': 0, 'weight': 0,
                    'ip': '127.0.0.5', 'port': 10000, 'device': 'sda1'})
        # Increase the weight of region 1 slowly
        moved_partitions = []
        errors = []
        for weight in range(0, 101, 10):
            rb.set_dev_weight(5, weight)
            rb.pretend_min_part_hours_passed()
            changed_parts, _balance, _removed = rb.rebalance(seed=2)
            rb.validate()
            moved_partitions.append(changed_parts)
            # Ensure that the second region has enough partitions
            # Otherwise there will be replicas at risk
            min_parts_for_r1 = ceil(weight / (500.0 + weight) * 768)
            parts_for_r1 = self._get_population_by_region(rb).get(1, 0)
            # collect mismatches rather than failing fast so the final
            # assertion reports every bad weight step at once
            try:
                self.assertEqual(min_parts_for_r1, parts_for_r1)
            except AssertionError:
                errors.append('weight %s got %s parts but expected %s' % (
                    weight, parts_for_r1, min_parts_for_r1))
        self.assertFalse(errors)
        # Number of partitions moved on each rebalance
        # 10/510 * 768 ~ 15.06 -> move at least 15 partitions in first step
        ref = [0, 16, 14, 14, 13, 13, 13, 12, 11, 12, 10]
        self.assertEqual(ref, moved_partitions)
def test_set_replicas_increase(self):
rb = ring.RingBuilder(8, 2, 0)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.rebalance()
rb.validate()
rb.replicas = 2.1
rb.rebalance()
rb.validate()
self.assertEqual([len(p2d) for p2d in rb._replica2part2dev],
[256, 256, 25])
rb.replicas = 2.2
rb.rebalance()
rb.validate()
self.assertEqual([len(p2d) for p2d in rb._replica2part2dev],
[256, 256, 51])
def test_set_replicas_decrease(self):
rb = ring.RingBuilder(4, 5, 0)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 5, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.rebalance()
rb.validate()
rb.replicas = 4.9
rb.rebalance()
rb.validate()
self.assertEqual([len(p2d) for p2d in rb._replica2part2dev],
[16, 16, 16, 16, 14])
# cross a couple of integer thresholds (4 and 3)
rb.replicas = 2.5
rb.rebalance()
rb.validate()
self.assertEqual([len(p2d) for p2d in rb._replica2part2dev],
[16, 16, 8])
def test_fractional_replicas_rebalance(self):
rb = ring.RingBuilder(8, 2.5, 0)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.rebalance() # passes by not crashing
rb.validate() # also passes by not crashing
self.assertEqual([len(p2d) for p2d in rb._replica2part2dev],
[256, 256, 128])
def test_create_add_dev_add_replica_rebalance(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 3,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 3,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 3,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 3,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
rb.set_replicas(4)
rb.rebalance() # this would crash since parts_wanted was not set
rb.validate()
def test_reduce_replicas_after_remove_device(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 3,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 3,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 3,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
rb.rebalance()
rb.remove_dev(0)
self.assertRaises(exceptions.RingValidationError, rb.rebalance)
rb.set_replicas(2)
rb.rebalance()
rb.validate()
    def test_rebalance_post_upgrade(self):
        """A builder whose parts_wanted was computed by the old rounding
        rule must still rebalance cleanly after upgrade.
        """
        rb = ring.RingBuilder(8, 3, 1)
        # 5 devices: 5 is the smallest number that does not divide 3 * 2^8,
        # which forces some rounding to happen.
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sde'})
        rb.rebalance()
        rb.validate()
        # Older versions of the ring builder code would round down when
        # computing parts_wanted, while the new code rounds up. Make sure we
        # can handle a ring built by the old method.
        #
        # This code mimics the old _set_parts_wanted.
        weight_of_one_part = rb.weight_of_one_part()
        for dev in rb._iter_devs():
            if not dev['weight']:
                # zero-weight devices want to shed everything
                dev['parts_wanted'] = -rb.parts * rb.replicas
            else:
                # int() truncates, i.e. the old round-down behavior
                dev['parts_wanted'] = (
                    int(weight_of_one_part * dev['weight']) -
                    dev['parts'])
        rb.pretend_min_part_hours_passed()
        rb.rebalance()  # this crashes unless rebalance resets parts_wanted
        rb.validate()
    def test_add_replicas_then_rebalance_respects_weight(self):
        """Doubling the replica count should exactly double each device's
        part count, preserving the 3:1 weight ratio between devices.
        """
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 3,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 3,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'weight': 3,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sde'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 1, 'weight': 3,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdf'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdg'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdh'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 2, 'weight': 3,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdi'})
        rb.add_dev({'id': 9, 'region': 0, 'zone': 2, 'weight': 3,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdj'})
        rb.add_dev({'id': 10, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdk'})
        rb.add_dev({'id': 11, 'region': 0, 'zone': 2, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdl'})
        rb.rebalance(seed=1)
        # weight-3 devices get 96 parts each, weight-1 devices get 32
        r = rb.get_ring()
        counts = {}
        for part2dev_id in r._replica2part2dev_id:
            for dev_id in part2dev_id:
                counts[dev_id] = counts.get(dev_id, 0) + 1
        self.assertEqual(counts, {0: 96, 1: 96,
                                  2: 32, 3: 32,
                                  4: 96, 5: 96,
                                  6: 32, 7: 32,
                                  8: 96, 9: 96,
                                  10: 32, 11: 32})
        rb.replicas *= 2
        rb.rebalance(seed=1)
        # every per-device count doubles along with the replica count
        r = rb.get_ring()
        counts = {}
        for part2dev_id in r._replica2part2dev_id:
            for dev_id in part2dev_id:
                counts[dev_id] = counts.get(dev_id, 0) + 1
        self.assertEqual(counts, {0: 192, 1: 192,
                                  2: 64, 3: 64,
                                  4: 192, 5: 192,
                                  6: 64, 7: 64,
                                  8: 192, 9: 192,
                                  10: 64, 11: 64})
    def test_overload(self):
        """Increasing the overload factor lets lighter zones take
        progressively more than their weight-fair share, up to parity.
        """
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sde'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdf'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdg'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdh'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sdi'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 2,
                    'ip': '127.0.0.2', 'port': 10002, 'device': 'sdc'})
        rb.add_dev({'id': 9, 'region': 0, 'zone': 2, 'weight': 2,
                    'ip': '127.0.0.2', 'port': 10002, 'device': 'sdj'})
        rb.add_dev({'id': 10, 'region': 0, 'zone': 2, 'weight': 2,
                    'ip': '127.0.0.2', 'port': 10002, 'device': 'sdk'})
        rb.add_dev({'id': 11, 'region': 0, 'zone': 2, 'weight': 2,
                    'ip': '127.0.0.2', 'port': 10002, 'device': 'sdl'})
        rb.rebalance(seed=12345)
        rb.validate()
        # sanity check: balance respects weights, so default
        part_counts = _partition_counts(rb, key='zone')
        self.assertEqual(part_counts[0], 192)
        self.assertEqual(part_counts[1], 192)
        self.assertEqual(part_counts[2], 384)
        # Devices 0 and 1 take 10% more than their fair shares by weight since
        # overload is 10% (0.1).
        rb.set_overload(0.1)
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=12345)
        part_counts = _partition_counts(rb, key='zone')
        self.assertEqual({0: 212, 1: 211, 2: 345}, part_counts)
        # Now, devices 0 and 1 take 50% more than their fair shares by
        # weight.
        rb.set_overload(0.5)
        for _ in range(3):
            rb.pretend_min_part_hours_passed()
            rb.rebalance(seed=12345)
        part_counts = _partition_counts(rb, key='zone')
        self.assertEqual({0: 256, 1: 256, 2: 256}, part_counts)
        # Devices 0 and 1 may take up to 75% over their fair share, but the
        # placement algorithm only wants to spread things out evenly between
        # all drives, so the devices stay at 50% more.
        rb.set_overload(0.75)
        for _ in range(3):
            rb.pretend_min_part_hours_passed()
            rb.rebalance(seed=12345)
        part_counts = _partition_counts(rb, key='zone')
        self.assertEqual(part_counts[0], 256)
        self.assertEqual(part_counts[1], 256)
        self.assertEqual(part_counts[2], 256)
    def test_unoverload(self):
        # Start off needing overload to balance, then add capacity until we
        # don't need overload any more and see that things still balance.
        # Overload doesn't prevent optimal balancing.
        rb = ring.RingBuilder(8, 3, 1)
        rb.set_overload(0.125)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 2,
                    'ip': '127.0.0.3', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 9, 'region': 0, 'zone': 0, 'weight': 2,
                    'ip': '127.0.0.3', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 10, 'region': 0, 'zone': 0, 'weight': 2,
                    'ip': '127.0.0.3', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 11, 'region': 0, 'zone': 0, 'weight': 2,
                    'ip': '127.0.0.3', 'port': 10000, 'device': 'sdc'})
        rb.rebalance(seed=12345)
        # sanity check: our overload is big enough to balance things
        part_counts = _partition_counts(rb, key='ip')
        self.assertEqual(part_counts['127.0.0.1'], 216)
        self.assertEqual(part_counts['127.0.0.2'], 216)
        self.assertEqual(part_counts['127.0.0.3'], 336)
        # Add some weight: balance improves
        for dev in rb.devs:
            if dev['ip'] in ('127.0.0.1', '127.0.0.2'):
                rb.set_dev_weight(dev['id'], 1.22)
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=12345)
        part_counts = _partition_counts(rb, key='ip')
        self.assertEqual({
            '127.0.0.1': 237,
            '127.0.0.2': 237,
            '127.0.0.3': 294,
        }, part_counts)
        # Even out the weights: balance becomes perfect
        for dev in rb.devs:
            if dev['ip'] in ('127.0.0.1', '127.0.0.2'):
                rb.set_dev_weight(dev['id'], 2)
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=12345)
        part_counts = _partition_counts(rb, key='ip')
        self.assertEqual(part_counts['127.0.0.1'], 256)
        self.assertEqual(part_counts['127.0.0.2'], 256)
        self.assertEqual(part_counts['127.0.0.3'], 256)
        # Add a new server: balance stays optimal
        rb.add_dev({'id': 12, 'region': 0, 'zone': 0,
                    'weight': 2,
                    'ip': '127.0.0.4', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 13, 'region': 0, 'zone': 0,
                    'weight': 2,
                    'ip': '127.0.0.4', 'port': 10000, 'device': 'sde'})
        rb.add_dev({'id': 14, 'region': 0, 'zone': 0,
                    'weight': 2,
                    'ip': '127.0.0.4', 'port': 10000, 'device': 'sdf'})
        rb.add_dev({'id': 15, 'region': 0, 'zone': 0,
                    'weight': 2,
                    'ip': '127.0.0.4', 'port': 10000, 'device': 'sdf'})
        # we're moving more than 1/3 of the replicas but fewer than 2/3, so
        # we have to do this twice
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=12345)
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=12345)
        # four equal-weight servers -> a quarter of the parts each
        expected = {
            '127.0.0.1': 192,
            '127.0.0.2': 192,
            '127.0.0.3': 192,
            '127.0.0.4': 192,
        }
        part_counts = _partition_counts(rb, key='ip')
        self.assertEqual(part_counts, expected)
def test_overload_keeps_balanceable_things_balanced_initially(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 8,
'ip': '10.0.0.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 8,
'ip': '10.0.0.1', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.2', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.2', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.3', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.3', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'id': 6, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.4', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 7, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.4', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'id': 8, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.5', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 9, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.5', 'port': 10000, 'device': 'sdb'})
rb.set_overload(99999)
rb.rebalance(seed=12345)
part_counts = _partition_counts(rb)
self.assertEqual(part_counts, {
0: 128,
1: 128,
2: 64,
3: 64,
4: 64,
5: 64,
6: 64,
7: 64,
8: 64,
9: 64,
})
def test_overload_keeps_balanceable_things_balanced_on_rebalance(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 8,
'ip': '10.0.0.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 8,
'ip': '10.0.0.1', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.2', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.2', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.3', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.3', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'id': 6, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.4', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 7, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.4', 'port': 10000, 'device': 'sdb'})
rb.add_dev({'id': 8, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.5', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 9, 'region': 0, 'zone': 0, 'weight': 4,
'ip': '10.0.0.5', 'port': 10000, 'device': 'sdb'})
rb.set_overload(99999)
rb.rebalance(seed=123)
part_counts = _partition_counts(rb)
self.assertEqual(part_counts, {
0: 128,
1: 128,
2: 64,
3: 64,
4: 64,
5: 64,
6: 64,
7: 64,
8: 64,
9: 64,
})
# swap weights between 10.0.0.1 and 10.0.0.2
rb.set_dev_weight(0, 4)
rb.set_dev_weight(1, 4)
rb.set_dev_weight(2, 8)
rb.set_dev_weight(1, 8)
rb.rebalance(seed=456)
part_counts = _partition_counts(rb)
self.assertEqual(part_counts, {
0: 128,
1: 128,
2: 64,
3: 64,
4: 64,
5: 64,
6: 64,
7: 64,
8: 64,
9: 64,
})
    def test_server_per_port(self):
        """With one port per disk (server-per-port deployments), replicas
        of a part must still land on distinct IPs, not just distinct
        (ip, port) pairs.
        """
        # 3 servers, 3 disks each, with each disk on its own port
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '10.0.0.1', 'port': 10000, 'device': 'sdx'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '10.0.0.1', 'port': 10001, 'device': 'sdy'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '10.0.0.2', 'port': 10000, 'device': 'sdx'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '10.0.0.2', 'port': 10001, 'device': 'sdy'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '10.0.0.3', 'port': 10000, 'device': 'sdx'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '10.0.0.3', 'port': 10001, 'device': 'sdy'})
        rb.rebalance(seed=1)
        # add a third disk/port to each server after the initial balance
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '10.0.0.1', 'port': 10002, 'device': 'sdz'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '10.0.0.2', 'port': 10002, 'device': 'sdz'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '10.0.0.3', 'port': 10002, 'device': 'sdz'})
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=1)
        # every part should span as many distinct IPs as it has replicas
        poorly_dispersed = []
        for part in range(rb.parts):
            on_nodes = set()
            for replica in range(rb.replicas):
                dev_id = rb._replica2part2dev[replica][part]
                on_nodes.add(rb.devs[dev_id]['ip'])
            if len(on_nodes) < rb.replicas:
                poorly_dispersed.append(part)
        self.assertEqual(poorly_dispersed, [])
    def test_load(self):
        """Exercise RingBuilder.load() against every pickle outcome:
        current builders, old-style dict builders, devs missing 'meta',
        and the error translations for bad files.
        """
        rb = ring.RingBuilder(8, 3, 1)
        devs = [{'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                 'ip': '127.0.0.0', 'port': 10000, 'device': 'sda1',
                 'meta': 'meta0'},
                {'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
                 'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb1',
                 'meta': 'meta1'},
                {'id': 2, 'region': 0, 'zone': 2, 'weight': 2,
                 'ip': '127.0.0.2', 'port': 10002, 'device': 'sdc1',
                 'meta': 'meta2'},
                {'id': 3, 'region': 0, 'zone': 3, 'weight': 2,
                 'ip': '127.0.0.3', 'port': 10003, 'device': 'sdd1'}]
        for d in devs:
            rb.add_dev(d)
        rb.rebalance()
        # monkeypatch pickle.load directly; restored in the finally below
        real_pickle = pickle.load
        fake_open = mock.mock_open()
        io_error_not_found = IOError()
        io_error_not_found.errno = errno.ENOENT
        io_error_no_perm = IOError()
        io_error_no_perm.errno = errno.EPERM
        io_error_generic = IOError()
        io_error_generic.errno = errno.EOPNOTSUPP
        try:
            # test a legit builder
            fake_pickle = mock.Mock(return_value=rb)
            pickle.load = fake_pickle
            builder = ring.RingBuilder.load('fake.builder', open=fake_open)
            self.assertEqual(fake_pickle.call_count, 1)
            fake_open.assert_has_calls([mock.call('fake.builder', 'rb')])
            self.assertEqual(builder, rb)
            fake_pickle.reset_mock()
            # test old style builder (pickled as a plain dict)
            fake_pickle.return_value = rb.to_dict()
            pickle.load = fake_pickle
            builder = ring.RingBuilder.load('fake.builder', open=fake_open)
            fake_open.assert_has_calls([mock.call('fake.builder', 'rb')])
            self.assertEqual(builder.devs, rb.devs)
            fake_pickle.reset_mock()
            # test old devs but no meta
            no_meta_builder = rb
            for dev in no_meta_builder.devs:
                del(dev['meta'])
            fake_pickle.return_value = no_meta_builder
            pickle.load = fake_pickle
            builder = ring.RingBuilder.load('fake.builder', open=fake_open)
            fake_open.assert_has_calls([mock.call('fake.builder', 'rb')])
            self.assertEqual(builder.devs, rb.devs)
            # test an empty builder
            fake_pickle.side_effect = EOFError
            pickle.load = fake_pickle
            self.assertRaises(exceptions.UnPicklingError,
                              ring.RingBuilder.load, 'fake.builder',
                              open=fake_open)
            # test a corrupted builder
            fake_pickle.side_effect = pickle.UnpicklingError
            pickle.load = fake_pickle
            self.assertRaises(exceptions.UnPicklingError,
                              ring.RingBuilder.load, 'fake.builder',
                              open=fake_open)
            # test some error
            fake_pickle.side_effect = AttributeError
            pickle.load = fake_pickle
            self.assertRaises(exceptions.UnPicklingError,
                              ring.RingBuilder.load, 'fake.builder',
                              open=fake_open)
        finally:
            # always restore the real pickle.load
            pickle.load = real_pickle
        # test non existent builder file
        fake_open.side_effect = io_error_not_found
        self.assertRaises(exceptions.FileNotFoundError,
                          ring.RingBuilder.load, 'fake.builder',
                          open=fake_open)
        # test non accessible builder file
        fake_open.side_effect = io_error_no_perm
        self.assertRaises(exceptions.PermissionError,
                          ring.RingBuilder.load, 'fake.builder',
                          open=fake_open)
        # test an error other than ENOENT and EPERM (passes through as-is)
        fake_open.side_effect = io_error_generic
        self.assertRaises(IOError,
                          ring.RingBuilder.load, 'fake.builder',
                          open=fake_open)
    def test_save_load(self):
        """A builder saved to disk and loaded back must round-trip its
        full state (to_dict) including the overload factor.
        """
        rb = ring.RingBuilder(8, 3, 1)
        devs = [{'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                 'ip': '127.0.0.0', 'port': 10000,
                 'replication_ip': '127.0.0.0', 'replication_port': 10000,
                 'device': 'sda1', 'meta': 'meta0'},
                {'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
                 'ip': '127.0.0.1', 'port': 10001,
                 'replication_ip': '127.0.0.1', 'replication_port': 10001,
                 'device': 'sdb1', 'meta': 'meta1'},
                {'id': 2, 'region': 0, 'zone': 2, 'weight': 2,
                 'ip': '127.0.0.2', 'port': 10002,
                 'replication_ip': '127.0.0.2', 'replication_port': 10002,
                 'device': 'sdc1', 'meta': 'meta2'},
                {'id': 3, 'region': 0, 'zone': 3, 'weight': 2,
                 'ip': '127.0.0.3', 'port': 10003,
                 'replication_ip': '127.0.0.3', 'replication_port': 10003,
                 'device': 'sdd1', 'meta': ''}]
        rb.set_overload(3.14159)
        for d in devs:
            rb.add_dev(d)
        rb.rebalance()
        builder_file = os.path.join(self.testdir, 'test_save.builder')
        rb.save(builder_file)
        loaded_rb = ring.RingBuilder.load(builder_file)
        # show the full dict diff if the round-trip mismatches
        self.maxDiff = None
        self.assertEqual(loaded_rb.to_dict(), rb.to_dict())
        self.assertEqual(loaded_rb.overload, 3.14159)
@mock.patch('six.moves.builtins.open', autospec=True)
@mock.patch('swift.common.ring.builder.pickle.dump', autospec=True)
def test_save(self, mock_pickle_dump, mock_open):
mock_open.return_value = mock_fh = mock.MagicMock()
rb = ring.RingBuilder(8, 3, 1)
devs = [{'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.0', 'port': 10000, 'device': 'sda1',
'meta': 'meta0'},
{'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb1',
'meta': 'meta1'},
{'id': 2, 'region': 0, 'zone': 2, 'weight': 2,
'ip': '127.0.0.2', 'port': 10002, 'device': 'sdc1',
'meta': 'meta2'},
{'id': 3, 'region': 0, 'zone': 3, 'weight': 2,
'ip': '127.0.0.3', 'port': 10003, 'device': 'sdd1'}]
for d in devs:
rb.add_dev(d)
rb.rebalance()
rb.save('some.builder')
mock_open.assert_called_once_with('some.builder', 'wb')
mock_pickle_dump.assert_called_once_with(rb.to_dict(),
mock_fh.__enter__(),
protocol=2)
def test_id(self):
rb = ring.RingBuilder(8, 3, 1)
# check id is assigned after save
builder_file = os.path.join(self.testdir, 'test_save.builder')
rb.save(builder_file)
assigned_id = rb.id
# check id doesn't change when builder is saved again
rb.save(builder_file)
self.assertEqual(assigned_id, rb.id)
# check same id after loading
loaded_rb = ring.RingBuilder.load(builder_file)
self.assertEqual(assigned_id, loaded_rb.id)
# check id doesn't change when loaded builder is saved
rb.save(builder_file)
self.assertEqual(assigned_id, rb.id)
# check same id after loading again
loaded_rb = ring.RingBuilder.load(builder_file)
self.assertEqual(assigned_id, loaded_rb.id)
# check id remains once assigned, even when save fails
with self.assertRaises(IOError):
rb.save(os.path.join(
self.testdir, 'non_existent_dir', 'test_save.file'))
self.assertEqual(assigned_id, rb.id)
# sanity check that different builders get different id's
other_rb = ring.RingBuilder(8, 3, 1)
other_builder_file = os.path.join(self.testdir, 'test_save_2.builder')
other_rb.save(other_builder_file)
self.assertNotEqual(assigned_id, other_rb.id)
def test_id_copy_from(self):
# copy_from preserves the same id
orig_rb = ring.RingBuilder(8, 3, 1)
copy_rb = ring.RingBuilder(8, 3, 1)
copy_rb.copy_from(orig_rb)
for rb in (orig_rb, copy_rb):
with self.assertRaises(AttributeError) as cm:
rb.id
self.assertIn('id attribute has not been initialised',
cm.exception.args[0])
builder_file = os.path.join(self.testdir, 'test_save.builder')
orig_rb.save(builder_file)
copy_rb = ring.RingBuilder(8, 3, 1)
copy_rb.copy_from(orig_rb)
self.assertEqual(orig_rb.id, copy_rb.id)
    def test_id_legacy_builder_file(self):
        # Builders written before ids existed must still load, and must
        # acquire a persistent id on their next save.
        builder_file = os.path.join(self.testdir, 'legacy.builder')
        def do_test():
            # load legacy file
            loaded_rb = ring.RingBuilder.load(builder_file)
            with self.assertRaises(AttributeError) as cm:
                loaded_rb.id
            self.assertIn('id attribute has not been initialised',
                          cm.exception.args[0])
            # check saving assigns an id, and that it is persisted
            loaded_rb.save(builder_file)
            assigned_id = loaded_rb.id
            self.assertIsNotNone(assigned_id)
            loaded_rb = ring.RingBuilder.load(builder_file)
            self.assertEqual(assigned_id, loaded_rb.id)
        # older builders had no id so the pickled builder dict had no id key
        rb = ring.RingBuilder(8, 3, 1)
        orig_to_dict = rb.to_dict
        def mock_to_dict():
            # strip the id key to fake the pre-id on-disk format
            result = orig_to_dict()
            result.pop('id')
            return result
        with mock.patch.object(rb, 'to_dict', mock_to_dict):
            rb.save(builder_file)
        do_test()
        # even older builders pickled the class instance, which would have had
        # no _id attribute
        rb = ring.RingBuilder(8, 3, 1)
        del rb.logger  # logger type cannot be pickled
        del rb._id
        builder_file = os.path.join(self.testdir, 'legacy.builder')
        with open(builder_file, 'wb') as f:
            pickle.dump(rb, f, protocol=2)
        do_test()
def test_id_not_initialised_errors(self):
rb = ring.RingBuilder(8, 3, 1)
# id is not set until builder has been saved
with self.assertRaises(AttributeError) as cm:
rb.id
self.assertIn('id attribute has not been initialised',
cm.exception.args[0])
# save must succeed for id to be assigned
with self.assertRaises(IOError):
rb.save(os.path.join(
self.testdir, 'non-existent-dir', 'foo.builder'))
with self.assertRaises(AttributeError) as cm:
rb.id
self.assertIn('id attribute has not been initialised',
cm.exception.args[0])
    def test_search_devs(self):
        # search_devs() should filter by any single attribute or any
        # combination of attributes, returning matching dev dicts.
        rb = ring.RingBuilder(8, 3, 1)
        devs = [{'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                 'ip': '127.0.0.0', 'port': 10000, 'device': 'sda1',
                 'meta': 'meta0'},
                {'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
                 'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb1',
                 'meta': 'meta1'},
                {'id': 2, 'region': 1, 'zone': 2, 'weight': 2,
                 'ip': '127.0.0.2', 'port': 10002, 'device': 'sdc1',
                 'meta': 'meta2'},
                {'id': 3, 'region': 1, 'zone': 3, 'weight': 2,
                 'ip': '127.0.0.3', 'port': 10003, 'device': 'sdd1',
                 'meta': 'meta3'},
                {'id': 4, 'region': 2, 'zone': 4, 'weight': 1,
                 'ip': '127.0.0.4', 'port': 10004, 'device': 'sde1',
                 'meta': 'meta4', 'replication_ip': '127.0.0.10',
                 'replication_port': 20000},
                {'id': 5, 'region': 2, 'zone': 5, 'weight': 2,
                 'ip': '127.0.0.5', 'port': 10005, 'device': 'sdf1',
                 'meta': 'meta5', 'replication_ip': '127.0.0.11',
                 'replication_port': 20001},
                {'id': 6, 'region': 2, 'zone': 6, 'weight': 2,
                 'ip': '127.0.0.6', 'port': 10006, 'device': 'sdg1',
                 'meta': 'meta6', 'replication_ip': '127.0.0.12',
                 'replication_port': 20002}]
        for d in devs:
            rb.add_dev(d)
        rb.rebalance()
        # region filters, alone and combined with zone
        res = rb.search_devs({'region': 0})
        self.assertEqual(res, [devs[0], devs[1]])
        res = rb.search_devs({'region': 1})
        self.assertEqual(res, [devs[2], devs[3]])
        res = rb.search_devs({'region': 1, 'zone': 2})
        self.assertEqual(res, [devs[2]])
        # single-attribute filters: id, zone, ip, port
        res = rb.search_devs({'id': 1})
        self.assertEqual(res, [devs[1]])
        res = rb.search_devs({'zone': 1})
        self.assertEqual(res, [devs[1]])
        res = rb.search_devs({'ip': '127.0.0.1'})
        self.assertEqual(res, [devs[1]])
        res = rb.search_devs({'ip': '127.0.0.1', 'port': 10001})
        self.assertEqual(res, [devs[1]])
        res = rb.search_devs({'port': 10001})
        self.assertEqual(res, [devs[1]])
        # replication network attributes are searchable too
        res = rb.search_devs({'replication_ip': '127.0.0.10'})
        self.assertEqual(res, [devs[4]])
        res = rb.search_devs({'replication_ip': '127.0.0.10',
                              'replication_port': 20000})
        self.assertEqual(res, [devs[4]])
        res = rb.search_devs({'replication_port': 20000})
        self.assertEqual(res, [devs[4]])
        # device name and meta string
        res = rb.search_devs({'device': 'sdb1'})
        self.assertEqual(res, [devs[1]])
        res = rb.search_devs({'meta': 'meta1'})
        self.assertEqual(res, [devs[1]])
    def test_validate(self):
        # Exercise validate() on a healthy ring and then against a series of
        # deliberately corrupted internal states; each corruption is undone
        # before the next, so statement order here matters.
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
        rb.add_dev({'id': 9, 'region': 0, 'zone': 1, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 2,
                    'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
        rb.add_dev({'id': 10, 'region': 0, 'zone': 2, 'weight': 2,
                    'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
        rb.add_dev({'id': 11, 'region': 0, 'zone': 2, 'weight': 2,
                    'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
        rb.add_dev({'id': 12, 'region': 0, 'zone': 2, 'weight': 2,
                    'ip': '127.0.0.1', 'port': 10002, 'device': 'sda1'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 3, 'weight': 2,
                    'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
        rb.add_dev({'id': 13, 'region': 0, 'zone': 3, 'weight': 2,
                    'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
        rb.add_dev({'id': 14, 'region': 0, 'zone': 3, 'weight': 2,
                    'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
        rb.add_dev({'id': 15, 'region': 0, 'zone': 3, 'weight': 2,
                    'ip': '127.0.0.1', 'port': 10003, 'device': 'sda1'})
        # Degenerate case: devices added but not rebalanced yet
        self.assertRaises(exceptions.RingValidationError, rb.validate)
        rb.rebalance()
        # zones 2 and 3 have twice the weight, so twice the partitions
        counts = _partition_counts(rb, key='zone')
        self.assertEqual(counts, {0: 128, 1: 128, 2: 256, 3: 256})
        # without stats=True both return values are None
        dev_usage, worst = rb.validate()
        self.assertIsNone(dev_usage)
        self.assertIsNone(worst)
        dev_usage, worst = rb.validate(stats=True)
        self.assertEqual(list(dev_usage), [32, 32, 64, 64,
                                           32, 32, 32,  # added zone0
                                           32, 32, 32,  # added zone1
                                           64, 64, 64,  # added zone2
                                           64, 64, 64,  # added zone3
                                           ])
        self.assertEqual(int(worst), 0)
        # min part hours should pin all the parts assigned to this zero
        # weight device onto it such that the balance will look horrible
        rb.set_dev_weight(2, 0)
        rb.rebalance()
        self.assertEqual(rb.validate(stats=True)[1], MAX_BALANCE)
        # Test not all partitions doubly accounted for
        rb.devs[1]['parts'] -= 1
        self.assertRaises(exceptions.RingValidationError, rb.validate)
        rb.devs[1]['parts'] += 1
        # Test non-numeric port
        rb.devs[1]['port'] = '10001'
        self.assertRaises(exceptions.RingValidationError, rb.validate)
        rb.devs[1]['port'] = 10001
        # Test partition on nonexistent device
        rb.pretend_min_part_hours_passed()
        orig_dev_id = rb._replica2part2dev[0][0]
        rb._replica2part2dev[0][0] = len(rb.devs)
        self.assertRaises(exceptions.RingValidationError, rb.validate)
        rb._replica2part2dev[0][0] = orig_dev_id
        # Tests that validate can handle 'holes' in .devs
        rb.remove_dev(2)
        rb.pretend_min_part_hours_passed()
        rb.rebalance()
        rb.validate(stats=True)
        # Test partition assigned to a hole
        if rb.devs[2]:
            rb.remove_dev(2)
        rb.pretend_min_part_hours_passed()
        orig_dev_id = rb._replica2part2dev[0][0]
        rb._replica2part2dev[0][0] = 2
        self.assertRaises(exceptions.RingValidationError, rb.validate)
        rb._replica2part2dev[0][0] = orig_dev_id
        # Validate that zero weight devices with no partitions don't count on
        # the 'worst' value.
        self.assertNotEqual(rb.validate(stats=True)[1], MAX_BALANCE)
        rb.add_dev({'id': 16, 'region': 0, 'zone': 0, 'weight': 0,
                    'ip': '127.0.0.1', 'port': 10004, 'device': 'sda1'})
        rb.pretend_min_part_hours_passed()
        rb.rebalance()
        self.assertNotEqual(rb.validate(stats=True)[1], MAX_BALANCE)
def test_validate_partial_replica(self):
rb = ring.RingBuilder(8, 2.5, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sdc'})
rb.rebalance()
rb.validate() # sanity
self.assertEqual(len(rb._replica2part2dev[0]), 256)
self.assertEqual(len(rb._replica2part2dev[1]), 256)
self.assertEqual(len(rb._replica2part2dev[2]), 128)
# now swap partial replica part maps
rb._replica2part2dev[1], rb._replica2part2dev[2] = \
rb._replica2part2dev[2], rb._replica2part2dev[1]
self.assertRaises(exceptions.RingValidationError, rb.validate)
def test_validate_duplicate_part_assignment(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sdc'})
rb.rebalance()
rb.validate() # sanity
# now double up a device assignment
rb._replica2part2dev[1][200] = rb._replica2part2dev[2][200]
with self.assertRaises(exceptions.RingValidationError) as e:
rb.validate()
expected = 'The partition 200 has been assigned to duplicate devices'
self.assertIn(expected, str(e.exception))
def test_get_part_devices(self):
rb = ring.RingBuilder(8, 3, 1)
self.assertEqual(rb.get_part_devices(0), [])
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.rebalance()
part_devs = sorted(rb.get_part_devices(0),
key=operator.itemgetter('id'))
self.assertEqual(part_devs, [rb.devs[0], rb.devs[1], rb.devs[2]])
def test_get_part_devices_partial_replicas(self):
rb = ring.RingBuilder(8, 2.5, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': 'sda1'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 1,
'ip': '127.0.0.1', 'port': 10001, 'device': 'sda1'})
rb.rebalance(seed=4)
# note: partition 255 will only have 2 replicas
part_devs = sorted(rb.get_part_devices(255),
key=operator.itemgetter('id'))
self.assertEqual(part_devs, [rb.devs[1], rb.devs[2]])
    def test_dispersion_with_zero_weight_devices(self):
        # A zero-weight device should receive no part-replicas and should
        # not hurt the dispersion metric.
        rb = ring.RingBuilder(8, 3.0, 0)
        # add three weighted devices to a single server in a single zone
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        # and a zero weight device
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 0,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.rebalance()
        self.assertEqual(rb.dispersion, 0.0)
        # graph values are [parts with 0, 1, 2, 3 replicas in the tier];
        # dev 3 (zero weight) does not appear at all
        self.assertEqual(rb._dispersion_graph, {
            (0,): [0, 0, 0, 256],
            (0, 0): [0, 0, 0, 256],
            (0, 0, '127.0.0.1'): [0, 0, 0, 256],
            (0, 0, '127.0.0.1', 0): [0, 256, 0, 0],
            (0, 0, '127.0.0.1', 1): [0, 256, 0, 0],
            (0, 0, '127.0.0.1', 2): [0, 256, 0, 0],
        })
    def test_dispersion_with_zero_weight_devices_with_parts(self):
        # When a device is zeroed but min_part_hours pins its parts in
        # place, dispersion reflects the stuck replicas until they can move.
        rb = ring.RingBuilder(8, 3.0, 1)
        # add four devices to a single server in a single zone
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.rebalance(seed=1)
        self.assertEqual(rb.dispersion, 0.0)
        self.assertEqual(rb._dispersion_graph, {
            (0,): [0, 0, 0, 256],
            (0, 0): [0, 0, 0, 256],
            (0, 0, '127.0.0.1'): [0, 0, 0, 256],
            (0, 0, '127.0.0.1', 0): [64, 192, 0, 0],
            (0, 0, '127.0.0.1', 1): [64, 192, 0, 0],
            (0, 0, '127.0.0.1', 2): [64, 192, 0, 0],
            (0, 0, '127.0.0.1', 3): [64, 192, 0, 0],
        })
        # now mark device 2 for decommissioning
        rb.set_dev_weight(2, 0.0)
        # we'll rebalance but can't move any parts
        rb.rebalance(seed=1)
        # zero weight tier has one copy of 1/4 part-replica
        self.assertEqual(rb.dispersion, 25.0)
        # graph is unchanged: min_part_hours kept everything in place
        self.assertEqual(rb._dispersion_graph, {
            (0,): [0, 0, 0, 256],
            (0, 0): [0, 0, 0, 256],
            (0, 0, '127.0.0.1'): [0, 0, 0, 256],
            (0, 0, '127.0.0.1', 0): [64, 192, 0, 0],
            (0, 0, '127.0.0.1', 1): [64, 192, 0, 0],
            (0, 0, '127.0.0.1', 2): [64, 192, 0, 0],
            (0, 0, '127.0.0.1', 3): [64, 192, 0, 0],
        })
        # unlock the stuck parts
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=3)
        self.assertEqual(rb.dispersion, 0.0)
        # device 2 now holds no part-replicas and drops out of the graph
        self.assertEqual(rb._dispersion_graph, {
            (0,): [0, 0, 0, 256],
            (0, 0): [0, 0, 0, 256],
            (0, 0, '127.0.0.1'): [0, 0, 0, 256],
            (0, 0, '127.0.0.1', 0): [0, 256, 0, 0],
            (0, 0, '127.0.0.1', 1): [0, 256, 0, 0],
            (0, 0, '127.0.0.1', 3): [0, 256, 0, 0],
        })
    @unittest.skipIf(sys.version_info < (3,),
                     "Seed-specific tests don't work well between python "
                     "versions. This test is now PY3 only")
    def test_undispersable_zone_converge_on_balance(self):
        # With too few failure domains to disperse fully, repeated
        # rebalances should still converge towards good balance.
        rb = ring.RingBuilder(8, 6, 0)
        dev_id = 0
        # 3 regions, 2 zone for each region, 1 server with only *one* device in
        # each zone (this is an absolutely pathological case)
        for r in range(3):
            for z in range(2):
                ip = '127.%s.%s.1' % (r, z)
                dev_id += 1
                rb.add_dev({'id': dev_id, 'region': r, 'zone': z,
                            'weight': 1000, 'ip': ip, 'port': 10000,
                            'device': 'd%s' % dev_id})
        rb.rebalance(seed=5)
        # sanity, all balanced and 0 dispersion
        self.assertEqual(rb.get_balance(), 0)
        self.assertEqual(rb.dispersion, 0)
        # add one device to the server in zone 0 of each region, N.B. when we
        # *balance* this topology we will have very bad dispersion (too much
        # weight in zone 0 compared to zone 1!)
        for r in range(3):
            z = 0
            ip = '127.%s.%s.1' % (r, z)
            dev_id += 1
            rb.add_dev({'id': dev_id, 'region': r, 'zone': z,
                        'weight': 1000, 'ip': ip, 'port': 10000,
                        'device': 'd%s' % dev_id})
        changed_part, _, _ = rb.rebalance(seed=5)
        # sanity, all parts moved but only one replica of each
        self.assertEqual(changed_part, 2 ** 8)
        # so the first time, rings are still unbalanced because we'll only
        # move one replica of each part.
        self.assertEqual(rb.get_balance(), 50.1953125)
        self.assertEqual(rb.dispersion, 16.6015625)
        # N.B. since we mostly end up grabbing parts by "weight forced" some
        # seeds given some specific ring state will randomly pick bad
        # part-replicas that end up going back down onto the same devices
        changed_part, _, _ = rb.rebalance(seed=5)
        self.assertEqual(changed_part, 15)
        # ... this isn't a really "desirable" behavior, but even with bad luck,
        # things do get better
        self.assertEqual(rb.get_balance(), 47.265625)
        self.assertEqual(rb.dispersion, 16.6015625)
        # but if you stick with it, eventually the next rebalance, will get to
        # move "the right" part-replicas, resulting in near optimal balance
        changed_part, _, _ = rb.rebalance(seed=5)
        self.assertEqual(changed_part, 167)
        self.assertEqual(rb.get_balance(), 14.453125)
        self.assertEqual(rb.dispersion, 16.6015625)
    @unittest.skipIf(sys.version_info < (3,),
                     "Seed-specific tests don't work well between python "
                     "versions. This test is now PY3 only")
    def test_undispersable_server_converge_on_balance(self):
        # Same idea as the zone test above, but the lopsided weight is at
        # the server tier instead of the zone tier.
        rb = ring.RingBuilder(8, 6, 0)
        dev_id = 0
        # 3 zones, 2 server for each zone, 2 device for each server
        for z in range(3):
            for i in range(2):
                ip = '127.0.%s.%s' % (z, i + 1)
                for d in range(2):
                    dev_id += 1
                    rb.add_dev({'id': dev_id, 'region': 1, 'zone': z,
                                'weight': 1000, 'ip': ip, 'port': 10000,
                                'device': 'd%s' % dev_id})
        rb.rebalance(seed=7)
        # sanity, all balanced and 0 dispersion
        self.assertEqual(rb.get_balance(), 0)
        self.assertEqual(rb.dispersion, 0)
        # add one device for first server for each zone
        for z in range(3):
            ip = '127.0.%s.1' % z
            dev_id += 1
            rb.add_dev({'id': dev_id, 'region': 1, 'zone': z,
                        'weight': 1000, 'ip': ip, 'port': 10000,
                        'device': 'd%s' % dev_id})
        changed_part, _, _ = rb.rebalance(seed=7)
        # sanity, all parts moved but only one replica of each
        self.assertEqual(changed_part, 2 ** 8)
        # but the first time, those are still unbalanced because the ring
        # builder can move only one replica of each part
        self.assertEqual(rb.get_balance(), 17.96875)
        self.assertEqual(rb.dispersion, 9.9609375)
        rb.rebalance(seed=7)
        # converge into around 0~1
        self.assertGreaterEqual(rb.get_balance(), 0)
        self.assertLess(rb.get_balance(), 1)
        # dispersion doesn't get any worse
        self.assertEqual(rb.dispersion, 9.9609375)
    def test_effective_overload(self):
        # get_required_overload() should track the ring's needs as devices
        # are added, removed, and restored.
        rb = ring.RingBuilder(8, 3, 1)
        # z0
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdb'})
        # z1
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 1, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        # z2 has only two devices, so this ring requires overload
        rb.add_dev({'id': 6, 'region': 0, 'zone': 2, 'weight': 100,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 2, 'weight': 100,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        # this ring requires overload
        required = rb.get_required_overload()
        self.assertGreater(required, 0.1)
        # and we'll use a little bit
        rb.set_overload(0.1)
        rb.rebalance(seed=7)
        rb.validate()
        # but without enough overload we're not dispersed
        self.assertGreater(rb.dispersion, 0)
        # add the other dev to z2
        rb.add_dev({'id': 8, 'region': 0, 'zone': 2, 'weight': 100,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdc'})
        # but also fail another device in the same!
        rb.remove_dev(6)
        # we still require overload
        required = rb.get_required_overload()
        self.assertGreater(required, 0.1)
        rb.pretend_min_part_hours_passed()
        rb.rebalance(seed=7)
        rb.validate()
        # ... and still without enough overload we're not fully dispersed
        self.assertGreater(rb.dispersion, 0)
        # ok, let's fix z2's weight for real
        rb.add_dev({'id': 6, 'region': 0, 'zone': 2, 'weight': 100,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        # ... technically, we no longer require overload
        self.assertEqual(rb.get_required_overload(), 0.0)
        # so let's rebalance w/o resetting min_part_hours
        rb.rebalance(seed=7)
        rb.validate()
        # ... and that got it in one pass boo-yah!
        self.assertEqual(rb.dispersion, 0)
    def zone_weights_over_device_count(self):
        # NOTE(review): this method is missing the 'test_' prefix, so
        # unittest discovery never runs it -- confirm whether that is
        # intentional before relying on its assertions.
        rb = ring.RingBuilder(8, 3, 1)
        # z0
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sda'})
        # z1
        rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        # z2 carries double weight on a single device
        rb.add_dev({'id': 2, 'region': 0, 'zone': 2, 'weight': 200,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.rebalance(seed=7)
        rb.validate()
        self.assertEqual(rb.dispersion, 0)
        self.assertAlmostEqual(rb.get_balance(), (1.0 / 3.0) * 100)
def test_more_devices_than_replicas_validation_when_removed_dev(self):
rb = ring.RingBuilder(8, 3, 1)
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
'port': 6200, 'weight': 1.0, 'device': 'sda'})
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
'port': 6200, 'weight': 1.0, 'device': 'sdb'})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
'port': 6200, 'weight': 1.0, 'device': 'sdc'})
rb.rebalance()
rb.remove_dev(2)
with self.assertRaises(ValueError) as e:
rb.set_dev_weight(2, 1)
msg = "Can not set weight of dev_id 2 because it is marked " \
"for removal"
self.assertIn(msg, str(e.exception))
with self.assertRaises(exceptions.RingValidationError) as e:
rb.rebalance()
msg = 'Replica count of 3 requires more than 2 devices'
self.assertIn(msg, str(e.exception))
def _add_dev_delete_first_n(self, add_dev_count, n):
rb = ring.RingBuilder(8, 3, 1)
dev_names = ['sda', 'sdb', 'sdc', 'sdd', 'sde', 'sdf']
for i in range(add_dev_count):
if i < len(dev_names):
dev_name = dev_names[i]
else:
dev_name = 'sda'
rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
'port': 6200, 'weight': 1.0, 'device': dev_name})
rb.rebalance()
if (n > 0):
rb.pretend_min_part_hours_passed()
# remove first n
for i in range(n):
rb.remove_dev(i)
rb.pretend_min_part_hours_passed()
rb.rebalance()
return rb
    def test_reuse_of_dev_holes_without_id(self):
        # add_dev() without an explicit id should fill holes left by
        # removed devices rather than always appending.
        # try with contiguous holes at beginning
        add_dev_count = 6
        rb = self._add_dev_delete_first_n(add_dev_count, add_dev_count - 3)
        new_dev_id = rb.add_dev({'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                                 'port': 6200, 'weight': 1.0,
                                 'device': 'sda'})
        # the assigned id must come from one of the freed slots
        self.assertLess(new_dev_id, add_dev_count)
        # try with non-contiguous holes
        # [0, 1, None, 3, 4, None]
        rb2 = ring.RingBuilder(8, 3, 1)
        for i in range(6):
            rb2.add_dev({'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                         'port': 6200, 'weight': 1.0, 'device': 'sda'})
        rb2.rebalance()
        rb2.pretend_min_part_hours_passed()
        rb2.remove_dev(2)
        rb2.remove_dev(5)
        rb2.pretend_min_part_hours_passed()
        rb2.rebalance()
        # holes are reused in ascending id order ...
        first = rb2.add_dev({'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                             'port': 6200, 'weight': 1.0, 'device': 'sda'})
        second = rb2.add_dev({'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                              'port': 6200, 'weight': 1.0, 'device': 'sda'})
        # add a new one (without reusing a hole)
        third = rb2.add_dev({'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                             'port': 6200, 'weight': 1.0, 'device': 'sda'})
        self.assertEqual(first, 2)
        self.assertEqual(second, 5)
        self.assertEqual(third, 6)
def test_reuse_of_dev_holes_with_id(self):
add_dev_count = 6
rb = self._add_dev_delete_first_n(add_dev_count, add_dev_count - 3)
# add specifying id
exp_new_dev_id = 2
# [dev, dev, None, dev, dev, None]
try:
new_dev_id = rb.add_dev({'id': exp_new_dev_id, 'region': 0,
'zone': 0, 'ip': '127.0.0.1',
'port': 6200, 'weight': 1.0,
'device': 'sda'})
self.assertEqual(new_dev_id, exp_new_dev_id)
except exceptions.DuplicateDeviceError:
self.fail("device hole not reused")
def test_prepare_increase_partition_power(self):
ring_file = os.path.join(self.testdir, 'test_partpower.ring.gz')
rb = ring.RingBuilder(8, 3.0, 1)
self.assertEqual(rb.part_power, 8)
# add more devices than replicas to the ring
for i in range(10):
dev = "sdx%s" % i
rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': dev})
rb.rebalance(seed=1)
self.assertFalse(rb.cancel_increase_partition_power())
self.assertEqual(rb.part_power, 8)
self.assertIsNone(rb.next_part_power)
self.assertFalse(rb.finish_increase_partition_power())
self.assertEqual(rb.part_power, 8)
self.assertIsNone(rb.next_part_power)
self.assertTrue(rb.prepare_increase_partition_power())
self.assertEqual(rb.part_power, 8)
self.assertEqual(rb.next_part_power, 9)
# Save .ring.gz, and load ring from it to ensure prev/next is set
rd = rb.get_ring()
rd.save(ring_file)
r = ring.Ring(ring_file)
expected_part_shift = 32 - 8
self.assertEqual(expected_part_shift, r._part_shift)
self.assertEqual(9, r.next_part_power)
    def test_increase_partition_power(self):
        # Doubling the partition count must keep every object on the same
        # devices: old partition X maps to new partitions 2X and 2X+1.
        rb = ring.RingBuilder(8, 3.0, 1)
        self.assertEqual(rb.part_power, 8)
        # add more devices than replicas to the ring
        for i in range(10):
            dev = "sdx%s" % i
            rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'weight': 1,
                        'ip': '127.0.0.1', 'port': 10000, 'device': dev})
        rb.rebalance(seed=1)
        # Let's save the ring, and get the nodes for an object
        ring_file = os.path.join(self.testdir, 'test_partpower.ring.gz')
        rd = rb.get_ring()
        rd.save(ring_file)
        r = ring.Ring(ring_file)
        old_part, old_nodes = r.get_nodes("acc", "cont", "obj")
        old_version = rb.version
        self.assertTrue(rb.prepare_increase_partition_power())
        self.assertTrue(rb.increase_partition_power())
        rb.validate()
        # a rebalance right after the increase must be a no-op
        changed_parts, _balance, removed_devs = rb.rebalance()
        self.assertEqual(changed_parts, 0)
        self.assertEqual(removed_devs, 0)
        # Make sure cancellation is not possible
        # after increasing the partition power
        self.assertFalse(rb.cancel_increase_partition_power())
        old_ring = r
        rd = rb.get_ring()
        rd.save(ring_file)
        r = ring.Ring(ring_file)
        new_part, new_nodes = r.get_nodes("acc", "cont", "obj")
        # sanity checks
        self.assertEqual(9, rb.part_power)
        self.assertEqual(9, rb.next_part_power)
        self.assertEqual(rb.version, old_version + 3)
        # make sure there is always the same device assigned to every pair of
        # partitions
        for replica in rb._replica2part2dev:
            for part in range(0, len(replica), 2):
                dev = replica[part]
                next_dev = replica[part + 1]
                self.assertEqual(dev, next_dev)
        # same for last_part moves
        for part in range(0, rb.parts, 2):
            this_last_moved = rb._last_part_moves[part]
            next_last_moved = rb._last_part_moves[part + 1]
            self.assertEqual(this_last_moved, next_last_moved)
        for i in range(100):
            suffix = uuid.uuid4()
            account = 'account_%s' % suffix
            container = 'container_%s' % suffix
            obj = 'obj_%s' % suffix
            old_part, old_nodes = old_ring.get_nodes(account, container, obj)
            new_part, new_nodes = r.get_nodes(account, container, obj)
            # Due to the increased partition power, the partition each object
            # is assigned to has changed. If the old partition was X, it will
            # now be either located in 2*X or 2*X+1
            self.assertIn(new_part, [old_part * 2, old_part * 2 + 1])
            # Importantly, we expect the objects to be placed on the same
            # nodes after increasing the partition power
            self.assertEqual(old_nodes, new_nodes)
def test_finalize_increase_partition_power(self):
ring_file = os.path.join(self.testdir, 'test_partpower.ring.gz')
rb = ring.RingBuilder(8, 3.0, 1)
self.assertEqual(rb.part_power, 8)
# add more devices than replicas to the ring
for i in range(10):
dev = "sdx%s" % i
rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': dev})
rb.rebalance(seed=1)
self.assertTrue(rb.prepare_increase_partition_power())
# Make sure this doesn't do any harm before actually increasing the
# partition power
self.assertFalse(rb.finish_increase_partition_power())
self.assertEqual(rb.next_part_power, 9)
self.assertTrue(rb.increase_partition_power())
self.assertFalse(rb.prepare_increase_partition_power())
self.assertEqual(rb.part_power, 9)
self.assertEqual(rb.next_part_power, 9)
self.assertTrue(rb.finish_increase_partition_power())
self.assertEqual(rb.part_power, 9)
self.assertIsNone(rb.next_part_power)
# Save .ring.gz, and load ring from it to ensure prev/next is set
rd = rb.get_ring()
rd.save(ring_file)
r = ring.Ring(ring_file)
expected_part_shift = 32 - 9
self.assertEqual(expected_part_shift, r._part_shift)
self.assertIsNone(r.next_part_power)
def test_prepare_increase_partition_power_failed(self):
rb = ring.RingBuilder(8, 3.0, 1)
self.assertEqual(rb.part_power, 8)
self.assertTrue(rb.prepare_increase_partition_power())
self.assertEqual(rb.next_part_power, 9)
# next_part_power is still set, do not increase again
self.assertFalse(rb.prepare_increase_partition_power())
self.assertEqual(rb.next_part_power, 9)
def test_increase_partition_power_failed(self):
rb = ring.RingBuilder(8, 3.0, 1)
self.assertEqual(rb.part_power, 8)
# add more devices than replicas to the ring
for i in range(10):
dev = "sdx%s" % i
rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': dev})
rb.rebalance(seed=1)
# next_part_power not set, can't increase the part power
self.assertFalse(rb.increase_partition_power())
self.assertEqual(rb.part_power, 8)
self.assertTrue(rb.prepare_increase_partition_power())
self.assertTrue(rb.increase_partition_power())
self.assertEqual(rb.part_power, 9)
# part_power already increased
self.assertFalse(rb.increase_partition_power())
self.assertEqual(rb.part_power, 9)
def test_cancel_increase_partition_power(self):
rb = ring.RingBuilder(8, 3.0, 1)
self.assertEqual(rb.part_power, 8)
# add more devices than replicas to the ring
for i in range(10):
dev = "sdx%s" % i
rb.add_dev({'id': i, 'region': 0, 'zone': 0, 'weight': 1,
'ip': '127.0.0.1', 'port': 10000, 'device': dev})
rb.rebalance(seed=1)
old_version = rb.version
self.assertTrue(rb.prepare_increase_partition_power())
# sanity checks
self.assertEqual(8, rb.part_power)
self.assertEqual(9, rb.next_part_power)
self.assertEqual(rb.version, old_version + 1)
self.assertTrue(rb.cancel_increase_partition_power())
rb.validate()
self.assertEqual(8, rb.part_power)
self.assertEqual(8, rb.next_part_power)
self.assertEqual(rb.version, old_version + 2)
class TestGetRequiredOverload(unittest.TestCase):
maxDiff = None
    def test_none_needed(self):
        """Evenly weighted devices in one tier never need any overload;
        weighted, wanted, and target replica plans all agree."""
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 1,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        # 4 equal-weight devs and 3 replicas: this can be balanced without
        # resorting to overload at all
        self.assertAlmostEqual(rb.get_required_overload(), 0)
        # device-level tiers are (region, zone, ip, dev_id); 3 replicas
        # over 4 devices is 0.75 replicanths each
        expected = {
            (0, 0, '127.0.0.1', 0): 0.75,
            (0, 0, '127.0.0.1', 1): 0.75,
            (0, 0, '127.0.0.1', 2): 0.75,
            (0, 0, '127.0.0.1', 3): 0.75,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected, {
            tier: weighted
            for (tier, weighted) in weighted_replicas.items()
            if len(tier) == 4})
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 4})
        # since no overload is needed, target_replicas is the same
        rb.set_overload(0.10)
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 4})
        # ... no matter how high you go!
        rb.set_overload(100.0)
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 4})
        # 3 equal-weight devs and 3 replicas: this can also be balanced
        rb.remove_dev(3)
        self.assertAlmostEqual(rb.get_required_overload(), 0)
        expected = {
            (0, 0, '127.0.0.1', 0): 1.0,
            (0, 0, '127.0.0.1', 1): 1.0,
            (0, 0, '127.0.0.1', 2): 1.0,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 4})
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 4})
        # ... still no overload
        rb.set_overload(100.0)
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 4})
    def test_equal_replica_and_devices_count_ignore_weights(self):
        """With as many devices as replicas, every device gets exactly one
        full replica no matter how unequal the weights are."""
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 7.47,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 5.91,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 6.44,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        # one whole replica per device, keyed by dev_id (tier[-1])
        expected = {
            0: 1.0,
            1: 1.0,
            2: 1.0,
        }
        # simplicity itself
        self.assertEqual(expected, {
            t[-1]: r for (t, r) in
            rb._build_weighted_replicas_by_tier().items()
            if len(t) == 4})
        self.assertEqual(expected, {
            t[-1]: r for (t, r) in
            rb._build_wanted_replicas_by_tier().items()
            if len(t) == 4})
        self.assertEqual(expected, {
            t[-1]: r for (t, r) in
            rb._build_target_replicas_by_tier().items()
            if len(t) == 4})
        # ... no overload required!
        self.assertEqual(0, rb.get_required_overload())
        rb.rebalance()
        # every device holds all 2**8 = 256 parts
        expected = {
            0: 256,
            1: 256,
            2: 256,
        }
        self.assertEqual(expected, {d['id']: d['parts'] for d in
                                    rb._iter_devs()})
    def test_small_zone(self):
        """A slightly underweighted zone needs a small overload so each
        zone can still hold a whole replica."""
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 4,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 4,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'weight': 4,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 4,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'weight': 4,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'weight': 3,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        # by raw weight, zone 2 (weight 7 of 23) falls short of a replica
        expected = {
            (0, 0): 1.0434782608695652,
            (0, 1): 1.0434782608695652,
            (0, 2): 0.9130434782608695,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 2})
        # dispersion wants exactly one replica per zone
        expected = {
            (0, 0): 1.0,
            (0, 1): 1.0,
            (0, 2): 1.0,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 2})
        # the device tier is interesting because one of the devices in zone
        # two has a different weight
        expected = {
            0: 0.5217391304347826,
            1: 0.5217391304347826,
            2: 0.5217391304347826,
            3: 0.5217391304347826,
            4: 0.5217391304347826,
            5: 0.3913043478260869,
        }
        self.assertEqual(expected,
                         {tier[3]: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 4})
        # ... but, each pair of devices still needs to hold a whole
        # replicanth; which we'll try to distribute fairly among devices in
        # zone 2, so that they can share the burden and ultimately the
        # required overload will be as small as possible.
        expected = {
            0: 0.5,
            1: 0.5,
            2: 0.5,
            3: 0.5,
            4: 0.5714285714285715,
            5: 0.42857142857142855,
        }
        self.assertEqual(expected,
                         {tier[3]: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 4})
        # full dispersion requires zone two's devices to eat more than
        # they're weighted for
        self.assertAlmostEqual(rb.get_required_overload(), 0.095238,
                               delta=1e-5)
        # so... if we give it enough overload we should get full dispersion
        rb.set_overload(0.1)
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected,
                         {tier[3]: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 4})
    def test_multiple_small_zones(self):
        """One huge zone next to several small ones: full dispersion needs
        a big overload, and partial overload lands in between."""
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 9, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'weight': 150,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 150,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 10, 'region': 0, 'zone': 1, 'weight': 150,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 3, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 3, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        # zone 0 holds 2000 of 2850 total weight - over 2 replicas' worth
        expected = {
            (0, 0): 2.1052631578947367,
            (0, 1): 0.47368421052631576,
            (0, 2): 0.21052631578947367,
            (0, 3): 0.21052631578947367,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 2})
        # without any overload, we get weight
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: r
                          for (tier, r) in target_replicas.items()
                          if len(tier) == 2})
        # dispersion caps any zone at one replica; zones 2 and 3 split the
        # third replica evenly
        expected = {
            (0, 0): 1.0,
            (0, 1): 1.0,
            (0, 2): 0.49999999999999994,
            (0, 3): 0.49999999999999994,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {t: r
                          for (t, r) in wanted_replicas.items()
                          if len(t) == 2})
        self.assertEqual(1.3750000000000002, rb.get_required_overload())
        # with enough overload we get the full dispersion
        rb.set_overload(1.5)
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: r
                          for (tier, r) in target_replicas.items()
                          if len(tier) == 2})
        # with not enough overload, we get somewhere in the middle
        rb.set_overload(1.0)
        expected = {
            (0, 0): 1.3014354066985647,
            (0, 1): 0.8564593301435406,
            (0, 2): 0.4210526315789473,
            (0, 3): 0.4210526315789473,
        }
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: r
                          for (tier, r) in target_replicas.items()
                          if len(tier) == 2})
    def test_big_zone(self):
        """One overweight zone among equals: the required overload matches
        the relative shortfall of any of the small zones."""
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'weight': 60,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 60,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'weight': 60,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'weight': 60,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 3, 'weight': 60,
                    'ip': '127.0.0.3', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 3, 'weight': 60,
                    'ip': '127.0.0.3', 'port': 10000, 'device': 'sdb'})
        # by weight, zone 0 would get more than one replica
        expected = {
            (0, 0): 1.0714285714285714,
            (0, 1): 0.6428571428571429,
            (0, 2): 0.6428571428571429,
            (0, 3): 0.6428571428571429,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 2})
        # dispersion caps zone 0 at one replica and spreads the rest
        expected = {
            (0, 0): 1.0,
            (0, 1): 0.6666666666666667,
            (0, 2): 0.6666666666666667,
            (0, 3): 0.6666666666666667,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 2})
        # when all the devices and servers in a zone are evenly weighted
        # it will accurately proxy their required overload, all the
        # zones besides 0 require the same overload
        t = random.choice([t for t in weighted_replicas
                           if len(t) == 2
                           and t[1] != 0])
        expected_overload = ((wanted_replicas[t] - weighted_replicas[t])
                             / weighted_replicas[t])
        self.assertAlmostEqual(rb.get_required_overload(),
                               expected_overload)
        # but if you only give it out half of that
        rb.set_overload(expected_overload / 2.0)
        # ... you can expect it's not going to full disperse
        expected = {
            (0, 0): 1.0357142857142856,
            (0, 1): 0.6547619047619049,
            (0, 2): 0.6547619047619049,
            (0, 3): 0.6547619047619049,
        }
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 2})
    def test_enormous_zone(self):
        """A massively overweight zone forces the tiny zones to need
        several times their weighted share."""
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 500,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'weight': 60,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 1, 'weight': 60,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 2, 'weight': 60,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 2, 'weight': 60,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 3, 'weight': 60,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 9, 'region': 0, 'zone': 3, 'weight': 60,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        # by weight, zone 0 would hold about 2.5 of the 3 replicas
        expected = {
            (0, 0): 2.542372881355932,
            (0, 1): 0.15254237288135591,
            (0, 2): 0.15254237288135591,
            (0, 3): 0.15254237288135591,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 2})
        # dispersion caps zone 0 at a single replica
        expected = {
            (0, 0): 1.0,
            (0, 1): 0.6666666666666667,
            (0, 2): 0.6666666666666667,
            (0, 3): 0.6666666666666667,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 2})
        # ouch, those "tiny" devices need to hold 3x more than their
        # weighted for!
        self.assertAlmostEqual(rb.get_required_overload(), 3.370370,
                               delta=1e-5)
        # let's get a little crazy, and let devices eat up to 1x more than
        # their capacity is weighted for - see how far that gets us...
        rb.set_overload(1)
        target_replicas = rb._build_target_replicas_by_tier()
        expected = {
            (0, 0): 2.084745762711864,
            (0, 1): 0.30508474576271183,
            (0, 2): 0.30508474576271183,
            (0, 3): 0.30508474576271183,
        }
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 2})
    def test_two_big_two_small(self):
        """Two big zones plus two small ones splitting the third replica:
        ~1/6th overload is required, and 20% is sufficient."""
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 100,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': 100,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'weight': 45,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'weight': 45,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 3, 'weight': 35,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 3, 'weight': 35,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        # by weight the two big zones get over a replica each
        expected = {
            (0, 0): 1.0714285714285714,
            (0, 1): 1.0714285714285714,
            (0, 2): 0.48214285714285715,
            (0, 3): 0.375,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 2})
        # dispersion caps the big zones at 1.0 and hands the surplus to
        # zones 2 and 3 in proportion to their weights
        expected = {
            (0, 0): 1.0,
            (0, 1): 1.0,
            (0, 2): 0.5625,
            (0, 3): 0.43749999999999994,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 2})
        # I'm not sure it's significant or coincidental that the devices
        # in zone 2 & 3 who end up splitting the 3rd replica turn out to
        # need to eat ~1/6th extra replicanths
        self.assertAlmostEqual(rb.get_required_overload(), 1.0 / 6.0)
        # ... *so* 10% isn't *quite* enough
        rb.set_overload(0.1)
        target_replicas = rb._build_target_replicas_by_tier()
        expected = {
            (0, 0): 1.0285714285714285,
            (0, 1): 1.0285714285714285,
            (0, 2): 0.5303571428571429,
            (0, 3): 0.4125,
        }
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 2})
        # ... but 20% will do the trick!
        rb.set_overload(0.2)
        target_replicas = rb._build_target_replicas_by_tier()
        expected = {
            (0, 0): 1.0,
            (0, 1): 1.0,
            (0, 2): 0.5625,
            (0, 3): 0.43749999999999994,
        }
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 2})
    def test_multiple_replicas_each(self):
        """With 7 replicas over two zones, wanted rounds the zone shares
        to whole replica counts and only a small overload is needed."""
        rb = ring.RingBuilder(8, 7, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': 80,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': 80,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'weight': 80,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'weight': 80,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sdd'})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'weight': 80,
                    'ip': '127.0.0.0', 'port': 10000, 'device': 'sde'})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 1, 'weight': 70,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 6, 'region': 0, 'zone': 1, 'weight': 70,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdb'})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 1, 'weight': 70,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdc'})
        rb.add_dev({'id': 8, 'region': 0, 'zone': 1, 'weight': 70,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sdd'})
        # by weight: 400/680 and 280/680 of the 7 replicas
        expected = {
            (0, 0): 4.117647058823529,
            (0, 1): 2.8823529411764706,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 2})
        expected = {
            (0, 0): 4.0,
            (0, 1): 3.0,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 2})
        # I guess 2.88 => 3.0 is about a 4% increase
        self.assertAlmostEqual(rb.get_required_overload(),
                               0.040816326530612256)
        # ... 10% is plenty enough here
        rb.set_overload(0.1)
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 2})
    def test_small_extra_server_in_zone_with_multiple_replicas(self):
        """Zone-level dispersion is fine, but a tiny extra server forces
        a huge overload at the server tier."""
        rb = ring.RingBuilder(8, 5, 1)
        # z0
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sda', 'weight': 1000})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sdb', 'weight': 1000})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sdc', 'weight': 1000})
        # z1
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sda', 'weight': 1000})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sdb', 'weight': 1000})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 1, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sdc', 'weight': 1000})
        # z1 - extra small server
        rb.add_dev({'id': 6, 'region': 0, 'zone': 1, 'ip': '127.0.0.3',
                    'port': 6200, 'device': 'sda', 'weight': 50})
        expected = {
            (0, 0): 2.479338842975207,
            (0, 1): 2.5206611570247937,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected, {t: r for (t, r) in
                                    weighted_replicas.items()
                                    if len(t) == 2})
        # dispersion is fine with this at the zone tier
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected, {t: r for (t, r) in
                                    wanted_replicas.items()
                                    if len(t) == 2})
        # ... but not ok with that tiny server
        expected = {
            '127.0.0.1': 2.479338842975207,
            '127.0.0.2': 1.5206611570247937,
            '127.0.0.3': 1.0,
        }
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    wanted_replicas.items()
                                    if len(t) == 3})
        # the weight-50 server is asked to hold a whole replica, which is
        # ~23x more than its weighted share
        self.assertAlmostEqual(23.2, rb.get_required_overload())
    def test_multiple_replicas_in_zone_with_single_device(self):
        """A single-device zone caps every device at one replica, so no
        overload can change the outcome (and none is required)."""
        rb = ring.RingBuilder(8, 5, 0)
        # z0
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        # z1
        rb.add_dev({'id': 1, 'region': 0, 'zone': 1, 'ip': '127.0.1.1',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'ip': '127.0.1.1',
                    'port': 6200, 'device': 'sdb', 'weight': 100})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'ip': '127.0.1.2',
                    'port': 6200, 'device': 'sdc', 'weight': 100})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'ip': '127.0.1.2',
                    'port': 6200, 'device': 'sdd', 'weight': 100})
        # first things first, make sure we do this right
        rb.rebalance()
        # each device gets a single replica of every part
        expected = {
            0: 256,
            1: 256,
            2: 256,
            3: 256,
            4: 256,
        }
        self.assertEqual(expected, {d['id']: d['parts']
                                    for d in rb._iter_devs()})
        # but let's make sure we're thinking about it right too
        expected = {
            0: 1.0,
            1: 1.0,
            2: 1.0,
            3: 1.0,
            4: 1.0,
        }
        # by weight everyone is equal
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    weighted_replicas.items()
                                    if len(t) == 4})
        # wanted might have liked to have fewer replicas in z1, but the
        # single device in z0 limits us one replica per device
        with rb.debug():
            wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    wanted_replicas.items()
                                    if len(t) == 4})
        # even with some overload - still one replica per device
        rb.set_overload(1.0)
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    target_replicas.items()
                                    if len(t) == 4})
        # when overload can not change the outcome none is required
        self.assertEqual(0.0, rb.get_required_overload())
        # even though dispersion is terrible (in z1 particularly)
        self.assertEqual(20.0, rb.dispersion)
    def test_one_big_guy_does_not_spoil_his_buddy(self):
        """With a huge device sharing a tier, overload worsens overall
        balance, but the huge device's small neighbor still sheds parts
        to it effectively."""
        rb = ring.RingBuilder(8, 3, 0)
        # z0
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        # z1
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'ip': '127.0.1.1',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'ip': '127.0.1.2',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        # z2
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'ip': '127.0.2.1',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'ip': '127.0.2.2',
                    'port': 6200, 'device': 'sda', 'weight': 10000})
        # obviously d5 gets one whole replica; the other two replicas
        # are split evenly among the five other devices
        # (i.e. ~0.4 replicanths for each 100 units of weight)
        expected = {
            0: 0.39999999999999997,
            1: 0.39999999999999997,
            2: 0.39999999999999997,
            3: 0.39999999999999997,
            4: 0.39999999999999997,
            5: 1.0,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    weighted_replicas.items()
                                    if len(t) == 4})
        # with no overload we get the "balanced" placement
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    target_replicas.items()
                                    if len(t) == 4})
        # but in reality, these devices having such disparate weights
        # leads to a *terrible* balance even w/o overload!
        rb.rebalance(seed=9)
        self.assertEqual(rb.get_balance(), 1308.2031249999998)
        # even though part assignment is pretty reasonable
        expected = {
            0: 103,
            1: 102,
            2: 103,
            3: 102,
            4: 102,
            5: 256,
        }
        self.assertEqual(expected, {
            d['id']: d['parts'] for d in rb._iter_devs()})
        # so what's happening is the small devices are holding *way* more
        # *real* parts than their *relative* portion of the weight would
        # like them to!
        expected = {
            0: 1308.2031249999998,
            1: 1294.5312499999998,
            2: 1308.2031249999998,
            3: 1294.5312499999998,
            4: 1294.5312499999998,
            5: -65.0,
        }
        self.assertEqual(expected, rb._build_balance_per_dev())
        # increasing overload moves towards one replica in each tier
        rb.set_overload(0.20)
        expected = {
            0: 0.48,
            1: 0.48,
            2: 0.48,
            3: 0.48,
            4: 0.30857142857142855,
            5: 0.7714285714285714,
        }
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    target_replicas.items()
                                    if len(t) == 4})
        # ... and as always increasing overload makes balance *worse*
        rb.rebalance(seed=17)
        self.assertEqual(rb.get_balance(), 1581.6406249999998)
        # but despite the overall trend toward imbalance, in the tier with the
        # huge device, we want to see the small device (d4) try to shed parts
        # as effectively as it can to the huge device in the same tier (d5)
        # this is a useful behavior anytime when for whatever reason a device
        # w/i a tier wants parts from another device already in the same tier
        # another example is `test_one_small_guy_does_not_spoil_his_buddy`
        expected = {
            0: 123,
            1: 123,
            2: 123,
            3: 123,
            4: 79,
            5: 197,
        }
        self.assertEqual(expected, {
            d['id']: d['parts'] for d in rb._iter_devs()})
        # *see*, at least *someone's* balance is getting better!
        expected = {
            0: 1581.6406249999998,
            1: 1581.6406249999998,
            2: 1581.6406249999998,
            3: 1581.6406249999998,
            4: 980.078125,
            5: -73.06640625,
        }
        self.assertEqual(expected, rb._build_balance_per_dev())
    def test_one_small_guy_does_not_spoil_his_buddy(self):
        """The mirror case of `test_one_big_guy_does_not_spoil_his_buddy`:
        a tiny device sharing a tier stays near its tiny share even as
        overload grows and overall balance degrades."""
        rb = ring.RingBuilder(8, 3, 0)
        # z0
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sda', 'weight': 10000})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sda', 'weight': 10000})
        # z1
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'ip': '127.0.1.1',
                    'port': 6200, 'device': 'sda', 'weight': 10000})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'ip': '127.0.1.2',
                    'port': 6200, 'device': 'sda', 'weight': 10000})
        # z2
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'ip': '127.0.2.1',
                    'port': 6200, 'device': 'sda', 'weight': 10000})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'ip': '127.0.2.2',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        # it's almost like 3.0 / 5 ~= 0.6, but that one little guy gets
        # his fair share
        expected = {
            0: 0.5988023952095808,
            1: 0.5988023952095808,
            2: 0.5988023952095808,
            3: 0.5988023952095808,
            4: 0.5988023952095808,
            5: 0.005988023952095809,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    weighted_replicas.items()
                                    if len(t) == 4})
        # with no overload we get a nice balanced placement
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    target_replicas.items()
                                    if len(t) == 4})
        rb.rebalance(seed=9)
        # part placement looks good
        expected = {
            0: 154,
            1: 153,
            2: 153,
            3: 153,
            4: 153,
            5: 2,
        }
        self.assertEqual(expected, {
            d['id']: d['parts'] for d in rb._iter_devs()})
        # ... balance is a little lumpy on the small guy since he wants
        # one and a half parts :\
        expected = {
            0: 0.4609375000000142,
            1: -0.1914062499999858,
            2: -0.1914062499999858,
            3: -0.1914062499999858,
            4: -0.1914062499999858,
            5: 30.46875,
        }
        self.assertEqual(expected, rb._build_balance_per_dev())
        self.assertEqual(rb.get_balance(), 30.46875)
        # increasing overload moves towards one replica in each tier
        rb.set_overload(0.3)
        expected = {
            0: 0.553443113772455,
            1: 0.553443113772455,
            2: 0.553443113772455,
            3: 0.553443113772455,
            4: 0.778443113772455,
            5: 0.007784431137724551,
        }
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    target_replicas.items()
                                    if len(t) == 4})
        # ... and as always increasing overload makes balance *worse*
        rb.rebalance(seed=12)
        self.assertEqual(rb.get_balance(), 30.46875)
        # the little guy is really struggling to take his share tho
        expected = {
            0: 142,
            1: 141,
            2: 142,
            3: 141,
            4: 200,
            5: 2,
        }
        self.assertEqual(expected, {
            d['id']: d['parts'] for d in rb._iter_devs()})
        # ... and you can see it in the balance!
        expected = {
            0: -7.367187499999986,
            1: -8.019531249999986,
            2: -7.367187499999986,
            3: -8.019531249999986,
            4: 30.46875,
            5: 30.46875,
        }
        self.assertEqual(expected, rb._build_balance_per_dev())
        rb.set_overload(0.5)
        expected = {
            0: 0.5232035928143712,
            1: 0.5232035928143712,
            2: 0.5232035928143712,
            3: 0.5232035928143712,
            4: 0.8982035928143712,
            5: 0.008982035928143714,
        }
        target_replicas = rb._build_target_replicas_by_tier()
        self.assertEqual(expected, {t[-1]: r for (t, r) in
                                    target_replicas.items()
                                    if len(t) == 4})
        # because the device is so small, balance gets bad quick
        rb.rebalance(seed=17)
        self.assertEqual(rb.get_balance(), 95.703125)
        # but despite the overall trend toward imbalance, the little guy
        # isn't really taking on many new parts!
        expected = {
            0: 134,
            1: 134,
            2: 134,
            3: 133,
            4: 230,
            5: 3,
        }
        self.assertEqual(expected, {
            d['id']: d['parts'] for d in rb._iter_devs()})
        # *see*, everyone's balance is getting worse *together*!
        expected = {
            0: -12.585937499999986,
            1: -12.585937499999986,
            2: -12.585937499999986,
            3: -13.238281249999986,
            4: 50.0390625,
            5: 95.703125,
        }
        self.assertEqual(expected, rb._build_balance_per_dev())
def test_two_servers_with_more_than_one_replica(self):
rb = ring.RingBuilder(8, 3, 0)
# z0
rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
'port': 6200, 'device': 'sda', 'weight': 60})
rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'ip': '127.0.0.2',
'port': 6200, 'device': 'sda', 'weight': 60})
rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'ip': '127.0.0.3',
'port': 6200, 'device': 'sda', 'weight': 60})
# z1
rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'ip': '127.0.1.1',
'port': 6200, 'device': 'sda', 'weight': 80})
rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'ip': '127.0.1.2',
'port': 6200, 'device': 'sda', 'weight': 128})
# z2
rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'ip': '127.0.2.1',
'port': 6200, 'device': 'sda', 'weight': 80})
rb.add_dev({'id': 6, 'region': 0, 'zone': 2, 'ip': '127.0.2.2',
'port': 6200, 'device': 'sda', 'weight': 240})
rb.set_overload(0.1)
rb.rebalance()
self.assertEqual(12.161458333333343, rb.get_balance())
replica_plan = rb._build_target_replicas_by_tier()
for dev in rb._iter_devs():
tier = (dev['region'], dev['zone'], dev['ip'], dev['id'])
expected_parts = replica_plan[tier] * rb.parts
self.assertAlmostEqual(dev['parts'], expected_parts,
delta=1)
    def test_multi_zone_with_failed_device(self):
        """Removing a device from a zone makes its surviving partner need
        double its weighted share; escalating overloads converge on it."""
        rb = ring.RingBuilder(8, 3, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sda', 'weight': 2000})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sdb', 'weight': 2000})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sda', 'weight': 2000})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sdb', 'weight': 2000})
        rb.add_dev({'id': 4, 'region': 0, 'zone': 2, 'ip': '127.0.0.3',
                    'port': 6200, 'device': 'sda', 'weight': 2000})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 2, 'ip': '127.0.0.3',
                    'port': 6200, 'device': 'sdb', 'weight': 2000})
        # sanity, balanced and dispersed
        expected = {
            (0, 0): 1.0,
            (0, 1): 1.0,
            (0, 2): 1.0,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 2})
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 2})
        self.assertEqual(rb.get_required_overload(), 0.0)
        # fail a device in zone 2
        rb.remove_dev(4)
        expected = {
            0: 0.6,
            1: 0.6,
            2: 0.6,
            3: 0.6,
            5: 0.6,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier[3]: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 4})
        # dispersion still wants a whole replica in zone 2, which now has
        # only one device (d5) to carry it
        expected = {
            0: 0.5,
            1: 0.5,
            2: 0.5,
            3: 0.5,
            5: 1.0,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier[3]: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 4})
        # does this make sense? every zone was holding 1/3rd of the
        # replicas, so each device was 1/6th, remove a device and
        # suddenly it's holding *both* sixths which is 2/3rds?
        self.assertAlmostEqual(rb.get_required_overload(), 2.0 / 3.0)
        # 10% isn't nearly enough
        rb.set_overload(0.1)
        target_replicas = rb._build_target_replicas_by_tier()
        expected = {
            0: 0.585,
            1: 0.585,
            2: 0.585,
            3: 0.585,
            5: 0.6599999999999999,
        }
        self.assertEqual(expected,
                         {tier[3]: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 4})
        # 50% isn't even enough
        rb.set_overload(0.5)
        target_replicas = rb._build_target_replicas_by_tier()
        expected = {
            0: 0.525,
            1: 0.525,
            2: 0.525,
            3: 0.525,
            5: 0.8999999999999999,
        }
        self.assertEqual(expected,
                         {tier[3]: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 4})
        # even 65% isn't enough (but it's getting closer)
        rb.set_overload(0.65)
        target_replicas = rb._build_target_replicas_by_tier()
        expected = {
            0: 0.5025000000000001,
            1: 0.5025000000000001,
            2: 0.5025000000000001,
            3: 0.5025000000000001,
            5: 0.99,
        }
        self.assertEqual(expected,
                         {tier[3]: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 4})
    def test_balanced_zones_unbalanced_servers(self):
        """
        Two zones carry equal total weight, but zone 1 splits its weight
        very unevenly between two servers; the wanted/target plans should
        shift replicanths from the heavy server toward the light one,
        bounded by the configured overload.
        """
        rb = ring.RingBuilder(8, 3, 1)
        # zone 0 server 127.0.0.1
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sda', 'weight': 3000})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sdb', 'weight': 3000})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sda', 'weight': 3000})
        # zone 1 server 127.0.0.2
        rb.add_dev({'id': 4, 'region': 0, 'zone': 1, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sda', 'weight': 4000})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 1, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sdb', 'weight': 4000})
        # zone 1 (again) server 127.0.0.3
        rb.add_dev({'id': 6, 'region': 0, 'zone': 1, 'ip': '127.0.0.3',
                    'port': 6200, 'device': 'sda', 'weight': 1000})
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        # zones are evenly weighted
        expected = {
            (0, 0): 1.5,
            (0, 1): 1.5,
        }
        self.assertEqual(expected,
                         {tier: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 2})
        # ... but servers are not
        expected = {
            '127.0.0.1': 1.5,
            '127.0.0.2': 1.3333333333333333,
            '127.0.0.3': 0.16666666666666666,
        }
        self.assertEqual(expected,
                         {tier[2]: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 3})
        # make sure wanted will even it out
        expected = {
            '127.0.0.1': 1.5,
            '127.0.0.2': 1.0,
            '127.0.0.3': 0.4999999999999999,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier[2]: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 3})
        # so it wants 1/6th and eats 1/2 - that's 2/6ths more than it
        # wants which is a 200% increase
        self.assertAlmostEqual(rb.get_required_overload(), 2.0)
        # the overload doesn't effect the tiers that are already dispersed
        rb.set_overload(1)
        target_replicas = rb._build_target_replicas_by_tier()
        expected = {
            '127.0.0.1': 1.5,
            # notice with half the overload 1/6th replicanth swapped servers
            '127.0.0.2': 1.1666666666666665,
            '127.0.0.3': 0.3333333333333333,
        }
        self.assertEqual(expected,
                         {tier[2]: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 3})
    def test_adding_second_zone(self):
        """
        Adding a tiny second zone to a fully-built first zone makes strict
        dispersion demand a 6x overload; a small configured overload (10%)
        should only gently nudge replicanths toward the new zone.
        """
        rb = ring.RingBuilder(3, 3, 1)
        # zone 0 server 127.0.0.1
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sda', 'weight': 2000})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sdb', 'weight': 2000})
        # zone 0 server 127.0.0.2
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sda', 'weight': 2000})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sdb', 'weight': 2000})
        # zone 0 server 127.0.0.3
        rb.add_dev({'id': 4, 'region': 0, 'zone': 0, 'ip': '127.0.0.3',
                    'port': 6200, 'device': 'sda', 'weight': 2000})
        rb.add_dev({'id': 5, 'region': 0, 'zone': 0, 'ip': '127.0.0.3',
                    'port': 6200, 'device': 'sdb', 'weight': 2000})
        # sanity, balanced and dispersed
        expected = {
            '127.0.0.1': 1.0,
            '127.0.0.2': 1.0,
            '127.0.0.3': 1.0,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier[2]: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 3})
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier[2]: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 3})
        self.assertEqual(rb.get_required_overload(), 0)
        # start adding a second zone
        # zone 1 server 127.0.1.1
        rb.add_dev({'id': 6, 'region': 0, 'zone': 1, 'ip': '127.0.1.1',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        rb.add_dev({'id': 7, 'region': 0, 'zone': 1, 'ip': '127.0.1.1',
                    'port': 6200, 'device': 'sdb', 'weight': 100})
        # zone 1 server 127.0.1.2
        rb.add_dev({'id': 8, 'region': 0, 'zone': 1, 'ip': '127.0.1.2',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        rb.add_dev({'id': 9, 'region': 0, 'zone': 1, 'ip': '127.0.1.2',
                    'port': 6200, 'device': 'sdb', 'weight': 100})
        # zone 1 server 127.0.1.3
        rb.add_dev({'id': 10, 'region': 0, 'zone': 1, 'ip': '127.0.1.3',
                    'port': 6200, 'device': 'sda', 'weight': 100})
        rb.add_dev({'id': 11, 'region': 0, 'zone': 1, 'ip': '127.0.1.3',
                    'port': 6200, 'device': 'sdb', 'weight': 100})
        # this messes things up pretty royally
        expected = {
            '127.0.0.1': 0.9523809523809523,
            '127.0.0.2': 0.9523809523809523,
            '127.0.0.3': 0.9523809523809523,
            '127.0.1.1': 0.047619047619047616,
            '127.0.1.2': 0.047619047619047616,
            '127.0.1.3': 0.047619047619047616,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier[2]: weighted
                          for (tier, weighted) in weighted_replicas.items()
                          if len(tier) == 3})
        expected = {
            '127.0.0.1': 0.6666666666666667,
            '127.0.0.2': 0.6666666666666667,
            '127.0.0.3': 0.6666666666666667,
            '127.0.1.1': 0.3333333333333333,
            '127.0.1.2': 0.3333333333333333,
            '127.0.1.3': 0.3333333333333333,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected,
                         {tier[2]: weighted
                          for (tier, weighted) in wanted_replicas.items()
                          if len(tier) == 3})
        # so dispersion would require these devices hold 6x more than
        # prescribed by weight, defeating any attempt at gradually
        # anything
        self.assertAlmostEqual(rb.get_required_overload(), 6.0)
        # so let's suppose we only allow for 10% overload
        rb.set_overload(0.10)
        target_replicas = rb._build_target_replicas_by_tier()
        expected = {
            # we expect servers in zone 0 to be between 0.952 and 0.666
            '127.0.0.1': 0.9476190476190476,
            '127.0.0.2': 0.9476190476190476,
            '127.0.0.3': 0.9476190476190476,
            # we expect servers in zone 1 to be between 0.0476 and 0.333
            # and in fact its ~10% increase (very little compared to 6x!)
            '127.0.1.1': 0.052380952380952375,
            '127.0.1.2': 0.052380952380952375,
            '127.0.1.3': 0.052380952380952375,
        }
        self.assertEqual(expected,
                         {tier[2]: weighted
                          for (tier, weighted) in target_replicas.items()
                          if len(tier) == 3})
    def test_gradual_replica_count(self):
        """
        A fractional replica count (2.5 here) should still yield sane
        weighted/wanted/target plans; dispersion requirements only extend
        to whole replicas.
        """
        rb = ring.RingBuilder(3, 2.5, 1)
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sda', 'weight': 2000})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                    'port': 6200, 'device': 'sdb', 'weight': 2000})
        rb.add_dev({'id': 2, 'region': 0, 'zone': 0, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sda', 'weight': 2000})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 0, 'ip': '127.0.0.2',
                    'port': 6200, 'device': 'sdb', 'weight': 2000})
        # 2.5 replicas over four equal devices -> 0.625 each
        expected = {
            0: 0.625,
            1: 0.625,
            2: 0.625,
            3: 0.625,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected, {
            tier[3]: weighted
            for (tier, weighted) in weighted_replicas.items()
            if len(tier) == 4})
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected, {
            tier[3]: wanted
            for (tier, wanted) in wanted_replicas.items()
            if len(tier) == 4})
        self.assertEqual(rb.get_required_overload(), 0)
        # server 127.0.0.2 will have only one device
        rb.remove_dev(2)
        # server 127.0.0.1 has twice the capacity of 127.0.0.2
        expected = {
            '127.0.0.1': 1.6666666666666667,
            '127.0.0.2': 0.8333333333333334,
        }
        weighted_replicas = rb._build_weighted_replicas_by_tier()
        self.assertEqual(expected, {
            tier[2]: weighted
            for (tier, weighted) in weighted_replicas.items()
            if len(tier) == 3})
        # dispersion requirements extend only to whole replicas
        expected = {
            '127.0.0.1': 1.4999999999999998,
            '127.0.0.2': 1.0,
        }
        wanted_replicas = rb._build_wanted_replicas_by_tier()
        self.assertEqual(expected, {
            tier[2]: wanted
            for (tier, wanted) in wanted_replicas.items()
            if len(tier) == 3})
        # 5/6ths to a whole replicanth is a 20% increase
        self.assertAlmostEqual(rb.get_required_overload(), 0.2)
        # so let's suppose we only allow for 10% overload
        rb.set_overload(0.1)
        target_replicas = rb._build_target_replicas_by_tier()
        expected = {
            '127.0.0.1': 1.5833333333333333,
            '127.0.0.2': 0.9166666666666667,
        }
        self.assertEqual(expected, {
            tier[2]: wanted
            for (tier, wanted) in target_replicas.items()
            if len(tier) == 3})
    def test_perfect_four_zone_four_replica_bad_placement(self):
        """
        Four replicas over 2 regions x 2 zones of equal (float-hostile)
        weight: the replica plan should cap each region at 2 and each zone
        at 1, even though naively ceil()ing the weighted replicas would
        suggest more.
        """
        rb = ring.RingBuilder(4, 4, 1)
        # this weight is sorta nuts, but it's really just to help the
        # weight_of_one_part hit a magic number where floats mess up
        # like they would on ring with a part power of 19 and 100's of
        # 1000's of units of weight.
        weight = 21739130434795e-11
        # r0z0
        rb.add_dev({'id': 0, 'region': 0, 'zone': 0, 'weight': weight,
                    'ip': '127.0.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 1, 'region': 0, 'zone': 0, 'weight': weight,
                    'ip': '127.0.0.2', 'port': 10000, 'device': 'sdb'})
        # r0z1
        rb.add_dev({'id': 2, 'region': 0, 'zone': 1, 'weight': weight,
                    'ip': '127.0.1.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 3, 'region': 0, 'zone': 1, 'weight': weight,
                    'ip': '127.0.1.2', 'port': 10000, 'device': 'sdb'})
        # r1z0
        rb.add_dev({'id': 4, 'region': 1, 'zone': 0, 'weight': weight,
                    'ip': '127.1.0.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 5, 'region': 1, 'zone': 0, 'weight': weight,
                    'ip': '127.1.0.2', 'port': 10000, 'device': 'sdb'})
        # r1z1
        rb.add_dev({'id': 6, 'region': 1, 'zone': 1, 'weight': weight,
                    'ip': '127.1.1.1', 'port': 10000, 'device': 'sda'})
        rb.add_dev({'id': 7, 'region': 1, 'zone': 1, 'weight': weight,
                    'ip': '127.1.1.2', 'port': 10000, 'device': 'sdb'})
        # the replica plan is sound
        expectations = {
            # tier_len => expected replicas
            1: {
                (0,): 2.0,
                (1,): 2.0,
            },
            2: {
                (0, 0): 1.0,
                (0, 1): 1.0,
                (1, 0): 1.0,
                (1, 1): 1.0,
            }
        }
        wr = rb._build_replica_plan()
        for tier_len, expected in expectations.items():
            self.assertEqual(expected, {t: r['max'] for (t, r) in
                                        wr.items() if len(t) == tier_len})
        # even thought a naive ceil of weights is surprisingly wrong
        expectations = {
            # tier_len => expected replicas
            1: {
                (0,): 3.0,
                (1,): 3.0,
            },
            2: {
                (0, 0): 2.0,
                (0, 1): 2.0,
                (1, 0): 2.0,
                (1, 1): 2.0,
            }
        }
        wr = rb._build_weighted_replicas_by_tier()
        for tier_len, expected in expectations.items():
            self.assertEqual(expected, {t: ceil(r) for (t, r) in
                                        wr.items() if len(t) == tier_len})
class TestRingBuilderDispersion(unittest.TestCase):
    """Tests for RingBuilder.dispersion and utils.dispersion_report."""

    def setUp(self):
        # Endless supply of unique device names: d0, d1, d2, ...
        self.devs = ('d%s' % i for i in itertools.count())

    def assertAlmostPartCount(self, counts, expected, delta=3):
        """
        Assert every part count in *expected* is within *delta* of the
        matching entry in *counts*, reporting all entries (ok or not) in
        a single failure message.
        """
        msgs = []
        failed = False
        for k, p in sorted(expected.items()):
            try:
                self.assertAlmostEqual(counts[k], p, delta=delta)
            except KeyError:
                self.fail('%r is missing the key %r' % (counts, k))
            except AssertionError:
                failed = True
                state = '!='
            else:
                state = 'ok'
            msgs.append('parts in %s was %s expected %s (%s)' % (
                k, counts[k], p, state))
        if failed:
            self.fail('some part counts not close enough '
                      'to expected:\n' + '\n'.join(msgs))

    def test_rebalance_dispersion(self):
        # Start with a single fully-assigned zone, then add two empty
        # zones; dispersion should step down toward 0 over successive
        # rebalances as parts migrate gradually.
        rb = ring.RingBuilder(8, 6, 0)
        for i in range(6):
            rb.add_dev({'region': 0, 'zone': 0, 'ip': '127.0.0.1',
                        'port': 6000, 'weight': 1.0,
                        'device': next(self.devs)})
        rb.rebalance()
        self.assertEqual(0, rb.dispersion)
        for z in range(2):
            for i in range(6):
                rb.add_dev({'region': 0, 'zone': z + 1, 'ip': '127.0.1.1',
                            'port': 6000, 'weight': 1.0,
                            'device': next(self.devs)})
        self.assertAlmostPartCount(_partition_counts(rb, 'zone'),
                                   {0: 1536, 1: 0, 2: 0})
        rb.rebalance()
        self.assertEqual(rb.dispersion, 50.0)
        expected = {0: 1280, 1: 128, 2: 128}
        self.assertAlmostPartCount(_partition_counts(rb, 'zone'),
                                   expected)
        report = dict(utils.dispersion_report(
            rb, r'r\d+z\d+$', verbose=True)['graph'])
        counts = {int(k.split('z')[1]): d['placed_parts']
                  for k, d in report.items()}
        self.assertAlmostPartCount(counts, expected)
        rb.rebalance()
        self.assertEqual(rb.dispersion, 33.333333333333336)
        expected = {0: 1024, 1: 256, 2: 256}
        self.assertAlmostPartCount(_partition_counts(rb, 'zone'),
                                   expected)
        report = dict(utils.dispersion_report(
            rb, r'r\d+z\d+$', verbose=True)['graph'])
        counts = {int(k.split('z')[1]): d['placed_parts']
                  for k, d in report.items()}
        self.assertAlmostPartCount(counts, expected)
        rb.rebalance()
        self.assertEqual(rb.dispersion, 16.666666666666668)
        expected = {0: 768, 1: 384, 2: 384}
        self.assertAlmostPartCount(_partition_counts(rb, 'zone'),
                                   expected)
        report = dict(utils.dispersion_report(
            rb, r'r\d+z\d+$', verbose=True)['graph'])
        counts = {int(k.split('z')[1]): d['placed_parts']
                  for k, d in report.items()}
        self.assertAlmostPartCount(counts, expected)
        rb.rebalance()
        self.assertEqual(0, rb.dispersion)
        expected = {0: 512, 1: 512, 2: 512}
        self.assertAlmostPartCount(_partition_counts(rb, 'zone'), expected)
        report = dict(utils.dispersion_report(
            rb, r'r\d+z\d+$', verbose=True)['graph'])
        counts = {int(k.split('z')[1]): d['placed_parts']
                  for k, d in report.items()}
        self.assertAlmostPartCount(counts, expected)

    def test_weight_dispersion(self):
        # Two small servers plus one 10x server on the same zone: the big
        # server must hold multiple replicas of many parts.
        rb = ring.RingBuilder(8, 3, 0)
        for i in range(2):
            for d in range(3):
                rb.add_dev({'region': 0, 'zone': 0, 'ip': '127.0.%s.1' % i,
                            'port': 6000, 'weight': 1.0,
                            'device': next(self.devs)})
        for d in range(3):
            rb.add_dev({'region': 0, 'zone': 0, 'ip': '127.0.2.1',
                        'port': 6000, 'weight': 10.0,
                        'device': next(self.devs)})
        rb.rebalance()
        # each tier should only have 1 replicanth, but the big server has 2
        # replicas of every part and 3 replicas another 1/2 - so our total
        # dispersion is greater than one replicanth, it's 1.5
        self.assertEqual(50.0, rb.dispersion)
        expected = {
            '127.0.0.1': 64,
            '127.0.1.1': 64,
            '127.0.2.1': 640,
        }
        self.assertAlmostPartCount(_partition_counts(rb, 'ip'),
                                   expected)
        report = dict(utils.dispersion_report(
            rb, r'r\d+z\d+-[^/]*$', verbose=True)['graph'])
        counts = {k.split('-')[1]: d['placed_parts']
                  for k, d in report.items()}
        self.assertAlmostPartCount(counts, expected)

    def test_multiple_tier_dispersion(self):
        # Uneven region/zone/server topology: dispersion should settle
        # near its expected value and the report should agree with the
        # actual per-ip part counts.
        rb = ring.RingBuilder(10, 8, 0)
        r_z_to_ip_count = {
            (0, 0): 2,
            (1, 1): 1,
            (1, 2): 2,
        }
        ip_index = 0
        for (r, z), ip_count in sorted(r_z_to_ip_count.items()):
            for i in range(ip_count):
                ip_index += 1
                for d in range(3):
                    rb.add_dev({'region': r, 'zone': z,
                                'ip': '127.%s.%s.%s' % (r, z, ip_index),
                                'port': 6000, 'weight': 1.0,
                                'device': next(self.devs)})
        for i in range(3):
            # it might take a few rebalances for all the right part replicas
            # to balance from r1z2 into r1z1
            rb.rebalance()
        self.assertAlmostEqual(15.52734375, rb.dispersion, delta=5.0)
        self.assertAlmostEqual(0.0, rb.get_balance(), delta=0.5)
        expected = {
            '127.0.0.1': 1638,
            '127.0.0.2': 1638,
            '127.1.1.3': 1638,
            '127.1.2.4': 1638,
            '127.1.2.5': 1638,
        }
        delta = 10
        self.assertAlmostPartCount(_partition_counts(rb, 'ip'), expected,
                                   delta=delta)
        report = dict(utils.dispersion_report(
            rb, r'r\d+z\d+-[^/]*$', verbose=True)['graph'])
        counts = {k.split('-')[1]: d['placed_parts']
                  for k, d in report.items()}
        self.assertAlmostPartCount(counts, expected, delta=delta)
# Allow running this test module directly (outside the test runner).
if __name__ == '__main__':
    unittest.main()
# ---- end of swift-master test/unit/common/ring/test_builder.py ----
# ---- begin swift-master test/unit/common/ring/test_ring.py ----
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import array
import collections
import six.moves.cPickle as pickle
import os
import unittest
import stat
from contextlib import closing
from gzip import GzipFile
from tempfile import mkdtemp
from shutil import rmtree
from time import sleep, time
import sys
import copy
import mock
from six.moves import range
from swift.common import ring, utils
from swift.common.ring import utils as ring_utils
from swift.common.utils import md5
class TestRingBase(unittest.TestCase):
    """Base class pinning the hash path prefix/suffix for ring tests."""
    longMessage = True

    def setUp(self):
        # Capture the process-wide hashing globals so tearDown can put
        # them back exactly as found, then install known test values.
        self._orig_hash_suffix = utils.HASH_PATH_SUFFIX
        self._orig_hash_prefix = utils.HASH_PATH_PREFIX
        utils.HASH_PATH_SUFFIX = b'endcap'
        utils.HASH_PATH_PREFIX = b''

    def tearDown(self):
        # Restore the globals captured in setUp (order is irrelevant;
        # the two assignments are independent).
        utils.HASH_PATH_PREFIX = self._orig_hash_prefix
        utils.HASH_PATH_SUFFIX = self._orig_hash_suffix
class TestRingData(unittest.TestCase):
    """Tests for serializing and deserializing ring.RingData."""

    def setUp(self):
        # Fresh scratch directory next to this test module.
        self.testdir = os.path.join(os.path.dirname(__file__), 'ring_data')
        rmtree(self.testdir, ignore_errors=1)
        os.mkdir(self.testdir)

    def tearDown(self):
        rmtree(self.testdir, ignore_errors=1)

    def assert_ring_data_equal(self, rd_expected, rd_got):
        # Compare all the attributes RingData round-trips through disk.
        self.assertEqual(rd_expected._replica2part2dev_id,
                         rd_got._replica2part2dev_id)
        self.assertEqual(rd_expected.devs, rd_got.devs)
        self.assertEqual(rd_expected._part_shift, rd_got._part_shift)
        self.assertEqual(rd_expected.next_part_power, rd_got.next_part_power)
        self.assertEqual(rd_expected.version, rd_got.version)

    def test_attrs(self):
        # Constructor arguments land on the expected attributes.
        r2p2d = [[0, 1, 0, 1], [0, 1, 0, 1]]
        d = [{'id': 0, 'zone': 0, 'region': 0, 'ip': '10.1.1.0', 'port': 7000,
              'replication_ip': '10.1.1.0', 'replication_port': 7000},
             {'id': 1, 'zone': 1, 'region': 1, 'ip': '10.1.1.1', 'port': 7000,
              'replication_ip': '10.1.1.1', 'replication_port': 7000}]
        s = 30
        rd = ring.RingData(r2p2d, d, s)
        self.assertEqual(rd._replica2part2dev_id, r2p2d)
        self.assertEqual(rd.devs, d)
        self.assertEqual(rd._part_shift, s)

    def test_can_load_pickled_ring_data(self):
        # Legacy rings were plain pickles of RingData; they must still
        # load, with region defaulted to 1 and replication fields filled.
        # NOTE(review): range(HIGHEST_PROTOCOL) excludes the highest
        # protocol itself - presumably intentional, but worth confirming.
        rd = ring.RingData(
            [[0, 1, 0, 1], [0, 1, 0, 1]],
            [{'id': 0, 'zone': 0, 'ip': '10.1.1.0', 'port': 7000},
             {'id': 1, 'zone': 1, 'ip': '10.1.1.1', 'port': 7000}],
            30)
        ring_fname = os.path.join(self.testdir, 'foo.ring.gz')
        for p in range(pickle.HIGHEST_PROTOCOL):
            with closing(GzipFile(ring_fname, 'wb')) as f:
                pickle.dump(rd, f, protocol=p)
            meta_only = ring.RingData.load(ring_fname, metadata_only=True)
            self.assertEqual([
                {'id': 0, 'zone': 0, 'region': 1,
                 'ip': '10.1.1.0', 'port': 7000,
                 'replication_ip': '10.1.1.0', 'replication_port': 7000},
                {'id': 1, 'zone': 1, 'region': 1,
                 'ip': '10.1.1.1', 'port': 7000,
                 'replication_ip': '10.1.1.1', 'replication_port': 7000},
            ], meta_only.devs)
            # Pickled rings can't load only metadata, so you get it all
            self.assert_ring_data_equal(rd, meta_only)
            ring_data = ring.RingData.load(ring_fname)
            self.assert_ring_data_equal(rd, ring_data)

    def test_roundtrip_serialization(self):
        # save() then load() returns equal data; metadata_only load gives
        # devs but an empty assignment table.
        ring_fname = os.path.join(self.testdir, 'foo.ring.gz')
        rd = ring.RingData(
            [array.array('H', [0, 1, 0, 1]), array.array('H', [0, 1, 0, 1])],
            [{'id': 0, 'zone': 0}, {'id': 1, 'zone': 1}], 30)
        rd.save(ring_fname)
        meta_only = ring.RingData.load(ring_fname, metadata_only=True)
        self.assertEqual([
            {'id': 0, 'zone': 0, 'region': 1},
            {'id': 1, 'zone': 1, 'region': 1},
        ], meta_only.devs)
        self.assertEqual([], meta_only._replica2part2dev_id)
        rd2 = ring.RingData.load(ring_fname)
        self.assert_ring_data_equal(rd, rd2)

    def test_load_closes_file(self):
        # RingData.load must close its RingReader even on success.
        ring_fname = os.path.join(self.testdir, 'foo.ring.gz')
        rd = ring.RingData(
            [array.array('H', [0, 1, 0, 1]), array.array('H', [0, 1, 0, 1])],
            [{'id': 0, 'zone': 0}, {'id': 1, 'zone': 1}], 30)
        rd.save(ring_fname)

        class MockReader(ring.ring.RingReader):
            # Record close() calls (and the fp closed) for verification.
            calls = []

            def close(self):
                self.calls.append(('close', self.fp))
                return super(MockReader, self).close()

        with mock.patch('swift.common.ring.ring.RingReader',
                        MockReader) as mock_reader:
            ring.RingData.load(ring_fname)

        self.assertEqual([('close', mock.ANY)], mock_reader.calls)
        self.assertTrue(mock_reader.calls[0][1].closed)

    def test_byteswapped_serialization(self):
        # Manually byte swap a ring and write it out, claiming it was written
        # on a different endian machine. Then read it back in and see if it's
        # the same as the non-byte swapped original.
        ring_fname = os.path.join(self.testdir, 'foo.ring.gz')
        data = [array.array('H', [0, 1, 0, 1]), array.array('H', [0, 1, 0, 1])]
        swapped_data = copy.deepcopy(data)
        for x in swapped_data:
            x.byteswap()

        with mock.patch.object(sys, 'byteorder',
                               'big' if sys.byteorder == 'little'
                               else 'little'):
            rds = ring.RingData(swapped_data,
                                [{'id': 0, 'zone': 0}, {'id': 1, 'zone': 1}],
                                30)
            rds.save(ring_fname)

        rd1 = ring.RingData(data, [{'id': 0, 'zone': 0}, {'id': 1, 'zone': 1}],
                            30)
        rd2 = ring.RingData.load(ring_fname)
        self.assert_ring_data_equal(rd1, rd2)

    def test_deterministic_serialization(self):
        """
        Two identical rings should produce identical .gz files on disk.
        """
        os.mkdir(os.path.join(self.testdir, '1'))
        os.mkdir(os.path.join(self.testdir, '2'))
        # These have to have the same filename (not full path,
        # obviously) since the filename gets encoded in the gzip data.
        ring_fname1 = os.path.join(self.testdir, '1', 'the.ring.gz')
        ring_fname2 = os.path.join(self.testdir, '2', 'the.ring.gz')
        rd = ring.RingData(
            [array.array('H', [0, 1, 0, 1]), array.array('H', [0, 1, 0, 1])],
            [{'id': 0, 'zone': 0}, {'id': 1, 'zone': 1}], 30)
        rd.save(ring_fname1)
        rd.save(ring_fname2)
        with open(ring_fname1, 'rb') as ring1:
            with open(ring_fname2, 'rb') as ring2:
                self.assertEqual(ring1.read(), ring2.read())

    def test_permissions(self):
        # Saved ring files should be world-readable, owner-writable.
        ring_fname = os.path.join(self.testdir, 'stat.ring.gz')
        rd = ring.RingData(
            [array.array('H', [0, 1, 0, 1]), array.array('H', [0, 1, 0, 1])],
            [{'id': 0, 'zone': 0}, {'id': 1, 'zone': 1}], 30)
        rd.save(ring_fname)
        ring_mode = stat.S_IMODE(os.stat(ring_fname).st_mode)
        expected_mode = (stat.S_IRUSR | stat.S_IWUSR |
                         stat.S_IRGRP | stat.S_IROTH)
        self.assertEqual(
            ring_mode, expected_mode,
            'Ring has mode 0%o, expected 0%o' % (ring_mode, expected_mode))

    def test_replica_count(self):
        # A short replica row ([0, 1, 0] covers 3 of 4 parts) yields a
        # fractional replica count: 1 + 3/4.
        rd = ring.RingData(
            [[0, 1, 0, 1], [0, 1, 0, 1]],
            [{'id': 0, 'zone': 0, 'ip': '10.1.1.0', 'port': 7000},
             {'id': 1, 'zone': 1, 'ip': '10.1.1.1', 'port': 7000}],
            30)
        self.assertEqual(rd.replica_count, 2)

        rd = ring.RingData(
            [[0, 1, 0, 1], [0, 1, 0]],
            [{'id': 0, 'zone': 0, 'ip': '10.1.1.0', 'port': 7000},
             {'id': 1, 'zone': 1, 'ip': '10.1.1.1', 'port': 7000}],
            30)
        self.assertEqual(rd.replica_count, 1.75)
class TestRing(TestRingBase):
    def setUp(self):
        """
        Build and save a small 3-replica ring (part power 30) into a temp
        dir and load it as self.ring; dev id 2 is deliberately a hole
        (None) in the device list.
        """
        super(TestRing, self).setUp()
        self.testdir = mkdtemp()
        self.testgz = os.path.join(self.testdir, 'whatever.ring.gz')
        self.intended_replica2part2dev_id = [
            array.array('H', [0, 1, 0, 1]),
            array.array('H', [0, 1, 0, 1]),
            array.array('H', [3, 4, 3, 4])]
        self.intended_devs = [{'id': 0, 'region': 0, 'zone': 0, 'weight': 1.0,
                               'ip': '10.1.1.1', 'port': 6200,
                               'replication_ip': '10.1.0.1',
                               'replication_port': 6066},
                              {'id': 1, 'region': 0, 'zone': 0, 'weight': 1.0,
                               'ip': '10.1.1.1', 'port': 6200,
                               'replication_ip': '10.1.0.2',
                               'replication_port': 6066},
                              None,
                              {'id': 3, 'region': 0, 'zone': 2, 'weight': 1.0,
                               'ip': '10.1.2.1', 'port': 6200,
                               'replication_ip': '10.2.0.1',
                               'replication_port': 6066},
                              {'id': 4, 'region': 0, 'zone': 2, 'weight': 1.0,
                               'ip': '10.1.2.2', 'port': 6200,
                               'replication_ip': '10.2.0.1',
                               'replication_port': 6066}]
        self.intended_part_shift = 30
        self.intended_reload_time = 15
        ring.RingData(
            self.intended_replica2part2dev_id,
            self.intended_devs, self.intended_part_shift).save(self.testgz)
        self.ring = ring.Ring(
            self.testdir,
            reload_time=self.intended_reload_time, ring_name='whatever')
def tearDown(self):
super(TestRing, self).tearDown()
rmtree(self.testdir, ignore_errors=1)
    def test_creation(self):
        """
        A freshly loaded Ring exposes the saved table, devs, part shift,
        reload time, path, version, and the md5/size of the file bytes.
        """
        self.assertEqual(self.ring._replica2part2dev_id,
                         self.intended_replica2part2dev_id)
        self.assertEqual(self.ring._part_shift, self.intended_part_shift)
        self.assertEqual(self.ring.devs, self.intended_devs)
        self.assertEqual(self.ring.reload_time, self.intended_reload_time)
        self.assertEqual(self.ring.serialized_path, self.testgz)
        self.assertIsNone(self.ring.version)
        # md5/size must match the serialized file read in 64KiB chunks
        with open(self.testgz, 'rb') as fp:
            expected_md5 = md5(usedforsecurity=False)
            expected_size = 0
            for chunk in iter(lambda: fp.read(2 ** 16), b''):
                expected_md5.update(chunk)
                expected_size += len(chunk)
        self.assertEqual(self.ring.md5, expected_md5.hexdigest())
        self.assertEqual(self.ring.size, expected_size)
        # test invalid endcap
        with mock.patch.object(utils, 'HASH_PATH_SUFFIX', b''), \
                mock.patch.object(utils, 'HASH_PATH_PREFIX', b''), \
                mock.patch.object(utils, 'SWIFT_CONF_FILE', ''):
            self.assertRaises(IOError, ring.Ring, self.testdir, 'whatever')
    def test_replica_count(self):
        # Appending a partial replica row ([0] covers 1 of the 4 parts)
        # makes the replica count fractional: 3 + 1/4.
        self.assertEqual(self.ring.replica_count, 3)
        self.ring._replica2part2dev_id.append([0])
        self.assertEqual(self.ring.replica_count, 3.25)
def test_has_changed(self):
self.assertFalse(self.ring.has_changed())
os.utime(self.testgz, (time() + 60, time() + 60))
self.assertTrue(self.ring.has_changed())
    def test_reload(self):
        """
        With a tiny reload_time, a re-saved ring file is picked up by the
        next get_nodes(), get_part_nodes(), get_more_nodes() - and even by
        plain .devs access in the last scenario.
        """
        # scenario 1: get_nodes() triggers the reload
        os.utime(self.testgz, (time() - 300, time() - 300))
        self.ring = ring.Ring(self.testdir, reload_time=0.001,
                              ring_name='whatever')
        orig_mtime = self.ring._mtime
        self.assertEqual(len(self.ring.devs), 5)
        self.intended_devs.append(
            {'id': 3, 'region': 0, 'zone': 3, 'weight': 1.0,
             'ip': '10.1.1.1', 'port': 9876})
        ring.RingData(
            self.intended_replica2part2dev_id,
            self.intended_devs, self.intended_part_shift).save(self.testgz)
        sleep(0.1)
        self.ring.get_nodes('a')
        self.assertEqual(len(self.ring.devs), 6)
        self.assertNotEqual(self.ring._mtime, orig_mtime)
        # scenario 2: get_part_nodes() triggers the reload
        os.utime(self.testgz, (time() - 300, time() - 300))
        self.ring = ring.Ring(self.testdir, reload_time=0.001,
                              ring_name='whatever')
        orig_mtime = self.ring._mtime
        self.assertEqual(len(self.ring.devs), 6)
        self.intended_devs.append(
            {'id': 5, 'region': 0, 'zone': 4, 'weight': 1.0,
             'ip': '10.5.5.5', 'port': 9876})
        ring.RingData(
            self.intended_replica2part2dev_id,
            self.intended_devs, self.intended_part_shift).save(self.testgz)
        sleep(0.1)
        self.ring.get_part_nodes(0)
        self.assertEqual(len(self.ring.devs), 7)
        self.assertNotEqual(self.ring._mtime, orig_mtime)
        # scenario 3: get_more_nodes() triggers the reload
        os.utime(self.testgz, (time() - 300, time() - 300))
        self.ring = ring.Ring(self.testdir, reload_time=0.001,
                              ring_name='whatever')
        orig_mtime = self.ring._mtime
        part, nodes = self.ring.get_nodes('a')
        self.assertEqual(len(self.ring.devs), 7)
        self.intended_devs.append(
            {'id': 6, 'region': 0, 'zone': 5, 'weight': 1.0,
             'ip': '10.6.6.6', 'port': 6200})
        ring.RingData(
            self.intended_replica2part2dev_id,
            self.intended_devs, self.intended_part_shift).save(self.testgz)
        sleep(0.1)
        next(self.ring.get_more_nodes(part))
        self.assertEqual(len(self.ring.devs), 8)
        self.assertNotEqual(self.ring._mtime, orig_mtime)
        # scenario 4: no lookup call at all - reading .devs alone is
        # expected to notice the changed file
        os.utime(self.testgz, (time() - 300, time() - 300))
        self.ring = ring.Ring(self.testdir, reload_time=0.001,
                              ring_name='whatever')
        orig_mtime = self.ring._mtime
        self.assertEqual(len(self.ring.devs), 8)
        self.intended_devs.append(
            {'id': 5, 'region': 0, 'zone': 4, 'weight': 1.0,
             'ip': '10.5.5.5', 'port': 6200})
        ring.RingData(
            self.intended_replica2part2dev_id,
            self.intended_devs, self.intended_part_shift).save(self.testgz)
        sleep(0.1)
        self.assertEqual(len(self.ring.devs), 9)
        self.assertNotEqual(self.ring._mtime, orig_mtime)
    def test_reload_without_replication(self):
        """
        Devices saved without replication_ip/replication_port must have
        those fields defaulted from ip/port when the ring is loaded.
        """
        replication_less_devs = [{'id': 0, 'region': 0, 'zone': 0,
                                  'weight': 1.0, 'ip': '10.1.1.1',
                                  'port': 6200},
                                 {'id': 1, 'region': 0, 'zone': 0,
                                  'weight': 1.0, 'ip': '10.1.1.1',
                                  'port': 6200},
                                 None,
                                 {'id': 3, 'region': 0, 'zone': 2,
                                  'weight': 1.0, 'ip': '10.1.2.1',
                                  'port': 6200},
                                 {'id': 4, 'region': 0, 'zone': 2,
                                  'weight': 1.0, 'ip': '10.1.2.2',
                                  'port': 6200}]
        intended_devs = [{'id': 0, 'region': 0, 'zone': 0, 'weight': 1.0,
                          'ip': '10.1.1.1', 'port': 6200,
                          'replication_ip': '10.1.1.1',
                          'replication_port': 6200},
                         {'id': 1, 'region': 0, 'zone': 0, 'weight': 1.0,
                          'ip': '10.1.1.1', 'port': 6200,
                          'replication_ip': '10.1.1.1',
                          'replication_port': 6200},
                         None,
                         {'id': 3, 'region': 0, 'zone': 2, 'weight': 1.0,
                          'ip': '10.1.2.1', 'port': 6200,
                          'replication_ip': '10.1.2.1',
                          'replication_port': 6200},
                         {'id': 4, 'region': 0, 'zone': 2, 'weight': 1.0,
                          'ip': '10.1.2.2', 'port': 6200,
                          'replication_ip': '10.1.2.2',
                          'replication_port': 6200}]
        testgz = os.path.join(self.testdir, 'without_replication.ring.gz')
        ring.RingData(
            self.intended_replica2part2dev_id,
            replication_less_devs, self.intended_part_shift).save(testgz)
        self.ring = ring.Ring(
            self.testdir,
            reload_time=self.intended_reload_time,
            ring_name='without_replication')
        self.assertEqual(self.ring.devs, intended_devs)
def test_reload_old_style_pickled_ring(self):
devs = [{'id': 0, 'zone': 0,
'weight': 1.0, 'ip': '10.1.1.1',
'port': 6200},
{'id': 1, 'zone': 0,
'weight': 1.0, 'ip': '10.1.1.1',
'port': 6200},
None,
{'id': 3, 'zone': 2,
'weight': 1.0, 'ip': '10.1.2.1',
'port': 6200},
{'id': 4, 'zone': 2,
'weight': 1.0, 'ip': '10.1.2.2',
'port': 6200}]
intended_devs = [{'id': 0, 'region': 1, 'zone': 0, 'weight': 1.0,
'ip': '10.1.1.1', 'port': 6200,
'replication_ip': '10.1.1.1',
'replication_port': 6200},
{'id': 1, 'region': 1, 'zone': 0, 'weight': 1.0,
'ip': '10.1.1.1', 'port': 6200,
'replication_ip': '10.1.1.1',
'replication_port': 6200},
None,
{'id': 3, 'region': 1, 'zone': 2, 'weight': 1.0,
'ip': '10.1.2.1', 'port': 6200,
'replication_ip': '10.1.2.1',
'replication_port': 6200},
{'id': 4, 'region': 1, 'zone': 2, 'weight': 1.0,
'ip': '10.1.2.2', 'port': 6200,
'replication_ip': '10.1.2.2',
'replication_port': 6200}]
# simulate an old-style pickled ring
testgz = os.path.join(self.testdir,
'without_replication_or_region.ring.gz')
ring_data = ring.RingData(self.intended_replica2part2dev_id,
devs,
self.intended_part_shift)
# an old-style pickled ring won't have region data
for dev in ring_data.devs:
if dev:
del dev["region"]
gz_file = GzipFile(testgz, 'wb')
pickle.dump(ring_data, gz_file, protocol=2)
gz_file.close()
self.ring = ring.Ring(
self.testdir,
reload_time=self.intended_reload_time,
ring_name='without_replication_or_region')
self.assertEqual(self.ring.devs, intended_devs)
def test_get_part(self):
part1 = self.ring.get_part('a')
nodes1 = self.ring.get_part_nodes(part1)
part2, nodes2 = self.ring.get_nodes('a')
self.assertEqual(part1, part2)
self.assertEqual(nodes1, nodes2)
def test_get_part_nodes(self):
part, nodes = self.ring.get_nodes('a')
self.assertEqual(nodes, self.ring.get_part_nodes(part))
    def test_get_nodes(self):
        # Yes, these tests are deliberately very fragile. We want to make sure
        # that if someones changes the results the ring produces, they know it.
        self.assertRaises(TypeError, self.ring.get_nodes)
        # account-only paths
        part, nodes = self.ring.get_nodes('a')
        self.assertEqual(part, 0)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[0],
                                            self.intended_devs[3]])])

        part, nodes = self.ring.get_nodes('a1')
        self.assertEqual(part, 0)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[0],
                                            self.intended_devs[3]])])

        part, nodes = self.ring.get_nodes('a4')
        self.assertEqual(part, 1)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[1],
                                            self.intended_devs[4]])])

        part, nodes = self.ring.get_nodes('aa')
        self.assertEqual(part, 1)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[1],
                                            self.intended_devs[4]])])

        # account/container paths
        part, nodes = self.ring.get_nodes('a', 'c1')
        self.assertEqual(part, 0)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[0],
                                            self.intended_devs[3]])])

        part, nodes = self.ring.get_nodes('a', 'c0')
        self.assertEqual(part, 3)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[1],
                                            self.intended_devs[4]])])

        part, nodes = self.ring.get_nodes('a', 'c3')
        self.assertEqual(part, 2)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[0],
                                            self.intended_devs[3]])])

        # (the part for 'a/c2' is deliberately not pinned here)
        part, nodes = self.ring.get_nodes('a', 'c2')
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[0],
                                            self.intended_devs[3]])])

        # account/container/object paths
        part, nodes = self.ring.get_nodes('a', 'c', 'o1')
        self.assertEqual(part, 1)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[1],
                                            self.intended_devs[4]])])

        part, nodes = self.ring.get_nodes('a', 'c', 'o5')
        self.assertEqual(part, 0)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[0],
                                            self.intended_devs[3]])])

        part, nodes = self.ring.get_nodes('a', 'c', 'o0')
        self.assertEqual(part, 0)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[0],
                                            self.intended_devs[3]])])

        part, nodes = self.ring.get_nodes('a', 'c', 'o2')
        self.assertEqual(part, 2)
        self.assertEqual(nodes, [dict(node, index=i) for i, node in
                                 enumerate([self.intended_devs[0],
                                            self.intended_devs[3]])])
    def add_dev_to_ring(self, new_dev):
        # Test helper: inject a device directly into the loaded ring and
        # rebuild the tier structures that handoff selection relies on.
        self.ring.devs.append(new_dev)
        self.ring._rebuild_tier_data()
@unittest.skipIf(sys.version_info < (3,),
                 "Seed-specific tests don't work well between python "
                 "versions. This test is now PY3 only")
def test_get_more_nodes(self):
    """
    Pin down the exact handoff sequences Ring.get_more_nodes() yields
    for fixed rebalance seeds, then track how those sequences shift as
    the ring changes: a device is added, a device is removed, replicas
    become fractional, and extra regions are introduced.
    """
    # Yes, these tests are deliberately very fragile. We want to make sure
    # that if someone changes the results the ring produces, they know it.
    exp_part = 6
    exp_devs = [102, 39, 93]
    exp_zones = set([8, 9, 4])
    # Full expected handoff order for exp_part with seed=43.
    exp_handoffs = [
        69, 10, 22, 35, 56, 83, 100, 42, 92, 25, 50, 74, 61, 4,
        13, 67, 8, 20, 106, 47, 89, 27, 59, 76, 97, 37, 85, 64,
        0, 15, 32, 52, 79, 71, 11, 23, 99, 44, 90, 68, 6, 18,
        96, 36, 84, 103, 41, 95, 33, 54, 81, 24, 48, 72, 60, 3,
        12, 63, 2, 17, 28, 58, 75, 66, 7, 19, 104, 40, 94, 107,
        45, 87, 101, 43, 91, 29, 57, 77, 62, 5, 14, 105, 46, 88,
        98, 38, 86, 70, 9, 21, 65, 1, 16, 34, 55, 82, 31, 53,
        78, 30, 51, 80, 26, 49, 73]
    # Expected first handoff device id for each of the 256 partitions.
    exp_first_handoffs = [
        28, 34, 101, 99, 35, 62, 69, 65, 71, 67, 60, 34,
        34, 101, 96, 98, 101, 27, 25, 106, 61, 63, 60,
        104, 106, 65, 106, 31, 25, 25, 32, 62, 70, 35, 31,
        99, 35, 33, 33, 64, 64, 32, 98, 69, 60, 102, 68,
        33, 34, 60, 26, 60, 98, 32, 29, 60, 107, 96, 31,
        65, 32, 26, 103, 62, 96, 62, 25, 103, 34, 30, 107,
        104, 25, 97, 32, 65, 102, 24, 67, 97, 70, 63, 35,
        105, 33, 104, 69, 29, 63, 30, 24, 102, 60, 30, 26,
        105, 103, 104, 35, 24, 30, 64, 99, 27, 71, 107,
        30, 25, 34, 33, 32, 62, 100, 103, 32, 33, 34, 99,
        70, 32, 68, 69, 33, 27, 71, 101, 102, 99, 30, 31,
        98, 71, 34, 33, 31, 100, 61, 107, 106, 66, 97,
        106, 96, 101, 34, 33, 33, 28, 106, 30, 64, 96,
        104, 105, 67, 32, 99, 102, 102, 30, 97, 105, 34,
        99, 31, 61, 64, 29, 64, 61, 30, 101, 106, 60, 35,
        34, 64, 61, 65, 101, 65, 62, 69, 60, 102, 107, 30,
        28, 28, 34, 28, 65, 99, 105, 33, 62, 99, 71, 29,
        66, 61, 101, 104, 104, 33, 96, 26, 62, 24, 64, 25,
        99, 97, 35, 103, 32, 67, 70, 102, 26, 99, 102,
        105, 65, 97, 31, 60, 60, 103, 98, 97, 98, 35, 66,
        24, 98, 71, 0, 24, 67, 67, 30, 62, 69, 105, 71,
        64, 101, 65, 32, 102, 35, 31, 34, 29, 105]
    # Build a 9-zone ring: 4 servers per zone, 3 devices per server.
    rb = ring.RingBuilder(8, 3, 1)
    next_dev_id = 0
    for zone in range(1, 10):
        for server in range(1, 5):
            for device in range(1, 4):
                rb.add_dev({'id': next_dev_id,
                            'ip': '1.2.%d.%d' % (zone, server),
                            'port': 1234 + device,
                            'zone': zone, 'region': 0,
                            'weight': 1.0,
                            'device': "d%s" % device})
                next_dev_id += 1
    rb.rebalance(seed=43)
    rb.get_ring().save(self.testgz)
    r = ring.Ring(self.testdir, ring_name='whatever')
    # every part has the same number of handoffs
    part_handoff_counts = set()
    for part in range(r.partition_count):
        part_handoff_counts.add(len(list(r.get_more_nodes(part))))
    self.assertEqual(part_handoff_counts, {105})
    # which less the primaries - is every device in the ring
    self.assertEqual(len(list(rb._iter_devs())) - rb.replicas, 105)
    part, devs = r.get_nodes('a', 'c', 'o')
    primary_zones = set([d['zone'] for d in devs])
    self.assertEqual(part, exp_part)
    self.assertEqual([d['id'] for d in devs], exp_devs)
    self.assertEqual(primary_zones, exp_zones)
    devs = list(r.get_more_nodes(part))
    self.assertEqual(len(devs), len(exp_handoffs))
    dev_ids = [d['id'] for d in devs]
    self.assertEqual(dev_ids, exp_handoffs)
    # We mark handoffs so code consuming extra nodes can reason about how
    # far they've gone
    for i, d in enumerate(devs):
        self.assertEqual(d['handoff_index'], i)
    # The first 6 replicas plus the 3 primary nodes should cover all 9
    # zones in this test
    seen_zones = set(primary_zones)
    seen_zones.update([d['zone'] for d in devs[:6]])
    self.assertEqual(seen_zones, set(range(1, 10)))
    # The first handoff nodes for each partition in the ring
    devs = []
    for part in range(r.partition_count):
        devs.append(next(r.get_more_nodes(part))['id'])
    self.assertEqual(devs, exp_first_handoffs)
    # Add a new device we can handoff to.
    zone = 5
    server = 0
    rb.add_dev({'id': next_dev_id,
                'ip': '1.2.%d.%d' % (zone, server),
                'port': 1234, 'zone': zone, 'region': 0, 'weight': 1.0,
                'device': 'xd0'})
    next_dev_id += 1
    rb.pretend_min_part_hours_passed()
    num_parts_changed, _balance, _removed_dev = rb.rebalance(seed=43)
    rb.get_ring().save(self.testgz)
    r = ring.Ring(self.testdir, ring_name='whatever')
    # so now we expect the device list to be longer by one device
    part_handoff_counts = set()
    for part in range(r.partition_count):
        part_handoff_counts.add(len(list(r.get_more_nodes(part))))
    self.assertEqual(part_handoff_counts, {106})
    self.assertEqual(len(list(rb._iter_devs())) - rb.replicas, 106)
    # I don't think there's any special reason this dev goes at this index
    exp_handoffs.insert(33, rb.devs[-1]['id'])
    # We would change expectations here, but in this part only the added
    # device changed at all.
    part, devs = r.get_nodes('a', 'c', 'o')
    primary_zones = set([d['zone'] for d in devs])
    self.assertEqual(part, exp_part)
    self.assertEqual([d['id'] for d in devs], exp_devs)
    self.assertEqual(primary_zones, exp_zones)
    devs = list(r.get_more_nodes(part))
    dev_ids = [d['id'] for d in devs]
    self.assertEqual(len(dev_ids), len(exp_handoffs))
    for index, dev in enumerate(dev_ids):
        self.assertEqual(
            dev, exp_handoffs[index],
            'handoff differs at position %d\n%s\n%s' % (
                index, dev_ids[index:], exp_handoffs[index:]))
    # The handoffs still cover all the non-primary zones first
    seen_zones = set(primary_zones)
    seen_zones.update([d['zone'] for d in devs[:6]])
    self.assertEqual(seen_zones, set(range(1, 10)))
    # Change expectations for the rest of the parts
    devs = []
    for part in range(r.partition_count):
        devs.append(next(r.get_more_nodes(part))['id'])
    changed_first_handoff = 0
    for part in range(r.partition_count):
        if devs[part] != exp_first_handoffs[part]:
            changed_first_handoff += 1
            exp_first_handoffs[part] = devs[part]
    self.assertEqual(devs, exp_first_handoffs)
    # only reassigned partitions should have a new first handoff
    self.assertEqual(changed_first_handoff, num_parts_changed)
    # Remove a device - no need to fluff min_part_hours.
    rb.remove_dev(0)
    num_parts_changed, _balance, _removed_dev = rb.rebalance(seed=87)
    rb.get_ring().save(self.testgz)
    r = ring.Ring(self.testdir, ring_name='whatever')
    # so now we expect the device list to be shorter by one device
    part_handoff_counts = set()
    for part in range(r.partition_count):
        part_handoff_counts.add(len(list(r.get_more_nodes(part))))
    self.assertEqual(part_handoff_counts, {105})
    self.assertEqual(len(list(rb._iter_devs())) - rb.replicas, 105)
    # Change expectations for our part
    exp_handoffs.remove(0)
    first_matches = 0
    total_changed = 0
    devs = list(d['id'] for d in r.get_more_nodes(exp_part))
    for i, part in enumerate(devs):
        if exp_handoffs[i] != devs[i]:
            total_changed += 1
            exp_handoffs[i] = devs[i]
        if not total_changed:
            first_matches += 1
    self.assertEqual(devs, exp_handoffs)
    # the first 32 handoffs were the same across the rebalance
    self.assertEqual(first_matches, 32)
    # but as you dig deeper some of the differences show up
    self.assertEqual(total_changed, 27)
    # Change expectations for the rest of the parts
    devs = []
    for part in range(r.partition_count):
        devs.append(next(r.get_more_nodes(part))['id'])
    changed_first_handoff = 0
    for part in range(r.partition_count):
        if devs[part] != exp_first_handoffs[part]:
            changed_first_handoff += 1
            exp_first_handoffs[part] = devs[part]
    self.assertEqual(devs, exp_first_handoffs)
    self.assertEqual(changed_first_handoff, num_parts_changed)
    # Test
    part, devs = r.get_nodes('a', 'c', 'o')
    primary_zones = set([d['zone'] for d in devs])
    self.assertEqual(part, exp_part)
    self.assertEqual([d['id'] for d in devs], exp_devs)
    self.assertEqual(primary_zones, exp_zones)
    devs = list(r.get_more_nodes(part))
    dev_ids = [d['id'] for d in devs]
    self.assertEqual(len(dev_ids), len(exp_handoffs))
    for index, dev in enumerate(dev_ids):
        self.assertEqual(
            dev, exp_handoffs[index],
            'handoff differs at position %d\n%s\n%s' % (
                index, dev_ids[index:], exp_handoffs[index:]))
    seen_zones = set(primary_zones)
    seen_zones.update([d['zone'] for d in devs[:6]])
    self.assertEqual(seen_zones, set(range(1, 10)))
    devs = []
    for part in range(r.partition_count):
        devs.append(next(r.get_more_nodes(part))['id'])
    for part in range(r.partition_count):
        self.assertEqual(
            devs[part], exp_first_handoffs[part],
            'handoff for partitition %d is now device id %d' % (
                part, devs[part]))
    # Add a partial replica
    rb.set_replicas(3.5)
    num_parts_changed, _balance, _removed_dev = rb.rebalance(seed=164)
    rb.get_ring().save(self.testgz)
    r = ring.Ring(self.testdir, ring_name='whatever')
    # Change expectations
    # We have another replica now
    exp_devs.append(13)
    exp_zones.add(2)
    # and therefore one less handoff
    exp_handoffs = exp_handoffs[:-1]
    # Caused some major changes in the sequence of handoffs for our test
    # partition, but at least the first stayed the same.
    devs = list(d['id'] for d in r.get_more_nodes(exp_part))
    first_matches = 0
    total_changed = 0
    for i, part in enumerate(devs):
        if exp_handoffs[i] != devs[i]:
            total_changed += 1
            exp_handoffs[i] = devs[i]
        if not total_changed:
            first_matches += 1
    # most seeds seem to throw out first handoff stabilization with
    # replica_count change
    self.assertEqual(first_matches, 0)
    # and lots of other handoff changes...
    self.assertEqual(total_changed, 95)
    self.assertEqual(devs, exp_handoffs)
    # Change expectations for the rest of the parts
    devs = []
    for part in range(r.partition_count):
        devs.append(next(r.get_more_nodes(part))['id'])
    changed_first_handoff = 0
    for part in range(r.partition_count):
        if devs[part] != exp_first_handoffs[part]:
            changed_first_handoff += 1
            exp_first_handoffs[part] = devs[part]
    self.assertEqual(devs, exp_first_handoffs)
    self.assertLessEqual(changed_first_handoff, num_parts_changed)
    # Test
    part, devs = r.get_nodes('a', 'c', 'o')
    primary_zones = set([d['zone'] for d in devs])
    self.assertEqual(part, exp_part)
    self.assertEqual([d['id'] for d in devs], exp_devs)
    self.assertEqual(primary_zones, exp_zones)
    devs = list(r.get_more_nodes(part))
    dev_ids = [d['id'] for d in devs]
    self.assertEqual(len(dev_ids), len(exp_handoffs))
    for index, dev in enumerate(dev_ids):
        self.assertEqual(
            dev, exp_handoffs[index],
            'handoff differs at position %d\n%s\n%s' % (
                index, dev_ids[index:], exp_handoffs[index:]))
    seen_zones = set(primary_zones)
    seen_zones.update([d['zone'] for d in devs[:6]])
    self.assertEqual(seen_zones, set(range(1, 10)))
    devs = []
    for part in range(r.partition_count):
        devs.append(next(r.get_more_nodes(part))['id'])
    for part in range(r.partition_count):
        self.assertEqual(
            devs[part], exp_first_handoffs[part],
            'handoff for partitition %d is now device id %d' % (
                part, devs[part]))
    # One last test of a partial replica partition
    exp_part2 = 136
    exp_devs2 = [35, 56, 83]
    exp_zones2 = set([3, 5, 7])
    exp_handoffs2 = [
        61, 4, 13, 86, 103, 41, 63, 2, 17, 95, 70, 67, 8, 20,
        106, 100, 11, 23, 87, 47, 51, 42, 30, 24, 48, 72, 27,
        59, 76, 97, 38, 90, 108, 79, 55, 68, 6, 18, 105, 71,
        62, 5, 14, 107, 89, 7, 45, 69, 10, 22, 12, 99, 44, 46,
        88, 74, 39, 15, 102, 93, 85, 34, 98, 29, 57, 77, 84, 9,
        21, 58, 78, 32, 52, 66, 19, 28, 75, 65, 1, 16, 33, 37,
        49, 82, 31, 53, 54, 81, 96, 92, 3, 25, 50, 60, 36, 101,
        43, 104, 40, 94, 64, 80, 26, 73, 91]
    part2, devs2 = r.get_nodes('a', 'c', 'o2')
    primary_zones2 = set([d['zone'] for d in devs2])
    self.assertEqual(part2, exp_part2)
    self.assertEqual([d['id'] for d in devs2], exp_devs2)
    self.assertEqual(primary_zones2, exp_zones2)
    devs2 = list(r.get_more_nodes(part2))
    dev_ids2 = [d['id'] for d in devs2]
    self.assertEqual(len(dev_ids2), len(exp_handoffs2))
    for index, dev in enumerate(dev_ids2):
        self.assertEqual(
            dev, exp_handoffs2[index],
            'handoff differs at position %d\n%s\n%s' % (
                index, dev_ids2[index:], exp_handoffs2[index:]))
    seen_zones = set(primary_zones2)
    seen_zones.update([d['zone'] for d in devs2[:6]])
    self.assertEqual(seen_zones, set(range(1, 10)))
    # Test distribution across regions
    rb.set_replicas(3)
    for region in range(1, 5):
        rb.add_dev({'id': next_dev_id,
                    'ip': '1.%d.1.%d' % (region, server), 'port': 1234,
                    # 108.0 is the weight of all devices created prior to
                    # this test in region 0; this way all regions have
                    # equal combined weight
                    'zone': 1, 'region': region, 'weight': 108.0,
                    'device': 'sdx'})
        next_dev_id += 1
    # two rebalances with min_part_hours bypassed to let parts settle
    rb.pretend_min_part_hours_passed()
    rb.rebalance(seed=1)
    rb.pretend_min_part_hours_passed()
    rb.rebalance(seed=1)
    rb.get_ring().save(self.testgz)
    r = ring.Ring(self.testdir, ring_name='whatever')
    # There's 5 regions now, so the primary nodes + first 2 handoffs
    # should span all 5 regions
    part, devs = r.get_nodes('a1', 'c1', 'o1')
    primary_regions = set([d['region'] for d in devs])
    primary_zones = set([(d['region'], d['zone']) for d in devs])
    more_devs = list(r.get_more_nodes(part))
    seen_regions = set(primary_regions)
    seen_regions.update([d['region'] for d in more_devs[:2]])
    self.assertEqual(seen_regions, set(range(0, 5)))
    # There are 13 zones now, so the first 13 nodes should all have
    # distinct zones (that's r0z0, r0z1, ..., r0z8, r1z1, r2z1, r3z1, and
    # r4z1).
    seen_zones = set(primary_zones)
    seen_zones.update([(d['region'], d['zone']) for d in more_devs[:10]])
    self.assertEqual(13, len(seen_zones))
    # Here's a brittle canary-in-the-coalmine test to make sure the region
    # handoff computation didn't change accidentally
    exp_handoffs = [111, 112, 83, 45, 21, 95, 51, 26, 3, 102, 72, 80, 59,
                    61, 14, 89, 105, 31, 1, 39, 90, 16, 86, 75, 49, 42, 35,
                    71, 99, 20, 97, 27, 54, 67, 8, 11, 37, 108, 73, 78, 23,
                    53, 79, 82, 57, 106, 85, 22, 25, 13, 47, 76, 18, 84,
                    81, 12, 32, 17, 103, 41, 19, 50, 52, 4, 94, 64, 48, 63,
                    43, 66, 104, 6, 62, 87, 69, 68, 46, 98, 77, 2, 107, 93,
                    9, 28, 55, 33, 5, 92, 74, 96, 7, 40, 30, 100, 36, 15,
                    88, 58, 24, 56, 34, 101, 60, 10, 38, 29, 70, 44, 91]
    dev_ids = [d['id'] for d in more_devs]
    self.assertEqual(len(dev_ids), len(exp_handoffs))
    for index, dev_id in enumerate(dev_ids):
        self.assertEqual(
            dev_id, exp_handoffs[index],
            'handoff differs at position %d\n%s\n%s' % (
                index, dev_ids[index:], exp_handoffs[index:]))
def test_get_more_nodes_with_zero_weight_region(self):
    """
    Build a ring where one region's devices all have zero weight and
    measure how many probes of the replica-to-part-to-device table
    get_more_nodes() needs before yielding the first handoff for each
    partition; the probe-count histogram must stay within sane bounds.
    """
    rb = ring.RingBuilder(8, 3, 1)
    devs = [
        ring_utils.parse_add_value(v) for v in [
            'r1z1-127.0.0.1:6200/d1',
            'r1z1-127.0.0.1:6201/d2',
            'r1z1-127.0.0.1:6202/d3',
            'r1z1-127.0.0.1:6203/d4',
            'r1z2-127.0.0.2:6200/d1',
            'r1z2-127.0.0.2:6201/d2',
            'r1z2-127.0.0.2:6202/d3',
            'r1z2-127.0.0.2:6203/d4',
            'r2z1-127.0.1.1:6200/d1',
            'r2z1-127.0.1.1:6201/d2',
            'r2z1-127.0.1.1:6202/d3',
            'r2z1-127.0.1.1:6203/d4',
            'r2z2-127.0.1.2:6200/d1',
            'r2z2-127.0.1.2:6201/d2',
            'r2z2-127.0.1.2:6202/d3',
            'r2z2-127.0.1.2:6203/d4',
        ]
    ]
    for dev in devs:
        # region 2 carries zero weight, so it gets no partitions
        if dev['region'] == 2:
            dev['weight'] = 0.0
        else:
            dev['weight'] = 1.0
        rb.add_dev(dev)
    rb.rebalance()
    rb.get_ring().save(self.testgz)
    r = ring.Ring(self.testdir, ring_name='whatever')
    self.assertEqual(r.version, rb.version)

    class CountingRingTable(object):
        # Wraps the ring's _replica2part2dev_id table and counts calls
        # to __next__ so we can see how much probing was done.

        def __init__(self, table):
            self.table = table
            self.count = 0

        def __iter__(self):
            self._iter = iter(self.table)
            return self

        def __next__(self):
            self.count += 1
            return next(self._iter)

        # complete the api
        next = __next__

        def __getitem__(self, key):
            return self.table[key]

    histogram = collections.defaultdict(int)
    for part in range(r.partition_count):
        counting_table = CountingRingTable(r._replica2part2dev_id)
        with mock.patch.object(r, '_replica2part2dev_id', counting_table):
            node_iter = r.get_more_nodes(part)
            next(node_iter)
        histogram[counting_table.count] += 1
    # Don't let our summing muddy our histogram
    histogram = dict(histogram)
    # sanity
    self.assertEqual(1, r._num_regions)
    self.assertEqual(2, r._num_zones)
    self.assertEqual(256, r.partition_count)
    # We always do one loop (including the StopIteration) while getting
    # primaries, so every part should hit next() at least 5 times
    self.assertEqual(sum(histogram.get(x, 0) for x in range(5)), 0,
                     histogram)
    # Most of the parts should find a handoff device in the next partition,
    # but because some of the primary devices may *also* be used for that
    # partition, that means 5, 6, or 7 calls to next().
    self.assertGreater(sum(histogram.get(x, 0) for x in range(8)), 160,
                       histogram)
    # Want 90% confidence that it'll happen within two partitions
    self.assertGreater(sum(histogram.get(x, 0) for x in range(12)), 230,
                       histogram)
    # Tail should fall off fairly quickly
    self.assertLess(sum(histogram.get(x, 0) for x in range(20, 100)), 5,
                    histogram)
    # Hard limit at 50 (we've seen as bad as 41, 45)
    self.assertEqual(sum(histogram.get(x, 0) for x in range(50, 100)), 0,
                     histogram)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/common/ring/test_ring.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Tests for swift.account.backend """
from collections import defaultdict
import json
import pickle
import os
from time import sleep, time
from uuid import uuid4
import sqlite3
import itertools
from contextlib import contextmanager
import random
import mock
import base64
import shutil
import six
from swift.account.backend import AccountBroker
from swift.common.utils import Timestamp
from test.unit import patch_policies, with_tempdir, make_timestamp_iter
from swift.common.db import DatabaseConnectionError, TombstoneReclaimer
from swift.common.request_helpers import get_reserved_name
from swift.common.storage_policy import StoragePolicy, POLICIES
from swift.common.utils import md5
from test.unit.common import test_db
@patch_policies
class TestAccountBroker(test_db.TestDbBase):
"""Tests for AccountBroker"""
def setUp(self):
    """Prepare a per-test timestamp iterator starting slightly in the past."""
    super(TestAccountBroker, self).setUp()
    # tests seem to assume x-timestamp was set by the proxy before "now"
    self.ts = make_timestamp_iter(offset=-1)
def test_creation(self):
    """Opening a broker before initialize() must raise
    DatabaseConnectionError; afterwards queries succeed."""
    path = self.get_db_path()
    broker = AccountBroker(path, account='a')
    self.assertEqual(broker.db_file, path)
    try:
        with broker.get() as conn:
            pass
    except DatabaseConnectionError as err:
        # The error carries the offending path and a reason message.
        self.assertTrue(hasattr(err, 'path'))
        self.assertEqual(err.path, path)
        self.assertTrue(hasattr(err, 'msg'))
        self.assertEqual(err.msg, "DB doesn't exist")
    except Exception as err:
        self.fail("Unexpected exception raised: %r" % err)
    else:
        self.fail("Expected a DatabaseConnectionError exception")
    # Once initialized, the database exists and is queryable.
    broker.initialize(Timestamp('1').internal)
    with broker.get() as conn:
        cursor = conn.cursor()
        cursor.execute('SELECT 1')
        self.assertEqual(cursor.fetchall()[0][0], 1)
def test_initialize_fail(self):
    """initialize() on a broker with no account name must raise ValueError."""
    nameless_broker = AccountBroker(self.get_db_path())
    with self.assertRaises(ValueError) as caught:
        nameless_broker.initialize(Timestamp('1').internal)
    expected_msg = ('Attempting to create a new'
                    ' database with no account set')
    self.assertEqual(str(caught.exception), expected_msg)
def test_exception(self):
    """A broker must discard its cached connection after an exception
    escapes the get() context manager."""
    broker = AccountBroker(self.get_db_path(), account='a')
    broker.initialize(Timestamp('1').internal)
    cached_conn = None
    with broker.get() as conn:
        cached_conn = conn
    try:
        with broker.get() as conn:
            # While healthy, the same pooled connection is handed back.
            self.assertEqual(cached_conn, conn)
            raise Exception('OMG')
    except Exception:
        pass
    # After the failure the connection must not be retained.
    self.assertIsNone(broker.conn)
def test_empty(self):
    """empty() flips as containers are created and then deleted."""
    # Test AccountBroker.empty
    broker = AccountBroker(self.get_db_path(), account='a')
    broker.initialize(Timestamp('1').internal)
    self.assertTrue(broker.empty())
    # a live container makes the account non-empty
    broker.put_container('o', Timestamp.now().internal, 0, 0, 0,
                         POLICIES.default.idx)
    self.assertFalse(broker.empty())
    sleep(.00001)
    # a newer delete record for the same container empties it again
    broker.put_container('o', 0, Timestamp.now().internal, 0, 0,
                         POLICIES.default.idx)
    self.assertTrue(broker.empty())
def test_is_status_deleted(self):
    """is_status_deleted() is true after delete_db() or when the
    delete timestamp exceeds the put timestamp."""
    # Test AccountBroker.is_status_deleted
    broker1 = AccountBroker(self.get_db_path(), account='a')
    broker1.initialize(Timestamp.now().internal)
    self.assertFalse(broker1.is_status_deleted())
    broker1.delete_db(Timestamp.now().internal)
    self.assertTrue(broker1.is_status_deleted())
    broker2 = AccountBroker(self.get_db_path(), account='a')
    broker2.initialize(Timestamp.now().internal)
    # Set delete_timestamp greater than put_timestamp
    broker2.merge_timestamps(
        time(), Timestamp.now().internal,
        Timestamp(time() + 999).internal)
    self.assertTrue(broker2.is_status_deleted())
def test_reclaim(self):
    """reclaim() should purge only tombstone rows older than the
    reclaim age, and must still work on a deleted account db."""
    broker = AccountBroker(self.get_db_path(), account='test_account')
    broker.initialize(Timestamp('1').internal)
    broker.put_container('c', Timestamp.now().internal, 0, 0, 0,
                         POLICIES.default.idx)
    # commit pending file into db
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 0").fetchone()[0], 1)
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 1").fetchone()[0], 0)
    # with no tombstones present, reclaim is a no-op
    broker.reclaim(Timestamp(time() - 999).internal, time())
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 0").fetchone()[0], 1)
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 1").fetchone()[0], 0)
    sleep(.00001)
    broker.put_container('c', 0, Timestamp.now().internal, 0, 0,
                         POLICIES.default.idx)
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 0").fetchone()[0], 0)
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 1").fetchone()[0], 1)
    # the tombstone is newer than the reclaim age, so it survives
    broker.reclaim(Timestamp(time() - 999).internal, time())
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 0").fetchone()[0], 0)
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 1").fetchone()[0], 1)
    sleep(.00001)
    # now the tombstone is older than the reclaim age and gets purged
    broker.reclaim(Timestamp.now().internal, time())
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 0").fetchone()[0], 0)
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 1").fetchone()[0], 0)
    # Test reclaim after deletion. Create 3 test containers
    broker.put_container('x', 0, 0, 0, 0, POLICIES.default.idx)
    broker.put_container('y', 0, 0, 0, 0, POLICIES.default.idx)
    broker.put_container('z', 0, 0, 0, 0, POLICIES.default.idx)
    broker._commit_puts()
    broker.reclaim(Timestamp.now().internal, time())
    # Now delete the account
    broker.delete_db(Timestamp.now().internal)
    broker.reclaim(Timestamp.now().internal, time())
def test_batched_reclaim(self):
    """Reclaim pages through tombstones in RECLAIM_PAGE_SIZE batches
    with in-order markers and a shrinking reclaimable count."""
    num_of_containers = 60
    container_specs = []
    now = time()
    top_of_the_minute = now - (now % 60)
    # alternate deleted / live containers, one per minute back in time
    c = itertools.cycle([True, False])
    for m, is_deleted in six.moves.zip(range(num_of_containers), c):
        offset = top_of_the_minute - (m * 60)
        container_specs.append((Timestamp(offset), is_deleted))
    random.seed(now)
    random.shuffle(container_specs)
    policy_indexes = list(p.idx for p in POLICIES)
    broker = AccountBroker(self.get_db_path(), account='test_account')
    broker.initialize(Timestamp('1').internal)
    for i, container_spec in enumerate(container_specs):
        # with container12 before container2 and shuffled ts.internal we
        # shouldn't be able to accidentally rely on any implicit ordering
        name = 'container%s' % i
        pidx = random.choice(policy_indexes)
        ts, is_deleted = container_spec
        if is_deleted:
            broker.put_container(name, 0, ts.internal, 0, 0, pidx)
        else:
            broker.put_container(name, ts.internal, 0, 0, 0, pidx)
    # commit pending file into db
    broker._commit_puts()

    def count_reclaimable(conn, reclaim_age):
        # number of tombstones older than reclaim_age
        return conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 1 AND delete_timestamp < ?", (reclaim_age,)
        ).fetchone()[0]

    # This is intended to divide the set of timestamps exactly in half
    # regardless of the value of now
    reclaim_age = top_of_the_minute + 1 - (num_of_containers / 2 * 60)
    with broker.get() as conn:
        self.assertEqual(count_reclaimable(conn, reclaim_age),
                         num_of_containers / 4)

    trace = []

    class TracingReclaimer(TombstoneReclaimer):
        # records (age, marker, reclaimable-count) before each batch
        def _reclaim(self, conn):
            trace.append(
                (self.age_timestamp, self.marker,
                 count_reclaimable(conn, self.age_timestamp)))
            return super(TracingReclaimer, self)._reclaim(conn)

    with mock.patch(
            'swift.common.db.TombstoneReclaimer', TracingReclaimer), \
            mock.patch('swift.common.db.RECLAIM_PAGE_SIZE', 10):
        broker.reclaim(reclaim_age, reclaim_age)
    with broker.get() as conn:
        self.assertEqual(count_reclaimable(conn, reclaim_age), 0)
    self.assertEqual(3, len(trace), trace)
    self.assertEqual([age for age, marker, reclaimable in trace],
                     [reclaim_age] * 3)
    # markers are in-order
    self.assertLess(trace[0][1], trace[1][1])
    self.assertLess(trace[1][1], trace[2][1])
    # reclaimable count gradually decreases
    # generally, count1 > count2 > count3, but because of the randomness
    # we may occasionally have count1 == count2 or count2 == count3
    self.assertGreaterEqual(trace[0][2], trace[1][2])
    self.assertGreaterEqual(trace[1][2], trace[2][2])
    # technically, this might happen occasionally, but *really* rarely
    self.assertTrue(trace[0][2] > trace[1][2] or
                    trace[1][2] > trace[2][2])
def test_delete_db_status(self):
    """delete_db() should record delete/status-changed timestamps while
    leaving put_timestamp and created_at untouched."""
    start = next(self.ts)
    broker = AccountBroker(self.get_db_path(), account='a')
    broker.initialize(start.internal)
    info = broker.get_info()
    self.assertEqual(info['put_timestamp'], start.internal)
    self.assertGreaterEqual(Timestamp(info['created_at']), start)
    self.assertEqual(info['delete_timestamp'], '0')
    # pre-metadata schema (see subclass elsewhere in this module) did
    # not track status_changed_at on initialize
    if self.__class__ == TestAccountBrokerBeforeMetadata:
        self.assertEqual(info['status_changed_at'], '0')
    else:
        self.assertEqual(info['status_changed_at'], start.internal)
    # delete it
    delete_timestamp = next(self.ts)
    broker.delete_db(delete_timestamp.internal)
    info = broker.get_info()
    self.assertEqual(info['put_timestamp'], start.internal)
    self.assertGreaterEqual(Timestamp(info['created_at']), start)
    self.assertEqual(info['delete_timestamp'], delete_timestamp.internal)
    self.assertEqual(info['status_changed_at'], delete_timestamp.internal)
def test_delete_container(self):
    """A newer delete record flips a container row to deleted."""
    # Test AccountBroker.delete_container
    broker = AccountBroker(self.get_db_path(), account='a')
    broker.initialize(Timestamp('1').internal)
    broker.put_container('o', Timestamp.now().internal, 0, 0, 0,
                         POLICIES.default.idx)
    # commit pending file into db
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 0").fetchone()[0], 1)
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 1").fetchone()[0], 0)
    sleep(.00001)
    # a later delete timestamp marks the row as deleted
    broker.put_container('o', 0, Timestamp.now().internal, 0, 0,
                         POLICIES.default.idx)
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 0").fetchone()[0], 0)
        self.assertEqual(conn.execute(
            "SELECT count(*) FROM container "
            "WHERE deleted = 1").fetchone()[0], 1)
def test_put_container(self):
    """put_container() merge semantics: the newest put/delete timestamps
    win regardless of the order in which records arrive."""
    # Test AccountBroker.put_container
    broker = AccountBroker(self.get_db_path(), account='a')
    broker.initialize(Timestamp('1').internal)
    # Create initial container
    timestamp = Timestamp.now().internal
    broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
                         POLICIES.default.idx)
    # commit pending file into db
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT name FROM container").fetchone()[0],
            '"{<container \'&\' name>}"')
        self.assertEqual(conn.execute(
            "SELECT put_timestamp FROM container").fetchone()[0],
            timestamp)
        self.assertEqual(conn.execute(
            "SELECT deleted FROM container").fetchone()[0], 0)
    # Reput same event
    broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
                         POLICIES.default.idx)
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT name FROM container").fetchone()[0],
            '"{<container \'&\' name>}"')
        self.assertEqual(conn.execute(
            "SELECT put_timestamp FROM container").fetchone()[0],
            timestamp)
        self.assertEqual(conn.execute(
            "SELECT deleted FROM container").fetchone()[0], 0)
    # Put new event
    sleep(.00001)
    timestamp = Timestamp.now().internal
    broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
                         POLICIES.default.idx)
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT name FROM container").fetchone()[0],
            '"{<container \'&\' name>}"')
        self.assertEqual(conn.execute(
            "SELECT put_timestamp FROM container").fetchone()[0],
            timestamp)
        self.assertEqual(conn.execute(
            "SELECT deleted FROM container").fetchone()[0], 0)
    # Put old event - the newer put_timestamp must be retained
    otimestamp = Timestamp(float(Timestamp(timestamp)) - 1).internal
    broker.put_container('"{<container \'&\' name>}"', otimestamp, 0, 0, 0,
                         POLICIES.default.idx)
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT name FROM container").fetchone()[0],
            '"{<container \'&\' name>}"')
        self.assertEqual(conn.execute(
            "SELECT put_timestamp FROM container").fetchone()[0],
            timestamp)
        self.assertEqual(conn.execute(
            "SELECT deleted FROM container").fetchone()[0], 0)
    # Put old delete event - recorded, but put_timestamp still wins
    dtimestamp = Timestamp(float(Timestamp(timestamp)) - 1).internal
    broker.put_container('"{<container \'&\' name>}"', 0, dtimestamp, 0, 0,
                         POLICIES.default.idx)
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT name FROM container").fetchone()[0],
            '"{<container \'&\' name>}"')
        self.assertEqual(conn.execute(
            "SELECT put_timestamp FROM container").fetchone()[0],
            timestamp)
        self.assertEqual(conn.execute(
            "SELECT delete_timestamp FROM container").fetchone()[0],
            dtimestamp)
        self.assertEqual(conn.execute(
            "SELECT deleted FROM container").fetchone()[0], 0)
    # Put new delete event
    sleep(.00001)
    timestamp = Timestamp.now().internal
    broker.put_container('"{<container \'&\' name>}"', 0, timestamp, 0, 0,
                         POLICIES.default.idx)
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT name FROM container").fetchone()[0],
            '"{<container \'&\' name>}"')
        self.assertEqual(conn.execute(
            "SELECT delete_timestamp FROM container").fetchone()[0],
            timestamp)
        self.assertEqual(conn.execute(
            "SELECT deleted FROM container").fetchone()[0], 1)
    # Put new event - a newer put resurrects the container
    sleep(.00001)
    timestamp = Timestamp.now().internal
    broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
                         POLICIES.default.idx)
    broker._commit_puts()
    with broker.get() as conn:
        self.assertEqual(conn.execute(
            "SELECT name FROM container").fetchone()[0],
            '"{<container \'&\' name>}"')
        self.assertEqual(conn.execute(
            "SELECT put_timestamp FROM container").fetchone()[0],
            timestamp)
        self.assertEqual(conn.execute(
            "SELECT deleted FROM container").fetchone()[0], 0)
def test_get_info(self):
    """get_info() reports account metadata and a container_count that
    tracks container puts and deletes."""
    # Test AccountBroker.get_info
    broker = AccountBroker(self.get_db_path(), account='test1')
    broker.initialize(Timestamp('1').internal)
    info = broker.get_info()
    self.assertEqual(info['account'], 'test1')
    self.assertEqual(info['hash'], '00000000000000000000000000000000')
    self.assertEqual(info['put_timestamp'], Timestamp(1).internal)
    self.assertEqual(info['delete_timestamp'], '0')
    # pre-metadata schema did not track status_changed_at
    if self.__class__ == TestAccountBrokerBeforeMetadata:
        self.assertEqual(info['status_changed_at'], '0')
    else:
        self.assertEqual(info['status_changed_at'], Timestamp(1).internal)
    info = broker.get_info()
    self.assertEqual(info['container_count'], 0)
    broker.put_container('c1', Timestamp.now().internal, 0, 0, 0,
                         POLICIES.default.idx)
    info = broker.get_info()
    self.assertEqual(info['container_count'], 1)
    sleep(.00001)
    broker.put_container('c2', Timestamp.now().internal, 0, 0, 0,
                         POLICIES.default.idx)
    info = broker.get_info()
    self.assertEqual(info['container_count'], 2)
    sleep(.00001)
    # re-putting an existing container must not bump the count
    broker.put_container('c2', Timestamp.now().internal, 0, 0, 0,
                         POLICIES.default.idx)
    info = broker.get_info()
    self.assertEqual(info['container_count'], 2)
    sleep(.00001)
    # deletes decrement the count
    broker.put_container('c1', 0, Timestamp.now().internal, 0, 0,
                         POLICIES.default.idx)
    info = broker.get_info()
    self.assertEqual(info['container_count'], 1)
    sleep(.00001)
    broker.put_container('c2', 0, Timestamp.now().internal, 0, 0,
                         POLICIES.default.idx)
    info = broker.get_info()
    self.assertEqual(info['container_count'], 0)
    def test_list_containers_iter(self):
        # Test AccountBroker.list_containers_iter: paging via limit/marker/
        # end_marker, plus prefix, delimiter and reverse handling.
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal)
        # 4 x 125 containers named '<d>-<nnnn>', plus extra names chosen
        # to exercise delimiter roll-up boundaries below
        for cont1 in range(4):
            for cont2 in range(125):
                broker.put_container('%d-%04d' % (cont1, cont2),
                                     Timestamp.now().internal, 0, 0, 0,
                                     POLICIES.default.idx)
        for cont in range(125):
            broker.put_container('2-0051-%04d' % cont,
                                 Timestamp.now().internal, 0, 0, 0,
                                 POLICIES.default.idx)
        for cont in range(125):
            broker.put_container('3-%04d-0049' % cont,
                                 Timestamp.now().internal, 0, 0, 0,
                                 POLICIES.default.idx)
        # plain paging with limit / marker / end_marker
        listing = broker.list_containers_iter(100, '', None, None, '')
        self.assertEqual(len(listing), 100)
        self.assertEqual(listing[0][0], '0-0000')
        self.assertEqual(listing[-1][0], '0-0099')
        listing = broker.list_containers_iter(100, '', '0-0050', None, '')
        self.assertEqual(len(listing), 50)
        self.assertEqual(listing[0][0], '0-0000')
        self.assertEqual(listing[-1][0], '0-0049')
        listing = broker.list_containers_iter(100, '0-0099', None, None, '')
        self.assertEqual(len(listing), 100)
        self.assertEqual(listing[0][0], '0-0100')
        self.assertEqual(listing[-1][0], '1-0074')
        listing = broker.list_containers_iter(55, '1-0074', None, None, '')
        self.assertEqual(len(listing), 55)
        self.assertEqual(listing[0][0], '1-0075')
        self.assertEqual(listing[-1][0], '2-0004')
        # prefix filtering, with and without a delimiter
        listing = broker.list_containers_iter(10, '', None, '0-01', '')
        self.assertEqual(len(listing), 10)
        self.assertEqual(listing[0][0], '0-0100')
        self.assertEqual(listing[-1][0], '0-0109')
        listing = broker.list_containers_iter(10, '', None, '0-01', '-')
        self.assertEqual(len(listing), 10)
        self.assertEqual(listing[0][0], '0-0100')
        self.assertEqual(listing[-1][0], '0-0109')
        listing = broker.list_containers_iter(10, '', None, '0-00', '-',
                                              reverse=True)
        self.assertEqual(len(listing), 10)
        self.assertEqual(listing[0][0], '0-0099')
        self.assertEqual(listing[-1][0], '0-0090')
        listing = broker.list_containers_iter(10, '', None, '0-', '-')
        self.assertEqual(len(listing), 10)
        self.assertEqual(listing[0][0], '0-0000')
        self.assertEqual(listing[-1][0], '0-0009')
        listing = broker.list_containers_iter(10, '', None, '0-', '-',
                                              reverse=True)
        self.assertEqual(len(listing), 10)
        self.assertEqual(listing[0][0], '0-0124')
        self.assertEqual(listing[-1][0], '0-0115')
        # a delimiter with no prefix rolls everything up to top-level
        # '<d>-' subdir markers
        listing = broker.list_containers_iter(10, '', None, '', '-')
        self.assertEqual(len(listing), 4)
        self.assertEqual([row[0] for row in listing],
                         ['0-', '1-', '2-', '3-'])
        listing = broker.list_containers_iter(10, '', None, '', '-',
                                              reverse=True)
        self.assertEqual(len(listing), 4)
        self.assertEqual([row[0] for row in listing],
                         ['3-', '2-', '1-', '0-'])
        listing = broker.list_containers_iter(10, '2-', None, None, '-')
        self.assertEqual(len(listing), 1)
        self.assertEqual([row[0] for row in listing], ['3-'])
        listing = broker.list_containers_iter(10, '2-', None, None, '-',
                                              reverse=True)
        self.assertEqual(len(listing), 2)
        self.assertEqual([row[0] for row in listing], ['1-', '0-'])
        # NB: ord('.') == ord('-') + 1, so marker '2.' sorts after all
        # of the '2-...' names
        listing = broker.list_containers_iter(10, '2.', None, None, '-',
                                              reverse=True)
        self.assertEqual(len(listing), 3)
        self.assertEqual([row[0] for row in listing], ['2-', '1-', '0-'])
        listing = broker.list_containers_iter(10, '', None, '2', '-')
        self.assertEqual(len(listing), 1)
        self.assertEqual([row[0] for row in listing], ['2-'])
        # '2-0051-%04d' names roll up to the single '2-0051-' entry
        listing = broker.list_containers_iter(10, '2-0050', None, '2-', '-')
        self.assertEqual(len(listing), 10)
        self.assertEqual(listing[0][0], '2-0051')
        self.assertEqual(listing[1][0], '2-0051-')
        self.assertEqual(listing[2][0], '2-0052')
        self.assertEqual(listing[-1][0], '2-0059')
        listing = broker.list_containers_iter(10, '3-0045', None, '3-', '-')
        self.assertEqual(len(listing), 10)
        self.assertEqual([row[0] for row in listing],
                         ['3-0045-', '3-0046', '3-0046-', '3-0047',
                          '3-0047-', '3-0048', '3-0048-', '3-0049',
                          '3-0049-', '3-0050'])
        # a real container whose name equals a rolled-up subdir marker
        broker.put_container('3-0049-', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        listing = broker.list_containers_iter(10, '3-0048', None, None, None)
        self.assertEqual(len(listing), 10)
        self.assertEqual([row[0] for row in listing],
                         ['3-0048-0049', '3-0049', '3-0049-', '3-0049-0049',
                          '3-0050', '3-0050-0049', '3-0051', '3-0051-0049',
                          '3-0052', '3-0052-0049'])
        listing = broker.list_containers_iter(10, '3-0048', None, '3-', '-')
        self.assertEqual(len(listing), 10)
        self.assertEqual([row[0] for row in listing],
                         ['3-0048-', '3-0049', '3-0049-', '3-0050',
                          '3-0050-', '3-0051', '3-0051-', '3-0052',
                          '3-0052-', '3-0053'])
        listing = broker.list_containers_iter(10, None, None, '3-0049-', '-')
        self.assertEqual(len(listing), 2)
        self.assertEqual([row[0] for row in listing],
                         ['3-0049-', '3-0049-0049'])
    def test_list_objects_iter_order_and_reverse(self):
        # Test AccountBroker.list_containers_iter byte-wise ordering
        # (uppercase sorts before lowercase) and reverse listings.
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal, 0)
        broker.put_container(
            'c1', Timestamp(0).internal, 0, 0, 0, POLICIES.default.idx)
        broker.put_container(
            'c10', Timestamp(0).internal, 0, 0, 0, POLICIES.default.idx)
        broker.put_container(
            'C1', Timestamp(0).internal, 0, 0, 0, POLICIES.default.idx)
        broker.put_container(
            'c2', Timestamp(0).internal, 0, 0, 0, POLICIES.default.idx)
        broker.put_container(
            'c3', Timestamp(0).internal, 0, 0, 0, POLICIES.default.idx)
        broker.put_container(
            'C4', Timestamp(0).internal, 0, 0, 0, POLICIES.default.idx)
        listing = broker.list_containers_iter(100, None, None, '', '',
                                              reverse=False)
        self.assertEqual([row[0] for row in listing],
                         ['C1', 'C4', 'c1', 'c10', 'c2', 'c3'])
        listing = broker.list_containers_iter(100, None, None, '', '',
                                              reverse=True)
        self.assertEqual([row[0] for row in listing],
                         ['c3', 'c2', 'c10', 'c1', 'C4', 'C1'])
        # limit still applies when reversed
        listing = broker.list_containers_iter(2, None, None, '', '',
                                              reverse=True)
        self.assertEqual([row[0] for row in listing],
                         ['c3', 'c2'])
        # when reversed, marker/end_marker swap roles
        listing = broker.list_containers_iter(100, 'c2', 'C4', '', '',
                                              reverse=True)
        self.assertEqual([row[0] for row in listing],
                         ['c10', 'c1'])
    def test_list_container_iter_with_reserved_name(self):
        # Test AccountBroker.list_containers_iter: reserved-namespace
        # containers are hidden unless allow_reserved=True.
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(next(self.ts).internal, 0)
        broker.put_container(
            'foo', next(self.ts).internal, 0, 0, 0, POLICIES.default.idx)
        broker.put_container(
            get_reserved_name('foo'), next(self.ts).internal, 0, 0, 0,
            POLICIES.default.idx)
        listing = broker.list_containers_iter(100, None, None, '', '')
        self.assertEqual([row[0] for row in listing], ['foo'])
        listing = broker.list_containers_iter(100, None, None, '', '',
                                              reverse=True)
        self.assertEqual([row[0] for row in listing], ['foo'])
        # the reserved prefix sorts before normal names
        listing = broker.list_containers_iter(100, None, None, '', '',
                                              allow_reserved=True)
        self.assertEqual([row[0] for row in listing],
                         [get_reserved_name('foo'), 'foo'])
        listing = broker.list_containers_iter(100, None, None, '', '',
                                              reverse=True,
                                              allow_reserved=True)
        self.assertEqual([row[0] for row in listing],
                         ['foo', get_reserved_name('foo')])
    def test_reverse_prefix_delim(self):
        # Table-driven check that prefix+delimiter roll-up produces the
        # same entries in forward and reverse listings, including names
        # that collide with rolled-up subdir markers.
        expectations = [
            {
                'containers': [
                    'topdir1-subdir1,0-c1',
                    'topdir1-subdir1,1-c1',
                    'topdir1-subdir1-c1',
                ],
                'params': {
                    'prefix': 'topdir1-',
                    'delimiter': '-',
                },
                'expected': [
                    'topdir1-subdir1,0-',
                    'topdir1-subdir1,1-',
                    'topdir1-subdir1-',
                ],
            },
            {
                'containers': [
                    'topdir1-subdir1,0-c1',
                    'topdir1-subdir1,1-c1',
                    'topdir1-subdir1-c1',
                    'topdir1-subdir1.',
                    'topdir1-subdir1.-c1',
                ],
                'params': {
                    'prefix': 'topdir1-',
                    'delimiter': '-',
                },
                'expected': [
                    'topdir1-subdir1,0-',
                    'topdir1-subdir1,1-',
                    'topdir1-subdir1-',
                    'topdir1-subdir1.',
                    'topdir1-subdir1.-',
                ],
            },
            {
                'containers': [
                    'topdir1-subdir1-c1',
                    'topdir1-subdir1,0-c1',
                    'topdir1-subdir1,1-c1',
                ],
                'params': {
                    'prefix': 'topdir1-',
                    'delimiter': '-',
                    'reverse': True,
                },
                'expected': [
                    'topdir1-subdir1-',
                    'topdir1-subdir1,1-',
                    'topdir1-subdir1,0-',
                ],
            },
            {
                'containers': [
                    'topdir1-subdir1.-c1',
                    'topdir1-subdir1.',
                    'topdir1-subdir1-c1',
                    'topdir1-subdir1-',
                    'topdir1-subdir1,',
                    'topdir1-subdir1,0-c1',
                    'topdir1-subdir1,1-c1',
                ],
                'params': {
                    'prefix': 'topdir1-',
                    'delimiter': '-',
                    'reverse': True,
                },
                'expected': [
                    'topdir1-subdir1.-',
                    'topdir1-subdir1.',
                    'topdir1-subdir1-',
                    'topdir1-subdir1,1-',
                    'topdir1-subdir1,0-',
                    'topdir1-subdir1,',
                ],
            },
            {
                'containers': [
                    '1',
                    '2',
                    '3:1',
                    '3:2:1',
                    '3:2:2',
                    '3:3',
                    '4',
                ],
                'params': {
                    'prefix': '3:',
                    'delimiter': ':',
                    'reverse': True,
                },
                'expected': [
                    '3:3',
                    '3:2:',
                    '3:1',
                ],
            },
        ]
        default_listing_params = {
            'limit': 10000,
            'marker': '',
            'end_marker': None,
            'prefix': None,
            'delimiter': None,
        }
        # collect every mismatch so a failure reports all broken cases
        failures = []
        for expected in expectations:
            broker = AccountBroker(self.get_db_path(), account='a')
            broker.initialize(next(self.ts).internal, 0)
            for name in expected['containers']:
                broker.put_container(name, next(self.ts).internal, 0, 0, 0,
                                     POLICIES.default.idx)
            # commit pending file into db
            broker._commit_puts()
            params = default_listing_params.copy()
            params.update(expected['params'])
            listing = list(c[0] for c in broker.list_containers_iter(**params))
            if listing != expected['expected']:
                expected['listing'] = listing
                failures.append(
                    "With containers %(containers)r, the params %(params)r "
                    "produced %(listing)r instead of %(expected)r" % expected)
        self.assertFalse(failures, "Found the following failures:\n%s" %
                         '\n'.join(failures))
    def test_double_check_trailing_delimiter(self):
        # Test AccountBroker.list_containers_iter for an
        # account that has an odd container with a trailing delimiter
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal)
        broker.put_container('a', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('a-', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('a-a', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('a-a-a', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('a-a-b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('a-b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        # NB: ord(".") == ord("-") + 1
        broker.put_container('a.', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('a.b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('b-a', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('b-b', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('c', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        # no delimiter: everything listed verbatim
        listing = broker.list_containers_iter(15, None, None, None, None)
        self.assertEqual([row[0] for row in listing],
                         ['a', 'a-', 'a-a', 'a-a-a', 'a-a-b', 'a-b', 'a.',
                          'a.b', 'b', 'b-a', 'b-b', 'c'])
        # delimiter roll-up: 'a-' is both a real container and the subdir
        # marker for a-a/a-b, so it appears once
        listing = broker.list_containers_iter(15, None, None, '', '-')
        self.assertEqual([row[0] for row in listing],
                         ['a', 'a-', 'a.', 'a.b', 'b', 'b-', 'c'])
        listing = broker.list_containers_iter(15, None, None, 'a-', '-')
        self.assertEqual([row[0] for row in listing],
                         ['a-', 'a-a', 'a-a-', 'a-b'])
        listing = broker.list_containers_iter(15, None, None, 'b-', '-')
        self.assertEqual([row[0] for row in listing], ['b-a', 'b-b'])
    def test_chexor(self):
        # Verify the account hash: the XOR of the md5 digests of each
        # container's 'name-put-delete-objects-bytes' record, and that
        # updating a container re-XORs its old and new digests.
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal)
        broker.put_container('a', Timestamp(1).internal,
                             Timestamp(0).internal, 0, 0,
                             POLICIES.default.idx)
        broker.put_container('b', Timestamp(2).internal,
                             Timestamp(0).internal, 0, 0,
                             POLICIES.default.idx)
        text = '%s-%s' % ('a', "%s-%s-%s-%s" % (
            Timestamp(1).internal, Timestamp(0).internal, 0, 0))
        hasha = md5(text.encode('ascii'), usedforsecurity=False).digest()
        text = '%s-%s' % ('b', "%s-%s-%s-%s" % (
            Timestamp(2).internal, Timestamp(0).internal, 0, 0))
        hashb = md5(text.encode('ascii'), usedforsecurity=False).digest()
        # byte-wise XOR of the two digests, hex-encoded
        hashc = ''.join(('%02x' % (ord(a) ^ ord(b) if six.PY2 else a ^ b)
                         for a, b in zip(hasha, hashb)))
        self.assertEqual(broker.get_info()['hash'], hashc)
        broker.put_container('b', Timestamp(3).internal,
                             Timestamp(0).internal, 0, 0,
                             POLICIES.default.idx)
        text = '%s-%s' % ('b', "%s-%s-%s-%s" % (
            Timestamp(3).internal, Timestamp(0).internal, 0, 0))
        hashb = md5(text.encode('ascii'), usedforsecurity=False).digest()
        hashc = ''.join(('%02x' % (ord(a) ^ ord(b) if six.PY2 else a ^ b)
                         for a, b in zip(hasha, hashb)))
        self.assertEqual(broker.get_info()['hash'], hashc)
def test_merge_items(self):
broker1 = AccountBroker(self.get_db_path(), account='a')
broker1.initialize(Timestamp('1').internal)
broker2 = AccountBroker(self.get_db_path(), account='a')
broker2.initialize(Timestamp('1').internal)
broker1.put_container('a', Timestamp(1).internal, 0, 0, 0,
POLICIES.default.idx)
broker1.put_container('b', Timestamp(2).internal, 0, 0, 0,
POLICIES.default.idx)
id = broker1.get_info()['id']
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEqual(len(items), 2)
self.assertEqual(['a', 'b'], sorted([rec['name'] for rec in items]))
broker1.put_container('c', Timestamp(3).internal, 0, 0, 0,
POLICIES.default.idx)
broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id)
items = broker2.get_items_since(-1, 1000)
self.assertEqual(len(items), 3)
self.assertEqual(['a', 'b', 'c'],
sorted([rec['name'] for rec in items]))
    def test_merge_items_overwrite_unicode(self):
        # merge_items must overwrite (not duplicate) an existing record
        # when the replicated row is newer, including non-ASCII names
        # that round-trip through JSON.
        snowman = u'\N{SNOWMAN}'
        if six.PY2:
            snowman = snowman.encode('utf-8')
        broker1 = AccountBroker(self.get_db_path(), account='a')
        broker1.initialize(Timestamp('1').internal, 0)
        id1 = broker1.get_info()['id']
        broker2 = AccountBroker(self.get_db_path(), account='a')
        broker2.initialize(Timestamp('1').internal, 0)
        broker1.put_container(snowman, Timestamp(2).internal, 0, 1, 100,
                              POLICIES.default.idx)
        broker1.put_container('b', Timestamp(3).internal, 0, 0, 0,
                              POLICIES.default.idx)
        # json round-trip mimics how rows travel over replication
        broker2.merge_items(json.loads(json.dumps(broker1.get_items_since(
            broker2.get_sync(id1), 1000))), id1)
        broker1.put_container(snowman, Timestamp(4).internal, 0, 2, 200,
                              POLICIES.default.idx)
        broker2.merge_items(json.loads(json.dumps(broker1.get_items_since(
            broker2.get_sync(id1), 1000))), id1)
        items = broker2.get_items_since(-1, 1000)
        self.assertEqual(['b', snowman],
                         sorted([rec['name'] for rec in items]))
        items_by_name = dict((rec['name'], rec) for rec in items)
        # the newer snowman row (2 objects, 200 bytes) must win
        self.assertEqual(items_by_name[snowman]['object_count'], 2)
        self.assertEqual(items_by_name[snowman]['bytes_used'], 200)
        self.assertEqual(items_by_name['b']['object_count'], 0)
        self.assertEqual(items_by_name['b']['bytes_used'], 0)
    @with_tempdir
    def test_load_old_pending_puts(self, tempdir):
        # pending puts from pre-storage-policy account brokers won't contain
        # the storage policy index; committing such a record must default
        # its storage_policy_index to 0
        broker_path = os.path.join(tempdir, 'test-load-old.db')
        broker = AccountBroker(broker_path, account='real')
        broker.initialize(Timestamp(1).internal)
        # hand-craft an old-format (6-tuple) entry in the pending file
        with open(broker.pending_file, 'a+b') as pending:
            pending.write(b':')
            pending.write(base64.b64encode(pickle.dumps(
                # name, put_timestamp, delete_timestamp, object_count,
                # bytes_used, deleted
                ('oldcon', Timestamp(200).internal,
                 Timestamp(0).internal,
                 896, 9216695, 0))))
        broker._commit_puts()
        with broker.get() as conn:
            results = list(conn.execute('''
                SELECT name, storage_policy_index FROM container
            '''))
        self.assertEqual(len(results), 1)
        self.assertEqual(dict(results[0]),
                         {'name': 'oldcon', 'storage_policy_index': 0})
    @with_tempdir
    def test_get_info_stale_read_ok(self, tempdir):
        # test getting a stale read from the db: with stale_reads_ok=True
        # a failure to commit the pending file must not propagate and
        # get_info() still succeeds
        broker_path = os.path.join(tempdir, 'test-load-old.db')
        def mock_commit_puts():
            raise sqlite3.OperationalError('unable to open database file')
        broker = AccountBroker(broker_path, account='real',
                               stale_reads_ok=True)
        broker.initialize(Timestamp(1).internal)
        # leave an uncommitted record in the pending file
        with open(broker.pending_file, 'a+b') as pending:
            pending.write(b':')
            pending.write(base64.b64encode(pickle.dumps(
                # name, put_timestamp, delete_timestamp, object_count,
                # bytes_used, deleted
                ('oldcon', Timestamp(200).internal,
                 Timestamp(0).internal,
                 896, 9216695, 0))))
        broker._commit_puts = mock_commit_puts
        broker.get_info()
    @with_tempdir
    def test_get_info_no_stale_reads(self, tempdir):
        # with stale_reads_ok=False (the default), a failure to commit
        # pending puts must surface from get_info()
        broker_path = os.path.join(tempdir, 'test-load-old.db')
        def mock_commit_puts():
            raise sqlite3.OperationalError('unable to open database file')
        broker = AccountBroker(broker_path, account='real',
                               stale_reads_ok=False)
        broker.initialize(Timestamp(1).internal)
        # leave an uncommitted record in the pending file
        with open(broker.pending_file, 'a+b') as pending:
            pending.write(b':')
            pending.write(base64.b64encode(pickle.dumps(
                # name, put_timestamp, delete_timestamp, object_count,
                # bytes_used, deleted
                ('oldcon', Timestamp(200).internal,
                 Timestamp(0).internal,
                 896, 9216695, 0))))
        broker._commit_puts = mock_commit_puts
        with self.assertRaises(sqlite3.OperationalError) as exc_context:
            broker.get_info()
        self.assertIn('unable to open database file',
                      str(exc_context.exception))
    @patch_policies([StoragePolicy(0, 'zero', False),
                     StoragePolicy(1, 'one', True),
                     StoragePolicy(2, 'two', False),
                     StoragePolicy(3, 'three', False)])
    def test_get_policy_stats(self):
        # get_policy_stats() must track per-policy container/object/bytes
        # totals through the create, update and delete of one container
        # per policy.
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(next(self.ts).internal)
        # check empty policy_stats
        self.assertTrue(broker.empty())
        policy_stats = broker.get_policy_stats()
        self.assertEqual(policy_stats, {})
        # add some empty containers
        for policy in POLICIES:
            container_name = 'c-%s' % policy.name
            put_timestamp = next(self.ts)
            broker.put_container(container_name,
                                 put_timestamp.internal, 0,
                                 0, 0,
                                 policy.idx)
            policy_stats = broker.get_policy_stats()
            stats = policy_stats[policy.idx]
            # container_count is only present on schemas that track it
            if 'container_count' in stats:
                self.assertEqual(stats['container_count'], 1)
            self.assertEqual(stats['object_count'], 0)
            self.assertEqual(stats['bytes_used'], 0)
        # update the containers object & byte count
        for policy in POLICIES:
            container_name = 'c-%s' % policy.name
            put_timestamp = next(self.ts)
            count = policy.idx * 100  # good as any integer
            broker.put_container(container_name,
                                 put_timestamp.internal, 0,
                                 count, count,
                                 policy.idx)
            policy_stats = broker.get_policy_stats()
            stats = policy_stats[policy.idx]
            if 'container_count' in stats:
                self.assertEqual(stats['container_count'], 1)
            self.assertEqual(stats['object_count'], count)
            self.assertEqual(stats['bytes_used'], count)
        # check all the policy_stats at once
        for policy_index, stats in policy_stats.items():
            policy = POLICIES[policy_index]
            count = policy.idx * 100  # coupled with policy for test
            if 'container_count' in stats:
                self.assertEqual(stats['container_count'], 1)
            self.assertEqual(stats['object_count'], count)
            self.assertEqual(stats['bytes_used'], count)
        # now delete the containers one by one
        for policy in POLICIES:
            container_name = 'c-%s' % policy.name
            delete_timestamp = next(self.ts)
            broker.put_container(container_name,
                                 0, delete_timestamp.internal,
                                 0, 0,
                                 policy.idx)
            policy_stats = broker.get_policy_stats()
            stats = policy_stats[policy.idx]
            if 'container_count' in stats:
                self.assertEqual(stats['container_count'], 0)
            self.assertEqual(stats['object_count'], 0)
            self.assertEqual(stats['bytes_used'], 0)
    @patch_policies([StoragePolicy(0, 'zero', False),
                     StoragePolicy(1, 'one', True)])
    def test_policy_stats_tracking(self):
        # repeated puts for the same container must replace (not add to)
        # its contribution to policy_stats, leaving one row per policy
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(next(self.ts).internal)
        # policy 0
        broker.put_container('con1', next(self.ts).internal, 0, 12, 2798641, 0)
        broker.put_container('con1', next(self.ts).internal, 0, 13, 8156441, 0)
        # policy 1
        broker.put_container('con2', next(self.ts).internal, 0, 7, 5751991, 1)
        broker.put_container('con2', next(self.ts).internal, 0, 8, 6085379, 1)
        stats = broker.get_policy_stats()
        self.assertEqual(len(stats), 2)
        # only the latest put's counts should remain
        if 'container_count' in stats[0]:
            self.assertEqual(stats[0]['container_count'], 1)
        self.assertEqual(stats[0]['object_count'], 13)
        self.assertEqual(stats[0]['bytes_used'], 8156441)
        if 'container_count' in stats[1]:
            self.assertEqual(stats[1]['container_count'], 1)
        self.assertEqual(stats[1]['object_count'], 8)
        self.assertEqual(stats[1]['bytes_used'], 6085379)
        # Break encapsulation here to make sure that there's only 2 rows in
        # the stats table. It's possible that there could be 4 rows (one per
        # put_container) but that they came out in the right order so that
        # get_policy_stats() collapsed them down to the right number. To prove
        # that's not so, we have to go peek at the broker's internals.
        with broker.get() as conn:
            nrows = conn.execute(
                "SELECT COUNT(*) FROM policy_stat").fetchall()[0][0]
        self.assertEqual(nrows, 2)
@with_tempdir
def test_newid(self, tempdir):
# test DatabaseBroker.newid
db_path = os.path.join(
tempdir, "d1234", 'accounts', 'part', 'suffix', 'hsh')
os.makedirs(db_path)
broker = AccountBroker(os.path.join(db_path, 'my.db'),
account='a')
broker.initialize(Timestamp('1').internal, 0)
id = broker.get_info()['id']
broker.newid('someid')
self.assertNotEqual(id, broker.get_info()['id'])
# ends in the device name (from the path) unless it's an old
# container with just a uuid4 (tested in legecy broker
# tests e.g *BeforeMetaData)
if len(id) > 36:
self.assertTrue(id.endswith('d1234'))
# But the newid'ed version will now have the decide
self.assertTrue(broker.get_info()['id'].endswith('d1234'))
# if we move the broker (happens after an rsync)
new_db_path = os.path.join(
tempdir, "d5678", 'contianers', 'part', 'suffix', 'hsh')
os.makedirs(new_db_path)
shutil.copy(os.path.join(db_path, 'my.db'),
os.path.join(new_db_path, 'my.db'))
new_broker = AccountBroker(os.path.join(new_db_path, 'my.db'),
account='a')
new_broker.newid(id)
# ends in the device name (from the path)
self.assertFalse(new_broker.get_info()['id'].endswith('d1234'))
self.assertTrue(new_broker.get_info()['id'].endswith('d5678'))
def prespi_AccountBroker_initialize(self, conn, put_timestamp, **kwargs):
    """
    The AccountBroker initialize() function before we added the
    policy stat table. Used by test_policy_table_creation() to
    make sure that the AccountBroker will correctly add the table
    for cases where the DB existed before the policy support was added.

    Monkey-patched in as AccountBroker._initialize by
    TestAccountBrokerBeforeSPI.setUp().

    :param conn: DB connection object
    :param put_timestamp: put timestamp
    :raises ValueError: if no account has been set on the broker
    """
    if not self.account:
        raise ValueError(
            'Attempting to create a new database with no account set')
    self.create_container_table(conn)
    self.create_account_stat_table(conn, put_timestamp)
def premetadata_create_account_stat_table(self, conn, put_timestamp):
    """
    Copied from AccountBroker before the metadata column was
    added; used for testing with TestAccountBrokerBeforeMetadata.

    Create account_stat table which is specific to the account DB.

    Monkey-patched in as AccountBroker.create_account_stat_table by
    TestAccountBrokerBeforeMetadata.setUp().

    :param conn: DB connection object
    :param put_timestamp: put timestamp
    """
    conn.executescript('''
        CREATE TABLE account_stat (
            account TEXT,
            created_at TEXT,
            put_timestamp TEXT DEFAULT '0',
            delete_timestamp TEXT DEFAULT '0',
            container_count INTEGER,
            object_count INTEGER DEFAULT 0,
            bytes_used INTEGER DEFAULT 0,
            hash TEXT default '00000000000000000000000000000000',
            id TEXT,
            status TEXT DEFAULT '',
            status_changed_at TEXT DEFAULT '0'
        );
        INSERT INTO account_stat (container_count) VALUES (0);
    ''')
    # populate the singleton account_stat row
    conn.execute('''
        UPDATE account_stat SET account = ?, created_at = ?, id = ?,
               put_timestamp = ?
        ''', (self.account, Timestamp.now().internal, str(uuid4()),
              put_timestamp))
class TestCommonAccountBroker(test_db.TestExampleBroker):
    """Run the generic broker tests from test_db against AccountBroker."""

    broker_class = AccountBroker
    server_type = 'account'

    def setUp(self):
        super(TestCommonAccountBroker, self).setUp()
        # exercise a different, randomly chosen storage policy per run
        self.policy = random.choice(list(POLICIES))

    def put_item(self, broker, timestamp):
        # a put with a zero delete_timestamp records a live container
        broker.put_container('test', timestamp, 0, 0, 0, int(self.policy))

    def delete_item(self, broker, timestamp):
        # a zero put_timestamp with a delete_timestamp tombstones it
        broker.put_container('test', 0, timestamp, 0, 0, int(self.policy))
class TestAccountBrokerBeforeMetadata(TestAccountBroker):
    """
    Tests for AccountBroker against databases created before
    the metadata column was added.

    setUp() monkey-patches in the pre-metadata schema-creation function
    so every inherited test runs against the old schema; tearDown()
    restores the real one and verifies migration added the column.
    """
    def setUp(self):
        super(TestAccountBroker, self).setUp()
        # tests seem to assume x-timestamp was set by the proxy before "now"
        self.ts = make_timestamp_iter(offset=-1)
        self._imported_create_account_stat_table = \
            AccountBroker.create_account_stat_table
        AccountBroker.create_account_stat_table = \
            premetadata_create_account_stat_table
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal)
        # sanity: the patched schema really lacks the metadata column
        exc = None
        with broker.get() as conn:
            try:
                conn.execute('SELECT metadata FROM account_stat')
            except BaseException as err:
                exc = err
        self.assertIn('no such column: metadata', str(exc))
    def tearDown(self):
        AccountBroker.create_account_stat_table = \
            self._imported_create_account_stat_table
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal)
        # the restored schema must include the metadata column
        with broker.get() as conn:
            conn.execute('SELECT metadata FROM account_stat')
        super(TestAccountBrokerBeforeMetadata, self).tearDown()
def prespi_create_container_table(self, conn):
    """
    Copied from AccountBroker before the storage_policy_index column was
    added; used for testing with TestAccountBrokerBeforeSPI.

    Create container table which is specific to the account DB.

    Monkey-patched in as AccountBroker.create_container_table by
    TestAccountBrokerBeforeSPI.setUp().

    :param conn: DB connection object
    """
    conn.executescript("""
        CREATE TABLE container (
            ROWID INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT,
            put_timestamp TEXT,
            delete_timestamp TEXT,
            object_count INTEGER,
            bytes_used INTEGER,
            deleted INTEGER DEFAULT 0
        );

        CREATE INDEX ix_container_deleted_name ON
            container (deleted, name);

        CREATE TRIGGER container_insert AFTER INSERT ON container
        BEGIN
            UPDATE account_stat
            SET container_count = container_count + (1 - new.deleted),
                object_count = object_count + new.object_count,
                bytes_used = bytes_used + new.bytes_used,
                hash = chexor(hash, new.name,
                              new.put_timestamp || '-' ||
                                new.delete_timestamp || '-' ||
                                new.object_count || '-' || new.bytes_used);
        END;

        CREATE TRIGGER container_update BEFORE UPDATE ON container
        BEGIN
            SELECT RAISE(FAIL, 'UPDATE not allowed; DELETE and INSERT');
        END;


        CREATE TRIGGER container_delete AFTER DELETE ON container
        BEGIN
            UPDATE account_stat
            SET container_count = container_count - (1 - old.deleted),
                object_count = object_count - old.object_count,
                bytes_used = bytes_used - old.bytes_used,
                hash = chexor(hash, old.name,
                              old.put_timestamp || '-' ||
                                old.delete_timestamp || '-' ||
                                old.object_count || '-' || old.bytes_used);
        END;
    """)
class TestAccountBrokerBeforeSPI(TestAccountBroker):
    """
    Tests for AccountBroker against databases created before
    the storage_policy_index column was added.

    setUp() monkey-patches in the pre-SPI schema-creation functions so
    every inherited test runs against the old schema.
    """
    def setUp(self):
        super(TestAccountBrokerBeforeSPI, self).setUp()
        # tests seem to assume x-timestamp was set by the proxy before "now"
        self.ts = make_timestamp_iter(offset=-1)
        self._imported_create_container_table = \
            AccountBroker.create_container_table
        AccountBroker.create_container_table = \
            prespi_create_container_table
        self._imported_initialize = AccountBroker._initialize
        AccountBroker._initialize = prespi_AccountBroker_initialize
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal)
        # sanity: the patched schema lacks the SPI column...
        exc = None
        with broker.get() as conn:
            try:
                conn.execute('SELECT storage_policy_index FROM container')
            except BaseException as err:
                exc = err
        self.assertIn('no such column: storage_policy_index', str(exc))
        # ...and has no policy_stat table either
        with broker.get() as conn:
            try:
                conn.execute('SELECT * FROM policy_stat')
            except sqlite3.OperationalError as err:
                self.assertIn('no such table: policy_stat', str(err))
            else:
                self.fail('database created with policy_stat table')
    def tearDown(self):
        # restore the real schema functions and verify migration added
        # the storage_policy_index column
        AccountBroker.create_container_table = \
            self._imported_create_container_table
        AccountBroker._initialize = self._imported_initialize
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal)
        with broker.get() as conn:
            conn.execute('SELECT storage_policy_index FROM container')
        super(TestAccountBrokerBeforeSPI, self).tearDown()
    @with_tempdir
    def test_policy_table_migration(self, tempdir):
        # a PUT (not a read) against a pre-SPI DB is what triggers
        # creation of the policy_stat table
        db_path = os.path.join(tempdir, 'account.db')

        # first init an acct DB without the policy_stat table present
        broker = AccountBroker(db_path, account='a')
        broker.initialize(Timestamp('1').internal)

        def confirm_no_table():
            with broker.get() as conn:
                try:
                    conn.execute('''
                        SELECT * FROM policy_stat
                        ''').fetchone()[0]
                except sqlite3.OperationalError as err:
                    # confirm that the table really isn't there
                    self.assertIn('no such table: policy_stat', str(err))
                else:
                    self.fail('broker did not raise sqlite3.OperationalError '
                              'trying to select from policy_stat table!')

        confirm_no_table()

        # make sure we can HEAD this thing w/o the table
        stats = broker.get_policy_stats()
        self.assertEqual(len(stats), 0)

        confirm_no_table()

        # now do a PUT to create the table
        broker.put_container('o', Timestamp.now().internal, 0, 0, 0,
                             POLICIES.default.idx)
        broker._commit_puts_stale_ok()

        # now confirm that the table was created
        with broker.get() as conn:
            conn.execute('SELECT * FROM policy_stat')

        stats = broker.get_policy_stats()
        self.assertEqual(len(stats), 1)
    @with_tempdir
    def test_policy_table_migration_in_get_policy_stats(self, tempdir):
        # get_policy_stats() normally tolerates a missing policy_stat
        # table, but with do_migrations=True (as the auditor passes) it
        # creates the table itself
        db_path = os.path.join(tempdir, 'account.db')

        # first init an acct DB without the policy_stat table present
        broker = AccountBroker(db_path, account='a')
        broker.initialize(Timestamp('1').internal)

        # And manually add some container records for the default policy
        with broker.get() as conn:
            conn.execute('''
                INSERT INTO container (
                    name, put_timestamp, delete_timestamp,
                    object_count, bytes_used
                ) VALUES (
                    'c', '%s', '0', 0, 0
                )''' % Timestamp.now().internal).fetchone()
            conn.commit()

        def confirm_no_table():
            with broker.get() as conn:
                try:
                    conn.execute('''
                        SELECT * FROM policy_stat
                        ''').fetchone()[0]
                except sqlite3.OperationalError as err:
                    # confirm that the table really isn't there
                    self.assertIn('no such table: policy_stat', str(err))
                else:
                    self.fail('broker did not raise sqlite3.OperationalError '
                              'trying to select from policy_stat table!')

        confirm_no_table()

        # make sure we can HEAD this thing w/o the table
        stats = broker.get_policy_stats()
        self.assertEqual(len(stats), 0)

        confirm_no_table()

        # but if we pass in do_migrations (like in the auditor), it comes in
        stats = broker.get_policy_stats(do_migrations=True)
        self.assertEqual(len(stats), 1)

        # double check that it really exists
        with broker.get() as conn:
            conn.execute('SELECT * FROM policy_stat')
    @patch_policies
    @with_tempdir
    def test_container_table_migration(self, tempdir):
        # a PUT with a non-zero storage_policy_index must add the SPI
        # column to a legacy container table and backfill policy_stats
        # for the pre-existing rows
        db_path = os.path.join(tempdir, 'account.db')

        # first init an acct DB without the policy_stat table present
        broker = AccountBroker(db_path, account='a')
        broker.initialize(Timestamp('1').internal)
        with broker.get() as conn:
            try:
                conn.execute('''
                    SELECT storage_policy_index FROM container
                    ''').fetchone()[0]
            except sqlite3.OperationalError as err:
                # confirm that the table doesn't have this column
                self.assertIn('no such column: storage_policy_index',
                              str(err))
            else:
                self.fail('broker did not raise sqlite3.OperationalError '
                          'trying to select from storage_policy_index '
                          'from container table!')

        # manually insert an existing row to avoid migration
        timestamp = Timestamp.now().internal
        with broker.get() as conn:
            conn.execute('''
                INSERT INTO container (name, put_timestamp,
                    delete_timestamp, object_count, bytes_used,
                    deleted)
                VALUES (?, ?, ?, ?, ?, ?)
            ''', ('test_name', timestamp, 0, 1, 2, 0))
            conn.commit()

        # make sure we can iter containers without the migration
        for c in broker.list_containers_iter(1, None, None, None, None):
            self.assertEqual(c, ('test_name', 1, 2, timestamp, 0))

        # stats table is mysteriously empty...
        stats = broker.get_policy_stats()
        self.assertEqual(len(stats), 0)

        # now do a PUT with a different value for storage_policy_index
        # which will update the DB schema as well as update policy_stats
        # for legacy containers in the DB (those without an SPI)
        other_policy = [p for p in POLICIES if p.idx != 0][0]
        broker.put_container('test_second', Timestamp.now().internal,
                             0, 3, 4, other_policy.idx)
        broker._commit_puts_stale_ok()

        with broker.get() as conn:
            rows = conn.execute('''
                SELECT name, storage_policy_index FROM container
                ''').fetchall()
            for row in rows:
                if row[0] == 'test_name':
                    self.assertEqual(row[1], 0)
                else:
                    self.assertEqual(row[1], other_policy.idx)

        # we should have stats for both containers
        stats = broker.get_policy_stats()
        self.assertEqual(len(stats), 2)
        if 'container_count' in stats[0]:
            self.assertEqual(stats[0]['container_count'], 1)
        self.assertEqual(stats[0]['object_count'], 1)
        self.assertEqual(stats[0]['bytes_used'], 2)
        if 'container_count' in stats[1]:
            self.assertEqual(stats[1]['container_count'], 1)
        self.assertEqual(stats[1]['object_count'], 3)
        self.assertEqual(stats[1]['bytes_used'], 4)

        # now lets delete a container and make sure policy_stats is OK
        with broker.get() as conn:
            conn.execute('''
                DELETE FROM container WHERE name = ?
                ''', ('test_name',))
            conn.commit()
        stats = broker.get_policy_stats()
        self.assertEqual(len(stats), 2)
        if 'container_count' in stats[0]:
            self.assertEqual(stats[0]['container_count'], 0)
        self.assertEqual(stats[0]['object_count'], 0)
        self.assertEqual(stats[0]['bytes_used'], 0)
        if 'container_count' in stats[1]:
            self.assertEqual(stats[1]['container_count'], 1)
        self.assertEqual(stats[1]['object_count'], 3)
        self.assertEqual(stats[1]['bytes_used'], 4)
@with_tempdir
def test_half_upgraded_database(self, tempdir):
    """
    If the policy_stat schema migration blows up part-way through a
    commit, nothing may be left committed, and a later commit must be
    able to complete the migration cleanly.
    """
    db_path = os.path.join(tempdir, 'account.db')
    broker = AccountBroker(db_path, account='a')
    broker.initialize(next(self.ts).internal)
    self.assertTrue(broker.empty())
    # add a container (to pending file)
    broker.put_container('c', next(self.ts).internal, 0, 0, 0,
                         POLICIES.default.idx)
    real_get = broker.get
    called = []

    class ExpectedError(Exception):
        '''Expected error to be raised during the test'''

    @contextmanager
    def mock_get():
        # Wrap the real connection: reads/execute pass through, but the
        # first executescript (the migration script) is swallowed after
        # being recorded, and any later executescript raises -- so the
        # migration is attempted but never applied.
        with real_get() as conn:

            class MockConn(object):
                def __init__(self, real_conn):
                    self.real_conn = real_conn

                @property
                def cursor(self):
                    return self.real_conn.cursor

                @property
                def execute(self):
                    return self.real_conn.execute

                def executescript(self, script):
                    if called:
                        raise ExpectedError('kaboom!')
                    called.append(script)
            yield MockConn(conn)
    broker.get = mock_get
    try:
        broker._commit_puts()
    except ExpectedError:
        pass
    else:
        self.fail('mock exception was not raised')
    self.assertEqual(len(called), 1)
    self.assertIn('CREATE TABLE policy_stat', called[0])
    # nothing was committed
    broker = AccountBroker(db_path, account='a')
    with broker.get() as conn:
        try:
            conn.execute('SELECT * FROM policy_stat')
        except sqlite3.OperationalError as err:
            self.assertIn('no such table: policy_stat', str(err))
        else:
            self.fail('half upgraded database!')
        container_count = conn.execute(
            'SELECT count(*) FROM container').fetchone()[0]
        self.assertEqual(container_count, 0)
    # try again to commit puts (empty() drives _commit_puts with the
    # real, unmocked connection this time)
    self.assertFalse(broker.empty())
    # full migration successful
    with broker.get() as conn:
        conn.execute('SELECT * FROM policy_stat')
        conn.execute('SELECT storage_policy_index FROM container')
@with_tempdir
def test_pre_storage_policy_replication(self, tempdir):
    """
    Replicating rows from a legacy (pre-storage-policy) database into a
    fresh one must land the container and complete the schema migration.
    """
    # make two account database "replicas"
    old_broker = AccountBroker(os.path.join(tempdir, 'old_account.db'),
                               account='a')
    old_broker.initialize(next(self.ts).internal)
    new_broker = AccountBroker(os.path.join(tempdir, 'new_account.db'),
                               account='a')
    new_broker.initialize(next(self.ts).internal)
    timestamp = next(self.ts).internal
    # manually insert an existing row to avoid migration for old database
    # (a put_container would trigger the schema upgrade)
    with old_broker.get() as conn:
        conn.execute('''
            INSERT INTO container (name, put_timestamp,
                delete_timestamp, object_count, bytes_used,
                deleted)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', ('test_name', timestamp, 0, 1, 2, 0))
        conn.commit()
    # get replication info and rows from old database
    info = old_broker.get_info()
    rows = old_broker.get_items_since(0, 10)
    # "send" replication rows to new database
    new_broker.merge_items(rows, info['id'])
    # make sure "test_name" container in new database
    self.assertEqual(new_broker.get_info()['container_count'], 1)
    for c in new_broker.list_containers_iter(1, None, None, None, None):
        self.assertEqual(c, ('test_name', 1, 2, timestamp, 0))
    # full migration successful
    with new_broker.get() as conn:
        conn.execute('SELECT * FROM policy_stat')
        conn.execute('SELECT storage_policy_index FROM container')
def pre_track_containers_create_policy_stat(self, conn):
    """
    Copied from AccountBroker before the container_count column was
    added.
    Create policy_stat table which is specific to the account DB.
    Not a part of Pluggable Back-ends, internal to the baseline code.

    Monkey-patched over AccountBroker.create_policy_stat_table by the
    setup mixin below so new databases get the legacy schema.

    :param conn: DB connection object
    """
    conn.executescript("""
        CREATE TABLE policy_stat (
            storage_policy_index INTEGER PRIMARY KEY,
            object_count INTEGER DEFAULT 0,
            bytes_used INTEGER DEFAULT 0
        );
        INSERT OR IGNORE INTO policy_stat (
            storage_policy_index, object_count, bytes_used
        )
        SELECT 0, object_count, bytes_used
        FROM account_stat
        WHERE container_count > 0;
    """)
def pre_track_containers_create_container_table(self, conn):
    """
    Copied from AccountBroker before the container_count column was
    added (using old stat trigger script)
    Create container table which is specific to the account DB.

    Monkey-patched over AccountBroker.create_container_table by the
    setup mixin below so policy_stat is maintained by the old triggers.

    :param conn: DB connection object
    """
    # revert to old trigger script to support one of the tests
    OLD_POLICY_STAT_TRIGGER_SCRIPT = """
        CREATE TRIGGER container_insert_ps AFTER INSERT ON container
        BEGIN
            INSERT OR IGNORE INTO policy_stat
                (storage_policy_index, object_count, bytes_used)
                VALUES (new.storage_policy_index, 0, 0);
            UPDATE policy_stat
            SET object_count = object_count + new.object_count,
                bytes_used = bytes_used + new.bytes_used
            WHERE storage_policy_index = new.storage_policy_index;
        END;
        CREATE TRIGGER container_delete_ps AFTER DELETE ON container
        BEGIN
            UPDATE policy_stat
            SET object_count = object_count - old.object_count,
                bytes_used = bytes_used - old.bytes_used
            WHERE storage_policy_index = old.storage_policy_index;
        END;
    """
    conn.executescript("""
        CREATE TABLE container (
            ROWID INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT,
            put_timestamp TEXT,
            delete_timestamp TEXT,
            object_count INTEGER,
            bytes_used INTEGER,
            deleted INTEGER DEFAULT 0,
            storage_policy_index INTEGER DEFAULT 0
        );

        CREATE INDEX ix_container_deleted_name ON
            container (deleted, name);

        CREATE TRIGGER container_insert AFTER INSERT ON container
        BEGIN
            UPDATE account_stat
            SET container_count = container_count + (1 - new.deleted),
                object_count = object_count + new.object_count,
                bytes_used = bytes_used + new.bytes_used,
                hash = chexor(hash, new.name,
                              new.put_timestamp || '-' ||
                                new.delete_timestamp || '-' ||
                                new.object_count || '-' || new.bytes_used);
        END;

        CREATE TRIGGER container_update BEFORE UPDATE ON container
        BEGIN
            SELECT RAISE(FAIL, 'UPDATE not allowed; DELETE and INSERT');
        END;

        CREATE TRIGGER container_delete AFTER DELETE ON container
        BEGIN
            UPDATE account_stat
            SET container_count = container_count - (1 - old.deleted),
                object_count = object_count - old.object_count,
                bytes_used = bytes_used - old.bytes_used,
                hash = chexor(hash, old.name,
                              old.put_timestamp || '-' ||
                                old.delete_timestamp || '-' ||
                                old.object_count || '-' || old.bytes_used);
        END;
    """ + OLD_POLICY_STAT_TRIGGER_SCRIPT)
class AccountBrokerPreTrackContainerCountSetup(test_db.TestDbBase):
    """
    Mixin that patches AccountBroker so new databases are created with
    the legacy schema (no container_count column in policy_stat), and
    verifies the pre-/post-migration state around each test.
    """

    def assertUnmigrated(self, broker):
        """Assert the broker's DB still lacks policy_stat.container_count."""
        with broker.get() as conn:
            try:
                conn.execute('''
                    SELECT container_count FROM policy_stat
                    ''').fetchone()[0]
            except sqlite3.OperationalError as err:
                # confirm that the column really isn't there
                self.assertIn('no such column: container_count', str(err))
            else:
                self.fail('broker did not raise sqlite3.OperationalError '
                          'trying to select container_count from policy_stat!')

    def setUp(self):
        super(AccountBrokerPreTrackContainerCountSetup, self).setUp()
        # use old version of policy_stat
        self._imported_create_policy_stat_table = \
            AccountBroker.create_policy_stat_table
        AccountBroker.create_policy_stat_table = \
            pre_track_containers_create_policy_stat
        # use old container table so we use old trigger for
        # updating policy_stat
        self._imported_create_container_table = \
            AccountBroker.create_container_table
        AccountBroker.create_container_table = \
            pre_track_containers_create_container_table
        # sanity: a broker built under the patched methods is unmigrated
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal)
        self.assertUnmigrated(broker)
        # tests seem to assume x-timestamp was set by the proxy before "now"
        self.ts = make_timestamp_iter(offset=-1)
        # NOTE(review): self.db_path is assigned here but self.broker is
        # built from get_db_path() -- confirm which path subclasses rely on.
        self.db_path = os.path.join(self.testdir, 'sda', 'accounts',
                                    '0', '0', '0', 'test.db')
        self.broker = AccountBroker(self.get_db_path(), account='a')
        self.broker.initialize(next(self.ts).internal)
        # Common sanity-check that our starting, pre-migration state correctly
        # does not have the container_count column.
        self.assertUnmigrated(self.broker)

    def tearDown(self):
        self.restore_account_broker()
        # with the real (unpatched) broker methods, a fresh initialize
        # must produce the fully migrated schema
        broker = AccountBroker(self.get_db_path(), account='a')
        broker.initialize(Timestamp('1').internal)
        with broker.get() as conn:
            conn.execute('SELECT container_count FROM policy_stat')
        super(AccountBrokerPreTrackContainerCountSetup, self).tearDown()

    def restore_account_broker(self):
        """Undo the class-level monkey-patching done in setUp()."""
        AccountBroker.create_policy_stat_table = \
            self._imported_create_policy_stat_table
        AccountBroker.create_container_table = \
            self._imported_create_container_table
@patch_policies([StoragePolicy(0, 'zero', False),
                 StoragePolicy(1, 'one', True),
                 StoragePolicy(2, 'two', False),
                 StoragePolicy(3, 'three', False)])
class TestAccountBrokerBeforePerPolicyContainerTrack(
        AccountBrokerPreTrackContainerCountSetup, TestAccountBroker):
    """
    Tests for AccountBroker against databases created before
    the container_count column was added to the policy_stat table.
    """

    def test_policy_table_cont_count_do_migrations(self):
        """get_policy_stats(do_migrations=True) adds and fills
        container_count; without it, the stat is absent."""
        # add a few containers
        num_containers = 8
        policies = itertools.cycle(POLICIES)
        per_policy_container_counts = defaultdict(int)

        # add a few container entries
        for i in range(num_containers):
            name = 'test-container-%02d' % i
            policy = next(policies)
            self.broker.put_container(name, next(self.ts).internal,
                                      0, 0, 0, int(policy))
            per_policy_container_counts[int(policy)] += 1

        total_container_count = self.broker.get_info()['container_count']
        self.assertEqual(total_container_count, num_containers)

        # still un-migrated
        self.assertUnmigrated(self.broker)

        policy_stats = self.broker.get_policy_stats()
        self.assertEqual(len(policy_stats), len(per_policy_container_counts))
        for stats in policy_stats.values():
            self.assertEqual(stats['object_count'], 0)
            self.assertEqual(stats['bytes_used'], 0)
            # un-migrated dbs should not return container_count
            self.assertNotIn('container_count', stats)

        # now force the migration
        policy_stats = self.broker.get_policy_stats(do_migrations=True)
        self.assertEqual(len(policy_stats), len(per_policy_container_counts))
        for policy_index, stats in policy_stats.items():
            self.assertEqual(stats['object_count'], 0)
            self.assertEqual(stats['bytes_used'], 0)
            self.assertEqual(stats['container_count'],
                             per_policy_container_counts[policy_index])

    def test_policy_table_cont_count_update_get_stats(self):
        """container_count stays correct through deletes and re-puts."""
        # add a few container entries; policy index N gets N + 1 containers
        for policy in POLICIES:
            for i in range(0, policy.idx + 1):
                # (dead "container_name" assignment removed -- the put
                # below builds its own name)
                self.broker.put_container('c%s_%s' % (policy.idx, i),
                                          0, 0, 0, 0, policy.idx)
        # _commit_puts_stale_ok() called by get_policy_stats()

        # calling get_policy_stats() with do_migrations will alter the table
        # and populate it based on what's in the container table now
        stats = self.broker.get_policy_stats(do_migrations=True)

        # now confirm that the column was created
        with self.broker.get() as conn:
            conn.execute('SELECT container_count FROM policy_stat')

        # confirm stats reporting back correctly
        self.assertEqual(len(stats), 4)
        for policy in POLICIES:
            self.assertEqual(stats[policy.idx]['container_count'],
                             policy.idx + 1)

        # now delete one from each policy and check the stats
        with self.broker.get() as conn:
            for policy in POLICIES:
                container_name = 'c%s_0' % policy.idx
                conn.execute('''
                    DELETE FROM container
                    WHERE name = ?
                ''', (container_name,))
            conn.commit()

        stats = self.broker.get_policy_stats()
        self.assertEqual(len(stats), 4)
        for policy in POLICIES:
            self.assertEqual(stats[policy.idx]['container_count'],
                             policy.idx)

        # now put them back and make sure things are still cool
        for policy in POLICIES:
            container_name = 'c%s_0' % policy.idx
            self.broker.put_container(container_name, 0, 0, 0, 0, policy.idx)
        # _commit_puts_stale_ok() called by get_policy_stats()

        # confirm stats reporting back correctly
        stats = self.broker.get_policy_stats()
        self.assertEqual(len(stats), 4)
        for policy in POLICIES:
            self.assertEqual(stats[policy.idx]['container_count'],
                             policy.idx + 1)

    def test_per_policy_cont_count_migration_with_deleted(self):
        """Deleted containers must not be counted by the migration."""
        num_containers = 15
        policies = itertools.cycle(POLICIES)
        container_policy_map = {}

        # add a few container entries
        for i in range(num_containers):
            name = 'test-container-%02d' % i
            policy = next(policies)
            self.broker.put_container(name, next(self.ts).internal,
                                      0, 0, 0, int(policy))
            # keep track of stub container policies
            container_policy_map[name] = policy

        # delete about half of the containers
        for i in range(0, num_containers, 2):
            name = 'test-container-%02d' % i
            policy = container_policy_map[name]
            self.broker.put_container(name, 0, next(self.ts).internal,
                                      0, 0, int(policy))

        total_container_count = self.broker.get_info()['container_count']
        self.assertEqual(total_container_count, num_containers // 2)

        # trigger migration
        policy_info = self.broker.get_policy_stats(do_migrations=True)
        self.assertEqual(len(policy_info), min(num_containers, len(POLICIES)))
        policy_container_count = sum(p['container_count'] for p in
                                     policy_info.values())
        self.assertEqual(total_container_count, policy_container_count)

    def test_per_policy_cont_count_migration_with_single_policy(self):
        """Migration is correct when only the legacy policy exists."""
        num_containers = 100
        with patch_policies(legacy_only=True):
            policy = POLICIES[0]
            # add a few container entries
            for i in range(num_containers):
                name = 'test-container-%02d' % i
                self.broker.put_container(name, next(self.ts).internal,
                                          0, 0, 0, int(policy))

            # delete about half of the containers
            for i in range(0, num_containers, 2):
                name = 'test-container-%02d' % i
                self.broker.put_container(name, 0, next(self.ts).internal,
                                          0, 0, int(policy))

            total_container_count = self.broker.get_info()['container_count']
            # trigger migration
            policy_info = self.broker.get_policy_stats(do_migrations=True)
        self.assertEqual(total_container_count, num_containers / 2)
        self.assertEqual(len(policy_info), 1)
        policy_container_count = sum(p['container_count'] for p in
                                     policy_info.values())
        self.assertEqual(total_container_count, policy_container_count)

    def test_per_policy_cont_count_migration_impossible(self):
        """Migration copes with a container whose policy index is not
        configured."""
        with patch_policies(legacy_only=True):
            # add a container for the legacy policy
            policy = POLICIES[0]
            self.broker.put_container('test-legacy-container',
                                      next(self.ts).internal, 0, 0, 0,
                                      int(policy))
            # now create an impossible situation by adding a container for a
            # policy index that doesn't exist
            non_existent_policy_index = int(policy) + 1
            self.broker.put_container('test-non-existent-policy',
                                      next(self.ts).internal, 0, 0, 0,
                                      non_existent_policy_index)
            total_container_count = self.broker.get_info()['container_count']
            # trigger migration
            policy_info = self.broker.get_policy_stats(do_migrations=True)
        self.assertEqual(total_container_count, 2)
        self.assertEqual(len(policy_info), 2)
        for policy_stat in policy_info.values():
            self.assertEqual(policy_stat['container_count'], 1)

    def test_migrate_add_storage_policy_index_fail(self):
        """A failure creating policy_stat propagates out of the
        storage_policy_index migration."""
        broker = AccountBroker(self.db_path, account='a')
        broker.initialize(Timestamp('1').internal)
        with mock.patch.object(
                broker, 'create_policy_stat_table',
                side_effect=sqlite3.OperationalError('foobar')):
            with broker.get() as conn:
                self.assertRaisesRegex(
                    sqlite3.OperationalError, '.*foobar.*',
                    broker._migrate_add_storage_policy_index,
                    conn=conn)
| swift-master | test/unit/account/test_backend.py |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import time
import unittest
import json
import mock
from swift.account import utils, backend
from swift.common.storage_policy import POLICIES, StoragePolicy
from swift.common.swob import Request
from swift.common.utils import Timestamp
from swift.common.header_key_dict import HeaderKeyDict
from swift.common.request_helpers import get_reserved_name
from test.unit import patch_policies, make_timestamp_iter
from test.unit.common.test_db import TestDbBase
class TestFakeAccountBroker(unittest.TestCase):
    """Sanity checks for the stub broker used for nonexistent accounts."""

    def test_fake_broker_get_info(self):
        stub = utils.FakeAccountBroker()
        frozen_now = time.time()
        # freeze the clock so created_at/put_timestamp are predictable
        with mock.patch('time.time', new=lambda: frozen_now):
            info = stub.get_info()
        internal = Timestamp(frozen_now).internal
        self.assertEqual(info, {
            'container_count': 0,
            'object_count': 0,
            'bytes_used': 0,
            'created_at': internal,
            'put_timestamp': internal,
        })

    def test_fake_broker_list_containers_iter(self):
        self.assertEqual(utils.FakeAccountBroker().list_containers_iter(), [])

    def test_fake_broker_metadata(self):
        self.assertEqual(utils.FakeAccountBroker().metadata, {})

    def test_fake_broker_get_policy_stats(self):
        self.assertEqual(utils.FakeAccountBroker().get_policy_stats(), {})
class TestAccountUtils(TestDbBase):
    """Tests for swift.account.utils response-building helpers."""

    server_type = 'account'

    def setUp(self):
        super(TestAccountUtils, self).setUp()
        self.ts = make_timestamp_iter()

    def test_get_response_headers_fake_broker(self):
        """A FakeAccountBroker yields all-zero stats stamped with "now"."""
        broker = utils.FakeAccountBroker()
        now = time.time()
        expected = {
            'X-Account-Container-Count': 0,
            'X-Account-Object-Count': 0,
            'X-Account-Bytes-Used': 0,
            'X-Timestamp': Timestamp(now).normal,
            'X-PUT-Timestamp': Timestamp(now).normal,
        }
        with mock.patch('time.time', new=lambda: now):
            resp_headers = utils.get_response_headers(broker)
        self.assertEqual(resp_headers, expected)

    def test_get_response_headers_empty_memory_broker(self):
        """A real but empty broker produces the same zeroed headers."""
        broker = backend.AccountBroker(self.db_path, account='a')
        now = time.time()
        with mock.patch('time.time', new=lambda: now):
            broker.initialize(Timestamp(now).internal)
        expected = {
            'X-Account-Container-Count': 0,
            'X-Account-Object-Count': 0,
            'X-Account-Bytes-Used': 0,
            'X-Timestamp': Timestamp(now).normal,
            'X-PUT-Timestamp': Timestamp(now).normal,
        }
        resp_headers = utils.get_response_headers(broker)
        self.assertEqual(resp_headers, expected)

    @patch_policies
    def test_get_response_headers_with_data(self):
        """Totals plus per-policy headers (incl. Container-Count) are
        emitted when the broker has per-policy stats."""
        broker = backend.AccountBroker(self.db_path, account='a')
        now = time.time()
        with mock.patch('time.time', new=lambda: now):
            broker.initialize(Timestamp(now).internal)
        # add some container data
        ts = (Timestamp(t).internal for t in itertools.count(int(now)))
        total_containers = 0
        total_objects = 0
        total_bytes = 0
        for policy in POLICIES:
            delete_timestamp = next(ts)
            put_timestamp = next(ts)
            object_count = int(policy)
            bytes_used = int(policy) * 10
            broker.put_container('c-%s' % policy.name, put_timestamp,
                                 delete_timestamp, object_count, bytes_used,
                                 int(policy))
            total_containers += 1
            total_objects += object_count
            total_bytes += bytes_used
        expected = HeaderKeyDict({
            'X-Account-Container-Count': total_containers,
            'X-Account-Object-Count': total_objects,
            'X-Account-Bytes-Used': total_bytes,
            'X-Timestamp': Timestamp(now).normal,
            'X-PUT-Timestamp': Timestamp(now).normal,
        })
        for policy in POLICIES:
            prefix = 'X-Account-Storage-Policy-%s-' % policy.name
            expected[prefix + 'Container-Count'] = 1
            expected[prefix + 'Object-Count'] = int(policy)
            expected[prefix + 'Bytes-Used'] = int(policy) * 10
        resp_headers = utils.get_response_headers(broker)
        per_policy_container_headers = [
            h for h in resp_headers if
            h.lower().startswith('x-account-storage-policy-') and
            h.lower().endswith('-container-count')]
        self.assertTrue(per_policy_container_headers)
        # every produced header must match, and nothing expected may be
        # left over
        for key, value in resp_headers.items():
            expected_value = expected.pop(key)
            self.assertEqual(expected_value, str(value),
                             'value for %r was %r not %r' % (
                                 key, value, expected_value))
        self.assertFalse(expected)

    @patch_policies
    def test_get_response_headers_with_legacy_data(self):
        """Brokers whose policy stats lack container_count (legacy DBs)
        must not emit per-policy Container-Count headers."""
        broker = backend.AccountBroker(self.db_path, account='a')
        now = time.time()
        with mock.patch('time.time', new=lambda: now):
            broker.initialize(Timestamp(now).internal)
        # add some container data
        ts = (Timestamp(t).internal for t in itertools.count(int(now)))
        total_containers = 0
        total_objects = 0
        total_bytes = 0
        for policy in POLICIES:
            delete_timestamp = next(ts)
            put_timestamp = next(ts)
            object_count = int(policy)
            bytes_used = int(policy) * 10
            broker.put_container('c-%s' % policy.name, put_timestamp,
                                 delete_timestamp, object_count, bytes_used,
                                 int(policy))
            total_containers += 1
            total_objects += object_count
            total_bytes += bytes_used
        expected = HeaderKeyDict({
            'X-Account-Container-Count': total_containers,
            'X-Account-Object-Count': total_objects,
            'X-Account-Bytes-Used': total_bytes,
            'X-Timestamp': Timestamp(now).normal,
            'X-PUT-Timestamp': Timestamp(now).normal,
        })
        for policy in POLICIES:
            prefix = 'X-Account-Storage-Policy-%s-' % policy.name
            expected[prefix + 'Object-Count'] = int(policy)
            expected[prefix + 'Bytes-Used'] = int(policy) * 10
        orig_policy_stats = broker.get_policy_stats

        def stub_policy_stats(*args, **kwargs):
            policy_stats = orig_policy_stats(*args, **kwargs)
            for stats in policy_stats.values():
                # legacy db's won't return container_count
                del stats['container_count']
            return policy_stats
        broker.get_policy_stats = stub_policy_stats
        resp_headers = utils.get_response_headers(broker)
        per_policy_container_headers = [
            h for h in resp_headers if
            h.lower().startswith('x-account-storage-policy-') and
            h.lower().endswith('-container-count')]
        self.assertFalse(per_policy_container_headers)
        for key, value in resp_headers.items():
            expected_value = expected.pop(key)
            self.assertEqual(expected_value, str(value),
                             'value for %r was %r not %r' % (
                                 key, value, expected_value))
        self.assertFalse(expected)

    def test_account_listing_response(self):
        """Listing a nonexistent account returns 204 with zeroed stats."""
        req = Request.blank('')
        now = time.time()
        with mock.patch('time.time', new=lambda: now):
            resp = utils.account_listing_response('a', req, 'text/plain')
        self.assertEqual(resp.status_int, 204)
        expected = HeaderKeyDict({
            'Content-Type': 'text/plain; charset=utf-8',
            'X-Account-Container-Count': 0,
            'X-Account-Object-Count': 0,
            'X-Account-Bytes-Used': 0,
            'X-Timestamp': Timestamp(now).normal,
            'X-PUT-Timestamp': Timestamp(now).normal,
        })
        self.assertEqual(expected, resp.headers)
        self.assertEqual(b'', resp.body)

    @patch_policies([StoragePolicy(0, 'zero', is_default=True)])
    def test_account_listing_reserved_names(self):
        """Reserved-namespace containers are hidden from listings unless
        the request sets X-Backend-Allow-Reserved-Names."""
        broker = backend.AccountBroker(self.db_path, account='a')
        put_timestamp = next(self.ts)
        now = time.time()
        with mock.patch('time.time', new=lambda: now):
            broker.initialize(put_timestamp.internal)
        container_timestamp = next(self.ts)
        broker.put_container(get_reserved_name('foo'),
                             container_timestamp.internal, 0, 10, 100, 0)
        req = Request.blank('')
        resp = utils.account_listing_response(
            'a', req, 'application/json', broker)
        self.assertEqual(resp.status_int, 200)
        expected = HeaderKeyDict({
            'Content-Type': 'application/json; charset=utf-8',
            'Content-Length': 2,
            'X-Account-Container-Count': 1,
            'X-Account-Object-Count': 10,
            'X-Account-Bytes-Used': 100,
            'X-Timestamp': Timestamp(now).normal,
            'X-PUT-Timestamp': put_timestamp.normal,
            'X-Account-Storage-Policy-Zero-Container-Count': 1,
            'X-Account-Storage-Policy-Zero-Object-Count': 10,
            'X-Account-Storage-Policy-Zero-Bytes-Used': 100,
        })
        self.assertEqual(expected, resp.headers)
        # without the backend header the reserved container is hidden
        self.assertEqual(b'[]', resp.body)
        req = Request.blank('', headers={
            'X-Backend-Allow-Reserved-Names': 'true'})
        resp = utils.account_listing_response(
            'a', req, 'application/json', broker)
        self.assertEqual(resp.status_int, 200)
        expected = HeaderKeyDict({
            'Content-Type': 'application/json; charset=utf-8',
            'Content-Length': 97,
            'X-Account-Container-Count': 1,
            'X-Account-Object-Count': 10,
            'X-Account-Bytes-Used': 100,
            'X-Timestamp': Timestamp(now).normal,
            'X-PUT-Timestamp': put_timestamp.normal,
            'X-Account-Storage-Policy-Zero-Container-Count': 1,
            'X-Account-Storage-Policy-Zero-Object-Count': 10,
            'X-Account-Storage-Policy-Zero-Bytes-Used': 100,
        })
        self.assertEqual(expected, resp.headers)
        expected = [{
            "last_modified": container_timestamp.isoformat,
            "count": 10,
            "bytes": 100,
            "name": get_reserved_name('foo'),
        }]
        # compare byte-multisets; JSON key order is not guaranteed
        self.assertEqual(sorted(json.dumps(expected).encode('ascii')),
                         sorted(resp.body))
| swift-master | test/unit/account/test_utils.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import random
import shutil
import tempfile
import unittest
from mock import patch, call, DEFAULT
import eventlet
from swift.account import reaper
from swift.account.backend import DATADIR
from swift.common.exceptions import ClientException
from swift.common.utils import normalize_timestamp, Timestamp
from test import unit
from test.debug_logger import debug_logger
from swift.common.storage_policy import StoragePolicy, POLICIES
class FakeBroker(object):
    """Minimal broker double exposing only get_info()."""

    def __init__(self):
        # tests mutate this dict directly to control what get_info returns
        self.info = {}

    def get_info(self):
        """Return the (test-populated) info mapping unchanged."""
        return self.info
class FakeAccountBroker(object):
    """Account-broker double that serves a fixed container listing."""

    def __init__(self, containers, logger):
        self.containers = containers
        self.containers_yielded = []

    def get_info(self):
        # deleted 10 seconds ago, so delay_reaping checks will pass
        return {'account': 'a',
                'delete_timestamp': time.time() - 10}

    def list_containers_iter(self, limit, marker, *args, **kwargs):
        allow_reserved = kwargs.pop('allow_reserved')
        if not allow_reserved:
            raise RuntimeError('Expected allow_reserved to be True!')
        if kwargs:
            raise RuntimeError('Got unexpected keyword arguments: %r' % (
                kwargs, ))
        remaining = limit
        for name in self.containers:
            if name <= marker:
                continue
            yield name, None, None, None, None
            remaining -= 1
            if remaining <= 0:
                break

    def is_status_deleted(self):
        return True

    def empty(self):
        return False
class FakeRing(object):
    """Static five-node ring double; lookup arguments are ignored."""

    def __init__(self):
        specs = [
            ('1', '10.10.10.1', 'sda1'),
            ('2', '10.10.10.2', 'sda1'),
            ('3', '10.10.10.3', None),
            ('4', '10.10.10.1', 'sda2'),
            ('5', '10.10.10.1', 'sda3'),
        ]
        self.nodes = [{'id': node_id,
                       'ip': ip,
                       'port': 6202,
                       'device': device}
                      for node_id, ip, device in specs]

    def get_nodes(self, *args, **kwargs):
        # a real ring computes a partition; the double always answers
        # the literal string 'partition'
        return ('partition', self.nodes)

    def get_part_nodes(self, *args, **kwargs):
        return self.nodes
# Five identical account/container node stubs; tests only read the
# 'device', 'ip' and 'port' keys when building delete requests.  Each
# element is a distinct dict so per-node mutation in a test can't leak.
acc_nodes = [{'device': 'sda1', 'ip': '', 'port': ''} for _ in range(5)]

cont_nodes = [{'device': 'sda1', 'ip': '', 'port': ''} for _ in range(5)]
@unit.patch_policies([StoragePolicy(0, 'zero', False,
object_ring=unit.FakeRing()),
StoragePolicy(1, 'one', True,
object_ring=unit.FakeRing(replicas=4))])
class TestReaper(unittest.TestCase):
def setUp(self):
    """Start each test with no pending cleanups and a canned 404 error."""
    self.to_delete = []
    # stub 404 raised by the fake direct_* helpers below
    self.myexp = ClientException(
        "", http_host=None, http_port=None, http_device=None,
        http_status=404, http_reason=None)
def tearDown(self):
    """Remove every temp tree registered via prepare_data_dir()."""
    for path in self.to_delete:
        shutil.rmtree(path)
def fake_direct_delete_object(self, *args, **kwargs):
    """Fail the first self.max_fail calls with the canned 404; after
    that, raise a Timeout if self.reap_obj_timeout, else succeed."""
    still_failing = self.amount_fail < self.max_fail
    if still_failing:
        self.amount_fail += 1
        raise self.myexp
    elif self.reap_obj_timeout:
        raise eventlet.Timeout()
def fake_direct_delete_container(self, *args, **kwargs):
    """Fail the first self.max_delete_fail calls with the canned 404,
    then succeed silently."""
    if self.amount_delete_fail >= self.max_delete_fail:
        return
    self.amount_delete_fail += 1
    raise self.myexp
def fake_direct_get_container(self, *args, **kwargs):
    """Return a canned four-object listing filtered by kwargs['marker'],
    or raise per self.get_fail / self.timeout."""
    if self.get_fail:
        raise self.myexp
    if self.timeout:
        raise eventlet.Timeout()
    listing = [{'name': u'o%d' % i} for i in range(1, 5)]
    remaining = [obj for obj in listing if obj['name'] > kwargs['marker']]
    return None, remaining
def fake_container_ring(self):
    # Patched in for the reaper's container-ring accessor in tests.
    return FakeRing()
def fake_reap_object(self, *args, **kwargs):
    # Deliberately broad Exception: tests only check that reap_container
    # survives/handles an object-reap failure.
    if self.reap_obj_fail:
        raise Exception
def prepare_data_dir(self, ts=False, device='sda1'):
    """Build a throwaway devices tree containing one account db file
    (or a .ts tombstone when ts=True) and return the devices root.

    The tree is registered in self.to_delete so tearDown removes it.
    """
    devices_path = tempfile.mkdtemp()
    # will be deleted by teardown
    self.to_delete.append(devices_path)
    hash_dir = os.path.join(
        devices_path, device, DATADIR, '100', 'a86',
        'a8c682d2472e1720f2d81ff8993aba6')
    os.makedirs(hash_dir)
    suffix = 'ts' if ts else 'db'
    with open(os.path.join(hash_dir, 'a8c682203aba6.%s' % suffix),
              'w') as fd:
        fd.write('')
    return devices_path
def init_reaper(self, conf=None, myips=None, fakelogger=False):
    """Construct an AccountReaper wired with test ips and, optionally,
    a capturing debug logger."""
    conf = {} if conf is None else conf
    myips = ['10.10.10.1'] if myips is None else myips
    worker = reaper.AccountReaper(conf)
    worker.myips = myips
    if fakelogger:
        worker.logger = debug_logger('test-reaper')
    return worker
def fake_reap_account(self, *args, **kwargs):
    # Count invocations instead of actually reaping.
    self.called_amount += 1
def fake_account_ring(self):
    # Patched in for the reaper's account-ring accessor in tests.
    return FakeRing()
def test_creation(self):
    """Config values are parsed at construction time."""
    # later config should be extended to assert more config options
    worker = reaper.AccountReaper({'node_timeout': '3.5'})
    self.assertEqual(worker.node_timeout, 3.5)
def test_delay_reaping_conf_default(self):
    """A missing or blank delay_reaping setting falls back to 0."""
    for conf in ({}, {'delay_reaping': ''}):
        self.assertEqual(reaper.AccountReaper(conf).delay_reaping, 0)
def test_delay_reaping_conf_set(self):
    """A numeric delay_reaping string is parsed into a number."""
    worker = reaper.AccountReaper({'delay_reaping': '123'})
    self.assertEqual(worker.delay_reaping, 123)
def test_delay_reaping_conf_bad_value(self):
    """Non-numeric delay_reaping must raise ValueError at construction."""
    with self.assertRaises(ValueError):
        reaper.AccountReaper({'delay_reaping': 'abc'})
def test_reap_warn_after_conf_set(self):
    """reap_not_done_after is delay_reaping + reap_warn_after."""
    worker = reaper.AccountReaper(
        {'delay_reaping': '2', 'reap_warn_after': '3'})
    self.assertEqual(worker.reap_not_done_after, 5)
def test_reap_warn_after_conf_bad_value(self):
    """Non-numeric reap_warn_after must raise ValueError at construction."""
    with self.assertRaises(ValueError):
        reaper.AccountReaper({'reap_warn_after': 'abc'})
def test_reap_delay(self):
    """reap_account() is a no-op until delay_reaping seconds have passed
    since the account's delete_timestamp.

    Uses mock.patch on the module's time() instead of the previous
    hand-rolled save/assign/try-finally monkey-patch, matching how the
    rest of this file patches collaborators.
    """
    with patch('swift.account.reaper.time', return_value=100):
        r = reaper.AccountReaper({'delay_reaping': '10'})
        b = FakeBroker()
        # deleted in the future or less than delay_reaping ago: skip
        for ts in (110, 100, 90):
            b.info['delete_timestamp'] = normalize_timestamp(ts)
            self.assertFalse(r.reap_account(b, 0, None))
        # old enough to reap: reaping proceeds far enough to hit the
        # missing 'account' key in FakeBroker's info, so KeyError is
        # raised immediately.
        for ts in (89, 1):
            b.info['delete_timestamp'] = normalize_timestamp(ts)
            self.assertRaises(KeyError, r.reap_account, b, 0, None)
def test_reset_stats(self):
    """All counters start at zero and reset_stats() zeroes them again."""
    counter_attrs = (
        'stats_containers_deleted',
        'stats_containers_remaining',
        'stats_containers_possibly_remaining',
        'stats_objects_deleted',
        'stats_objects_remaining',
        'stats_objects_possibly_remaining',
    )
    r = reaper.AccountReaper({})
    self.assertDictEqual(r.stats_return_codes, {})
    for attr in counter_attrs:
        self.assertEqual(getattr(r, attr), 0)
    # also make sure reset actually resets values
    r.stats_return_codes = {"hello": "swift"}
    for attr in counter_attrs:
        setattr(r, attr, random.randint(1, 100))
    r.reset_stats()
    self.assertDictEqual(r.stats_return_codes, {})
    for attr in counter_attrs:
        self.assertEqual(getattr(r, attr), 0)
def test_reap_object(self):
    """reap_object() issues one direct_delete_object per object replica,
    with container-sync headers and a frozen X-Timestamp."""
    conf = {
        'mount_check': 'false',
    }
    r = reaper.AccountReaper(conf, logger=debug_logger())
    mock_path = 'swift.account.reaper.direct_delete_object'
    for policy in POLICIES:
        r.reset_stats()
        with patch(mock_path) as fake_direct_delete:
            with patch('swift.common.utils.Timestamp.now') as mock_now:
                # freeze the timestamp so the header value is predictable
                mock_now.return_value = Timestamp(1429117638.86767)
                r.reap_object('a', 'c', 'partition', cont_nodes, 'o',
                              policy.idx)
                mock_now.assert_called_once_with()
                for i, call_args in enumerate(
                        fake_direct_delete.call_args_list):
                    # container nodes are handed out round-robin across
                    # the object replicas
                    cnode = cont_nodes[i % len(cont_nodes)]
                    host = '%(ip)s:%(port)s' % cnode
                    device = cnode['device']
                    headers = {
                        'X-Container-Host': host,
                        'X-Container-Partition': 'partition',
                        'X-Container-Device': device,
                        'X-Backend-Storage-Policy-Index': policy.idx,
                        'X-Timestamp': '1429117638.86767',
                        'x-backend-use-replication-network': 'true',
                    }
                    ring = r.get_object_ring(policy.idx)
                    expected = call(dict(ring.devs[i], index=i), 0,
                                    'a', 'c', 'o',
                                    headers=headers, conn_timeout=0.5,
                                    response_timeout=10)
                    self.assertEqual(call_args, expected)
                # exactly one call per replica of this policy's ring
                self.assertEqual(policy.object_ring.replicas - 1, i)
        self.assertEqual(r.stats_objects_deleted,
                         policy.object_ring.replicas)
    def test_reap_object_fail(self):
        """One 4xx among the replica deletes skews the per-replica stats."""
        r = self.init_reaper({}, fakelogger=True)
        self.amount_fail = 0
        self.max_fail = 1
        self.reap_obj_timeout = False
        policy = random.choice(list(POLICIES))
        with patch('swift.account.reaper.direct_delete_object',
                   self.fake_direct_delete_object):
            r.reap_object('a', 'c', 'partition', cont_nodes, 'o',
                          policy.idx)
        # IMHO, the stat handling in the node loop of reap object is
        # over indented, but no one has complained, so I'm not inclined
        # to move it.  However it's worth noting we're currently keeping
        # stats on deletes per *replica* - which is rather obvious from
        # these tests, but this results is surprising because of some
        # funny logic to *skip* increments on successful deletes of
        # replicas until we have more successful responses than
        # failures.  This means that while the first replica doesn't
        # increment deleted because of the failure, the second one
        # *does* get successfully deleted, but *also does not* increment
        # the counter (!?).
        #
        # In the three replica case this leaves only the last deleted
        # object incrementing the counter - in the four replica case
        # this leaves the last two.
        #
        # Basically this test will always result in:
        #   deleted == num_replicas - 2
        self.assertEqual(r.stats_objects_deleted,
                         policy.object_ring.replicas - 2)
        self.assertEqual(r.stats_objects_remaining, 1)
        self.assertEqual(r.stats_objects_possibly_remaining, 1)
        self.assertEqual(r.stats_return_codes[2],
                         policy.object_ring.replicas - 1)
        self.assertEqual(r.stats_return_codes[4], 1)
    def test_reap_object_timeout(self):
        """Timeouts on every delete leave all replicas 'remaining' and log
        a Timeout error; nothing is counted as deleted."""
        r = self.init_reaper({}, fakelogger=True)
        self.amount_fail = 1
        self.max_fail = 0
        self.reap_obj_timeout = True
        with patch('swift.account.reaper.direct_delete_object',
                   self.fake_direct_delete_object):
            r.reap_object('a', 'c', 'partition', cont_nodes, 'o', 1)
        self.assertEqual(r.stats_objects_deleted, 0)
        self.assertEqual(r.stats_objects_remaining, 4)
        self.assertEqual(r.stats_objects_possibly_remaining, 0)
        self.assertTrue(r.logger.get_lines_for_level(
            'error')[-1].startswith('Timeout Exception'))
    def test_reap_object_non_exist_policy_index(self):
        """An unknown policy index deletes nothing; the object is counted
        as remaining."""
        r = self.init_reaper({}, fakelogger=True)
        r.reap_object('a', 'c', 'partition', cont_nodes, 'o', 2)
        self.assertEqual(r.stats_objects_deleted, 0)
        self.assertEqual(r.stats_objects_remaining, 1)
        self.assertEqual(r.stats_objects_possibly_remaining, 0)
    @patch('swift.account.reaper.Ring',
           lambda *args, **kwargs: unit.FakeRing())
    def test_reap_container(self):
        """Happy path: the single object is deleted on every replica, then
        the (now empty) container is deleted on each account node with a
        second, later timestamp."""
        policy = random.choice(list(POLICIES))
        r = self.init_reaper({}, fakelogger=True)
        with patch.multiple('swift.account.reaper',
                            direct_get_container=DEFAULT,
                            direct_delete_object=DEFAULT,
                            direct_delete_container=DEFAULT) as mocks:
            headers = {'X-Backend-Storage-Policy-Index': policy.idx}
            obj_listing = [{'name': 'o'}]

            def fake_get_container(*args, **kwargs):
                # serve the single object once, then an empty listing
                try:
                    obj = obj_listing.pop(0)
                except IndexError:
                    obj_list = []
                else:
                    obj_list = [obj]
                return headers, obj_list

            mocks['direct_get_container'].side_effect = fake_get_container
            with patch('swift.common.utils.Timestamp.now') as mock_now:
                # first now() stamps the object deletes, the second one
                # stamps the container delete
                mock_now.side_effect = [Timestamp(1429117638.86767),
                                        Timestamp(1429117639.67676)]
                r.reap_container('a', 'partition', acc_nodes, 'c')
            # verify calls to direct_delete_object
            mock_calls = mocks['direct_delete_object'].call_args_list
            self.assertEqual(policy.object_ring.replicas, len(mock_calls))
            for call_args in mock_calls:
                _args, kwargs = call_args
                self.assertEqual(kwargs['headers']
                                 ['X-Backend-Storage-Policy-Index'],
                                 policy.idx)
                self.assertEqual(kwargs['headers']
                                 ['X-Timestamp'],
                                 '1429117638.86767')
            # verify calls to direct_delete_container
            self.assertEqual(mocks['direct_delete_container'].call_count, 3)
            for i, call_args in enumerate(
                    mocks['direct_delete_container'].call_args_list):
                # account nodes are cycled as X-Account-* update hints
                anode = acc_nodes[i % len(acc_nodes)]
                host = '%(ip)s:%(port)s' % anode
                device = anode['device']
                headers = {
                    'X-Account-Host': host,
                    'X-Account-Partition': 'partition',
                    'X-Account-Device': device,
                    'X-Account-Override-Deleted': 'yes',
                    'X-Timestamp': '1429117639.67676',
                    'x-backend-use-replication-network': 'true',
                }
                ring = r.get_object_ring(policy.idx)
                expected = call(dict(ring.devs[i], index=i), 0, 'a', 'c',
                                headers=headers, conn_timeout=0.5,
                                response_timeout=10)
                self.assertEqual(call_args, expected)
        self.assertEqual(r.stats_objects_deleted, policy.object_ring.replicas)
    def test_reap_container_get_object_fail(self):
        """A failing container listing (4xx) still lets the container be
        deleted and is reflected in the statsd return-code counters."""
        r = self.init_reaper({}, fakelogger=True)
        self.get_fail = True
        self.reap_obj_fail = False
        self.amount_delete_fail = 0
        self.max_delete_fail = 0
        with patch('swift.account.reaper.direct_get_container',
                   self.fake_direct_get_container), \
                patch('swift.account.reaper.direct_delete_container',
                      self.fake_direct_delete_container), \
                patch('swift.account.reaper.AccountReaper.get_container_ring',
                      self.fake_container_ring), \
                patch('swift.account.reaper.AccountReaper.reap_object',
                      self.fake_reap_object):
            r.reap_container('a', 'partition', acc_nodes, 'c')
        self.assertEqual(
            r.logger.statsd_client.get_increment_counts()['return_codes.4'], 1)
        self.assertEqual(r.stats_containers_deleted, 1)
    def test_reap_container_partial_fail(self):
        """Four of five container deletes failing counts the container as
        only 'possibly remaining'."""
        r = self.init_reaper({}, fakelogger=True)
        self.get_fail = False
        self.timeout = False
        self.reap_obj_fail = False
        self.amount_delete_fail = 0
        self.max_delete_fail = 4
        with patch('swift.account.reaper.direct_get_container',
                   self.fake_direct_get_container), \
                patch('swift.account.reaper.direct_delete_container',
                      self.fake_direct_delete_container), \
                patch('swift.account.reaper.AccountReaper.get_container_ring',
                      self.fake_container_ring), \
                patch('swift.account.reaper.AccountReaper.reap_object',
                      self.fake_reap_object):
            r.reap_container('a', 'partition', acc_nodes, 'c')
        self.assertEqual(
            r.logger.statsd_client.get_increment_counts()['return_codes.4'], 4)
        self.assertEqual(r.stats_containers_possibly_remaining, 1)
    def test_reap_container_full_fail(self):
        """All five container deletes failing counts the container as
        definitely remaining."""
        r = self.init_reaper({}, fakelogger=True)
        self.get_fail = False
        self.timeout = False
        self.reap_obj_fail = False
        self.amount_delete_fail = 0
        self.max_delete_fail = 5
        with patch('swift.account.reaper.direct_get_container',
                   self.fake_direct_get_container), \
                patch('swift.account.reaper.direct_delete_container',
                      self.fake_direct_delete_container), \
                patch('swift.account.reaper.AccountReaper.get_container_ring',
                      self.fake_container_ring), \
                patch('swift.account.reaper.AccountReaper.reap_object',
                      self.fake_reap_object):
            r.reap_container('a', 'partition', acc_nodes, 'c')
        self.assertEqual(
            r.logger.statsd_client.get_increment_counts()['return_codes.4'], 5)
        self.assertEqual(r.stats_containers_remaining, 1)
    def test_reap_container_get_object_timeout(self):
        """A Timeout from the container listing is logged as an error."""
        r = self.init_reaper({}, fakelogger=True)
        self.get_fail = False
        self.timeout = True
        self.reap_obj_fail = False
        self.amount_delete_fail = 0
        self.max_delete_fail = 0
        with patch('swift.account.reaper.direct_get_container',
                   self.fake_direct_get_container), \
                patch('swift.account.reaper.direct_delete_container',
                      self.fake_direct_delete_container), \
                patch('swift.account.reaper.AccountReaper.get_container_ring',
                      self.fake_container_ring), \
                patch('swift.account.reaper.AccountReaper.reap_object',
                      self.fake_reap_object):
            r.reap_container('a', 'partition', acc_nodes, 'c')
        self.assertTrue(r.logger.get_lines_for_level(
            'error')[-1].startswith('Timeout Exception'))
    @patch('swift.account.reaper.Ring',
           lambda *args, **kwargs: unit.FakeRing())
    def test_reap_container_non_exist_policy_index(self):
        """A container reporting an unknown storage policy index is skipped
        with a specific error message."""
        r = self.init_reaper({}, fakelogger=True)
        with patch.multiple('swift.account.reaper',
                            direct_get_container=DEFAULT,
                            direct_delete_object=DEFAULT,
                            direct_delete_container=DEFAULT) as mocks:
            # policy index 2 is not configured under @patch_policies
            headers = {'X-Backend-Storage-Policy-Index': 2}
            obj_listing = [{'name': 'o'}]

            def fake_get_container(*args, **kwargs):
                # serve the single object once, then an empty listing
                try:
                    obj = obj_listing.pop(0)
                except IndexError:
                    obj_list = []
                else:
                    obj_list = [obj]
                return headers, obj_list

            mocks['direct_get_container'].side_effect = fake_get_container
            r.reap_container('a', 'partition', acc_nodes, 'c')
        self.assertEqual(r.logger.get_lines_for_level('error'), [
            'ERROR: invalid storage policy index: 2'])
    def fake_reap_container(self, *args, **kwargs):
        # Stand-in for AccountReaper.reap_container: records that it was
        # called and stamps fixed stats onto the reaper under test
        # (assignment for the counters, increment for the 2xx tally).
        self.called_amount += 1
        self.r.stats_containers_deleted = 1
        self.r.stats_objects_deleted = 1
        self.r.stats_containers_remaining = 1
        self.r.stats_objects_remaining = 1
        self.r.stats_containers_possibly_remaining = 1
        self.r.stats_objects_possibly_remaining = 1
        self.r.stats_return_codes[2] = \
            self.r.stats_return_codes.get(2, 0) + 1
    def test_reap_account(self):
        """Reap an account shard-by-shard and inspect the per-pass log
        lines ('Beginning pass' + stats summary per shard)."""
        containers = ('c1', 'c2', 'c3', 'c4')
        broker = FakeAccountBroker(containers, debug_logger())
        self.called_amount = 0
        self.r = r = self.init_reaper({}, fakelogger=True)
        r.start_time = time.time()
        with patch('swift.account.reaper.AccountReaper.reap_container',
                   self.fake_reap_container), \
                patch('swift.account.reaper.AccountReaper.get_account_ring',
                      self.fake_account_ring):
            nodes = r.get_account_ring().get_part_nodes()
            for container_shard, node in enumerate(nodes):
                self.assertTrue(
                    r.reap_account(broker, 'partition', nodes,
                                   container_shard=container_shard))
        self.assertEqual(self.called_amount, 4)
        info_lines = r.logger.get_lines_for_level('info')
        self.assertEqual(len(info_lines), 10)
        for start_line, stat_line in zip(*[iter(info_lines)] * 2):
            self.assertEqual(start_line, 'Beginning pass on account a')
            # NOTE(review): str.find() returns -1 on a miss, which is truthy,
            # so these assertTrue checks can never fail and assert nothing
            # about the stat line.  They should probably be assertIn checks,
            # but the per-shard stat text varies (some passes reap zero
            # containers), so the expected substrings need confirming against
            # a real run before tightening -- TODO.
            self.assertTrue(stat_line.find('1 containers deleted'))
            self.assertTrue(stat_line.find('1 objects deleted'))
            self.assertTrue(stat_line.find('1 containers remaining'))
            self.assertTrue(stat_line.find('1 objects remaining'))
            self.assertTrue(stat_line.find('1 containers possibly remaining'))
            self.assertTrue(stat_line.find('1 objects possibly remaining'))
            self.assertTrue(stat_line.find('return codes: 2 2xxs'))
    @patch('swift.account.reaper.Ring',
           lambda *args, **kwargs: unit.FakeRing())
    def test_basic_reap_account(self):
        """get_account_ring() lazily builds the (faked) account ring."""
        self.r = reaper.AccountReaper({})
        self.r.account_ring = None
        self.r.get_account_ring()
        self.assertEqual(self.r.account_ring.replica_count, 3)
        self.assertEqual(len(self.r.account_ring.devs), 3)
    def test_reap_account_no_container(self):
        """An empty account completes a pass without reaping anything."""
        broker = FakeAccountBroker(tuple(), debug_logger())
        self.r = r = self.init_reaper({}, fakelogger=True)
        self.called_amount = 0
        r.start_time = time.time()
        with patch('swift.account.reaper.AccountReaper.reap_container',
                   self.fake_reap_container), \
                patch('swift.account.reaper.AccountReaper.get_account_ring',
                      self.fake_account_ring):
            nodes = r.get_account_ring().get_part_nodes()
            self.assertTrue(r.reap_account(broker, 'partition', nodes))
        self.assertTrue(r.logger.get_lines_for_level(
            'info')[-1].startswith('Completed pass'))
        self.assertEqual(self.called_amount, 0)
    def test_reap_device(self):
        """A deleted account DB on a local device gets reaped."""
        devices = self.prepare_data_dir()
        self.called_amount = 0
        conf = {'devices': devices}
        r = self.init_reaper(conf)
        with patch('swift.account.reaper.AccountBroker',
                   FakeAccountBroker), \
                patch('swift.account.reaper.AccountReaper.get_account_ring',
                      self.fake_account_ring), \
                patch('swift.account.reaper.AccountReaper.reap_account',
                      self.fake_reap_account):
            r.reap_device('sda1')
        self.assertEqual(self.called_amount, 1)
    def test_reap_device_with_ts(self):
        """A tombstone-style data dir (ts=True) is not reaped."""
        devices = self.prepare_data_dir(ts=True)
        self.called_amount = 0
        conf = {'devices': devices}
        r = self.init_reaper(conf=conf)
        with patch('swift.account.reaper.AccountBroker',
                   FakeAccountBroker), \
                patch('swift.account.reaper.AccountReaper.get_account_ring',
                      self.fake_account_ring), \
                patch('swift.account.reaper.AccountReaper.reap_account',
                      self.fake_reap_account):
            r.reap_device('sda1')
        self.assertEqual(self.called_amount, 0)
    def test_reap_device_with_not_my_ip(self):
        """If none of the ring node IPs match our own, nothing is reaped."""
        devices = self.prepare_data_dir()
        self.called_amount = 0
        conf = {'devices': devices}
        r = self.init_reaper(conf, myips=['10.10.1.2'])
        with patch('swift.account.reaper.AccountBroker',
                   FakeAccountBroker), \
                patch('swift.account.reaper.AccountReaper.get_account_ring',
                      self.fake_account_ring), \
                patch('swift.account.reaper.AccountReaper.reap_account',
                      self.fake_reap_account):
            r.reap_device('sda1')
        self.assertEqual(self.called_amount, 0)
    def test_reap_device_with_sharding(self):
        """reap_device passes a container_shard derived from this node's
        position in the account ring."""
        devices = self.prepare_data_dir()
        conf = {'devices': devices}
        r = self.init_reaper(conf, myips=['10.10.10.2'])
        container_shard_used = [-1]

        def fake_reap_account(*args, **kwargs):
            # capture the shard index reap_device selected
            container_shard_used[0] = kwargs.get('container_shard')

        with patch('swift.account.reaper.AccountBroker',
                   FakeAccountBroker), \
                patch('swift.account.reaper.AccountReaper.get_account_ring',
                      self.fake_account_ring), \
                patch('swift.account.reaper.AccountReaper.reap_account',
                      fake_reap_account):
            r.reap_device('sda1')
        # 10.10.10.2 is second node from ring
        self.assertEqual(container_shard_used[0], 1)
    def test_reap_device_with_sharding_and_various_devices(self):
        """The shard index depends on which device of the matching node is
        being reaped, not just the node's IP."""
        devices = self.prepare_data_dir(device='sda2')
        conf = {'devices': devices}
        r = self.init_reaper(conf)
        container_shard_used = [-1]

        def fake_reap_account(*args, **kwargs):
            # capture the shard index reap_device selected
            container_shard_used[0] = kwargs.get('container_shard')

        with patch('swift.account.reaper.AccountBroker',
                   FakeAccountBroker), \
                patch('swift.account.reaper.AccountReaper.get_account_ring',
                      self.fake_account_ring), \
                patch('swift.account.reaper.AccountReaper.reap_account',
                      fake_reap_account):
            r.reap_device('sda2')
        # 10.10.10.2 is second node from ring
        self.assertEqual(container_shard_used[0], 3)

        # same node, different device -> different shard index
        devices = self.prepare_data_dir(device='sda3')
        conf = {'devices': devices}
        r = self.init_reaper(conf)
        container_shard_used = [-1]

        with patch('swift.account.reaper.AccountBroker',
                   FakeAccountBroker), \
                patch('swift.account.reaper.AccountReaper.get_account_ring',
                      self.fake_account_ring), \
                patch('swift.account.reaper.AccountReaper.reap_account',
                      fake_reap_account):
            r.reap_device('sda3')
        # 10.10.10.2 is second node from ring
        self.assertEqual(container_shard_used[0], 4)
    def test_reap_account_with_sharding(self):
        """Each container_shard value reaps only the containers hashed to
        that shard (here: shard 0->0, 1->1, 2->0, 3->3, 4->1 of five)."""
        devices = self.prepare_data_dir()
        self.called_amount = 0
        conf = {'devices': devices}
        r = self.init_reaper(conf, myips=['10.10.10.2'], fakelogger=True)
        container_reaped = [0]

        def fake_list_containers_iter(self, *args, **kwargs):
            # mimic AccountBroker.list_containers_iter, also asserting the
            # reaper passes allow_reserved=True and no other keywords
            if not kwargs.pop('allow_reserved'):
                raise RuntimeError('Expected allow_reserved to be True!')
            if kwargs:
                raise RuntimeError('Got unexpected keyword arguments: %r' % (
                    kwargs, ))
            for container in self.containers:
                if container in self.containers_yielded:
                    continue

                yield container, None, None, None, None
                self.containers_yielded.append(container)

        def fake_reap_container(self, account, account_partition,
                                account_nodes, container):
            container_reaped[0] += 1

        fake_ring = FakeRing()
        fake_logger = debug_logger()
        with patch('swift.account.reaper.AccountBroker',
                   FakeAccountBroker), \
                patch(
                    'swift.account.reaper.AccountBroker.list_containers_iter',
                    fake_list_containers_iter), \
                patch('swift.account.reaper.AccountReaper.reap_container',
                      fake_reap_container):
            fake_broker = FakeAccountBroker(['c', 'd', 'e', 'f', 'g'],
                                            fake_logger)
            r.reap_account(fake_broker, 10, fake_ring.nodes, 0)
            self.assertEqual(container_reaped[0], 0)

            fake_broker = FakeAccountBroker(['c', 'd', 'e', 'f', 'g'],
                                            fake_logger)
            container_reaped[0] = 0
            r.reap_account(fake_broker, 10, fake_ring.nodes, 1)
            self.assertEqual(container_reaped[0], 1)

            container_reaped[0] = 0
            fake_broker = FakeAccountBroker(['c', 'd', 'e', 'f', 'g'],
                                            fake_logger)
            r.reap_account(fake_broker, 10, fake_ring.nodes, 2)
            self.assertEqual(container_reaped[0], 0)

            container_reaped[0] = 0
            fake_broker = FakeAccountBroker(['c', 'd', 'e', 'f', 'g'],
                                            fake_logger)
            r.reap_account(fake_broker, 10, fake_ring.nodes, 3)
            self.assertEqual(container_reaped[0], 3)

            container_reaped[0] = 0
            fake_broker = FakeAccountBroker(['c', 'd', 'e', 'f', 'g'],
                                            fake_logger)
            r.reap_account(fake_broker, 10, fake_ring.nodes, 4)
            self.assertEqual(container_reaped[0], 1)
def test_run_once(self):
def prepare_data_dir():
devices_path = tempfile.mkdtemp()
# will be deleted by teardown
self.to_delete.append(devices_path)
path = os.path.join(devices_path, 'sda1', DATADIR)
os.makedirs(path)
return devices_path
def init_reaper(devices):
r = reaper.AccountReaper({'devices': devices})
return r
devices = prepare_data_dir()
r = init_reaper(devices)
with patch('swift.account.reaper.AccountReaper.reap_device') as foo, \
unit.mock_check_drive(ismount=True):
r.run_once()
self.assertEqual(foo.called, 1)
with patch('swift.account.reaper.AccountReaper.reap_device') as foo, \
unit.mock_check_drive(ismount=False):
r.run_once()
self.assertFalse(foo.called)
with patch('swift.account.reaper.AccountReaper.reap_device') as foo:
r.logger = debug_logger('test-reaper')
r.devices = 'thisdeviceisbad'
r.run_once()
self.assertTrue(r.logger.get_lines_for_level(
'error')[-1].startswith('Exception in top-level account reaper'))
    def test_run_forever(self):
        """run_forever sleeps interval * random() between passes and lets
        exceptions from run_once propagate."""
        def fake_sleep(val):
            # capture the computed sleep duration
            self.val = val

        def fake_random():
            return 1

        def fake_run_once():
            raise Exception('exit')

        def init_reaper():
            r = reaper.AccountReaper({'interval': 1})
            r.run_once = fake_run_once
            return r

        r = init_reaper()
        with patch('swift.account.reaper.sleep', fake_sleep):
            with patch('swift.account.reaper.random.random', fake_random):
                with self.assertRaises(Exception) as raised:
                    r.run_forever()
        # interval (1) * random() (1) == 1
        self.assertEqual(self.val, 1)
        self.assertEqual(str(raised.exception), 'exit')
if __name__ == '__main__':
unittest.main()
| swift-master | test/unit/account/test_reaper.py |
swift-master | test/unit/account/__init__.py |
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
import itertools
import unittest
import time
import os
import random
from swift.account import auditor
from swift.common.storage_policy import POLICIES
from swift.common.utils import Timestamp
from test.debug_logger import debug_logger
from test.unit import patch_policies, with_tempdir
from test.unit.account.test_backend import (
AccountBrokerPreTrackContainerCountSetup)
@patch_policies
class TestAuditorRealBrokerMigration(
        AccountBrokerPreTrackContainerCountSetup, unittest.TestCase):
    """Auditing a pre-policy-stat account DB migrates its schema in place
    and backfills per-policy container counts."""

    def test_db_migration(self):
        # add a few containers, spread across all configured policies
        policies = itertools.cycle(POLICIES)
        num_containers = len(POLICIES) * 3
        per_policy_container_counts = defaultdict(int)
        for i in range(num_containers):
            name = 'test-container-%02d' % i
            policy = next(policies)
            self.broker.put_container(name, next(self.ts).internal,
                                      0, 0, 0, int(policy))
            per_policy_container_counts[int(policy)] += 1

        self.broker._commit_puts()
        self.assertEqual(num_containers,
                         self.broker.get_info()['container_count'])

        # still un-migrated
        self.assertUnmigrated(self.broker)

        # run auditor, and validate migration
        conf = {'devices': self.testdir, 'mount_check': False,
                'recon_cache_path': self.testdir}
        test_auditor = auditor.AccountAuditor(conf, logger=debug_logger())
        test_auditor.run_once()

        self.restore_account_broker()

        broker = auditor.AccountBroker(self.db_path, account='a')
        broker.initialize(Timestamp('1').internal, 0)
        # go after rows directly to avoid unintentional migration
        with broker.get() as conn:
            rows = conn.execute('''
                SELECT storage_policy_index, container_count
                FROM policy_stat
            ''').fetchall()
        for policy_index, container_count in rows:
            self.assertEqual(container_count,
                             per_policy_container_counts[policy_index])
class TestAuditorRealBroker(unittest.TestCase):
    """Audit failure paths against a real on-disk AccountBroker."""

    def setUp(self):
        self.logger = debug_logger()

    @with_tempdir
    def test_db_validate_fails(self, tempdir):
        """A corrupted per-policy container count makes the audit fail and
        emit a single descriptive error plus a 'failures' statsd bump."""
        ts = (Timestamp(t).internal for t in itertools.count(int(time.time())))
        db_path = os.path.join(tempdir, 'sda', 'accounts',
                               '0', '0', '0', 'test.db')
        broker = auditor.AccountBroker(db_path, account='a')
        broker.initialize(next(ts))

        # add a few containers, spread across all configured policies
        policies = itertools.cycle(POLICIES)
        num_containers = len(POLICIES) * 3
        per_policy_container_counts = defaultdict(int)
        for i in range(num_containers):
            name = 'test-container-%02d' % i
            policy = next(policies)
            broker.put_container(name, next(ts), 0, 0, 0, int(policy))
            per_policy_container_counts[int(policy)] += 1

        broker._commit_puts()
        self.assertEqual(broker.get_info()['container_count'], num_containers)

        messed_up_policy = random.choice(list(POLICIES))

        # now mess up a policy_stats table count
        with broker.get() as conn:
            conn.executescript('''
                UPDATE policy_stat
                SET container_count = container_count - 1
                WHERE storage_policy_index = %d;
            ''' % int(messed_up_policy))

        # validate it's messed up
        policy_stats = broker.get_policy_stats()
        self.assertEqual(
            policy_stats[int(messed_up_policy)]['container_count'],
            per_policy_container_counts[int(messed_up_policy)] - 1)

        # do an audit
        conf = {'devices': tempdir, 'mount_check': False,
                'recon_cache_path': tempdir}
        test_auditor = auditor.AccountAuditor(conf, logger=self.logger)
        test_auditor.run_once()

        # validate errors
        self.assertEqual(test_auditor.failures, 1)
        error_lines = test_auditor.logger.get_lines_for_level('error')
        self.assertEqual(len(error_lines), 1)
        error_message = error_lines[0]
        self.assertIn(broker.db_file, error_message)
        self.assertIn(
            'The total container_count for the account a (%d) does not match '
            'the sum of container_count across policies (%d)'
            % (num_containers, num_containers - 1), error_message)
        self.assertEqual(
            test_auditor.logger.statsd_client.get_increment_counts(),
            {'failures': 1})
if __name__ == '__main__':
unittest.main()
| swift-master | test/unit/account/test_auditor.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import os
import mock
import posix
import unittest
from tempfile import mkdtemp
from shutil import rmtree
import itertools
import random
from io import BytesIO
import json
from six import StringIO
from six.moves.urllib.parse import quote
import xml.dom.minidom
from swift import __version__ as swift_version
from swift.common.swob import (Request, WsgiBytesIO, HTTPNoContent)
from swift.common.constraints import ACCOUNT_LISTING_LIMIT
from swift.account.backend import AccountBroker
from swift.account.server import AccountController
from swift.common.utils import (normalize_timestamp, replication, public,
mkdirs, storage_directory, Timestamp)
from swift.common.request_helpers import get_sys_meta_prefix, get_reserved_name
from test.debug_logger import debug_logger
from test.unit import patch_policies, mock_check_drive, make_timestamp_iter
from swift.common.storage_policy import StoragePolicy, POLICIES
@patch_policies
class TestAccountController(unittest.TestCase):
"""Test swift.account.server.AccountController"""
    def setUp(self):
        """Set up for testing swift.account.server.AccountController"""
        self.testdir_base = mkdtemp()
        self.testdir = os.path.join(self.testdir_base, 'account_server')
        # mount_check disabled so the temp dir needn't be a real mountpoint
        mkdirs(os.path.join(self.testdir, 'sda1'))
        self.logger = debug_logger()
        self.controller = AccountController(
            {'devices': self.testdir, 'mount_check': 'false'},
            logger=self.logger)
        self.ts = make_timestamp_iter()
    def tearDown(self):
        """Tear down for testing swift.account.server.AccountController"""
        try:
            rmtree(self.testdir_base)
        except OSError as err:
            # already-gone is fine; anything else is a real failure
            if err.errno != errno.ENOENT:
                raise
    def test_init(self):
        """Setting the deprecated auto_create_account_prefix option logs a
        deprecation warning; omitting it logs nothing."""
        conf = {
            'devices': self.testdir,
            'mount_check': 'false',
        }
        AccountController(conf, logger=self.logger)
        self.assertEqual(self.logger.get_lines_for_level('warning'), [])
        conf['auto_create_account_prefix'] = '-'
        AccountController(conf, logger=self.logger)
        self.assertEqual(self.logger.get_lines_for_level('warning'), [
            'Option auto_create_account_prefix is deprecated. '
            'Configure auto_create_account_prefix under the '
            'swift-constraints section of swift.conf. This option '
            'will be ignored in a future release.'
        ])
    def test_OPTIONS(self):
        """OPTIONS advertises exactly the server's seven verbs and a
        Server header of '<server_type>/<swift version>'."""
        server_handler = AccountController(
            {'devices': self.testdir, 'mount_check': 'false'})
        req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'})
        req.content_length = 0
        resp = server_handler.OPTIONS(req)
        self.assertEqual(200, resp.status_int)
        for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE'.split():
            self.assertIn(verb, resp.headers['Allow'].split(', '))
        self.assertEqual(len(resp.headers['Allow'].split(', ')), 7)
        self.assertEqual(resp.headers['Server'],
                         (server_handler.server_type + '/' + swift_version))
    def test_insufficient_storage_mount_check_true(self):
        """With mount_check on, every verb 507s until the device appears
        mounted, then returns a normal (2xx/4xx) status."""
        conf = {'devices': self.testdir, 'mount_check': 'true'}
        account_controller = AccountController(conf)
        self.assertTrue(account_controller.mount_check)
        for method in account_controller.allowed_methods:
            if method == 'OPTIONS':
                continue
            req = Request.blank('/sda1/p/a-or-suff', method=method,
                                headers={'x-timestamp': '1'})
            with mock_check_drive() as mocks:
                try:
                    resp = req.get_response(account_controller)
                    self.assertEqual(resp.status_int, 507)
                    mocks['ismount'].return_value = True
                    resp = req.get_response(account_controller)
                    self.assertNotEqual(resp.status_int, 507)
                    # feel free to rip out this last assertion...
                    expected = 2 if method == 'PUT' else 4
                    self.assertEqual(resp.status_int // 100, expected)
                except AssertionError as e:
                    # report which verb failed, not just the assertion
                    self.fail('%s for %s' % (e, method))
    def test_insufficient_storage_mount_check_false(self):
        """With mount_check off, the drive only needs to be a directory;
        every verb 507s until isdir reports True."""
        conf = {'devices': self.testdir, 'mount_check': 'false'}
        account_controller = AccountController(conf)
        self.assertFalse(account_controller.mount_check)
        for method in account_controller.allowed_methods:
            if method == 'OPTIONS':
                continue
            req = Request.blank('/sda1/p/a-or-suff', method=method,
                                headers={'x-timestamp': '1'})
            with mock_check_drive() as mocks:
                try:
                    resp = req.get_response(account_controller)
                    self.assertEqual(resp.status_int, 507)
                    mocks['isdir'].return_value = True
                    resp = req.get_response(account_controller)
                    self.assertNotEqual(resp.status_int, 507)
                    # feel free to rip out this last assertion...
                    expected = 2 if method == 'PUT' else 4
                    self.assertEqual(resp.status_int // 100, expected)
                except AssertionError as e:
                    # report which verb failed, not just the assertion
                    self.fail('%s for %s' % (e, method))
    def test_DELETE_not_found(self):
        """DELETE on a never-created account is 404 with no status header."""
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '0'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn('X-Account-Status', resp.headers)
    def test_DELETE_empty(self):
        """DELETE of an empty account succeeds and marks it Deleted."""
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
    def test_DELETE_not_empty(self):
        """DELETE of an account that still has a container succeeds (the
        server no longer requires accounts to be empty)."""
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        # We now allow deleting non-empty accounts
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
    def test_DELETE_now_empty(self):
        """Deleting the only container first, then the account, succeeds."""
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        # delete-timestamp > put-timestamp marks the container deleted
        req = Request.blank(
            '/sda1/p/a/c1',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Put-Timestamp': '1',
                     'X-Delete-Timestamp': '2',
                     'X-Object-Count': '0',
                     'X-Bytes-Used': '0',
                     'X-Timestamp': normalize_timestamp(0)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
def test_DELETE_invalid_partition(self):
req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
def test_DELETE_timestamp_not_float(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': 'not-float'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
    def test_REPLICATE_insufficient_space(self):
        """REPLICATE honors fallocate_reserve: just-under-2%-free disk
        space yields 507, and the device path is what gets statvfs'd."""
        conf = {'devices': self.testdir,
                'mount_check': 'false',
                'fallocate_reserve': '2%'}
        account_controller = AccountController(conf)
        req = Request.blank('/sda1/p/a',
                            environ={'REQUEST_METHOD': 'REPLICATE'})
        statvfs_result = posix.statvfs_result([
            4096,     # f_bsize
            4096,     # f_frsize
            2854907,  # f_blocks
            59000,    # f_bfree
            57000,    # f_bavail (just under 2% free)
            1280000,  # f_files
            1266040,  # f_ffree,
            1266040,  # f_favail,
            4096,     # f_flag
            255,      # f_namemax
        ])
        with mock.patch('os.statvfs',
                        return_value=statvfs_result) as mock_statvfs:
            resp = req.get_response(account_controller)
        self.assertEqual(resp.status_int, 507)
        self.assertEqual(mock_statvfs.mock_calls,
                         [mock.call(os.path.join(self.testdir, 'sda1'))])
    def test_REPLICATE_rsync_then_merge_works(self):
        """A 'rsync_then_merge' REPLICATE op is dispatched to the
        ReplicatorRpc handler and its response (204) is returned."""
        def fake_rsync_then_merge(self, drive, db_file, args):
            return HTTPNoContent()

        with mock.patch("swift.common.db_replicator.ReplicatorRpc."
                        "rsync_then_merge", fake_rsync_then_merge):
            req = Request.blank('/sda1/p/a/',
                                environ={'REQUEST_METHOD': 'REPLICATE'},
                                headers={})
            json_string = b'["rsync_then_merge", "a.db"]'
            inbuf = WsgiBytesIO(json_string)
            req.environ['wsgi.input'] = inbuf
            resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
    def test_REPLICATE_complete_rsync_works(self):
        """A 'complete_rsync' REPLICATE op is dispatched to the
        ReplicatorRpc handler and its response (204) is returned."""
        def fake_complete_rsync(self, drive, db_file, args):
            return HTTPNoContent()

        # check complete_rsync
        with mock.patch("swift.common.db_replicator.ReplicatorRpc."
                        "complete_rsync", fake_complete_rsync):
            req = Request.blank('/sda1/p/a/',
                                environ={'REQUEST_METHOD': 'REPLICATE'},
                                headers={})
            json_string = b'["complete_rsync", "a.db"]'
            inbuf = WsgiBytesIO(json_string)
            req.environ['wsgi.input'] = inbuf
            resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
    def test_REPLICATE_value_error_works(self):
        """A REPLICATE body that isn't valid JSON is rejected with 400."""
        req = Request.blank('/sda1/p/a/',
                            environ={'REQUEST_METHOD': 'REPLICATE'},
                            headers={})
        # check valuerror
        wsgi_input_valuerror = b'["sync" : sync, "-1"]'
        inbuf1 = WsgiBytesIO(wsgi_input_valuerror)
        req.environ['wsgi.input'] = inbuf1
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 400)
def test_REPLICATE_unknown_sync(self):
# First without existing DB file
req = Request.blank('/sda1/p/a/',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
json_string = b'["unknown_sync", "a.db"]'
inbuf = WsgiBytesIO(json_string)
req.environ['wsgi.input'] = inbuf
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
mkdirs(os.path.join(self.testdir, 'sda1', 'accounts', 'p', 'a', 'a'))
db_file = os.path.join(self.testdir, 'sda1',
storage_directory('accounts', 'p', 'a'),
'a' + '.db')
open(db_file, 'w')
req = Request.blank('/sda1/p/a/',
environ={'REQUEST_METHOD': 'REPLICATE'},
headers={})
json_string = b'["unknown_sync", "a.db"]'
inbuf = WsgiBytesIO(json_string)
req.environ['wsgi.input'] = inbuf
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 500)
    def test_HEAD_not_found(self):
        """HEAD on a missing account is 404; the X-Account-Status header
        appears only once the account was deleted but not yet reaped."""
        # Test the case in which account does not exist (can be recreated)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn('X-Account-Status', resp.headers)
        # Test the case in which account was deleted but not yet reaped
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        # HEAD after the delete: still 404, but now flagged as Deleted
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
def test_HEAD_empty_account(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 204)
self.assertEqual(resp.headers['x-account-container-count'], '0')
self.assertEqual(resp.headers['x-account-object-count'], '0')
self.assertEqual(resp.headers['x-account-bytes-used'], '0')
    def test_HEAD_with_containers(self):
        """HEAD aggregates container/object/byte counts across container
        records, and re-PUTs of the same containers update the totals."""
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '2',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        # two empty containers: counts are 2 / 0 / 0
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['x-account-container-count'], '2')
        self.assertEqual(resp.headers['x-account-object-count'], '0')
        self.assertEqual(resp.headers['x-account-bytes-used'], '0')
        # re-report the same containers with non-zero stats
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '1',
                                     'X-Bytes-Used': '2',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '2',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '3',
                                     'X-Bytes-Used': '4',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        # totals are summed across containers: 1+3 objects, 2+4 bytes
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD',
                                                  'HTTP_X_TIMESTAMP': '5'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers['x-account-container-count'], '2')
        self.assertEqual(resp.headers['x-account-object-count'], '4')
        self.assertEqual(resp.headers['x-account-bytes-used'], '6')
def test_HEAD_invalid_partition(self):
req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'HEAD',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
def test_HEAD_invalid_content_type(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'},
headers={'Accept': 'application/plain'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 406)
def test_HEAD_invalid_accept(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'},
headers={'Accept': 'application/plain;q=1;q=0.5'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
self.assertEqual(resp.body, b'')
def test_HEAD_invalid_format(self):
format = '%D1%BD%8A9' # invalid UTF-8; should be %E1%BD%8A9 (E -> D)
req = Request.blank('/sda1/p/a?format=' + format,
environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
def test_PUT_not_found(self):
req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-PUT-Timestamp': normalize_timestamp(1),
'X-DELETE-Timestamp': normalize_timestamp(0),
'X-Object-Count': '1',
'X-Bytes-Used': '1',
'X-Timestamp': normalize_timestamp(0)})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertNotIn('X-Account-Status', resp.headers)
    def test_PUT_insufficient_space(self):
        """With fallocate_reserve set to 2%, a PUT against a device whose
        statvfs reports just under 2% free space is refused with 507."""
        conf = {'devices': self.testdir,
                'mount_check': 'false',
                'fallocate_reserve': '2%'}
        account_controller = AccountController(conf)
        req = Request.blank(
            '/sda1/p/a',
            environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': '1517612949.541469'})
        statvfs_result = posix.statvfs_result([
            4096,     # f_bsize
            4096,     # f_frsize
            2854907,  # f_blocks
            59000,    # f_bfree
            57000,    # f_bavail (just under 2% free)
            1280000,  # f_files
            1266040,  # f_ffree,
            1266040,  # f_favail,
            4096,     # f_flag
            255,      # f_namemax
        ])
        with mock.patch('os.statvfs',
                        return_value=statvfs_result) as mock_statvfs:
            resp = req.get_response(account_controller)
        self.assertEqual(resp.status_int, 507)
        # the space check must have been made against the device root
        self.assertEqual(mock_statvfs.mock_calls,
                         [mock.call(os.path.join(self.testdir, 'sda1'))])
def test_PUT(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 202)
    def test_PUT_simulated_create_race(self):
        """Simulate two PUTs racing to create the same account DB.

        The intercepted broker makes the second PUT initially see a
        nonexistent db file (as if another request were mid-create) and
        restores the real path inside initialize(); the server must still
        answer 202 rather than error out.
        """
        state = ['initial']
        from swift.account.backend import AccountBroker as OrigAcBr
        class InterceptedAcBr(OrigAcBr):
            def __init__(self, *args, **kwargs):
                super(InterceptedAcBr, self).__init__(*args, **kwargs)
                if state[0] == 'initial':
                    # Do nothing initially
                    pass
                elif state[0] == 'race':
                    # Save the original db_file attribute value
                    self._saved_db_file = self.db_file
                    self._db_file += '.doesnotexist'
            def initialize(self, *args, **kwargs):
                if state[0] == 'initial':
                    # Do nothing initially
                    pass
                elif state[0] == 'race':
                    # Restore the original db_file attribute to get the race
                    # behavior
                    self._db_file = self._saved_db_file
                return super(InterceptedAcBr, self).initialize(*args, **kwargs)
        with mock.patch("swift.account.server.AccountBroker", InterceptedAcBr):
            req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                      'HTTP_X_TIMESTAMP': '0'})
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 201)
            state[0] = "race"
            req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                      'HTTP_X_TIMESTAMP': '1'})
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 202)
def test_PUT_after_DELETE(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1)})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': normalize_timestamp(1)})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(2)})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 403)
self.assertEqual(resp.body, b'Recently deleted')
self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
def test_create_reserved_namespace_account(self):
path = '/sda1/p/%s' % get_reserved_name('a')
req = Request.blank(path, method='PUT', headers={
'X-Timestamp': next(self.ts).internal})
resp = req.get_response(self.controller)
self.assertEqual(resp.status, '201 Created')
path = '/sda1/p/%s' % get_reserved_name('foo', 'bar')
req = Request.blank(path, method='PUT', headers={
'X-Timestamp': next(self.ts).internal})
resp = req.get_response(self.controller)
self.assertEqual(resp.status, '201 Created')
def test_create_invalid_reserved_namespace_account(self):
account_name = get_reserved_name('foo', 'bar')[1:]
path = '/sda1/p/%s' % account_name
req = Request.blank(path, method='PUT', headers={
'X-Timestamp': next(self.ts).internal})
resp = req.get_response(self.controller)
self.assertEqual(resp.status, '400 Bad Request')
def test_create_reserved_container_in_account(self):
# create account
path = '/sda1/p/a'
req = Request.blank(path, method='PUT', headers={
'X-Timestamp': next(self.ts).internal})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
# put null container in it
path += '/%s' % get_reserved_name('c', 'stuff')
req = Request.blank(path, method='PUT', headers={
'X-Timestamp': next(self.ts).internal,
'X-Put-Timestamp': next(self.ts).internal,
'X-Delete-Timestamp': 0,
'X-Object-Count': 0,
'X-Bytes-Used': 0,
})
resp = req.get_response(self.controller)
self.assertEqual(resp.status, '201 Created')
def test_create_invalid_reserved_container_in_account(self):
# create account
path = '/sda1/p/a'
req = Request.blank(path, method='PUT', headers={
'X-Timestamp': next(self.ts).internal})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
# put invalid container in it
path += '/%s' % get_reserved_name('c', 'stuff')[1:]
req = Request.blank(path, method='PUT', headers={
'X-Timestamp': next(self.ts).internal,
'X-Put-Timestamp': next(self.ts).internal,
'X-Delete-Timestamp': 0,
'X-Object-Count': 0,
'X-Bytes-Used': 0,
})
resp = req.get_response(self.controller)
self.assertEqual(resp.status, '400 Bad Request')
def test_PUT_non_utf8_metadata(self):
# Set metadata header
req = Request.blank(
'/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1),
'X-Account-Meta-Test': b'\xff'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
# Set sysmeta header
req = Request.blank(
'/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1),
'X-Account-Sysmeta-Access-Control': b'\xff'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
# Send other
req = Request.blank(
'/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1),
'X-Will-Not-Be-Saved': b'\xff'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 202)
    def test_utf8_metadata(self):
        """WSGI-string metadata names/values round-trip to proper UTF-8 in
        the on-disk DB, for both user meta and sysmeta, via PUT and POST."""
        ts_str = normalize_timestamp(1)
        def get_test_meta(method, headers):
            # Set metadata header
            headers.setdefault('X-Timestamp', ts_str)
            req = Request.blank(
                '/sda1/p/a', environ={'REQUEST_METHOD': method},
                headers=headers)
            resp = req.get_response(self.controller)
            self.assertIn(resp.status_int, (201, 202, 204))
            # locate the account DB file created under the test device dir
            db_path = os.path.join(*next(
                (dir_name, file_name)
                for dir_name, _, files in os.walk(self.testdir)
                for file_name in files if file_name.endswith('.db')
            ))
            broker = AccountBroker(db_path)
            # Why not use broker.metadata, you ask? Because we want to get
            # as close to the on-disk format as is reasonable.
            result = json.loads(broker.get_raw_metadata())
            # Clear it out for the next run
            with broker.get() as conn:
                conn.execute("UPDATE account_stat SET metadata=''")
                conn.commit()
            return result
        # same code point in both spellings: wsgi_str is the UTF-8 bytes of
        # U+1F44D as a (latin-1-ish) WSGI string, uni_str is the unicode char
        wsgi_str = '\xf0\x9f\x91\x8d'
        uni_str = u'\U0001f44d'
        self.assertEqual(
            get_test_meta('PUT', {'x-account-sysmeta-' + wsgi_str: wsgi_str}),
            {u'X-Account-Sysmeta-' + uni_str: [uni_str, ts_str]})
        self.assertEqual(
            get_test_meta('PUT', {'x-account-meta-' + wsgi_str: wsgi_str}),
            {u'X-Account-Meta-' + uni_str: [uni_str, ts_str]})
        self.assertEqual(
            get_test_meta('POST', {'x-account-sysmeta-' + wsgi_str: wsgi_str}),
            {u'X-Account-Sysmeta-' + uni_str: [uni_str, ts_str]})
        self.assertEqual(
            get_test_meta('POST', {'x-account-meta-' + wsgi_str: wsgi_str}),
            {u'X-Account-Meta-' + uni_str: [uni_str, ts_str]})
    def test_PUT_GET_metadata(self):
        """User metadata via PUT: set, add, update, ignore-older-timestamp,
        and remove (empty value), each verified with a GET."""
        # Set metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     'X-Account-Meta-Test': 'Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'Value')
        # Set another metadata header, ensuring old one doesn't disappear
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     'X-Account-Meta-Test2': 'Value2'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'Value')
        self.assertEqual(resp.headers.get('x-account-meta-test2'), 'Value2')
        # Update metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(3),
                     'X-Account-Meta-Test': 'New Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'New Value')
        # Send old update to metadata header; older timestamp must not win
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(2),
                     'X-Account-Meta-Test': 'Old Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(4),
                     'X-Account-Meta-Test': ''})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertNotIn('x-account-meta-test', resp.headers)
    def test_PUT_GET_sys_metadata(self):
        """System metadata via PUT: set, add, update, ignore-older-timestamp,
        and remove (empty value), each verified with a GET."""
        prefix = get_sys_meta_prefix('account')
        hdr = '%stest' % prefix
        hdr2 = '%stest2' % prefix
        # Set metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     hdr.title(): 'Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'Value')
        # Set another metadata header, ensuring old one doesn't disappear
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     hdr2.title(): 'Value2'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'Value')
        self.assertEqual(resp.headers.get(hdr2), 'Value2')
        # Update metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(3),
                     hdr.title(): 'New Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'New Value')
        # Send old update to metadata header; older timestamp must not win
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(2),
                     hdr.title(): 'Old Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(4),
                     hdr.title(): ''})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 202)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertNotIn(hdr, resp.headers)
def test_PUT_invalid_partition(self):
req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
    def test_POST_HEAD_metadata(self):
        """User metadata via POST: set, update, ignore-older-timestamp, and
        remove (empty value), each verified with a HEAD."""
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # Set metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     'X-Account-Meta-Test': 'Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204, resp.body)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'Value')
        # Update metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(3),
                     'X-Account-Meta-Test': 'New Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'New Value')
        # Send old update to metadata header; older timestamp must not win
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(2),
                     'X-Account-Meta-Test': 'Old Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get('x-account-meta-test'), 'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(4),
                     'X-Account-Meta-Test': ''})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertNotIn('x-account-meta-test', resp.headers)
    def test_POST_HEAD_sys_metadata(self):
        """System metadata via POST: set, update, ignore-older-timestamp,
        and remove (empty value), each verified with a HEAD."""
        prefix = get_sys_meta_prefix('account')
        hdr = '%stest' % prefix
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'PUT'},
            headers={'X-Timestamp': normalize_timestamp(1)})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # Set metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(1),
                     hdr.title(): 'Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'Value')
        # Update metadata header
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(3),
                     hdr.title(): 'New Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'New Value')
        # Send old update to metadata header; older timestamp must not win
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(2),
                     hdr.title(): 'Old Value'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertEqual(resp.headers.get(hdr), 'New Value')
        # Remove metadata header (by setting it to empty)
        req = Request.blank(
            '/sda1/p/a', environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': normalize_timestamp(4),
                     hdr.title(): ''})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'HEAD'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 204)
        self.assertNotIn(hdr, resp.headers)
def test_POST_invalid_partition(self):
req = Request.blank('/sda1/./a', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
    def test_POST_insufficient_space(self):
        """With fallocate_reserve set to 2%, a POST against a device whose
        statvfs reports just under 2% free space is refused with 507."""
        conf = {'devices': self.testdir,
                'mount_check': 'false',
                'fallocate_reserve': '2%'}
        account_controller = AccountController(conf)
        req = Request.blank(
            '/sda1/p/a',
            environ={'REQUEST_METHOD': 'POST'},
            headers={'X-Timestamp': '1517611584.937603'})
        statvfs_result = posix.statvfs_result([
            4096,     # f_bsize
            4096,     # f_frsize
            2854907,  # f_blocks
            59000,    # f_bfree
            57000,    # f_bavail (just under 2% free)
            1280000,  # f_files
            1266040,  # f_ffree,
            1266040,  # f_favail,
            4096,     # f_flag
            255,      # f_namemax
        ])
        with mock.patch('os.statvfs',
                        return_value=statvfs_result) as mock_statvfs:
            resp = req.get_response(account_controller)
        self.assertEqual(resp.status_int, 507)
        # the space check must have been made against the device root
        self.assertEqual(mock_statvfs.mock_calls,
                         [mock.call(os.path.join(self.testdir, 'sda1'))])
def test_POST_timestamp_not_float(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '0'},
headers={'X-Timestamp': 'not-float'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
def test_POST_after_DELETE_not_found(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 204)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'POST',
'HTTP_X_TIMESTAMP': '2'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
    def test_GET_not_found_plain(self):
        """GET on a missing account is 404; the X-Account-Status header
        appears only once the account was deleted but not yet reaped."""
        # Test the case in which account does not exist (can be recreated)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 404)
        self.assertNotIn('X-Account-Status', resp.headers)
        # Test the case in which account was deleted but not yet reaped
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE',
                                                  'HTTP_X_TIMESTAMP': '1'})
        resp = req.get_response(self.controller)
        # GET after the delete: still 404, but now flagged as Deleted
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 404)
        self.assertEqual(resp.headers['X-Account-Status'], 'Deleted')
def test_GET_not_found_json(self):
req = Request.blank('/sda1/p/a?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
def test_GET_not_found_xml(self):
req = Request.blank('/sda1/p/a?format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
def test_GET_empty_account_plain(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 204)
self.assertEqual(resp.headers['Content-Type'],
'text/plain; charset=utf-8')
def test_GET_empty_account_json(self):
req = Request.blank('/sda1/p/a?format=json',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['Content-Type'],
'application/json; charset=utf-8')
def test_GET_empty_account_xml(self):
req = Request.blank('/sda1/p/a?format=xml',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['Content-Type'],
'application/xml; charset=utf-8')
def test_GET_invalid_accept(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'},
headers={'Accept': 'application/plain;q=foo'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400)
self.assertEqual(resp.body, b'Invalid Accept header')
def test_GET_over_limit(self):
req = Request.blank(
'/sda1/p/a?limit=%d' % (ACCOUNT_LISTING_LIMIT + 1),
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 412)
    def test_GET_with_containers_plain(self):
        """Plain GET lists container names one per line; re-PUTs of the
        containers don't duplicate entries, and an unrecognized ?format=
        value falls back to the plain listing."""
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '2',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'c1', b'c2'])
        # re-report both containers with updated stats; the listing must
        # still contain each name exactly once
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '1',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '1',
                                     'X-Bytes-Used': '2',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': '2',
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '3',
                                     'X-Bytes-Used': '4',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'c1', b'c2'])
        self.assertEqual(resp.content_type, 'text/plain')
        self.assertEqual(resp.charset, 'utf-8')
        # test unknown format uses default plain
        # (the 'somethinglese' misspelling is irrelevant: any unrecognized
        # value exercises the fallback)
        req = Request.blank('/sda1/p/a?format=somethinglese',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(resp.body.strip().split(b'\n'),
                         [b'c1', b'c2'])
        self.assertEqual(resp.content_type, 'text/plain')
        self.assertEqual(resp.charset, 'utf-8')
    def test_GET_with_containers_json(self):
        """JSON GET lists each container's name, count, bytes and a
        last_modified derived from its X-Put-Timestamp; re-PUTs update the
        stats and the last_modified value."""
        put_timestamps = {}
        req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
                                                  'HTTP_X_TIMESTAMP': '0'})
        req.get_response(self.controller)
        put_timestamps['c1'] = normalize_timestamp(1)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c1'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        put_timestamps['c2'] = normalize_timestamp(2)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c2'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '0',
                                     'X-Bytes-Used': '0',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a?format=json',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(
            json.loads(resp.body),
            [{'count': 0, 'bytes': 0, 'name': 'c1',
              'last_modified': Timestamp(put_timestamps['c1']).isoformat},
             {'count': 0, 'bytes': 0, 'name': 'c2',
              'last_modified': Timestamp(put_timestamps['c2']).isoformat}])
        # re-report both containers with newer put timestamps and stats
        put_timestamps['c1'] = normalize_timestamp(3)
        req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c1'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '1',
                                     'X-Bytes-Used': '2',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        put_timestamps['c2'] = normalize_timestamp(4)
        req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': put_timestamps['c2'],
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '3',
                                     'X-Bytes-Used': '4',
                                     'X-Timestamp': normalize_timestamp(0)})
        req.get_response(self.controller)
        req = Request.blank('/sda1/p/a?format=json',
                            environ={'REQUEST_METHOD': 'GET'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(
            json.loads(resp.body),
            [{'count': 1, 'bytes': 2, 'name': 'c1',
              'last_modified': Timestamp(put_timestamps['c1']).isoformat},
             {'count': 3, 'bytes': 4, 'name': 'c2',
              'last_modified': Timestamp(put_timestamps['c2']).isoformat}])
        self.assertEqual(resp.content_type, 'application/json')
        self.assertEqual(resp.charset, 'utf-8')
def test_GET_with_containers_xml(self):
put_timestamps = {}
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
put_timestamps['c1'] = normalize_timestamp(1)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': put_timestamps['c1'],
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
put_timestamps['c2'] = normalize_timestamp(2)
req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': put_timestamps['c2'],
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.content_type, 'application/xml')
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(dom.firstChild.nodeName, 'account')
listing = \
[n for n in dom.firstChild.childNodes if n.nodeName != '#text']
self.assertEqual(len(listing), 2)
self.assertEqual(listing[0].nodeName, 'container')
container = [n for n in listing[0].childNodes if n.nodeName != '#text']
self.assertEqual(sorted([n.nodeName for n in container]),
['bytes', 'count', 'last_modified', 'name'])
node = [n for n in container if n.nodeName == 'name'][0]
self.assertEqual(node.firstChild.nodeValue, 'c1')
node = [n for n in container if n.nodeName == 'count'][0]
self.assertEqual(node.firstChild.nodeValue, '0')
node = [n for n in container if n.nodeName == 'bytes'][0]
self.assertEqual(node.firstChild.nodeValue, '0')
node = [n for n in container if n.nodeName == 'last_modified'][0]
self.assertEqual(node.firstChild.nodeValue,
Timestamp(put_timestamps['c1']).isoformat)
self.assertEqual(listing[-1].nodeName, 'container')
container = \
[n for n in listing[-1].childNodes if n.nodeName != '#text']
self.assertEqual(sorted([n.nodeName for n in container]),
['bytes', 'count', 'last_modified', 'name'])
node = [n for n in container if n.nodeName == 'name'][0]
self.assertEqual(node.firstChild.nodeValue, 'c2')
node = [n for n in container if n.nodeName == 'count'][0]
self.assertEqual(node.firstChild.nodeValue, '0')
node = [n for n in container if n.nodeName == 'bytes'][0]
self.assertEqual(node.firstChild.nodeValue, '0')
node = [n for n in container if n.nodeName == 'last_modified'][0]
self.assertEqual(node.firstChild.nodeValue,
Timestamp(put_timestamps['c2']).isoformat)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '1',
'X-Bytes-Used': '2',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '2',
'X-Delete-Timestamp': '0',
'X-Object-Count': '3',
'X-Bytes-Used': '4',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(dom.firstChild.nodeName, 'account')
listing = \
[n for n in dom.firstChild.childNodes if n.nodeName != '#text']
self.assertEqual(len(listing), 2)
self.assertEqual(listing[0].nodeName, 'container')
container = [n for n in listing[0].childNodes if n.nodeName != '#text']
self.assertEqual(sorted([n.nodeName for n in container]),
['bytes', 'count', 'last_modified', 'name'])
node = [n for n in container if n.nodeName == 'name'][0]
self.assertEqual(node.firstChild.nodeValue, 'c1')
node = [n for n in container if n.nodeName == 'count'][0]
self.assertEqual(node.firstChild.nodeValue, '1')
node = [n for n in container if n.nodeName == 'bytes'][0]
self.assertEqual(node.firstChild.nodeValue, '2')
node = [n for n in container if n.nodeName == 'last_modified'][0]
self.assertEqual(node.firstChild.nodeValue,
Timestamp(put_timestamps['c1']).isoformat)
self.assertEqual(listing[-1].nodeName, 'container')
container = [
n for n in listing[-1].childNodes if n.nodeName != '#text']
self.assertEqual(sorted([n.nodeName for n in container]),
['bytes', 'count', 'last_modified', 'name'])
node = [n for n in container if n.nodeName == 'name'][0]
self.assertEqual(node.firstChild.nodeValue, 'c2')
node = [n for n in container if n.nodeName == 'count'][0]
self.assertEqual(node.firstChild.nodeValue, '3')
node = [n for n in container if n.nodeName == 'bytes'][0]
self.assertEqual(node.firstChild.nodeValue, '4')
node = [n for n in container if n.nodeName == 'last_modified'][0]
self.assertEqual(node.firstChild.nodeValue,
Timestamp(put_timestamps['c2']).isoformat)
self.assertEqual(resp.charset, 'utf-8')
def test_GET_xml_escapes_account_name(self):
req = Request.blank(
'/sda1/p/%22%27', # "'
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/%22%27?format=xml',
environ={'REQUEST_METHOD': 'GET', 'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(dom.firstChild.attributes['name'].value, '"\'')
def test_GET_xml_escapes_container_name(self):
req = Request.blank(
'/sda1/p/a',
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a/%22%3Cword', # "<word
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_PUT_TIMESTAMP': '1', 'HTTP_X_OBJECT_COUNT': '0',
'HTTP_X_DELETE_TIMESTAMP': '0', 'HTTP_X_BYTES_USED': '1'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a?format=xml',
environ={'REQUEST_METHOD': 'GET', 'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(
dom.firstChild.firstChild.nextSibling.firstChild.firstChild.data,
'"<word')
def test_GET_xml_escapes_container_name_as_subdir(self):
req = Request.blank(
'/sda1/p/a',
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a/%22%3Cword-test', # "<word-test
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1',
'HTTP_X_PUT_TIMESTAMP': '1', 'HTTP_X_OBJECT_COUNT': '0',
'HTTP_X_DELETE_TIMESTAMP': '0', 'HTTP_X_BYTES_USED': '1'})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a?format=xml&delimiter=-',
environ={'REQUEST_METHOD': 'GET', 'HTTP_X_TIMESTAMP': '1'})
resp = req.get_response(self.controller)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(
dom.firstChild.firstChild.nextSibling.attributes['name'].value,
'"<word-')
def test_GET_limit_marker_plain(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
put_timestamp = normalize_timestamp(0)
for c in range(5):
req = Request.blank(
'/sda1/p/a/c%d' % c,
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': put_timestamp,
'X-Delete-Timestamp': '0',
'X-Object-Count': '2',
'X-Bytes-Used': '3',
'X-Timestamp': put_timestamp})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?limit=3',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body.strip().split(b'\n'),
[b'c0', b'c1', b'c2'])
req = Request.blank('/sda1/p/a?limit=3&marker=c2',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body.strip().split(b'\n'),
[b'c3', b'c4'])
def test_GET_limit_marker_json(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
for c in range(5):
put_timestamp = normalize_timestamp(c + 1)
req = Request.blank(
'/sda1/p/a/c%d' % c,
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': put_timestamp,
'X-Delete-Timestamp': '0',
'X-Object-Count': '2',
'X-Bytes-Used': '3',
'X-Timestamp': put_timestamp})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?limit=3&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
expected = [{'count': 2, 'bytes': 3, 'name': 'c0',
'last_modified': Timestamp('1').isoformat},
{'count': 2, 'bytes': 3, 'name': 'c1',
'last_modified': Timestamp('2').isoformat},
{'count': 2, 'bytes': 3, 'name': 'c2',
'last_modified': Timestamp('3').isoformat}]
self.assertEqual(json.loads(resp.body), expected)
req = Request.blank('/sda1/p/a?limit=3&marker=c2&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
expected = [{'count': 2, 'bytes': 3, 'name': 'c3',
'last_modified': Timestamp('4').isoformat},
{'count': 2, 'bytes': 3, 'name': 'c4',
'last_modified': Timestamp('5').isoformat}]
self.assertEqual(json.loads(resp.body), expected)
def test_GET_limit_marker_xml(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
for c in range(5):
put_timestamp = normalize_timestamp(c + 1)
req = Request.blank(
'/sda1/p/a/c%d' % c,
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': put_timestamp,
'X-Delete-Timestamp': '0',
'X-Object-Count': '2',
'X-Bytes-Used': '3',
'X-Timestamp': put_timestamp})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?limit=3&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(dom.firstChild.nodeName, 'account')
listing = \
[n for n in dom.firstChild.childNodes if n.nodeName != '#text']
self.assertEqual(len(listing), 3)
self.assertEqual(listing[0].nodeName, 'container')
container = [n for n in listing[0].childNodes if n.nodeName != '#text']
self.assertEqual(sorted([n.nodeName for n in container]),
['bytes', 'count', 'last_modified', 'name'])
node = [n for n in container if n.nodeName == 'name'][0]
self.assertEqual(node.firstChild.nodeValue, 'c0')
node = [n for n in container if n.nodeName == 'count'][0]
self.assertEqual(node.firstChild.nodeValue, '2')
node = [n for n in container if n.nodeName == 'bytes'][0]
self.assertEqual(node.firstChild.nodeValue, '3')
node = [n for n in container if n.nodeName == 'last_modified'][0]
self.assertEqual(node.firstChild.nodeValue,
Timestamp('1').isoformat)
self.assertEqual(listing[-1].nodeName, 'container')
container = [
n for n in listing[-1].childNodes if n.nodeName != '#text']
self.assertEqual(sorted([n.nodeName for n in container]),
['bytes', 'count', 'last_modified', 'name'])
node = [n for n in container if n.nodeName == 'name'][0]
self.assertEqual(node.firstChild.nodeValue, 'c2')
node = [n for n in container if n.nodeName == 'count'][0]
self.assertEqual(node.firstChild.nodeValue, '2')
node = [n for n in container if n.nodeName == 'bytes'][0]
self.assertEqual(node.firstChild.nodeValue, '3')
node = [n for n in container if n.nodeName == 'last_modified'][0]
self.assertEqual(node.firstChild.nodeValue,
Timestamp('3').isoformat)
req = Request.blank('/sda1/p/a?limit=3&marker=c2&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(dom.firstChild.nodeName, 'account')
listing = \
[n for n in dom.firstChild.childNodes if n.nodeName != '#text']
self.assertEqual(len(listing), 2)
self.assertEqual(listing[0].nodeName, 'container')
container = [n for n in listing[0].childNodes if n.nodeName != '#text']
self.assertEqual(sorted([n.nodeName for n in container]),
['bytes', 'count', 'last_modified', 'name'])
node = [n for n in container if n.nodeName == 'name'][0]
self.assertEqual(node.firstChild.nodeValue, 'c3')
node = [n for n in container if n.nodeName == 'count'][0]
self.assertEqual(node.firstChild.nodeValue, '2')
node = [n for n in container if n.nodeName == 'bytes'][0]
self.assertEqual(node.firstChild.nodeValue, '3')
node = [n for n in container if n.nodeName == 'last_modified'][0]
self.assertEqual(node.firstChild.nodeValue,
Timestamp('4').isoformat)
self.assertEqual(listing[-1].nodeName, 'container')
container = [
n for n in listing[-1].childNodes if n.nodeName != '#text']
self.assertEqual(sorted([n.nodeName for n in container]),
['bytes', 'count', 'last_modified', 'name'])
node = [n for n in container if n.nodeName == 'name'][0]
self.assertEqual(node.firstChild.nodeValue, 'c4')
node = [n for n in container if n.nodeName == 'count'][0]
self.assertEqual(node.firstChild.nodeValue, '2')
node = [n for n in container if n.nodeName == 'bytes'][0]
self.assertEqual(node.firstChild.nodeValue, '3')
node = [n for n in container if n.nodeName == 'last_modified'][0]
self.assertEqual(node.firstChild.nodeValue,
Timestamp('5').isoformat)
def test_GET_accept_wildcard(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = '*/*'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, b'c1\n')
def test_GET_accept_application_wildcard(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/*'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(len(json.loads(resp.body)), 1)
def test_GET_accept_json(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/json'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(len(json.loads(resp.body)), 1)
def test_GET_accept_xml(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/xml'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
self.assertEqual(dom.firstChild.nodeName, 'account')
listing = \
[n for n in dom.firstChild.childNodes if n.nodeName != '#text']
self.assertEqual(len(listing), 1)
def test_GET_accept_conflicting(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?format=plain',
environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/json'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, b'c1\n')
def test_GET_accept_not_valid(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/xml*'
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 406)
def test_GET_prefix_delimiter_plain(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for first in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s' % first,
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
for second in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s.%s' % (first, second),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body.strip().split(b'\n'),
[b'sub.'])
req = Request.blank('/sda1/p/a?prefix=sub.&delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(
resp.body.strip().split(b'\n'),
[b'sub.0', b'sub.0.', b'sub.1', b'sub.1.', b'sub.2', b'sub.2.'])
req = Request.blank('/sda1/p/a?prefix=sub.1.&delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body.strip().split(b'\n'),
[b'sub.1.0', b'sub.1.1', b'sub.1.2'])
def test_GET_prefix_delimiter_json(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for first in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s' % first,
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
for second in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s.%s' % (first, second),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?delimiter=.&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual([n.get('name', 's:' + n.get('subdir', 'error'))
for n in json.loads(resp.body)], ['s:sub.'])
req = Request.blank('/sda1/p/a?prefix=sub.&delimiter=.&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(
[n.get('name', 's:' + n.get('subdir', 'error'))
for n in json.loads(resp.body)],
['sub.0', 's:sub.0.', 'sub.1', 's:sub.1.', 'sub.2', 's:sub.2.'])
req = Request.blank('/sda1/p/a?prefix=sub.1.&delimiter=.&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(
[n.get('name', 's:' + n.get('subdir', 'error'))
for n in json.loads(resp.body)],
['sub.1.0', 'sub.1.1', 'sub.1.2'])
def test_GET_prefix_delimiter_xml(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for first in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s' % first,
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
for second in range(3):
req = Request.blank(
'/sda1/p/a/sub.%s.%s' % (first, second),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank(
'/sda1/p/a?delimiter=.&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
listing = []
for node1 in dom.firstChild.childNodes:
if node1.nodeName == 'subdir':
listing.append('s:' + node1.attributes['name'].value)
elif node1.nodeName == 'container':
for node2 in node1.childNodes:
if node2.nodeName == 'name':
listing.append(node2.firstChild.nodeValue)
self.assertEqual(listing, ['s:sub.'])
req = Request.blank(
'/sda1/p/a?prefix=sub.&delimiter=.&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
listing = []
for node1 in dom.firstChild.childNodes:
if node1.nodeName == 'subdir':
listing.append('s:' + node1.attributes['name'].value)
elif node1.nodeName == 'container':
for node2 in node1.childNodes:
if node2.nodeName == 'name':
listing.append(node2.firstChild.nodeValue)
self.assertEqual(
listing,
['sub.0', 's:sub.0.', 'sub.1', 's:sub.1.', 'sub.2', 's:sub.2.'])
req = Request.blank(
'/sda1/p/a?prefix=sub.1.&delimiter=.&format=xml',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
dom = xml.dom.minidom.parseString(resp.body)
listing = []
for node1 in dom.firstChild.childNodes:
if node1.nodeName == 'subdir':
listing.append('s:' + node1.attributes['name'].value)
elif node1.nodeName == 'container':
for node2 in node1.childNodes:
if node2.nodeName == 'name':
listing.append(node2.firstChild.nodeValue)
self.assertEqual(listing, ['sub.1.0', 'sub.1.1', 'sub.1.2'])
def test_GET_leading_delimiter(self):
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT',
'HTTP_X_TIMESTAMP': '0'})
resp = req.get_response(self.controller)
for first in range(3):
req = Request.blank(
'/sda1/p/a/.sub.%s' % first,
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
for second in range(3):
req = Request.blank(
'/sda1/p/a/.sub.%s.%s' % (first, second),
environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a?delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body.strip().split(b'\n'),
[b'.'])
req = Request.blank('/sda1/p/a?prefix=.&delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body.strip().split(b'\n'),
[b'.sub.'])
req = Request.blank('/sda1/p/a?prefix=.sub.&delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(
resp.body.strip().split(b'\n'),
[b'.sub.0', b'.sub.0.', b'.sub.1', b'.sub.1.',
b'.sub.2', b'.sub.2.'])
req = Request.blank('/sda1/p/a?prefix=.sub.1.&delimiter=.',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body.strip().split(b'\n'),
[b'.sub.1.0', b'.sub.1.1', b'.sub.1.2'])
def test_GET_multichar_delimiter(self):
self.maxDiff = None
req = Request.blank('/sda1/p/a', method='PUT', headers={
'x-timestamp': '0'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201, resp.body)
for i in ('US~~TX~~A', 'US~~TX~~B', 'US~~OK~~A', 'US~~OK~~B',
'US~~OK~Tulsa~~A', 'US~~OK~Tulsa~~B',
'US~~UT~~A', 'US~~UT~~~B'):
req = Request.blank('/sda1/p/a/%s' % i, method='PUT', headers={
'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank(
'/sda1/p/a?prefix=US~~&delimiter=~~&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
json.loads(resp.body),
[{"subdir": "US~~OK~Tulsa~~"},
{"subdir": "US~~OK~~"},
{"subdir": "US~~TX~~"},
{"subdir": "US~~UT~~"}])
req = Request.blank(
'/sda1/p/a?prefix=US~~&delimiter=~~&format=json&reverse=on',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
json.loads(resp.body),
[{"subdir": "US~~UT~~"},
{"subdir": "US~~TX~~"},
{"subdir": "US~~OK~~"},
{"subdir": "US~~OK~Tulsa~~"}])
req = Request.blank(
'/sda1/p/a?prefix=US~~UT&delimiter=~~&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
json.loads(resp.body),
[{"subdir": "US~~UT~~"}])
req = Request.blank(
'/sda1/p/a?prefix=US~~UT&delimiter=~~&format=json&reverse=on',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
json.loads(resp.body),
[{"subdir": "US~~UT~~"}])
req = Request.blank(
'/sda1/p/a?prefix=US~~UT~&delimiter=~~&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
[{k: v for k, v in item.items() if k in ('subdir', 'name')}
for item in json.loads(resp.body)],
[{"name": "US~~UT~~A"},
{"subdir": "US~~UT~~~"}])
req = Request.blank(
'/sda1/p/a?prefix=US~~UT~&delimiter=~~&format=json&reverse=on',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
[{k: v for k, v in item.items() if k in ('subdir', 'name')}
for item in json.loads(resp.body)],
[{"subdir": "US~~UT~~~"},
{"name": "US~~UT~~A"}])
req = Request.blank(
'/sda1/p/a?prefix=US~~UT~~&delimiter=~~&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
[{k: v for k, v in item.items() if k in ('subdir', 'name')}
for item in json.loads(resp.body)],
[{"name": "US~~UT~~A"},
{"name": "US~~UT~~~B"}])
req = Request.blank(
'/sda1/p/a?prefix=US~~UT~~&delimiter=~~&format=json&reverse=on',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
[{k: v for k, v in item.items() if k in ('subdir', 'name')}
for item in json.loads(resp.body)],
[{"name": "US~~UT~~~B"},
{"name": "US~~UT~~A"}])
req = Request.blank(
'/sda1/p/a?prefix=US~~UT~~~&delimiter=~~&format=json',
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(
[{k: v for k, v in item.items() if k in ('subdir', 'name')}
for item in json.loads(resp.body)],
[{"name": "US~~UT~~~B"}])
def _expected_listing(self, containers):
return [dict(
last_modified=c['timestamp'].isoformat, **{
k: v for k, v in c.items()
if k != 'timestamp'
}) for c in sorted(containers, key=lambda c: c['name'])]
def _report_containers(self, containers, account='a'):
req = Request.blank('/sda1/p/%s' % account, method='PUT', headers={
'x-timestamp': next(self.ts).internal})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int // 100, 2, resp.body)
for container in containers:
path = '/sda1/p/%s/%s' % (account, container['name'])
req = Request.blank(path, method='PUT', headers={
'X-Put-Timestamp': container['timestamp'].internal,
'X-Delete-Timestamp': container.get(
'deleted', Timestamp(0)).internal,
'X-Object-Count': container['count'],
'X-Bytes-Used': container['bytes'],
})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int // 100, 2, resp.body)
def test_delimiter_with_reserved_and_no_public(self):
containers = [{
'name': get_reserved_name('null', 'test01'),
'bytes': 200,
'count': 2,
'timestamp': next(self.ts),
}]
self._report_containers(containers)
req = Request.blank('/sda1/p/a', headers={
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body), [])
req = Request.blank('/sda1/p/a', headers={
'X-Backend-Allow-Reserved-Names': 'true',
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body),
self._expected_listing(containers))
req = Request.blank('/sda1/p/a?prefix=%s&delimiter=l' %
get_reserved_name('nul'), headers={
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body), [])
req = Request.blank('/sda1/p/a?prefix=%s&delimiter=l' %
get_reserved_name('nul'), headers={
'X-Backend-Allow-Reserved-Names': 'true',
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body), [{
'subdir': '%s' % get_reserved_name('null')}])
def test_delimiter_with_reserved_and_public(self):
containers = [{
'name': get_reserved_name('null', 'test01'),
'bytes': 200,
'count': 2,
'timestamp': next(self.ts),
}, {
'name': 'nullish',
'bytes': 10,
'count': 10,
'timestamp': next(self.ts),
}]
self._report_containers(containers)
req = Request.blank('/sda1/p/a?prefix=nul&delimiter=l', headers={
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body), [{'subdir': 'null'}])
# allow-reserved header doesn't really make a difference
req = Request.blank('/sda1/p/a?prefix=nul&delimiter=l', headers={
'X-Backend-Allow-Reserved-Names': 'true',
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body), [{'subdir': 'null'}])
req = Request.blank('/sda1/p/a?prefix=%s&delimiter=l' %
get_reserved_name('nul'), headers={
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body), [])
req = Request.blank('/sda1/p/a?prefix=%s&delimiter=l' %
get_reserved_name('nul'), headers={
'X-Backend-Allow-Reserved-Names': 'true',
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body), [{
'subdir': '%s' % get_reserved_name('null')}])
req = Request.blank('/sda1/p/a?delimiter=%00', headers={
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body),
self._expected_listing(containers)[1:])
req = Request.blank('/sda1/p/a?delimiter=%00', headers={
'X-Backend-Allow-Reserved-Names': 'true',
'Accept': 'application/json'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200, resp.body)
self.assertEqual(json.loads(resp.body),
[{'subdir': '\x00'}] +
self._expected_listing(containers)[1:])
    def test_markers_with_reserved(self):
        """Markers using reserved (null-prefixed) names only page through
        reserved containers when X-Backend-Allow-Reserved-Names is set.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test02'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # without the allow-reserved header a reserved marker hides all rows
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [])
        # with the header the same marker yields the full reserved listing
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers))
        # marker at the first container name skips past it
        req = Request.blank('/sda1/p/a?marker=%s' % quote(
            self._expected_listing(containers)[0]['name']), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
        containers.append({
            'name': get_reserved_name('null', 'test03'),
            'bytes': 300,
            'count': 30,
            'timestamp': next(self.ts),
        })
        self._report_containers(containers)
        # markers keep paging correctly after more containers arrive
        req = Request.blank('/sda1/p/a?marker=%s' % quote(
            self._expected_listing(containers)[0]['name']), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
        req = Request.blank('/sda1/p/a?marker=%s' % quote(
            self._expected_listing(containers)[1]['name']), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[-1:])
    def test_prefix_with_reserved(self):
        """A reserved-name prefix matches nothing for ordinary clients but
        matches the reserved containers for backend/internal clients.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test02'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'foo'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('nullish'),
            'bytes': 300,
            'count': 32,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # reserved prefix is invisible without the allow-reserved header
        req = Request.blank('/sda1/p/a?prefix=%s' %
                            get_reserved_name('null', 'test'), headers={
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [])
        # internal clients see exactly the matching reserved containers
        req = Request.blank('/sda1/p/a?prefix=%s' %
                            get_reserved_name('null', 'test'), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers[:2]))
    def test_prefix_and_delim_with_reserved(self):
        """Combining a reserved prefix with the reserved delimiter rolls
        reserved containers up into a subdir entry, but only for backend
        clients that send X-Backend-Allow-Reserved-Names.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test02'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'foo'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('nullish'),
            'bytes': 300,
            'count': 32,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # ordinary clients get an empty listing for reserved prefixes
        req = Request.blank('/sda1/p/a?prefix=%s&delimiter=%s' % (
            get_reserved_name('null'), get_reserved_name()), headers={
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body), [])
        # backend clients get a rolled-up subdir plus the trailing match
        req = Request.blank('/sda1/p/a?prefix=%s&delimiter=%s' % (
            get_reserved_name('null'), get_reserved_name()), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        expected = [{'subdir': get_reserved_name('null', '')}] + \
            self._expected_listing(containers[-1:])
        self.assertEqual(json.loads(resp.body), expected)
    def test_reserved_markers_with_non_reserved(self):
        """With a mix of reserved and plain container names, a reserved
        marker filters reserved rows for ordinary clients but not for
        backend clients.
        """
        containers = [{
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test02'),
            'bytes': 10,
            'count': 10,
            'timestamp': next(self.ts),
        }, {
            'name': 'nullish',
            'bytes': 300,
            'count': 32,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # backend client sees everything after the reserved marker
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers))
        # ordinary client only sees the non-reserved names
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         [c for c in self._expected_listing(containers)
                          if get_reserved_name() not in c['name']])
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers))
        # marker at the first listed name pages past it
        req = Request.blank('/sda1/p/a?marker=%s' % quote(
            self._expected_listing(containers)[0]['name']), headers={
                'X-Backend-Allow-Reserved-Names': 'true',
                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
    def test_null_markers(self):
        """Markers built from the bare reserved prefix sort before plain
        names; visibility of reserved rows depends on the backend header.
        """
        containers = [{
            'name': get_reserved_name('null', ''),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': get_reserved_name('null', 'test01'),
            'bytes': 200,
            'count': 2,
            'timestamp': next(self.ts),
        }, {
            'name': 'null',
            'bytes': 300,
            'count': 32,
            'timestamp': next(self.ts),
        }]
        self._report_containers(containers)
        # ordinary client: only the plain 'null' container is visible
        req = Request.blank('/sda1/p/a?marker=%s' % get_reserved_name('null'),
                            headers={'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[-1:])
        # backend client: the same marker returns every container
        req = Request.blank('/sda1/p/a?marker=%s' % get_reserved_name('null'),
                            headers={'X-Backend-Allow-Reserved-Names': 'true',
                                     'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers))
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', ''), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
        req = Request.blank('/sda1/p/a?marker=%s' %
                            get_reserved_name('null', 'test00'), headers={
                                'X-Backend-Allow-Reserved-Names': 'true',
                                'Accept': 'application/json'})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 200, resp.body)
        self.assertEqual(json.loads(resp.body),
                         self._expected_listing(containers)[1:])
def test_through_call(self):
inbuf = BytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.write(args[0])
self.controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '404 ')
def test_through_call_invalid_path(self):
inbuf = BytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.write(args[0])
self.controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/bob',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '400 ')
def test_through_call_invalid_path_utf8(self):
inbuf = BytesIO()
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.write(args[0])
self.controller.__call__({'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': '/sda1/p/a/c\xd8\x3e%20',
'SERVER_NAME': '127.0.0.1',
'SERVER_PORT': '8080',
'SERVER_PROTOCOL': 'HTTP/1.0',
'CONTENT_LENGTH': '0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': inbuf,
'wsgi.errors': errbuf,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '412 ')
def test_invalid_method_doesnt_exist(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.write(args[0])
self.controller.__call__({'REQUEST_METHOD': 'method_doesnt_exist',
'PATH_INFO': '/sda1/p/a'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_invalid_method_is_not_public(self):
errbuf = StringIO()
outbuf = StringIO()
def start_response(*args):
outbuf.write(args[0])
self.controller.__call__({'REQUEST_METHOD': '__init__',
'PATH_INFO': '/sda1/p/a'},
start_response)
self.assertEqual(errbuf.getvalue(), '')
self.assertEqual(outbuf.getvalue()[:4], '405 ')
def test_params_format(self):
Request.blank('/sda1/p/a',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'}).get_response(
self.controller)
for format in ('xml', 'json'):
req = Request.blank('/sda1/p/a?format=%s' % format,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
def test_params_utf8(self):
# Bad UTF8 sequence, all parameters should cause 400 error
for param in ('delimiter', 'limit', 'marker', 'prefix', 'end_marker',
'format'):
req = Request.blank('/sda1/p/a?%s=\xce' % param,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 400,
"%d on param %s" % (resp.status_int, param))
Request.blank('/sda1/p/a',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'}).get_response(
self.controller)
# Good UTF8 sequence, ignored for limit, doesn't affect other queries
for param in ('limit', 'marker', 'prefix', 'end_marker', 'format',
'delimiter'):
req = Request.blank('/sda1/p/a?%s=\xce\xa9' % param,
environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 204,
"%d on param %s" % (resp.status_int, param))
def test_PUT_auto_create(self):
headers = {'x-put-timestamp': normalize_timestamp(1),
'x-delete-timestamp': normalize_timestamp(0),
'x-object-count': '0',
'x-bytes-used': '0'}
req = Request.blank('/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
req = Request.blank('/sda1/p/.a/c',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/.c',
environ={'REQUEST_METHOD': 'PUT'},
headers=dict(headers))
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
def test_content_type_on_HEAD(self):
Request.blank('/sda1/p/a',
headers={'X-Timestamp': normalize_timestamp(1)},
environ={'REQUEST_METHOD': 'PUT'}).get_response(
self.controller)
env = {'REQUEST_METHOD': 'HEAD'}
req = Request.blank('/sda1/p/a?format=xml', environ=env)
resp = req.get_response(self.controller)
self.assertEqual(resp.content_type, 'application/xml')
req = Request.blank('/sda1/p/a?format=json', environ=env)
resp = req.get_response(self.controller)
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(resp.charset, 'utf-8')
req = Request.blank('/sda1/p/a', environ=env)
resp = req.get_response(self.controller)
self.assertEqual(resp.content_type, 'text/plain')
self.assertEqual(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a', headers={'Accept': 'application/json'}, environ=env)
resp = req.get_response(self.controller)
self.assertEqual(resp.content_type, 'application/json')
self.assertEqual(resp.charset, 'utf-8')
req = Request.blank(
'/sda1/p/a', headers={'Accept': 'application/xml'}, environ=env)
resp = req.get_response(self.controller)
self.assertEqual(resp.content_type, 'application/xml')
self.assertEqual(resp.charset, 'utf-8')
def test_serv_reserv(self):
# Test replication_server flag was set from configuration file.
conf = {'devices': self.testdir, 'mount_check': 'false'}
self.assertTrue(AccountController(conf).replication_server)
for val in [True, '1', 'True', 'true']:
conf['replication_server'] = val
self.assertTrue(AccountController(conf).replication_server)
for val in [False, 0, '0', 'False', 'false', 'test_string']:
conf['replication_server'] = val
self.assertFalse(AccountController(conf).replication_server)
def test_list_allowed_methods(self):
# Test list of allowed_methods
obj_methods = ['DELETE', 'PUT', 'HEAD', 'GET', 'POST']
repl_methods = ['REPLICATE']
for method_name in obj_methods:
method = getattr(self.controller, method_name)
self.assertFalse(hasattr(method, 'replication'))
for method_name in repl_methods:
method = getattr(self.controller, method_name)
self.assertEqual(method.replication, True)
    def test_correct_allowed_method(self):
        """A public, non-replication method is dispatched by __call__
        even when replication_server is false.
        """
        # Test correct work for allowed method using
        # swift.account.server.AccountController.__call__
        inbuf = BytesIO()
        errbuf = StringIO()
        self.controller = AccountController(
            {'devices': self.testdir,
             'mount_check': 'false',
             'replication_server': 'false'})

        def start_response(*args):
            pass
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        method_res = mock.MagicMock()
        # patch PUT with a @public mock and check the controller routes
        # the request straight to it
        mock_method = public(lambda x: mock.MagicMock(return_value=method_res))
        with mock.patch.object(self.controller, method,
                               new=mock_method):
            mock_method.replication = False
            response = self.controller(env, start_response)
            self.assertEqual(response, method_res)
    def test_not_allowed_method(self):
        """A replication-only method is refused with 405 when
        replication_server is false.
        """
        # Test correct work for NOT allowed method using
        # swift.account.server.AccountController.__call__
        inbuf = BytesIO()
        errbuf = StringIO()
        self.controller = AccountController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false'})

        def start_response(*args):
            pass
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}
        answer = [b'<html><h1>Method Not Allowed</h1><p>The method is not '
                  b'allowed for this resource.</p></html>']
        # mark the patched PUT as replication-only so __call__ refuses it
        mock_method = replication(public(lambda x: mock.MagicMock()))
        with mock.patch.object(self.controller, method,
                               new=mock_method):
            mock_method.replication = True
            response = self.controller.__call__(env, start_response)
            self.assertEqual(response, answer)
    def test_replicaiton_server_call_all_methods(self):
        """With replication_server true, the ordinary public methods are
        still served (2xx responses, no errors).
        """
        # NOTE: "replicaiton" is a long-standing typo in the method name;
        # kept as-is because the test runner discovers tests by name.
        inbuf = BytesIO()
        errbuf = StringIO()
        outbuf = StringIO()
        self.controller = AccountController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'true'})

        def start_response(*args):
            outbuf.write(args[0])
        obj_methods = ['PUT', 'HEAD', 'GET', 'POST', 'DELETE', 'OPTIONS']
        for method in obj_methods:
            env = {'REQUEST_METHOD': method,
                   'SCRIPT_NAME': '',
                   'PATH_INFO': '/sda1/p/a',
                   'SERVER_NAME': '127.0.0.1',
                   'SERVER_PORT': '8080',
                   'SERVER_PROTOCOL': 'HTTP/1.0',
                   'HTTP_X_TIMESTAMP': next(self.ts).internal,
                   'CONTENT_LENGTH': '0',
                   'wsgi.version': (1, 0),
                   'wsgi.url_scheme': 'http',
                   'wsgi.input': inbuf,
                   'wsgi.errors': errbuf,
                   'wsgi.multithread': False,
                   'wsgi.multiprocess': False,
                   'wsgi.run_once': False}
            self.controller(env, start_response)
            self.assertEqual(errbuf.getvalue(), '')
            self.assertIn(outbuf.getvalue()[:4], ('200 ', '201 ', '204 '))
    def test__call__raise_timeout(self):
        """An exception raised by a handler method is caught by __call__,
        returned as a traceback body and logged at error level.
        """
        inbuf = WsgiBytesIO()
        errbuf = StringIO()
        self.logger = debug_logger('test')
        self.account_controller = AccountController(
            {'devices': self.testdir, 'mount_check': 'false',
             'replication_server': 'false', 'log_requests': 'false'},
            logger=self.logger)

        def start_response(*args):
            pass
        method = 'PUT'
        env = {'REQUEST_METHOD': method,
               'SCRIPT_NAME': '',
               'PATH_INFO': '/sda1/p/a/c',
               'SERVER_NAME': '127.0.0.1',
               'SERVER_PORT': '8080',
               'SERVER_PROTOCOL': 'HTTP/1.0',
               'CONTENT_LENGTH': '0',
               'wsgi.version': (1, 0),
               'wsgi.url_scheme': 'http',
               'wsgi.input': inbuf,
               'wsgi.errors': errbuf,
               'wsgi.multithread': False,
               'wsgi.multiprocess': False,
               'wsgi.run_once': False}

        # patched PUT blows up unconditionally
        @public
        def mock_put_method(*args, **kwargs):
            raise Exception()

        with mock.patch.object(self.account_controller, method,
                               new=mock_put_method):
            response = self.account_controller.__call__(env, start_response)
            self.assertTrue(response[0].decode('ascii').startswith(
                'Traceback (most recent call last):'))
            self.assertEqual(self.logger.get_lines_for_level('error'), [
                'ERROR __call__ error with %(method)s %(path)s : ' % {
                    'method': 'PUT', 'path': '/sda1/p/a/c'},
            ])
            self.assertEqual(self.logger.get_lines_for_level('info'), [])
def test_GET_log_requests_true(self):
self.controller.logger = debug_logger()
self.controller.log_requests = True
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertTrue(self.controller.logger.log_dict['info'])
def test_GET_log_requests_false(self):
self.controller.logger = debug_logger()
self.controller.log_requests = False
req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 404)
self.assertFalse(self.controller.logger.log_dict['info'])
    def test_log_line_format(self):
        """The access-log line has the expected format with a mocked
        clock and pid; the 2.0000 duration comes from the time.time
        side_effect sequence below.
        """
        req = Request.blank(
            '/sda1/p/a',
            environ={'REQUEST_METHOD': 'HEAD', 'REMOTE_ADDR': '1.2.3.4'})
        self.controller.logger = debug_logger()
        # successive time.time() calls return these values; the request
        # appears to take 10002.0 - 10000.0 = 2 seconds
        with mock.patch(
                'time.time',
                mock.MagicMock(side_effect=[10000.0, 10001.0, 10002.0,
                                            10002.0, 10002.0])):
            with mock.patch(
                    'os.getpid', mock.MagicMock(return_value=1234)):
                req.get_response(self.controller)
        self.assertEqual(
            self.controller.logger.get_lines_for_level('info'),
            ['1.2.3.4 - - [01/Jan/1970:02:46:42 +0000] "HEAD /sda1/p/a" 404 '
             '- "-" "-" "-" 2.0000 "-" 1234 -'])
    def test_policy_stats_with_legacy(self):
        """A container PUT without a storage-policy index is rolled up
        under policy 0 in the account stats headers.
        """
        ts = itertools.count()
        # create the account
        req = Request.blank('/sda1/p/a', method='PUT', headers={
            'X-Timestamp': normalize_timestamp(next(ts))})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)  # sanity
        # add a container (no X-Backend-Storage-Policy-Index => legacy)
        req = Request.blank('/sda1/p/a/c1', method='PUT', headers={
            'X-Put-Timestamp': normalize_timestamp(next(ts)),
            'X-Delete-Timestamp': '0',
            'X-Object-Count': '2',
            'X-Bytes-Used': '4',
        })
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # read back rollup
        for method in ('GET', 'HEAD'):
            req = Request.blank('/sda1/p/a', method=method)
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int // 100, 2)
            self.assertEqual(resp.headers['X-Account-Object-Count'], '2')
            self.assertEqual(resp.headers['X-Account-Bytes-Used'], '4')
            self.assertEqual(
                resp.headers['X-Account-Storage-Policy-%s-Object-Count' %
                             POLICIES[0].name], '2')
            self.assertEqual(
                resp.headers['X-Account-Storage-Policy-%s-Bytes-Used' %
                             POLICIES[0].name], '4')
            self.assertEqual(
                resp.headers['X-Account-Storage-Policy-%s-Container-Count' %
                             POLICIES[0].name], '1')
    def test_policy_stats_non_default(self):
        """Stats for a container in a non-default policy show up under
        that policy's name in the account rollup headers.
        """
        ts = itertools.count()
        # create the account
        req = Request.blank('/sda1/p/a', method='PUT', headers={
            'X-Timestamp': normalize_timestamp(next(ts))})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)  # sanity
        # add a container in a randomly chosen non-default policy
        non_default_policies = [p for p in POLICIES if not p.is_default]
        policy = random.choice(non_default_policies)
        req = Request.blank('/sda1/p/a/c1', method='PUT', headers={
            'X-Put-Timestamp': normalize_timestamp(next(ts)),
            'X-Delete-Timestamp': '0',
            'X-Object-Count': '2',
            'X-Bytes-Used': '4',
            'X-Backend-Storage-Policy-Index': policy.idx,
        })
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # read back rollup
        for method in ('GET', 'HEAD'):
            req = Request.blank('/sda1/p/a', method=method)
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int // 100, 2)
            self.assertEqual(resp.headers['X-Account-Object-Count'], '2')
            self.assertEqual(resp.headers['X-Account-Bytes-Used'], '4')
            self.assertEqual(
                resp.headers['X-Account-Storage-Policy-%s-Object-Count' %
                             policy.name], '2')
            self.assertEqual(
                resp.headers['X-Account-Storage-Policy-%s-Bytes-Used' %
                             policy.name], '4')
            self.assertEqual(
                resp.headers['X-Account-Storage-Policy-%s-Container-Count' %
                             policy.name], '1')
def test_empty_policy_stats(self):
ts = itertools.count()
# create the account
req = Request.blank('/sda1/p/a', method='PUT', headers={
'X-Timestamp': normalize_timestamp(next(ts))})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201) # sanity
for method in ('GET', 'HEAD'):
req = Request.blank('/sda1/p/a', method=method)
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int // 100, 2)
for key in resp.headers:
self.assertNotIn('storage-policy', key.lower())
    def test_empty_except_for_used_policies(self):
        """Per-policy stat headers appear only for policies that actually
        hold containers.
        """
        ts = itertools.count()
        # create the account
        req = Request.blank('/sda1/p/a', method='PUT', headers={
            'X-Timestamp': normalize_timestamp(next(ts))})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)  # sanity
        # starts empty
        for method in ('GET', 'HEAD'):
            req = Request.blank('/sda1/p/a', method=method)
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int // 100, 2)
            for key in resp.headers:
                self.assertNotIn('storage-policy', key.lower())
        # add a container
        policy = random.choice(POLICIES)
        req = Request.blank('/sda1/p/a/c1', method='PUT', headers={
            'X-Put-Timestamp': normalize_timestamp(next(ts)),
            'X-Delete-Timestamp': '0',
            'X-Object-Count': '2',
            'X-Bytes-Used': '4',
            'X-Backend-Storage-Policy-Index': policy.idx,
        })
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)
        # only policy of the created container should be in headers
        for method in ('GET', 'HEAD'):
            req = Request.blank('/sda1/p/a', method=method)
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int // 100, 2)
            for key in resp.headers:
                if 'storage-policy' in key.lower():
                    self.assertIn(policy.name.lower(), key.lower())
    def test_multiple_policies_in_use(self):
        """With one container per policy, per-policy headers report each
        policy's own counts and the values sum to the expected totals.
        """
        ts = itertools.count()
        # create the account
        req = Request.blank('/sda1/p/a', method='PUT', headers={
            'X-Timestamp': normalize_timestamp(next(ts))})
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int, 201)  # sanity
        # add some containers, one per policy, with distinguishable stats
        for policy in POLICIES:
            count = policy.idx * 100  # good as any integer
            container_path = '/sda1/p/a/c_%s' % policy.name
            req = Request.blank(
                container_path, method='PUT', headers={
                    'X-Put-Timestamp': normalize_timestamp(next(ts)),
                    'X-Delete-Timestamp': '0',
                    'X-Object-Count': count,
                    'X-Bytes-Used': count,
                    'X-Backend-Storage-Policy-Index': policy.idx,
                })
            resp = req.get_response(self.controller)
            self.assertEqual(resp.status_int, 201)
        req = Request.blank('/sda1/p/a', method='HEAD')
        resp = req.get_response(self.controller)
        self.assertEqual(resp.status_int // 100, 2)
        # check container counts in roll up headers
        total_object_count = 0
        total_bytes_used = 0
        for key in resp.headers:
            if 'storage-policy' not in key.lower():
                continue
            for policy in POLICIES:
                if policy.name.lower() not in key.lower():
                    continue
                if key.lower().endswith('object-count'):
                    object_count = int(resp.headers[key])
                    self.assertEqual(policy.idx * 100, object_count)
                    total_object_count += object_count
                if key.lower().endswith('bytes-used'):
                    bytes_used = int(resp.headers[key])
                    self.assertEqual(policy.idx * 100, bytes_used)
                    total_bytes_used += bytes_used
        expected_total_count = sum([p.idx * 100 for p in POLICIES])
        self.assertEqual(expected_total_count, total_object_count)
        self.assertEqual(expected_total_count, total_bytes_used)
@patch_policies([StoragePolicy(0, 'zero', False),
                 StoragePolicy(1, 'one', True),
                 StoragePolicy(2, 'two', False),
                 StoragePolicy(3, 'three', False)])
class TestNonLegacyDefaultStoragePolicy(TestAccountController):
    """Re-run the whole TestAccountController suite with policy 1 ('one')
    installed as the default instead of the legacy policy 0.
    """
    pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/account/test_server.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import unittest
import shutil
from swift.account import replicator, backend, server
from swift.common.utils import normalize_timestamp
from swift.common.storage_policy import POLICIES
from test.unit.common import test_db_replicator
class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
    """Account-flavored run of the shared DB replicator sync tests."""
    # configure the shared base class for account brokers
    backend = backend.AccountBroker
    datadir = server.DATADIR
    replicator_daemon = replicator.AccountReplicator

    def test_sync(self):
        """Replicating a DB to itself is a no-op counted as 'no_change'."""
        broker = self._get_broker('a', node_index=0)
        put_timestamp = normalize_timestamp(time.time())
        broker.initialize(put_timestamp)
        # "replicate" to same database
        daemon = replicator.AccountReplicator({})
        part, node = self._get_broker_part_node(broker)
        info = broker.get_replication_info()
        success = daemon._repl_to_node(node, broker, part, info)
        # nothing to do
        self.assertTrue(success)
        self.assertEqual(1, daemon.stats['no_change'])

    def test_sync_remote_missing(self):
        """A DB missing from the other nodes is pushed via complete rsync
        and the remote copies match the local one field for field.
        """
        broker = self._get_broker('a', node_index=0)
        put_timestamp = time.time()
        broker.initialize(put_timestamp)
        # "replicate" to all other nodes
        part, node = self._get_broker_part_node(broker)
        daemon = self._run_once(node)
        # complete rsync
        self.assertEqual(2, daemon.stats['rsync'])
        local_info = self._get_broker(
            'a', node_index=0).get_info()
        for i in range(1, 3):
            remote_broker = self._get_broker('a', node_index=i)
            self.assertTrue(os.path.exists(remote_broker.db_file))
            remote_info = remote_broker.get_info()
            for k, v in local_info.items():
                if k == 'id':
                    # db ids legitimately differ between copies
                    continue
                self.assertEqual(remote_info[k], v,
                                 "mismatch remote %s %r != %r" % (
                                     k, remote_info[k], v))

    def test_sync_remote_missing_most_rows(self):
        """With per_diff=1 and a remote missing most rows, replication
        falls back to a remote_merge (rsync to a temp db then merge).
        """
        put_timestamp = time.time()
        # create "local" broker
        broker = self._get_broker('a', node_index=0)
        broker.initialize(put_timestamp)
        # create "remote" broker
        remote_broker = self._get_broker('a', node_index=1)
        remote_broker.initialize(put_timestamp)
        # add a row to "local" db
        broker.put_container('/a/c', time.time(), 0, 0, 0,
                             POLICIES.default.idx)
        # replicate
        daemon = replicator.AccountReplicator({'per_diff': 1})

        def _rsync_file(db_file, remote_file, **kwargs):
            # stand-in for rsync: copy the db into the "remote" tree
            remote_server, remote_path = remote_file.split('/', 1)
            dest_path = os.path.join(self.root, remote_path)
            shutil.copy(db_file, dest_path)
            return True
        daemon._rsync_file = _rsync_file
        part, node = self._get_broker_part_node(remote_broker)
        info = broker.get_replication_info()
        success = daemon._repl_to_node(node, broker, part, info)
        self.assertTrue(success)
        # row merge
        self.assertEqual(1, daemon.stats['remote_merge'])
        local_info = self._get_broker(
            'a', node_index=0).get_info()
        remote_info = self._get_broker(
            'a', node_index=1).get_info()
        for k, v in local_info.items():
            if k == 'id':
                continue
            self.assertEqual(remote_info[k], v,
                             "mismatch remote %s %r != %r" % (
                                 k, remote_info[k], v))

    def test_sync_remote_missing_one_rows(self):
        """A remote missing only one row is caught up with an incremental
        diff, not a full resync.
        """
        put_timestamp = time.time()
        # create "local" broker
        broker = self._get_broker('a', node_index=0)
        broker.initialize(put_timestamp)
        # create "remote" broker
        remote_broker = self._get_broker('a', node_index=1)
        remote_broker.initialize(put_timestamp)
        # add some rows to both db
        for i in range(10):
            put_timestamp = time.time()
            for db in (broker, remote_broker):
                path = '/a/c_%s' % i
                db.put_container(path, put_timestamp, 0, 0, 0,
                                 POLICIES.default.idx)
        # now a row to the "local" broker only
        broker.put_container('/a/c_missing', time.time(), 0, 0, 0,
                             POLICIES.default.idx)
        # replicate
        daemon = replicator.AccountReplicator({})
        part, node = self._get_broker_part_node(remote_broker)
        info = broker.get_replication_info()
        success = daemon._repl_to_node(node, broker, part, info)
        self.assertTrue(success)
        # row merge
        self.assertEqual(1, daemon.stats['diff'])
        local_info = self._get_broker(
            'a', node_index=0).get_info()
        remote_info = self._get_broker(
            'a', node_index=1).get_info()
        for k, v in local_info.items():
            if k == 'id':
                continue
            self.assertEqual(remote_info[k], v,
                             "mismatch remote %s %r != %r" % (
                                 k, remote_info[k], v))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/unit/account/test_replicator.py |
#!/usr/bin/env python
# Copyright (c) 2020 SwiftStack, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import os
import os.path
import sys
import threading
import time
import traceback
from six.moves import urllib
from six.moves import socketserver
from six.moves import SimpleHTTPServer
try:
import selenium.webdriver
except ImportError:
selenium = None
import swiftclient.client
# Connection settings for the cluster under test; each may be overridden
# via the corresponding environment variable.
DEFAULT_ENV = {
    'OS_AUTH_URL': os.environ.get('ST_AUTH',
                                  'http://localhost:8080/auth/v1.0'),
    'OS_USERNAME': os.environ.get('ST_USER', 'test:tester'),
    'OS_PASSWORD': os.environ.get('ST_KEY', 'testing'),
    'OS_STORAGE_URL': None,
    'S3_ENDPOINT': 'http://localhost:8080',
    'S3_USER': 'test:tester',
    'S3_KEY': 'testing',
}
# Effective settings: environment wins over the defaults above.
ENV = {key: os.environ.get(key, default)
       for key, default in DEFAULT_ENV.items()}
TEST_TIMEOUT = 120.0  # seconds
# NOTE(review): STEPS appears to size the browser-side test loop; its
# consumer is not visible in this part of the file.
STEPS = 500
# Hack up stdlib so SimpleHTTPRequestHandler works well on py2, too
this_dir = os.path.realpath(os.path.dirname(__file__))
os.getcwd = lambda: this_dir
class CORSSiteHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    """Static-file handler that suppresses per-request logging."""

    def log_message(self, fmt, *args):
        """Intentionally drop all request log lines to keep output quiet."""
class CORSSiteServer(socketserver.TCPServer):
    """TCP server for the test page; reuses the port across quick restarts."""
    allow_reuse_address = True
class CORSSite(threading.Thread):
    """Background thread that serves the static CORS test page over HTTP."""

    def __init__(self, bind_port=8000):
        super(CORSSite, self).__init__()
        # The server is created lazily in run(), so constructing a
        # CORSSite never binds a port.
        self.server = None
        self.bind_port = bind_port

    def run(self):
        # Bind on all interfaces and block in the serve loop until
        # terminate() shuts it down.
        bind_addr = ('0.0.0.0', self.bind_port)
        self.server = CORSSiteServer(bind_addr, CORSSiteHandler)
        self.server.serve_forever()

    def terminate(self):
        # A site that never started has nothing to shut down.
        if self.server is None:
            return
        self.server.shutdown()
        self.join()
class Zeroes(object):
    """Iterable request body producing ``size`` zero bytes in 64 KiB chunks.

    Lets the tests upload largish objects without materializing the
    payload in memory; ``len()`` reports the full body size so clients
    can set Content-Length.
    """

    BUF = b'\x00' * 64 * 1024

    def __init__(self, size=0):
        # ``pos`` tracks how many bytes the current iteration has produced.
        self.pos = 0
        self.size = size

    def __iter__(self):
        # Reset so the body can be iterated again (e.g. if the client
        # retries an upload); previously ``pos`` was never reset, so a
        # second iteration silently yielded an empty body.
        self.pos = 0
        while self.pos < self.size:
            # Each chunk is at most len(BUF); the slice caps the final
            # (possibly shorter) chunk at the remaining byte count.
            chunk = self.BUF[:self.size - self.pos]
            self.pos += len(chunk)
            yield chunk

    def __len__(self):
        return self.size
def setup(args):
    """Create the well-known containers and objects the test page expects.

    Containers cover every combination the JS tests probe: private,
    referrer-restricted, and public, each with or without CORS metadata,
    plus a public segments container for DLO/SLO manifests.
    """
    client = swiftclient.client.Connection(
        ENV['OS_AUTH_URL'],
        ENV['OS_USERNAME'],
        ENV['OS_PASSWORD'],
        timeout=30)  # We've seen request times as high as 7-8s in the gate
    cluster_info = client.get_capabilities()

    test_origin = 'http://%s:%d' % (args.hostname, args.port)
    # (container name, X-Container-Read ACL, allowed CORS origin)
    container_specs = [
        ('private', '', ''),
        ('referrer-allowed', '.r:%s' % args.hostname, test_origin),
        ('other-referrer-allowed', '.r:other-host', 'http://other-host'),
        ('public-with-cors', '.r:*,.rlistings', '*'),
        ('private-with-cors', '', '*'),
        ('public-no-cors', '.r:*,.rlistings', ''),
        ('public-segments', '.r:*', ''),
    ]
    for name, read_acl, origin in container_specs:
        client.put_container(name, {
            'X-Container-Read': read_acl,
            'X-Container-Meta-Access-Control-Allow-Origin': origin,
        })

    # Every container except the segments one gets a small plain object.
    for name, _read_acl, _origin in container_specs[:-1]:
        client.put_object(name, 'obj', Zeroes(1024), headers={
            'X-Object-Meta-Mtime': str(time.time())})

    # Upload 1 MiB segments both to a separate (unlistable from the
    # manifest's ACL perspective) container and next to the manifest.
    for n in range(10):
        segment_etag = client.put_object(
            'public-segments', 'seg%02d' % n, Zeroes(1024 * 1024),
            headers={'Content-Type': 'application/swiftclient-segment'})
        client.put_object(
            'public-with-cors', 'dlo/seg%02d' % n, Zeroes(1024 * 1024),
            headers={'Content-Type': 'application/swiftclient-segment'})
    client.put_object('public-with-cors', 'dlo-with-unlistable-segments', b'',
                      headers={'X-Object-Manifest': 'public-segments/seg'})
    client.put_object('public-with-cors', 'dlo', b'',
                      headers={'X-Object-Manifest': 'public-with-cors/dlo/seg'})

    if 'slo' in cluster_info:
        # All segments are identical zeros, so the last etag fits them all.
        manifest = [{'path': 'public-segments/seg%02d' % n,
                     'etag': segment_etag} for n in range(10)]
        client.put_object('public-with-cors', 'slo', json.dumps(manifest),
                          query_string='multipart-manifest=put')

    if 'symlink' in cluster_info:
        for tgt in ('private', 'public-with-cors', 'public-no-cors'):
            client.put_object('public-with-cors', 'symlink-to-' + tgt, b'',
                              headers={'X-Symlink-Target': tgt + '/obj'})
def get_results_table(browser):
    """Yield (status, test name, details) tuples scraped from the page.

    The test name is prefixed with the browser's name so results from
    different browsers can be told apart in the combined TAP output.
    """
    table = browser.find_element_by_id('results')
    for table_row in table.find_elements_by_xpath('./tr'):
        cells = table_row.find_elements_by_xpath('td')
        yield (cells[0].text,
               '%s: %s' % (browser.name, cells[1].text),
               cells[2].text)
def run(args, url):
    """Drive each requested browser at ``url`` and emit TAP results.

    Returns the number of failures; also nonzero when no browser could
    be launched at all, so "no tests ran" reads as a failure.
    """
    results = []
    browsers = list(ALL_BROWSERS) if 'all' in args.browsers else args.browsers
    ran_one = False
    for browser_name in browsers:
        kwargs = {}
        try:
            # Prefer headless operation where the driver supports it.
            options = getattr(
                selenium.webdriver, browser_name.title() + 'Options')()
            options.headless = True
            kwargs['options'] = options
        except AttributeError:
            # not all browser types have Options class
            pass
        driver = getattr(selenium.webdriver, browser_name.title())
        try:
            browser = driver(**kwargs)
        except Exception as e:
            # A missing driver binary is expected on dev boxes: record a
            # SKIP. Anything else is surfaced via traceback before the SKIP.
            if not ('needs to be in PATH' in str(e) or
                    'SafariDriver was not found' in str(e)):
                traceback.print_exc()
            results.append(('SKIP', browser_name, str(e).strip()))
            continue
        ran_one = True
        try:
            browser.get(url)
            start = time.time()
            # Poll the page's status element until the JS tests report
            # completion or the TEST_TIMEOUT budget is exhausted.
            for _ in range(STEPS):
                status = browser.find_element_by_id('status').text
                if status.startswith('Complete'):
                    results.extend(get_results_table(browser))
                    break
                time.sleep(TEST_TIMEOUT / STEPS)
            else:
                # Timed out: salvage whatever results rendered so far.
                try:
                    results.extend(get_results_table(browser))
                except Exception:
                    pass  # worth a shot
                # that took a sec; give it *one last chance* to succeed
                status = browser.find_element_by_id('status').text
                if not status.startswith('Complete'):
                    results.append((
                        'ERROR', browser_name, 'Timed out (%s)' % status))
                    continue
            sys.stderr.write('Tested %s in %.1fs\n' % (
                browser_name, time.time() - start))
        except Exception as e:
            results.append(('ERROR', browser_name, str(e).strip()))
        finally:
            browser.close()
    # Emit TAP (Test Anything Protocol) to the file given by -o/--output,
    # or to stdout by default.
    if args.output is not None:
        fp = open(args.output, 'w')
    else:
        fp = sys.stdout
    fp.write('1..%d\n' % len(results))
    rc = 0
    if not ran_one:
        rc += 1  # make sure "no tests ran" translates to "failed"
    for test, (status, name, details) in enumerate(results, start=1):
        if status == 'PASS':
            fp.write('ok %d - %s\n' % (test, name))
        elif status == 'SKIP':
            fp.write('ok %d - %s # skip %s\n' % (test, name, details))
        else:
            fp.write('not ok %d - %s\n' % (test, name))
            fp.write('  %s%s\n' % (status, ':' if details else ''))
            if details:
                fp.write(''.join(
                    '  ' + line + '\n'
                    for line in details.split('\n')))
            rc += 1
    if fp is not sys.stdout:
        fp.close()
    return rc
# Browser names accepted on the command line; each maps to a
# selenium.webdriver driver class of the same (title-cased) name.
ALL_BROWSERS = [
    'firefox',
    'chrome',
    'safari',
    'edge',
    'ie',
]
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='Set up and run CORS functional tests',
        epilog='''The tests consist of three parts:
setup - Create several test containers with well-known names, set appropriate
        ACLs and CORS metadata, and upload some test objects.
serve - Serve a static website on localhost which, on load, will make several
        CORS requests and verify expected behavior.
run - Use Selenium to load the website, wait for and scrape the results,
      and output them in TAP format.
By default, perform all three parts. You can skip some or all of the parts
with the --no-setup, --no-serve, and --no-run options.
''')
    parser.add_argument('-P', '--port', type=int, default=8000)
    parser.add_argument('-H', '--hostname', default='localhost')
    parser.add_argument('--no-setup', action='store_true')
    parser.add_argument('--no-serve', action='store_true')
    parser.add_argument('--no-run', action='store_true')
    parser.add_argument('-o', '--output')
    parser.add_argument('browsers', nargs='*',
                        default='all',
                        choices=['all'] + ALL_BROWSERS)
    args = parser.parse_args()
    if not args.no_setup:
        setup(args)
    # With --no-serve we assume something else is hosting the page.
    if args.no_serve:
        site = None
    else:
        site = CORSSite(args.port)
    should_run = not args.no_run
    if should_run and not selenium:
        print('Selenium not available; cannot run tests automatically')
        should_run = False
    # Resolve the storage URL via auth if none was supplied; the test
    # page needs it to reach Swift.
    if ENV['OS_STORAGE_URL'] is None:
        ENV['OS_STORAGE_URL'] = swiftclient.client.get_auth(
            ENV['OS_AUTH_URL'],
            ENV['OS_USERNAME'],
            ENV['OS_PASSWORD'],
            timeout=1)[0]
    # The page reads its configuration from the URL fragment.
    url = 'http://%s:%d/#%s' % (args.hostname, args.port, '&'.join(
        '%s=%s' % (urllib.parse.quote(key), urllib.parse.quote(val))
        for key, val in ENV.items()))
    rc = 0
    if should_run:
        if site:
            site.start()
        try:
            rc = run(args, url)
        finally:
            if site:
                site.terminate()
    else:
        if site:
            # Interactive mode: serve in the foreground until Ctrl-C.
            print('Serving test at %s' % url)
            try:
                site.run()
            except KeyboardInterrupt:
                pass
    exit(rc)
| swift-master | test/cors/main.py |
#!/usr/bin/python
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from six.moves.urllib.parse import urlparse, urlunparse
import uuid
from random import shuffle
try:
from keystoneclient.v3 import ksc
except ImportError:
ksc = None
from swiftclient import get_auth, http_connection
import test.functional as tf
def setUpModule():
    """Initialize the functional-test package before any test in this module."""
    tf.setup_package()
def tearDownModule():
    """Release functional-test resources after all tests in this module."""
    tf.teardown_package()
# Field names for each RBAC test-case tuple below; see the comment
# block that follows for the meaning of each field.
TEST_CASE_FORMAT = (
    'http_method', 'header', 'account_name', 'container_name', 'object_name',
    'prep_container_header', 'reseller_prefix', 'target_user_name',
    'auth_user_name', 'service_user_name', 'expected')
# http_method : HTTP methods such as PUT, GET, POST, HEAD and so on
# header : headers for a request
# account_name : Account name. Usually the name will be automatically
# created by keystone
# container_name : Container name. If 'UUID' is specified, a container
# name will be created automatically
# object_name : Object name. If 'UUID' is specified, a container
# name will be created automatically
# prep_container_header : headers which will be set on the container
# reseller_prefix : Reseller prefix that will be used for request url.
# Can be None or SERVICE to select the user account
# prefix or the service prefix respectively
# target_user_name : a user name which is used for getting the project id
# of the target
# auth_user_name : a user name which is used for getting a token for
# X-Auth_Token
# service_user_name : a user name which is used for getting a token for
# X-Service-Token
# expected : expected status code
#
# a combination of account_name, container_name and object_name
# represents a target.
# +------------+--------------+-----------+---------+
# |account_name|container_name|object_name| target |
# +------------+--------------+-----------+---------+
# | None | None | None | account |
# +------------+--------------+-----------+---------+
# | None | 'UUID' | None |container|
# +------------+--------------+-----------+---------+
# | None | 'UUID' | 'UUID' | object |
# +------------+--------------+-----------+---------+
#
# The following users are required to run this functional test.
# No.6, tester6, is added for this test.
# +----+-----------+-------+---------+-------------+
# |No. | Domain |Project|User name| Role |
# +----+-----------+-------+---------+-------------+
# | 1 | default | test | tester | admin |
# +----+-----------+-------+---------+-------------+
# | 2 | default | test2 | tester2 | admin |
# +----+-----------+-------+---------+-------------+
# | 3 | default | test | tester3 | _member_ |
# +----+-----------+-------+---------+-------------+
# | 4 |test-domain| test4 | tester4 | admin |
# +----+-----------+-------+---------+-------------+
# | 5 | default | test5 | tester5 | service |
# +----+-----------+-------+---------+-------------+
# | 6 | default | test | tester6 |ResellerAdmin|
# +----+-----------+-------+---------+-------------+
# A scenario of put for account, container and object with
# several roles.
# Each tuple follows TEST_CASE_FORMAT; the last element is the expected
# HTTP status code.
RBAC_PUT = [
    # PUT container in own account: ok
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester', 'tester', None, 201),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester', 'tester', 'tester', 201),
    # PUT container in other users account: not allowed for role admin
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester2', 'tester', None, 403),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester4', 'tester', None, 403),
    # PUT container in other users account: not allowed for role _member_
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester3', 'tester3', None, 403),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester2', 'tester3', None, 403),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester4', 'tester3', None, 403),
    # PUT container in other users account: allowed for role ResellerAdmin
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester6', 'tester6', None, 201),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester2', 'tester6', None, 201),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester4', 'tester6', None, 201),
    # PUT object in own account: ok
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', None, 201),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', 'tester', 201),
    # PUT object in other users account: not allowed for role admin
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester', None, 403),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester', None, 403),
    # PUT object in other users account: not allowed for role _member_
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester3', 'tester3', None, 403),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester3', None, 403),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester3', None, 403),
    # PUT object in other users account: allowed for role ResellerAdmin
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester6', 'tester6', None, 201),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester6', None, 201),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester6', None, 201)
]
# Same format as RBAC_PUT, exercising the service token (tester5) and
# the SERVICE reseller prefix.
RBAC_PUT_WITH_SERVICE_PREFIX = [
    # PUT container in own account: ok
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester', 'tester', 'tester5', 201),
    # PUT container in other users account: not allowed for role service
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester', None, 'tester5', 401),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester5', 'tester5', None, 403),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester2', 'tester5', None, 403),
    ('PUT', None, None, 'UUID', None, None,
     None, 'tester4', 'tester5', None, 403),
    # PUT object in own account: ok
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', 'tester5', 201),
    # PUT object in other users account: not allowed for role service
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester', None, 'tester5', 401),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester5', 'tester5', None, 403),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester5', None, 403),
    ('PUT', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester5', None, 403),
    # All following actions are using SERVICE prefix
    # PUT container in own account: ok
    ('PUT', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', 'tester5', 201),
    # PUT container fails if wrong user, or only one token sent
    ('PUT', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('PUT', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('PUT', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('PUT', None, None, 'UUID', None, None,
     'SERVICE', 'tester', None, 'tester5', 401),
    # PUT object in own account: ok
    ('PUT', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', 'tester5', 201),
    # PUT object fails if wrong user, or only one token sent
    ('PUT', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('PUT', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('PUT', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('PUT', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', None, 'tester5', 401),
]
# A scenario of delete for account, container and object with
# several roles.
# Each tuple follows TEST_CASE_FORMAT; the last element is the expected
# HTTP status code.
RBAC_DELETE = [
    # DELETE container in own account: ok
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester', 'tester', None, 204),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester', 'tester', 'tester', 204),
    # DELETE container in other users account: not allowed for role admin
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester2', 'tester', None, 403),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester4', 'tester', None, 403),
    # DELETE container in other users account: not allowed for role _member_
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester3', 'tester3', None, 403),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester2', 'tester3', None, 403),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester4', 'tester3', None, 403),
    # DELETE container in other users account: allowed for role ResellerAdmin
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester6', 'tester6', None, 204),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester2', 'tester6', None, 204),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester4', 'tester6', None, 204),
    # DELETE object in own account: ok
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', None, 204),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', 'tester', 204),
    # DELETE object in other users account: not allowed for role admin
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester', None, 403),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester', None, 403),
    # DELETE object in other users account: not allowed for role _member_
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester3', 'tester3', None, 403),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester3', None, 403),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester3', None, 403),
    # DELETE object in other users account: allowed for role ResellerAdmin
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester6', 'tester6', None, 204),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester6', None, 204),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester6', None, 204)
]
# Same format as RBAC_DELETE, exercising the service token (tester5) and
# the SERVICE reseller prefix.
RBAC_DELETE_WITH_SERVICE_PREFIX = [
    # DELETE container in own account: ok
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester', 'tester', 'tester5', 204),
    # DELETE container in other users account: not allowed for role service
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester', None, 'tester5', 401),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester5', 'tester5', None, 403),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester2', 'tester5', None, 403),
    ('DELETE', None, None, 'UUID', None, None,
     None, 'tester4', 'tester5', None, 403),
    # DELETE object in own account: ok
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', 'tester5', 204),
    # DELETE object in other users account: not allowed for role service
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester', None, 'tester5', 401),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester5', 'tester5', None, 403),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester5', None, 403),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester5', None, 403),
    # All following actions are using SERVICE prefix
    # DELETE container in own account: ok
    ('DELETE', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', 'tester5', 204),
    # DELETE container fails if wrong user, or only one token sent
    ('DELETE', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('DELETE', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('DELETE', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('DELETE', None, None, 'UUID', None, None,
     'SERVICE', 'tester', None, 'tester5', 401),
    # DELETE object in own account: ok
    ('DELETE', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', 'tester5', 204),
    # DELETE object fails if wrong user, or only one token sent
    ('DELETE', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('DELETE', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', None, 'tester5', 401)
]
# A scenario of get for account, container and object with
# several roles.
# Each tuple follows TEST_CASE_FORMAT; the last element is the expected
# HTTP status code.
RBAC_GET = [
    # GET own account: ok
    ('GET', None, None, None, None, None,
     None, 'tester', 'tester', None, 200),
    ('GET', None, None, None, None, None,
     None, 'tester', 'tester', 'tester', 200),
    # GET other users account: not allowed for role admin
    ('GET', None, None, None, None, None,
     None, 'tester2', 'tester', None, 403),
    ('GET', None, None, None, None, None,
     None, 'tester4', 'tester', None, 403),
    # GET other users account: not allowed for role _member_
    ('GET', None, None, None, None, None,
     None, 'tester3', 'tester3', None, 403),
    ('GET', None, None, None, None, None,
     None, 'tester2', 'tester3', None, 403),
    ('GET', None, None, None, None, None,
     None, 'tester4', 'tester3', None, 403),
    # GET other users account: allowed for role ResellerAdmin
    ('GET', None, None, None, None, None,
     None, 'tester6', 'tester6', None, 200),
    ('GET', None, None, None, None, None,
     None, 'tester2', 'tester6', None, 200),
    ('GET', None, None, None, None, None,
     None, 'tester4', 'tester6', None, 200),
    # GET container in own account: ok
    ('GET', None, None, 'UUID', None, None,
     None, 'tester', 'tester', None, 200),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester', 'tester', 'tester', 200),
    # GET container in other users account: not allowed for role admin
    ('GET', None, None, 'UUID', None, None,
     None, 'tester2', 'tester', None, 403),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester4', 'tester', None, 403),
    # GET container in other users account: not allowed for role _member_
    ('GET', None, None, 'UUID', None, None,
     None, 'tester3', 'tester3', None, 403),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester2', 'tester3', None, 403),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester4', 'tester3', None, 403),
    # GET container in other users account: allowed for role ResellerAdmin
    ('GET', None, None, 'UUID', None, None,
     None, 'tester6', 'tester6', None, 200),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester2', 'tester6', None, 200),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester4', 'tester6', None, 200),
    # GET object in own account: ok
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', None, 200),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', 'tester', 200),
    # GET object in other users account: not allowed for role admin
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester', None, 403),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester', None, 403),
    # GET object in other users account: not allowed for role _member_
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester3', 'tester3', None, 403),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester3', None, 403),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester3', None, 403),
    # GET object in other users account: allowed for role ResellerAdmin
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester6', 'tester6', None, 200),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester6', None, 200),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester6', None, 200)
]
# Same format as RBAC_GET, exercising the service token (tester5) and
# the SERVICE reseller prefix.
RBAC_GET_WITH_SERVICE_PREFIX = [
    # GET own account: ok
    ('GET', None, None, None, None, None,
     None, 'tester', 'tester', 'tester5', 200),
    # GET other account: not allowed for role service
    ('GET', None, None, None, None, None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('GET', None, None, None, None, None,
     None, 'tester', None, 'tester5', 401),
    ('GET', None, None, None, None, None,
     None, 'tester5', 'tester5', None, 403),
    ('GET', None, None, None, None, None,
     None, 'tester2', 'tester5', None, 403),
    ('GET', None, None, None, None, None,
     None, 'tester4', 'tester5', None, 403),
    # GET container in own account: ok
    ('GET', None, None, 'UUID', None, None,
     None, 'tester', 'tester', 'tester5', 200),
    # GET container in other users account: not allowed for role service
    ('GET', None, None, 'UUID', None, None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester', None, 'tester5', 401),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester5', 'tester5', None, 403),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester2', 'tester5', None, 403),
    ('GET', None, None, 'UUID', None, None,
     None, 'tester4', 'tester5', None, 403),
    # GET object in own account: ok
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', 'tester5', 200),
    # GET object fails if wrong user, or only one token sent
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester', None, 'tester5', 401),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester5', 'tester5', None, 403),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester5', None, 403),
    ('GET', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester5', None, 403),
    # All following actions are using SERVICE prefix
    # GET own account: ok
    ('GET', None, None, None, None, None,
     'SERVICE', 'tester', 'tester', 'tester5', 200),
    # GET other account: not allowed for role service
    ('GET', None, None, None, None, None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('GET', None, None, None, None, None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('GET', None, None, None, None, None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('GET', None, None, None, None, None,
     'SERVICE', 'tester', None, 'tester5', 401),
    # GET container in own account: ok
    ('GET', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', 'tester5', 200),
    # GET container fails if wrong user, or only one token sent
    ('GET', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('GET', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('GET', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('GET', None, None, 'UUID', None, None,
     'SERVICE', 'tester', None, 'tester5', 401),
    # GET object in own account: ok
    ('GET', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', 'tester5', 200),
    # GET object fails if wrong user, or only one token sent
    ('GET', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('GET', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('GET', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('GET', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', None, 'tester5', 401)
]
# A scenario of head for account, container and object with
# several roles.
# Each tuple follows TEST_CASE_FORMAT; the last element is the expected
# HTTP status code.
RBAC_HEAD = [
    # HEAD own account: ok
    ('HEAD', None, None, None, None, None,
     None, 'tester', 'tester', None, 204),
    ('HEAD', None, None, None, None, None,
     None, 'tester', 'tester', 'tester', 204),
    # HEAD other users account: not allowed for role admin
    ('HEAD', None, None, None, None, None,
     None, 'tester2', 'tester', None, 403),
    ('HEAD', None, None, None, None, None,
     None, 'tester4', 'tester', None, 403),
    # HEAD other users account: not allowed for role _member_
    ('HEAD', None, None, None, None, None,
     None, 'tester3', 'tester3', None, 403),
    ('HEAD', None, None, None, None, None,
     None, 'tester2', 'tester3', None, 403),
    ('HEAD', None, None, None, None, None,
     None, 'tester4', 'tester3', None, 403),
    # HEAD other users account: allowed for role ResellerAdmin
    ('HEAD', None, None, None, None, None,
     None, 'tester6', 'tester6', None, 204),
    ('HEAD', None, None, None, None, None,
     None, 'tester2', 'tester6', None, 204),
    ('HEAD', None, None, None, None, None,
     None, 'tester4', 'tester6', None, 204),
    # HEAD container in own account: ok
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester', 'tester', None, 204),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester', 'tester', 'tester', 204),
    # HEAD container in other users account: not allowed for role admin
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester2', 'tester', None, 403),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester4', 'tester', None, 403),
    # HEAD container in other users account: not allowed for role _member_
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester3', 'tester3', None, 403),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester2', 'tester3', None, 403),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester4', 'tester3', None, 403),
    # HEAD container in other users account: allowed for role ResellerAdmin
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester6', 'tester6', None, 204),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester2', 'tester6', None, 204),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester4', 'tester6', None, 204),
    # HEAD object in own account: ok
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', None, 200),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', 'tester', 200),
    # HEAD object in other users account: not allowed for role admin
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester', None, 403),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester', None, 403),
    # HEAD object in other users account: not allowed for role _member_
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester3', 'tester3', None, 403),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester3', None, 403),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester3', None, 403),
    # HEAD object in other users account: allowed for role ResellerAdmin
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester6', 'tester6', None, 200),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester6', None, 200),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester6', None, 200)
]
# Same format as RBAC_HEAD, exercising the service token (tester5) and
# the SERVICE reseller prefix.
RBAC_HEAD_WITH_SERVICE_PREFIX = [
    # HEAD own account: ok
    ('HEAD', None, None, None, None, None,
     None, 'tester', 'tester', 'tester5', 204),
    # HEAD other account: not allowed for role service
    ('HEAD', None, None, None, None, None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('HEAD', None, None, None, None, None,
     None, 'tester', None, 'tester5', 401),
    ('HEAD', None, None, None, None, None,
     None, 'tester5', 'tester5', None, 403),
    ('HEAD', None, None, None, None, None,
     None, 'tester2', 'tester5', None, 403),
    ('HEAD', None, None, None, None, None,
     None, 'tester4', 'tester5', None, 403),
    # HEAD container in own account: ok
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester', 'tester', 'tester5', 204),
    # HEAD container in other users account: not allowed for role service
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester', None, 'tester5', 401),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester5', 'tester5', None, 403),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester2', 'tester5', None, 403),
    ('HEAD', None, None, 'UUID', None, None,
     None, 'tester4', 'tester5', None, 403),
    # HEAD object in own account: ok
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester', 'tester5', 200),
    # HEAD object fails if wrong user, or only one token sent
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester', 'tester3', 'tester5', 403),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester', None, 'tester5', 401),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester5', 'tester5', None, 403),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester2', 'tester5', None, 403),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     None, 'tester4', 'tester5', None, 403),
    # All following actions are using SERVICE prefix
    # HEAD own account: ok
    ('HEAD', None, None, None, None, None,
     'SERVICE', 'tester', 'tester', 'tester5', 204),
    # HEAD other account: not allowed for role service
    ('HEAD', None, None, None, None, None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('HEAD', None, None, None, None, None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('HEAD', None, None, None, None, None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('HEAD', None, None, None, None, None,
     'SERVICE', 'tester', None, 'tester5', 401),
    # HEAD container in own account: ok
    ('HEAD', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', 'tester5', 204),
    # HEAD container in other users account: not allowed for role service
    ('HEAD', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('HEAD', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('HEAD', None, None, 'UUID', None, None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('HEAD', None, None, 'UUID', None, None,
     'SERVICE', 'tester', None, 'tester5', 401),
    # HEAD object in own account: ok
    ('HEAD', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', 'tester5', 200),
    # HEAD object fails if wrong user, or only one token sent
    ('HEAD', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester3', 'tester5', 403),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', None, 403),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', 'tester', 'tester', 403),
    ('HEAD', None, None, 'UUID', 'UUID', None,
     'SERVICE', 'tester', None, 'tester5', 401)
]
# A scenario of post for account, container and object with
# several roles.
# Each scenario tuple is positional; from the data and the inline
# comments it appears to be:
#   (method, request_headers, body, container, object,
#    prep_container_headers, account_prefix,
#    target_user, token_user, second_token_user, expected_status)
# NOTE(review): field meanings inferred from the data -- confirm
# against the test driver that unpacks these tuples.
RBAC_POST = [
# POST own account: ok
('POST', None, None, None, None, None,
None, 'tester', 'tester', None, 204),
('POST', None, None, None, None, None,
None, 'tester', 'tester', 'tester', 204),
# POST other users account: not allowed for role admin
('POST', None, None, None, None, None,
None, 'tester2', 'tester', None, 403),
('POST', None, None, None, None, None,
None, 'tester4', 'tester', None, 403),
# POST other users account: not allowed for role _member_
('POST', None, None, None, None, None,
None, 'tester3', 'tester3', None, 403),
('POST', None, None, None, None, None,
None, 'tester2', 'tester3', None, 403),
('POST', None, None, None, None, None,
None, 'tester4', 'tester3', None, 403),
# POST other users account: allowed for role ResellerAdmin
('POST', None, None, None, None, None,
None, 'tester6', 'tester6', None, 204),
('POST', None, None, None, None, None,
None, 'tester2', 'tester6', None, 204),
('POST', None, None, None, None, None,
None, 'tester4', 'tester6', None, 204),
# POST container in own account: ok
('POST', None, None, 'UUID', None, None,
None, 'tester', 'tester', None, 204),
('POST', None, None, 'UUID', None, None,
None, 'tester', 'tester', 'tester', 204),
# POST container in other users account: not allowed for role admin
('POST', None, None, 'UUID', None, None,
None, 'tester2', 'tester', None, 403),
('POST', None, None, 'UUID', None, None,
None, 'tester4', 'tester', None, 403),
# POST container in other users account: not allowed for role _member_
('POST', None, None, 'UUID', None, None,
None, 'tester3', 'tester3', None, 403),
('POST', None, None, 'UUID', None, None,
None, 'tester2', 'tester3', None, 403),
('POST', None, None, 'UUID', None, None,
None, 'tester4', 'tester3', None, 403),
# POST container in other users account: allowed for role ResellerAdmin
('POST', None, None, 'UUID', None, None,
None, 'tester6', 'tester6', None, 204),
('POST', None, None, 'UUID', None, None,
None, 'tester2', 'tester6', None, 204),
('POST', None, None, 'UUID', None, None,
None, 'tester4', 'tester6', None, 204),
# POST object in own account: ok (objects POST returns 202 Accepted)
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester', 'tester', None, 202),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester', 'tester', 'tester', 202),
# POST object in other users account: not allowed for role admin
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester2', 'tester', None, 403),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester4', 'tester', None, 403),
# POST object in other users account: not allowed for role _member_
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester3', 'tester3', None, 403),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester2', 'tester3', None, 403),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester4', 'tester3', None, 403),
# POST object in other users account: allowed for role ResellerAdmin
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester6', 'tester6', None, 202),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester2', 'tester6', None, 202),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester4', 'tester6', None, 202)
]
# POST scenarios exercising service tokens and the SERVICE account
# prefix; tester5 appears to be the service-role user here.
RBAC_POST_WITH_SERVICE_PREFIX = [
# POST own account: ok
('POST', None, None, None, None, None,
None, 'tester', 'tester', 'tester5', 204),
# POST account fails if wrong user, or only one token sent
('POST', None, None, None, None, None,
None, 'tester', 'tester3', 'tester5', 403),
('POST', None, None, None, None, None,
None, 'tester', None, 'tester5', 401),
('POST', None, None, None, None, None,
None, 'tester5', 'tester5', None, 403),
('POST', None, None, None, None, None,
None, 'tester2', 'tester5', None, 403),
('POST', None, None, None, None, None,
None, 'tester4', 'tester5', None, 403),
# POST container in own account: ok
('POST', None, None, 'UUID', None, None,
None, 'tester', 'tester', 'tester5', 204),
# POST container in other users account: not allowed for role service
('POST', None, None, 'UUID', None, None,
None, 'tester', 'tester3', 'tester5', 403),
('POST', None, None, 'UUID', None, None,
None, 'tester', None, 'tester5', 401),
('POST', None, None, 'UUID', None, None,
None, 'tester5', 'tester5', None, 403),
('POST', None, None, 'UUID', None, None,
None, 'tester2', 'tester5', None, 403),
('POST', None, None, 'UUID', None, None,
None, 'tester4', 'tester5', None, 403),
# POST object in own account: ok
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester', 'tester', 'tester5', 202),
# POST object fails if wrong user, or only one token sent
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester', 'tester3', 'tester5', 403),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester', None, 'tester5', 401),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester5', 'tester5', None, 403),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester2', 'tester5', None, 403),
('POST', None, None, 'UUID', 'UUID', None,
None, 'tester4', 'tester5', None, 403),
# All following actions are using SERVICE prefix
# POST own account: ok
('POST', None, None, None, None, None,
'SERVICE', 'tester', 'tester', 'tester5', 204),
# POST other account: not allowed for role service
('POST', None, None, None, None, None,
'SERVICE', 'tester', 'tester3', 'tester5', 403),
('POST', None, None, None, None, None,
'SERVICE', 'tester', 'tester', None, 403),
('POST', None, None, None, None, None,
'SERVICE', 'tester', 'tester', 'tester', 403),
('POST', None, None, None, None, None,
'SERVICE', 'tester', None, 'tester5', 401),
# POST container in own account: ok
('POST', None, None, 'UUID', None, None,
'SERVICE', 'tester', 'tester', 'tester5', 204),
# POST container in other users account: not allowed for role service
('POST', None, None, 'UUID', None, None,
'SERVICE', 'tester', 'tester3', 'tester5', 403),
('POST', None, None, 'UUID', None, None,
'SERVICE', 'tester', 'tester', None, 403),
('POST', None, None, 'UUID', None, None,
'SERVICE', 'tester', 'tester', 'tester', 403),
('POST', None, None, 'UUID', None, None,
'SERVICE', 'tester', None, 'tester5', 401),
# POST object in own account: ok
('POST', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', 'tester', 'tester5', 202),
# POST object fails if wrong user, or only one token sent
('POST', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', 'tester3', 'tester5', 403),
('POST', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', 'tester', None, 403),
('POST', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', 'tester', 'tester', 403),
('POST', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', None, 'tester5', 401)
]
# A scenario of options for account, container and object with
# several roles.  Bare OPTIONS requests always succeed; the entries
# with an Origin header exercise CORS preflight against the
# X-Container-Meta-Access-Control-Allow-Origin container metadata.
RBAC_OPTIONS = [
# OPTIONS request is always ok
('OPTIONS', None, None, None, None, None,
None, 'tester', 'tester', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester', 'tester', 'tester', 200),
('OPTIONS', None, None, None, None, None,
None, 'tester2', 'tester', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester4', 'tester', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester3', 'tester3', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester2', 'tester3', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester4', 'tester3', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester6', 'tester6', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester2', 'tester6', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester4', 'tester6', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester', 'tester', 'tester', 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester2', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester4', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester3', 'tester3', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester2', 'tester3', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester4', 'tester3', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester6', 'tester6', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester2', 'tester6', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester4', 'tester6', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester', 'tester', 'tester', 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester2', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester4', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester3', 'tester3', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester2', 'tester3', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester4', 'tester3', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester6', 'tester6', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester2', 'tester6', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester4', 'tester6', None, 200),
# CORS metadata present but no Origin header: still always ok
('OPTIONS', None, None, None, None,
{"X-Container-Meta-Access-Control-Allow-Origin": "*"},
None, 'tester', 'tester', None, 200),
('OPTIONS', None, None, None, None,
{"X-Container-Meta-Access-Control-Allow-Origin": "http://invalid.com"},
None, 'tester', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', None,
{"X-Container-Meta-Access-Control-Allow-Origin": "*"},
None, 'tester', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', None,
{"X-Container-Meta-Access-Control-Allow-Origin": "http://invalid.com"},
None, 'tester', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID',
{"X-Container-Meta-Access-Control-Allow-Origin": "*"},
None, 'tester', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID',
{"X-Container-Meta-Access-Control-Allow-Origin": "http://invalid.com"},
None, 'tester', 'tester', None, 200),
# CORS preflight (Origin + Access-Control-Request-Method) at the
# account level: ok regardless of the allow-origin metadata
('OPTIONS',
{"Origin": "http://localhost", "Access-Control-Request-Method": "GET"},
None, None, None, None, None, 'tester', 'tester', None, 200),
('OPTIONS',
{"Origin": "http://localhost", "Access-Control-Request-Method": "GET"},
None, None, None,
{"X-Container-Meta-Access-Control-Allow-Origin": "*"},
None, 'tester', 'tester', None, 200),
('OPTIONS',
{"Origin": "http://localhost", "Access-Control-Request-Method": "GET"},
None, None, None,
{"X-Container-Meta-Access-Control-Allow-Origin": "http://invalid.com"},
None, 'tester', 'tester', None, 200),
# Not OK for container: missing allow-origin metadata
('OPTIONS',
{"Origin": "http://localhost", "Access-Control-Request-Method": "GET"},
None, 'UUID', None, None, None, 'tester', 'tester', None, 401),
# OK for container: origin allowed by '*'
('OPTIONS',
{"Origin": "http://localhost", "Access-Control-Request-Method": "GET"},
None, 'UUID', None,
{"X-Container-Meta-Access-Control-Allow-Origin": "*"},
None, 'tester', 'tester', None, 200),
# Not OK for container: wrong origin
('OPTIONS',
{"Origin": "http://localhost", "Access-Control-Request-Method": "GET"},
None, 'UUID', None,
{"X-Container-Meta-Access-Control-Allow-Origin": "http://invalid.com"},
None, 'tester', 'tester', None, 401),
# Not OK for object: missing X-Container-Meta-Access-Control-Allow-Origin
('OPTIONS',
{"Origin": "http://localhost", "Access-Control-Request-Method": "GET"},
None, 'UUID', 'UUID', None, None, 'tester', 'tester', None, 401),
# OK for object: origin allowed by '*'
# NOTE(review): the second user is None here, unlike the sibling
# entries, yet 200 is still expected -- presumably an allowed
# preflight needs no auth token; confirm against the test driver.
('OPTIONS',
{"Origin": "http://localhost", "Access-Control-Request-Method": "GET"},
None, 'UUID', 'UUID',
{"X-Container-Meta-Access-Control-Allow-Origin": "*"},
None, 'tester', None, None, 200),
# Not OK for object: wrong origin
('OPTIONS',
{"Origin": "http://localhost", "Access-Control-Request-Method": "GET"},
None, 'UUID', 'UUID',
{"X-Container-Meta-Access-Control-Allow-Origin": "http://invalid.com"},
None, 'tester', 'tester', None, 401)
]
# OPTIONS scenarios with service tokens (tester5) -- OPTIONS succeeds
# in every combination, with and without the SERVICE account prefix.
RBAC_OPTIONS_WITH_SERVICE_PREFIX = [
# OPTIONS request is always ok
('OPTIONS', None, None, None, None, None,
None, 'tester', 'tester', 'tester5', 200),
('OPTIONS', None, None, None, None, None,
None, 'tester', 'tester3', 'tester5', 200),
('OPTIONS', None, None, None, None, None,
None, 'tester', None, 'tester5', 200),
('OPTIONS', None, None, None, None, None,
None, 'tester5', 'tester5', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester2', 'tester5', None, 200),
('OPTIONS', None, None, None, None, None,
None, 'tester4', 'tester5', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester', 'tester', 'tester5', 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester', 'tester3', 'tester5', 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester', None, 'tester5', 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester5', 'tester5', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester2', 'tester5', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
None, 'tester4', 'tester5', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester', 'tester', 'tester5', 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester', 'tester3', 'tester5', 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester', None, 'tester5', 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester5', 'tester5', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester2', 'tester5', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
None, 'tester4', 'tester5', None, 200),
# All following actions are using SERVICE prefix: still always ok
('OPTIONS', None, None, None, None, None,
'SERVICE', 'tester', 'tester', 'tester5', 200),
('OPTIONS', None, None, None, None, None,
'SERVICE', 'tester', 'tester3', 'tester5', 200),
('OPTIONS', None, None, None, None, None,
'SERVICE', 'tester', 'tester', None, 200),
('OPTIONS', None, None, None, None, None,
'SERVICE', 'tester', 'tester', 'tester', 200),
('OPTIONS', None, None, None, None, None,
'SERVICE', 'tester', None, 'tester5', 200),
('OPTIONS', None, None, 'UUID', None, None,
'SERVICE', 'tester', 'tester', 'tester5', 200),
('OPTIONS', None, None, 'UUID', None, None,
'SERVICE', 'tester', 'tester3', 'tester5', 200),
('OPTIONS', None, None, 'UUID', None, None,
'SERVICE', 'tester', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', None, None,
'SERVICE', 'tester', 'tester', 'tester', 200),
('OPTIONS', None, None, 'UUID', None, None,
'SERVICE', 'tester', None, 'tester5', 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', 'tester', 'tester5', 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', 'tester3', 'tester5', 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', 'tester', None, 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', 'tester', 'tester', 200),
('OPTIONS', None, None, 'UUID', 'UUID', None,
'SERVICE', 'tester', None, 'tester5', 200)
]
# A scenario of put for container ACL.
# tester3 PUTs an object into a container prepared with the given
# X-Container-Read / X-Container-Write ACL header.  The '%(...)s'
# placeholders presumably get substituted with real project/user ids
# by the test driver -- confirm there.
ACL_PUT = [
# A read ACL never grants write access: every PUT below that relies
# on X-Container-Read is rejected with 403.
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester3'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:*'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester3'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:*'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.rlistings'},
None, 'tester3', 'tester3', None, 403),
# A write ACL grants PUT (201) only when the account:user grant
# matches tester3's account/user, including wildcard-user forms.
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 201),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 201),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 201),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 201),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:*'},
None, 'tester3', 'tester3', None, 201),
# A bare account grant (no ':user') does not authorize the write.
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester3'},
None, 'tester3', 'tester3', None, 201),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:*'},
None, 'tester3', 'tester3', None, 201),
('PUT',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*'},
None, 'tester3', 'tester3', None, 403),
# No ACL at all: tester3 has no access to the container
('PUT',
None,
None, 'UUID', 'UUID',
None,
None, 'tester3', 'tester3', None, 403)
]
# A scenario of delete for container ACL.
# Mirrors ACL_PUT: tester3 DELETEs an object from a container
# prepared with the given ACL header; success is 204 instead of 201.
ACL_DELETE = [
# A read ACL never grants delete access: all 403 below.
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester3'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:*'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester3'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:*'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.rlistings'},
None, 'tester3', 'tester3', None, 403),
# A write ACL grants DELETE (204) only when the account:user grant
# matches tester3's account/user, including wildcard-user forms.
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 204),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 204),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 204),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 204),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:*'},
None, 'tester3', 'tester3', None, 204),
# A bare account grant (no ':user') does not authorize the delete.
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester3'},
None, 'tester3', 'tester3', None, 204),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:*'},
None, 'tester3', 'tester3', None, 204),
('DELETE',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*'},
None, 'tester3', 'tester3', None, 403),
# No ACL at all: tester3 has no access to the container
('DELETE',
None,
None, 'UUID', 'UUID',
None,
None, 'tester3', 'tester3', None, 403)
]
# A scenario of get for container ACL.
# First half: GET on the container itself (listing, object=None,
# success is 200).  Second half: GET on an object in the container.
# tester3 is granted or denied by the prepared X-Container-Read /
# X-Container-Write ACL header.
ACL_GET = [
# GET container listing: read ACLs matching tester3's account:user
# (or wildcard-user forms) allow it.
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:tester3'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:*'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': 'test'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '*:tester3'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '*:*'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '*'},
None, 'tester3', 'tester3', None, 403),
# '.r:*' alone does not allow a container listing; '.rlistings'
# must also be present (and '.rlistings' alone is not enough).
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:*'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:*,.rlistings'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:invalid.domain.com'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:invalid.domain.com,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Read': '.rlistings'},
None, 'tester3', 'tester3', None, 403),
# A write ACL never grants read access: all 403 below.
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:*'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': 'test'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': '*:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': '*:*'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', None,
{'X-Container-Write': '*'},
None, 'tester3', 'tester3', None, 403),
# No ACL at all: tester3 has no access to the container
('GET',
None,
None, 'UUID', None,
None,
None, 'tester3', 'tester3', None, 403),
# GET object: same read-ACL matrix as the listing above, except
# the '.r:' referrer designators.
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester3'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:*'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester3'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:*'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*'},
None, 'tester3', 'tester3', None, 403),
# Unlike the listing case, '.r:*' alone does allow an object GET.
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*,.rlistings'},
None, 'tester3', 'tester3', None, 200),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.rlistings'},
None, 'tester3', 'tester3', None, 403),
# A write ACL never grants read access: all 403 below.
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:*'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester3'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:*'},
None, 'tester3', 'tester3', None, 403),
('GET',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*'},
None, 'tester3', 'tester3', None, 403),
# No ACL at all: tester3 has no access to the object
('GET',
None,
None, 'UUID', 'UUID',
None,
None, 'tester3', 'tester3', None, 403)
]
    # Scenarios of HEAD requests for container ACLs
ACL_HEAD = [
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:tester3'},
None, 'tester3', 'tester3', None, 204),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 204),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 204),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 204),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:*'},
None, 'tester3', 'tester3', None, 204),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': 'test'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '*:tester3'},
None, 'tester3', 'tester3', None, 204),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '*:*'},
None, 'tester3', 'tester3', None, 204),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:*,.rlistings'},
None, 'tester3', 'tester3', None, 204),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:invalid.domain.com'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:invalid.domain.com,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Read': '.rlistings'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': 'test'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': '*:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': '*:*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
{'X-Container-Write': '*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', None,
None,
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester3'},
None, 'tester3', 'tester3', None, 200),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 200),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:*'},
None, 'tester3', 'tester3', None, 200),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester3'},
None, 'tester3', 'tester3', None, 200),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:*'},
None, 'tester3', 'tester3', None, 200),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*'},
None, 'tester3', 'tester3', None, 200),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*,.rlistings'},
None, 'tester3', 'tester3', None, 200),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.rlistings'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester3'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*'},
None, 'tester3', 'tester3', None, 403),
('HEAD',
None,
None, 'UUID', 'UUID',
None,
None, 'tester3', 'tester3', None, 403)
]
    # Scenarios of POST requests for container ACLs
ACL_POST = [
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester3'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:*'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester3'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:*'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com,.rlistings'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.rlistings'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester2'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:*'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester3'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester2'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:*'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester2'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:*'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*'},
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
None,
None, 'tester3', 'tester3', None, 403),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester3',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester2',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:%(tester3_id)s',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:*',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester3',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester2',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:*',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:tester3',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:%(tester3_id)s',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester3',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester2',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:*',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.rlistings',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*,.rlistings',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202),
('POST',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com,.rlistings',
'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 202)
]
    # Scenarios of OPTIONS requests for container ACLs
ACL_OPTIONS = [
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': 'test:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': 'test'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '%(test_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': 'test2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '*:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '*:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '*:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:*,.rlistings'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:invalid.domain.com'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '.r:invalid.domain.com,.rlistings'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Read': '.rlistings'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': 'test:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': 'test'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': '%(test_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': 'test2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': '*:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': '*:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': '*:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
{'X-Container-Write': '*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', None,
None,
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '%(test_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': 'test2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:*,.rlistings'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.r:invalid.domain.com,.rlistings'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Read': '.rlistings'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s:%(tester3_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '%(test_id)s'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': 'test2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester3'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:tester2'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*:*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
{'X-Container-Write': '*'},
None, 'tester3', 'tester3', None, 200),
('OPTIONS',
None,
None, 'UUID', 'UUID',
None,
None, 'tester3', 'tester3', None, 200)
]
# Field names for the RBAC "info" test-case tuples below:
# http_method : HTTP method such as GET, HEAD or OPTIONS
# auth_user_name : user name used for getting a token to authenticate
#                  the request (None means no token is sent)
# expected : expected status code
TEST_CASE_INFO_FORMAT = ('http_method', 'auth_user_name', 'expected')
# Every listed user, including the unauthenticated (None) case, is
# expected to get a 200 for these requests.
RBAC_INFO_GET = [
    ('GET', 'tester', 200),
    ('GET', 'tester6', 200),
    ('GET', 'tester3', 200),
    ('GET', None, 200)
]
RBAC_INFO_HEAD = [
    ('HEAD', 'tester', 200),
    ('HEAD', 'tester6', 200),
    ('HEAD', 'tester3', 200),
    ('HEAD', None, 200)
]
RBAC_INFO_OPTIONS = [
    ('OPTIONS', 'tester', 200),
    ('OPTIONS', 'tester6', 200),
    ('OPTIONS', 'tester3', 200),
    ('OPTIONS', None, 200)
]
# Same checks issued by a user whose account uses the service reseller
# prefix.
RBAC_INFO_GET_WITH_SERVICE_PREFIX = [
    ('GET', 'tester5', 200)
]
RBAC_INFO_HEAD_WITH_SERVICE_PREFIX = [
    ('HEAD', 'tester5', 200)
]
RBAC_INFO_OPTIONS_WITH_SERVICE_PREFIX = [
    ('OPTIONS', 'tester5', 200)
]
class BaseClient(object):
    """Common base for the test clients.

    Loads the six configured functional-test users and the auth
    endpoint settings from the test framework configuration (``tf``).
    """

    def __init__(self):
        self._set_users()
        self.auth_url = tf.swift_test_auth
        self.insecure = tf.insecure
        self.auth_version = tf.swift_test_auth_version

    def _set_users(self):
        # Map each configured user name to its credential info.
        self.users = {
            tf.swift_test_user[i]: {
                'account': tf.swift_test_tenant[i],
                'password': tf.swift_test_key[i],
                'domain': tf.swift_test_domain[i],
            }
            for i in range(6)
        }
class KeystoneClient(BaseClient):
    """Client that resolves user and project names to Keystone ids."""

    def get_id_info(self):
        """Return a dict mapping '<user>_id' and '<account>_id' to ids.

        Users with an empty name are skipped.
        """
        id_info = {}
        for name, info in self.users.items():
            if name == '':
                continue
            user_id, project_id = self._get_id(name)
            id_info['%s_id' % name] = user_id
            id_info['%s_id' % info['account']] = project_id
        return id_info

    def _get_id(self, user_name):
        # Authenticate as the user to discover its user/project ids.
        info = self.users.get(user_name)
        client = ksc.Client(
            auth_url=self.auth_url,
            version=(self.auth_version,),
            username=user_name,
            password=info['password'],
            project_name=info['account'],
            project_domain_name=info['domain'],
            user_domain_name=info['domain'])
        return client.user_id, client.project_id
class SwiftClient(BaseClient):
    """Client for issuing raw HTTP requests to Swift.

    Auth tokens are cached at class level so the same user does not
    re-authenticate for every scenario.
    """

    # user name -> (storage_url, token); shared by all instances
    _tokens = {}

    def _get_auth(self, user_name):
        """Authenticate user_name; return (storage_url, token).

        Returns (None, None) for an unknown user (the anonymous case).
        """
        info = self.users.get(user_name)
        if info is None:
            return None, None
        domain_options = {'user_domain_name': info['domain'],
                          'project_domain_name': info['domain']}
        return get_auth(self.auth_url, user_name, info['password'],
                        snet=False,
                        tenant_name=info['account'],
                        auth_version=self.auth_version,
                        os_options=domain_options,
                        insecure=self.insecure)

    def auth(self, user_name):
        """Return (storage_url, token) for user_name, using the cache."""
        storage_url, token = SwiftClient._tokens.get(user_name, (None, None))
        if not token:
            SwiftClient._tokens[user_name] = self._get_auth(user_name)
            storage_url, token = SwiftClient._tokens[user_name]
        return storage_url, token

    def send_request(self, method, url, token=None, headers=None,
                     service_token=None):
        """Send one HTTP request and return the response object.

        The caller's headers dict is copied, never mutated.
        """
        request_headers = {} if headers is None else headers.copy()
        request_headers.update({'Content-Type': 'application/json',
                                'Accept': 'application/json'})
        if token:
            request_headers['X-Auth-Token'] = token
        if service_token:
            request_headers['X-Service-Token'] = service_token
        if self.insecure:
            parsed, conn = http_connection(url, insecure=self.insecure)
        else:
            parsed, conn = http_connection(url)
        conn.request(method, parsed.path, headers=request_headers)
        return conn.getresponse()
class BaseTestAC(unittest.TestCase):
    """Base class for the data-driven access-control scenarios.

    Each test case is a tuple (field order given by TEST_CASE_FORMAT)
    describing a request to prepare, execute and check against an
    expected status code.
    """

    def setUp(self):
        # keystoneclient is optional; without it these tests cannot run.
        if ksc is None:
            raise unittest.SkipTest('keystoneclient is not available')
        self.reseller_admin = tf.swift_test_user[5]
        self.client = SwiftClient()

    def _create_resource_url(self, storage_url, account=None,
                             container=None, obj=None, reseller_prefix=None):
        """Build the URL addressing an account/container/object.

        e.g.
        storage_url = 'http://localhost/v1/AUTH_xxx'
        storage_url_list[:-1] is ['http:', '', 'localhost', 'v1']
        storage_url_list[-1] is 'AUTH_xxx'
        """
        storage_url_list = storage_url.rstrip('/').split('/')
        base_url = '/'.join(storage_url_list[:-1])
        if account is None:
            account = storage_url_list[-1]
        if reseller_prefix == 'SERVICE':
            # replace endpoint reseller prefix with service reseller prefix
            i = (account.index('_') + 1) if '_' in account else 0
            account = tf.swift_test_service_prefix + account[i:]
        # None components (e.g. no object) are dropped from the path.
        return '/'.join([part for part in (base_url, account, container, obj)
                         if part])

    def _put_container(self, storage_url, token, test_case):
        # Create the target container, remembering it for cleanup.
        resource_url = self._create_resource_url(
            storage_url,
            test_case['account_name'],
            test_case['container_name'],
            reseller_prefix=test_case['reseller_prefix'])
        self.created_resources.append(resource_url)
        self.client.send_request('PUT', resource_url, token,
                                 headers=test_case['prep_container_header'])

    def _put_object(self, storage_url, token, test_case):
        # Create the target object, remembering it for cleanup.
        resource_url = self._create_resource_url(
            storage_url,
            test_case['account_name'],
            test_case['container_name'],
            test_case['object_name'],
            reseller_prefix=test_case['reseller_prefix'])
        self.created_resources.append(resource_url)
        self.client.send_request('PUT', resource_url, token)

    def _get_storage_url_and_token(self, storage_url_user, token_user):
        # The storage URL comes from one user's account, the token from
        # another, so a request can target a foreign account.
        storage_url, _junk = self.client.auth(storage_url_user)
        _junk, token = self.client.auth(token_user)
        return storage_url, token

    def _prepare(self, test_case):
        """Create, as reseller admin, whatever the request will target."""
        storage_url, reseller_token = self._get_storage_url_and_token(
            test_case['target_user_name'], self.reseller_admin)
        if test_case['http_method'] in ('GET', 'POST', 'DELETE', 'HEAD',
                                        'OPTIONS'):
            temp_test_case = test_case.copy()
            if test_case['container_name'] is None:
                # When the target is for account, dummy container will be
                # created to create an account. This account is created by
                # account_autocreate.
                temp_test_case['container_name'] = uuid.uuid4().hex
            self._put_container(storage_url, reseller_token, temp_test_case)
            if test_case['object_name']:
                self._put_object(storage_url, reseller_token, test_case)
        elif test_case['http_method'] in ('PUT',):
            # An object PUT needs its container to exist first.
            if test_case['object_name']:
                self._put_container(storage_url, reseller_token, test_case)

    def _execute(self, test_case):
        """Send the scenario request and return the response status."""
        storage_url, token = self._get_storage_url_and_token(
            test_case['target_user_name'], test_case['auth_user_name'])
        service_user = test_case['service_user_name']
        service_token = (None if service_user is None
                         else self.client.auth(service_user)[1])
        resource_url = self._create_resource_url(
            storage_url,
            test_case['account_name'],
            test_case['container_name'],
            test_case['object_name'],
            test_case['reseller_prefix'])
        # NOTE: was ``in ('PUT')`` which is a substring test against the
        # string 'PUT', not a tuple membership test; fixed to a real
        # one-element tuple (consistent with _prepare above).
        if test_case['http_method'] in ('PUT',):
            self.created_resources.append(resource_url)
        resp = self.client.send_request(test_case['http_method'],
                                        resource_url,
                                        token,
                                        headers=test_case['header'],
                                        service_token=service_token)
        return resp.status

    def _cleanup(self):
        # Delete everything we created, newest first (objects before
        # their containers).
        _junk, reseller_token = self.client.auth(self.reseller_admin)
        for resource_url in reversed(self.created_resources):
            resp = self.client.send_request('DELETE', resource_url,
                                            reseller_token)
            self.assertIn(resp.status, (204, 404))

    def _convert_data(self, data):
        """Turn a test-case tuple into a dict, expanding 'UUID' markers."""
        test_case = dict(zip(TEST_CASE_FORMAT, data))
        if test_case['container_name'] == 'UUID':
            test_case['container_name'] = uuid.uuid4().hex
        if test_case['object_name'] == 'UUID':
            test_case['object_name'] = uuid.uuid4().hex
        return test_case

    def _run_scenario(self, scenario):
        """Prepare, execute and clean up every test case in scenario."""
        for data in scenario:
            test_case = self._convert_data(data)
            self.created_resources = []
            try:
                self._prepare(test_case)
                result = self._execute(test_case)
                self.assertEqual(test_case['expected'],
                                 result,
                                 'Expected %s but got %s for test case %s' %
                                 (test_case['expected'], result, test_case))
            finally:
                self._cleanup()
class TestRBAC(BaseTestAC):
    """Role-based access control scenarios for account/container/object
    requests, run in shuffled order."""

    def test_rbac(self):
        if any((tf.skip, tf.skip2, tf.skip3, tf.skip_if_not_v3,
                tf.skip_if_no_reseller_admin)):
            raise unittest.SkipTest
        scenario = (RBAC_PUT + RBAC_DELETE + RBAC_GET +
                    RBAC_HEAD + RBAC_POST + RBAC_OPTIONS)
        shuffle(scenario)
        self._run_scenario(scenario)

    def test_rbac_with_service_prefix(self):
        if any((tf.skip, tf.skip2, tf.skip3, tf.skip_if_not_v3,
                tf.skip_service_tokens, tf.skip_if_no_reseller_admin)):
            raise unittest.SkipTest
        scenario = (RBAC_PUT_WITH_SERVICE_PREFIX +
                    RBAC_DELETE_WITH_SERVICE_PREFIX +
                    RBAC_GET_WITH_SERVICE_PREFIX +
                    RBAC_HEAD_WITH_SERVICE_PREFIX +
                    RBAC_POST_WITH_SERVICE_PREFIX +
                    RBAC_OPTIONS_WITH_SERVICE_PREFIX)
        shuffle(scenario)
        self._run_scenario(scenario)
class TestRBACInfo(BaseTestAC):
    """RBAC scenarios against the proxy's /info endpoint, which needs no
    prepared resources."""

    def _get_info_url(self):
        # Reuse the reseller admin's endpoint but point the path at /info.
        storage_url = self.client.auth(self.reseller_admin)[0]
        parsed = urlparse(storage_url)
        return urlunparse(
            (parsed.scheme, parsed.netloc, '/info', '', '', ''))

    def _prepare(self, test_case):
        """/info requires no fixtures."""
        pass

    def _execute(self, test_case):
        token = self.client.auth(test_case['auth_user_name'])[1]
        resp = self.client.send_request(
            test_case['http_method'], self.info_url, token)
        return resp.status

    def _cleanup(self):
        """Nothing was created, so nothing to delete."""
        pass

    def _convert_data(self, data):
        return dict(zip(TEST_CASE_INFO_FORMAT, data))

    def test_rbac_info(self):
        if any((tf.skip, tf.skip2, tf.skip3, tf.skip_if_not_v3,
                tf.skip_if_no_reseller_admin)):
            raise unittest.SkipTest
        self.info_url = self._get_info_url()
        scenario = RBAC_INFO_GET + RBAC_INFO_HEAD + RBAC_INFO_OPTIONS
        shuffle(scenario)
        self._run_scenario(scenario)

    def test_rbac_info_with_service_prefix(self):
        if any((tf.skip, tf.skip2, tf.skip3, tf.skip_if_not_v3,
                tf.skip_service_tokens, tf.skip_if_no_reseller_admin)):
            raise unittest.SkipTest
        self.info_url = self._get_info_url()
        scenario = (RBAC_INFO_GET_WITH_SERVICE_PREFIX +
                    RBAC_INFO_HEAD_WITH_SERVICE_PREFIX +
                    RBAC_INFO_OPTIONS_WITH_SERVICE_PREFIX)
        shuffle(scenario)
        self._run_scenario(scenario)
class TestContainerACL(BaseTestAC):
    """Container ACL scenarios; ACL header templates are filled in with
    live keystone identity info before each run."""

    def _convert_data(self, data):
        test_case = super(TestContainerACL, self)._convert_data(data)
        headers = test_case['prep_container_header']
        if headers is not None:
            # Substitute real keystone ids into the ACL header templates.
            for name in headers:
                headers[name] = headers[name] % self.id_info
        return test_case

    def test_container_acl(self):
        if any((tf.skip, tf.skip2, tf.skip3, tf.skip_if_not_v3,
                tf.skip_if_no_reseller_admin)):
            raise unittest.SkipTest
        self.id_info = KeystoneClient().get_id_info()
        scenario = (ACL_PUT + ACL_DELETE + ACL_GET +
                    ACL_HEAD + ACL_POST + ACL_OPTIONS)
        shuffle(scenario)
        self._run_scenario(scenario)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/test_access_control.py |
#!/usr/bin/python
# Copyright (c) 2010-2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hmac
import unittest
import itertools
import hashlib
import six
import time
from six.moves import urllib
from uuid import uuid4
from swift.common.http import is_success
from swift.common.swob import normalize_etag
from swift.common.utils import json, MD5_OF_EMPTY_STRING, md5
from swift.common.middleware.slo import SloGetContext
from test.functional import check_response, retry, requires_acls, \
cluster_info, SkipTest
from test.functional.tests import Base, TestFileComparisonEnv, Utils, BaseEnv
from test.functional.test_slo import TestSloEnv
from test.functional.test_dlo import TestDloEnv
from test.functional.test_tempurl import TestContainerTempurlEnv, \
TestTempurlEnv
from test.functional.swift_test_client import ResponseError
import test.functional as tf
from test.unit import group_by_byte
# Body written to every target object these tests create.
TARGET_BODY = b'target body'


def setUpModule():
    """Set up the functional test package; skip the whole module when the
    cluster does not advertise the symlink middleware."""
    tf.setup_package()
    if 'symlink' not in cluster_info:
        raise SkipTest("Symlinks not enabled")


def tearDownModule():
    """Tear down the functional test package."""
    tf.teardown_package()
class TestSymlinkEnv(BaseEnv):
    """Shared fixtures for the symlink tests: a link container (in both
    accounts), a target container, and one target object."""

    link_cont = uuid4().hex
    tgt_cont = uuid4().hex
    tgt_obj = uuid4().hex

    @classmethod
    def setUp(cls):
        if tf.skip or tf.skip2:
            raise SkipTest
        cls._create_container(cls.tgt_cont)  # use_account=1
        cls._create_container(cls.link_cont)  # use_account=1
        # container in account 2
        cls._create_container(cls.link_cont, use_account=2)
        cls._create_tgt_object()

    @classmethod
    def containers(cls):
        """Return the containers owned by account 1."""
        return (cls.link_cont, cls.tgt_cont)

    @classmethod
    def target_content_location(cls, override_obj=None, override_account=None):
        """Return the Content-Location path expected when the target object
        is served through a symlink."""
        account = override_account or tf.parsed[0].path.split('/', 2)[2]
        return '/v1/%s/%s/%s' % (account, cls.tgt_cont,
                                 override_obj or cls.tgt_obj)

    @classmethod
    def _make_request(cls, url, token, parsed, conn, method,
                      container, obj='', headers=None, body=b'',
                      query_args=None):
        """Issue *method* against container[/obj] and return the response
        with its body already read."""
        headers = headers or {}
        headers.update({'X-Auth-Token': token})
        path = '%s/%s/%s' % (parsed.path, container, obj) if obj \
            else '%s/%s' % (parsed.path, container)
        if query_args:
            path += '?%s' % query_args
        conn.request(method, path, body, headers)
        resp = check_response(conn)
        # to read the buffer and keep it in the attribute, call resp.content
        resp.content
        return resp

    @classmethod
    def _create_container(cls, name, headers=None, use_account=1):
        """PUT a container; raise ResponseError unless it was created or
        already existed (201/202)."""
        headers = headers or {}
        resp = retry(cls._make_request, method='PUT', container=name,
                     headers=headers, use_account=use_account)
        if resp.status not in (201, 202):
            raise ResponseError(resp)
        return name

    @classmethod
    def _create_tgt_object(cls, body=TARGET_BODY):
        """PUT the shared target object and remember its length and etag
        for assertions; sanity-GET it back afterwards."""
        resp = retry(cls._make_request, method='PUT',
                     headers={'Content-Type': 'application/target'},
                     container=cls.tgt_cont, obj=cls.tgt_obj,
                     body=body)
        if resp.status != 201:
            raise ResponseError(resp)
        # sanity: successful put response has content-length 0
        cls.tgt_length = str(len(body))
        cls.tgt_etag = resp.getheader('etag')
        resp = retry(cls._make_request, method='GET',
                     container=cls.tgt_cont, obj=cls.tgt_obj)
        # BUG FIX: this sanity check used 'and', so it only raised when
        # BOTH the status and the body were wrong; either mismatch should
        # fail the readback.
        if resp.status != 200 or resp.content != body:
            raise ResponseError(resp)

    @classmethod
    def tearDown(cls):
        """Delete every object, then every container, in both accounts."""
        delete_containers = [
            (use_account, containers) for use_account, containers in
            enumerate([cls.containers(), [cls.link_cont]], 1)]
        # delete objects inside container
        for use_account, containers in delete_containers:
            if use_account == 2 and tf.skip2:
                continue
            for container in containers:
                while True:
                    cont = container
                    resp = retry(cls._make_request, method='GET',
                                 container=cont, query_args='format=json',
                                 use_account=use_account)
                    if resp.status == 404:
                        break
                    if not is_success(resp.status):
                        raise ResponseError(resp)
                    objs = json.loads(resp.content)
                    if not objs:
                        break
                    for obj in objs:
                        resp = retry(cls._make_request, method='DELETE',
                                     container=container, obj=obj['name'],
                                     use_account=use_account)
                        if resp.status not in (204, 404):
                            raise ResponseError(resp)
        # delete the containers
        for use_account, containers in delete_containers:
            if use_account == 2 and tf.skip2:
                continue
            for container in containers:
                resp = retry(cls._make_request, method='DELETE',
                             container=container,
                             use_account=use_account)
                if resp.status not in (204, 404):
                    raise ResponseError(resp)
class TestSymlink(Base):
env = TestSymlinkEnv
    @classmethod
    def setUpClass(cls):
        """Intentionally empty: the env is (re)created per test in setUp."""
        # To skip env setup for class setup, instead setUp the env for each
        # test method
        pass
def setUp(self):
self.env.setUp()
def object_name_generator():
while True:
yield uuid4().hex
self.obj_name_gen = object_name_generator()
self._account_name = None
    def tearDown(self):
        """Tear down the shared symlink test environment."""
        self.env.tearDown()
@property
def account_name(self):
if not self._account_name:
self._account_name = tf.parsed[0].path.split('/', 2)[2]
return self._account_name
def _make_request(self, url, token, parsed, conn, method,
container, obj='', headers=None, body=b'',
query_args=None, allow_redirects=True):
headers = headers or {}
headers.update({'X-Auth-Token': token})
path = '%s/%s/%s' % (parsed.path, container, obj) if obj \
else '%s/%s' % (parsed.path, container)
if query_args:
path += '?%s' % query_args
conn.requests_args['allow_redirects'] = allow_redirects
conn.request(method, path, body, headers)
resp = check_response(conn)
# to read the buffer and keep it in the attribute, call resp.content
resp.content
return resp
def _make_request_with_symlink_get(self, url, token, parsed, conn, method,
container, obj, headers=None, body=b''):
resp = self._make_request(
url, token, parsed, conn, method, container, obj, headers, body,
query_args='symlink=get')
return resp
def _test_put_symlink(self, link_cont, link_obj, tgt_cont, tgt_obj):
headers = {'X-Symlink-Target': '%s/%s' % (tgt_cont, tgt_obj)}
resp = retry(self._make_request, method='PUT',
container=link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 201)
def _test_put_symlink_with_etag(self, link_cont, link_obj, tgt_cont,
tgt_obj, etag, headers=None):
headers = headers or {}
headers.update({'X-Symlink-Target': '%s/%s' % (tgt_cont, tgt_obj),
'X-Symlink-Target-Etag': etag})
resp = retry(self._make_request, method='PUT',
container=link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 201, resp.content)
def _test_get_as_target_object(
self, link_cont, link_obj, expected_content_location,
use_account=1):
resp = retry(
self._make_request, method='GET',
container=link_cont, obj=link_obj, use_account=use_account)
self.assertEqual(resp.status, 200, resp.content)
self.assertEqual(resp.content, TARGET_BODY)
self.assertEqual(resp.getheader('content-length'),
str(self.env.tgt_length))
self.assertEqual(resp.getheader('etag'), self.env.tgt_etag)
self.assertIn('Content-Location', resp.headers)
self.assertEqual(expected_content_location,
resp.getheader('content-location'))
return resp
def _test_head_as_target_object(self, link_cont, link_obj, use_account=1):
resp = retry(
self._make_request, method='HEAD',
container=link_cont, obj=link_obj, use_account=use_account)
self.assertEqual(resp.status, 200)
def _assertLinkObject(self, link_cont, link_obj, use_account=1):
# HEAD on link_obj itself
resp = retry(
self._make_request_with_symlink_get, method='HEAD',
container=link_cont, obj=link_obj, use_account=use_account)
self.assertEqual(resp.status, 200)
self.assertTrue(resp.getheader('x-symlink-target'))
# GET on link_obj itself
resp = retry(
self._make_request_with_symlink_get, method='GET',
container=link_cont, obj=link_obj, use_account=use_account)
self.assertEqual(resp.status, 200)
self.assertEqual(resp.content, b'')
self.assertEqual(resp.getheader('content-length'), str(0))
self.assertTrue(resp.getheader('x-symlink-target'))
def _assertSymlink(self, link_cont, link_obj,
expected_content_location=None, use_account=1):
expected_content_location = \
expected_content_location or self.env.target_content_location()
# sanity: HEAD/GET on link_obj
self._assertLinkObject(link_cont, link_obj, use_account)
# HEAD target object via symlink
self._test_head_as_target_object(
link_cont=link_cont, link_obj=link_obj, use_account=use_account)
# GET target object via symlink
self._test_get_as_target_object(
link_cont=link_cont, link_obj=link_obj, use_account=use_account,
expected_content_location=expected_content_location)
    def test_symlink_with_encoded_target_name(self):
        """Symlink targets with percent-encoded characters get normalized,
        whichever spelling is used at PUT time."""
        # makes sure to test encoded characters as symlink target
        target_obj = 'dealde%2Fl04 011e%204c8df/flash.png'
        link_obj = uuid4().hex
        # create target using unnormalized path
        resp = retry(
            self._make_request, method='PUT', container=self.env.tgt_cont,
            obj=target_obj, body=TARGET_BODY)
        self.assertEqual(resp.status, 201)
        # you can get it using either name
        resp = retry(
            self._make_request, method='GET', container=self.env.tgt_cont,
            obj=target_obj)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.content, TARGET_BODY)
        # quote(unquote(name)) is the canonical (normalized) spelling
        normalized_quoted_obj = 'dealde/l04%20011e%204c8df/flash.png'
        self.assertEqual(normalized_quoted_obj, urllib.parse.quote(
            urllib.parse.unquote(target_obj)))
        resp = retry(
            self._make_request, method='GET', container=self.env.tgt_cont,
            obj=normalized_quoted_obj)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.content, TARGET_BODY)
        # create a symlink using the un-normalized target path
        self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
                               tgt_cont=self.env.tgt_cont,
                               tgt_obj=target_obj)
        # and it's normalized
        self._assertSymlink(
            self.env.link_cont, link_obj,
            expected_content_location=self.env.target_content_location(
                normalized_quoted_obj))
        # create a symlink using the normalized target path
        self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
                               tgt_cont=self.env.tgt_cont,
                               tgt_obj=normalized_quoted_obj)
        # and it's ALSO normalized
        self._assertSymlink(
            self.env.link_cont, link_obj,
            expected_content_location=self.env.target_content_location(
                normalized_quoted_obj))
def test_symlink_put_head_get(self):
link_obj = uuid4().hex
# PUT link_obj
self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
self._assertSymlink(self.env.link_cont, link_obj)
    def test_symlink_with_etag_put_head_get(self):
        """Static symlinks resolve normally and honor If-Match against the
        target's etag."""
        link_obj = uuid4().hex
        # PUT link_obj
        self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
                                         link_obj=link_obj,
                                         tgt_cont=self.env.tgt_cont,
                                         tgt_obj=self.env.tgt_obj,
                                         etag=self.env.tgt_etag)
        self._assertSymlink(self.env.link_cont, link_obj)
        # If-Match with the real target etag succeeds
        resp = retry(
            self._make_request, method='GET',
            container=self.env.link_cont, obj=link_obj,
            headers={'If-Match': self.env.tgt_etag})
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.getheader('content-location'),
                         self.env.target_content_location())
        # a bogus If-Match is rejected with 412 Precondition Failed
        resp = retry(
            self._make_request, method='GET',
            container=self.env.link_cont, obj=link_obj,
            headers={'If-Match': 'not-the-etag'})
        self.assertEqual(resp.status, 412)
        self.assertEqual(resp.getheader('content-location'),
                         self.env.target_content_location())
    def test_static_symlink_with_bad_etag_put_head_get(self):
        """Once the target is overwritten, a static link 409s (regardless
        of If-Match); once the target is deleted, it 404s."""
        link_obj = uuid4().hex
        # PUT link_obj
        self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
                                         link_obj=link_obj,
                                         tgt_cont=self.env.tgt_cont,
                                         tgt_obj=self.env.tgt_obj,
                                         etag=self.env.tgt_etag)
        # overwrite tgt object
        # NOTE(review): passes a str body where TARGET_BODY is bytes;
        # presumably the client layer encodes it - confirm.
        self.env._create_tgt_object(body='updated target body')
        resp = retry(
            self._make_request, method='HEAD',
            container=self.env.link_cont, obj=link_obj)
        self.assertEqual(resp.status, 409)
        # but we still know where it points
        self.assertEqual(resp.getheader('content-location'),
                         self.env.target_content_location())
        resp = retry(
            self._make_request, method='GET',
            container=self.env.link_cont, obj=link_obj)
        self.assertEqual(resp.status, 409)
        self.assertEqual(resp.getheader('content-location'),
                         self.env.target_content_location())
        # uses a mechanism entirely divorced from if-match
        resp = retry(
            self._make_request, method='GET',
            container=self.env.link_cont, obj=link_obj,
            headers={'If-Match': self.env.tgt_etag})
        self.assertEqual(resp.status, 409)
        self.assertEqual(resp.getheader('content-location'),
                         self.env.target_content_location())
        resp = retry(
            self._make_request, method='GET',
            container=self.env.link_cont, obj=link_obj,
            headers={'If-Match': 'not-the-etag'})
        self.assertEqual(resp.status, 409)
        self.assertEqual(resp.getheader('content-location'),
                         self.env.target_content_location())
        # delete the target (status deliberately not asserted here)
        resp = retry(
            self._make_request, method='DELETE',
            container=self.env.tgt_cont, obj=self.env.tgt_obj)
        # not-found-ness trumps if-match-ness
        resp = retry(
            self._make_request, method='GET',
            container=self.env.link_cont, obj=link_obj)
        self.assertEqual(resp.status, 404)
        self.assertEqual(resp.getheader('content-location'),
                         self.env.target_content_location())
    def test_dynamic_link_to_static_link(self):
        """A dynamic symlink may point at a static link and still resolve
        through to the final target."""
        static_link_obj = uuid4().hex
        # PUT static_link to tgt_obj
        self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
                                         link_obj=static_link_obj,
                                         tgt_cont=self.env.tgt_cont,
                                         tgt_obj=self.env.tgt_obj,
                                         etag=self.env.tgt_etag)
        symlink_obj = uuid4().hex
        # PUT symlink to static_link
        self._test_put_symlink(link_cont=self.env.link_cont,
                               link_obj=symlink_obj,
                               tgt_cont=self.env.link_cont,
                               tgt_obj=static_link_obj)
        self._test_get_as_target_object(
            link_cont=self.env.link_cont, link_obj=symlink_obj,
            expected_content_location=self.env.target_content_location())
    def test_static_link_to_dynamic_link(self):
        """A static link may pin a dynamic symlink (whose own body is
        empty, hence MD5_OF_EMPTY_STRING) and still resolve through."""
        symlink_obj = uuid4().hex
        # PUT symlink to tgt_obj
        self._test_put_symlink(link_cont=self.env.link_cont,
                               link_obj=symlink_obj,
                               tgt_cont=self.env.tgt_cont,
                               tgt_obj=self.env.tgt_obj)
        static_link_obj = uuid4().hex
        # PUT a static_link to the symlink
        self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
                                         link_obj=static_link_obj,
                                         tgt_cont=self.env.link_cont,
                                         tgt_obj=symlink_obj,
                                         etag=MD5_OF_EMPTY_STRING)
        self._test_get_as_target_object(
            link_cont=self.env.link_cont, link_obj=static_link_obj,
            expected_content_location=self.env.target_content_location())
def test_static_link_to_nowhere(self):
missing_obj = uuid4().hex
static_link_obj = uuid4().hex
# PUT a static_link to the missing name
headers = {
'X-Symlink-Target': '%s/%s' % (self.env.link_cont, missing_obj),
'X-Symlink-Target-Etag': MD5_OF_EMPTY_STRING}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=static_link_obj,
headers=headers)
self.assertEqual(resp.status, 409)
self.assertEqual(resp.content, b'X-Symlink-Target does not exist')
    def test_static_link_to_broken_symlink(self):
        """Deleting the final target 404s static links through a symlink,
        yet new static links to the broken symlink may still be made."""
        symlink_obj = uuid4().hex
        # PUT symlink to tgt_obj
        self._test_put_symlink(link_cont=self.env.link_cont,
                               link_obj=symlink_obj,
                               tgt_cont=self.env.tgt_cont,
                               tgt_obj=self.env.tgt_obj)
        static_link_obj = uuid4().hex
        # PUT a static_link to the symlink
        self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
                                         link_obj=static_link_obj,
                                         tgt_cont=self.env.link_cont,
                                         tgt_obj=symlink_obj,
                                         etag=MD5_OF_EMPTY_STRING)
        # break the symlink
        resp = retry(
            self._make_request, method='DELETE',
            container=self.env.tgt_cont, obj=self.env.tgt_obj)
        self.assertEqual(resp.status // 100, 2)
        # sanity
        resp = retry(
            self._make_request, method='GET',
            container=self.env.link_cont, obj=symlink_obj)
        self.assertEqual(resp.status, 404)
        # static_link is broken too!
        resp = retry(
            self._make_request, method='GET',
            container=self.env.link_cont, obj=static_link_obj)
        self.assertEqual(resp.status, 404)
        # interestingly you may create a static_link to a broken symlink
        broken_static_link_obj = uuid4().hex
        # PUT a static_link to the broken symlink
        self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
                                         link_obj=broken_static_link_obj,
                                         tgt_cont=self.env.link_cont,
                                         tgt_obj=symlink_obj,
                                         etag=MD5_OF_EMPTY_STRING)
def test_symlink_get_ranged(self):
link_obj = uuid4().hex
# PUT symlink
self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
headers = {'Range': 'bytes=7-10'}
resp = retry(self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 206)
self.assertEqual(resp.content, b'body')
    def test_create_symlink_before_target(self):
        """A symlink may exist before its target: GET/HEAD 404 (with a
        Content-Location) until the target is written, then resolve."""
        link_obj = uuid4().hex
        target_obj = uuid4().hex
        # PUT link_obj before target object is written
        # PUT, GET, HEAD (on symlink) should all work ok without target object
        self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
                               tgt_cont=self.env.tgt_cont, tgt_obj=target_obj)
        # Try to GET target via symlink.
        # 404 will be returned with Content-Location of target path.
        resp = retry(
            self._make_request, method='GET',
            container=self.env.link_cont, obj=link_obj, use_account=1)
        self.assertEqual(resp.status, 404)
        self.assertIn('Content-Location', resp.headers)
        self.assertEqual(self.env.target_content_location(target_obj),
                         resp.getheader('content-location'))
        # HEAD on target object via symlink should return a 404 since target
        # object has not yet been written
        resp = retry(
            self._make_request, method='HEAD',
            container=self.env.link_cont, obj=link_obj)
        self.assertEqual(resp.status, 404)
        # GET on target object directly
        resp = retry(
            self._make_request, method='GET',
            container=self.env.tgt_cont, obj=target_obj)
        self.assertEqual(resp.status, 404)
        # Now let's write target object and symlink will be able to return
        # object
        resp = retry(
            self._make_request, method='PUT', container=self.env.tgt_cont,
            obj=target_obj, body=TARGET_BODY)
        self.assertEqual(resp.status, 201)
        # successful put response has content-length 0
        target_length = str(len(TARGET_BODY))
        target_etag = resp.getheader('etag')
        # sanity: HEAD/GET on link_obj itself
        self._assertLinkObject(self.env.link_cont, link_obj)
        # HEAD target object via symlink
        self._test_head_as_target_object(
            link_cont=self.env.link_cont, link_obj=link_obj)
        # GET target object via symlink
        resp = retry(self._make_request, method='GET',
                     container=self.env.link_cont, obj=link_obj)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.content, TARGET_BODY)
        # NOTE(review): target_length is already a str; the extra str()
        # below is redundant but harmless.
        self.assertEqual(resp.getheader('content-length'), str(target_length))
        self.assertEqual(resp.getheader('etag'), target_etag)
        self.assertIn('Content-Location', resp.headers)
        self.assertEqual(self.env.target_content_location(target_obj),
                         resp.getheader('content-location'))
    def test_symlink_chain(self):
        """Chains of up to symloop_max dynamic links resolve; one more
        level yields 409 on HEAD/GET through the chain."""
        # Testing to symlink chain like symlink -> symlink -> target.
        symloop_max = cluster_info['symlink']['symloop_max']
        # create symlink chain in a container. To simplify,
        # use target container for all objects (symlinks and target) here
        previous = self.env.tgt_obj
        container = self.env.tgt_cont
        for link_obj in itertools.islice(self.obj_name_gen, symloop_max):
            # PUT link_obj point to tgt_obj
            self._test_put_symlink(
                link_cont=container, link_obj=link_obj,
                tgt_cont=container, tgt_obj=previous)
            # set current link_obj to previous
            previous = link_obj
        # the last link is valid for symloop_max constraint
        max_chain_link = link_obj
        self._assertSymlink(link_cont=container, link_obj=max_chain_link)
        # PUT a new link_obj points to the max_chain_link
        # that will result in 409 error on the HEAD/GET.
        too_many_chain_link = next(self.obj_name_gen)
        self._test_put_symlink(
            link_cont=container, link_obj=too_many_chain_link,
            tgt_cont=container, tgt_obj=max_chain_link)
        # try to HEAD to target object via too_many_chain_link
        resp = retry(self._make_request, method='HEAD',
                     container=container,
                     obj=too_many_chain_link)
        self.assertEqual(resp.status, 409)
        self.assertEqual(resp.content, b'')
        # try to GET to target object via too_many_chain_link
        resp = retry(self._make_request, method='GET',
                     container=container,
                     obj=too_many_chain_link)
        self.assertEqual(resp.status, 409)
        self.assertEqual(
            resp.content,
            b'Too many levels of symbolic links, maximum allowed is %d' %
            symloop_max)
        # However, HEAD/GET to the (just) link is still ok
        self._assertLinkObject(container, too_many_chain_link)
    def test_symlink_chain_with_etag(self):
        """Like test_symlink_chain, but every hop is a static link; etag
        validation is chained through the links at PUT time."""
        # Testing to symlink chain like symlink -> symlink -> target.
        symloop_max = cluster_info['symlink']['symloop_max']
        # create symlink chain in a container. To simplify,
        # use target container for all objects (symlinks and target) here
        previous = self.env.tgt_obj
        container = self.env.tgt_cont
        for link_obj in itertools.islice(self.obj_name_gen, symloop_max):
            # PUT link_obj point to tgt_obj
            self._test_put_symlink_with_etag(link_cont=container,
                                             link_obj=link_obj,
                                             tgt_cont=container,
                                             tgt_obj=previous,
                                             etag=self.env.tgt_etag)
            # set current link_obj to previous
            previous = link_obj
        # the last link is valid for symloop_max constraint
        max_chain_link = link_obj
        self._assertSymlink(link_cont=container, link_obj=max_chain_link)
        # chained etag validation works as long as the target symlink works
        headers = {'X-Symlink-Target': '%s/%s' % (container, max_chain_link),
                   'X-Symlink-Target-Etag': 'not-the-real-etag'}
        resp = retry(self._make_request, method='PUT',
                     container=container, obj=uuid4().hex,
                     headers=headers)
        self.assertEqual(resp.status, 409)
        # PUT a new link_obj pointing to the max_chain_link can validate the
        # ETag but will result in 409 error on the HEAD/GET.
        too_many_chain_link = next(self.obj_name_gen)
        self._test_put_symlink_with_etag(
            link_cont=container, link_obj=too_many_chain_link,
            tgt_cont=container, tgt_obj=max_chain_link,
            etag=self.env.tgt_etag)
        # try to HEAD to target object via too_many_chain_link
        resp = retry(self._make_request, method='HEAD',
                     container=container,
                     obj=too_many_chain_link)
        self.assertEqual(resp.status, 409)
        self.assertEqual(resp.content, b'')
        # try to GET to target object via too_many_chain_link
        resp = retry(self._make_request, method='GET',
                     container=container,
                     obj=too_many_chain_link)
        self.assertEqual(resp.status, 409)
        self.assertEqual(
            resp.content,
            b'Too many levels of symbolic links, maximum allowed is %d' %
            symloop_max)
        # However, HEAD/GET to the (just) link is still ok
        self._assertLinkObject(container, too_many_chain_link)
    def test_symlink_and_slo_manifest_chain(self):
        """Alternating SLO manifests and symlink chains resolve up to
        max_slo_recursion_depth; one more manifest layer yields 409."""
        if 'slo' not in cluster_info:
            raise SkipTest
        symloop_max = cluster_info['symlink']['symloop_max']
        # create symlink chain in a container. To simplify,
        # use target container for all objects (symlinks and target) here
        previous = self.env.tgt_obj
        container = self.env.tgt_cont
        # make symlink and slo manifest chain
        # e.g. slo -> symlink -> symlink -> slo -> symlink -> symlink -> target
        for _ in range(SloGetContext.max_slo_recursion_depth or 1):
            for link_obj in itertools.islice(self.obj_name_gen, symloop_max):
                # PUT link_obj point to previous object
                self._test_put_symlink(
                    link_cont=container, link_obj=link_obj,
                    tgt_cont=container, tgt_obj=previous)
                # next link will point to this link
                previous = link_obj
            # NOTE(review): this for/else always runs (the loop has no
            # break); it reads as "after the chain, cap it with a manifest".
            else:
                # PUT a manifest with single segment to the symlink
                manifest_obj = next(self.obj_name_gen)
                manifest = json.dumps(
                    [{'path': '/%s/%s' % (container, link_obj)}])
                # NOTE(review): this PUT sends manifest as str while the
                # later one encodes to ascii bytes - presumably both are
                # accepted by the client layer; confirm.
                resp = retry(self._make_request, method='PUT',
                             container=container, obj=manifest_obj,
                             body=manifest,
                             query_args='multipart-manifest=put')
                self.assertEqual(resp.status, 201)  # sanity
                previous = manifest_obj
        # From the last manifest to the final target obj length is
        # symloop_max * max_slo_recursion_depth
        max_recursion_manifest = previous
        # Check GET to max_recursion_manifest returns valid target object
        resp = retry(
            self._make_request, method='GET', container=container,
            obj=max_recursion_manifest)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.content, TARGET_BODY)
        self.assertEqual(resp.getheader('content-length'),
                         str(self.env.tgt_length))
        # N.B. since the last manifest is slo so it will remove
        # content-location info from the response header
        self.assertNotIn('Content-Location', resp.headers)
        # sanity: one more link to the slo can work still
        one_more_link = next(self.obj_name_gen)
        self._test_put_symlink(
            link_cont=container, link_obj=one_more_link,
            tgt_cont=container, tgt_obj=max_recursion_manifest)
        resp = retry(
            self._make_request, method='GET', container=container,
            obj=one_more_link)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.content, TARGET_BODY)
        self.assertEqual(resp.getheader('content-length'),
                         str(self.env.tgt_length))
        self.assertIn('Content-Location', resp.headers)
        self.assertIn('%s/%s' % (container, max_recursion_manifest),
                      resp.getheader('content-location'))
        # PUT a new slo manifest point to the max_recursion_manifest
        # Symlink and slo manifest chain from the new manifest to the final
        # target has (max_slo_recursion_depth + 1) manifests.
        too_many_recursion_manifest = next(self.obj_name_gen)
        manifest = json.dumps(
            [{'path': '/%s/%s' % (container, max_recursion_manifest)}])
        resp = retry(self._make_request, method='PUT',
                     container=container, obj=too_many_recursion_manifest,
                     body=manifest.encode('ascii'),
                     query_args='multipart-manifest=put')
        self.assertEqual(resp.status, 201)  # sanity
        # Check GET to too_many_recursion_mani returns 409 error
        resp = retry(self._make_request, method='GET',
                     container=container, obj=too_many_recursion_manifest)
        self.assertEqual(resp.status, 409)
        # N.B. This error message is from slo middleware that uses default.
        self.assertEqual(
            resp.content,
            b'<html><h1>Conflict</h1><p>There was a conflict when trying to'
            b' complete your request.</p></html>')
def test_symlink_put_missing_target_container(self):
link_obj = uuid4().hex
# set only object, no container in the prefix
headers = {'X-Symlink-Target': self.env.tgt_obj}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 412)
self.assertEqual(resp.content,
b'X-Symlink-Target header must be of the form'
b' <container name>/<object name>')
def test_symlink_put_non_zero_length(self):
link_obj = uuid4().hex
headers = {'X-Symlink-Target':
'%s/%s' % (self.env.tgt_cont, self.env.tgt_obj)}
resp = retry(
self._make_request, method='PUT', container=self.env.link_cont,
obj=link_obj, body=b'non-zero-length', headers=headers)
self.assertEqual(resp.status, 400)
self.assertEqual(resp.content,
b'Symlink requests require a zero byte body')
def test_symlink_target_itself(self):
link_obj = uuid4().hex
headers = {
'X-Symlink-Target': '%s/%s' % (self.env.link_cont, link_obj)}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 400)
self.assertEqual(resp.content, b'Symlink cannot target itself')
    def test_symlink_target_each_other(self):
        """Two symlinks pointing at each other form a loop: resolving
        either one 409s at symloop_max, though each link itself is fine."""
        symloop_max = cluster_info['symlink']['symloop_max']
        link_obj1 = uuid4().hex
        link_obj2 = uuid4().hex
        # PUT two links which targets each other
        self._test_put_symlink(
            link_cont=self.env.link_cont, link_obj=link_obj1,
            tgt_cont=self.env.link_cont, tgt_obj=link_obj2)
        self._test_put_symlink(
            link_cont=self.env.link_cont, link_obj=link_obj2,
            tgt_cont=self.env.link_cont, tgt_obj=link_obj1)
        for obj in (link_obj1, link_obj2):
            # sanity: HEAD/GET on the link itself is ok
            self._assertLinkObject(self.env.link_cont, obj)
        for obj in (link_obj1, link_obj2):
            resp = retry(self._make_request, method='HEAD',
                         container=self.env.link_cont, obj=obj)
            self.assertEqual(resp.status, 409)
            resp = retry(self._make_request, method='GET',
                         container=self.env.link_cont, obj=obj)
            self.assertEqual(resp.status, 409)
            self.assertEqual(
                resp.content,
                b'Too many levels of symbolic links, maximum allowed is %d' %
                symloop_max)
def test_symlink_put_copy_from(self):
link_obj1 = uuid4().hex
link_obj2 = uuid4().hex
self._test_put_symlink(link_cont=self.env.link_cont,
link_obj=link_obj1,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
copy_src = '%s/%s' % (self.env.link_cont, link_obj1)
# copy symlink
headers = {'X-Copy-From': copy_src}
resp = retry(self._make_request_with_symlink_get,
method='PUT',
container=self.env.link_cont, obj=link_obj2,
headers=headers)
self.assertEqual(resp.status, 201)
self._assertSymlink(link_cont=self.env.link_cont, link_obj=link_obj2)
    @requires_acls
    def test_symlink_put_copy_from_cross_account(self):
        """Copy a symlink across accounts, with and without
        X-Symlink-Target-Account.

        Without the target-account header the copied link re-anchors to the
        destination account (and dangles); with it, the copy still points
        back at account 1's target object.
        """
        link_obj1 = uuid4().hex
        link_obj2 = uuid4().hex
        self._test_put_symlink(link_cont=self.env.link_cont,
                               link_obj=link_obj1,
                               tgt_cont=self.env.tgt_cont,
                               tgt_obj=self.env.tgt_obj)
        copy_src = '%s/%s' % (self.env.link_cont, link_obj1)
        perm_two = tf.swift_test_perm[1]
        # add X-Content-Read to account 1 link_cont and tgt_cont
        # permit account 2 to read account 1 link_cont to perform copy_src
        # and tgt_cont so that link_obj2 can refer to tgt_object
        # this ACL allows the copy to succeed
        headers = {'X-Container-Read': perm_two}
        resp = retry(
            self._make_request, method='POST',
            container=self.env.link_cont, headers=headers)
        self.assertEqual(resp.status, 204)
        # this ACL allows link_obj in account 2 to target object in account 1
        resp = retry(self._make_request, method='POST',
                     container=self.env.tgt_cont, headers=headers)
        self.assertEqual(resp.status, 204)
        # copy symlink itself to a different account w/o
        # X-Symlink-Target-Account. This operation will result in copying
        # symlink to the account 2 container that points to the
        # container/object in the account 2.
        # (the container/object is not prepared)
        headers = {'X-Copy-From-Account': self.account_name,
                   'X-Copy-From': copy_src}
        resp = retry(self._make_request_with_symlink_get, method='PUT',
                     container=self.env.link_cont, obj=link_obj2,
                     headers=headers, use_account=2)
        self.assertEqual(resp.status, 201)
        # sanity: HEAD/GET on link_obj itself
        self._assertLinkObject(self.env.link_cont, link_obj2, use_account=2)
        account_two = tf.parsed[1].path.split('/', 2)[2]
        # no target object in the account 2
        for method in ('HEAD', 'GET'):
            resp = retry(
                self._make_request, method=method,
                container=self.env.link_cont, obj=link_obj2, use_account=2)
            self.assertEqual(resp.status, 404)
            self.assertIn('content-location', resp.headers)
            self.assertEqual(
                self.env.target_content_location(override_account=account_two),
                resp.getheader('content-location'))
        # copy symlink itself to a different account with target account
        # the target path will be in account 1
        # the target path will have an object
        # NOTE(review): 'X-Symlink-target-Account' uses a lowercase 't';
        # HTTP header names are case-insensitive so this works, but consider
        # normalizing to 'X-Symlink-Target-Account' for consistency.
        headers = {'X-Symlink-target-Account': self.account_name,
                   'X-Copy-From-Account': self.account_name,
                   'X-Copy-From': copy_src}
        resp = retry(
            self._make_request_with_symlink_get, method='PUT',
            container=self.env.link_cont, obj=link_obj2,
            headers=headers, use_account=2)
        self.assertEqual(resp.status, 201)
        self._assertSymlink(link_cont=self.env.link_cont, link_obj=link_obj2,
                            use_account=2)
def test_symlink_copy_from_target(self):
link_obj1 = uuid4().hex
obj2 = uuid4().hex
self._test_put_symlink(link_cont=self.env.link_cont,
link_obj=link_obj1,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
copy_src = '%s/%s' % (self.env.link_cont, link_obj1)
# issuing a COPY request to a symlink w/o symlink=get, should copy
# the target object, not the symlink itself
headers = {'X-Copy-From': copy_src}
resp = retry(self._make_request, method='PUT',
container=self.env.tgt_cont, obj=obj2,
headers=headers)
self.assertEqual(resp.status, 201)
# HEAD to the copied object
resp = retry(self._make_request, method='HEAD',
container=self.env.tgt_cont, obj=obj2)
self.assertEqual(200, resp.status)
self.assertNotIn('Content-Location', resp.headers)
# GET to the copied object
resp = retry(self._make_request, method='GET',
container=self.env.tgt_cont, obj=obj2)
# But... this is a raw object (not a symlink)
self.assertEqual(200, resp.status)
self.assertNotIn('Content-Location', resp.headers)
self.assertEqual(TARGET_BODY, resp.content)
def test_symlink_copy(self):
link_obj1 = uuid4().hex
link_obj2 = uuid4().hex
self._test_put_symlink(link_cont=self.env.link_cont,
link_obj=link_obj1,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
copy_dst = '%s/%s' % (self.env.link_cont, link_obj2)
# copy symlink
headers = {'Destination': copy_dst}
resp = retry(
self._make_request_with_symlink_get, method='COPY',
container=self.env.link_cont, obj=link_obj1, headers=headers)
self.assertEqual(resp.status, 201)
self._assertSymlink(link_cont=self.env.link_cont, link_obj=link_obj2)
    def test_symlink_copy_target(self):
        """COPY on a symlink w/o ?symlink=get copies the target object."""
        link_obj1 = uuid4().hex
        obj2 = uuid4().hex
        self._test_put_symlink(link_cont=self.env.link_cont,
                               link_obj=link_obj1,
                               tgt_cont=self.env.tgt_cont,
                               tgt_obj=self.env.tgt_obj)
        copy_dst = '%s/%s' % (self.env.tgt_cont, obj2)
        # copy target object
        headers = {'Destination': copy_dst}
        resp = retry(self._make_request, method='COPY',
                     container=self.env.link_cont, obj=link_obj1,
                     headers=headers)
        self.assertEqual(resp.status, 201)
        # HEAD the copied object; no Content-Location means it is not a link
        resp = retry(self._make_request, method='HEAD',
                     container=self.env.tgt_cont, obj=obj2)
        self.assertEqual(resp.status, 200)
        self.assertNotIn('Content-Location', resp.headers)
        # GET to the copied object that should be a raw object (not symlink)
        resp = retry(self._make_request, method='GET',
                     container=self.env.tgt_cont, obj=obj2)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.content, TARGET_BODY)
        self.assertNotIn('Content-Location', resp.headers)
    def test_post_symlink(self):
        """POST to a symlink 307s to its target.

        With redirects disabled the metadata is applied to the link object
        itself; a direct POST to the target is then visible when reading
        through the link, while the link-only metadata is not.
        """
        link_obj = uuid4().hex
        value1 = uuid4().hex
        self._test_put_symlink(link_cont=self.env.link_cont,
                               link_obj=link_obj,
                               tgt_cont=self.env.tgt_cont,
                               tgt_obj=self.env.tgt_obj)
        # POSTing to a symlink is not allowed and should return a 307
        headers = {'X-Object-Meta-Alpha': 'apple'}
        resp = retry(
            self._make_request, method='POST', container=self.env.link_cont,
            obj=link_obj, headers=headers, allow_redirects=False)
        self.assertEqual(resp.status, 307)
        # we are using account 0 in this test
        expected_location_hdr = "%s/%s/%s" % (
            tf.parsed[0].path, self.env.tgt_cont, self.env.tgt_obj)
        self.assertEqual(resp.getheader('Location'), expected_location_hdr)
        # Read header from symlink itself. The metadata is applied to symlink
        resp = retry(self._make_request_with_symlink_get, method='GET',
                     container=self.env.link_cont, obj=link_obj)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.getheader('X-Object-Meta-Alpha'), 'apple')
        # Post the target object directly
        headers = {'x-object-meta-test': value1}
        resp = retry(
            self._make_request, method='POST', container=self.env.tgt_cont,
            obj=self.env.tgt_obj, headers=headers)
        self.assertEqual(resp.status, 202)
        resp = retry(self._make_request, method='GET',
                     container=self.env.tgt_cont, obj=self.env.tgt_obj)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.getheader('X-Object-Meta-Test'), value1)
        # Read header from target object via symlink, should exist now.
        resp = retry(
            self._make_request, method='GET', container=self.env.link_cont,
            obj=link_obj)
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.getheader('X-Object-Meta-Test'), value1)
        # sanity: no X-Object-Meta-Alpha exists in the response header
        self.assertNotIn('X-Object-Meta-Alpha', resp.headers)
def test_post_to_broken_dynamic_symlink(self):
# create a symlink to nowhere
link_obj = '%s-the-link' % uuid4().hex
tgt_obj = '%s-no-where' % uuid4().hex
headers = {'X-Symlink-Target': '%s/%s' % (self.env.tgt_cont, tgt_obj)}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 201)
# it's a real link!
self._assertLinkObject(self.env.link_cont, link_obj)
# ... it's just broken
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj)
self.assertEqual(resp.status, 404)
target_path = '/v1/%s/%s/%s' % (
self.account_name, self.env.tgt_cont, tgt_obj)
self.assertEqual(target_path, resp.headers['Content-Location'])
# we'll redirect with the Location header to the (invalid) target
headers = {'X-Object-Meta-Alpha': 'apple'}
resp = retry(
self._make_request, method='POST', container=self.env.link_cont,
obj=link_obj, headers=headers, allow_redirects=False)
self.assertEqual(resp.status, 307)
self.assertEqual(target_path, resp.headers['Location'])
# and of course metadata *is* applied to the link
resp = retry(
self._make_request_with_symlink_get, method='HEAD',
container=self.env.link_cont, obj=link_obj)
self.assertEqual(resp.status, 200)
self.assertTrue(resp.getheader('X-Object-Meta-Alpha'), 'apple')
    def test_post_to_broken_static_symlink(self):
        """A static link whose target was overwritten 409s on read, but a
        POST still 307s and reports the stale etag in the response."""
        link_obj = uuid4().hex
        # PUT link_obj
        self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
                                         link_obj=link_obj,
                                         tgt_cont=self.env.tgt_cont,
                                         tgt_obj=self.env.tgt_obj,
                                         etag=self.env.tgt_etag)
        # overwrite tgt object
        old_tgt_etag = normalize_etag(self.env.tgt_etag)
        self.env._create_tgt_object(body='updated target body')
        # sanity: the recorded etag no longer matches, so read-through 409s
        resp = retry(
            self._make_request, method='HEAD',
            container=self.env.link_cont, obj=link_obj)
        self.assertEqual(resp.status, 409)
        # but POST will still 307
        headers = {'X-Object-Meta-Alpha': 'apple'}
        resp = retry(
            self._make_request, method='POST', container=self.env.link_cont,
            obj=link_obj, headers=headers, allow_redirects=False)
        self.assertEqual(resp.status, 307)
        target_path = '/v1/%s/%s/%s' % (
            self.account_name, self.env.tgt_cont, self.env.tgt_obj)
        self.assertEqual(target_path, resp.headers['Location'])
        # but we give you the Etag just like... FYI?
        self.assertEqual(old_tgt_etag, resp.headers['X-Symlink-Target-Etag'])
def test_post_with_symlink_header(self):
# POSTing to a symlink is not allowed and should return a 307
# updating the symlink target with a POST should always fail
headers = {'X-Symlink-Target': 'container/new_target'}
resp = retry(
self._make_request, method='POST', container=self.env.tgt_cont,
obj=self.env.tgt_obj, headers=headers, allow_redirects=False)
self.assertEqual(resp.status, 400)
self.assertEqual(resp.content,
b'A PUT request is required to set a symlink target')
    def test_overwrite_symlink(self):
        """PUTting a symlink over an existing symlink re-points it."""
        link_obj = uuid4().hex
        new_tgt_obj = "new_target_object_name"
        new_tgt = '%s/%s' % (self.env.tgt_cont, new_tgt_obj)
        self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
                               tgt_cont=self.env.tgt_cont,
                               tgt_obj=self.env.tgt_obj)
        # sanity
        self._assertSymlink(self.env.link_cont, link_obj)
        # Overwrite symlink with PUT
        self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
                               tgt_cont=self.env.tgt_cont,
                               tgt_obj=new_tgt_obj)
        # head symlink to check X-Symlink-Target header
        resp = retry(self._make_request_with_symlink_get, method='HEAD',
                     container=self.env.link_cont, obj=link_obj)
        self.assertEqual(resp.status, 200)
        # the link must now point at the *new* target (the previous comment
        # claiming the old target is kept contradicted this assertion)
        self.assertEqual(resp.getheader('X-Symlink-Target'), new_tgt)
def test_delete_symlink(self):
link_obj = uuid4().hex
self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
resp = retry(self._make_request, method='DELETE',
container=self.env.link_cont, obj=link_obj)
self.assertEqual(resp.status, 204)
# make sure target object was not deleted and is still reachable
resp = retry(self._make_request, method='GET',
container=self.env.tgt_cont, obj=self.env.tgt_obj)
self.assertEqual(resp.status, 200)
self.assertEqual(resp.content, TARGET_BODY)
@requires_acls
def test_symlink_put_target_account(self):
if tf.skip or tf.skip2:
raise SkipTest
link_obj = uuid4().hex
# create symlink in account 2
# pointing to account 1
headers = {'X-Symlink-Target-Account': self.account_name,
'X-Symlink-Target':
'%s/%s' % (self.env.tgt_cont, self.env.tgt_obj)}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=headers, use_account=2)
self.assertEqual(resp.status, 201)
perm_two = tf.swift_test_perm[1]
# sanity test:
# it should be ok to get the symlink itself, but not the target object
# because the read acl has not been configured yet
self._assertLinkObject(self.env.link_cont, link_obj, use_account=2)
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj, use_account=2)
self.assertEqual(resp.status, 403)
# still know where it's pointing
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
# add X-Content-Read to account 1 tgt_cont
# permit account 2 to read account 1 tgt_cont
# add acl to allow reading from source
headers = {'X-Container-Read': perm_two}
resp = retry(self._make_request, method='POST',
container=self.env.tgt_cont, headers=headers)
self.assertEqual(resp.status, 204)
# GET on link_obj itself
self._assertLinkObject(self.env.link_cont, link_obj, use_account=2)
# GET to target object via symlink
resp = self._test_get_as_target_object(
self.env.link_cont, link_obj,
expected_content_location=self.env.target_content_location(),
use_account=2)
@requires_acls
def test_symlink_with_etag_put_target_account(self):
if tf.skip or tf.skip2:
raise SkipTest
link_obj = uuid4().hex
# try to create a symlink in account 2 pointing to account 1
symlink_headers = {
'X-Symlink-Target-Account': self.account_name,
'X-Symlink-Target':
'%s/%s' % (self.env.tgt_cont, self.env.tgt_obj),
'X-Symlink-Target-Etag': self.env.tgt_etag}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=symlink_headers, use_account=2)
# since we don't have read access to verify the object we get the
# permissions error
self.assertEqual(resp.status, 403)
perm_two = tf.swift_test_perm[1]
# add X-Content-Read to account 1 tgt_cont
# permit account 2 to read account 1 tgt_cont
# add acl to allow reading from source
acl_headers = {'X-Container-Read': perm_two}
resp = retry(self._make_request, method='POST',
container=self.env.tgt_cont, headers=acl_headers)
self.assertEqual(resp.status, 204)
# now we can create the symlink
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=symlink_headers, use_account=2)
self.assertEqual(resp.status, 201)
self._assertLinkObject(self.env.link_cont, link_obj, use_account=2)
# GET to target object via symlink
resp = self._test_get_as_target_object(
self.env.link_cont, link_obj,
expected_content_location=self.env.target_content_location(),
use_account=2)
# Overwrite target
resp = retry(self._make_request, method='PUT',
container=self.env.tgt_cont, obj=self.env.tgt_obj,
body='some other content')
self.assertEqual(resp.status, 201)
# link is now broken
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj, use_account=2)
self.assertEqual(resp.status, 409)
# but we still know where it points
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
# sanity test, remove permissions
headers = {'X-Remove-Container-Read': 'remove'}
resp = retry(self._make_request, method='POST',
container=self.env.tgt_cont, headers=headers)
self.assertEqual(resp.status, 204)
# it should be ok to get the symlink itself, but not the target object
# because the read acl has been revoked
self._assertLinkObject(self.env.link_cont, link_obj, use_account=2)
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj, use_account=2)
self.assertEqual(resp.status, 403)
# Still know where it is, though
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
    def test_symlink_invalid_etag(self):
        """A static-link PUT whose etag doesn't match the target 409s."""
        link_obj = uuid4().hex
        headers = {'X-Symlink-Target': '%s/%s' % (self.env.tgt_cont,
                                                  self.env.tgt_obj),
                   'X-Symlink-Target-Etag': 'not-the-real-etag'}
        resp = retry(self._make_request, method='PUT',
                     container=self.env.link_cont, obj=link_obj,
                     headers=headers)
        self.assertEqual(resp.status, 409)
        # NOTE(review): hard-coded md5 is presumably the checksum of
        # TARGET_BODY -- this breaks if TARGET_BODY ever changes; confirm
        self.assertEqual(resp.content,
                         b"Object Etag 'ab706c400731332bffa67ed4bc15dcac' "
                         b"does not match X-Symlink-Target-Etag header "
                         b"'not-the-real-etag'")
def test_symlink_object_listing(self):
link_obj = uuid4().hex
self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
# sanity
self._assertSymlink(self.env.link_cont, link_obj)
resp = retry(self._make_request, method='GET',
container=self.env.link_cont,
query_args='format=json')
self.assertEqual(resp.status, 200)
object_list = json.loads(resp.content)
self.assertEqual(len(object_list), 1)
obj_info = object_list[0]
self.assertIn('symlink_path', obj_info)
self.assertEqual(self.env.target_content_location(),
obj_info['symlink_path'])
self.assertNotIn('symlink_etag', obj_info)
    def test_static_link_object_listing(self):
        """Static links list with symlink_etag/symlink_bytes, and a POST
        (which 307s) can still change the listed content type."""
        link_obj = uuid4().hex
        self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
                                         link_obj=link_obj,
                                         tgt_cont=self.env.tgt_cont,
                                         tgt_obj=self.env.tgt_obj,
                                         etag=self.env.tgt_etag)
        # sanity
        self._assertSymlink(self.env.link_cont, link_obj)
        resp = retry(self._make_request, method='GET',
                     container=self.env.link_cont,
                     query_args='format=json')
        self.assertEqual(resp.status, 200)
        object_list = json.loads(resp.content)
        self.assertEqual(len(object_list), 1)
        self.assertIn('symlink_path', object_list[0])
        self.assertEqual(self.env.target_content_location(),
                         object_list[0]['symlink_path'])
        obj_info = object_list[0]
        # static link exposes the target's etag and byte count
        self.assertIn('symlink_etag', obj_info)
        self.assertEqual(normalize_etag(self.env.tgt_etag),
                         obj_info['symlink_etag'])
        self.assertEqual(int(self.env.tgt_length),
                         obj_info['symlink_bytes'])
        self.assertEqual(obj_info['content_type'], 'application/target')
        # POSTing to a static_link can change the listing Content-Type
        headers = {'Content-Type': 'application/foo'}
        resp = retry(
            self._make_request, method='POST', container=self.env.link_cont,
            obj=link_obj, headers=headers, allow_redirects=False)
        self.assertEqual(resp.status, 307)
        resp = retry(self._make_request, method='GET',
                     container=self.env.link_cont,
                     query_args='format=json')
        self.assertEqual(resp.status, 200)
        object_list = json.loads(resp.content)
        self.assertEqual(len(object_list), 1)
        obj_info = object_list[0]
        self.assertEqual(obj_info['content_type'], 'application/foo')
class TestCrossPolicySymlinkEnv(TestSymlinkEnv):
    """Environment placing link and target containers in different
    storage policies (skipped unless the cluster has more than one)."""

    # tri-state: None = not yet probed; True/False after the first setUp
    multiple_policies_enabled = None
    # defined up front so the probe below can fail without leaving the
    # attribute missing -- previously, if from_info() raised
    # AssertionError, `cls.policies` was never assigned and the
    # `if cls.policies` check raised AttributeError
    policies = None

    @classmethod
    def setUp(cls):
        if tf.skip or tf.skip2:
            raise SkipTest
        if cls.multiple_policies_enabled is None:
            try:
                cls.policies = tf.FunctionalStoragePolicyCollection.from_info()
            except AssertionError:
                pass
        if cls.policies and len(cls.policies) > 1:
            cls.multiple_policies_enabled = True
        else:
            cls.multiple_policies_enabled = False
            return
        # pick two distinct policies for the link and target containers
        link_policy = cls.policies.select()
        tgt_policy = cls.policies.exclude(name=link_policy['name']).select()
        link_header = {'X-Storage-Policy': link_policy['name']}
        tgt_header = {'X-Storage-Policy': tgt_policy['name']}
        cls._create_container(cls.link_cont, headers=link_header)
        cls._create_container(cls.tgt_cont, headers=tgt_header)
        # container in account 2
        cls._create_container(cls.link_cont, headers=link_header,
                              use_account=2)
        cls._create_tgt_object()
class TestCrossPolicySymlink(TestSymlink):
    """Re-run the whole TestSymlink suite with link and target containers
    in different storage policies."""
    env = TestCrossPolicySymlinkEnv
    def setUp(self):
        super(TestCrossPolicySymlink, self).setUp()
        if self.env.multiple_policies_enabled is False:
            raise SkipTest('Cross policy test requires multiple policies')
        elif self.env.multiple_policies_enabled is not True:
            # just some sanity checking
            raise Exception("Expected multiple_policies_enabled "
                            "to be True/False, got %r" % (
                                self.env.multiple_policies_enabled,))
    def tearDown(self):
        self.env.tearDown()
class TestSymlinkSlo(Base):
    """
    Just some sanity testing of SLO + symlinks.
    It is basically a copy of SLO tests in test_slo, but the tested object is
    a symlink to the manifest (instead of the manifest itself)
    """
    env = TestSloEnv
    def setUp(self):
        super(TestSymlinkSlo, self).setUp()
        if self.env.slo_enabled is False:
            raise SkipTest("SLO not enabled")
        elif self.env.slo_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected slo_enabled to be True/False, got %r" %
                (self.env.slo_enabled,))
        # fresh link object per test; account name is the last component
        # of the connection's storage path
        self.file_symlink = self.env.container.file(uuid4().hex)
        self.account_name = self.env.container.conn.storage_path.rsplit(
            '/', 1)[-1]
    def test_symlink_target_slo_manifest(self):
        """Reading a symlink to an SLO manifest yields the full SLO body."""
        self.file_symlink.write(hdrs={'X-Symlink-Target':
                                      '%s/%s' % (self.env.container.name,
                                                 'manifest-abcde')})
        self.assertEqual([
            (b'a', 1024 * 1024),
            (b'b', 1024 * 1024),
            (b'c', 1024 * 1024),
            (b'd', 1024 * 1024),
            (b'e', 1),
        ], group_by_byte(self.file_symlink.read()))
        # multipart-manifest=get through the link returns the manifest JSON
        manifest_body = self.file_symlink.read(parms={
            'multipart-manifest': 'get'})
        self.assertEqual(
            [seg['hash'] for seg in json.loads(manifest_body)],
            [self.env.seg_info['seg_%s' % c]['etag'] for c in 'abcde'])
        for obj_info in self.env.container.files(parms={'format': 'json'}):
            if obj_info['name'] == self.file_symlink.name:
                break
        else:
            self.fail('Unable to find file_symlink in listing.')
        obj_info.pop('last_modified')
        # dynamic link lists as a zero-byte object plus symlink_path
        self.assertEqual(obj_info, {
            'name': self.file_symlink.name,
            'content_type': 'application/octet-stream',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e',
            'bytes': 0,
            'symlink_path': '/v1/%s/%s/manifest-abcde' % (
                self.account_name, self.env.container.name),
        })
    def test_static_link_target_slo_manifest(self):
        """A static link (with etag) to an SLO manifest reads and lists."""
        manifest_info = self.env.container2.file(
            "manifest-abcde").info(parms={
                'multipart-manifest': 'get'})
        manifest_etag = manifest_info['etag']
        self.file_symlink.write(hdrs={
            'X-Symlink-Target': '%s/%s' % (
                self.env.container2.name, 'manifest-abcde'),
            'X-Symlink-Target-Etag': manifest_etag,
        })
        self.assertEqual([
            (b'a', 1024 * 1024),
            (b'b', 1024 * 1024),
            (b'c', 1024 * 1024),
            (b'd', 1024 * 1024),
            (b'e', 1),
        ], group_by_byte(self.file_symlink.read()))
        manifest_body = self.file_symlink.read(parms={
            'multipart-manifest': 'get'})
        self.assertEqual(
            [seg['hash'] for seg in json.loads(manifest_body)],
            [self.env.seg_info['seg_%s' % c]['etag'] for c in 'abcde'])
        # check listing
        for obj_info in self.env.container.files(parms={'format': 'json'}):
            if obj_info['name'] == self.file_symlink.name:
                break
        else:
            self.fail('Unable to find file_symlink in listing.')
        obj_info.pop('last_modified')
        self.maxDiff = None
        slo_info = self.env.container2.file("manifest-abcde").info()
        # static link to an SLO also exposes slo_etag plus the target's
        # size/etag as symlink_bytes/symlink_etag
        self.assertEqual(obj_info, {
            'name': self.file_symlink.name,
            'content_type': 'application/octet-stream',
            'hash': u'd41d8cd98f00b204e9800998ecf8427e',
            'bytes': 0,
            'slo_etag': slo_info['etag'],
            'symlink_path': '/v1/%s/%s/manifest-abcde' % (
                self.account_name, self.env.container2.name),
            'symlink_bytes': 4 * 2 ** 20 + 1,
            'symlink_etag': normalize_etag(manifest_etag),
        })
    def test_static_link_target_slo_manifest_wrong_etag(self):
        """The static-link etag must be the manifest etag, not the
        large-object etag (quoted or unquoted)."""
        # try the slo "etag"
        slo_etag = self.env.container2.file(
            "manifest-abcde").info()['etag']
        self.assertRaises(ResponseError, self.file_symlink.write, hdrs={
            'X-Symlink-Target': '%s/%s' % (
                self.env.container2.name, 'manifest-abcde'),
            'X-Symlink-Target-Etag': slo_etag,
        })
        self.assert_status(409)  # quotes OK, but doesn't match
        # try the slo etag w/o the quotes
        slo_etag = slo_etag.strip('"')
        self.assertRaises(ResponseError, self.file_symlink.write, hdrs={
            'X-Symlink-Target': '%s/%s' % (
                self.env.container2.name, 'manifest-abcde'),
            'X-Symlink-Target-Etag': slo_etag,
        })
        self.assert_status(409)  # that still doesn't match
    def test_static_link_target_symlink_to_slo_manifest(self):
        """A static link may validate against a dynamic link (which is a
        zero-byte object) and still read through to the SLO."""
        # write symlink
        self.file_symlink.write(hdrs={'X-Symlink-Target':
                                      '%s/%s' % (self.env.container.name,
                                                 'manifest-abcde')})
        # write static_link
        file_static_link = self.env.container.file(uuid4().hex)
        file_static_link.write(hdrs={
            'X-Symlink-Target': '%s/%s' % (
                self.file_symlink.container, self.file_symlink.name),
            'X-Symlink-Target-Etag': MD5_OF_EMPTY_STRING,
        })
        # validate reads
        self.assertEqual([
            (b'a', 1024 * 1024),
            (b'b', 1024 * 1024),
            (b'c', 1024 * 1024),
            (b'd', 1024 * 1024),
            (b'e', 1),
        ], group_by_byte(file_static_link.read()))
        manifest_body = file_static_link.read(parms={
            'multipart-manifest': 'get'})
        self.assertEqual(
            [seg['hash'] for seg in json.loads(manifest_body)],
            [self.env.seg_info['seg_%s' % c]['etag'] for c in 'abcde'])
        # check listing
        for obj_info in self.env.container.files(parms={'format': 'json'}):
            if obj_info['name'] == file_static_link.name:
                break
        else:
            self.fail('Unable to find file_symlink in listing.')
        obj_info.pop('last_modified')
        self.maxDiff = None
        self.assertEqual(obj_info, {
            'name': file_static_link.name,
            'content_type': 'application/octet-stream',
            'hash': 'd41d8cd98f00b204e9800998ecf8427e',
            'bytes': 0,
            'symlink_path': u'/v1/%s/%s/%s' % (
                self.account_name, self.file_symlink.container,
                self.file_symlink.name),
            # the only time bytes/etag aren't the target object are when they
            # validate through another static_link
            'symlink_bytes': 0,
            'symlink_etag': MD5_OF_EMPTY_STRING,
        })
    def test_symlink_target_slo_nested_manifest(self):
        """Symlink to a nested (sub-manifest) SLO reads fully."""
        self.file_symlink.write(hdrs={'X-Symlink-Target':
                                      '%s/%s' % (self.env.container.name,
                                                 'manifest-abcde-submanifest')})
        self.assertEqual([
            (b'a', 1024 * 1024),
            (b'b', 1024 * 1024),
            (b'c', 1024 * 1024),
            (b'd', 1024 * 1024),
            (b'e', 1),
        ], group_by_byte(self.file_symlink.read()))
    def test_slo_get_ranged_manifest(self):
        """Symlink to a ranged manifest returns the composed ranges."""
        self.file_symlink.write(hdrs={'X-Symlink-Target':
                                      '%s/%s' % (self.env.container.name,
                                                 'ranged-manifest')})
        self.assertEqual([
            (b'c', 1),
            (b'd', 1024 * 1024),
            (b'e', 1),
            (b'a', 512 * 1024),
            (b'b', 512 * 1024),
            (b'c', 1),
            (b'd', 1),
        ], group_by_byte(self.file_symlink.read()))
    def test_slo_ranged_get(self):
        """A range read through a symlink spans segment boundaries."""
        self.file_symlink.write(hdrs={'X-Symlink-Target':
                                      '%s/%s' % (self.env.container.name,
                                                 'manifest-abcde')})
        file_contents = self.file_symlink.read(size=1024 * 1024 + 2,
                                               offset=1024 * 1024 - 1)
        self.assertEqual([
            (b'a', 1),
            (b'b', 1024 * 1024),
            (b'c', 1),
        ], group_by_byte(file_contents))
class TestSymlinkSloEnv(TestSloEnv):
    """SLO environment whose manifest segments are symlinks to the real
    seg_a / seg_b objects."""

    @classmethod
    def create_links_to_segments(cls, container):
        """Create symlinks to seg_a/seg_b; return manifest path entries."""
        links = {}
        for suffix in ('a', 'b'):
            link_name = "linkto_seg_%s" % suffix
            link_target = '%s/seg_%s' % (container.name, suffix)
            container.file(link_name).write(
                hdrs={'X-Symlink-Target': link_target})
            links[link_name] = {
                'path': '/%s/%s' % (container.name, link_name)}
        return links

    @classmethod
    def setUp(cls):
        super(TestSymlinkSloEnv, cls).setUp()
        cls.link_seg_info = cls.create_links_to_segments(cls.container)
        # build an SLO manifest whose two segments are the symlinks above
        manifest = json.dumps([cls.link_seg_info['linkto_seg_a'],
                               cls.link_seg_info['linkto_seg_b']])
        cls.container.file("manifest-linkto-ab").write(
            manifest.encode('ascii'), parms={'multipart-manifest': 'put'})
class TestSymlinkToSloSegments(Base):
"""
This test class will contain various tests where the segments of the SLO
manifest are symlinks to the actual segments. Again the tests are basicaly
a copy/paste of the tests in test_slo, only the manifest has been modified
to contain symlinks as the segments.
"""
env = TestSymlinkSloEnv
    def setUp(self):
        """Skip the whole class unless the cluster reports SLO support."""
        super(TestSymlinkToSloSegments, self).setUp()
        if self.env.slo_enabled is False:
            raise SkipTest("SLO not enabled")
        elif self.env.slo_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected slo_enabled to be True/False, got %r" %
                (self.env.slo_enabled,))
def test_slo_get_simple_manifest_with_links(self):
file_item = self.env.container.file("manifest-linkto-ab")
self.assertEqual([
(b'a', 1024 * 1024),
(b'b', 1024 * 1024),
], group_by_byte(file_item.read()))
def test_slo_container_listing(self):
# the listing object size should equal the sum of the size of the
# segments, not the size of the manifest body
file_item = self.env.container.file(Utils.create_name())
file_item.write(
json.dumps([
self.env.link_seg_info['linkto_seg_a']]).encode('ascii'),
parms={'multipart-manifest': 'put'})
# The container listing has the etag of the actual manifest object
# contents which we get using multipart-manifest=get. New enough swift
# also exposes the etag that we get when NOT using
# multipart-manifest=get. Verify that both remain consistent when the
# object is updated with a POST.
file_item.initialize()
slo_etag = file_item.etag
file_item.initialize(parms={'multipart-manifest': 'get'})
manifest_etag = file_item.etag
listing = self.env.container.files(parms={'format': 'json'})
for f_dict in listing:
if f_dict['name'] == file_item.name:
self.assertEqual(1024 * 1024, f_dict['bytes'])
self.assertEqual('application/octet-stream',
f_dict['content_type'])
if tf.cluster_info.get('etag_quoter', {}).get(
'enable_by_default'):
self.assertEqual(manifest_etag, '"%s"' % f_dict['hash'])
else:
self.assertEqual(manifest_etag, f_dict['hash'])
self.assertEqual(slo_etag, f_dict['slo_etag'])
break
else:
self.fail('Failed to find manifest file in container listing')
# now POST updated content-type file
file_item.content_type = 'image/jpeg'
file_item.sync_metadata({'X-Object-Meta-Test': 'blah'})
file_item.initialize()
self.assertEqual('image/jpeg', file_item.content_type) # sanity
# verify that the container listing is consistent with the file
listing = self.env.container.files(parms={'format': 'json'})
for f_dict in listing:
if f_dict['name'] == file_item.name:
self.assertEqual(1024 * 1024, f_dict['bytes'])
self.assertEqual(file_item.content_type,
f_dict['content_type'])
if tf.cluster_info.get('etag_quoter', {}).get(
'enable_by_default'):
self.assertEqual(manifest_etag, '"%s"' % f_dict['hash'])
else:
self.assertEqual(manifest_etag, f_dict['hash'])
self.assertEqual(slo_etag, f_dict['slo_etag'])
break
else:
self.fail('Failed to find manifest file in container listing')
# now POST with no change to content-type
file_item.sync_metadata({'X-Object-Meta-Test': 'blah'},
cfg={'no_content_type': True})
file_item.initialize()
self.assertEqual('image/jpeg', file_item.content_type) # sanity
# verify that the container listing is consistent with the file
listing = self.env.container.files(parms={'format': 'json'})
for f_dict in listing:
if f_dict['name'] == file_item.name:
self.assertEqual(1024 * 1024, f_dict['bytes'])
self.assertEqual(file_item.content_type,
f_dict['content_type'])
if tf.cluster_info.get('etag_quoter', {}).get(
'enable_by_default'):
self.assertEqual(manifest_etag, '"%s"' % f_dict['hash'])
else:
self.assertEqual(manifest_etag, f_dict['hash'])
self.assertEqual(slo_etag, f_dict['slo_etag'])
break
else:
self.fail('Failed to find manifest file in container listing')
def test_slo_etag_is_hash_of_etags(self):
expected_hash = md5(usedforsecurity=False)
expected_hash.update((
md5(b'a' * 1024 * 1024, usedforsecurity=False)
.hexdigest().encode('ascii')))
expected_hash.update((
md5(b'b' * 1024 * 1024, usedforsecurity=False)
.hexdigest().encode('ascii')))
expected_etag = expected_hash.hexdigest()
file_item = self.env.container.file('manifest-linkto-ab')
self.assertEqual('"%s"' % expected_etag, file_item.info()['etag'])
def test_slo_copy(self):
file_item = self.env.container.file("manifest-linkto-ab")
file_item.copy(self.env.container.name, "copied-abcde")
copied = self.env.container.file("copied-abcde")
self.assertEqual([
(b'a', 1024 * 1024),
(b'b', 1024 * 1024),
], group_by_byte(copied.read(parms={'multipart-manifest': 'get'})))
    def test_slo_copy_the_manifest(self):
        """COPY with ?multipart-manifest=get copies the manifest itself; the
        copy must then behave exactly like the original manifest object.
        """
        # first just perform some tests of the contents of the manifest itself
        source = self.env.container.file("manifest-linkto-ab")
        source_contents = source.read(parms={'multipart-manifest': 'get'})
        source_json = json.loads(source_contents)
        manifest_etag = md5(source_contents, usedforsecurity=False).hexdigest()
        # etag_quoter wraps hashes in double quotes when enabled cluster-wide
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            manifest_etag = '"%s"' % manifest_etag
        source.initialize()
        slo_etag = source.etag
        self.assertEqual('application/octet-stream', source.content_type)
        source.initialize(parms={'multipart-manifest': 'get'})
        self.assertEqual(manifest_etag, source.etag)
        self.assertEqual('application/json; charset=utf-8',
                         source.content_type)
        # now, copy the manifest
        self.assertTrue(source.copy(self.env.container.name,
                                    "copied-ab-manifest-only",
                                    parms={'multipart-manifest': 'get'}))
        copied = self.env.container.file("copied-ab-manifest-only")
        copied_contents = copied.read(parms={'multipart-manifest': 'get'})
        try:
            copied_json = json.loads(copied_contents)
        except ValueError:
            self.fail("COPY didn't copy the manifest (invalid json on GET)")
        # make sure content of copied manifest is the same as original man.
        self.assertEqual(source_json, copied_json)
        copied.initialize()
        self.assertEqual(copied.etag, slo_etag)
        self.assertEqual('application/octet-stream', copied.content_type)
        copied.initialize(parms={'multipart-manifest': 'get'})
        self.assertEqual(source_contents, copied_contents)
        self.assertEqual(copied.etag, manifest_etag)
        self.assertEqual('application/json; charset=utf-8',
                         copied.content_type)
        # verify the listing metadata
        listing = self.env.container.files(parms={'format': 'json'})
        names = {}
        for f_dict in listing:
            if f_dict['name'] in ('manifest-linkto-ab',
                                  'copied-ab-manifest-only'):
                names[f_dict['name']] = f_dict
        self.assertIn('manifest-linkto-ab', names)
        actual = names['manifest-linkto-ab']
        self.assertEqual(2 * 1024 * 1024, actual['bytes'])
        self.assertEqual('application/octet-stream', actual['content_type'])
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            self.assertEqual(manifest_etag, '"%s"' % actual['hash'])
        else:
            self.assertEqual(manifest_etag, actual['hash'])
        self.assertEqual(slo_etag, actual['slo_etag'])
        self.assertIn('copied-ab-manifest-only', names)
        actual = names['copied-ab-manifest-only']
        self.assertEqual(2 * 1024 * 1024, actual['bytes'])
        self.assertEqual('application/octet-stream', actual['content_type'])
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            self.assertEqual(manifest_etag, '"%s"' % actual['hash'])
        else:
            self.assertEqual(manifest_etag, actual['hash'])
        self.assertEqual(slo_etag, actual['slo_etag'])
class TestSymlinkDlo(Base):
    """GET behaviour when a symlink points at a DLO manifest object."""
    env = TestDloEnv
    def test_get_manifest(self):
        """Reading a symlink to a DLO returns the concatenated segments."""
        # symlink -> man1 (lowercase segment set)
        link_obj = uuid4().hex
        file_symlink = self.env.container.file(link_obj)
        file_symlink.write(hdrs={'X-Symlink-Target':
                           '%s/%s' % (self.env.container.name,
                                      'man1')})
        self.assertEqual([
            (b'a', 10),
            (b'b', 10),
            (b'c', 10),
            (b'd', 10),
            (b'e', 10),
        ], group_by_byte(file_symlink.read()))
        # symlink -> man2 (uppercase segment set)
        link_obj = uuid4().hex
        file_symlink = self.env.container.file(link_obj)
        file_symlink.write(hdrs={'X-Symlink-Target':
                           '%s/%s' % (self.env.container.name,
                                      'man2')})
        self.assertEqual([
            (b'A', 10),
            (b'B', 10),
            (b'C', 10),
            (b'D', 10),
            (b'E', 10),
        ], group_by_byte(file_symlink.read()))
        # symlink -> manall (lowercase segments followed by uppercase)
        link_obj = uuid4().hex
        file_symlink = self.env.container.file(link_obj)
        file_symlink.write(hdrs={'X-Symlink-Target':
                           '%s/%s' % (self.env.container.name,
                                      'manall')})
        self.assertEqual([
            (b'a', 10),
            (b'b', 10),
            (b'c', 10),
            (b'd', 10),
            (b'e', 10),
            (b'A', 10),
            (b'B', 10),
            (b'C', 10),
            (b'D', 10),
            (b'E', 10),
        ], group_by_byte(file_symlink.read()))
    def test_get_manifest_document_itself(self):
        """?multipart-manifest=get returns the manifest body, not segments."""
        link_obj = uuid4().hex
        file_symlink = self.env.container.file(link_obj)
        file_symlink.write(hdrs={'X-Symlink-Target':
                           '%s/%s' % (self.env.container.name,
                                      'man1')})
        file_contents = file_symlink.read(parms={'multipart-manifest': 'get'})
        self.assertEqual(file_contents, b"man1-contents")
        self.assertEqual(file_symlink.info()['x_object_manifest'],
                         "%s/%s/seg_lower" %
                         (self.env.container.name, self.env.segment_prefix))
    def test_get_range(self):
        """Range GETs through the symlink can span segment boundaries."""
        link_obj = uuid4().hex + "_symlink"
        file_symlink = self.env.container.file(link_obj)
        file_symlink.write(hdrs={'X-Symlink-Target':
                           '%s/%s' % (self.env.container.name,
                                      'man1')})
        self.assertEqual([
            (b'a', 2),
            (b'b', 10),
            (b'c', 10),
            (b'd', 3),
        ], group_by_byte(file_symlink.read(size=25, offset=8)))
        file_contents = file_symlink.read(size=1, offset=47)
        self.assertEqual(file_contents, b"e")
    def test_get_range_out_of_range(self):
        """A range beyond the DLO's length yields 416 through the symlink."""
        link_obj = uuid4().hex
        file_symlink = self.env.container.file(link_obj)
        file_symlink.write(hdrs={'X-Symlink-Target':
                           '%s/%s' % (self.env.container.name,
                                      'man1')})
        self.assertRaises(ResponseError, file_symlink.read, size=7, offset=50)
        self.assert_status(416)
class TestSymlinkTargetObjectComparisonEnv(TestFileComparisonEnv):
    """Env for conditional-request tests that resolve through to the target."""
    @classmethod
    def setUp(cls):
        super(TestSymlinkTargetObjectComparisonEnv, cls).setUp()
        # No extra query parameters: GET/HEAD follow the symlink, so
        # responses carry the target object's etag and body.
        cls.parms = None
        cls.expect_empty_etag = False
        cls.expect_body = True
class TestSymlinkComparisonEnv(TestFileComparisonEnv):
    """Env for conditional-request tests aimed at the symlink object itself."""
    @classmethod
    def setUp(cls):
        super(TestSymlinkComparisonEnv, cls).setUp()
        # ?symlink=get addresses the symlink itself, a zero-byte object,
        # so expect the empty-string etag and no body.
        cls.parms = {'symlink': 'get'}
        cls.expect_empty_etag = True
        cls.expect_body = False
class TestSymlinkTargetObjectComparison(Base):
    """Conditional GET/HEAD requests made via a symlink.

    With this env the requests resolve to the symlink target, so etags and
    bodies are the target's. TestSymlinkComparison subclasses this and
    re-runs the same tests with ?symlink=get against the symlink itself.
    """
    env = TestSymlinkTargetObjectComparisonEnv
    def setUp(self):
        super(TestSymlinkTargetObjectComparison, self).setUp()
        # Create a companion '<name>_symlink' for every pre-made test file.
        for file_item in self.env.files:
            link_obj = file_item.name + '_symlink'
            file_symlink = self.env.container.file(link_obj)
            file_symlink.write(hdrs={'X-Symlink-Target':
                               '%s/%s' % (self.env.container.name,
                                          file_item.name)})
    def testIfMatch(self):
        """If-Match with the right etag succeeds; a bogus etag gives 412."""
        for file_item in self.env.files:
            link_obj = file_item.name + '_symlink'
            file_symlink = self.env.container.file(link_obj)
            # NOTE: the local name 'md5' shadows the module-level md5()
            # helper inside this loop; it is just the expected etag string.
            md5 = MD5_OF_EMPTY_STRING if self.env.expect_empty_etag else \
                file_item.md5
            hdrs = {'If-Match': md5}
            body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
            if self.env.expect_body:
                self.assertTrue(body)
            else:
                self.assertEqual(b'', body)
            self.assert_status(200)
            self.assert_etag(md5)
            hdrs = {'If-Match': 'bogus'}
            self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(412)
            self.assert_etag(md5)
    def testIfMatchMultipleEtags(self):
        """If-Match succeeds when any etag in the list matches."""
        for file_item in self.env.files:
            link_obj = file_item.name + '_symlink'
            file_symlink = self.env.container.file(link_obj)
            md5 = MD5_OF_EMPTY_STRING if self.env.expect_empty_etag else \
                file_item.md5
            hdrs = {'If-Match': '"bogus1", "%s", "bogus2"' % md5}
            body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
            if self.env.expect_body:
                self.assertTrue(body)
            else:
                self.assertEqual(b'', body)
            self.assert_status(200)
            self.assert_etag(md5)
            hdrs = {'If-Match': '"bogus1", "bogus2", "bogus3"'}
            self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(412)
            self.assert_etag(md5)
    def testIfNoneMatch(self):
        """If-None-Match returns 304 when the etag matches."""
        for file_item in self.env.files:
            link_obj = file_item.name + '_symlink'
            file_symlink = self.env.container.file(link_obj)
            md5 = MD5_OF_EMPTY_STRING if self.env.expect_empty_etag else \
                file_item.md5
            hdrs = {'If-None-Match': 'bogus'}
            body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
            if self.env.expect_body:
                self.assertTrue(body)
            else:
                self.assertEqual(b'', body)
            self.assert_status(200)
            self.assert_etag(md5)
            hdrs = {'If-None-Match': md5}
            self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(304)
            self.assert_etag(md5)
            self.assert_header('accept-ranges', 'bytes')
    def testIfNoneMatchMultipleEtags(self):
        """If-None-Match returns 304 when any etag in the list matches."""
        for file_item in self.env.files:
            link_obj = file_item.name + '_symlink'
            file_symlink = self.env.container.file(link_obj)
            md5 = MD5_OF_EMPTY_STRING if self.env.expect_empty_etag else \
                file_item.md5
            hdrs = {'If-None-Match': '"bogus1", "bogus2", "bogus3"'}
            body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
            if self.env.expect_body:
                self.assertTrue(body)
            else:
                self.assertEqual(b'', body)
            self.assert_status(200)
            self.assert_etag(md5)
            hdrs = {'If-None-Match':
                    '"bogus1", "bogus2", "%s"' % md5}
            self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(304)
            self.assert_etag(md5)
            self.assert_header('accept-ranges', 'bytes')
    def testIfModifiedSince(self):
        """If-Modified-Since honours the relevant last-modified time."""
        for file_item in self.env.files:
            link_obj = file_item.name + '_symlink'
            file_symlink = self.env.container.file(link_obj)
            md5 = MD5_OF_EMPTY_STRING if self.env.expect_empty_etag else \
                file_item.md5
            hdrs = {'If-Modified-Since': self.env.time_old_f1}
            body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
            if self.env.expect_body:
                self.assertTrue(body)
            else:
                self.assertEqual(b'', body)
            self.assert_status(200)
            self.assert_etag(md5)
            self.assertTrue(file_symlink.info(hdrs=hdrs, parms=self.env.parms))
            hdrs = {'If-Modified-Since': self.env.time_new}
            self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(304)
            self.assert_etag(md5)
            self.assert_header('accept-ranges', 'bytes')
            self.assertRaises(ResponseError, file_symlink.info, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(304)
            self.assert_etag(md5)
            self.assert_header('accept-ranges', 'bytes')
    def testIfUnmodifiedSince(self):
        """If-Unmodified-Since gives 412 for times before the object."""
        for file_item in self.env.files:
            link_obj = file_item.name + '_symlink'
            file_symlink = self.env.container.file(link_obj)
            md5 = MD5_OF_EMPTY_STRING if self.env.expect_empty_etag else \
                file_item.md5
            hdrs = {'If-Unmodified-Since': self.env.time_new}
            body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
            if self.env.expect_body:
                self.assertTrue(body)
            else:
                self.assertEqual(b'', body)
            self.assert_status(200)
            self.assert_etag(md5)
            self.assertTrue(file_symlink.info(hdrs=hdrs, parms=self.env.parms))
            hdrs = {'If-Unmodified-Since': self.env.time_old_f2}
            self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(412)
            self.assert_etag(md5)
            self.assertRaises(ResponseError, file_symlink.info, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(412)
            self.assert_etag(md5)
    def testIfMatchAndUnmodified(self):
        """Combined If-Match + If-Unmodified-Since: both must be satisfied."""
        for file_item in self.env.files:
            link_obj = file_item.name + '_symlink'
            file_symlink = self.env.container.file(link_obj)
            md5 = MD5_OF_EMPTY_STRING if self.env.expect_empty_etag else \
                file_item.md5
            hdrs = {'If-Match': md5,
                    'If-Unmodified-Since': self.env.time_new}
            body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
            if self.env.expect_body:
                self.assertTrue(body)
            else:
                self.assertEqual(b'', body)
            self.assert_status(200)
            self.assert_etag(md5)
            hdrs = {'If-Match': 'bogus',
                    'If-Unmodified-Since': self.env.time_new}
            self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(412)
            self.assert_etag(md5)
            hdrs = {'If-Match': md5,
                    'If-Unmodified-Since': self.env.time_old_f3}
            self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
                              parms=self.env.parms)
            self.assert_status(412)
            self.assert_etag(md5)
    def testLastModified(self):
        """The symlink reports the target's last-modified in this env."""
        file_item = self.env.container.file(Utils.create_name())
        file_item.content_type = Utils.create_name()
        resp = file_item.write_random_return_resp(self.env.file_size)
        put_last_modified = resp.getheader('last-modified')
        md5 = file_item.md5
        # create symlink
        link_obj = file_item.name + '_symlink'
        file_symlink = self.env.container.file(link_obj)
        file_symlink.write(hdrs={'X-Symlink-Target':
                           '%s/%s' % (self.env.container.name,
                                      file_item.name)})
        info = file_symlink.info()
        self.assertIn('last_modified', info)
        last_modified = info['last_modified']
        self.assertEqual(put_last_modified, info['last_modified'])
        hdrs = {'If-Modified-Since': last_modified}
        self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs)
        self.assert_status(304)
        self.assert_etag(md5)
        self.assert_header('accept-ranges', 'bytes')
        hdrs = {'If-Unmodified-Since': last_modified}
        self.assertTrue(file_symlink.read(hdrs=hdrs))
class TestSymlinkComparison(TestSymlinkTargetObjectComparison):
    """Re-run the conditional-request tests against the symlink itself."""
    env = TestSymlinkComparisonEnv
    def setUp(self):
        super(TestSymlinkComparison, self).setUp()
    def testLastModified(self):
        """With ?symlink=get, conditional requests use the symlink's own
        last-modified time, not the target's.
        """
        file_item = self.env.container.file(Utils.create_name())
        file_item.content_type = Utils.create_name()
        resp = file_item.write_random_return_resp(self.env.file_size)
        put_target_last_modified = resp.getheader('last-modified')
        # The symlink object itself is zero bytes, hence the empty etag.
        md5 = MD5_OF_EMPTY_STRING
        # get different last-modified between file and symlink
        time.sleep(1)
        # create symlink
        link_obj = file_item.name + '_symlink'
        file_symlink = self.env.container.file(link_obj)
        resp = file_symlink.write(return_resp=True,
                                  hdrs={'X-Symlink-Target':
                                        '%s/%s' % (self.env.container.name,
                                                   file_item.name)})
        put_sym_last_modified = resp.getheader('last-modified')
        info = file_symlink.info(parms=self.env.parms)
        self.assertIn('last_modified', info)
        last_modified = info['last_modified']
        self.assertEqual(put_sym_last_modified, info['last_modified'])
        hdrs = {'If-Modified-Since': put_target_last_modified}
        body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
        self.assertEqual(b'', body)
        self.assert_status(200)
        self.assert_etag(md5)
        hdrs = {'If-Modified-Since': last_modified}
        self.assertRaises(ResponseError, file_symlink.read, hdrs=hdrs,
                          parms=self.env.parms)
        self.assert_status(304)
        self.assert_etag(md5)
        self.assert_header('accept-ranges', 'bytes')
        hdrs = {'If-Unmodified-Since': last_modified}
        body = file_symlink.read(hdrs=hdrs, parms=self.env.parms)
        self.assertEqual(b'', body)
        self.assert_status(200)
        self.assert_etag(md5)
class TestSymlinkAccountTempurl(Base):
    """TempURL access to symlinks signed with an account-level key."""
    env = TestTempurlEnv
    digest_name = 'sha256'
    def setUp(self):
        super(TestSymlinkAccountTempurl, self).setUp()
        if self.env.tempurl_enabled is False:
            raise SkipTest("TempURL not enabled")
        elif self.env.tempurl_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected tempurl_enabled to be True/False, got %r" %
                (self.env.tempurl_enabled,))
        if self.digest_name not in cluster_info['tempurl'].get(
                'allowed_digests', ['sha1']):
            raise SkipTest("tempurl does not support %s signatures" %
                           self.digest_name)
        self.digest = getattr(hashlib, self.digest_name)
        self.expires = int(time.time()) + 86400
        self.obj_tempurl_parms = self.tempurl_parms(
            'GET', self.expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key)
    def tempurl_parms(self, method, expires, path, key):
        """Build temp_url_sig/temp_url_expires query parameters by signing
        'method\\nexpires\\npath' with the account tempurl key.
        """
        path = urllib.parse.unquote(path)
        if not six.PY2:
            method = method.encode('utf8')
            path = path.encode('utf8')
            key = key.encode('utf8')
        sig = hmac.new(
            key,
            b'%s\n%d\n%s' % (method, expires, path),
            self.digest).hexdigest()
        return {'temp_url_sig': sig, 'temp_url_expires': str(expires)}
    def test_PUT_symlink(self):
        """Creating a symlink through a tempurl PUT is rejected with 400."""
        new_sym = self.env.container.file(Utils.create_name())
        # give out a signature which allows a PUT to new_obj
        expires = int(time.time()) + 86400
        put_parms = self.tempurl_parms(
            'PUT', expires, self.env.conn.make_path(new_sym.path),
            self.env.tempurl_key)
        # try to create symlink object
        try:
            new_sym.write(
                b'', {'x-symlink-target': 'cont/foo'}, parms=put_parms,
                cfg={'no_auth_token': True})
        except ResponseError as e:
            self.assertEqual(e.status, 400)
        else:
            self.fail('request did not error')
    def test_GET_symlink_inside_container(self):
        """Tempurl GET of a symlink resolves to the target in-container."""
        tgt_obj = self.env.container.file(Utils.create_name())
        sym = self.env.container.file(Utils.create_name())
        tgt_obj.write(b"target object body")
        sym.write(
            b'',
            {'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)})
        expires = int(time.time()) + 86400
        get_parms = self.tempurl_parms(
            'GET', expires, self.env.conn.make_path(sym.path),
            self.env.tempurl_key)
        contents = sym.read(parms=get_parms, cfg={'no_auth_token': True})
        self.assert_status([200])
        self.assertEqual(contents, b"target object body")
    def test_GET_symlink_outside_container(self):
        """Account-level keys authorize cross-container symlink targets."""
        tgt_obj = self.env.container.file(Utils.create_name())
        tgt_obj.write(b"target object body")
        container2 = self.env.account.container(Utils.create_name())
        container2.create()
        sym = container2.file(Utils.create_name())
        sym.write(
            b'',
            {'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)})
        expires = int(time.time()) + 86400
        get_parms = self.tempurl_parms(
            'GET', expires, self.env.conn.make_path(sym.path),
            self.env.tempurl_key)
        # cross container tempurl works fine for account tempurl key
        contents = sym.read(parms=get_parms, cfg={'no_auth_token': True})
        self.assert_status([200])
        self.assertEqual(contents, b"target object body")
class TestSymlinkContainerTempurl(Base):
    """TempURL access to symlinks signed with a container-level key."""
    env = TestContainerTempurlEnv
    digest_name = 'sha256'
    def setUp(self):
        super(TestSymlinkContainerTempurl, self).setUp()
        if self.env.tempurl_enabled is False:
            raise SkipTest("TempURL not enabled")
        elif self.env.tempurl_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected tempurl_enabled to be True/False, got %r" %
                (self.env.tempurl_enabled,))
        if self.digest_name not in cluster_info['tempurl'].get(
                'allowed_digests', ['sha1']):
            raise SkipTest("tempurl does not support %s signatures" %
                           self.digest_name)
        self.digest = getattr(hashlib, self.digest_name)
        expires = int(time.time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key)
        self.obj_tempurl_parms = {'temp_url_sig': sig,
                                  'temp_url_expires': str(expires)}
    def tempurl_sig(self, method, expires, path, key):
        """Sign 'method\\nexpires\\npath' with the container tempurl key."""
        path = urllib.parse.unquote(path)
        if not six.PY2:
            method = method.encode('utf8')
            path = path.encode('utf8')
            key = key.encode('utf8')
        return hmac.new(
            key,
            b'%s\n%d\n%s' % (method, expires, path),
            self.digest).hexdigest()
    def test_PUT_symlink(self):
        """Creating a symlink through a tempurl PUT is rejected with 400."""
        new_sym = self.env.container.file(Utils.create_name())
        # give out a signature which allows a PUT to new_obj
        expires = int(time.time()) + 86400
        sig = self.tempurl_sig(
            'PUT', expires, self.env.conn.make_path(new_sym.path),
            self.env.tempurl_key)
        put_parms = {'temp_url_sig': sig,
                     'temp_url_expires': str(expires)}
        # try to create symlink object, should fail
        try:
            new_sym.write(
                b'', {'x-symlink-target': 'cont/foo'}, parms=put_parms,
                cfg={'no_auth_token': True})
        except ResponseError as e:
            self.assertEqual(e.status, 400)
        else:
            self.fail('request did not error')
    def test_GET_symlink_inside_container(self):
        """Tempurl GET resolves a symlink whose target is in-container."""
        tgt_obj = self.env.container.file(Utils.create_name())
        sym = self.env.container.file(Utils.create_name())
        tgt_obj.write(b"target object body")
        sym.write(
            b'',
            {'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)})
        expires = int(time.time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(sym.path),
            self.env.tempurl_key)
        parms = {'temp_url_sig': sig,
                 'temp_url_expires': str(expires)}
        contents = sym.read(parms=parms, cfg={'no_auth_token': True})
        self.assert_status([200])
        self.assertEqual(contents, b"target object body")
    def test_GET_symlink_outside_container(self):
        """Container-level keys must NOT authorize cross-container targets."""
        tgt_obj = self.env.container.file(Utils.create_name())
        tgt_obj.write(b"target object body")
        container2 = self.env.account.container(Utils.create_name())
        container2.create()
        sym = container2.file(Utils.create_name())
        sym.write(
            b'',
            {'x-symlink-target': '%s/%s' % (self.env.container.name, tgt_obj)})
        expires = int(time.time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(sym.path),
            self.env.tempurl_key)
        parms = {'temp_url_sig': sig,
                 'temp_url_expires': str(expires)}
        # cross container tempurl does not work for container tempurl key
        try:
            sym.read(parms=parms, cfg={'no_auth_token': True})
        except ResponseError as e:
            self.assertEqual(e.status, 401)
        else:
            self.fail('request did not error')
        try:
            sym.info(parms=parms, cfg={'no_auth_token': True})
        except ResponseError as e:
            self.assertEqual(e.status, 401)
        else:
            self.fail('request did not error')
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/test_symlink.py |
#!/usr/bin/python
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from test.functional import check_response, retry, SkipTest
import test.functional as tf
def setUpModule():
    """Initialize the functional-test environment once for this module."""
    tf.setup_package()
def tearDownModule():
    """Tear down the environment created in setUpModule."""
    tf.teardown_package()
class TestHttpProtocol(unittest.TestCase):
    """Checks of proxy behaviour at the raw HTTP protocol level."""
    existing_metadata = None
    def test_invalid_path_info(self):
        """A request line with a space in the path is rejected with 412,
        but still carries transaction-id headers.
        """
        if tf.skip:
            raise SkipTest
        def send_bad_path(url, token, parsed, conn):
            # The embedded space makes the request line unparseable as a
            # normal path, so the proxy should refuse it outright.
            conn.request('GET', "/info asdf", '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(send_bad_path)
        resp.read()
        self.assertEqual(resp.status, 412)
        # Even rejected requests must be assigned transaction identifiers.
        trans_id = resp.getheader('X-Trans-Id')
        request_id = resp.getheader('X-Openstack-Request-Id')
        for header_value in (trans_id, request_id):
            self.assertIsNotNone(header_value)
            self.assertIn('tx', header_value)
        self.assertEqual(request_id, trans_id)
| swift-master | test/functional/test_protocol.py |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import io
import json
import os
import random
import sys
import socket
import time
from unittest import SkipTest
from xml.dom import minidom
import six
from six.moves import http_client
from six.moves import urllib
from swiftclient import get_auth
from swift.common import constraints
from swift.common.http import is_success
from swift.common.swob import str_to_wsgi, wsgi_to_str
from swift.common.utils import config_true_value, md5
from test import safe_repr
# Lift http.client's header-count cap to Swift's configured maximum so
# responses carrying many headers can still be parsed by the test client.
http_client._MAXHEADERS = constraints.MAX_HEADER_COUNT
class AuthenticationFailed(Exception):
    """Raised when auth yields no usable storage URL/token pair."""
class RequestError(Exception):
    """Raised when a request cannot be completed after retries."""
class ResponseError(Exception):
    """Raised for unexpected HTTP responses; captures request context.

    Works even when ``response`` is None or a bare object: every attribute
    is pulled defensively with getattr() defaults.
    """
    def __init__(self, response, method=None, path=None, details=None):
        self.status = getattr(response, 'status', 0)
        self.reason = getattr(response, 'reason', '[unknown]')
        self.method = method
        self.path = path
        self.headers = getattr(response, 'getheaders', lambda: [])()
        self.details = details
        # Remember the transaction id, if the response carried one.
        self.txid = None
        for name, value in self.headers:
            if name.lower() == 'x-trans-id':
                self.txid = value
                break
        super(ResponseError, self).__init__()
    def __str__(self):
        return repr(self)
    def __repr__(self):
        parts = ['%d: %r (%r %r) txid=%s' % (
            self.status, self.reason, self.method, self.path, self.txid)]
        if self.details:
            parts.append('%s' % self.details)
        return '\n'.join(parts)
def listing_empty(method):
    """Poll *method* until it returns an empty listing.

    Retries up to six times with exponential backoff; returns True as soon
    as an empty listing is seen, False if it never empties out.
    """
    attempt = 0
    while attempt < 6:
        if len(method()) == 0:
            return True
        time.sleep(2 ** attempt)
        attempt += 1
    return False
def listing_items(method):
    """Yield every item from a paged listing *method*.

    ``method`` is called repeatedly; whenever a full page (10000 items)
    comes back, its last item becomes the marker for the next call.
    """
    marker = None
    first_call = True
    page = []
    while first_call or page:
        for entry in page:
            yield entry
        if not (first_call or marker):
            page = []
            continue
        page = method(parms={'marker': marker}) if marker else method()
        # A full page means there may be more; remember where to resume.
        marker = page[-1] if len(page) == 10000 else None
        first_call = False
def putrequest(self, method, url, skip_host=False, skip_accept_encoding=False):
    '''Send a request to the server.

    This is mostly a regurgitation of CPython's HTTPConnection.putrequest,
    but fixed up so we can still send arbitrary bytes in the request line
    on py3. See also: https://bugs.python.org/issue36274

    To use, swap out a HTTP(S)Connection's putrequest with something like::

        conn.putrequest = putrequest.__get__(conn)

    :param method: specifies an HTTP request method, e.g. 'GET'.
    :param url: specifies the object being requested, e.g. '/index.html'.
    :param skip_host: if True does not add automatically a 'Host:' header
    :param skip_accept_encoding: if True does not add automatically an
        'Accept-Encoding:' header
    '''
    # (Mostly) inline the HTTPConnection implementation; just fix it
    # so we can send non-ascii request lines. For comparison, see
    # https://github.com/python/cpython/blob/v2.7.16/Lib/httplib.py#L888-L1003
    # and https://github.com/python/cpython/blob/v3.7.2/
    # Lib/http/client.py#L1061-L1183
    # NOTE: '_HTTPConnection__*' reaches name-mangled private attributes of
    # http.client.HTTPConnection from outside the class.
    if self._HTTPConnection__response \
            and self._HTTPConnection__response.isclosed():
        self._HTTPConnection__response = None
    if self._HTTPConnection__state == http_client._CS_IDLE:
        self._HTTPConnection__state = http_client._CS_REQ_STARTED
    else:
        raise http_client.CannotSendRequest(self._HTTPConnection__state)
    self._method = method
    if not url:
        url = '/'
    self._path = url
    request = '%s %s %s' % (method, url, self._http_vsn_str)
    if not isinstance(request, bytes):
        # This choice of encoding is the whole reason we copy/paste from
        # cpython. When making backend requests, it should never be
        # necessary; however, we have some functional tests that want
        # to send non-ascii bytes.
        # TODO: when https://bugs.python.org/issue36274 is resolved, make
        # sure we fix up our API to match whatever upstream chooses to do
        self._output(request.encode('latin1'))
    else:
        self._output(request)
    if self._http_vsn == 11:
        if not skip_host:
            netloc = ''
            if url.startswith('http'):
                nil, netloc, nil, nil, nil = urllib.parse.urlsplit(url)
            if netloc:
                try:
                    netloc_enc = netloc.encode("ascii")
                except UnicodeEncodeError:
                    netloc_enc = netloc.encode("idna")
                self.putheader('Host', netloc_enc)
            else:
                if self._tunnel_host:
                    host = self._tunnel_host
                    port = self._tunnel_port
                else:
                    host = self.host
                    port = self.port
                try:
                    host_enc = host.encode("ascii")
                except UnicodeEncodeError:
                    host_enc = host.encode("idna")
                # Bracket IPv6 literals per RFC 3986.
                if host.find(':') >= 0:
                    host_enc = b'[' + host_enc + b']'
                if port == self.default_port:
                    self.putheader('Host', host_enc)
                else:
                    host_enc = host_enc.decode("ascii")
                    self.putheader('Host', "%s:%s" % (host_enc, port))
        if not skip_accept_encoding:
            self.putheader('Accept-Encoding', 'identity')
class Connection(object):
def __init__(self, config):
for key in 'auth_uri username password'.split():
if key not in config:
raise SkipTest(
"Missing required configuration parameter: %s" % key)
self.auth_url = config['auth_uri']
self.insecure = config_true_value(config.get('insecure', 'false'))
self.auth_version = str(config.get('auth_version', '1'))
self.domain = config.get('domain')
self.account = config.get('account')
self.username = config['username']
self.password = config['password']
self.storage_netloc = None
self.storage_path = None
self.conn_class = None
self.connection = None # until you call .http_connect()
@property
def storage_url(self):
return '%s://%s/%s' % (self.storage_scheme, self.storage_netloc,
self.storage_path)
@storage_url.setter
def storage_url(self, value):
if six.PY2 and not isinstance(value, bytes):
value = value.encode('utf-8')
url = urllib.parse.urlparse(value)
if url.scheme == 'http':
self.conn_class = http_client.HTTPConnection
elif url.scheme == 'https':
self.conn_class = http_client.HTTPSConnection
else:
raise ValueError('unexpected protocol %s' % (url.scheme))
self.storage_netloc = url.netloc
# Make sure storage_path is a string and not unicode, since
# keystoneclient (called by swiftclient) returns them in
# unicode and this would cause troubles when doing
# no_safe_quote query.
x = url.path.split('/')
self.storage_path = str('/%s/%s' % (x[1], x[2]))
self.account_name = str(x[2])
@property
def storage_scheme(self):
if self.conn_class is None:
return None
if issubclass(self.conn_class, http_client.HTTPSConnection):
return 'https'
return 'http'
def get_account(self):
return Account(self, self.account)
def authenticate(self):
if self.auth_version == "1" and self.account:
auth_user = '%s:%s' % (self.account, self.username)
else:
auth_user = self.username
if self.insecure:
try:
import requests
from requests.packages.urllib3.exceptions import \
InsecureRequestWarning
except ImportError:
pass
else:
requests.packages.urllib3.disable_warnings(
InsecureRequestWarning)
if self.domain:
os_opts = {'project_domain_name': self.domain,
'user_domain_name': self.domain}
else:
os_opts = {}
authargs = dict(snet=False, tenant_name=self.account,
auth_version=self.auth_version, os_options=os_opts,
insecure=self.insecure)
(storage_url, storage_token) = get_auth(
self.auth_url, auth_user, self.password, **authargs)
if not (storage_url and storage_token):
raise AuthenticationFailed()
self.storage_url = storage_url
self.auth_user = auth_user
# With v2 keystone, storage_token is unicode.
# We want it to be string otherwise this would cause
# troubles when doing query with already encoded
# non ascii characters in its headers.
self.storage_token = str(storage_token)
self.user_acl = '%s:%s' % (self.account, self.username)
self.http_connect()
return self.storage_path, self.storage_token
def cluster_info(self):
"""
Retrieve the data in /info, or {} on 404
"""
status = self.make_request('GET', '/info',
cfg={'absolute_path': True})
if status // 100 == 4:
return {}
if not is_success(status):
raise ResponseError(self.response, 'GET', '/info')
return json.loads(self.response.read())
def http_connect(self):
if self.storage_scheme == 'https' and \
self.insecure and sys.version_info >= (2, 7, 9):
import ssl
self.connection = self.conn_class(
self.storage_netloc,
context=ssl._create_unverified_context())
else:
self.connection = self.conn_class(self.storage_netloc)
self.connection.putrequest = putrequest.__get__(self.connection)
def make_path(self, path=None, cfg=None):
if path is None:
path = []
if cfg is None:
cfg = {}
if cfg.get('version_only_path'):
return '/' + self.storage_path.split('/')[1]
if path:
quote = urllib.parse.quote
if cfg.get('no_quote') or cfg.get('no_path_quote'):
quote = str_to_wsgi
return '%s/%s' % (self.storage_path,
'/'.join([quote(i) for i in path]))
else:
return self.storage_path
def make_headers(self, hdrs, cfg=None):
if cfg is None:
cfg = {}
headers = {}
if not cfg.get('no_auth_token'):
headers['X-Auth-Token'] = self.storage_token
if cfg.get('use_token'):
headers['X-Auth-Token'] = cfg.get('use_token')
if isinstance(hdrs, dict):
headers.update((str_to_wsgi(h), str_to_wsgi(v))
for h, v in hdrs.items())
return headers
def make_request(self, method, path=None, data=b'', hdrs=None, parms=None,
cfg=None):
if path is None:
path = []
if hdrs is None:
hdrs = {}
if parms is None:
parms = {}
if cfg is None:
cfg = {}
if not cfg.get('absolute_path'):
# Set absolute_path=True to make a request to exactly the given
# path, not storage path + given path. Useful for
# non-account/container/object requests.
path = self.make_path(path, cfg=cfg)
headers = self.make_headers(hdrs, cfg=cfg)
if isinstance(parms, dict) and parms:
quote = urllib.parse.quote
if cfg.get('no_quote') or cfg.get('no_parms_quote'):
quote = lambda x: x
query_args = ['%s=%s' % (quote(x), quote(str(y)))
for (x, y) in parms.items()]
path = '%s?%s' % (path, '&'.join(query_args))
if not cfg.get('no_content_length'):
if cfg.get('set_content_length'):
headers['Content-Length'] = str(cfg.get('set_content_length'))
else:
headers['Content-Length'] = str(len(data))
def try_request():
self.http_connect()
self.connection.request(method, path, data, headers)
return self.connection.getresponse()
try:
self.response = self.request_with_retry(try_request)
except RequestError as e:
details = "{method} {path} headers: {headers} data: {data}".format(
method=method, path=path, headers=headers, data=data)
raise RequestError('Unable to complete request: %s.\n%s' % (
details, str(e)))
return self.response.status
def request_with_retry(self, try_request):
self.response = None
try_count = 0
fail_messages = []
while try_count < 5:
try_count += 1
try:
self.response = try_request()
except socket.timeout as e:
fail_messages.append(safe_repr(e))
continue
except http_client.HTTPException as e:
fail_messages.append(safe_repr(e))
continue
if self.response.status == 401:
fail_messages.append("Response 401")
self.authenticate()
continue
elif self.response.status == 503:
fail_messages.append("Response 503")
if try_count != 5:
time.sleep(5)
continue
break
if self.response:
return self.response
raise RequestError('Attempts: %s, Failures: %s' % (
len(fail_messages), fail_messages))
    def put_start(self, path, hdrs=None, parms=None, cfg=None, chunked=False):
        """Begin a PUT whose body is streamed via put_data()/put_end().

        Sends only the request line and headers; the caller supplies the
        body with put_data() and completes the request with put_end().

        :param path: list of path components for make_path()
        :param hdrs: extra request headers
        :param parms: query-string parameters
        :param cfg: behaviour tweaks (no_quote, no_parms_quote, ...)
        :param chunked: if true, use chunked transfer-encoding and drop
            any Content-Length header
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        self.http_connect()
        path = self.make_path(path, cfg)
        headers = self.make_headers(hdrs, cfg=cfg)
        if chunked:
            headers['Transfer-Encoding'] = 'chunked'
            headers.pop('Content-Length', None)
        if isinstance(parms, dict) and parms:
            quote = urllib.parse.quote
            if cfg.get('no_quote') or cfg.get('no_parms_quote'):
                quote = lambda x: x
            query_args = ['%s=%s' % (quote(x), quote(str(y)))
                          for (x, y) in parms.items()]
            path = '%s?%s' % (path, '&'.join(query_args))
        self.connection.putrequest('PUT', path)
        for key, value in headers.items():
            self.connection.putheader(key, value)
        self.connection.endheaders()
def put_data(self, data, chunked=False):
if chunked:
self.connection.send(b'%x\r\n%s\r\n' % (len(data), data))
else:
self.connection.send(data)
    def put_end(self, chunked=False):
        """Finish a PUT started with put_start() and return its status.

        :param chunked: if true, send the terminating zero-length chunk
        :returns: integer response status; the response (with its body
            stored on .body) is saved on self.response
        """
        if chunked:
            self.connection.send(b'0\r\n\r\n')
        self.response = self.connection.getresponse()
        # Hope it isn't big!
        self.response.body = self.response.read()
        self.connection.close()
        return self.response.status
class Base(object):
    """Shared behaviour for the Account/Container/File client wrappers."""

    # response headers whose values are integral and should be coerced
    _INT_HEADERS = (
        'content-length',
        'x-account-container-count',
        'x-account-object-count',
        'x-account-bytes-used',
        'x-container-object-count',
        'x-container-bytes-used',
    )

    def __str__(self):
        return self.name

    def header_fields(self, required_fields, optional_fields=None):
        """Pull selected headers out of the most recent response.

        :param required_fields: iterable of (result_key, header_name)
            pairs; a missing header raises ValueError
        :param optional_fields: iterable of (result_key, header_name)
            pairs; missing headers are silently skipped
        :returns: dict mapping result keys to header values, with known
            integral headers converted to int
        :raises ValueError: if a required header is absent
        """
        if optional_fields is None:
            optional_fields = ()

        def is_int_header(header):
            # per-policy usage counters are integral too
            if header.startswith('x-account-storage-policy-') and \
                    header.endswith(('-bytes-used', '-object-count')):
                return True
            return header in self._INT_HEADERS

        # NB: on py2, headers are always lower; on py3, they match the bytes
        # on the wire
        headers = {}
        for hdr, val in self.conn.response.getheaders():
            headers[wsgi_to_str(hdr).lower()] = wsgi_to_str(val)

        def convert(header):
            value = headers[header]
            return int(value) if is_int_header(header) else value

        ret = {}
        for return_key, header in required_fields:
            if header not in headers:
                raise ValueError("%s was not found in response headers: %r" %
                                 (header, headers))
            ret[return_key] = convert(header)
        for return_key, header in optional_fields:
            if header in headers:
                ret[return_key] = convert(header)
        return ret
class Account(Base):
    """Client-side wrapper for account-level Swift API requests."""
    def __init__(self, conn, account, name=None):
        self.conn = conn
        self.name = str(name)
    def update_metadata(self, metadata=None, cfg=None):
        """POST the given metadata as X-Account-Meta-* headers.

        :raises ResponseError: if the POST does not succeed
        """
        if metadata is None:
            metadata = {}
        if cfg is None:
            cfg = {}
        headers = dict(("X-Account-Meta-%s" % k, v)
                       for k, v in metadata.items())
        self.conn.make_request('POST', self.path, hdrs=headers, cfg=cfg)
        if not is_success(self.conn.response.status):
            raise ResponseError(self.conn.response, 'POST',
                                self.conn.make_path(self.path))
        return True
    def container(self, container_name):
        """Return a Container wrapper for the named container."""
        return Container(self.conn, self.name, container_name)
    def containers(self, hdrs=None, parms=None, cfg=None):
        """List this account's containers.

        :param parms: may include 'format' of None, 'json' or 'xml'
        :returns: for json/xml formats, a list of dicts; otherwise a list
            of container names ([] on 204)
        :raises RequestError: for an invalid format
        :raises ResponseError: for an unexpected response status
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        format_type = parms.get('format', None)
        if format_type not in [None, 'json', 'xml']:
            raise RequestError('Invalid format: %s' % format_type)
        if format_type is None and 'format' in parms:
            del parms['format']
        status = self.conn.make_request('GET', self.path, hdrs=hdrs,
                                        parms=parms, cfg=cfg)
        if status == 200:
            if format_type == 'json':
                conts = json.loads(self.conn.response.read())
                if six.PY2:
                    for cont in conts:
                        cont['name'] = cont['name'].encode('utf-8')
                return conts
            elif format_type == 'xml':
                conts = []
                tree = minidom.parseString(self.conn.response.read())
                for x in tree.getElementsByTagName('container'):
                    cont = {}
                    for key in ['name', 'count', 'bytes', 'last_modified']:
                        cont[key] = x.getElementsByTagName(key)[0].\
                            childNodes[0].nodeValue
                    conts.append(cont)
                for cont in conts:
                    if six.PY2:
                        cont['name'] = cont['name'].encode('utf-8')
                    for key in ('count', 'bytes'):
                        cont[key] = int(cont[key])
                return conts
            else:
                lines = self.conn.response.read().split(b'\n')
                if lines and not lines[-1]:
                    # drop the empty entry after the trailing newline
                    lines = lines[:-1]
                if six.PY2:
                    return lines
                return [line.decode('utf-8') for line in lines]
        elif status == 204:
            return []
        raise ResponseError(self.conn.response, 'GET',
                            self.conn.make_path(self.path))
    def delete_containers(self):
        """Delete every container (with contents); True if account ends
        empty."""
        for c in listing_items(self.containers):
            cont = self.container(c)
            # clear object versioning first so versioned objects can go
            cont.update_metadata(hdrs={'x-versions-location': ''},
                                 tolerate_missing=True)
            if not cont.delete_recursive():
                return False
        return listing_empty(self.containers)
    def info(self, hdrs=None, parms=None, cfg=None):
        """HEAD the account and return its usage statistics.

        :returns: dict with object_count, container_count and bytes_used
            (plus temp-url keys when present)
        :raises ResponseError: if the HEAD does not return 204
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        if self.conn.make_request('HEAD', self.path, hdrs=hdrs,
                                  parms=parms, cfg=cfg) != 204:
            raise ResponseError(self.conn.response, 'HEAD',
                                self.conn.make_path(self.path))
        fields = [['object_count', 'x-account-object-count'],
                  ['container_count', 'x-account-container-count'],
                  ['bytes_used', 'x-account-bytes-used']]
        optional_fields = [
            ['temp-url-key', 'x-account-meta-temp-url-key'],
            ['temp-url-key-2', 'x-account-meta-temp-url-key-2']]
        return self.header_fields(fields, optional_fields=optional_fields)
    @property
    def path(self):
        # account requests address the storage URL root
        return []
class Container(Base):
    """Client-side wrapper for container-level Swift API requests."""
    # policy_specified is set in __init__.py when tests are being set up.
    policy_specified = None
    def __init__(self, conn, account, name):
        self.conn = conn
        self.account = str(account)
        self.name = str(name)
    def create(self, hdrs=None, parms=None, cfg=None):
        """PUT the container; returns True on 201 or 202."""
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        if self.policy_specified and 'X-Storage-Policy' not in hdrs:
            hdrs['X-Storage-Policy'] = self.policy_specified
        return self.conn.make_request('PUT', self.path, hdrs=hdrs,
                                      parms=parms, cfg=cfg) in (201, 202)
    def update_metadata(self, hdrs=None, cfg=None, tolerate_missing=False):
        """POST the given headers to the container.

        :param tolerate_missing: treat a 404 as success
        :raises ResponseError: on any other failure
        """
        if hdrs is None:
            hdrs = {}
        if cfg is None:
            cfg = {}
        self.conn.make_request('POST', self.path, hdrs=hdrs, cfg=cfg)
        if is_success(self.conn.response.status):
            return True
        if tolerate_missing and self.conn.response.status == 404:
            return True
        raise ResponseError(self.conn.response, 'POST',
                            self.conn.make_path(self.path))
    def delete(self, hdrs=None, parms=None, tolerate_missing=False):
        """DELETE the container; True on 204 (or 404 if
        tolerate_missing)."""
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        allowed_codes = (204, 404) if tolerate_missing else (204, )
        return self.conn.make_request('DELETE', self.path, hdrs=hdrs,
                                      parms=parms) in allowed_codes
    def delete_files(self, tolerate_missing=False):
        """Delete every object in the container; True if it ends empty."""
        partialed_files = functools.partial(
            self.files, tolerate_missing=tolerate_missing)
        for f in listing_items(partialed_files):
            file_item = self.file(f)
            if not file_item.delete(tolerate_missing=True):
                return False
        return listing_empty(partialed_files)
    def delete_recursive(self):
        """Delete all objects, then the container itself."""
        return self.delete_files(tolerate_missing=True) and \
            self.delete(tolerate_missing=True)
    def file(self, file_name):
        """Return a File wrapper for the named object."""
        return File(self.conn, self.account, self.name, file_name)
    def files(self, hdrs=None, parms=None, cfg=None, tolerate_missing=False):
        """List this container's objects.

        :param parms: may include 'format' of None, 'plain', 'json' or
            'xml'
        :returns: for json/xml (and versions listings), a list of dicts;
            otherwise a list of object names ([] on 204, or 404 when
            tolerate_missing)
        :raises RequestError: for an invalid format
        :raises ResponseError: for an unexpected response status
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        format_type = parms.get('format', None)
        if format_type not in [None, 'plain', 'json', 'xml']:
            raise RequestError('Invalid format: %s' % format_type)
        if format_type is None and 'format' in parms:
            del parms['format']
        status = self.conn.make_request('GET', self.path, hdrs=hdrs,
                                        parms=parms, cfg=cfg)
        if status == 200:
            if format_type == 'json' or 'versions' in parms:
                files = json.loads(self.conn.response.read())
                if six.PY2:
                    for file_item in files:
                        for key in ('name', 'subdir', 'content_type',
                                    'version_id'):
                            if key in file_item:
                                file_item[key] = file_item[key].encode('utf-8')
                return files
            elif format_type == 'xml':
                files = []
                tree = minidom.parseString(self.conn.response.read())
                container = tree.getElementsByTagName('container')[0]
                for x in container.childNodes:
                    file_item = {}
                    if x.tagName == 'object':
                        for key in ['name', 'hash', 'bytes', 'content_type',
                                    'last_modified']:
                            file_item[key] = x.getElementsByTagName(key)[0].\
                                childNodes[0].nodeValue
                    elif x.tagName == 'subdir':
                        file_item['subdir'] = x.getElementsByTagName(
                            'name')[0].childNodes[0].nodeValue
                    else:
                        raise ValueError('Found unexpected element %s'
                                         % x.tagName)
                    files.append(file_item)
                for file_item in files:
                    if 'subdir' in file_item:
                        if six.PY2:
                            file_item['subdir'] = \
                                file_item['subdir'].encode('utf-8')
                    else:
                        if six.PY2:
                            file_item.update({
                                k: file_item[k].encode('utf-8')
                                for k in ('name', 'content_type')})
                        file_item['bytes'] = int(file_item['bytes'])
                return files
            else:
                content = self.conn.response.read()
                if content:
                    lines = content.split(b'\n')
                    if lines and not lines[-1]:
                        # drop the empty entry after the trailing newline
                        lines = lines[:-1]
                    if six.PY2:
                        return lines
                    return [line.decode('utf-8') for line in lines]
                else:
                    return []
        elif status == 204 or (status == 404 and tolerate_missing):
            return []
        raise ResponseError(self.conn.response, 'GET',
                            self.conn.make_path(self.path, cfg=cfg))
    def info(self, hdrs=None, parms=None, cfg=None):
        """HEAD the container and return its stats/metadata fields.

        :raises ResponseError: if the HEAD does not return 204
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        self.conn.make_request('HEAD', self.path, hdrs=hdrs,
                               parms=parms, cfg=cfg)
        if self.conn.response.status == 204:
            required_fields = [['bytes_used', 'x-container-bytes-used'],
                               ['object_count', 'x-container-object-count'],
                               ['last_modified', 'last-modified']]
            optional_fields = [
                # N.B. swift doesn't return both x-versions-location
                # and x-history-location at a response so that this is safe
                # using same variable "versions" for both and it means
                # versioning is enabled.
                ['versions', 'x-versions-location'],
                ['versions', 'x-history-location'],
                ['versions_enabled', 'x-versions-enabled'],
                ['tempurl_key', 'x-container-meta-temp-url-key'],
                ['tempurl_key2', 'x-container-meta-temp-url-key-2'],
                ['container_quota_bytes', 'x-container-meta-quota-bytes']]
            return self.header_fields(required_fields, optional_fields)
        raise ResponseError(self.conn.response, 'HEAD',
                            self.conn.make_path(self.path))
    @property
    def path(self):
        return [self.name]
class File(Base):
    """Client-side wrapper for object-level Swift API requests."""
    def __init__(self, conn, account, container, name):
        self.conn = conn
        self.account = str(account)
        self.container = str(container)
        self.name = str(name)
        self.chunked_write_in_progress = False
        self.content_type = None
        self.content_range = None
        self.size = None
        self.metadata = {}
    def make_headers(self, cfg=None):
        """Build object request headers (length, type, token, metadata)
        from instance state, honoring cfg overrides."""
        if cfg is None:
            cfg = {}
        headers = {}
        if not cfg.get('no_content_length'):
            if cfg.get('set_content_length'):
                headers['Content-Length'] = str(cfg.get('set_content_length'))
            elif self.size:
                headers['Content-Length'] = str(self.size)
            else:
                headers['Content-Length'] = '0'
        if cfg.get('use_token'):
            headers['X-Auth-Token'] = cfg.get('use_token')
        if cfg.get('no_content_type'):
            pass
        elif self.content_type:
            headers['Content-Type'] = self.content_type
        else:
            headers['Content-Type'] = 'application/octet-stream'
        for key in self.metadata:
            headers['X-Object-Meta-' + key] = self.metadata[key]
        return headers
    @classmethod
    def compute_md5sum(cls, data):
        """Return the md5 hexdigest of bytes or a seekable file-like
        object; file objects are rewound to offset 0 afterwards."""
        block_size = 4096
        if isinstance(data, bytes):
            data = io.BytesIO(data)
        checksum = md5(usedforsecurity=False)
        buff = data.read(block_size)
        while buff:
            checksum.update(buff)
            buff = data.read(block_size)
        data.seek(0)
        return checksum.hexdigest()
    def copy(self, dest_cont, dest_file, hdrs=None, parms=None, cfg=None,
             return_resp=False):
        """COPY this object to dest_cont/dest_file in the same account.

        :raises ResponseError: if the COPY does not return 201
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        if 'destination' in cfg:
            headers = {'Destination': cfg['destination']}
        elif cfg.get('no_destination'):
            headers = {}
        else:
            headers = {'Destination': '%s/%s' % (dest_cont, dest_file)}
        headers.update(hdrs)
        if 'Destination' in headers:
            headers['Destination'] = urllib.parse.quote(headers['Destination'])
        if self.conn.make_request('COPY', self.path, hdrs=headers,
                                  cfg=cfg, parms=parms) != 201:
            raise ResponseError(self.conn.response, 'COPY',
                                self.conn.make_path(self.path))
        if return_resp:
            return self.conn.response
        return True
    def copy_account(self, dest_account, dest_cont, dest_file,
                     hdrs=None, parms=None, cfg=None):
        """COPY this object to another account's dest_cont/dest_file.

        :raises ResponseError: if the COPY does not return 201
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        if 'destination' in cfg:
            headers = {'Destination': cfg['destination']}
        elif cfg.get('no_destination'):
            headers = {}
        else:
            headers = {'Destination-Account': dest_account,
                       'Destination': '%s/%s' % (dest_cont, dest_file)}
        headers.update(hdrs)
        if 'Destination-Account' in headers:
            headers['Destination-Account'] = \
                urllib.parse.quote(headers['Destination-Account'])
        if 'Destination' in headers:
            headers['Destination'] = urllib.parse.quote(headers['Destination'])
        if self.conn.make_request('COPY', self.path, hdrs=headers,
                                  cfg=cfg, parms=parms) != 201:
            raise ResponseError(self.conn.response, 'COPY',
                                self.conn.make_path(self.path))
        return True
    def delete(self, hdrs=None, parms=None, cfg=None, tolerate_missing=False):
        """DELETE the object.

        :param tolerate_missing: treat a 404 as success
        :raises ResponseError: on any other failure
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if tolerate_missing:
            allowed_statuses = (204, 404)
        else:
            allowed_statuses = (204,)
        if self.conn.make_request(
                'DELETE', self.path, hdrs=hdrs, cfg=cfg,
                parms=parms) not in allowed_statuses:
            raise ResponseError(self.conn.response, 'DELETE',
                                self.conn.make_path(self.path))
        return True
    def info(self, hdrs=None, parms=None, cfg=None):
        """HEAD the object and return its standard metadata fields.

        :raises ResponseError: if the HEAD does not return 200
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        if self.conn.make_request('HEAD', self.path, hdrs=hdrs,
                                  parms=parms, cfg=cfg) != 200:
            raise ResponseError(self.conn.response, 'HEAD',
                                self.conn.make_path(self.path))
        fields = [['content_length', 'content-length'],
                  ['content_type', 'content-type'],
                  ['last_modified', 'last-modified'],
                  ['etag', 'etag']]
        optional_fields = [['x_object_manifest', 'x-object-manifest'],
                           ['x_manifest_etag', 'x-manifest-etag'],
                           ['x_object_version_id', 'x-object-version-id'],
                           ['x_symlink_target', 'x-symlink-target']]
        # (a redundant self-assignment of the 'etag' entry was removed)
        return self.header_fields(fields, optional_fields=optional_fields)
    def initialize(self, hdrs=None, parms=None):
        """HEAD the object and populate instance attributes from its
        response headers.

        :returns: False if the object name is empty or the object does
            not exist, True otherwise
        :raises ResponseError: on any other unsuccessful status
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if not self.name:
            return False
        status = self.conn.make_request('HEAD', self.path, hdrs=hdrs,
                                        parms=parms)
        if status == 404:
            return False
        elif not is_success(status):
            raise ResponseError(self.conn.response, 'HEAD',
                                self.conn.make_path(self.path))
        for hdr, val in self.conn.response.getheaders():
            hdr = wsgi_to_str(hdr).lower()
            val = wsgi_to_str(val)
            if hdr == 'content-type':
                self.content_type = val
            if hdr.startswith('x-object-meta-'):
                self.metadata[hdr[14:]] = val
            if hdr == 'etag':
                self.etag = val
            if hdr == 'content-length':
                self.size = int(val)
            if hdr == 'last-modified':
                self.last_modified = val
        return True
    def load_from_filename(self, filename, callback=None):
        """Upload the contents of a local file as this object's data."""
        # 'with' guarantees the handle is closed even if write() raises
        with open(filename, 'rb') as fobj:
            self.write(fobj, callback=callback)
    @property
    def path(self):
        return [self.container, self.name]
    @classmethod
    def random_data(cls, size=None):
        """Return `size` random bytes (random size up to 32 KiB if
        None)."""
        if size is None:
            size = random.randint(1, 32768)
        # 'with' guarantees the handle is closed even if read() raises
        with open('/dev/urandom', 'rb') as fd:
            return fd.read(size)
    def read(self, size=-1, offset=0, hdrs=None, buffer=None,
             callback=None, cfg=None, parms=None):
        """GET the object's data (optionally a byte range).

        :param size: number of bytes to fetch; <= 0 means the whole body
        :param offset: starting byte offset when size > 0
        :param buffer: optional writable object; when given, the body is
            streamed into it and None is returned
        :param callback: optional progress callback(transferred, size)
        :returns: the body bytes, or None when streaming into buffer
        :raises ResponseError: on an unsuccessful status
        """
        if cfg is None:
            cfg = {}
        if parms is None:
            parms = {}
        if size > 0:
            range_string = 'bytes=%d-%d' % (offset, (offset + size) - 1)
            if hdrs:
                hdrs['Range'] = range_string
            else:
                hdrs = {'Range': range_string}
        status = self.conn.make_request('GET', self.path, hdrs=hdrs,
                                        cfg=cfg, parms=parms)
        if not is_success(status):
            raise ResponseError(self.conn.response, 'GET',
                                self.conn.make_path(self.path))
        for hdr, val in self.conn.response.getheaders():
            if hdr.lower() == 'content-type':
                self.content_type = wsgi_to_str(val)
            if hdr.lower() == 'content-range':
                self.content_range = val
        if hasattr(buffer, 'write'):
            scratch = self.conn.response.read(8192)
            transferred = 0
            while len(scratch) > 0:
                buffer.write(scratch)
                transferred += len(scratch)
                if callable(callback):
                    callback(transferred, self.size)
                scratch = self.conn.response.read(8192)
            return None
        else:
            return self.conn.response.read()
    def read_md5(self):
        """GET the object and return the md5 hexdigest of its body."""
        status = self.conn.make_request('GET', self.path)
        if not is_success(status):
            raise ResponseError(self.conn.response, 'GET',
                                self.conn.make_path(self.path))
        checksum = md5(usedforsecurity=False)
        scratch = self.conn.response.read(8192)
        while len(scratch) > 0:
            checksum.update(scratch)
            scratch = self.conn.response.read(8192)
        return checksum.hexdigest()
    def save_to_filename(self, filename, callback=None):
        """Download this object's contents into a local file.

        :param filename: path of the file to (over)write
        :param callback: optional progress callback passed to read()
        """
        # open() is no longer inside a try whose finally closed fobj:
        # previously a failed open() left fobj unbound and the finally
        # clause raised NameError instead of the real error
        with open(filename, 'wb') as fobj:
            self.read(buffer=fobj, callback=callback)
    def sync_metadata(self, metadata=None, cfg=None, parms=None):
        """POST self.metadata (or the given metadata) to the object.

        :raises ResponseError: if the POST does not return 202
        """
        if cfg is None:
            cfg = {}
        self.metadata = self.metadata if metadata is None else metadata
        if self.metadata:
            headers = self.make_headers(cfg=cfg)
            if not cfg.get('no_content_length'):
                if cfg.get('set_content_length'):
                    headers['Content-Length'] = str(
                        cfg.get('set_content_length'))
                else:
                    headers['Content-Length'] = '0'
            self.conn.make_request('POST', self.path, hdrs=headers,
                                   parms=parms, cfg=cfg)
            if self.conn.response.status != 202:
                raise ResponseError(self.conn.response, 'POST',
                                    self.conn.make_path(self.path))
        return True
    def chunked_write(self, data=None, hdrs=None, parms=None, cfg=None):
        """Drive a chunked PUT incrementally.

        Call with data to start/continue the upload; call with no data to
        finish it (returns True on 201).

        :raises RuntimeError: if called with no data and no upload in
            progress
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        if data is not None and self.chunked_write_in_progress:
            self.conn.put_data(data, True)
        elif data is not None:
            self.chunked_write_in_progress = True
            headers = self.make_headers(cfg=cfg)
            headers.update(hdrs)
            self.conn.put_start(self.path, hdrs=headers, parms=parms,
                                cfg=cfg, chunked=True)
            self.conn.put_data(data, True)
        elif self.chunked_write_in_progress:
            self.chunked_write_in_progress = False
            return self.conn.put_end(True) == 201
        else:
            raise RuntimeError
    def write(self, data=b'', hdrs=None, parms=None, callback=None, cfg=None,
              return_resp=False):
        """PUT the given bytes (or file-like object) as the object body.

        Also records self.size and self.md5 for the uploaded data.

        :param callback: optional progress callback(transferred, size)
        :param return_resp: return the response object instead of True
        :raises ResponseError: if the PUT is not successful
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        block_size = 2 ** 20
        if all(hasattr(data, attr) for attr in ('flush', 'seek', 'fileno')):
            try:
                data.flush()
                data.seek(0)
            except IOError:
                pass
            self.size = int(os.fstat(data.fileno())[6])
        else:
            data = io.BytesIO(data)
            self.size = data.seek(0, os.SEEK_END)
            data.seek(0)
        headers = self.make_headers(cfg=cfg)
        headers.update(hdrs)
        def try_request():
            # rewind to be ready for another attempt
            data.seek(0)
            self.conn.put_start(self.path, hdrs=headers, parms=parms, cfg=cfg)
            transferred = 0
            for buff in iter(lambda: data.read(block_size), b''):
                self.conn.put_data(buff)
                transferred += len(buff)
                if callable(callback):
                    callback(transferred, self.size)
            self.conn.put_end()
            return self.conn.response
        try:
            self.response = self.conn.request_with_retry(try_request)
        except RequestError as e:
            raise ResponseError(self.conn.response, 'PUT',
                                self.conn.make_path(self.path), details=str(e))
        if not is_success(self.response.status):
            raise ResponseError(self.conn.response, 'PUT',
                                self.conn.make_path(self.path))
        try:
            data.seek(0)
        except IOError:
            pass
        self.md5 = self.compute_md5sum(data)
        if return_resp:
            return self.conn.response
        return True
    def write_random(self, size=None, hdrs=None, parms=None, cfg=None):
        """PUT random data of the given size; returns the data written.

        :raises ResponseError: if the PUT fails
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        data = self.random_data(size)
        if not self.write(data, hdrs=hdrs, parms=parms, cfg=cfg):
            raise ResponseError(self.conn.response, 'PUT',
                                self.conn.make_path(self.path))
        self.md5 = self.compute_md5sum(io.BytesIO(data))
        return data
    def write_random_return_resp(self, size=None, hdrs=None, parms=None,
                                 cfg=None):
        """PUT random data of the given size; returns the response.

        :raises ResponseError: if the PUT fails
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        data = self.random_data(size)
        resp = self.write(data, hdrs=hdrs, parms=parms, cfg=cfg,
                          return_resp=True)
        if not resp:
            raise ResponseError(self.conn.response)
        self.md5 = self.compute_md5sum(io.BytesIO(data))
        return resp
    def post(self, hdrs=None, parms=None, cfg=None, return_resp=False):
        """POST the object's headers (instance state plus hdrs).

        :raises ResponseError: if the POST does not return 202
        """
        if hdrs is None:
            hdrs = {}
        if parms is None:
            parms = {}
        if cfg is None:
            cfg = {}
        headers = self.make_headers(cfg=cfg)
        headers.update(hdrs)
        self.conn.make_request('POST', self.path, hdrs=headers,
                               parms=parms, cfg=cfg)
        if self.conn.response.status != 202:
            raise ResponseError(self.conn.response, 'POST',
                                self.conn.make_path(self.path))
        if return_resp:
            return self.conn.response
        return True
| swift-master | test/functional/swift_test_client.py |
#!/usr/bin/python -u
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import SkipTest
import six
import test.functional as tf
from test.functional import cluster_info
from test.functional.tests import Utils, Base, BaseEnv
from test.functional.swift_test_client import Account, Connection, \
ResponseError
def setUpModule():
    """Standard functional-test module setup hook."""
    tf.setup_package()
def tearDownModule():
    """Standard functional-test module teardown hook."""
    tf.teardown_package()
class TestDomainRemapEnv(BaseEnv):
    """Shared environment for domain_remap tests: one container plus two
    objects, one of which has a leading-slash name."""
    domain_remap_enabled = None  # tri-state: None initially, then True/False
    dns_safe_account_name = None  # tri-state: None initially, then True/False
    @classmethod
    def setUp(cls):
        """Probe cluster capabilities and create the test fixtures."""
        cls.conn = Connection(tf.config)
        cls.conn.authenticate()
        if cls.domain_remap_enabled is None:
            cls.domain_remap_enabled = 'domain_remap' in cluster_info
        if not cls.domain_remap_enabled:
            return
        if cls.dns_safe_account_name is None:
            # a '%' cannot appear in a DNS label, so such an account name
            # cannot be used as a remap domain component
            cls.dns_safe_account_name = ('%' not in cls.conn.account_name)
        if not cls.dns_safe_account_name:
            return
        cls.account = Account(
            cls.conn, tf.config.get('account', tf.config['username']))
        cls.account.delete_containers()
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        cls.obj = cls.container.file(Utils.create_name())
        cls.obj.write(b'obj contents')
        cls.obj_slash = cls.container.file('/v1')
        cls.obj_slash.write(b'obj contents')
class TestDomainRemap(Base):
    """Functional tests for the domain_remap middleware: requests whose
    Host header encodes the account (and optionally container) should be
    remapped onto the corresponding storage paths."""
    env = TestDomainRemapEnv
    set_up = False
    def setUp(self):
        """Skip unless domain_remap is enabled, the account name is DNS
        safe, and a storage_domain is configured in test.conf."""
        super(TestDomainRemap, self).setUp()
        if self.env.domain_remap_enabled is False:
            raise SkipTest("Domain Remap is not enabled")
        elif self.env.domain_remap_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected domain_remap_enabled to be True/False, got %r" %
                (self.env.domain_remap_enabled,))
        if self.env.dns_safe_account_name is False:
            raise SkipTest("Account name %r cannot work with Domain Remap" %
                           (self.env.conn.account_name,))
        elif self.env.dns_safe_account_name is not True:
            # just some sanity checking
            # (fixed: this previously reported domain_remap_enabled)
            raise Exception(
                "Expected dns_safe_account_name to be True/False, got %r" %
                (self.env.dns_safe_account_name,))
        # domain_remap middleware does not advertise its storage_domain values
        # in swift /info responses so a storage_domain must be configured in
        # test.conf for these tests to succeed
        if not tf.config.get('storage_domain'):
            raise SkipTest('Domain Remap storage_domain not configured in %s' %
                           tf.config['__file__'])
        storage_domain = tf.config.get('storage_domain')
        self.acct_domain_dash = '%s.%s' % (self.env.account.conn.account_name,
                                           storage_domain)
        self.acct_domain_underscore = '%s.%s' % (
            self.env.account.conn.account_name.replace('_', '-'),
            storage_domain)
        self.cont_domain_dash = '%s.%s.%s' % (
            self.env.container.name,
            self.env.account.conn.account_name,
            storage_domain)
        self.cont_domain_underscore = '%s.%s.%s' % (
            self.env.container.name,
            self.env.account.conn.account_name.replace('_', '-'),
            storage_domain)
    def test_GET_remapped_account(self):
        """GETs via an account-remap domain list and fetch correctly."""
        for domain in (self.acct_domain_dash, self.acct_domain_underscore):
            self.env.account.conn.make_request('GET', '/',
                                               hdrs={'Host': domain},
                                               cfg={'absolute_path': True})
            self.assert_status(200)
            body = self.env.account.conn.response.read()
            if not six.PY2:
                body = body.decode('utf8')
            self.assertIn(self.env.container.name, body.split('\n'))
            path = '/'.join(['', self.env.container.name])
            self.env.account.conn.make_request('GET', path,
                                               hdrs={'Host': domain},
                                               cfg={'absolute_path': True})
            self.assert_status(200)
            body = self.env.account.conn.response.read()
            if not six.PY2:
                body = body.decode('utf8')
            self.assertIn(self.env.obj.name, body.split('\n'))
            self.assertIn(self.env.obj_slash.name, body.split('\n'))
            for obj in (self.env.obj, self.env.obj_slash):
                path = '/'.join(['', self.env.container.name, obj.name])
                self.env.account.conn.make_request('GET', path,
                                                   hdrs={'Host': domain},
                                                   cfg={'absolute_path': True})
                self.assert_status(200)
                self.assert_body('obj contents')
    def test_PUT_remapped_account(self):
        """PUTs via an account-remap domain create the right resources."""
        for domain in (self.acct_domain_dash, self.acct_domain_underscore):
            # Create a container
            new_container_name = Utils.create_name()
            path = '/'.join(['', new_container_name])
            self.env.account.conn.make_request('PUT', path,
                                               data='new obj contents',
                                               hdrs={'Host': domain},
                                               cfg={'absolute_path': True})
            self.assert_status(201)
            self.assertIn(new_container_name, self.env.account.containers())
            # Create an object
            new_obj_name = Utils.create_name()
            path = '/'.join(['', self.env.container.name, new_obj_name])
            self.env.account.conn.make_request('PUT', path,
                                               data='new obj contents',
                                               hdrs={'Host': domain},
                                               cfg={'absolute_path': True})
            self.assert_status(201)
            new_obj = self.env.container.file(new_obj_name)
            self.assertEqual(new_obj.read(), b'new obj contents')
    def test_GET_remapped_container(self):
        """GETs via a container-remap domain list and fetch correctly."""
        for domain in (self.cont_domain_dash, self.cont_domain_underscore):
            self.env.account.conn.make_request('GET', '/',
                                               hdrs={'Host': domain},
                                               cfg={'absolute_path': True})
            self.assert_status(200)
            body = self.env.account.conn.response.read()
            if not six.PY2:
                body = body.decode('utf8')
            self.assertIn(self.env.obj.name, body.split('\n'))
            self.assertIn(self.env.obj_slash.name, body.split('\n'))
            for obj in (self.env.obj, self.env.obj_slash):
                path = '/'.join(['', obj.name])
                self.env.account.conn.make_request('GET', path,
                                                   hdrs={'Host': domain},
                                                   cfg={'absolute_path': True})
                self.assert_status(200)
                self.assert_body('obj contents')
    def test_PUT_remapped_container(self):
        """PUTs via a container-remap domain create the right objects."""
        for domain in (self.cont_domain_dash, self.cont_domain_underscore):
            new_obj_name = Utils.create_name()
            path = '/'.join(['', new_obj_name])
            self.env.account.conn.make_request('PUT', path,
                                               data='new obj contents',
                                               hdrs={'Host': domain},
                                               cfg={'absolute_path': True})
            self.assert_status(201)
            new_obj = self.env.container.file(new_obj_name)
            self.assertEqual(new_obj.read(), b'new obj contents')
| swift-master | test/functional/test_domain_remap.py |
#!/usr/bin/python
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import unittest
from uuid import uuid4
from test.functional import check_response, cluster_info, retry, \
requires_acls, load_constraint, requires_policies, SkipTest
import test.functional as tf
import six
from six.moves import range
def setUpModule():
    """Standard functional-test module setup hook."""
    tf.setup_package()
def tearDownModule():
    """Standard functional-test module teardown hook."""
    tf.teardown_package()
class TestContainer(unittest.TestCase):
    def setUp(self):
        """Create a fresh container for the test (a second name is only
        reserved so tearDown can clean it up) and load the cluster's
        metadata constraints."""
        if tf.skip:
            raise SkipTest
        self.name = uuid4().hex
        # this container isn't created by default, but will be cleaned up
        self.container = uuid4().hex
        def put(url, token, parsed, conn):
            conn.request('PUT', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        # If the request was received and processed but the container-server
        # timed out getting the response back to the proxy, or the proxy timed
        # out getting the response back to the client, the next retry will 202
        self.assertIn(resp.status, (201, 202))
        self.max_meta_count = load_constraint('max_meta_count')
        self.max_meta_name_length = load_constraint('max_meta_name_length')
        self.max_meta_overall_size = load_constraint('max_meta_overall_size')
        self.max_meta_value_length = load_constraint('max_meta_value_length')
    def tearDown(self):
        """Empty and delete both test containers, tolerating races."""
        if tf.skip:
            raise SkipTest
        def get(url, token, parsed, conn, container):
            conn.request(
                'GET', parsed.path + '/' + container + '?format=json', '',
                {'X-Auth-Token': token})
            return check_response(conn)
        def delete(url, token, parsed, conn, container, obj):
            if six.PY2:
                obj_name = obj['name'].encode('utf8')
            else:
                obj_name = obj['name']
            path = '/'.join([parsed.path, container, obj_name])
            conn.request('DELETE', path, '', {'X-Auth-Token': token})
            return check_response(conn)
        for container in (self.name, self.container):
            # loop until the listing comes back empty
            while True:
                resp = retry(get, container)
                body = resp.read()
                if resp.status == 404:
                    break
                self.assertEqual(resp.status // 100, 2, resp.status)
                objs = json.loads(body)
                if not objs:
                    break
                for obj in objs:
                    resp = retry(delete, container, obj)
                    resp.read()
                    # Under load, container listing may not update
                    # immediately, so we may attempt to delete the same
                    # object multiple times. Tolerate the object having
                    # already been deleted.
                    self.assertIn(resp.status, (204, 404))
        def delete(url, token, parsed, conn, container):
            conn.request('DELETE', parsed.path + '/' + container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        for container in (self.name, self.container):
            resp = retry(delete, container)
            resp.read()
            # self.container may not have been created at all, but even if it
            # has, for either container there may be a failure that trips the
            # retry despite the request having been successfully processed.
            self.assertIn(resp.status, (204, 404))
    def test_multi_metadata(self):
        """Metadata POSTed in separate requests accumulates on the
        container rather than replacing earlier keys."""
        if tf.skip:
            raise SkipTest
        def post(url, token, parsed, conn, name, value):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, name: value})
            return check_response(conn)
        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(post, 'X-Container-Meta-One', '1')
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(head)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        self.assertEqual(resp.getheader('x-container-meta-one'), '1')
        resp = retry(post, 'X-Container-Meta-Two', '2')
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(head)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        # the first key must survive the second POST
        self.assertEqual(resp.getheader('x-container-meta-one'), '1')
        self.assertEqual(resp.getheader('x-container-meta-two'), '2')
    def test_unicode_metadata(self):
        """Non-ASCII metadata names/values round-trip through POST/HEAD
        (non-ASCII header *names* only on py2 integral front ends)."""
        if tf.skip:
            raise SkipTest
        def post(url, token, parsed, conn, name, value):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, name: value})
            return check_response(conn)
        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        uni_key = u'X-Container-Meta-uni\u0E12'
        uni_value = u'uni\u0E12'
        # Note that py3 has issues with non-ascii header names; see
        # https://bugs.python.org/issue37093
        if (tf.web_front_end == 'integral' and six.PY2):
            resp = retry(post, uni_key, '1')
            resp.read()
            self.assertEqual(resp.status, 204)
            resp = retry(head)
            resp.read()
            self.assertIn(resp.status, (200, 204))
            self.assertEqual(resp.getheader(uni_key.encode('utf-8')), '1')
        resp = retry(post, 'X-Container-Meta-uni', uni_value)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(head)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        if six.PY2:
            self.assertEqual(resp.getheader('X-Container-Meta-uni'),
                             uni_value.encode('utf-8'))
        else:
            self.assertEqual(resp.getheader('X-Container-Meta-uni'),
                             uni_value)
        # See above note about py3 and non-ascii header names
        if (tf.web_front_end == 'integral' and six.PY2):
            resp = retry(post, uni_key, uni_value)
            resp.read()
            self.assertEqual(resp.status, 204)
            resp = retry(head)
            resp.read()
            self.assertIn(resp.status, (200, 204))
            self.assertEqual(resp.getheader(uni_key.encode('utf-8')),
                             uni_value.encode('utf-8'))
    def test_PUT_metadata(self):
        """Container PUT with X-Container-Meta-Test stores the metadata;
        a PUT with an empty metadata value stores nothing."""
        if tf.skip:
            raise SkipTest

        def put(url, token, parsed, conn, name, value):
            # PUT a fresh container carrying the test metadata header.
            conn.request('PUT', parsed.path + '/' + name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Meta-Test': value})
            return check_response(conn)

        def head(url, token, parsed, conn, name):
            conn.request('HEAD', parsed.path + '/' + name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/' + name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', parsed.path + '/' + name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # PUT with a non-empty value: both HEAD and GET must report it.
        name = uuid4().hex
        resp = retry(put, name, 'Value')
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(head, name)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
        resp = retry(get, name)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # PUT with an empty value: the header must not be stored at all.
        name = uuid4().hex
        resp = retry(put, name, '')
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(head, name)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        self.assertIsNone(resp.getheader('x-container-meta-test'))
        resp = retry(get, name)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        self.assertIsNone(resp.getheader('x-container-meta-test'))
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
def test_POST_metadata(self):
if tf.skip:
raise SkipTest
def post(url, token, parsed, conn, value):
conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token,
'X-Container-Meta-Test': value})
return check_response(conn)
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token})
return check_response(conn)
def get(url, token, parsed, conn):
conn.request('GET', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(get)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertIsNone(resp.getheader('x-container-meta-test'))
resp = retry(post, 'Value')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
resp = retry(get)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-container-meta-test'), 'Value')
    def test_PUT_bad_metadata(self):
        """Container PUTs at the metadata name/value/count/overall-size
        limits succeed; one over any limit must fail with 400 and the
        container must then not exist (DELETE returns 404)."""
        if tf.skip:
            raise SkipTest

        def put(url, token, parsed, conn, name, extra_headers):
            headers = {'X-Auth-Token': token}
            headers.update(extra_headers)
            conn.request('PUT', parsed.path + '/' + name, '', headers)
            return check_response(conn)

        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', parsed.path + '/' + name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # metadata name exactly at the limit: accepted
        name = uuid4().hex
        resp = retry(
            put, name,
            {'X-Container-Meta-' + ('k' * self.max_meta_name_length): 'v'})
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # one character over the name limit: rejected, container not created
        name = uuid4().hex
        resp = retry(
            put, name,
            {'X-Container-Meta-' + (
                'k' * (self.max_meta_name_length + 1)): 'v'})
        resp.read()
        self.assertEqual(resp.status, 400)
        resp = retry(delete, name)
        resp.read()
        self.assertEqual(resp.status, 404)
        # metadata value exactly at the limit: accepted
        name = uuid4().hex
        resp = retry(
            put, name,
            {'X-Container-Meta-Too-Long': 'k' * self.max_meta_value_length})
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # one character over the value limit: rejected
        name = uuid4().hex
        resp = retry(
            put, name,
            {'X-Container-Meta-Too-Long': 'k' * (
                self.max_meta_value_length + 1)})
        resp.read()
        self.assertEqual(resp.status, 400)
        resp = retry(delete, name)
        resp.read()
        self.assertEqual(resp.status, 404)
        # exactly max_meta_count metadata headers: accepted
        name = uuid4().hex
        headers = {}
        for x in range(self.max_meta_count):
            headers['X-Container-Meta-%d' % x] = 'v'
        resp = retry(put, name, headers)
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # one over max_meta_count: rejected
        name = uuid4().hex
        headers = {}
        for x in range(self.max_meta_count + 1):
            headers['X-Container-Meta-%d' % x] = 'v'
        resp = retry(put, name, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
        resp = retry(delete, name)
        resp.read()
        self.assertEqual(resp.status, 404)
        # fill metadata right up to max_meta_overall_size: accepted.
        # Each header contributes 4 (name suffix '%04d') plus the value
        # length toward the overall total.
        name = uuid4().hex
        headers = {}
        header_value = 'k' * self.max_meta_value_length
        size = 0
        x = 0
        while size < (self.max_meta_overall_size - 4
                      - self.max_meta_value_length):
            size += 4 + self.max_meta_value_length
            headers['X-Container-Meta-%04d' % x] = header_value
            x += 1
        if self.max_meta_overall_size - size > 1:
            headers['X-Container-Meta-k'] = \
                'v' * (self.max_meta_overall_size - size - 1)
        resp = retry(put, name, headers)
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(delete, name)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # one byte over the overall size: rejected
        # (reuses `headers`/`size` from the previous case)
        name = uuid4().hex
        headers['X-Container-Meta-k'] = \
            'v' * (self.max_meta_overall_size - size)
        resp = retry(put, name, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
        resp = retry(delete, name)
        resp.read()
        self.assertEqual(resp.status, 404)
def test_POST_bad_metadata(self):
if tf.skip:
raise SkipTest
def post(url, token, parsed, conn, extra_headers):
headers = {'X-Auth-Token': token}
headers.update(extra_headers)
conn.request('POST', parsed.path + '/' + self.name, '', headers)
return check_response(conn)
resp = retry(
post,
{'X-Container-Meta-' + ('k' * self.max_meta_name_length): 'v'})
resp.read()
self.assertEqual(resp.status, 204)
# Clear it, so the value-length checking doesn't accidentally trip
# the overall max
resp = retry(
post,
{'X-Container-Meta-' + ('k' * self.max_meta_name_length): ''})
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(
post,
{'X-Container-Meta-' + (
'k' * (self.max_meta_name_length + 1)): 'v'})
resp.read()
self.assertEqual(resp.status, 400)
resp = retry(
post,
{'X-Container-Meta-Too-Long': 'k' * self.max_meta_value_length})
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(
post,
{'X-Container-Meta-Too-Long': 'k' * (
self.max_meta_value_length + 1)})
resp.read()
self.assertEqual(resp.status, 400)
def test_POST_bad_metadata2(self):
if tf.skip:
raise SkipTest
def post(url, token, parsed, conn, extra_headers):
headers = {'X-Auth-Token': token}
headers.update(extra_headers)
conn.request('POST', parsed.path + '/' + self.name, '', headers)
return check_response(conn)
headers = {}
for x in range(self.max_meta_count):
headers['X-Container-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 204)
headers = {}
for x in range(self.max_meta_count + 1):
headers['X-Container-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 400)
    def test_POST_bad_metadata3(self):
        """Aggregate metadata size on POST: each POST is validated against
        max_meta_overall_size together with metadata already stored on the
        container, not just the headers in the request."""
        if tf.skip:
            raise SkipTest
        if tf.in_process:
            tf.skip_if_no_xattrs()

        def post(url, token, parsed, conn, extra_headers):
            headers = {'X-Auth-Token': token}
            headers.update(extra_headers)
            conn.request('POST', parsed.path + '/' + self.name, '', headers)
            return check_response(conn)
        # build a header set that nearly fills max_meta_overall_size;
        # each entry counts 4 (the '%04d' name suffix) plus its value.
        headers = {}
        header_value = 'k' * self.max_meta_value_length
        size = 0
        x = 0
        while size < (self.max_meta_overall_size - 4
                      - self.max_meta_value_length):
            size += 4 + self.max_meta_value_length
            headers['X-Container-Meta-%04d' % x] = header_value
            x += 1
        if self.max_meta_overall_size - size > 1:
            headers['X-Container-Meta-k'] = \
                'v' * (self.max_meta_overall_size - size - 1)
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 204)
        # this POST includes metadata size that is over limit
        headers['X-Container-Meta-k'] = \
            'x' * (self.max_meta_overall_size - size)
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
        # this POST would be ok and the aggregate backend metadata
        # size is on the border
        headers = {'X-Container-Meta-k':
                   'y' * (self.max_meta_overall_size - size - 1)}
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 204)
        # this last POST would be ok by itself but takes the aggregate
        # backend metadata size over limit
        headers = {'X-Container-Meta-k':
                   'z' * (self.max_meta_overall_size - size)}
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
def test_public_container(self):
if tf.skip:
raise SkipTest
def get(url, token, parsed, conn):
conn.request('GET', parsed.path + '/' + self.name)
return check_response(conn)
try:
resp = retry(get)
raise Exception('Should not have been able to GET')
except Exception as err:
self.assertTrue(str(err).startswith('No result after '), err)
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token,
'X-Container-Read': '.r:*,.rlistings'})
return check_response(conn)
resp = retry(post)
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(get)
resp.read()
self.assertEqual(resp.status, 204)
def post(url, token, parsed, conn):
conn.request('POST', parsed.path + '/' + self.name, '',
{'X-Auth-Token': token, 'X-Container-Read': ''})
return check_response(conn)
resp = retry(post)
resp.read()
self.assertEqual(resp.status, 204)
try:
resp = retry(get)
raise Exception('Should not have been able to GET')
except Exception as err:
self.assertTrue(str(err).startswith('No result after '), err)
    def test_cross_account_container(self):
        """Container X-Container-Read/Write ACLs grant, then revoke,
        access to this container for a second account."""
        if tf.skip or tf.skip2:
            raise SkipTest
        # Obtain the first account's string
        first_account = ['unknown']  # mutable cell filled in by get1

        def get1(url, token, parsed, conn):
            first_account[0] = parsed.path
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get1)
        resp.read()
        # Ensure we can't access the container with the second account

        def get2(url, token, parsed, conn):
            # GET against account 1's path using account 2's token.
            conn.request('GET', first_account[0] + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Make the container accessible by the second account

        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': tf.swift_test_perm[1],
                          'X-Container-Write': tf.swift_test_perm[1]})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can now use the container with the second account
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Make the container private again

        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, 'X-Container-Read': '',
                          'X-Container-Write': ''})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can't access the container with the second account again
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 403)
    def test_cross_account_public_container(self):
        """A public read ACL lets a second account read but not write;
        adding an X-Container-Write ACL then allows writes too."""
        if tf.skip or tf.skip2:
            raise SkipTest
        if tf.in_process:
            tf.skip_if_no_xattrs()
        # Obtain the first account's string
        first_account = ['unknown']  # mutable cell filled in by get1

        def get1(url, token, parsed, conn):
            first_account[0] = parsed.path
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get1)
        resp.read()
        # Ensure we can't access the container with the second account

        def get2(url, token, parsed, conn):
            conn.request('GET', first_account[0] + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Make the container completely public

        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': '.r:*,.rlistings'})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can now read the container with the second account
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 204)
        # But we shouldn't be able to write with the second account

        def put2(url, token, parsed, conn):
            conn.request('PUT', first_account[0] + '/' + self.name + '/object',
                         'test object', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Now make the container also writable by the second account

        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Write': tf.swift_test_perm[1]})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can still read the container with the second account
        resp = retry(get2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 204)
        # And that we can now write with the second account
        resp = retry(put2, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 201)
    def test_nonadmin_user(self):
        """Container read and write ACLs grant a non-admin (third) account
        read access first, then write access."""
        if tf.skip or tf.skip3:
            raise SkipTest
        if tf.in_process:
            tf.skip_if_no_xattrs()
        # Obtain the first account's string
        first_account = ['unknown']  # mutable cell filled in by get1

        def get1(url, token, parsed, conn):
            first_account[0] = parsed.path
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get1)
        resp.read()
        # Ensure we can't access the container with the third account

        def get3(url, token, parsed, conn):
            conn.request('GET', first_account[0] + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Make the container accessible by the third account

        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': tf.swift_test_perm[2]})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can now read the container with the third account
        resp = retry(get3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        # But we shouldn't be able to write with the third account

        def put3(url, token, parsed, conn):
            conn.request('PUT', first_account[0] + '/' + self.name + '/object',
                         'test object', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # Now make the container also writable by the third account

        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Write': tf.swift_test_perm[2]})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # Ensure we can still read the container with the third account
        resp = retry(get3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        # And that we can now write with the third account
        resp = retry(put3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
    @requires_acls
    def test_read_only_acl_listings(self):
        """A 'read-only' account ACL grants container listings but not
        container creation."""
        if tf.skip3:
            raise SkipTest

        def get(url, token, parsed, conn):
            # account-level GET (container listing)
            conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)

        def post_account(url, token, parsed, conn, headers):
            # account-level POST to set the account access-control header
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def put(url, token, parsed, conn, name):
            conn.request('PUT', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # cannot list containers
        resp = retry(get, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-only access
        acl_user = tf.swift_test_user[2]
        acl = {'read-only': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # read-only can list containers
        resp = retry(get, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertIn(self.name, listing)
        # read-only can not create containers
        new_container_name = str(uuid4())
        resp = retry(put, new_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # but it can see newly created ones
        resp = retry(put, new_container_name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertIn(new_container_name, listing)
    @requires_acls
    def test_read_only_acl_metadata(self):
        """A 'read-only' account ACL grants reading container metadata
        but not writing it."""
        if tf.skip3:
            raise SkipTest

        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def post_account(url, token, parsed, conn, headers):
            # account-level POST to set the account access-control header
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def post(url, token, parsed, conn, name, headers):
            # container-level POST to set container metadata
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)
        # add some metadata
        value = str(uuid4())
        headers = {'x-container-meta-test': value}
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # cannot see metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-only access
        acl_user = tf.swift_test_user[2]
        acl = {'read-only': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # read-only can NOT write container metadata
        new_value = str(uuid4())
        headers = {'x-container-meta-test': new_value}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # read-only can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
    @requires_acls
    def test_read_write_acl_listings(self):
        """A 'read-write' account ACL grants listing, creating and
        deleting containers (even ones the grantee did not create)."""
        if tf.skip3:
            raise SkipTest

        def get(url, token, parsed, conn):
            # account-level GET (container listing)
            conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)

        def post(url, token, parsed, conn, headers):
            # account-level POST to set the account access-control header
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def put(url, token, parsed, conn, name):
            conn.request('PUT', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # cannot list containers
        resp = retry(get, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-write access
        acl_user = tf.swift_test_user[2]
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can list containers
        resp = retry(get, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertIn(self.name, listing)
        # can create new containers
        new_container_name = str(uuid4())
        resp = retry(put, new_container_name, use_account=3)
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(get, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertIn(new_container_name, listing)
        # can also delete them
        resp = retry(delete, new_container_name, use_account=3)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        resp = retry(get, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertNotIn(new_container_name, listing)
        # even if they didn't create them
        empty_container_name = str(uuid4())
        resp = retry(put, empty_container_name, use_account=1)
        resp.read()
        self.assertIn(resp.status, (201, 202))
        resp = retry(delete, empty_container_name, use_account=3)
        resp.read()
        self.assertIn(resp.status, (204, 404))
    @requires_acls
    def test_read_write_acl_metadata(self):
        """A 'read-write' account ACL grants reading, writing and
        removing container metadata."""
        if tf.skip3:
            raise SkipTest

        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def post_account(url, token, parsed, conn, headers):
            # account-level POST to set the account access-control header
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def post(url, token, parsed, conn, name, headers):
            # container-level POST to set container metadata
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)
        # add some metadata
        value = str(uuid4())
        headers = {'x-container-meta-test': value}
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # cannot see metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-write access
        acl_user = tf.swift_test_user[2]
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # read-write can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # read-write can also write container metadata
        new_value = str(uuid4())
        headers = {'x-container-meta-test': new_value}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # and remove it
        headers = {'x-remove-container-meta-test': 'true'}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIsNone(resp.getheader('X-Container-Meta-Test'))
    @requires_acls
    def test_admin_acl_listing(self):
        """An 'admin' account ACL grants listing, creating and deleting
        containers (even ones the grantee did not create)."""
        if tf.skip3:
            raise SkipTest

        def get(url, token, parsed, conn):
            # account-level GET (container listing)
            conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)

        def post(url, token, parsed, conn, headers):
            # account-level POST to set the account access-control header
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def put(url, token, parsed, conn, name):
            conn.request('PUT', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # cannot list containers
        resp = retry(get, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant admin access
        acl_user = tf.swift_test_user[2]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can list containers
        resp = retry(get, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertIn(self.name, listing)
        # can create new containers
        new_container_name = str(uuid4())
        resp = retry(put, new_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertIn(new_container_name, listing)
        # can also delete them
        resp = retry(delete, new_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertNotIn(new_container_name, listing)
        # even if they didn't create them
        empty_container_name = str(uuid4())
        resp = retry(put, empty_container_name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(delete, empty_container_name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
    @requires_acls
    def test_admin_acl_metadata(self):
        """An 'admin' account ACL grants reading, writing and removing
        container metadata."""
        if tf.skip3:
            raise SkipTest

        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def post_account(url, token, parsed, conn, headers):
            # account-level POST to set the account access-control header
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def post(url, token, parsed, conn, name, headers):
            # container-level POST to set container metadata
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)
        # add some metadata
        value = str(uuid4())
        headers = {'x-container-meta-test': value}
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # cannot see metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant access
        acl_user = tf.swift_test_user[2]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # can also write container metadata
        new_value = str(uuid4())
        headers = {'x-container-meta-test': new_value}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # and remove it
        headers = {'x-remove-container-meta-test': 'true'}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIsNone(resp.getheader('X-Container-Meta-Test'))
    @requires_acls
    def test_protected_container_sync(self):
        """X-Container-Sync-Key is hidden from read-only and read-write
        account ACL grantees and writable only by admin grantees, while
        ordinary container metadata stays visible/writable per ACL."""
        if tf.skip3:
            raise SkipTest

        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def post_account(url, token, parsed, conn, headers):
            # account-level POST to set the account access-control header
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def post(url, token, parsed, conn, name, headers):
            # container-level POST to set container metadata / sync key
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)
        # add some metadata
        value = str(uuid4())
        headers = {
            'x-container-sync-key': 'secret',
            'x-container-meta-test': value,
        }
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # grant read-only access
        acl_user = tf.swift_test_user[2]
        acl = {'read-only': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # but not sync-key
        self.assertIsNone(resp.getheader('X-Container-Sync-Key'))
        # and can not write
        headers = {'x-container-sync-key': str(uuid4())}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-write access
        acl_user = tf.swift_test_user[2]
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # but not sync-key
        self.assertIsNone(resp.getheader('X-Container-Sync-Key'))
        # sanity check sync-key w/ account1
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
        # and can write
        new_value = str(uuid4())
        headers = {
            'x-container-sync-key': str(uuid4()),
            'x-container-meta-test': new_value,
        }
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)  # validate w/ account1
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # but can not write sync-key
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
        # grant admin access
        acl_user = tf.swift_test_user[2]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # admin can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # and ALSO sync-key
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret')
        # admin tester3 can even change sync-key
        new_secret = str(uuid4())
        headers = {'x-container-sync-key': new_secret}
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Sync-Key'), new_secret)
    @requires_acls
    def test_protected_container_acl(self):
        """
        Container ACL headers (X-Container-Read/Write) must be visible to and
        writable by the account owner and 'admin' account-ACL grantees only;
        'read-only' and 'read-write' grantees may see and change plain
        container metadata but never the ACL headers themselves.
        """
        if tf.skip3:
            raise SkipTest

        # request helpers; retry() supplies url/token/parsed/conn
        def get(url, token, parsed, conn, name):
            conn.request('GET', parsed.path + '/%s' % name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        def post_account(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)

        def post(url, token, parsed, conn, name, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/%s' % name, '', new_headers)
            return check_response(conn)

        # add some container acls
        value = str(uuid4())
        headers = {
            'x-container-read': 'jdoe',
            'x-container-write': 'jdoe',
            'x-container-meta-test': value,
        }
        resp = retry(post, self.name, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # grant read-only access
        acl_user = tf.swift_test_user[2]
        acl = {'read-only': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # but not container acl
        self.assertIsNone(resp.getheader('X-Container-Read'))
        self.assertIsNone(resp.getheader('X-Container-Write'))
        # and can not write
        headers = {
            'x-container-read': 'frank',
            'x-container-write': 'frank',
        }
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-write access
        acl_user = tf.swift_test_user[2]
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), value)
        # but not container acl
        self.assertIsNone(resp.getheader('X-Container-Read'))
        self.assertIsNone(resp.getheader('X-Container-Write'))
        # sanity check container acls with account1
        resp = retry(get, self.name, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
        # and can write
        new_value = str(uuid4())
        headers = {
            'x-container-read': 'frank',
            'x-container-write': 'frank',
            'x-container-meta-test': new_value,
        }
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=1)  # validate w/ account1
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # but can not write container acls
        self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
        # grant admin access
        acl_user = tf.swift_test_user[2]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # admin can read container metadata
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value)
        # and ALSO container acls
        self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe')
        self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe')
        # admin tester3 can even change container acls
        new_value = str(uuid4())
        headers = {
            'x-container-read': '.r:*',
        }
        resp = retry(post, self.name, headers=headers, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(get, self.name, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertEqual(resp.getheader('X-Container-Read'), '.r:*')
def test_long_name_content_type(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
container_name = 'X' * 2048
conn.request('PUT', '%s/%s' % (parsed.path, container_name),
'there', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 400)
self.assertEqual(resp.getheader('Content-Type'),
'text/html; charset=UTF-8')
def test_null_name(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
conn.request('PUT', '%s/abc%%00def' % parsed.path, '',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
if (tf.web_front_end == 'apache2'):
self.assertEqual(resp.status, 404)
else:
self.assertEqual(resp.read(), b'Invalid UTF8 or contains NULL')
self.assertEqual(resp.status, 412)
    def test_create_container_gets_default_policy_by_default(self):
        """
        A container PUT with an empty X-Storage-Policy header lands in the
        cluster's default storage policy.
        """
        try:
            default_policy = \
                tf.FunctionalStoragePolicyCollection.from_info().default
        except AssertionError:
            # cluster does not expose storage policy info
            raise SkipTest()

        def put(url, token, parsed, conn):
            # using the empty storage policy header value here to ensure
            # that the default policy is chosen in case policy_specified is set
            # see __init__.py for details on policy_specified
            conn.request('PUT', parsed.path + '/' + self.container, '',
                         {'X-Auth-Token': token, 'X-Storage-Policy': ''})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status // 100, 2)

        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path + '/' + self.container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(head)
        resp.read()
        # header names are case-insensitive; normalise before lookup
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('x-storage-policy'),
                         default_policy['name'])
def test_error_invalid_storage_policy_name(self):
def put(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('PUT', parsed.path + '/' + self.container, '',
new_headers)
return check_response(conn)
# create
resp = retry(put, {'X-Storage-Policy': uuid4().hex})
resp.read()
self.assertEqual(resp.status, 400)
    @requires_policies
    def test_create_non_default_storage_policy_container(self):
        """
        A container created with an explicit non-default policy keeps that
        policy across a re-PUT without a policy header, and the policy header
        disappears once the container is deleted.
        """
        policy = self.policies.exclude(default=True).select()

        def put(url, token, parsed, conn, headers=None):
            base_headers = {'X-Auth-Token': token}
            if headers:
                base_headers.update(headers)
            conn.request('PUT', parsed.path + '/' + self.container, '',
                         base_headers)
            return check_response(conn)
        headers = {'X-Storage-Policy': policy['name']}
        resp = retry(put, headers=headers)
        resp.read()
        self.assertEqual(resp.status, 201)

        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path + '/' + self.container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(head)
        resp.read()
        # header names are case-insensitive; normalise before lookup
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('x-storage-policy'),
                         policy['name'])
        # and test recreate with-out specifying Storage Policy
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 202)
        # should still be original storage policy
        resp = retry(head)
        resp.read()
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('x-storage-policy'),
                         policy['name'])

        # delete it
        def delete(url, token, parsed, conn):
            conn.request('DELETE', parsed.path + '/' + self.container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertEqual(resp.status, 204)
        # verify no policy header
        resp = retry(head)
        resp.read()
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertIsNone(headers.get('x-storage-policy'))
    @requires_policies
    def test_conflict_change_storage_policy_with_put(self):
        """
        A PUT that tries to change an existing container's storage policy is
        rejected with 409 and the original policy is retained.
        """
        def put(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('PUT', parsed.path + '/' + self.container, '',
                         new_headers)
            return check_response(conn)
        # create
        policy = self.policies.select()
        resp = retry(put, {'X-Storage-Policy': policy['name']})
        resp.read()
        self.assertEqual(resp.status, 201)
        # can't change it
        other_policy = self.policies.exclude(name=policy['name']).select()
        resp = retry(put, {'X-Storage-Policy': other_policy['name']})
        resp.read()
        self.assertEqual(resp.status, 409)

        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path + '/' + self.container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # still original policy
        resp = retry(head)
        resp.read()
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('x-storage-policy'),
                         policy['name'])
    @requires_policies
    def test_noop_change_storage_policy_with_post(self):
        """
        POSTing X-Storage-Policy(-Index) to an existing container succeeds
        (204) but is ignored; the container keeps its original policy.
        """
        def put(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('PUT', parsed.path + '/' + self.container, '',
                         new_headers)
            return check_response(conn)
        # create
        policy = self.policies.select()
        resp = retry(put, {'X-Storage-Policy': policy['name']})
        resp.read()
        self.assertEqual(resp.status, 201)

        def post(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path + '/' + self.container, '',
                         new_headers)
            return check_response(conn)
        # attempt update
        for header in ('X-Storage-Policy', 'X-Storage-Policy-Index'):
            other_policy = self.policies.exclude(name=policy['name']).select()
            resp = retry(post, {header: other_policy['name']})
            resp.read()
            self.assertEqual(resp.status, 204)

        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path + '/' + self.container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # still original policy
        resp = retry(head)
        resp.read()
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('x-storage-policy'),
                         policy['name'])
    def test_container_quota_bytes(self):
        """
        With the container_quotas middleware enabled,
        X-Container-Meta-Quota-Bytes caps the size of objects accepted into
        the container: an over-quota upload fails with 413.
        """
        if 'container_quotas' not in cluster_info:
            raise SkipTest('Container quotas not enabled')
        if tf.in_process:
            tf.skip_if_no_xattrs()

        def post(url, token, parsed, conn, name, value):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, name: value})
            return check_response(conn)

        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # set X-Container-Meta-Quota-Bytes is 10
        resp = retry(post, 'X-Container-Meta-Quota-Bytes', '10')
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(head)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        # confirm X-Container-Meta-Quota-Bytes
        self.assertEqual(resp.getheader('X-Container-Meta-Quota-Bytes'), '10')

        def put(url, token, parsed, conn, data):
            conn.request('PUT', parsed.path + '/' + self.name + '/object',
                         data, {'X-Auth-Token': token})
            return check_response(conn)
        # upload 11 bytes object
        resp = retry(put, b'01234567890')
        resp.read()
        self.assertEqual(resp.status, 413)
        # upload 10 bytes object
        resp = retry(put, b'0123456789')
        resp.read()
        self.assertEqual(resp.status, 201)

        def get(url, token, parsed, conn):
            conn.request('GET', parsed.path + '/' + self.name + '/object',
                         '', {'X-Auth-Token': token})
            return check_response(conn)
        # download 10 bytes object
        resp = retry(get)
        body = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(body, b'0123456789')
class BaseTestContainerACLs(unittest.TestCase):
    """
    Base class for cross-account container ACL tests against a Keystone v3
    auth setup; creates a test container in setUp and empties and deletes it
    in tearDown.
    """
    # subclasses can change the account in which container
    # is created/deleted by setUp/tearDown
    account = 1

    def _get_account(self, url, token, parsed, conn):
        # return the account path portion of the storage URL
        return parsed.path

    def _get_tenant_id(self, url, token, parsed, conn):
        account = parsed.path
        # strip the '/v1/AUTH_' prefix to leave the bare tenant/project id
        return account.replace('/v1/AUTH_', '', 1)

    def setUp(self):
        if tf.skip or tf.skip2 or tf.skip_if_not_v3:
            raise SkipTest('AUTH VERSION 3 SPECIFIC TEST')
        self.name = uuid4().hex

        def put(url, token, parsed, conn):
            conn.request('PUT', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put, use_account=self.account)
        resp.read()
        self.assertEqual(resp.status, 201)

    def tearDown(self):
        if tf.skip or tf.skip2 or tf.skip_if_not_v3:
            raise SkipTest

        def get(url, token, parsed, conn):
            conn.request('GET', parsed.path + '/' + self.name + '?format=json',
                         '', {'X-Auth-Token': token})
            return check_response(conn)

        def delete(url, token, parsed, conn, obj):
            conn.request('DELETE',
                         '/'.join([parsed.path, self.name, obj['name']]), '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        # delete objects in batches until a container listing comes back empty
        while True:
            resp = retry(get, use_account=self.account)
            body = resp.read()
            self.assertEqual(resp.status // 100, 2, resp.status)
            objs = json.loads(body)
            if not objs:
                break
            for obj in objs:
                resp = retry(delete, obj, use_account=self.account)
                resp.read()
                self.assertEqual(resp.status, 204)

        # then delete the (now empty) container itself
        def delete(url, token, parsed, conn):
            conn.request('DELETE', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete, use_account=self.account)
        resp.read()
        self.assertEqual(resp.status, 204)

    def _assert_cross_account_acl_granted(self, granted, grantee_account, acl):
        '''
        Check whether a given container ACL is granted when a user specified
        by grantee_account attempts to access a container owned by
        self.account.
        '''
        # Obtain the first account's string
        first_account = retry(self._get_account, use_account=self.account)

        # Ensure we can't access the container with the grantee account
        def get2(url, token, parsed, conn):
            conn.request('GET', first_account + '/' + self.name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get2, use_account=grantee_account)
        resp.read()
        self.assertEqual(resp.status, 403)

        def put2(url, token, parsed, conn):
            conn.request('PUT', first_account + '/' + self.name + '/object',
                         'test object', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put2, use_account=grantee_account)
        resp.read()
        self.assertEqual(resp.status, 403)

        # Post ACL to the container
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': acl,
                          'X-Container-Write': acl})
            return check_response(conn)
        resp = retry(post, use_account=self.account)
        resp.read()
        self.assertEqual(resp.status, 204)

        # Check access to container from grantee account with ACL in place
        resp = retry(get2, use_account=grantee_account)
        resp.read()
        expected = 204 if granted else 403
        self.assertEqual(resp.status, expected)
        resp = retry(put2, use_account=grantee_account)
        resp.read()
        expected = 201 if granted else 403
        self.assertEqual(resp.status, expected)

        # Make the container private again
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.name, '',
                         {'X-Auth-Token': token, 'X-Container-Read': '',
                          'X-Container-Write': ''})
            return check_response(conn)
        resp = retry(post, use_account=self.account)
        resp.read()
        self.assertEqual(resp.status, 204)

        # Ensure we can't access the container with the grantee account again
        resp = retry(get2, use_account=grantee_account)
        resp.read()
        self.assertEqual(resp.status, 403)
        resp = retry(put2, use_account=grantee_account)
        resp.read()
        self.assertEqual(resp.status, 403)
class TestContainerACLsAccount1(BaseTestContainerACLs):
    """Cross-account ACL tests where the container owner is account 1."""

    def test_cross_account_acl_names_with_user_in_non_default_domain(self):
        # names in acls are disallowed when grantee is in a non-default domain
        acl = '%s:%s' % (tf.swift_test_tenant[3], tf.swift_test_user[3])
        self._assert_cross_account_acl_granted(False, 4, acl)

    def test_cross_account_acl_ids_with_user_in_non_default_domain(self):
        # ids are allowed in acls when grantee is in a non-default domain
        tenant_id = retry(self._get_tenant_id, use_account=4)
        acl = '%s:%s' % (tenant_id, '*')
        self._assert_cross_account_acl_granted(True, 4, acl)

    def test_cross_account_acl_names_in_default_domain(self):
        # names are allowed in acls when grantee and project are in
        # the default domain
        acl = '%s:%s' % (tf.swift_test_tenant[1], tf.swift_test_user[1])
        self._assert_cross_account_acl_granted(True, 2, acl)

    def test_cross_account_acl_ids_in_default_domain(self):
        # ids are allowed in acls when grantee and project are in
        # the default domain
        tenant_id = retry(self._get_tenant_id, use_account=2)
        acl = '%s:%s' % (tenant_id, '*')
        self._assert_cross_account_acl_granted(True, 2, acl)
class TestContainerACLsAccount4(BaseTestContainerACLs):
    """
    Cross-account ACL tests where the container owner (account 4) is in a
    non-default domain.
    """
    account = 4

    def test_cross_account_acl_names_with_project_in_non_default_domain(self):
        # names in acls are disallowed when project is in a non-default domain
        acl = '%s:%s' % (tf.swift_test_tenant[0], tf.swift_test_user[0])
        self._assert_cross_account_acl_granted(False, 1, acl)

    def test_cross_account_acl_ids_with_project_in_non_default_domain(self):
        # ids are allowed in acls when project is in a non-default domain
        tenant_id = retry(self._get_tenant_id, use_account=1)
        acl = '%s:%s' % (tenant_id, '*')
        self._assert_cross_account_acl_granted(True, 1, acl)
# allow running this test module directly
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/test_container.py |
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import mock
import os
import six
from six.moves.urllib.parse import urlparse, urlsplit, urlunsplit
import sys
import pickle
import socket
import locale
import eventlet
import eventlet.debug
import functools
import random
import base64
from time import time, sleep
from contextlib import closing
from gzip import GzipFile
from shutil import rmtree
from tempfile import mkdtemp
from unittest import SkipTest
from six.moves.configparser import ConfigParser, NoSectionError
from six.moves import http_client
from six.moves.http_client import HTTPException
from swift.common.middleware.memcache import MemcacheMiddleware
from swift.common.storage_policy import parse_storage_policies, PolicyError
from swift.common.utils import set_swift_dir
from test import get_config, listen_zero
from test.debug_logger import debug_logger
from test.unit import FakeMemcache
# importing skip_if_no_xattrs so that functional tests can grab it from the
# test.functional namespace.
from test.unit import skip_if_no_xattrs as real_skip_if_no_xattrs
from swift.common import constraints, utils, ring, storage_policy
from swift.common.ring import Ring
from swift.common.http_protocol import SwiftHttpProtocol
from swift.common.wsgi import loadapp
from swift.common.utils import config_true_value, split_path
from swift.account import server as account_server
from swift.container import server as container_server
from swift.obj import server as object_server, mem_server as mem_object_server
import swift.proxy.controllers.obj
# raise the stdlib http.client header-count limit to swift's own constraint
http_client._MAXHEADERS = constraints.MAX_HEADER_COUNT

# when True, _debug() messages are printed to stderr
DEBUG = True

# In order to get the proper blocking behavior of sockets without using
# threads, where we can set an arbitrary timeout for some piece of code under
# test, we use eventlet with the standard socket library patched. We have to
# perform this setup at module import time, since all the socket module
# bindings in the swiftclient code will have been made by the time nose
# invokes the package or class setup methods.
eventlet.hubs.use_hub(utils.get_hub())
eventlet.patcher.monkey_patch(all=False, socket=True)
eventlet.debug.hub_exceptions(False)
# swift_test_client import from swiftclient, so move after the monkey-patching
from test.functional.swift_test_client import Account, Connection, Container, \
ResponseError
from swiftclient import get_auth, http_connection
has_insecure = False
try:
from swiftclient import __version__ as client_version
# Prevent a ValueError in StrictVersion with '2.0.3.68.ga99c2ff'
client_version = '.'.join(client_version.split('.')[:3])
except ImportError:
# Pre-PBR we had version, not __version__. Anyhow...
client_version = '1.2'
from distutils.version import StrictVersion
if StrictVersion(client_version) >= StrictVersion('2.0'):
has_insecure = True
# module-level state shared by all functional tests; populated during setup
config = {}
web_front_end = None
normalized_urls = None

# If no config was read, we will fall back to old school env vars
swift_test_auth_version = None
swift_test_auth = os.environ.get('SWIFT_TEST_AUTH')
swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None, '', '', '']
swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None, '', '', '']
swift_test_tenant = ['', '', '', '', '', '']
swift_test_perm = ['', '', '', '', '', '']
swift_test_domain = ['', '', '', '', '', '']
swift_test_user_id = ['', '', '', '', '', '']
swift_test_tenant_id = ['', '', '', '', '', '']

# per-feature skip flags; tests consult these to raise SkipTest
skip, skip2, skip3, skip_if_not_v3, skip_service_tokens, \
    skip_if_no_reseller_admin = False, False, False, False, False, False

orig_collate = ''
insecure = False

# in-process server state; populated only for in-process functional runs
in_process = False
_testdir = _test_servers = _test_coros = _test_socks = None
policy_specified = None
skip_if_no_xattrs = None
class FakeMemcacheMiddleware(MemcacheMiddleware):
    """
    Caching middleware that fakes out caching in swift if memcached
    does not appear to be running.
    """

    def __init__(self, app, conf):
        super(FakeMemcacheMiddleware, self).__init__(app, conf)
        # replace the real memcache client with an in-memory fake
        self.memcache = FakeMemcache()
class InProcessException(BaseException):
    """Raised when in-process functional test server setup fails."""
    pass
def _info(msg):
print(msg, file=sys.stderr)
def _debug(msg):
    """Emit *msg* via _info(), prefixed with 'DEBUG: ', when DEBUG is set."""
    if not DEBUG:
        return
    _info('DEBUG: ' + msg)
def _in_process_setup_swift_conf(swift_conf_src, testdir):
    """
    Copy swift.conf from *swift_conf_src* into *testdir*, overriding the hash
    path prefix/suffix and max_file_size for in-process functional test runs.

    :param swift_conf_src: path of the source swift.conf to start from
    :param testdir: directory in which to write the modified swift.conf
    :returns: path to the written test swift.conf
    :raises InProcessException: if an expected conf section is missing
    """
    # override swift.conf contents for in-process functional test runs
    conf = ConfigParser()
    conf.read(swift_conf_src)
    try:
        section = 'swift-hash'
        conf.set(section, 'swift_hash_path_suffix', 'inprocfunctests')
        conf.set(section, 'swift_hash_path_prefix', 'inprocfunctests')
        section = 'swift-constraints'
        max_file_size = (8 * 1024 * 1024) + 2  # 8 MB + 2
        conf.set(section, 'max_file_size', str(max_file_size))
    except NoSectionError:
        msg = 'Conf file %s is missing section %s' % (swift_conf_src, section)
        raise InProcessException(msg)
    test_conf_file = os.path.join(testdir, 'swift.conf')
    with open(test_conf_file, 'w') as fp:
        conf.write(fp)
    return test_conf_file
def _in_process_find_conf_file(conf_src_dir, conf_file_name, use_sample=True):
"""
Look for a file first in conf_src_dir, if it exists, otherwise optionally
look in the source tree sample 'etc' dir.
:param conf_src_dir: Directory in which to search first for conf file. May
be None
:param conf_file_name: Name of conf file
:param use_sample: If True and the conf_file_name is not found, then return
any sample conf file found in the source tree sample
'etc' dir by appending '-sample' to conf_file_name
:returns: Path to conf file
:raises InProcessException: If no conf file is found
"""
dflt_src_dir = os.path.normpath(os.path.join(os.path.abspath(__file__),
os.pardir, os.pardir, os.pardir,
'etc'))
conf_src_dir = dflt_src_dir if conf_src_dir is None else conf_src_dir
conf_file_path = os.path.join(conf_src_dir, conf_file_name)
if os.path.exists(conf_file_path):
return conf_file_path
if use_sample:
# fall back to using the corresponding sample conf file
conf_file_name += '-sample'
conf_file_path = os.path.join(dflt_src_dir, conf_file_name)
if os.path.exists(conf_file_path):
return conf_file_path
msg = 'Failed to find config file %s' % conf_file_name
raise InProcessException(msg)
def _in_process_setup_ring(swift_conf, conf_src_dir, testdir):
    """
    If SWIFT_TEST_POLICY is set:
    - look in swift.conf file for specified policy
    - move this to be policy-0 but preserving its options
    - copy its ring file to test dir, changing its devices to suit
      in process testing, and renaming it to suit policy-0
    Otherwise, create a default ring file.

    :param swift_conf: path to the test swift.conf (rewritten in place)
    :param conf_src_dir: optional directory to search for a source ring file
    :param testdir: directory in which the test ring file is written
    :returns: list of listening sockets, one per object-server device
    :raises InProcessException: if the policy or its ring file cannot be found
    """
    conf = ConfigParser()
    conf.read(swift_conf)
    sp_prefix = 'storage-policy:'
    try:
        # policy index 0 will be created if no policy exists in conf
        policies = parse_storage_policies(conf)
    except PolicyError as e:
        raise InProcessException(e)
    # clear all policies from test swift.conf before adding test policy back
    for policy in policies:
        conf.remove_section(sp_prefix + str(policy.idx))
    if policy_specified:
        policy_to_test = policies.get_by_name(policy_specified)
        if policy_to_test is None:
            raise InProcessException('Failed to find policy name "%s"'
                                     % policy_specified)
        _info('Using specified policy %s' % policy_to_test.name)
    else:
        policy_to_test = policies.default
        _info('Defaulting to policy %s' % policy_to_test.name)
    # make policy_to_test be policy index 0 and default for the test config
    sp_zero_section = sp_prefix + '0'
    conf.add_section(sp_zero_section)
    for (k, v) in policy_to_test.get_info(config=True).items():
        conf.set(sp_zero_section, k, str(v))
    conf.set(sp_zero_section, 'default', 'True')
    with open(swift_conf, 'w') as fp:
        conf.write(fp)
    # look for a source ring file
    ring_file_src = ring_file_test = 'object.ring.gz'
    if policy_to_test.idx:
        ring_file_src = 'object-%s.ring.gz' % policy_to_test.idx
    try:
        ring_file_src = _in_process_find_conf_file(conf_src_dir, ring_file_src,
                                                   use_sample=False)
    except InProcessException:
        if policy_specified:
            raise InProcessException('Failed to find ring file %s'
                                     % ring_file_src)
        ring_file_src = None
    ring_file_test = os.path.join(testdir, ring_file_test)
    if ring_file_src:
        # copy source ring file to a policy-0 test ring file, re-homing servers
        _info('Using source ring file %s' % ring_file_src)
        ring_data = ring.RingData.load(ring_file_src)
        obj_sockets = []
        for dev in ring_data.devs:
            device = 'sd%c1' % chr(len(obj_sockets) + ord('a'))
            # NOTE(review): dirs are created under the module-global _testdir
            # and always for 'sda1' rather than `device` -- confirm intended
            utils.mkdirs(os.path.join(_testdir, 'sda1'))
            utils.mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
            obj_socket = listen_zero()
            obj_sockets.append(obj_socket)
            # re-home each ring device onto a local ephemeral-port listener
            dev['port'] = obj_socket.getsockname()[1]
            dev['ip'] = '127.0.0.1'
            dev['device'] = device
            dev['replication_port'] = dev['port']
            dev['replication_ip'] = dev['ip']
        ring_data.save(ring_file_test)
    else:
        # make default test ring, 3 replicas, 4 partitions, 3 devices
        # which will work for a replication policy or a 2+1 EC policy
        _info('No source object ring file, creating 3rep/4part/3dev ring')
        obj_sockets = [listen_zero() for _ in (0, 1, 2)]
        replica2part2dev_id = [[0, 1, 2, 0],
                               [1, 2, 0, 1],
                               [2, 0, 1, 2]]
        devs = [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
                 'port': obj_sockets[0].getsockname()[1]},
                {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
                 'port': obj_sockets[1].getsockname()[1]},
                {'id': 2, 'zone': 2, 'device': 'sdc1', 'ip': '127.0.0.1',
                 'port': obj_sockets[2].getsockname()[1]}]
        ring_data = ring.RingData(replica2part2dev_id, devs, 30)
        with closing(GzipFile(ring_file_test, 'wb')) as f:
            pickle.dump(ring_data, f)
    for dev in ring_data.devs:
        _debug('Ring file dev: %s' % dev)
    return obj_sockets
def _load_encryption(proxy_conf_file, swift_conf_file, **kwargs):
    """
    Load encryption configuration and override proxy-server.conf contents.

    :param proxy_conf_file: Source proxy conf filename
    :param swift_conf_file: Source swift conf filename
    :returns: Tuple of paths to the proxy conf file and swift conf file to use
    :raises InProcessException: raised if proxy conf contents are invalid
    """
    _debug('Setting configuration for encryption')
    # The global conf dict cannot be used to modify the pipeline.
    # The pipeline loader requires the pipeline to be set in the local_conf.
    # If pipeline is set in the global conf dict (which in turn populates the
    # DEFAULTS options) then it prevents pipeline being loaded into the local
    # conf during wsgi load_app.
    # Therefore we must modify the [pipeline:main] section.
    conf = ConfigParser()
    conf.read(proxy_conf_file)
    try:
        section = 'pipeline:main'
        pipeline = conf.get(section, 'pipeline')
        # insert keymaster + encryption just ahead of the final proxy app
        pipeline = pipeline.replace(
            "proxy-logging proxy-server",
            "keymaster encryption proxy-logging proxy-server")
        pipeline = pipeline.replace(
            "cache listing_formats",
            "cache etag-quoter listing_formats")
        conf.set(section, 'pipeline', pipeline)
        # a random root secret is fine: the test cluster is throwaway
        root_secret = base64.b64encode(os.urandom(32))
        if not six.PY2:
            root_secret = root_secret.decode('ascii')
        conf.set('filter:keymaster', 'encryption_root_secret', root_secret)
        conf.set('filter:versioned_writes', 'allow_object_versioning', 'true')
        conf.set('filter:etag-quoter', 'enable_by_default', 'true')
    except NoSectionError as err:
        msg = 'Error problem with proxy conf file %s: %s' % \
            (proxy_conf_file, err)
        raise InProcessException(msg)
    test_conf_file = os.path.join(_testdir, 'proxy-server.conf')
    with open(test_conf_file, 'w') as fp:
        conf.write(fp)
    return test_conf_file, swift_conf_file
def _load_ec_as_default_policy(proxy_conf_file, swift_conf_file, **kwargs):
    """
    Override swift.conf [storage-policy:0] section to use a 2+1 EC policy.

    :param proxy_conf_file: Source proxy conf filename
    :param swift_conf_file: Source swift conf filename
    :returns: Tuple of paths to the proxy conf file and swift conf file to use
    """
    _debug('Setting configuration for default EC policy')
    conf = ConfigParser()
    conf.read(swift_conf_file)
    # remove existing policy sections that came with swift.conf-sample
    for section in list(conf.sections()):
        if section.startswith('storage-policy'):
            conf.remove_section(section)
    # add new policy 0 section for an EC policy
    conf.add_section('storage-policy:0')
    ec_policy_spec = {
        'name': 'ec-test',
        'policy_type': 'erasure_coding',
        'ec_type': 'liberasurecode_rs_vand',
        'ec_num_data_fragments': 2,
        'ec_num_parity_fragments': 1,
        'ec_object_segment_size': 1048576,
        'default': True
    }
    for k, v in ec_policy_spec.items():
        conf.set('storage-policy:0', k, str(v))
    # swift.conf is rewritten in place; the proxy conf is used unchanged
    with open(swift_conf_file, 'w') as fp:
        conf.write(fp)
    return proxy_conf_file, swift_conf_file
def _load_domain_remap_staticweb(proxy_conf_file, swift_conf_file, **kwargs):
    """
    Load domain_remap and staticweb into proxy server pipeline.

    :param proxy_conf_file: Source proxy conf filename
    :param swift_conf_file: Source swift conf filename
    :returns: Tuple of paths to the proxy conf file and swift conf file to use
    :raises InProcessException: raised if proxy conf contents are invalid
    """
    _debug('Setting configuration for domain_remap')
    # add a domain_remap storage_domain to the test configuration
    storage_domain = 'example.net'
    global config
    config['storage_domain'] = storage_domain
    # The global conf dict cannot be used to modify the pipeline.
    # The pipeline loader requires the pipeline to be set in the local_conf.
    # If pipeline is set in the global conf dict (which in turn populates the
    # DEFAULTS options) then it prevents pipeline being loaded into the local
    # conf during wsgi load_app.
    # Therefore we must modify the [pipeline:main] section.
    conf = ConfigParser()
    conf.read(proxy_conf_file)
    try:
        section = 'pipeline:main'
        old_pipeline = conf.get(section, 'pipeline')
        pipeline = old_pipeline.replace(
            " tempauth ",
            " tempauth staticweb ")
        pipeline = pipeline.replace(
            " listing_formats ",
            " domain_remap listing_formats ")
        # if neither replace matched, the pipeline is not in the shape we
        # expect and the test configuration would be silently broken
        if pipeline == old_pipeline:
            raise InProcessException(
                "Failed to insert domain_remap and staticweb into pipeline: %s"
                % old_pipeline)
        conf.set(section, 'pipeline', pipeline)
        # set storage_domain in domain_remap middleware to match test config
        section = 'filter:domain_remap'
        conf.set(section, 'storage_domain', storage_domain)
    except NoSectionError as err:
        msg = 'Error problem with proxy conf file %s: %s' % \
            (proxy_conf_file, err)
        raise InProcessException(msg)
    test_conf_file = os.path.join(_testdir, 'proxy-server.conf')
    with open(test_conf_file, 'w') as fp:
        conf.write(fp)
    return test_conf_file, swift_conf_file
def _load_s3api(proxy_conf_file, swift_conf_file, **kwargs):
    """
    Load s3api configuration and override proxy-server.conf contents.

    :param proxy_conf_file: Source proxy conf filename
    :param swift_conf_file: Source swift conf filename
    :returns: Tuple of paths to the proxy conf file and swift conf file to use
    :raises InProcessException: raised if proxy conf contents are invalid
    """
    _debug('Setting configuration for s3api')

    # The global conf dict cannot be used to modify the pipeline.
    # The pipeline loader requires the pipeline to be set in the local_conf.
    # If pipeline is set in the global conf dict (which in turn populates the
    # DEFAULTS options) then it prevents pipeline being loaded into the local
    # conf during wsgi load_app.
    # Therefore we must modify the [pipeline:main] section.
    conf = ConfigParser()
    conf.read(proxy_conf_file)
    try:
        section = 'pipeline:main'
        old_pipeline = conf.get(section, 'pipeline')
        pipeline = old_pipeline.replace(
            "tempauth",
            "s3api tempauth")
        # fail loudly rather than silently running without s3api, matching
        # the check done by the other pipeline-modifying loaders
        if pipeline == old_pipeline:
            raise InProcessException(
                "Failed to insert s3api into pipeline: %s" % old_pipeline)
        conf.set(section, 'pipeline', pipeline)
        conf.set('filter:s3api', 's3_acl', 'true')
        conf.set('filter:versioned_writes', 'allow_object_versioning',
                 'true')
    except NoSectionError as err:
        msg = 'Error problem with proxy conf file %s: %s' % \
            (proxy_conf_file, err)
        raise InProcessException(msg)

    test_conf_file = os.path.join(_testdir, 'proxy-server.conf')
    with open(test_conf_file, 'w') as fp:
        conf.write(fp)

    return test_conf_file, swift_conf_file
# Mapping from possible values of the variable
# SWIFT_TEST_IN_PROCESS_CONF_LOADER
# to the method to call for loading the associated configuration
# The expected signature for these methods is:
# conf_filename_to_use loader(input_conf_filename, **kwargs)
# A selected loader is applied by in_process_setup() after the s3api and
# domain_remap/staticweb loaders have already run.
conf_loaders = {
    'encryption': _load_encryption,
    'ec': _load_ec_as_default_policy,
}
def in_process_setup(the_object_server=object_server):
    """
    Spin up a complete in-process Swift cluster for the functional tests:
    prepare conf files and a temp dir with fake devices, write account and
    container rings, start eventlet wsgi servers for the proxy, account,
    container and object services, and create the test accounts.

    :param the_object_server: module providing the ObjectController class
        used to serve objects (e.g. the in-memory object server)
    :raises InProcessException: if conf preparation or app loading fails
    """
    _info('IN-PROCESS SERVERS IN USE FOR FUNCTIONAL TESTS')
    _info('Using object_server class: %s' % the_object_server.__name__)
    conf_src_dir = os.environ.get('SWIFT_TEST_IN_PROCESS_CONF_DIR')
    show_debug_logs = os.environ.get('SWIFT_TEST_DEBUG_LOGS')

    if conf_src_dir is not None:
        if not os.path.isdir(conf_src_dir):
            msg = 'Config source %s is not a dir' % conf_src_dir
            raise InProcessException(msg)
        _info('Using config source dir: %s' % conf_src_dir)

    # If SWIFT_TEST_IN_PROCESS_CONF specifies a config source dir then
    # prefer config files from there, otherwise read config from source tree
    # sample files. A mixture of files from the two sources is allowed.
    proxy_conf = _in_process_find_conf_file(conf_src_dir, 'proxy-server.conf')
    _info('Using proxy config from %s' % proxy_conf)
    swift_conf_src = _in_process_find_conf_file(conf_src_dir, 'swift.conf')
    _info('Using swift config from %s' % swift_conf_src)

    global _testdir
    _testdir = os.path.join(mkdtemp(), 'tmp_functional')
    utils.mkdirs(_testdir)
    # mkdirs then rmtree guarantees an empty, freshly-created test dir
    rmtree(_testdir)
    utils.mkdirs(os.path.join(_testdir, 'sda1'))
    utils.mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
    utils.mkdirs(os.path.join(_testdir, 'sdb1'))
    utils.mkdirs(os.path.join(_testdir, 'sdb1', 'tmp'))
    utils.mkdirs(os.path.join(_testdir, 'sdc1'))
    utils.mkdirs(os.path.join(_testdir, 'sdc1', 'tmp'))

    swift_conf = _in_process_setup_swift_conf(swift_conf_src, _testdir)
    _info('prepared swift.conf: %s' % swift_conf)

    # load s3api and staticweb configs
    proxy_conf, swift_conf = _load_s3api(proxy_conf, swift_conf)
    proxy_conf, swift_conf = _load_domain_remap_staticweb(proxy_conf,
                                                          swift_conf)

    # Call the associated method for the value of
    # 'SWIFT_TEST_IN_PROCESS_CONF_LOADER', if one exists
    conf_loader_label = os.environ.get(
        'SWIFT_TEST_IN_PROCESS_CONF_LOADER')
    if conf_loader_label is not None:
        try:
            conf_loader = conf_loaders[conf_loader_label]
            _debug('Calling method %s mapped to conf loader %s' %
                   (conf_loader.__name__, conf_loader_label))
        except KeyError as missing_key:
            raise InProcessException('No function mapped for conf loader %s' %
                                     missing_key)

        try:
            # Pass-in proxy_conf, swift_conf files
            proxy_conf, swift_conf = conf_loader(proxy_conf, swift_conf)
            _debug('Now using proxy conf %s' % proxy_conf)
            _debug('Now using swift conf %s' % swift_conf)
        except Exception as err:  # noqa
            raise InProcessException(err)

    obj_sockets = _in_process_setup_ring(swift_conf, conf_src_dir, _testdir)

    # load new swift.conf file
    if set_swift_dir(os.path.dirname(swift_conf)):
        constraints.reload_constraints()
        storage_policy.reload_storage_policies()

    global config
    if constraints.SWIFT_CONSTRAINTS_LOADED:
        # Use the swift constraints that are loaded for the test framework
        # configuration
        _c = dict((k, str(v))
                  for k, v in constraints.EFFECTIVE_CONSTRAINTS.items())
        config.update(_c)
    else:
        # In-process swift constraints were not loaded, somethings wrong
        raise SkipTest

    global _test_socks
    _test_socks = []
    # We create the proxy server listening socket to get its port number so
    # that we can add it as the "auth_port" value for the functional test
    # clients.
    prolis = listen_zero()
    _test_socks.append(prolis)

    # The following set of configuration values is used both for the
    # functional test frame work and for the various proxy, account, container
    # and object servers.
    config.update({
        # Values needed by the various in-process swift servers
        'devices': _testdir,
        'swift_dir': _testdir,
        'mount_check': 'false',
        'client_timeout': '4',
        'container_update_timeout': '3',
        'allow_account_management': 'true',
        'account_autocreate': 'true',
        'allow_versions': 'True',
        'allow_versioned_writes': 'True',
        # Below are values used by the functional test framework, as well as
        # by the various in-process swift servers
        'auth_uri': 'http://127.0.0.1:%d/auth/v1.0/' % prolis.getsockname()[1],
        's3_storage_url': 'http://%s:%d/' % prolis.getsockname(),
        # Primary functional test account (needs admin access to the
        # account)
        'account': 'test',
        'username': 'tester',
        'password': 'testing',
        's3_access_key': 'test:tester',
        's3_secret_key': 'testing',
        # Secondary user of the primary test account (needs admin access
        # to the account) for s3api
        's3_access_key2': 'test:tester2',
        's3_secret_key2': 'testing2',
        # User on a second account (needs admin access to the account)
        'account2': 'test2',
        'username2': 'tester2',
        'password2': 'testing2',
        # User on same account as first, but without admin access
        'username3': 'tester3',
        'password3': 'testing3',
        's3_access_key3': 'test:tester3',
        's3_secret_key3': 'testing3',
        # Service user and prefix (emulates glance, cinder, etc. user)
        'account5': 'test5',
        'username5': 'tester5',
        'password5': 'testing5',
        'service_prefix': 'SERVICE',
        # For tempauth middleware. Update reseller_prefix
        'reseller_prefix': 'AUTH, SERVICE',
        'SERVICE_require_group': 'service',
        # Reseller admin user (needs reseller_admin_role)
        'account6': 'test6',
        'username6': 'tester6',
        'password6': 'testing6'
    })

    # Listening sockets for two account and two container server replicas.
    acc1lis = listen_zero()
    acc2lis = listen_zero()
    con1lis = listen_zero()
    con2lis = listen_zero()
    _test_socks += [acc1lis, acc2lis, con1lis, con2lis] + obj_sockets

    # Write 2-replica account/container rings pointing at the ports the
    # in-process servers will listen on.
    account_ring_path = os.path.join(_testdir, 'account.ring.gz')
    with closing(GzipFile(account_ring_path, 'wb')) as f:
        pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
                    [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
                      'port': acc1lis.getsockname()[1]},
                     {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
                      'port': acc2lis.getsockname()[1]}], 30),
                    f)
    container_ring_path = os.path.join(_testdir, 'container.ring.gz')
    with closing(GzipFile(container_ring_path, 'wb')) as f:
        pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
                    [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
                      'port': con1lis.getsockname()[1]},
                     {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
                      'port': con2lis.getsockname()[1]}], 30),
                    f)

    def get_logger_name(name):
        # returns a debug logger only when SWIFT_TEST_DEBUG_LOGS is set
        if show_debug_logs:
            return debug_logger(name)
        else:
            return None

    acc1srv = account_server.AccountController(
        config, logger=get_logger_name('acct1'))
    acc2srv = account_server.AccountController(
        config, logger=get_logger_name('acct2'))
    con1srv = container_server.ContainerController(
        config, logger=get_logger_name('cont1'))
    con2srv = container_server.ContainerController(
        config, logger=get_logger_name('cont2'))

    objsrvs = [
        (obj_sockets[index],
         the_object_server.ObjectController(
             config, logger=get_logger_name('obj%d' % (index + 1))))
        for index in range(len(obj_sockets))
    ]

    if show_debug_logs:
        logger = get_logger_name('proxy')
    else:
        logger = utils.get_logger(config, 'wsgi-server', log_route='wsgi')

    def get_logger(name, *args, **kwargs):
        # all components share the single proxy logger while loading the app
        return logger

    with mock.patch('swift.common.utils.get_logger', get_logger):
        with mock.patch('swift.common.middleware.memcache.MemcacheMiddleware',
                        FakeMemcacheMiddleware):
            try:
                app = loadapp(proxy_conf, global_conf=config)
            except Exception as e:
                raise InProcessException(e)

    nl = utils.NullLogger()
    global proxy_srv
    proxy_srv = prolis
    prospa = eventlet.spawn(eventlet.wsgi.server, prolis, app, nl,
                            protocol=SwiftHttpProtocol)
    acc1spa = eventlet.spawn(eventlet.wsgi.server, acc1lis, acc1srv, nl,
                             protocol=SwiftHttpProtocol)
    acc2spa = eventlet.spawn(eventlet.wsgi.server, acc2lis, acc2srv, nl,
                             protocol=SwiftHttpProtocol)
    con1spa = eventlet.spawn(eventlet.wsgi.server, con1lis, con1srv, nl,
                             protocol=SwiftHttpProtocol)
    con2spa = eventlet.spawn(eventlet.wsgi.server, con2lis, con2srv, nl,
                             protocol=SwiftHttpProtocol)

    objspa = [eventlet.spawn(eventlet.wsgi.server, objsrv[0], objsrv[1], nl,
                             protocol=SwiftHttpProtocol)
              for objsrv in objsrvs]

    global _test_coros
    _test_coros = \
        (prospa, acc1spa, acc2spa, con1spa, con2spa) + tuple(objspa)

    # Create accounts "test" and "test2"
    def create_account(act):
        ts = utils.normalize_timestamp(time())
        account_ring = Ring(_testdir, ring_name='account')
        partition, nodes = account_ring.get_nodes(act)
        for node in nodes:
            # Note: we are just using the http_connect method in the object
            # controller here to talk to the account server nodes.
            conn = swift.proxy.controllers.obj.http_connect(
                node['ip'], node['port'], node['device'], partition, 'PUT',
                '/' + act, {'X-Timestamp': ts, 'x-trans-id': act})
            resp = conn.getresponse()
            assert resp.status == 201, 'Unable to create account: %s\n%s' % (
                resp.status, resp.read())

    create_account('AUTH_test')
    create_account('AUTH_test2')
# Cache of the cluster's /info response, shared by all tests in the run.
cluster_info = {}


def get_cluster_info():
    """
    Populate the module-level ``cluster_info`` dict from the cluster's
    /info API, falling back on effective constraints and then the
    swift-constraints section of test.conf.
    """
    # The fallback constraints used for testing will come from the current
    # effective constraints.
    eff_constraints = dict(constraints.EFFECTIVE_CONSTRAINTS)

    # We'll update those constraints based on what the /info API provides, if
    # anything.
    global cluster_info
    global config
    try:
        conn = Connection(config)
        conn.authenticate()
        cluster_info.update(conn.cluster_info())
    except (ResponseError, socket.error, SkipTest):
        # Failed to get cluster_information via /info API, so fall back on
        # test.conf data
        pass
    else:
        try:
            eff_constraints.update(cluster_info['swift'])
        except KeyError:
            # Most likely the swift cluster has "expose_info = false" set
            # in its proxy-server.conf file, so we'll just do the best we
            # can.
            print("** Swift Cluster not exposing /info **", file=sys.stderr)

    # Finally, we'll allow any constraint present in the swift-constraints
    # section of test.conf to override everything. Note that only those
    # constraints defined in the constraints module are converted to integers.
    test_constraints = get_config('swift-constraints')
    for k in constraints.DEFAULT_CONSTRAINTS:
        try:
            test_constraints[k] = int(test_constraints[k])
        except KeyError:
            pass
        except ValueError:
            print("Invalid constraint value: %s = %s" % (
                k, test_constraints[k]), file=sys.stderr)
    eff_constraints.update(test_constraints)

    # Just make it look like these constraints were loaded from a /info call,
    # even if the /info call failed, or when they are overridden by values
    # from the swift-constraints section of test.conf
    cluster_info['swift'] = eff_constraints
def setup_package():
    """
    One-time initialisation of the functional test framework: decide
    between in-process and externally-configured cluster, load test.conf,
    resolve auth endpoints and test users for the configured auth version,
    and compute the module-level skip flags.
    """
    global policy_specified
    global skip_if_no_xattrs
    policy_specified = os.environ.get('SWIFT_TEST_POLICY')
    in_process_env = os.environ.get('SWIFT_TEST_IN_PROCESS')
    if in_process_env is not None:
        use_in_process = utils.config_true_value(in_process_env)
    else:
        use_in_process = None

    global in_process

    global config
    if use_in_process:
        # Explicitly set to True, so barrel on ahead with in-process
        # functional test setup.
        in_process = True
        # NOTE: No attempt is made to a read local test.conf file.
    else:
        if use_in_process is None:
            # Not explicitly set, default to using in-process functional tests
            # if the test.conf file is not found, or does not provide a usable
            # configuration.
            config.update(get_config('func_test'))
            if not config:
                in_process = True
            # else... leave in_process value unchanged. It may be that
            # setup_package is called twice, in which case in_process_setup may
            # have loaded config before we reach here a second time, so the
            # existence of config is not reliable to determine that in_process
            # should be False. Anyway, it's default value is False.
        else:
            # Explicitly set to False, do not attempt to use in-process
            # functional tests, be sure we attempt to read from local
            # test.conf file.
            in_process = False
            config.update(get_config('func_test'))

    if in_process:
        in_mem_obj_env = os.environ.get('SWIFT_TEST_IN_MEMORY_OBJ')
        in_mem_obj = utils.config_true_value(in_mem_obj_env)
        skip_if_no_xattrs = real_skip_if_no_xattrs
        try:
            in_process_setup(the_object_server=(
                mem_object_server if in_mem_obj else object_server))
        except InProcessException as exc:
            print(('Exception during in-process setup: %s'
                   % str(exc)), file=sys.stderr)
            raise
    else:
        # external cluster: xattr support is the cluster's problem, not ours
        skip_if_no_xattrs = lambda: None

    global web_front_end
    web_front_end = config.get('web_front_end', 'integral')
    global normalized_urls
    normalized_urls = config.get('normalized_urls', False)

    global orig_collate
    orig_collate = locale.setlocale(locale.LC_COLLATE)
    locale.setlocale(locale.LC_COLLATE, config.get('collate', 'C'))

    global insecure
    insecure = config_true_value(config.get('insecure', False))
    global swift_test_auth_version
    global swift_test_auth
    global swift_test_user
    global swift_test_key
    global swift_test_tenant
    global swift_test_perm
    global swift_test_domain
    global swift_test_service_prefix

    swift_test_service_prefix = None

    if config:
        swift_test_auth_version = str(config.get('auth_version', '1'))

        if 'auth_uri' in config:
            swift_test_auth = config['auth_uri']
            # Back-fill the individual parts -- really, we should just need
            # host and port for s3_test_client, and that's only until we
            # improve it to take a s3_storage_url option
            parsed = urlsplit(config['auth_uri'])
            config.update({
                'auth_ssl': str(parsed.scheme == 'https'),
                'auth_host': parsed.hostname,
                'auth_port': str(
                    parsed.port if parsed.port is not None else
                    443 if parsed.scheme == 'https' else 80),
                'auth_prefix': parsed.path,
            })
            config.setdefault('s3_storage_url',
                              urlunsplit(parsed[:2] + ('', None, None)))
        elif 'auth_host' in config:
            # no auth_uri: assemble one from the individual parts
            scheme = 'http'
            if config_true_value(config.get('auth_ssl', 'no')):
                scheme = 'https'
            netloc = config['auth_host']
            if 'auth_port' in config:
                netloc += ':' + config['auth_port']
            auth_prefix = config.get('auth_prefix', '/')
            if swift_test_auth_version == "1":
                auth_prefix += 'v1.0'
            config['auth_uri'] = swift_test_auth = urlunsplit(
                (scheme, netloc, auth_prefix, None, None))
            config.setdefault('s3_storage_url', urlunsplit(
                (scheme, netloc, '', None, None)))
        # else, neither auth_uri nor auth_host; swift_test_auth will be unset
        # and we'll skip everything later

        if 'service_prefix' in config:
            swift_test_service_prefix = utils.append_underscore(
                config['service_prefix'])

        if swift_test_auth_version == "1":
            try:
                if 'account' in config:
                    swift_test_user[0] = '%(account)s:%(username)s' % config
                else:
                    swift_test_user[0] = '%(username)s' % config
                swift_test_key[0] = config['password']
            except KeyError:
                # bad config, no account/username configured, tests cannot be
                # run
                pass
            try:
                swift_test_user[1] = '%s%s' % (
                    '%s:' % config['account2'] if 'account2' in config else '',
                    config['username2'])
                swift_test_key[1] = config['password2']
            except KeyError:
                pass  # old config, no second account tests can be run
            try:
                swift_test_user[2] = '%s%s' % (
                    '%s:' % config['account'] if 'account'
                    in config else '', config['username3'])
                swift_test_key[2] = config['password3']
            except KeyError:
                pass  # old config, no third account tests can be run
            try:
                swift_test_user[4] = '%s%s' % (
                    '%s:' % config['account5'], config['username5'])
                swift_test_key[4] = config['password5']
                swift_test_tenant[4] = config['account5']
            except KeyError:
                pass  # no service token tests can be run

            for _ in range(3):
                swift_test_perm[_] = swift_test_user[_]

        else:
            # auth versions other than 1: users/tenants come from separate
            # config keys, with optional per-user domains
            swift_test_user[0] = config['username']
            swift_test_tenant[0] = config['account']
            swift_test_key[0] = config['password']
            if 'domain' in config:
                swift_test_domain[0] = config['domain']
            swift_test_user[1] = config['username2']
            swift_test_tenant[1] = config['account2']
            swift_test_key[1] = config['password2']
            if 'domain2' in config:
                swift_test_domain[1] = config['domain2']
            swift_test_user[2] = config['username3']
            swift_test_tenant[2] = config['account']
            swift_test_key[2] = config['password3']
            if 'domain3' in config:
                swift_test_domain[2] = config['domain3']

            if 'username4' in config:
                swift_test_user[3] = config['username4']
                swift_test_tenant[3] = config['account4']
                swift_test_key[3] = config['password4']
                swift_test_domain[3] = config['domain4']
            if 'username5' in config:
                swift_test_user[4] = config['username5']
                swift_test_tenant[4] = config['account5']
                swift_test_key[4] = config['password5']
                if 'domain5' in config:
                    swift_test_domain[4] = config['domain5']
            if 'username6' in config:
                swift_test_user[5] = config['username6']
                swift_test_tenant[5] = config['account6']
                swift_test_key[5] = config['password6']
                if 'domain6' in config:
                    swift_test_domain[5] = config['domain6']

            for _ in range(5):
                swift_test_perm[_] = swift_test_tenant[_] + ':' \
                    + swift_test_user[_]

    global skip
    if not skip:
        skip = not all([swift_test_auth, swift_test_user[0],
                        swift_test_key[0]])
        if skip:
            print('SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG',
                  file=sys.stderr)

    global skip2
    if not skip2:
        skip2 = not all([not skip, swift_test_user[1], swift_test_key[1]])
        if not skip and skip2:
            print('SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS '
                  'DUE TO NO CONFIG FOR THEM', file=sys.stderr)

    global skip3
    if not skip3:
        skip3 = not all([not skip, swift_test_user[2], swift_test_key[2]])
        if not skip and skip3:
            print('SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS '
                  'DUE TO NO CONFIG FOR THEM', file=sys.stderr)

    global skip_if_not_v3
    if not skip_if_not_v3:
        skip_if_not_v3 = (swift_test_auth_version != '3'
                          or not all([not skip,
                                      swift_test_user[3],
                                      swift_test_key[3]]))
        if not skip and skip_if_not_v3:
            print('SKIPPING FUNCTIONAL TESTS SPECIFIC TO AUTH VERSION 3',
                  file=sys.stderr)

    global skip_service_tokens
    if not skip_service_tokens:
        skip_service_tokens = not all([not skip, swift_test_user[4],
                                       swift_test_key[4], swift_test_tenant[4],
                                       swift_test_service_prefix])
        if not skip and skip_service_tokens:
            print(
                'SKIPPING FUNCTIONAL TESTS SPECIFIC TO SERVICE TOKENS',
                file=sys.stderr)

    if policy_specified:
        policies = FunctionalStoragePolicyCollection.from_info()
        for p in policies:
            # policy names are case-insensitive
            if policy_specified.lower() == p['name'].lower():
                _info('Using specified policy %s' % policy_specified)
                FunctionalStoragePolicyCollection.policy_specified = p
                Container.policy_specified = policy_specified
                break
        else:
            _info(
                'SKIPPING FUNCTIONAL TESTS: Failed to find specified policy %s'
                % policy_specified)
            raise Exception('Failed to find specified policy %s'
                            % policy_specified)

    global skip_if_no_reseller_admin
    if not skip_if_no_reseller_admin:
        skip_if_no_reseller_admin = not all([not skip, swift_test_user[5],
                                             swift_test_key[5],
                                             swift_test_tenant[5]])
        if not skip and skip_if_no_reseller_admin:
            print('SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG FOR '
                  'RESELLER ADMIN', file=sys.stderr)

    get_cluster_info()
def teardown_package():
    """
    Undo setup_package(): restore the saved locale, delete containers left
    behind by the tests, stop any in-process servers and remove their temp
    dir, then reset the cached module globals.
    """
    global orig_collate
    locale.setlocale(locale.LC_COLLATE, orig_collate)

    # clean up containers and objects left behind after running tests
    global config

    if config:
        try:
            conn = Connection(config)
            conn.authenticate()
            account = Account(conn, config.get('account', config['username']))
            account.delete_containers()
        except (SkipTest):
            pass

    global in_process
    global _test_socks
    if in_process:
        try:
            for i, server in enumerate(_test_coros):
                server.kill()
                if not server.dead:
                    # kill it from the socket level
                    _test_socks[i].close()
        except Exception:
            # best-effort shutdown; servers may already be gone
            pass

        try:
            rmtree(os.path.dirname(_testdir))
        except Exception:
            pass

    reset_globals()
class AuthError(Exception):
    """Raised by check_response() on a 401; retry() re-authenticates."""
    pass
class InternalServerError(Exception):
    """Raised by check_response() on a 5xx; retry() simply tries again."""
    pass
# Per-account caches of auth/connection state, indexed by 0-based account
# number (retry()'s 1-based use_account/url_account minus one).
url = [None, None, None, None, None]
token = [None, None, None, None, None]
service_token = [None, None, None, None, None]
parsed = [None, None, None, None, None]
conn = [None, None, None, None, None]
def reset_globals():
    """Reset the cached per-account auth/connection state and test config."""
    global url, token, service_token, parsed, conn, config
    # give each cache its own fresh five-slot list
    url, token, service_token, parsed, conn = (
        [None] * 5 for _ in range(5))
    if config:
        config = {}
def connection(url):
    """
    Open an HTTP connection to *url*, wrapping its ``request`` method so
    that the test run's storage policy header is injected on container
    PUTs when ``policy_specified`` is set.

    :param url: storage URL to connect to
    :returns: tuple of (parsed url, connection object)
    """
    if has_insecure:
        parsed_url, http_conn = http_connection(url, insecure=insecure)
    else:
        parsed_url, http_conn = http_connection(url)

    orig_request = http_conn.request

    # Add the policy header if policy_specified is set
    def request_with_policy(method, url, body=None, headers=None):
        # Copy before modifying: the original version used a mutable default
        # argument ({}) and mutated it in place, which both polluted the
        # shared default dict across calls and clobbered the caller's dict.
        headers = dict(headers) if headers else {}
        version, account, container, obj = split_path(url, 1, 4, True)
        if policy_specified and method == 'PUT' and container and not obj \
                and 'X-Storage-Policy' not in headers:
            headers['X-Storage-Policy'] = policy_specified
        return orig_request(method, url, body, headers)

    http_conn.request = request_with_policy

    return parsed_url, http_conn
def get_url_token(user_index, os_options):
    """
    Authenticate test user *user_index* and return its storage URL and
    auth token (as bytes on py2).
    """
    auth_kwargs = {
        'snet': False,
        'tenant_name': swift_test_tenant[user_index],
        'auth_version': swift_test_auth_version,
        'os_options': os_options,
        'insecure': insecure,
    }
    url, token = get_auth(swift_test_auth,
                          swift_test_user[user_index],
                          swift_test_key[user_index],
                          **auth_kwargs)
    if six.PY2:
        # normalize any unicode returned by swiftclient to bytes
        if not isinstance(url, bytes):
            url = url.encode('utf-8')
        if not isinstance(token, bytes):
            token = token.encode('utf-8')
    return url, token
def retry(func, *args, **kwargs):
    """
    You can use the kwargs to override:
      'retries' (default: 5)
      'use_account' (default: 1) - which user's token to pass
      'url_account' (default: matches 'use_account') - which user's storage URL
      'resource' (default: url[url_account] - URL to connect to; retry()
          will interpolate the variable :storage_url: if present
      'service_user' - add a service token from this user (1 indexed)
    """
    global url, token, service_token, parsed, conn
    retries = kwargs.get('retries', 5)
    attempts, backoff = 0, 1

    # use account #1 by default; turn user's 1-indexed account into 0-indexed
    use_account = kwargs.pop('use_account', 1) - 1

    service_user = kwargs.pop('service_user', None)
    if service_user:
        service_user -= 1  # 0-index

    # access our own account by default
    url_account = kwargs.pop('url_account', use_account + 1) - 1
    os_options = {'user_domain_name': swift_test_domain[use_account],
                  'project_domain_name': swift_test_domain[use_account]}
    while attempts <= retries:
        auth_failure = False
        attempts += 1
        try:
            # (re)authenticate and (re)connect for use_account as needed
            if not url[use_account] or not token[use_account]:
                url[use_account], token[use_account] = get_url_token(
                    use_account, os_options)
                parsed[use_account] = conn[use_account] = None
            if not parsed[use_account] or not conn[use_account]:
                parsed[use_account], conn[use_account] = \
                    connection(url[use_account])

            # default resource is the account url[url_account]
            resource = kwargs.pop('resource', '%(storage_url)s')
            template_vars = {'storage_url': url[url_account]}
            parsed_result = urlparse(resource % template_vars)
            if isinstance(service_user, int):
                if not service_token[service_user]:
                    dummy, service_token[service_user] = get_url_token(
                        service_user, os_options)
                kwargs['service_token'] = service_token[service_user]
            # NOTE(review): conn[url_account] is passed here although only
            # conn[use_account] is (re)established above; when url_account
            # differs from use_account this appears to rely on a connection
            # cached by an earlier call -- TODO confirm
            return func(url[url_account], token[use_account],
                        parsed_result, conn[url_account],
                        *args, **kwargs)
        except (socket.error, HTTPException):
            # transport error: drop the cached connection and retry
            if attempts > retries:
                raise
            parsed[use_account] = conn[use_account] = None
            if service_user:
                service_token[service_user] = None
        except AuthError:
            # force re-auth on the next attempt, skipping the backoff sleep
            auth_failure = True
            url[use_account] = token[use_account] = None
            if service_user:
                service_token[service_user] = None
        except InternalServerError:
            pass
        if attempts <= retries:
            if not auth_failure:
                sleep(backoff)
            # exponential backoff between attempts
            backoff *= 2
    raise Exception('No result after %s retries.' % retries)
def check_response(conn):
    """
    Fetch the response from *conn*, translating auth failures and server
    errors into exceptions that retry() knows how to handle.

    :raises AuthError: on a 401 response
    :raises InternalServerError: on any 5xx response
    :returns: the response object for all other statuses
    """
    resp = conn.getresponse()
    status = resp.status
    if status == 401:
        # drain the body before raising so the connection stays usable
        resp.read()
        raise AuthError()
    if status // 100 == 5:
        resp.read()
        raise InternalServerError()
    return resp
def load_constraint(name):
    """
    Return the integer value of constraint *name* from the cached cluster
    /info data, skipping the calling test if it is absent or non-integer.
    """
    global cluster_info
    swift_info = cluster_info.get('swift', {})
    if name not in swift_info:
        raise SkipTest("Missing constraint: %s" % name)
    value = swift_info[name]
    if not isinstance(value, int):
        raise SkipTest("Bad value, %r, for constraint: %s" % (value, name))
    return value
def get_storage_policy_from_cluster_info(info):
    """
    Partition the storage policies advertised in cluster *info*.

    :param info: dict of cluster info (as returned by the /info API)
    :returns: tuple of (list of default policies, list of other policies)
    """
    default_policy = []
    non_default_policies = []
    for policy in info['swift'].get('policies', {}):
        bucket = default_policy if policy.get('default') else \
            non_default_policies
        bucket.append(policy)
    return default_policy, non_default_policies
def reset_acl():
    """Clear the account ACL on the primary test account."""
    def post(url, token, parsed, conn):
        headers = {'X-Auth-Token': token,
                   'X-Account-Access-Control': '{}'}
        conn.request('POST', parsed.path, '', headers)
        return check_response(conn)

    # drain the response so the pooled connection can be reused
    retry(post, use_account=1).read()
def requires_acls(f):
    """
    Decorator: skip the wrapped test unless the cluster supports account
    ACLs, and reset the account ACL before and after the test runs.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        global skip, cluster_info
        # Determine whether this cluster has account ACLs; the auth_version
        # condition can be removed once keystoneauth supports account acls.
        supported = (not skip and cluster_info and
                     cluster_info.get('tempauth', {}).get('account_acls') and
                     swift_test_auth_version == '1')
        if not supported:
            raise SkipTest('Requires account ACLs')
        reset_acl()
        try:
            rv = f(*args, **kwargs)
        finally:
            reset_acl()
        return rv
    return wrapper
class FunctionalStoragePolicyCollection(object):
    """A queryable collection of the storage policies a cluster exposes."""

    # policy_specified is set in __init__.py when tests are being set up.
    policy_specified = None

    def __init__(self, policies):
        self._all = policies
        defaults = [p for p in policies if p.get('default', False)]
        # at most one policy may be flagged as the default
        assert len(defaults) < 2, ('Found multiple default policies '
                                   '%r and %r' % (defaults[0], defaults[1]))
        self.default = defaults[0] if defaults else None

    @classmethod
    def from_info(cls, info=None):
        """Build a collection from /info data, fetching it if necessary."""
        if not (info or cluster_info):
            get_cluster_info()
        info = info or cluster_info
        try:
            policy_info = info['swift']['policies']
        except KeyError:
            raise AssertionError('Did not find any policy info in %r' % info)
        policies = cls(policy_info)
        assert policies.default, \
            'Did not find default policy in %r' % policy_info
        return policies

    def __len__(self):
        return len(self._all)

    def __iter__(self):
        return iter(self._all)

    def __getitem__(self, index):
        return self._all[index]

    def filter(self, **kwargs):
        """Return a new collection of policies matching all of *kwargs*."""
        matches = [p for p in self
                   if all(p.get(k) == v for k, v in kwargs.items())]
        return self.__class__(matches)

    def exclude(self, **kwargs):
        """Return a new collection of policies matching none of *kwargs*."""
        remainder = [p for p in self
                     if all(p.get(k) != v for k, v in kwargs.items())]
        return self.__class__(remainder)

    def select(self):
        # check that a policy was specified and that it is available
        # in the current list (i.e., hasn't been excluded of the current list)
        chosen = self.policy_specified
        if chosen and chosen in self:
            return chosen
        return random.choice(self)
def requires_policies(f):
    """
    Decorator: skip the wrapped test unless the cluster exposes at least
    two storage policies; on success, stash them on ``self.policies``.
    """
    @functools.wraps(f)
    def wrapper(self, *args, **kwargs):
        if skip:
            raise SkipTest
        try:
            policies = FunctionalStoragePolicyCollection.from_info()
        except AssertionError:
            # /info did not give us anything usable
            raise SkipTest("Unable to determine available policies")
        self.policies = policies
        if len(policies) < 2:
            raise SkipTest("Multiple policies not enabled")
        return f(self, *args, **kwargs)
    return wrapper
def requires_bulk(f):
    """Decorator: skip the wrapped test unless bulk middleware is enabled."""
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        # Determine whether this cluster has bulk middleware; if not, skip
        bulk_enabled = (not skip and cluster_info and
                        cluster_info.get('bulk_upload', {}))
        if not bulk_enabled:
            raise SkipTest('Requires bulk middleware')
        return f(*args, **kwargs)
    return wrapper
| swift-master | test/functional/__init__.py |
#!/usr/bin/python -u
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six.moves import urllib
from swift.common.swob import str_to_wsgi
import test.functional as tf
from test.functional.tests import Utils, Base, Base2, BaseEnv
from test.functional.swift_test_client import Connection, ResponseError
def setUpModule():
    """Initialise the functional test framework before this module's tests."""
    tf.setup_package()
def tearDownModule():
    """Tear down the functional test framework after this module's tests."""
    tf.teardown_package()
class TestDloEnv(BaseEnv):
    """
    Shared fixture for DLO tests: two containers of 10-byte segment
    objects under a random prefix, plus several manifest objects whose
    X-Object-Manifest headers point at different segment groups.
    """
    @classmethod
    def setUp(cls):
        super(TestDloEnv, cls).setUp()

        cls.container = cls.account.container(Utils.create_name())
        cls.container2 = cls.account.container(Utils.create_name())

        for cont in (cls.container, cls.container2):
            if not cont.create():
                raise ResponseError(cls.conn.response)

        prefix = Utils.create_name(10)
        cls.segment_prefix = prefix

        # lower-case and (percent-encoded-name) upper-case segments in the
        # first container: each is ten copies of a single letter
        for letter in ('a', 'b', 'c', 'd', 'e'):
            file_item = cls.container.file("%s/seg_lower%s" % (prefix, letter))
            file_item.write(letter.encode('ascii') * 10)

            file_item = cls.container.file(
                "%s/seg_upper_%%ff%s" % (prefix, letter))
            file_item.write(letter.upper().encode('ascii') * 10)

        # additional lower-case segments living in the second container
        for letter in ('f', 'g', 'h', 'i', 'j'):
            file_item = cls.container2.file("%s/seg_lower%s" %
                                            (prefix, letter))
            file_item.write(letter.encode('ascii') * 10)

        man1 = cls.container.file("man1")
        man1.write(b'man1-contents',
                   hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
                         (cls.container.name, prefix)})

        man2 = cls.container.file("man2")
        man2.write(b'man2-contents',
                   hdrs={"X-Object-Manifest": "%s/%s/seg_upper_%%25ff" %
                         (cls.container.name, prefix)})

        # matches both the lower- and upper-case segment groups
        manall = cls.container.file("manall")
        manall.write(b'manall-contents',
                     hdrs={"X-Object-Manifest": "%s/%s/seg" %
                           (cls.container.name, prefix)})

        # manifest in the first container pointing at segments in the second
        mancont2 = cls.container.file("mancont2")
        mancont2.write(
            b'mancont2-contents',
            hdrs={"X-Object-Manifest": "%s/%s/seg_lower" %
                  (cls.container2.name, prefix)})
class TestDlo(Base):
env = TestDloEnv
def test_get_manifest(self):
    """GETting a manifest returns the concatenated segments, in order."""
    lower = b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee"
    upper = b"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE"
    # manall's prefix matches both segment groups, so it yields both
    for name, expected in (('man1', lower),
                           ('man2', upper),
                           ('manall', lower + upper)):
        file_item = self.env.container.file(name)
        self.assertEqual(file_item.read(), expected)
def test_get_manifest_document_itself(self):
    """multipart-manifest=get returns the manifest object, not the DLO."""
    manifest = self.env.container.file('man1')
    body = manifest.read(parms={'multipart-manifest': 'get'})
    self.assertEqual(body, b"man1-contents")
    expected_prefix = "%s/%s/seg_lower" % (self.env.container.name,
                                           self.env.segment_prefix)
    self.assertEqual(manifest.info()['x_object_manifest'], expected_prefix)
def test_get_range(self):
    """Ranged GETs are served from the correct segments."""
    file_item = self.env.container.file('man1')
    # a range spanning several segment boundaries
    self.assertEqual(file_item.read(size=25, offset=8),
                     b"aabbbbbbbbbbccccccccccddd")
    # a single byte from the final segment
    self.assertEqual(file_item.read(size=1, offset=47), b"e")
def test_get_multiple_ranges(self):
    """Multi-range requests against a DLO return the whole object."""
    file_item = self.env.container.file('man1')
    body = file_item.read(hdrs={'Range': 'bytes=0-4,10-14'})
    # expect a 200 with the full body, *not* a 206
    self.assert_status(200)
    self.assertEqual(
        body, b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee")
def test_get_range_out_of_range(self):
    """A range beyond the end of the 50-byte DLO yields a 416."""
    file_item = self.env.container.file('man1')
    self.assertRaises(ResponseError, file_item.read, size=7, offset=50)
    self.assert_status(416)
    self.assert_header('content-range', 'bytes */50')
def test_copy(self):
# Adding a new segment, copying the manifest, and then deleting the
# segment proves that the new object is really the concatenated
# segments and not just a manifest.
f_segment = self.env.container.file("%s/seg_lowerf" %
(self.env.segment_prefix))
f_segment.write(b'ffffffffff')
try:
man1_item = self.env.container.file('man1')
man1_item.copy(self.env.container.name, "copied-man1")
finally:
# try not to leave this around for other tests to stumble over
f_segment.delete(tolerate_missing=True)
file_item = self.env.container.file('copied-man1')
file_contents = file_item.read()
self.assertEqual(
file_contents,
b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
# The copied object must not have X-Object-Manifest
self.assertNotIn("x_object_manifest", file_item.info())
def test_copy_account(self):
# dlo use same account and same container only
acct = urllib.parse.unquote(self.env.conn.account_name)
# Adding a new segment, copying the manifest, and then deleting the
# segment proves that the new object is really the concatenated
# segments and not just a manifest.
f_segment = self.env.container.file("%s/seg_lowerf" %
(self.env.segment_prefix))
f_segment.write(b'ffffffffff')
try:
man1_item = self.env.container.file('man1')
man1_item.copy_account(acct,
self.env.container.name,
"copied-man1")
finally:
# try not to leave this around for other tests to stumble over
f_segment.delete(tolerate_missing=True)
file_item = self.env.container.file('copied-man1')
file_contents = file_item.read()
self.assertEqual(
file_contents,
b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
# The copied object must not have X-Object-Manifest
self.assertNotIn("x_object_manifest", file_item.info())
def test_copy_manifest(self):
# Copying the manifest with multipart-manifest=get query string
# should result in another manifest
try:
man1_item = self.env.container.file('man1')
man1_item.copy(self.env.container.name, "copied-man1",
parms={'multipart-manifest': 'get'})
copied = self.env.container.file("copied-man1")
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
self.assertEqual(copied_contents, b"man1-contents")
copied_contents = copied.read()
self.assertEqual(
copied_contents,
b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee")
self.assertEqual(man1_item.info()['x_object_manifest'],
copied.info()['x_object_manifest'])
finally:
# try not to leave this around for other tests to stumble over
self.env.container.file("copied-man1").delete(
tolerate_missing=True)
def test_dlo_if_match_get(self):
manifest = self.env.container.file("man1")
etag = manifest.info()['etag']
self.assertRaises(ResponseError, manifest.read,
hdrs={'If-Match': 'not-%s' % etag})
self.assert_status(412)
manifest.read(hdrs={'If-Match': etag})
self.assert_status(200)
def test_dlo_if_none_match_get(self):
manifest = self.env.container.file("man1")
etag = manifest.info()['etag']
self.assertRaises(ResponseError, manifest.read,
hdrs={'If-None-Match': etag})
self.assert_status(304)
manifest.read(hdrs={'If-None-Match': "not-%s" % etag})
self.assert_status(200)
def test_dlo_if_match_head(self):
manifest = self.env.container.file("man1")
etag = manifest.info()['etag']
self.assertRaises(ResponseError, manifest.info,
hdrs={'If-Match': 'not-%s' % etag})
self.assert_status(412)
manifest.info(hdrs={'If-Match': etag})
self.assert_status(200)
def test_dlo_if_none_match_head(self):
manifest = self.env.container.file("man1")
etag = manifest.info()['etag']
self.assertRaises(ResponseError, manifest.info,
hdrs={'If-None-Match': etag})
self.assert_status(304)
manifest.info(hdrs={'If-None-Match': "not-%s" % etag})
self.assert_status(200)
def test_dlo_referer_on_segment_container(self):
if 'username3' not in tf.config:
self.skipTest('Requires user 3')
# First the account2 (test3) should fail
config2 = tf.config.copy()
config2['username'] = tf.config['username3']
config2['password'] = tf.config['password3']
conn2 = Connection(config2)
conn2.authenticate()
headers = {'X-Auth-Token': conn2.storage_token,
'Referer': 'http://blah.example.com'}
dlo_file = self.env.container.file("mancont2")
self.assertRaises(ResponseError, dlo_file.read,
hdrs=headers)
self.assert_status(403)
# Now set the referer on the dlo container only
referer_metadata = {'X-Container-Read': '.r:*.example.com,.rlistings'}
self.env.container.update_metadata(referer_metadata)
self.assertRaises(ResponseError, dlo_file.read,
hdrs=headers)
self.assert_status(403)
# Finally set the referer on the segment container
self.env.container2.update_metadata(referer_metadata)
contents = dlo_file.read(hdrs=headers)
self.assertEqual(
contents,
b"ffffffffffgggggggggghhhhhhhhhhiiiiiiiiiijjjjjjjjjj")
def test_dlo_post_with_manifest_header(self):
# verify that performing a POST to a DLO manifest
# preserves the fact that it is a manifest file.
# verify that the x-object-manifest header may be updated.
# create a new manifest for this test to avoid test coupling.
x_o_m = self.env.container.file('man1').info()['x_object_manifest']
file_item = self.env.container.file(Utils.create_name())
file_item.write(b'manifest-contents',
hdrs={"X-Object-Manifest": x_o_m})
# sanity checks
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual(b'manifest-contents', manifest_contents)
expected_contents = ''.join((c * 10) for c in 'abcde').encode('ascii')
contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents)
# POST a modified x-object-manifest value
new_x_o_m = x_o_m.rstrip('lower') + 'upper'
file_item.post({'x-object-meta-foo': 'bar',
'x-object-manifest': new_x_o_m})
# verify that x-object-manifest was updated
file_item.info()
resp_headers = [(h.lower(), v)
for h, v in file_item.conn.response.getheaders()]
self.assertIn(('x-object-manifest', str_to_wsgi(new_x_o_m)),
resp_headers)
self.assertIn(('x-object-meta-foo', 'bar'), resp_headers)
# verify that manifest content was not changed
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual(b'manifest-contents', manifest_contents)
# verify that updated manifest points to new content
expected_contents = ''.join((c * 10) for c in 'ABCDE').encode('ascii')
contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents)
# Now revert the manifest to point to original segments, including a
# multipart-manifest=get param just to check that has no effect
file_item.post({'x-object-manifest': x_o_m},
parms={'multipart-manifest': 'get'})
# verify that x-object-manifest was reverted
info = file_item.info()
self.assertIn('x_object_manifest', info)
self.assertEqual(x_o_m, info['x_object_manifest'])
# verify that manifest content was not changed
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual(b'manifest-contents', manifest_contents)
# verify that updated manifest points new content
expected_contents = ''.join((c * 10) for c in 'abcde').encode('ascii')
contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents)
def test_dlo_post_without_manifest_header(self):
# verify that a POST to a DLO manifest object with no
# x-object-manifest header will cause the existing x-object-manifest
# header to be lost
# create a new manifest for this test to avoid test coupling.
x_o_m = self.env.container.file('man1').info()['x_object_manifest']
file_item = self.env.container.file(Utils.create_name())
file_item.write(b'manifest-contents',
hdrs={"X-Object-Manifest": x_o_m})
# sanity checks
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual(b'manifest-contents', manifest_contents)
expected_contents = ''.join((c * 10) for c in 'abcde').encode('ascii')
contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents)
# POST with no x-object-manifest header
file_item.post({})
# verify that existing x-object-manifest was removed
info = file_item.info()
self.assertNotIn('x_object_manifest', info)
# verify that object content was not changed
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual(b'manifest-contents', manifest_contents)
# verify that object is no longer a manifest
contents = file_item.read(parms={})
self.assertEqual(b'manifest-contents', contents)
def test_dlo_post_with_manifest_regular_object(self):
# verify that performing a POST to a regular object
# with a manifest header will create a DLO.
# Put a regular object
file_item = self.env.container.file(Utils.create_name())
file_item.write(b'file contents', hdrs={})
# sanity checks
file_contents = file_item.read(parms={})
self.assertEqual(b'file contents', file_contents)
# get the path associated with man1
x_o_m = self.env.container.file('man1').info()['x_object_manifest']
# POST a x-object-manifest value to the regular object
file_item.post({'x-object-manifest': x_o_m})
# verify that the file is now a manifest
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual(b'file contents', manifest_contents)
expected_contents = ''.join([(c * 10) for c in 'abcde']).encode()
contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents)
file_item.info()
resp_headers = [(h.lower(), v)
for h, v in file_item.conn.response.getheaders()]
self.assertIn(('x-object-manifest', str_to_wsgi(x_o_m)), resp_headers)
class TestDloUTF8(Base2, TestDlo):
    # Re-runs every TestDlo test with UTF-8 resource names (via Base2).
    pass
# swift-master | test/functional/test_dlo.py
#!/usr/bin/python -u
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import functools
import hmac
import hashlib
import json
from copy import deepcopy
import six
from six.moves import urllib
from time import time, strftime, gmtime
from unittest import SkipTest
import test.functional as tf
from swift.common.middleware import tempurl
from test.functional import cluster_info
from test.functional.tests import Utils, Base, Base2, BaseEnv
from test.functional import requires_acls
from test.functional.swift_test_client import Account, Connection, \
ResponseError
def setUpModule():
    """Module-level setup: initialise the functional-test package."""
    tf.setup_package()
def tearDownModule():
    """Module-level teardown: release functional-test package resources."""
    tf.teardown_package()
class TestTempurlBaseEnv(BaseEnv):
    """Shared temp URL environment.

    Snapshots the account metadata during setUp so tearDown can put the
    account's temp-url keys back the way they were found.
    """
    # snapshot of account metadata taken in setUp; None until then
    original_account_meta = None
    @classmethod
    def setUp(cls):
        super(TestTempurlBaseEnv, cls).setUp()
        cls.original_account_meta = cls.account.info()
    @classmethod
    def tearDown(cls):
        if cls.original_account_meta:
            # restore any tempurl keys that the tests may have overwritten
            restored = {}
            for meta_key in ('temp-url-key', 'temp-url-key-2'):
                restored[meta_key] = cls.original_account_meta.get(
                    meta_key, '')
            cls.account.update_metadata(restored)
class TestTempurlEnv(TestTempurlBaseEnv):
    """Environment for account-keyed temp URL tests.

    Sets two account temp-url keys and creates one container holding two
    objects with known contents.
    """
    tempurl_enabled = None  # tri-state: None initially, then True/False
    @classmethod
    def setUp(cls):
        if cls.tempurl_enabled is None:
            cls.tempurl_enabled = 'tempurl' in cluster_info
            if not cls.tempurl_enabled:
                # cluster has no tempurl middleware; tests will SkipTest
                return
        super(TestTempurlEnv, cls).setUp()
        cls.tempurl_key = Utils.create_name()
        cls.tempurl_key2 = Utils.create_name()
        cls.account.update_metadata({
            'temp-url-key': cls.tempurl_key,
            'temp-url-key-2': cls.tempurl_key2
        })
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        cls.obj = cls.container.file(Utils.create_name())
        cls.obj.write(b"obj contents")
        cls.other_obj = cls.container.file(Utils.create_name())
        cls.other_obj.write(b"other obj contents")
class TestTempurl(Base):
    """Tests for temp URLs signed with the *account* temp-url keys."""
    env = TestTempurlEnv
    digest_name = 'sha256'
    def setUp(self):
        super(TestTempurl, self).setUp()
        if self.env.tempurl_enabled is False:
            raise SkipTest("TempURL not enabled")
        elif self.env.tempurl_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected tempurl_enabled to be True/False, got %r" %
                (self.env.tempurl_enabled,))
        # N.B. The default to 'sha1' in case the info has nothing is for
        # extremely old clusters, which presumably use SHA1.
        if self.digest_name not in cluster_info['tempurl'].get(
                'allowed_digests', ['sha1']):
            raise SkipTest("tempurl does not support %s signatures" %
                           self.digest_name)
        self.digest = getattr(hashlib, self.digest_name)
        self.expires = int(time()) + 86400
        self.expires_8601 = strftime(
            tempurl.EXPIRES_ISO8601_FORMAT, gmtime(self.expires))
        self.obj_tempurl_parms = self.tempurl_parms(
            'GET', self.expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key)
    def tempurl_parms(self, method, expires, path, key):
        """Build temp URL query parameters for *path*.

        The signature is an HMAC over method, expires and path joined by
        newlines, keyed with *key* and using ``self.digest``.
        """
        path = urllib.parse.unquote(path)
        if not six.PY2:
            method = method.encode('utf8')
            path = path.encode('utf8')
            key = key.encode('utf8')
        sig = hmac.new(
            key,
            b'%s\n%d\n%s' % (method, expires, path),
            self.digest).hexdigest()
        return {'temp_url_sig': sig, 'temp_url_expires': str(expires)}
    def test_GET(self):
        # both integer-epoch and ISO-8601 expiry formats are accepted
        for e in (str(self.expires), self.expires_8601):
            self.obj_tempurl_parms['temp_url_expires'] = e
            contents = self.env.obj.read(
                parms=self.obj_tempurl_parms,
                cfg={'no_auth_token': True})
            self.assertEqual(contents, b"obj contents")
            # GET tempurls also allow HEAD requests
            self.assertTrue(self.env.obj.info(parms=self.obj_tempurl_parms,
                                              cfg={'no_auth_token': True}))
    def test_GET_with_key_2(self):
        # the secondary account key signs valid temp URLs too
        expires = int(time()) + 86400
        parms = self.tempurl_parms(
            'GET', expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key2)
        contents = self.env.obj.read(parms=parms, cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
    def test_GET_DLO_inside_container(self):
        seg1 = self.env.container.file(
            "get-dlo-inside-seg1" + Utils.create_name())
        seg2 = self.env.container.file(
            "get-dlo-inside-seg2" + Utils.create_name())
        seg1.write(b"one fish two fish ")
        seg2.write(b"red fish blue fish")
        manifest = self.env.container.file("manifest" + Utils.create_name())
        manifest.write(
            b'',
            hdrs={"X-Object-Manifest": "%s/get-dlo-inside-seg" %
                  (self.env.container.name,)})
        expires = int(time()) + 86400
        parms = self.tempurl_parms(
            'GET', expires, self.env.conn.make_path(manifest.path),
            self.env.tempurl_key)
        contents = manifest.read(parms=parms, cfg={'no_auth_token': True})
        self.assertEqual(contents, b"one fish two fish red fish blue fish")
    def test_GET_DLO_outside_container(self):
        seg1 = self.env.container.file(
            "get-dlo-outside-seg1" + Utils.create_name())
        seg2 = self.env.container.file(
            "get-dlo-outside-seg2" + Utils.create_name())
        seg1.write(b"one fish two fish ")
        seg2.write(b"red fish blue fish")
        container2 = self.env.account.container(Utils.create_name())
        container2.create()
        manifest = container2.file("manifest" + Utils.create_name())
        manifest.write(
            b'',
            hdrs={"X-Object-Manifest": "%s/get-dlo-outside-seg" %
                  (self.env.container.name,)})
        expires = int(time()) + 86400
        parms = self.tempurl_parms(
            'GET', expires, self.env.conn.make_path(manifest.path),
            self.env.tempurl_key)
        # cross container tempurl works fine for account tempurl key
        contents = manifest.read(parms=parms, cfg={'no_auth_token': True})
        self.assertEqual(contents, b"one fish two fish red fish blue fish")
        self.assert_status([200])
    def test_PUT(self):
        new_obj = self.env.container.file(Utils.create_name())
        expires = int(time()) + 86400
        expires_8601 = strftime(
            tempurl.EXPIRES_ISO8601_FORMAT, gmtime(expires))
        put_parms = self.tempurl_parms(
            'PUT', expires, self.env.conn.make_path(new_obj.path),
            self.env.tempurl_key)
        for e in (str(expires), expires_8601):
            put_parms['temp_url_expires'] = e
            new_obj.write(b'new obj contents',
                          parms=put_parms, cfg={'no_auth_token': True})
            self.assertEqual(new_obj.read(), b"new obj contents")
            # PUT tempurls also allow HEAD requests
            self.assertTrue(new_obj.info(parms=put_parms,
                                         cfg={'no_auth_token': True}))
    def test_PUT_manifest_access(self):
        # creating an X-Object-Manifest via a tempurl must be rejected (400)
        new_obj = self.env.container.file(Utils.create_name())
        # give out a signature which allows a PUT to new_obj
        expires = int(time()) + 86400
        put_parms = self.tempurl_parms(
            'PUT', expires, self.env.conn.make_path(new_obj.path),
            self.env.tempurl_key)
        # try to create manifest pointing to some random container
        try:
            new_obj.write(b'', {
                'x-object-manifest': '%s/foo' % 'some_random_container'
            }, parms=put_parms, cfg={'no_auth_token': True})
        except ResponseError as e:
            self.assertEqual(e.status, 400)
        else:
            self.fail('request did not error')
        # create some other container
        other_container = self.env.account.container(Utils.create_name())
        if not other_container.create():
            raise ResponseError(self.conn.response)
        # try to create manifest pointing to new container
        try:
            new_obj.write(b'', {
                'x-object-manifest': '%s/foo' % other_container
            }, parms=put_parms, cfg={'no_auth_token': True})
        except ResponseError as e:
            self.assertEqual(e.status, 400)
        else:
            self.fail('request did not error')
        # try again using a tempurl POST to an already created object
        new_obj.write(b'', {}, parms=put_parms, cfg={'no_auth_token': True})
        expires = int(time()) + 86400
        post_parms = self.tempurl_parms(
            'POST', expires, self.env.conn.make_path(new_obj.path),
            self.env.tempurl_key)
        try:
            new_obj.post({'x-object-manifest': '%s/foo' % other_container},
                         parms=post_parms, cfg={'no_auth_token': True})
        except ResponseError as e:
            self.assertEqual(e.status, 400)
        else:
            self.fail('request did not error')
    def test_HEAD(self):
        expires = int(time()) + 86400
        head_parms = self.tempurl_parms(
            'HEAD', expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key)
        self.assertTrue(self.env.obj.info(parms=head_parms,
                                          cfg={'no_auth_token': True}))
        # HEAD tempurls don't allow PUT or GET requests, despite the fact that
        # PUT and GET tempurls both allow HEAD requests
        self.assertRaises(ResponseError, self.env.other_obj.read,
                          cfg={'no_auth_token': True},
                          parms=head_parms)
        self.assert_status([401])
        self.assertRaises(ResponseError, self.env.other_obj.write,
                          b'new contents',
                          cfg={'no_auth_token': True},
                          parms=head_parms)
        self.assert_status([401])
    def test_different_object(self):
        # a signature is bound to one object path only
        contents = self.env.obj.read(
            parms=self.obj_tempurl_parms,
            cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
        self.assertRaises(ResponseError, self.env.other_obj.read,
                          cfg={'no_auth_token': True},
                          parms=self.obj_tempurl_parms)
        self.assert_status([401])
    def test_changing_sig(self):
        # flipping one character of the signature invalidates it
        contents = self.env.obj.read(
            parms=self.obj_tempurl_parms,
            cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
        parms = self.obj_tempurl_parms.copy()
        if parms['temp_url_sig'][0] == 'a':
            parms['temp_url_sig'] = 'b' + parms['temp_url_sig'][1:]
        else:
            parms['temp_url_sig'] = 'a' + parms['temp_url_sig'][1:]
        self.assertRaises(ResponseError, self.env.obj.read,
                          cfg={'no_auth_token': True},
                          parms=parms)
        self.assert_status([401])
    def test_changing_expires(self):
        # tampering with the expiry invalidates the signature
        contents = self.env.obj.read(
            parms=self.obj_tempurl_parms,
            cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
        parms = self.obj_tempurl_parms.copy()
        if parms['temp_url_expires'][-1] == '0':
            parms['temp_url_expires'] = parms['temp_url_expires'][:-1] + '1'
        else:
            parms['temp_url_expires'] = parms['temp_url_expires'][:-1] + '0'
        self.assertRaises(ResponseError, self.env.obj.read,
                          cfg={'no_auth_token': True},
                          parms=parms)
        self.assert_status([401])
class TestTempurlPrefix(TestTempurl):
    """Prefix-scoped temp URLs: the signature covers an object-name
    prefix ("prefix:..." in the HMAC payload) instead of a full path."""
    def tempurl_parms(self, method, expires, path, key,
                      prefix=None):
        """Build prefix-scoped temp URL query parameters for *path*.

        When *prefix* is None the first 4 characters of the object name
        are used as the prefix.
        """
        path_parts = urllib.parse.unquote(path).split('/')
        if prefix is None:
            # Choose the first 4 chars of object name as prefix.
            if six.PY2:
                prefix = path_parts[4].decode('utf8')[:4].encode('utf8')
            else:
                prefix = path_parts[4][:4]
        prefix_to_hash = '/'.join(path_parts[0:4]) + '/' + prefix
        if not six.PY2:
            method = method.encode('utf8')
            prefix_to_hash = prefix_to_hash.encode('utf8')
            key = key.encode('utf8')
        sig = hmac.new(
            key,
            b'%s\n%d\nprefix:%s' % (method, expires, prefix_to_hash),
            self.digest).hexdigest()
        return {
            'temp_url_sig': sig, 'temp_url_expires': str(expires),
            'temp_url_prefix': prefix}
    def test_empty_prefix(self):
        # an empty prefix matches every object in the container
        parms = self.tempurl_parms(
            'GET', self.expires,
            self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key, '')
        contents = self.env.obj.read(
            parms=parms,
            cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
    def test_no_prefix_match(self):
        # a prefix that doesn't match the object name is rejected
        prefix = 'b' if self.env.obj.name[0] == 'a' else 'a'
        parms = self.tempurl_parms(
            'GET', self.expires,
            self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key, prefix)
        self.assertRaises(ResponseError, self.env.obj.read,
                          cfg={'no_auth_token': True},
                          parms=parms)
        self.assert_status([401])
    def test_object_url_with_prefix(self):
        # a whole-path signature plus a temp_url_prefix parm must fail
        parms = super(TestTempurlPrefix, self).tempurl_parms(
            'GET', self.expires,
            self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key)
        parms['temp_url_prefix'] = self.env.obj.name
        self.assertRaises(ResponseError, self.env.obj.read,
                          cfg={'no_auth_token': True},
                          parms=parms)
        self.assert_status([401])
    def test_missing_query_parm(self):
        # a prefix signature without temp_url_prefix in the query fails
        del self.obj_tempurl_parms['temp_url_prefix']
        self.assertRaises(ResponseError, self.env.obj.read,
                          cfg={'no_auth_token': True},
                          parms=self.obj_tempurl_parms)
        self.assert_status([401])
class TestTempurlUTF8(Base2, TestTempurl):
    # Re-runs TestTempurl with UTF-8 resource names (via Base2).
    pass
class TestTempurlPrefixUTF8(Base2, TestTempurlPrefix):
    # Re-runs TestTempurlPrefix with UTF-8 resource names (via Base2).
    pass
class TestContainerTempurlEnv(BaseEnv):
    """Environment for container-keyed temp URL tests.

    Stores the temp-url keys as container metadata and, when a second
    account is configured, grants it read access for ACL visibility
    checks.
    """
    tempurl_enabled = None  # tri-state: None initially, then True/False
    @classmethod
    def setUp(cls):
        if cls.tempurl_enabled is None:
            cls.tempurl_enabled = 'tempurl' in cluster_info
            if not cls.tempurl_enabled:
                # cluster has no tempurl middleware; tests will SkipTest
                return
        super(TestContainerTempurlEnv, cls).setUp()
        cls.tempurl_key = Utils.create_name()
        cls.tempurl_key2 = Utils.create_name()
        if not tf.skip2:
            # creating another account and connection
            # for ACL tests
            config2 = deepcopy(tf.config)
            config2['account'] = tf.config['account2']
            config2['username'] = tf.config['username2']
            config2['password'] = tf.config['password2']
            cls.conn2 = Connection(config2)
            cls.conn2.authenticate()
            cls.account2 = Account(
                cls.conn2, config2.get('account', config2['username']))
            cls.account2 = cls.conn2.get_account()
        cls.container = cls.account.container(Utils.create_name())
        if not tf.skip2:
            if not cls.container.create({
                    'x-container-meta-temp-url-key': cls.tempurl_key,
                    'x-container-meta-temp-url-key-2': cls.tempurl_key2,
                    'x-container-read': cls.account2.name}):
                raise ResponseError(cls.conn.response)
        else:
            if not cls.container.create({
                    'x-container-meta-temp-url-key': cls.tempurl_key,
                    'x-container-meta-temp-url-key-2': cls.tempurl_key2}):
                raise ResponseError(cls.conn.response)
        cls.obj = cls.container.file(Utils.create_name())
        cls.obj.write(b"obj contents")
        cls.other_obj = cls.container.file(Utils.create_name())
        cls.other_obj.write(b"other obj contents")
class TestContainerTempurl(Base):
    """Tests for temp URLs signed with *container* temp-url keys."""
    env = TestContainerTempurlEnv
    digest_name = 'sha256'
    def setUp(self):
        super(TestContainerTempurl, self).setUp()
        if self.env.tempurl_enabled is False:
            raise SkipTest("TempURL not enabled")
        elif self.env.tempurl_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected tempurl_enabled to be True/False, got %r" %
                (self.env.tempurl_enabled,))
        if self.digest_name not in cluster_info['tempurl'].get(
                'allowed_digests', ['sha1']):
            raise SkipTest("tempurl does not support %s signatures" %
                           self.digest_name)
        self.digest = getattr(hashlib, self.digest_name)
        expires = int(time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key)
        self.obj_tempurl_parms = {'temp_url_sig': sig,
                                  'temp_url_expires': str(expires)}
    def tempurl_sig(self, method, expires, path, key):
        """Return the hex HMAC signature for a temp URL of *path*.

        The HMAC payload is method, expires and path joined by newlines,
        keyed with *key* and hashed with ``self.digest``.
        """
        path = urllib.parse.unquote(path)
        if not six.PY2:
            method = method.encode('utf8')
            path = path.encode('utf8')
            key = key.encode('utf8')
        # NOTE: a leftover debug print() of (key, method, expires, path)
        # was removed here -- it leaked the secret temp-url key to stdout
        # on every signature computation.
        return hmac.new(
            key,
            b'%s\n%d\n%s' % (method, expires, path),
            self.digest).hexdigest()
    def test_GET(self):
        contents = self.env.obj.read(
            parms=self.obj_tempurl_parms,
            cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
        # GET tempurls also allow HEAD requests
        self.assertTrue(self.env.obj.info(parms=self.obj_tempurl_parms,
                                          cfg={'no_auth_token': True}))
    def test_GET_with_key_2(self):
        # the secondary container key signs valid temp URLs too
        expires = int(time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key2)
        parms = {'temp_url_sig': sig,
                 'temp_url_expires': str(expires)}
        contents = self.env.obj.read(parms=parms, cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
    def test_PUT(self):
        new_obj = self.env.container.file(Utils.create_name())
        expires = int(time()) + 86400
        sig = self.tempurl_sig(
            'PUT', expires, self.env.conn.make_path(new_obj.path),
            self.env.tempurl_key)
        put_parms = {'temp_url_sig': sig,
                     'temp_url_expires': str(expires)}
        new_obj.write(b'new obj contents',
                      parms=put_parms, cfg={'no_auth_token': True})
        self.assertEqual(new_obj.read(), b"new obj contents")
        # PUT tempurls also allow HEAD requests
        self.assertTrue(new_obj.info(parms=put_parms,
                                     cfg={'no_auth_token': True}))
    def test_HEAD(self):
        expires = int(time()) + 86400
        sig = self.tempurl_sig(
            'HEAD', expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key)
        head_parms = {'temp_url_sig': sig,
                      'temp_url_expires': str(expires)}
        self.assertTrue(self.env.obj.info(parms=head_parms,
                                          cfg={'no_auth_token': True}))
        # HEAD tempurls don't allow PUT or GET requests, despite the fact that
        # PUT and GET tempurls both allow HEAD requests
        self.assertRaises(ResponseError, self.env.other_obj.read,
                          cfg={'no_auth_token': True},
                          parms=self.obj_tempurl_parms)
        self.assert_status([401])
        self.assertRaises(ResponseError, self.env.other_obj.write,
                          b'new contents',
                          cfg={'no_auth_token': True},
                          parms=self.obj_tempurl_parms)
        self.assert_status([401])
    def test_different_object(self):
        # a signature is bound to one object path only
        contents = self.env.obj.read(
            parms=self.obj_tempurl_parms,
            cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
        self.assertRaises(ResponseError, self.env.other_obj.read,
                          cfg={'no_auth_token': True},
                          parms=self.obj_tempurl_parms)
        self.assert_status([401])
    def test_changing_sig(self):
        # flipping one character of the signature invalidates it
        contents = self.env.obj.read(
            parms=self.obj_tempurl_parms,
            cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
        parms = self.obj_tempurl_parms.copy()
        if parms['temp_url_sig'][0] == 'a':
            parms['temp_url_sig'] = 'b' + parms['temp_url_sig'][1:]
        else:
            parms['temp_url_sig'] = 'a' + parms['temp_url_sig'][1:]
        self.assertRaises(ResponseError, self.env.obj.read,
                          cfg={'no_auth_token': True},
                          parms=parms)
        self.assert_status([401])
    def test_changing_expires(self):
        # tampering with the expiry invalidates the signature
        contents = self.env.obj.read(
            parms=self.obj_tempurl_parms,
            cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
        parms = self.obj_tempurl_parms.copy()
        if parms['temp_url_expires'][-1] == '0':
            parms['temp_url_expires'] = parms['temp_url_expires'][:-1] + '1'
        else:
            parms['temp_url_expires'] = parms['temp_url_expires'][:-1] + '0'
        self.assertRaises(ResponseError, self.env.obj.read,
                          cfg={'no_auth_token': True},
                          parms=parms)
        self.assert_status([401])
    @requires_acls
    def test_tempurl_keys_visible_to_account_owner(self):
        metadata = self.env.container.info()
        self.assertEqual(metadata.get('tempurl_key'), self.env.tempurl_key)
        self.assertEqual(metadata.get('tempurl_key2'), self.env.tempurl_key2)
    @requires_acls
    def test_tempurl_keys_hidden_from_acl_readonly(self):
        if tf.skip2:
            raise SkipTest('Account2 not set')
        metadata = self.env.container.info(cfg={
            'use_token': self.env.conn2.storage_token})
        self.assertNotIn(
            'tempurl_key', metadata,
            'Container TempURL key found, should not be visible '
            'to readonly ACLs')
        self.assertNotIn(
            'tempurl_key2', metadata,
            'Container TempURL key-2 found, should not be visible '
            'to readonly ACLs')
    def test_GET_DLO_inside_container(self):
        seg1 = self.env.container.file(
            "get-dlo-inside-seg1" + Utils.create_name())
        seg2 = self.env.container.file(
            "get-dlo-inside-seg2" + Utils.create_name())
        seg1.write(b"one fish two fish ")
        seg2.write(b"red fish blue fish")
        manifest = self.env.container.file("manifest" + Utils.create_name())
        manifest.write(
            b'',
            hdrs={"X-Object-Manifest": "%s/get-dlo-inside-seg" %
                  (self.env.container.name,)})
        expires = int(time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(manifest.path),
            self.env.tempurl_key)
        parms = {'temp_url_sig': sig,
                 'temp_url_expires': str(expires)}
        contents = manifest.read(parms=parms, cfg={'no_auth_token': True})
        self.assertEqual(contents, b"one fish two fish red fish blue fish")
    def test_GET_DLO_outside_container(self):
        container2 = self.env.account.container(Utils.create_name())
        container2.create()
        seg1 = container2.file(
            "get-dlo-outside-seg1" + Utils.create_name())
        seg2 = container2.file(
            "get-dlo-outside-seg2" + Utils.create_name())
        seg1.write(b"one fish two fish ")
        seg2.write(b"red fish blue fish")
        manifest = self.env.container.file("manifest" + Utils.create_name())
        manifest.write(
            b'',
            hdrs={"X-Object-Manifest": "%s/get-dlo-outside-seg" %
                  (container2.name,)})
        expires = int(time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(manifest.path),
            self.env.tempurl_key)
        parms = {'temp_url_sig': sig,
                 'temp_url_expires': str(expires)}
        # cross container tempurl does not work for container tempurl key
        try:
            manifest.read(parms=parms, cfg={'no_auth_token': True})
        except ResponseError as e:
            self.assertEqual(e.status, 401)
        else:
            self.fail('request did not error')
        try:
            manifest.info(parms=parms, cfg={'no_auth_token': True})
        except ResponseError as e:
            self.assertEqual(e.status, 401)
        else:
            self.fail('request did not error')
class TestContainerTempurlUTF8(Base2, TestContainerTempurl):
    # Re-runs TestContainerTempurl with UTF-8 resource names (via Base2).
    pass
class TestSloTempurlEnv(TestTempurlBaseEnv):
    """Environment for SLO-via-temp-URL tests: uploads two 1 MiB segments
    and a static large object manifest referencing them."""
    enabled = None  # tri-state: None initially, then True/False
    @classmethod
    def setUp(cls):
        super(TestSloTempurlEnv, cls).setUp()
        if cls.enabled is None:
            # both middlewares must be present for these tests to run
            cls.enabled = 'tempurl' in cluster_info and 'slo' in cluster_info
        cls.tempurl_key = Utils.create_name()
        cls.account.update_metadata({'temp-url-key': cls.tempurl_key})
        cls.manifest_container = cls.account.container(Utils.create_name())
        cls.segments_container = cls.account.container(Utils.create_name())
        if not cls.manifest_container.create():
            raise ResponseError(cls.conn.response)
        if not cls.segments_container.create():
            raise ResponseError(cls.conn.response)
        seg1 = cls.segments_container.file(Utils.create_name())
        seg1.write(b'1' * 1024 * 1024)
        seg2 = cls.segments_container.file(Utils.create_name())
        seg2.write(b'2' * 1024 * 1024)
        cls.manifest_data = [{'size_bytes': 1024 * 1024,
                              'etag': seg1.md5,
                              'path': '/%s/%s' % (cls.segments_container.name,
                                                  seg1.name)},
                             {'size_bytes': 1024 * 1024,
                              'etag': seg2.md5,
                              'path': '/%s/%s' % (cls.segments_container.name,
                                                  seg2.name)}]
        cls.manifest = cls.manifest_container.file(Utils.create_name())
        cls.manifest.write(
            json.dumps(cls.manifest_data).encode('ascii'),
            parms={'multipart-manifest': 'put'})
class TestSloTempurl(Base):
    """Tests that an SLO manifest can be fetched through a temp URL."""
    env = TestSloTempurlEnv
    digest_name = 'sha256'
    def setUp(self):
        super(TestSloTempurl, self).setUp()
        if self.env.enabled is False:
            raise SkipTest("TempURL and SLO not both enabled")
        elif self.env.enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected enabled to be True/False, got %r" %
                (self.env.enabled,))
        if self.digest_name not in cluster_info['tempurl'].get(
                'allowed_digests', ['sha1']):
            raise SkipTest("tempurl does not support %s signatures" %
                           self.digest_name)
        self.digest = getattr(hashlib, self.digest_name)
    def tempurl_sig(self, method, expires, path, key):
        """Return the hex HMAC signature for a temp URL of *path*."""
        path = urllib.parse.unquote(path)
        if not six.PY2:
            method = method.encode('utf8')
            path = path.encode('utf8')
            key = key.encode('utf8')
        return hmac.new(
            key,
            b'%s\n%d\n%s' % (method, expires, path),
            self.digest).hexdigest()
    def test_GET(self):
        # the temp URL GET returns the fully-expanded 2 MiB SLO body
        expires = int(time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(self.env.manifest.path),
            self.env.tempurl_key)
        parms = {'temp_url_sig': sig, 'temp_url_expires': str(expires)}
        contents = self.env.manifest.read(
            parms=parms,
            cfg={'no_auth_token': True})
        self.assertEqual(len(contents), 2 * 1024 * 1024)
        # GET tempurls also allow HEAD requests
        self.assertTrue(self.env.manifest.info(
            parms=parms, cfg={'no_auth_token': True}))
class TestSloTempurlUTF8(Base2, TestSloTempurl):
    """Repeat TestSloTempurl via the ``Base2`` mixin (UTF-8 names, per the
    class name; the mixin behavior is defined in test.functional.tests).
    """
    pass
def requires_digest(digest):
    """Decorator factory: skip the wrapped test unless the cluster's
    tempurl middleware advertises support for ``digest`` signatures.

    The check happens at call time (not decoration time) so that
    cluster_info is only consulted when the test actually runs.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            allowed = cluster_info['tempurl'].get('allowed_digests', ['sha1'])
            if digest in allowed:
                return func(*args, **kwargs)
            raise SkipTest("tempurl does not support %s signatures" % digest)
        return wrapper
    return decorator
class TestTempurlAlgorithms(Base):
    """Exercise tempurl signatures across digests (sha1/sha256/sha512)
    and signature encodings (hex plus three base64 variants).
    """
    env = TestTempurlEnv
    def setUp(self):
        """Skip unless the cluster reports tempurl support."""
        super(TestTempurlAlgorithms, self).setUp()
        if self.env.tempurl_enabled is False:
            raise SkipTest("TempURL not enabled")
        elif self.env.tempurl_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected tempurl_enabled to be True/False, got %r" %
                (self.env.tempurl_enabled,))
    def get_sig(self, expires, digest, encoding):
        """Compute a GET tempurl signature for self.env.obj and return it
        in the requested encoding: 'hex', 'base64', 'base64-no-padding',
        or 'url-safe-base64'. Non-hex encodings are prefixed with the
        digest name, e.g. 'sha256:<b64>'.
        """
        path = urllib.parse.unquote(self.env.conn.make_path(self.env.obj.path))
        if six.PY2:
            key = self.env.tempurl_key
        else:
            # hmac needs bytes on py3
            path = path.encode('utf8')
            key = self.env.tempurl_key.encode('utf8')
        sig = hmac.new(
            key,
            b'GET\n%d\n%s' % (expires, path),
            getattr(hashlib, digest))
        if encoding == 'hex':
            return sig.hexdigest()
        elif encoding == 'base64':
            return digest + ':' + base64.b64encode(
                sig.digest()).decode('ascii')
        elif encoding == 'base64-no-padding':
            # '=' only ever appears as trailing padding in base64 output
            return digest + ':' + base64.b64encode(
                sig.digest()).decode('ascii').strip('=')
        elif encoding == 'url-safe-base64':
            return digest + ':' + base64.urlsafe_b64encode(
                sig.digest()).decode('ascii')
        else:
            raise ValueError('Unrecognized encoding: %r' % encoding)
    def _do_test(self, digest, encoding):
        """GET the object through a tempurl signed with digest/encoding;
        also verify that a GET tempurl permits HEAD.
        """
        expires = int(time()) + 86400
        sig = self.get_sig(expires, digest, encoding)
        if encoding == 'url-safe-base64':
            # Make sure that we're actually testing url-safe-ness
            while '-' not in sig and '_' not in sig:
                expires += 1
                sig = self.get_sig(expires, digest, encoding)
        parms = {'temp_url_sig': sig, 'temp_url_expires': str(expires)}
        contents = self.env.obj.read(
            parms=parms,
            cfg={'no_auth_token': True})
        self.assertEqual(contents, b"obj contents")
        # GET tempurls also allow HEAD requests
        self.assertTrue(self.env.obj.info(
            parms=parms, cfg={'no_auth_token': True}))
    @requires_digest('sha1')
    def test_sha1(self):
        self._do_test('sha1', 'hex')
        self._do_test('sha1', 'base64')
        self._do_test('sha1', 'base64-no-padding')
        self._do_test('sha1', 'url-safe-base64')
    @requires_digest('sha256')
    def test_sha256(self):
        # apparently Cloud Files supports hex-encoded SHA-256
        # let's not break that just for the sake of being different
        self._do_test('sha256', 'hex')
        self._do_test('sha256', 'base64')
        self._do_test('sha256', 'base64-no-padding')
        self._do_test('sha256', 'url-safe-base64')
    @requires_digest('sha512')
    def test_sha512(self):
        self._do_test('sha512', 'hex')
        self._do_test('sha512', 'base64')
        self._do_test('sha512', 'base64-no-padding')
        self._do_test('sha512', 'url-safe-base64')
| swift-master | test/functional/test_tempurl.py |
#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from copy import deepcopy
import json
import time
import unittest
import six
from six.moves.urllib.parse import quote, unquote
from unittest import SkipTest
import test.functional as tf
from swift.common.utils import MD5_OF_EMPTY_STRING
from test.functional.tests import Base, Base2, BaseEnv, Utils
from test.functional import cluster_info
from test.functional.swift_test_client import Account, Connection, \
ResponseError
def setUpModule():
    """Module-level fixture: initialize the functional-test package."""
    tf.setup_package()
def tearDownModule():
    """Module-level fixture: clean up the functional-test package."""
    tf.teardown_package()
class TestObjectVersioningEnv(BaseEnv):
    """Environment: a container whose location header points at a
    versions container, plus secondary accounts for ACL tests.
    """
    versioning_enabled = None  # tri-state: None initially, then True/False
    # header used to enable versioning; subclasses override for history mode
    location_header_key = 'X-Versions-Location'
    account2 = None
    @classmethod
    def setUp(cls):
        """Create the versioned container pair and secondary accounts."""
        super(TestObjectVersioningEnv, cls).setUp()
        if not tf.skip2:
            # Second connection for ACL tests
            # NOTE(review): cls.conn2 is rebuilt further down with the full
            # account2 bootstrap; this early copy appears redundant except
            # on the early-return (412) path below -- confirm before
            # removing.
            config2 = deepcopy(tf.config)
            config2['account'] = tf.config['account2']
            config2['username'] = tf.config['username2']
            config2['password'] = tf.config['password2']
            cls.conn2 = Connection(config2)
            cls.conn2.authenticate()
        if six.PY2:
            # avoid getting a prefix that stops halfway through an encoded
            # character
            prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
        else:
            prefix = Utils.create_name()[:10]
        cls.versions_container = cls.account.container(prefix + "-versions")
        if not cls.versions_container.create():
            raise ResponseError(cls.conn.response)
        cls.container = cls.account.container(prefix + "-objs")
        container_headers = {
            cls.location_header_key: quote(cls.versions_container.name)}
        if not cls.container.create(hdrs=container_headers):
            # 412 here is treated as "versioning not enabled on the cluster"
            if cls.conn.response.status == 412:
                cls.versioning_enabled = False
                return
            raise ResponseError(cls.conn.response)
        container_info = cls.container.info()
        # if versioning is off, then cls.location_header_key won't persist
        cls.versioning_enabled = 'versions' in container_info
        if not tf.skip2:
            # setup another account to test ACLs
            config2 = deepcopy(tf.config)
            config2['account'] = tf.config['account2']
            config2['username'] = tf.config['username2']
            config2['password'] = tf.config['password2']
            cls.conn2 = Connection(config2)
            cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate()
            cls.account2 = cls.conn2.get_account()
            cls.account2.delete_containers()
        if not tf.skip3:
            # setup another account with no access to anything to test ACLs
            config3 = deepcopy(tf.config)
            config3['account'] = tf.config['account']
            config3['username'] = tf.config['username3']
            config3['password'] = tf.config['password3']
            cls.conn3 = Connection(config3)
            cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate()
            cls.account3 = cls.conn3.get_account()
    @classmethod
    def tearDown(cls):
        """Remove all containers created by the tests."""
        if cls.account:
            cls.account.delete_containers()
        if cls.account2:
            cls.account2.delete_containers()
class TestCrossPolicyObjectVersioningEnv(BaseEnv):
    """Environment with the source container and its versions container
    placed on two different storage policies.
    """
    # tri-state: None initially, then True/False
    versioning_enabled = None
    multiple_policies_enabled = None
    policies = None
    location_header_key = 'X-Versions-Location'
    account2 = None
    @classmethod
    def setUp(cls):
        """Create the container pair across two distinct policies."""
        super(TestCrossPolicyObjectVersioningEnv, cls).setUp()
        if cls.multiple_policies_enabled is None:
            try:
                cls.policies = tf.FunctionalStoragePolicyCollection.from_info()
            except AssertionError:
                pass
        if cls.policies and len(cls.policies) > 1:
            cls.multiple_policies_enabled = True
        else:
            cls.multiple_policies_enabled = False
            cls.versioning_enabled = True
            # We don't actually know the state of versioning, but without
            # multiple policies the tests should be skipped anyway. Claiming
            # versioning support lets us report the right reason for skipping.
            return
        policy = cls.policies.select()
        # pick a different policy for the versions container
        version_policy = cls.policies.exclude(name=policy['name']).select()
        if not tf.skip2:
            # Second connection for ACL tests
            # NOTE(review): cls.conn2 is rebuilt below with the full
            # account2 bootstrap; this early copy appears redundant except
            # on the early-return (412) path -- confirm before removing.
            config2 = deepcopy(tf.config)
            config2['account'] = tf.config['account2']
            config2['username'] = tf.config['username2']
            config2['password'] = tf.config['password2']
            cls.conn2 = Connection(config2)
            cls.conn2.authenticate()
        if six.PY2:
            # avoid getting a prefix that stops halfway through an encoded
            # character
            prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
        else:
            prefix = Utils.create_name()[:10]
        cls.versions_container = cls.account.container(prefix + "-versions")
        if not cls.versions_container.create(
                {'X-Storage-Policy': policy['name']}):
            raise ResponseError(cls.conn.response)
        cls.container = cls.account.container(prefix + "-objs")
        if not cls.container.create(
                hdrs={cls.location_header_key: cls.versions_container.name,
                      'X-Storage-Policy': version_policy['name']}):
            # 412 here is treated as "versioning not enabled on the cluster"
            if cls.conn.response.status == 412:
                cls.versioning_enabled = False
                return
            raise ResponseError(cls.conn.response)
        container_info = cls.container.info()
        # if versioning is off, then X-Versions-Location won't persist
        cls.versioning_enabled = 'versions' in container_info
        if not tf.skip2:
            # setup another account to test ACLs
            config2 = deepcopy(tf.config)
            config2['account'] = tf.config['account2']
            config2['username'] = tf.config['username2']
            config2['password'] = tf.config['password2']
            cls.conn2 = Connection(config2)
            cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate()
            cls.account2 = cls.conn2.get_account()
            cls.account2.delete_containers()
        if not tf.skip3:
            # setup another account with no access to anything to test ACLs
            config3 = deepcopy(tf.config)
            config3['account'] = tf.config['account']
            config3['username'] = tf.config['username3']
            config3['password'] = tf.config['password3']
            cls.conn3 = Connection(config3)
            cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate()
            cls.account3 = cls.conn3.get_account()
    @classmethod
    def tearDown(cls):
        """Remove all containers created by the tests."""
        if cls.account:
            cls.account.delete_containers()
        if cls.account2:
            cls.account2.delete_containers()
class TestObjectVersioningHistoryModeEnv(TestObjectVersioningEnv):
    """Same environment, but versioning enabled via X-History-Location."""
    location_header_key = 'X-History-Location'
class TestObjectVersioning(Base):
    """Functional tests for container-driven object versioning
    (X-Versions-Location mode; subclasses swap in history mode).
    """
    env = TestObjectVersioningEnv
    def setUp(self):
        """Skip when the cluster does not have versioning enabled."""
        super(TestObjectVersioning, self).setUp()
        if self.env.versioning_enabled is False:
            raise SkipTest("Object versioning not enabled")
        elif self.env.versioning_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected versioning_enabled to be True/False, got %r" %
                (self.env.versioning_enabled,))
    def _tear_down_files(self):
        """Best-effort removal of objects (not containers) between tests."""
        try:
            # only delete files and not containers
            # as they were configured in self.env
            # get rid of any versions so they aren't restored
            self.env.versions_container.delete_files()
            # get rid of originals
            self.env.container.delete_files()
            # in history mode, deleted originals got copied to versions, so
            # clear that again
            self.env.versions_container.delete_files()
        except ResponseError:
            pass
    def tearDown(self):
        super(TestObjectVersioning, self).tearDown()
        self._tear_down_files()
    def test_clear_version_option(self):
        """Setting an empty location header disables versioning."""
        # sanity
        header_val = quote(self.env.versions_container.name)
        self.assertEqual(self.env.container.info()['versions'], header_val)
        self.env.container.update_metadata(
            hdrs={self.env.location_header_key: ''})
        self.assertIsNone(self.env.container.info().get('versions'))
        # set location back to the way it was
        self.env.container.update_metadata(
            hdrs={self.env.location_header_key: header_val})
        self.assertEqual(self.env.container.info()['versions'], header_val)
    def _test_overwriting_setup(self, obj_name=None):
        """Overwrite one object via PUT, POST and COPY, verifying each
        prior version lands in the versions container.

        Returns (versioned_obj, expected_headers, expected_content_types)
        for the mode-specific follow-up assertions.
        """
        container = self.env.container
        versions_container = self.env.versions_container
        cont_info = container.info()
        self.assertEqual(cont_info['versions'], quote(versions_container.name))
        expected_content_types = []
        obj_name = obj_name or Utils.create_name()
        versioned_obj = container.file(obj_name)
        put_headers = {'Content-Type': 'text/jibberish01',
                       'Content-Encoding': 'gzip',
                       'Content-Disposition': 'attachment; filename=myfile'}
        versioned_obj.write(b"aaaaa", hdrs=put_headers)
        obj_info = versioned_obj.info()
        self.assertEqual('text/jibberish01', obj_info['content_type'])
        expected_content_types.append('text/jibberish01')
        # the allowed headers are configurable in object server, so we cannot
        # assert that content-encoding or content-disposition get *copied* to
        # the object version unless they were set on the original PUT, so
        # populate expected_headers by making a HEAD on the original object
        resp_headers = {
            h.lower(): v for h, v in versioned_obj.conn.response.getheaders()}
        expected_headers = {}
        for k, v in put_headers.items():
            if k.lower() in resp_headers:
                expected_headers[k] = v
        self.assertEqual(0, versions_container.info()['object_count'])
        versioned_obj.write(b"bbbbb", hdrs={'Content-Type': 'text/jibberish02',
                                            'X-Object-Meta-Foo': 'Bar'})
        versioned_obj.initialize()
        self.assertEqual(versioned_obj.content_type, 'text/jibberish02')
        expected_content_types.append('text/jibberish02')
        self.assertEqual(versioned_obj.metadata['foo'], 'Bar')
        # the old version got saved off
        self.assertEqual(1, versions_container.info()['object_count'])
        versioned_obj_name = versions_container.files()[0]
        prev_version = versions_container.file(versioned_obj_name)
        prev_version.initialize()
        self.assertEqual(b"aaaaa", prev_version.read())
        self.assertEqual(prev_version.content_type, 'text/jibberish01')
        resp_headers = {
            h.lower(): v for h, v in prev_version.conn.response.getheaders()}
        for k, v in expected_headers.items():
            self.assertIn(k.lower(), resp_headers)
            self.assertEqual(v, resp_headers[k.lower()])
        # make sure the new obj metadata did not leak to the prev. version
        self.assertNotIn('foo', prev_version.metadata)
        # check that POST does not create a new version
        versioned_obj.sync_metadata(metadata={'fu': 'baz'})
        self.assertEqual(1, versions_container.info()['object_count'])
        # if we overwrite it again, there are two versions
        versioned_obj.write(b"ccccc")
        self.assertEqual(2, versions_container.info()['object_count'])
        expected_content_types.append('text/jibberish02')
        versioned_obj_name = versions_container.files()[1]
        prev_version = versions_container.file(versioned_obj_name)
        prev_version.initialize()
        self.assertEqual(b"bbbbb", prev_version.read())
        self.assertEqual(prev_version.content_type, 'text/jibberish02')
        self.assertNotIn('foo', prev_version.metadata)
        self.assertIn('fu', prev_version.metadata)
        # versioned_obj keeps the newest content
        self.assertEqual(b"ccccc", versioned_obj.read())
        # test copy from a different container
        src_container = self.env.account.container(Utils.create_name())
        self.assertTrue(src_container.create())
        src_name = Utils.create_name()
        src_obj = src_container.file(src_name)
        src_obj.write(b"ddddd", hdrs={'Content-Type': 'text/jibberish04'})
        src_obj.copy(container.name, obj_name)
        self.assertEqual(b"ddddd", versioned_obj.read())
        versioned_obj.initialize()
        self.assertEqual(versioned_obj.content_type, 'text/jibberish04')
        expected_content_types.append('text/jibberish04')
        # make sure versions container has the previous version
        self.assertEqual(3, versions_container.info()['object_count'])
        versioned_obj_name = versions_container.files()[2]
        prev_version = versions_container.file(versioned_obj_name)
        prev_version.initialize()
        self.assertEqual(b"ccccc", prev_version.read())
        # for further use in the mode-specific tests
        return (versioned_obj, expected_headers, expected_content_types)
    def test_overwriting(self):
        """Each DELETE restores the next-newest version until none remain."""
        versions_container = self.env.versions_container
        versioned_obj, expected_headers, expected_content_types = \
            self._test_overwriting_setup()
        # pop one for the current version
        expected_content_types.pop()
        self.assertEqual(expected_content_types, [
            o['content_type'] for o in versions_container.files(
                parms={'format': 'json'})])
        # test delete
        versioned_obj.delete()
        self.assertEqual(b"ccccc", versioned_obj.read())
        expected_content_types.pop()
        self.assertEqual(expected_content_types, [
            o['content_type'] for o in versions_container.files(
                parms={'format': 'json'})])
        versioned_obj.delete()
        self.assertEqual(b"bbbbb", versioned_obj.read())
        expected_content_types.pop()
        self.assertEqual(expected_content_types, [
            o['content_type'] for o in versions_container.files(
                parms={'format': 'json'})])
        versioned_obj.delete()
        self.assertEqual(b"aaaaa", versioned_obj.read())
        self.assertEqual(0, versions_container.info()['object_count'])
        # verify that all the original object headers have been copied back
        obj_info = versioned_obj.info()
        self.assertEqual('text/jibberish01', obj_info['content_type'])
        resp_headers = {
            h.lower(): v for h, v in versioned_obj.conn.response.getheaders()}
        for k, v in expected_headers.items():
            self.assertIn(k.lower(), resp_headers)
            self.assertEqual(v, resp_headers[k.lower()])
        versioned_obj.delete()
        self.assertRaises(ResponseError, versioned_obj.read)
    def test_overwriting_with_url_encoded_object_name(self):
        """Same as test_overwriting, with a %-escape in the object name."""
        versions_container = self.env.versions_container
        obj_name = Utils.create_name() + '%25ff'
        versioned_obj, expected_headers, expected_content_types = \
            self._test_overwriting_setup(obj_name)
        # pop one for the current version
        expected_content_types.pop()
        self.assertEqual(expected_content_types, [
            o['content_type'] for o in versions_container.files(
                parms={'format': 'json'})])
        # test delete
        versioned_obj.delete()
        self.assertEqual(b"ccccc", versioned_obj.read())
        expected_content_types.pop()
        self.assertEqual(expected_content_types, [
            o['content_type'] for o in versions_container.files(
                parms={'format': 'json'})])
        versioned_obj.delete()
        self.assertEqual(b"bbbbb", versioned_obj.read())
        expected_content_types.pop()
        self.assertEqual(expected_content_types, [
            o['content_type'] for o in versions_container.files(
                parms={'format': 'json'})])
        versioned_obj.delete()
        self.assertEqual(b"aaaaa", versioned_obj.read())
        self.assertEqual(0, versions_container.info()['object_count'])
        # verify that all the original object headers have been copied back
        obj_info = versioned_obj.info()
        self.assertEqual('text/jibberish01', obj_info['content_type'])
        resp_headers = {
            h.lower(): v for h, v in versioned_obj.conn.response.getheaders()}
        for k, v in expected_headers.items():
            self.assertIn(k.lower(), resp_headers)
            self.assertEqual(v, resp_headers[k.lower()])
        versioned_obj.delete()
        self.assertRaises(ResponseError, versioned_obj.read)
    def assert_most_recent_version(self, obj_name, content,
                                   should_be_dlo=False):
        """Assert the newest archived version of obj_name has ``content``
        and is (or is not) a DLO manifest.
        """
        name_len = len(obj_name if six.PY2 else obj_name.encode('utf8'))
        # archive names are '<len-hex><name>/<timestamp>'; reverse-order
        # listing puts the newest archive first
        archive_versions = self.env.versions_container.files(parms={
            'prefix': '%03x%s/' % (name_len, obj_name),
            'reverse': 'yes'})
        archive_file = self.env.versions_container.file(archive_versions[0])
        self.assertEqual(content, archive_file.read())
        resp_headers = {
            h.lower(): v for h, v in archive_file.conn.response.getheaders()}
        if should_be_dlo:
            self.assertIn('x-object-manifest', resp_headers)
        else:
            self.assertNotIn('x-object-manifest', resp_headers)
    def _test_versioning_dlo_setup(self):
        """Build a DLO whose manifest object itself gets versioned."""
        if tf.in_process:
            tf.skip_if_no_xattrs()
        container = self.env.container
        versions_container = self.env.versions_container
        obj_name = Utils.create_name()
        for i in ('1', '2', '3'):
            time.sleep(.01)  # guarantee that the timestamp changes
            obj_name_seg = obj_name + '/' + i
            versioned_obj = container.file(obj_name_seg)
            versioned_obj.write(i.encode('ascii'))
            # immediately overwrite
            versioned_obj.write((i + i).encode('ascii'))
        self.assertEqual(3, versions_container.info()['object_count'])
        man_file = container.file(obj_name)
        # write a normal file first
        man_file.write(b'old content')
        # guarantee that the timestamp changes
        time.sleep(.01)
        # overwrite with a dlo manifest
        man_file.write(b'', hdrs={"X-Object-Manifest": "%s/%s/" %
                       (self.env.container.name, obj_name)})
        self.assertEqual(4, versions_container.info()['object_count'])
        self.assertEqual(b"112233", man_file.read())
        self.assert_most_recent_version(obj_name, b'old content')
        # overwrite the manifest with a normal file
        man_file.write(b'new content')
        self.assertEqual(5, versions_container.info()['object_count'])
        # new most-recent archive is the dlo
        self.assert_most_recent_version(
            obj_name, b'112233', should_be_dlo=True)
        return obj_name, man_file
    def test_versioning_dlo(self):
        """DELETE restores prior versions of a DLO manifest correctly."""
        obj_name, man_file = self._test_versioning_dlo_setup()
        # verify that restore works properly
        man_file.delete()
        self.assertEqual(4, self.env.versions_container.info()['object_count'])
        self.assertEqual(b"112233", man_file.read())
        resp_headers = {
            h.lower(): v for h, v in man_file.conn.response.getheaders()}
        self.assertIn('x-object-manifest', resp_headers)
        self.assert_most_recent_version(obj_name, b'old content')
        man_file.delete()
        self.assertEqual(3, self.env.versions_container.info()['object_count'])
        self.assertEqual(b"old content", man_file.read())
    def test_versioning_container_acl(self):
        """ACLs: a writer to the source container needs no access to the
        versions container; an unrelated user is rejected everywhere.
        """
        if tf.skip2:
            raise SkipTest('Account2 not set')
        # create versions container and DO NOT give write access to account2
        versions_container = self.env.account.container(Utils.create_name())
        location_header_val = quote(str(versions_container))
        self.assertTrue(versions_container.create(hdrs={
            'X-Container-Write': ''
        }))
        # check account2 cannot write to versions container
        fail_obj_name = Utils.create_name()
        fail_obj = versions_container.file(fail_obj_name)
        self.assertRaises(ResponseError, fail_obj.write, b"should fail",
                          cfg={'use_token': self.env.storage_token2})
        # create container and give write access to account2
        # don't set X-Versions-Location just yet
        container = self.env.account.container(Utils.create_name())
        self.assertTrue(container.create(hdrs={
            'X-Container-Write': self.env.conn2.user_acl}))
        # check account2 cannot set X-Versions-Location on container
        self.assertRaises(ResponseError, container.update_metadata, hdrs={
            self.env.location_header_key: location_header_val},
            cfg={'use_token': self.env.storage_token2})
        # good! now let admin set the X-Versions-Location
        # p.s.: sticking a 'x-remove' header here to test precedence
        # of both headers. Setting the location should succeed.
        self.assertTrue(container.update_metadata(hdrs={
            'X-Remove-' + self.env.location_header_key[len('X-'):]:
            location_header_val,
            self.env.location_header_key: location_header_val}))
        # write object twice to container and check version
        obj_name = Utils.create_name()
        versioned_obj = container.file(obj_name)
        self.assertTrue(versioned_obj.write(b"never argue with the data",
                        cfg={'use_token': self.env.storage_token2}))
        self.assertEqual(versioned_obj.read(), b"never argue with the data")
        self.assertTrue(
            versioned_obj.write(b"we don't have no beer, just tequila",
                                cfg={'use_token': self.env.storage_token2}))
        self.assertEqual(versioned_obj.read(),
                         b"we don't have no beer, just tequila")
        self.assertEqual(1, versions_container.info()['object_count'])
        # read the original uploaded object
        for filename in versions_container.files():
            backup_file = versions_container.file(filename)
            break
        self.assertEqual(backup_file.read(), b"never argue with the data")
        if not tf.skip3:
            # user3 (some random user with no access to any of account1)
            # tries to read from versioned container
            self.assertRaises(ResponseError, backup_file.read,
                              cfg={'use_token': self.env.storage_token3})
            # create an object user3 can try to copy
            a2_container = self.env.account2.container(Utils.create_name())
            a2_container.create(
                hdrs={'X-Container-Read': self.env.conn3.user_acl},
                cfg={'use_token': self.env.storage_token2})
            a2_obj = a2_container.file(Utils.create_name())
            self.assertTrue(a2_obj.write(b"unused",
                            cfg={'use_token': self.env.storage_token2}))
            # user3 also cannot write, delete, or copy to/from source container
            number_of_versions = versions_container.info()['object_count']
            self.assertRaises(ResponseError, versioned_obj.write,
                              b"some random user trying to write data",
                              cfg={'use_token': self.env.storage_token3})
            self.assertEqual(number_of_versions,
                             versions_container.info()['object_count'])
            self.assertRaises(ResponseError, versioned_obj.delete,
                              cfg={'use_token': self.env.storage_token3})
            self.assertEqual(number_of_versions,
                             versions_container.info()['object_count'])
            self.assertRaises(
                ResponseError, versioned_obj.write,
                hdrs={
                    'X-Copy-From': '%s/%s' % (a2_container.name, a2_obj.name),
                    'X-Copy-From-Account': self.env.conn2.account_name},
                cfg={'use_token': self.env.storage_token3})
            self.assertEqual(number_of_versions,
                             versions_container.info()['object_count'])
            self.assertRaises(
                ResponseError, a2_obj.copy_account,
                self.env.conn.account_name, container.name, obj_name,
                cfg={'use_token': self.env.storage_token3})
            self.assertEqual(number_of_versions,
                             versions_container.info()['object_count'])
        # user2 can't read or delete from versions-location
        self.assertRaises(ResponseError, backup_file.read,
                          cfg={'use_token': self.env.storage_token2})
        self.assertRaises(ResponseError, backup_file.delete,
                          cfg={'use_token': self.env.storage_token2})
        # but is able to delete from the source container
        # this could be a helpful scenario for dev ops that want to setup
        # just one container to hold object versions of multiple containers
        # and each one of those containers are owned by different users
        self.assertTrue(versioned_obj.delete(
            cfg={'use_token': self.env.storage_token2}))
        # tear-down since we create these containers here
        # and not in self.env
        if not tf.skip3:
            a2_container.delete_recursive()
        versions_container.delete_recursive()
        container.delete_recursive()
    def _test_versioning_check_acl_setup(self):
        """Write two versions, then verify a foreign token cannot DELETE."""
        container = self.env.container
        versions_container = self.env.versions_container
        versions_container.create(hdrs={'X-Container-Read': '.r:*,.rlistings'})
        obj_name = Utils.create_name()
        versioned_obj = container.file(obj_name)
        versioned_obj.write(b"aaaaa")
        self.assertEqual(b"aaaaa", versioned_obj.read())
        versioned_obj.write(b"bbbbb")
        self.assertEqual(b"bbbbb", versioned_obj.read())
        # Use token from second account and try to delete the object
        org_token = self.env.account.conn.storage_token
        self.env.account.conn.storage_token = self.env.conn2.storage_token
        try:
            with self.assertRaises(ResponseError) as cm:
                versioned_obj.delete()
            self.assertEqual(403, cm.exception.status)
        finally:
            # always restore the owner's token
            self.env.account.conn.storage_token = org_token
        # Verify with token from first account
        self.assertEqual(b"bbbbb", versioned_obj.read())
        return versioned_obj
    def test_versioning_check_acl(self):
        """Owner DELETE still restores the older version."""
        if tf.skip2:
            raise SkipTest('Account2 not set')
        versioned_obj = self._test_versioning_check_acl_setup()
        versioned_obj.delete()
        self.assertEqual(b"aaaaa", versioned_obj.read())
    def _check_overwriting_symlink(self):
        # assertions common to x-versions-location and x-history-location modes
        container = self.env.container
        versions_container = self.env.versions_container
        tgt_a_name = Utils.create_name()
        tgt_b_name = Utils.create_name()
        tgt_a = container.file(tgt_a_name)
        tgt_a.write(b"aaaaa")
        tgt_b = container.file(tgt_b_name)
        tgt_b.write(b"bbbbb")
        symlink_name = Utils.create_name()
        sym_tgt_header = quote(unquote('%s/%s' % (container.name, tgt_a_name)))
        sym_headers_a = {'X-Symlink-Target': sym_tgt_header}
        symlink = container.file(symlink_name)
        symlink.write(b"", hdrs=sym_headers_a)
        self.assertEqual(b"aaaaa", symlink.read())
        sym_headers_b = {'X-Symlink-Target': '%s/%s' % (container.name,
                                                        tgt_b_name)}
        symlink.write(b"", hdrs=sym_headers_b)
        self.assertEqual(b"bbbbb", symlink.read())
        # the old version got saved off
        self.assertEqual(1, versions_container.info()['object_count'])
        versioned_obj_name = versions_container.files()[0]
        prev_version = versions_container.file(versioned_obj_name)
        prev_version_info = prev_version.info(parms={'symlink': 'get'})
        self.assertEqual(b"aaaaa", prev_version.read())
        symlink_etag = prev_version_info['etag']
        # strip surrounding quotes, if any, before comparing etags
        if symlink_etag.startswith('"') and symlink_etag.endswith('"') and \
                symlink_etag[1:-1]:
            symlink_etag = symlink_etag[1:-1]
        self.assertEqual(MD5_OF_EMPTY_STRING, symlink_etag)
        self.assertEqual(sym_tgt_header,
                         prev_version_info['x_symlink_target'])
        return symlink, tgt_a
    def test_overwriting_symlink(self):
        """Deleting an overwritten symlink restores the symlink version."""
        if 'symlink' not in cluster_info:
            raise SkipTest("Symlinks not enabled")
        symlink, target = self._check_overwriting_symlink()
        # test delete
        symlink.delete()
        sym_info = symlink.info(parms={'symlink': 'get'})
        self.assertEqual(b"aaaaa", symlink.read())
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            self.assertEqual('"%s"' % MD5_OF_EMPTY_STRING, sym_info['etag'])
        else:
            self.assertEqual(MD5_OF_EMPTY_STRING, sym_info['etag'])
        self.assertEqual(
            quote(unquote('%s/%s' % (self.env.container.name, target.name))),
            sym_info['x_symlink_target'])
    def _setup_symlink(self):
        """Create a target object and a symlink pointing at it."""
        target = self.env.container.file('target-object')
        target.write(b'target object data')
        symlink = self.env.container.file('symlink')
        symlink.write(b'', hdrs={
            'Content-Type': 'application/symlink',
            'X-Symlink-Target': '%s/%s' % (
                self.env.container.name, target.name)})
        return symlink, target
    def _assert_symlink(self, symlink, target):
        """Assert ``symlink`` still resolves to ``target``."""
        self.assertEqual(b'target object data', symlink.read())
        self.assertEqual(target.info(), symlink.info())
        self.assertEqual('application/symlink',
                         symlink.info(parms={
                             'symlink': 'get'})['content_type'])
    def _check_copy_destination_restore_symlink(self):
        # assertions common to x-versions-location and x-history-location modes
        symlink, target = self._setup_symlink()
        symlink.write(b'this is not a symlink')
        # the symlink is versioned
        version_container_files = self.env.versions_container.files(
            parms={'format': 'json'})
        self.assertEqual(1, len(version_container_files))
        versioned_obj_info = version_container_files[0]
        self.assertEqual('application/symlink',
                         versioned_obj_info['content_type'])
        versioned_obj = self.env.versions_container.file(
            versioned_obj_info['name'])
        # the symlink is still a symlink
        self._assert_symlink(versioned_obj, target)
        # test manual restore (this creates a new backup of the overwrite)
        versioned_obj.copy(self.env.container.name, symlink.name,
                           parms={'symlink': 'get'})
        self._assert_symlink(symlink, target)
        # symlink overwritten by write then copy -> 2 versions
        self.assertEqual(2, self.env.versions_container.info()['object_count'])
        return symlink, target
    def test_copy_destination_restore_symlink(self):
        """COPY with ?symlink=get restores a symlink version intact."""
        if 'symlink' not in cluster_info:
            raise SkipTest("Symlinks not enabled")
        symlink, target = self._check_copy_destination_restore_symlink()
        # and versioned writes restore
        symlink.delete()
        self.assertEqual(1, self.env.versions_container.info()['object_count'])
        self.assertEqual(b'this is not a symlink', symlink.read())
        symlink.delete()
        self.assertEqual(0, self.env.versions_container.info()['object_count'])
        self._assert_symlink(symlink, target)
    def test_put_x_copy_from_restore_symlink(self):
        """PUT with X-Copy-From + ?symlink=get restores a symlink."""
        if 'symlink' not in cluster_info:
            raise SkipTest("Symlinks not enabled")
        symlink, target = self._setup_symlink()
        symlink.write(b'this is not a symlink')
        version_container_files = self.env.versions_container.files()
        self.assertEqual(1, len(version_container_files))
        versioned_obj = self.env.versions_container.file(
            version_container_files[0])
        symlink.write(parms={'symlink': 'get'}, cfg={
            'no_content_type': True}, hdrs={
            'X-Copy-From': '%s/%s' % (
                self.env.versions_container, versioned_obj.name)})
        self._assert_symlink(symlink, target)
class TestObjectVersioningUTF8(Base2, TestObjectVersioning):
    """Repeat TestObjectVersioning via the ``Base2`` mixin (UTF-8 names,
    per the class name; the mixin behavior is defined in
    test.functional.tests).
    """
    def tearDown(self):
        # NOTE(review): files are deleted *before* super().tearDown() here,
        # the reverse of TestObjectVersioning.tearDown -- presumably so the
        # cleanup runs while the env is still in its UTF-8 configuration;
        # confirm before changing the order.
        self._tear_down_files()
        super(TestObjectVersioningUTF8, self).tearDown()
class TestCrossPolicyObjectVersioning(TestObjectVersioning):
    """Run the object-versioning tests with the source container and its
    versions container placed on different storage policies.
    """
    env = TestCrossPolicyObjectVersioningEnv

    def setUp(self):
        """Skip unless the cluster exposes more than one storage policy.

        Raises:
            SkipTest: when multiple policies are known to be unavailable.
            Exception: when the env tri-state flag was never resolved.
        """
        super(TestCrossPolicyObjectVersioning, self).setUp()
        if self.env.multiple_policies_enabled is False:
            raise SkipTest('Cross policy test requires multiple policies')
        elif self.env.multiple_policies_enabled is not True:
            # just some sanity checking
            # bug fix: report the flag actually being checked
            # (multiple_policies_enabled), not versioning_enabled
            raise Exception("Expected multiple_policies_enabled "
                            "to be True/False, got %r" % (
                                self.env.multiple_policies_enabled,))
class TestObjectVersioningHistoryMode(TestObjectVersioning):
    """Versioning tests against a container using X-History-Location.

    In history mode a DELETE writes a zero-byte delete marker into the
    versions container and 404s thereafter, instead of restoring the
    previous version as stack (X-Versions-Location) mode does.
    """
    env = TestObjectVersioningHistoryModeEnv

    # these overridden tests include assertions for delete behaviors of
    # versioned objects that differ from default object versioning using
    # x-versions-location.

    def test_overwriting(self):
        """DELETEs leave delete markers; the object stays gone until a
        new PUT, and every write/delete accumulates in the history."""
        versions_container = self.env.versions_container
        versioned_obj, expected_headers, expected_content_types = \
            self._test_overwriting_setup()

        # test delete
        # at first, delete will succeed with 204
        versioned_obj.delete()
        expected_content_types.append(
            'application/x-deleted;swift_versions_deleted=1')
        # after that, any time the delete doesn't restore the old version
        # and we will get 404 NotFound
        for x in range(3):
            with self.assertRaises(ResponseError) as cm:
                versioned_obj.delete()
            self.assertEqual(404, cm.exception.status)
            # each 404 DELETE still records another delete marker
            expected_content_types.append(
                'application/x-deleted;swift_versions_deleted=1')
        # finally, we have 4 versioned items and 4 delete markers total in
        # the versions container
        self.assertEqual(8, versions_container.info()['object_count'])
        self.assertEqual(expected_content_types, [
            o['content_type'] for o in versions_container.files(
                parms={'format': 'json'})])

        # update versioned_obj
        versioned_obj.write(b"eeee", hdrs={'Content-Type': 'text/thanksgiving',
                            'X-Object-Meta-Bar': 'foo'})
        # verify the PUT object is kept successfully
        obj_info = versioned_obj.info()
        self.assertEqual('text/thanksgiving', obj_info['content_type'])

        # we still have delete-marker there
        self.assertEqual(8, versions_container.info()['object_count'])

        # update versioned_obj
        versioned_obj.write(b"ffff", hdrs={'Content-Type': 'text/teriyaki',
                            'X-Object-Meta-Food': 'chickin'})
        # verify the PUT object is kept successfully
        obj_info = versioned_obj.info()
        self.assertEqual('text/teriyaki', obj_info['content_type'])

        # new obj will be inserted after delete-marker there
        self.assertEqual(9, versions_container.info()['object_count'])

        versioned_obj.delete()
        with self.assertRaises(ResponseError) as cm:
            versioned_obj.read()
        self.assertEqual(404, cm.exception.status)
        # final DELETE adds the 'ffff' version plus another delete marker
        self.assertEqual(11, versions_container.info()['object_count'])

    def test_overwriting_with_url_encoded_object_name(self):
        """Same history-mode delete behavior for an object whose name
        contains a percent-encoded sequence ('%25ff')."""
        versions_container = self.env.versions_container
        obj_name = Utils.create_name() + '%25ff'
        versioned_obj, expected_headers, expected_content_types = \
            self._test_overwriting_setup(obj_name)

        # test delete
        # at first, delete will succeed with 204
        versioned_obj.delete()
        expected_content_types.append(
            'application/x-deleted;swift_versions_deleted=1')
        # after that, any time the delete doesn't restore the old version
        # and we will get 404 NotFound
        for x in range(3):
            with self.assertRaises(ResponseError) as cm:
                versioned_obj.delete()
            self.assertEqual(404, cm.exception.status)
            expected_content_types.append(
                'application/x-deleted;swift_versions_deleted=1')
        # finally, we have 4 versioned items and 4 delete markers total in
        # the versions container
        self.assertEqual(8, versions_container.info()['object_count'])
        self.assertEqual(expected_content_types, [
            o['content_type'] for o in versions_container.files(
                parms={'format': 'json'})])

        # update versioned_obj
        versioned_obj.write(b"eeee", hdrs={'Content-Type': 'text/thanksgiving',
                            'X-Object-Meta-Bar': 'foo'})
        # verify the PUT object is kept successfully
        obj_info = versioned_obj.info()
        self.assertEqual('text/thanksgiving', obj_info['content_type'])

        # we still have delete-marker there
        self.assertEqual(8, versions_container.info()['object_count'])

        # update versioned_obj
        versioned_obj.write(b"ffff", hdrs={'Content-Type': 'text/teriyaki',
                            'X-Object-Meta-Food': 'chickin'})
        # verify the PUT object is kept successfully
        obj_info = versioned_obj.info()
        self.assertEqual('text/teriyaki', obj_info['content_type'])

        # new obj will be inserted after delete-marker there
        self.assertEqual(9, versions_container.info()['object_count'])

        versioned_obj.delete()
        with self.assertRaises(ResponseError) as cm:
            versioned_obj.read()
        self.assertEqual(404, cm.exception.status)

    def test_versioning_dlo(self):
        """DELETE of a versioned DLO manifest 404s afterwards; all prior
        versions (plus the delete marker) stay in the versions container."""
        obj_name, man_file = \
            self._test_versioning_dlo_setup()

        man_file.delete()
        with self.assertRaises(ResponseError) as cm:
            man_file.read()
        self.assertEqual(404, cm.exception.status)
        self.assertEqual(7, self.env.versions_container.info()['object_count'])

        # the manifest versions: two content versions, the new content,
        # and the empty delete marker, in write order
        expected = [b'old content', b'112233', b'new content', b'']
        # version names are prefixed '<hex name length><name>/'
        name_len = len(obj_name if six.PY2 else obj_name.encode('utf8'))
        bodies = [
            self.env.versions_container.file(f).read()
            for f in self.env.versions_container.files(parms={
                'prefix': '%03x%s/' % (name_len, obj_name)})]
        self.assertEqual(expected, bodies)

    def test_versioning_check_acl(self):
        """With ACLs set up by the helper, delete 404s afterwards and the
        versions container keeps both versions plus the delete marker."""
        if tf.skip2:
            raise SkipTest('Account2 not set')
        versioned_obj = self._test_versioning_check_acl_setup()
        versioned_obj.delete()
        with self.assertRaises(ResponseError) as cm:
            versioned_obj.read()
        self.assertEqual(404, cm.exception.status)

        # we have 3 objects in the versions_container, 'aaaaa', 'bbbbb'
        # and delete-marker with empty content
        self.assertEqual(3, self.env.versions_container.info()['object_count'])
        files = self.env.versions_container.files()
        for actual, expected in zip(files, [b'aaaaa', b'bbbbb', b'']):
            prev_version = self.env.versions_container.file(actual)
            self.assertEqual(expected, prev_version.read())

    def test_overwriting_symlink(self):
        """Deleting an overwritten symlink does NOT restore it in
        history mode; reads 404 afterwards."""
        if 'symlink' not in cluster_info:
            raise SkipTest("Symlinks not enabled")
        symlink, target = self._check_overwriting_symlink()
        # test delete
        symlink.delete()
        with self.assertRaises(ResponseError) as cm:
            symlink.read()
        self.assertEqual(404, cm.exception.status)

    def test_copy_destination_restore_symlink(self):
        """Unlike stack mode, deleting after a copy-over leaves the
        object gone and all versions retained."""
        if 'symlink' not in cluster_info:
            raise SkipTest("Symlinks not enabled")
        symlink, target = self._check_copy_destination_restore_symlink()
        symlink.delete()
        with self.assertRaises(ResponseError) as cm:
            symlink.read()
        self.assertEqual(404, cm.exception.status)
        # 2 versions plus delete marker and deleted version
        self.assertEqual(4, self.env.versions_container.info()['object_count'])
class TestObjectVersioningHistoryModeUTF8(
        Base2, TestObjectVersioningHistoryMode):
    """History-mode versioning tests re-run via the Base2 mixin."""
    pass
class TestSloWithVersioning(unittest.TestCase):
    """Interaction of Static Large Objects with stack-mode versioning:
    overwriting an SLO manifest must version the manifest itself (and
    report the large object's total size), not the segment data."""

    def setUp(self):
        if 'slo' not in cluster_info:
            raise SkipTest("SLO not enabled")
        if tf.in_process:
            tf.skip_if_no_xattrs()

        self.conn = Connection(tf.config)
        self.conn.authenticate()
        self.account = Account(
            self.conn, tf.config.get('account', tf.config['username']))
        self.account.delete_containers()

        # create a container with versioning
        self.versions_container = self.account.container(Utils.create_name())
        self.container = self.account.container(Utils.create_name())
        self.segments_container = self.account.container(Utils.create_name())
        if not self.container.create(
                hdrs={'X-Versions-Location': self.versions_container.name}):
            # a 412 means the cluster rejected the versioning header
            if self.conn.response.status == 412:
                raise SkipTest("Object versioning not enabled")
            else:
                raise ResponseError(self.conn.response)
        if 'versions' not in self.container.info():
            raise SkipTest("Object versioning not enabled")

        for cont in (self.versions_container, self.segments_container):
            if not cont.create():
                raise ResponseError(self.conn.response)

        # create some segments: 1 MiB of 'a' and 1 MiB of 'b'
        self.seg_info = {}
        for letter, size in (('a', 1024 * 1024),
                             ('b', 1024 * 1024)):
            seg_name = letter
            file_item = self.segments_container.file(seg_name)
            file_item.write((letter * size).encode('ascii'))
            self.seg_info[seg_name] = {
                'size_bytes': size,
                'etag': file_item.md5,
                'path': '/%s/%s' % (self.segments_container.name, seg_name)}

    def _create_manifest(self, seg_name):
        """PUT a one-segment SLO manifest into the versioned container."""
        # create a manifest in the versioning container
        file_item = self.container.file("my-slo-manifest")
        file_item.write(
            json.dumps([self.seg_info[seg_name]]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
        return file_item

    def _assert_is_manifest(self, file_item, seg_name):
        """Assert that ?multipart-manifest=get returns the raw one-entry
        manifest JSON matching the stored segment info."""
        manifest_body = file_item.read(parms={'multipart-manifest': 'get'})
        resp_headers = {
            h.lower(): v for h, v in file_item.conn.response.getheaders()}
        self.assertIn('x-static-large-object', resp_headers)
        self.assertEqual('application/json; charset=utf-8',
                         file_item.content_type)
        try:
            manifest = json.loads(manifest_body)
        except ValueError:
            self.fail("GET with multipart-manifest=get got invalid json")

        self.assertEqual(1, len(manifest))
        # client-side manifest keys map to different names in the stored
        # manifest entries
        key_map = {'etag': 'hash', 'size_bytes': 'bytes'}
        for k_client, k_slo in key_map.items():
            self.assertEqual(self.seg_info[seg_name][k_client],
                             manifest[0][k_slo])
        if six.PY2:
            self.assertEqual(self.seg_info[seg_name]['path'].decode('utf8'),
                             manifest[0]['name'])
        else:
            self.assertEqual(self.seg_info[seg_name]['path'],
                             manifest[0]['name'])

    def _assert_is_object(self, file_item, seg_data):
        """Assert that a plain GET returns the full 1 MiB segment body
        (checked by length plus first/last byte)."""
        file_contents = file_item.read()
        self.assertEqual(1024 * 1024, len(file_contents))
        self.assertEqual(seg_data, file_contents[:1])
        self.assertEqual(seg_data, file_contents[-1:])

    def tearDown(self):
        # remove versioning to allow simple container delete
        self.container.update_metadata(hdrs={'X-Versions-Location': ''})
        self.account.delete_containers()

    def test_slo_manifest_version(self):
        """Overwriting an SLO manifest versions the manifest; deleting
        the new one restores the old manifest (still an SLO)."""
        file_item = self._create_manifest('a')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, b'a')

        # upload new manifest
        file_item = self._create_manifest('b')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'b')
        self._assert_is_object(file_item, b'b')

        versions_list = self.versions_container.files()
        self.assertEqual(1, len(versions_list))
        version_file = self.versions_container.file(versions_list[0])
        # check the version is still a manifest
        self._assert_is_manifest(version_file, 'a')
        self._assert_is_object(version_file, b'a')

        # delete the newest manifest
        file_item.delete()

        # expect the original manifest file to be restored
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, b'a')

    def test_slo_manifest_version_size(self):
        """Listings report the SLO's total (segment) size for both the
        stashed version and the restored manifest."""
        file_item = self._create_manifest('a')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, b'a')

        # original manifest size
        primary_list = self.container.files(parms={'format': 'json'})
        self.assertEqual(1, len(primary_list))
        org_size = primary_list[0]['bytes']

        # upload new manifest
        file_item = self._create_manifest('b')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'b')
        self._assert_is_object(file_item, b'b')

        versions_list = self.versions_container.files(parms={'format': 'json'})
        self.assertEqual(1, len(versions_list))
        version_file = self.versions_container.file(versions_list[0]['name'])
        version_file_size = versions_list[0]['bytes']
        # check the version is still a manifest
        self._assert_is_manifest(version_file, 'a')
        self._assert_is_object(version_file, b'a')

        # check the version size is correct
        self.assertEqual(version_file_size, org_size)

        # delete the newest manifest
        file_item.delete()

        # expect the original manifest file to be restored
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, b'a')

        primary_list = self.container.files(parms={'format': 'json'})
        self.assertEqual(1, len(primary_list))
        primary_file_size = primary_list[0]['bytes']
        # expect the original manifest file size to be the same
        self.assertEqual(primary_file_size, org_size)
class TestSloWithVersioningUTF8(Base2, TestSloWithVersioning):
    """SLO-with-versioning tests re-run via the Base2 mixin."""
    pass
class TestObjectVersioningChangingMode(Base):
    """Switching a container between history mode (X-History-Location)
    and stack mode (X-Versions-Location) mid-stream: delete markers
    written in history mode behave like ordinary versions once the
    container is flipped to stack mode."""
    env = TestObjectVersioningHistoryModeEnv

    def setUp(self):
        super(TestObjectVersioningChangingMode, self).setUp()
        if 'versioned_writes' not in cluster_info:
            raise SkipTest("versioned_writes not enabled")

    def test_delete_while_changing_mode(self):
        container = self.env.container
        versions_container = self.env.versions_container
        cont_info = container.info()
        self.assertEqual(cont_info['versions'], quote(versions_container.name))

        obj_name = Utils.create_name()
        versioned_obj = container.file(obj_name)

        versioned_obj.write(
            b"version1", hdrs={'Content-Type': 'text/jibberish01'})
        versioned_obj.write(
            b"version2", hdrs={'Content-Type': 'text/jibberish01'})

        # sanity, version1 object should have moved to versions_container
        self.assertEqual(1, versions_container.info()['object_count'])
        versioned_obj.delete()

        # version2 and the delete marker should have put in
        # versions_container (history mode records both)
        self.assertEqual(3, versions_container.info()['object_count'])
        delete_marker_name = versions_container.files()[2]
        delete_marker = versions_container.file(delete_marker_name)
        delete_marker.initialize()
        self.assertEqual(
            delete_marker.content_type,
            'application/x-deleted;swift_versions_deleted=1')

        # change to stack mode
        hdrs = {'X-Versions-Location': versions_container.name}
        container.update_metadata(hdrs=hdrs)

        versioned_obj.delete()

        # in stack mode the first delete pops the delete marker and then
        # version2 object should have been moved in container
        self.assertEqual(b"version2", versioned_obj.read())

        # and there's only one version1 is left in versions_container
        self.assertEqual(1, versions_container.info()['object_count'])
        versioned_obj_name = versions_container.files()[0]
        prev_version = versions_container.file(versioned_obj_name)
        prev_version.initialize()
        self.assertEqual(b"version1", prev_version.read())
        self.assertEqual(prev_version.content_type, 'text/jibberish01')

        # reset and test double delete
        # change back to history mode
        hdrs = {'X-History-Location': versions_container.name}
        container.update_metadata(hdrs=hdrs)

        # double delete, second DELETE returns a 404 as expected
        versioned_obj.delete()
        with self.assertRaises(ResponseError) as cm:
            versioned_obj.delete()
        self.assertEqual(404, cm.exception.status)

        # There should now be 4 objects total in versions_container
        # 2 are delete markers
        self.assertEqual(4, versions_container.info()['object_count'])

        # change to stack mode
        hdrs = {'X-Versions-Location': versions_container.name}
        container.update_metadata(hdrs=hdrs)

        # a delete, just deletes one delete marker, it doesn't yet pop
        # version2 back in the container
        # This DELETE doesn't return a 404!
        versioned_obj.delete()
        self.assertEqual(3, versions_container.info()['object_count'])
        self.assertEqual(0, container.info()['object_count'])

        # neither does this one!
        versioned_obj.delete()

        # version2 object should have been moved in container
        self.assertEqual(b"version2", versioned_obj.read())

        # and there's only one version1 is left in versions_container
        self.assertEqual(1, versions_container.info()['object_count'])
class TestObjectVersioningChangingModeUTF8(
        Base2, TestObjectVersioningChangingMode):
    """Mode-switching versioning tests re-run via the Base2 mixin."""
    pass
| swift-master | test/functional/test_versioned_writes.py |
#!/usr/bin/python -u
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import email.parser
import itertools
import json
from copy import deepcopy
from unittest import SkipTest
import six
from six.moves import urllib
from swift.common.swob import normalize_etag
from swift.common.utils import md5
import test.functional as tf
from test.functional import cluster_info
from test.functional.tests import Utils, Base, Base2, BaseEnv
from test.functional.swift_test_client import Connection, ResponseError
def setUpModule():
    """Module-level fixture: initialize the functional-test package."""
    tf.setup_package()
def tearDownModule():
    """Module-level fixture: tear down the functional-test package."""
    tf.teardown_package()
def group_file_contents(file_contents):
    """Run-length encode a byte string.

    Returns a list of ``(byte, count)`` tuples, one per maximal run of a
    repeated byte, where ``byte`` is a single-byte bytes object.
    """
    # Iterating a bytes object on py3 yields ints, not one-byte bytes as
    # on py2, so slice out single-byte chunks to get uniform behavior.
    one_byte_chunks = (file_contents[idx:idx + 1]
                       for idx in range(len(file_contents)))
    return [(value, len(list(run)))
            for value, run in itertools.groupby(one_byte_chunks)]
class TestSloEnv(BaseEnv):
    """Shared fixture for the SLO tests: creates five 1 MiB-ish segments
    ('a'..'d' of 1 MiB, 'e' of 1 byte) and a family of manifests over
    them (flat, nested, ranged, null-etag/size, and data-segment)."""
    slo_enabled = None  # tri-state: None initially, then True/False

    @classmethod
    def create_segments(cls, container):
        """Upload the five standard segments into *container* and return
        a dict of per-segment manifest entries (size/etag/path)."""
        seg_info = {}
        for letter, size in (('a', 1024 * 1024),
                             ('b', 1024 * 1024),
                             ('c', 1024 * 1024),
                             ('d', 1024 * 1024),
                             ('e', 1)):
            seg_name = "seg_%s" % letter
            file_item = container.file(seg_name)
            file_item.write(letter.encode('ascii') * size)
            seg_info[seg_name] = {
                'size_bytes': size,
                'etag': file_item.md5,
                'path': '/%s/%s' % (container.name, seg_name)}
        return seg_info

    @classmethod
    def setUp(cls):
        if cls.slo_enabled is None:
            cls.slo_enabled = 'slo' in cluster_info
            if not cls.slo_enabled:
                return

        super(TestSloEnv, cls).setUp()

        # secondary accounts for cross-account tests, when configured
        if not tf.skip2:
            config2 = deepcopy(tf.config)
            config2['account'] = tf.config['account2']
            config2['username'] = tf.config['username2']
            config2['password'] = tf.config['password2']
            cls.conn2 = Connection(config2)
            cls.conn2.authenticate()

            cls.account2 = cls.conn2.get_account()
            cls.account2.delete_containers()
        if not tf.skip3:
            config3 = tf.config.copy()
            config3['username'] = tf.config['username3']
            config3['password'] = tf.config['password3']
            cls.conn3 = Connection(config3)
            cls.conn3.authenticate()

        cls.container = cls.account.container(Utils.create_name())
        cls.container2 = cls.account.container(Utils.create_name())

        for cont in (cls.container, cls.container2):
            if not cont.create():
                raise ResponseError(cls.conn.response)

        cls.seg_info = seg_info = cls.create_segments(cls.container)

        # flat five-segment manifest: a+b+c+d+e
        file_item = cls.container.file("manifest-abcde")
        file_item.write(
            json.dumps([seg_info['seg_a'], seg_info['seg_b'],
                        seg_info['seg_c'], seg_info['seg_d'],
                        seg_info['seg_e']]).encode('ascii'),
            parms={'multipart-manifest': 'put'})

        # a segment with a percent sequence in its name
        cls.container.file('seg_with_%ff_funky_name').write(b'z' * 10)

        # Put the same manifest in the container2
        file_item = cls.container2.file("manifest-abcde")
        file_item.write(
            json.dumps([seg_info['seg_a'], seg_info['seg_b'],
                        seg_info['seg_c'], seg_info['seg_d'],
                        seg_info['seg_e']]).encode('ascii'),
            parms={'multipart-manifest': 'put'})

        # sub-manifest c+d, and its SLO etag (MD5 of segment MD5s)
        file_item = cls.container.file('manifest-cd')
        cd_json = json.dumps([
            seg_info['seg_c'], seg_info['seg_d']]).encode('ascii')
        file_item.write(cd_json, parms={'multipart-manifest': 'put'})
        cd_etag = md5((
            seg_info['seg_c']['etag'] + seg_info['seg_d']['etag']
        ).encode('ascii'), usedforsecurity=False).hexdigest()

        # nested manifest b + (c+d)
        file_item = cls.container.file("manifest-bcd-submanifest")
        file_item.write(
            json.dumps([seg_info['seg_b'],
                        {'etag': cd_etag,
                         'size_bytes': (seg_info['seg_c']['size_bytes'] +
                                        seg_info['seg_d']['size_bytes']),
                         'path': '/%s/%s' % (cls.container.name,
                                             'manifest-cd')}]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
        bcd_submanifest_etag = md5((
            seg_info['seg_b']['etag'] + cd_etag).encode('ascii'),
            usedforsecurity=False).hexdigest()

        # doubly-nested manifest a + (b+(c+d)) + e
        file_item = cls.container.file("manifest-abcde-submanifest")
        file_item.write(
            json.dumps([
                seg_info['seg_a'],
                {'etag': bcd_submanifest_etag,
                 'size_bytes': (seg_info['seg_b']['size_bytes'] +
                                seg_info['seg_c']['size_bytes'] +
                                seg_info['seg_d']['size_bytes']),
                 'path': '/%s/%s' % (cls.container.name,
                                     'manifest-bcd-submanifest')},
                seg_info['seg_e']]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
        abcde_submanifest_etag = md5((
            seg_info['seg_a']['etag'] + bcd_submanifest_etag +
            seg_info['seg_e']['etag']).encode('ascii'),
            usedforsecurity=False).hexdigest()
        abcde_submanifest_size = (seg_info['seg_a']['size_bytes'] +
                                  seg_info['seg_b']['size_bytes'] +
                                  seg_info['seg_c']['size_bytes'] +
                                  seg_info['seg_d']['size_bytes'] +
                                  seg_info['seg_e']['size_bytes'])

        # manifest of byte ranges over the nested manifest
        file_item = cls.container.file("ranged-manifest")
        file_item.write(
            json.dumps([
                {'etag': abcde_submanifest_etag,
                 'size_bytes': abcde_submanifest_size,
                 'path': '/%s/%s' % (cls.container.name,
                                     'manifest-abcde-submanifest'),
                 'range': '-1048578'},  # 'c' + ('d' * 2**20) + 'e'
                {'etag': abcde_submanifest_etag,
                 'size_bytes': abcde_submanifest_size,
                 'path': '/%s/%s' % (cls.container.name,
                                     'manifest-abcde-submanifest'),
                 'range': '524288-1572863'},  # 'a' * 2**19 + 'b' * 2**19
                {'etag': abcde_submanifest_etag,
                 'size_bytes': abcde_submanifest_size,
                 'path': '/%s/%s' % (cls.container.name,
                                     'manifest-abcde-submanifest'),
                 'range': '3145727-3145728'}]).encode('ascii'),  # 'cd'
            parms={'multipart-manifest': 'put'})
        # ranged sub-segments contribute 'etag:range;' to the SLO etag
        ranged_manifest_etag = md5((
            abcde_submanifest_etag + ':3145727-4194304;' +
            abcde_submanifest_etag + ':524288-1572863;' +
            abcde_submanifest_etag + ':3145727-3145728;'
        ).encode('ascii'), usedforsecurity=False).hexdigest()
        ranged_manifest_size = 2 * 1024 * 1024 + 4

        # ranges over the ranged manifest itself
        file_item = cls.container.file("ranged-submanifest")
        file_item.write(
            json.dumps([
                seg_info['seg_c'],
                {'etag': ranged_manifest_etag,
                 'size_bytes': ranged_manifest_size,
                 'path': '/%s/%s' % (cls.container.name,
                                     'ranged-manifest')},
                {'etag': ranged_manifest_etag,
                 'size_bytes': ranged_manifest_size,
                 'path': '/%s/%s' % (cls.container.name,
                                     'ranged-manifest'),
                 'range': '524289-1572865'},
                {'etag': ranged_manifest_etag,
                 'size_bytes': ranged_manifest_size,
                 'path': '/%s/%s' % (cls.container.name,
                                     'ranged-manifest'),
                 'range': '-3'}]).encode('ascii'),
            parms={'multipart-manifest': 'put'})

        # manifest with null etag/size (server fills them in)
        file_item = cls.container.file("manifest-db")
        file_item.write(
            json.dumps([
                {'path': seg_info['seg_d']['path'], 'etag': None,
                 'size_bytes': None},
                {'path': seg_info['seg_b']['path'], 'etag': None,
                 'size_bytes': None},
            ]).encode('ascii'), parms={'multipart-manifest': 'put'})

        file_item = cls.container.file("ranged-manifest-repeated-segment")
        file_item.write(
            json.dumps([
                {'path': seg_info['seg_a']['path'], 'etag': None,
                 'size_bytes': None, 'range': '-1048578'},
                {'path': seg_info['seg_a']['path'], 'etag': None,
                 'size_bytes': None},
                {'path': seg_info['seg_b']['path'], 'etag': None,
                 'size_bytes': None, 'range': '-1048578'},
            ]).encode('ascii'), parms={'multipart-manifest': 'put'})

        # manifest mixing inline base64 'data' segments with objects
        file_item = cls.container.file("mixed-object-data-manifest")
        file_item.write(
            json.dumps([
                {'data': base64.b64encode(b'APRE' * 8).decode('ascii')},
                {'path': seg_info['seg_a']['path']},
                {'data': base64.b64encode(b'APOS' * 16).decode('ascii')},
                {'path': seg_info['seg_b']['path']},
                {'data': base64.b64encode(b'BPOS' * 32).decode('ascii')},
                {'data': base64.b64encode(b'CPRE' * 64).decode('ascii')},
                {'path': seg_info['seg_c']['path']},
                {'data': base64.b64encode(b'CPOS' * 8).decode('ascii')},
            ]).encode('ascii'), parms={'multipart-manifest': 'put'}
        )

        # manifest whose only segment is the mixed manifest above
        file_item = cls.container.file("nested-data-manifest")
        file_item.write(
            json.dumps([
                {'path': '%s/%s' % (cls.container.name,
                                    "mixed-object-data-manifest")}
            ]).encode('ascii'), parms={'multipart-manifest': 'put'}
        )
class TestSlo(Base):
env = TestSloEnv
def setUp(self):
    """Skip unless SLO is enabled; precompute the expected SLO etag for
    manifest-abcde (MD5 of the concatenated segment MD5 hexdigests)."""
    super(TestSlo, self).setUp()
    if self.env.slo_enabled is False:
        raise SkipTest("SLO not enabled")
    elif self.env.slo_enabled is not True:
        # just some sanity checking
        raise Exception(
            "Expected slo_enabled to be True/False, got %r" %
            (self.env.slo_enabled,))

    manifest_abcde_hash = md5(usedforsecurity=False)
    for letter in (b'a', b'b', b'c', b'd'):
        manifest_abcde_hash.update(
            md5(letter * 1024 * 1024, usedforsecurity=False)
            .hexdigest().encode('ascii'))
    manifest_abcde_hash.update(
        md5(b'e', usedforsecurity=False).hexdigest().encode('ascii'))
    self.manifest_abcde_etag = manifest_abcde_hash.hexdigest()
def test_slo_get_simple_manifest(self):
    """Plain GET of a flat manifest returns the concatenated segments
    with a 200 and the quoted MD5-of-MD5s etag."""
    file_item = self.env.container.file('manifest-abcde')
    file_contents = file_item.read()
    self.assertEqual(file_item.conn.response.status, 200)
    headers = dict(
        (h.lower(), v)
        for h, v in file_item.conn.response.getheaders())
    self.assertIn('etag', headers)
    self.assertEqual(headers['etag'], '"%s"' % self.manifest_abcde_etag)
    self.assertEqual([
        (b'a', 1024 * 1024),
        (b'b', 1024 * 1024),
        (b'c', 1024 * 1024),
        (b'd', 1024 * 1024),
        (b'e', 1),
    ], group_file_contents(file_contents))
def test_slo_container_listing(self):
    """A manifest's container-listing entry reports the total segment
    size, the manifest-body MD5 as 'hash' and the SLO MD5-of-MD5s as
    'slo_etag', and all stay consistent across POSTs."""
    # the listing object size should equal the sum of the size of the
    # segments, not the size of the manifest body
    file_item = self.env.container.file(Utils.create_name())
    file_item.write(
        json.dumps([self.env.seg_info['seg_a']]).encode('ascii'),
        parms={'multipart-manifest': 'put'})
    # The container listing exposes BOTH the MD5 of the manifest content
    # and the SLO MD5-of-MD5s by splitting the latter out into a separate
    # key. These should remain consistent when the object is updated with
    # a POST.
    file_item.initialize(parms={'multipart-manifest': 'get'})
    manifest_etag = file_item.etag
    if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
        self.assertTrue(manifest_etag.startswith('"'))
        self.assertTrue(manifest_etag.endswith('"'))
        # ...but in the listing, it'll be stripped
        manifest_etag = manifest_etag[1:-1]
    else:
        self.assertFalse(manifest_etag.startswith('"'))
        self.assertFalse(manifest_etag.endswith('"'))

    file_item.initialize()
    slo_etag = file_item.etag
    self.assertTrue(slo_etag.startswith('"'))
    self.assertTrue(slo_etag.endswith('"'))

    def assert_listing_consistent(expected_content_type):
        # Fetch a fresh listing, find the manifest's entry and verify
        # size, content-type, manifest MD5 and SLO etag.
        # (This replaces three byte-identical copy-pasted for/else loops.)
        listing = self.env.container.files(parms={'format': 'json'})
        for f_dict in listing:
            if f_dict['name'] == file_item.name:
                self.assertEqual(1024 * 1024, f_dict['bytes'])
                self.assertEqual(expected_content_type,
                                 f_dict['content_type'])
                self.assertEqual(manifest_etag, f_dict['hash'])
                self.assertEqual(slo_etag, f_dict['slo_etag'])
                break
        else:
            self.fail('Failed to find manifest file in container listing')

    assert_listing_consistent('application/octet-stream')

    # now POST updated content-type file
    file_item.content_type = 'image/jpeg'
    file_item.sync_metadata({'X-Object-Meta-Test': 'blah'})
    file_item.initialize()
    self.assertEqual('image/jpeg', file_item.content_type)  # sanity

    # verify that the container listing is consistent with the file
    assert_listing_consistent(file_item.content_type)

    # now POST with no change to content-type
    file_item.sync_metadata({'X-Object-Meta-Test': 'blah'},
                            cfg={'no_content_type': True})
    file_item.initialize()
    self.assertEqual('image/jpeg', file_item.content_type)  # sanity

    # verify that the container listing is consistent with the file
    assert_listing_consistent(file_item.content_type)
def test_slo_get_nested_manifest(self):
    """GET of a manifest containing a sub-manifest yields the full
    concatenation of all leaf segments."""
    file_item = self.env.container.file('manifest-abcde-submanifest')
    file_contents = file_item.read()
    self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents))
    # reuse the body we already downloaded; the original issued a second
    # full GET (file_item.read()) just to group the same content
    self.assertEqual([
        (b'a', 1024 * 1024),
        (b'b', 1024 * 1024),
        (b'c', 1024 * 1024),
        (b'd', 1024 * 1024),
        (b'e', 1),
    ], group_file_contents(file_contents))
def test_slo_get_ranged_manifest(self):
    """GET of a manifest whose segments are byte ranges of another
    manifest returns exactly those ranges, concatenated."""
    file_item = self.env.container.file('ranged-manifest')
    self.assertEqual([
        (b'c', 1),
        (b'd', 1024 * 1024),
        (b'e', 1),
        (b'a', 512 * 1024),
        (b'b', 512 * 1024),
        (b'c', 1),
        (b'd', 1),
    ], group_file_contents(file_item.read()))
def test_slo_get_ranged_manifest_repeated_segment(self):
    """A manifest may reference the same segment twice (with and
    without a range); the body reflects both references."""
    file_item = self.env.container.file('ranged-manifest-repeated-segment')
    self.assertEqual(
        [(b'a', 2097152), (b'b', 1048576)],
        group_file_contents(file_item.read()))
def test_slo_get_ranged_submanifest(self):
    """Ranges over the ranged-manifest itself (ranges of ranges of a
    nested manifest) resolve correctly."""
    file_item = self.env.container.file('ranged-submanifest')
    self.assertEqual([
        (b'c', 1024 * 1024 + 1),
        (b'd', 1024 * 1024),
        (b'e', 1),
        (b'a', 512 * 1024),
        (b'b', 512 * 1024),
        (b'c', 1),
        (b'd', 512 * 1024 + 1),
        (b'e', 1),
        (b'a', 512 * 1024),
        (b'b', 1),
        (b'c', 1),
        (b'd', 1),
    ], group_file_contents(file_item.read()))
def test_slo_ranged_get(self):
    """A ranged GET spanning a segment boundary returns 206 with the
    full-object SLO etag and only the requested bytes."""
    file_item = self.env.container.file('manifest-abcde')
    # 1 byte of 'a', all of 'b', 1 byte of 'c'
    file_contents = file_item.read(size=1024 * 1024 + 2,
                                   offset=1024 * 1024 - 1)
    self.assertEqual(file_item.conn.response.status, 206)
    headers = dict(
        (h.lower(), v)
        for h, v in file_item.conn.response.getheaders())
    self.assertIn('etag', headers)
    # etag is that of the whole object, not of the range
    self.assertEqual(headers['etag'], '"%s"' % self.manifest_abcde_etag)
    self.assertEqual([
        (b'a', 1),
        (b'b', 1048576),
        (b'c', 1),
    ], group_file_contents(file_contents))
def test_slo_ranged_get_half_open_on_right(self):
    """Range 'bytes=N-' (open right end) returns everything from N to
    the end of the large object."""
    file_item = self.env.container.file('manifest-abcde')
    file_contents = file_item.read(
        hdrs={"Range": "bytes=1048571-"})
    self.assertEqual([
        (b'a', 5),
        (b'b', 1048576),
        (b'c', 1048576),
        (b'd', 1048576),
        (b'e', 1)
    ], group_file_contents(file_contents))
def test_slo_ranged_get_half_open_on_left(self):
    """Suffix range 'bytes=-N' returns the last N bytes of the large
    object."""
    file_item = self.env.container.file('manifest-abcde')
    file_contents = file_item.read(
        hdrs={"Range": "bytes=-123456"})
    self.assertEqual([
        (b'd', 123455),
        (b'e', 1),
    ], group_file_contents(file_contents))
def test_slo_multi_ranged_get(self):
    """A multi-range GET on a manifest returns a multipart/byteranges
    body with one correctly-delimited part per range."""
    file_item = self.env.container.file('manifest-abcde')
    file_contents = file_item.read(
        hdrs={"Range": "bytes=1048571-1048580,2097147-2097156"})

    # See testMultiRangeGets for explanation
    if six.PY2:
        parser = email.parser.FeedParser()
    else:
        parser = email.parser.BytesFeedParser()
    # prepend the response Content-Type so the MIME parser can find
    # the multipart boundary
    parser.feed((
        "Content-Type: %s\r\n\r\n" % file_item.content_type).encode())
    parser.feed(file_contents)
    root_message = parser.close()

    self.assertTrue(root_message.is_multipart())  # sanity check
    byteranges = root_message.get_payload()
    self.assertEqual(len(byteranges), 2)

    self.assertEqual(byteranges[0]['Content-Type'],
                     "application/octet-stream")
    self.assertEqual(
        byteranges[0]['Content-Range'], "bytes 1048571-1048580/4194305")
    self.assertEqual(byteranges[0].get_payload(decode=True), b"aaaaabbbbb")

    self.assertEqual(byteranges[1]['Content-Type'],
                     "application/octet-stream")
    self.assertEqual(
        byteranges[1]['Content-Range'], "bytes 2097147-2097156/4194305")
    self.assertEqual(byteranges[1].get_payload(decode=True), b"bbbbbccccc")
def test_slo_ranged_submanifest(self):
    """A ranged GET on a nested manifest crosses sub-manifest segment
    boundaries correctly."""
    file_item = self.env.container.file('manifest-abcde-submanifest')
    # last byte of 'b', all of 'c', first byte of 'd'
    file_contents = file_item.read(size=1024 * 1024 + 2,
                                   offset=1024 * 1024 * 2 - 1)
    self.assertEqual([
        (b'b', 1),
        (b'c', 1024 * 1024),
        (b'd', 1),
    ], group_file_contents(file_contents))
def test_slo_etag_is_quote_wrapped_hash_of_etags(self):
    """HEAD reports the quoted MD5-of-MD5s as the manifest's etag."""
    # we have this check in test_slo_get_simple_manifest, too,
    # but verify that it holds for HEAD requests
    file_item = self.env.container.file('manifest-abcde')
    self.assertEqual('"%s"' % self.manifest_abcde_etag,
                     file_item.info()['etag'])
def test_slo_etag_is_quote_wrapped_hash_of_etags_submanifests(self):
    """The SLO etag of a nested manifest hashes the SUB-manifest's SLO
    etag (itself an MD5-of-MD5s), not the leaf segment etags directly."""

    def hd(x):
        # hex MD5 digest as ascii bytes, for recursive etag construction
        return md5(x, usedforsecurity=False).hexdigest().encode('ascii')

    expected_etag = hd(hd(b'a' * 1024 * 1024) +
                       hd(hd(b'b' * 1024 * 1024) +
                          hd(hd(b'c' * 1024 * 1024) +
                             hd(b'd' * 1024 * 1024))) +
                       hd(b'e'))

    file_item = self.env.container.file('manifest-abcde-submanifest')
    self.assertEqual('"%s"' % expected_etag.decode('ascii'),
                     file_item.info()['etag'])
def test_slo_etag_mismatch(self):
    """A manifest PUT whose declared segment etag doesn't match the
    stored segment is rejected with 400."""
    file_item = self.env.container.file("manifest-a-bad-etag")
    try:
        file_item.write(
            json.dumps([{
                'size_bytes': 1024 * 1024,
                'etag': 'not it',
                'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
            }]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
    except ResponseError as err:
        self.assertEqual(400, err.status)
    else:
        self.fail("Expected ResponseError but didn't get it")
def test_slo_size_mismatch(self):
    """A manifest PUT whose declared segment size doesn't match the
    stored segment is rejected with 400."""
    file_item = self.env.container.file("manifest-a-bad-size")
    try:
        file_item.write(
            json.dumps([{
                'size_bytes': 1024 * 1024 - 1,
                'etag': md5(
                    b'a' * 1024 * 1024,
                    usedforsecurity=False).hexdigest(),
                'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
            }]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
    except ResponseError as err:
        self.assertEqual(400, err.status)
    else:
        self.fail("Expected ResponseError but didn't get it")
def test_slo_client_etag_mismatch(self):
    """A manifest PUT with a client-supplied Etag header that doesn't
    match the computed SLO etag is rejected with 422."""
    file_item = self.env.container.file("manifest-a-mismatch-etag")
    try:
        file_item.write(
            json.dumps([{
                'size_bytes': 1024 * 1024,
                'etag': md5(b'a' * 1024 * 1024,
                            usedforsecurity=False).hexdigest(),
                'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
            }]).encode('ascii'),
            parms={'multipart-manifest': 'put'},
            hdrs={'Etag': 'NOTetagofthesegments'})
    except ResponseError as err:
        self.assertEqual(422, err.status)
    else:
        # BUG FIX: without this else-clause (present in the sibling
        # test_slo_etag_mismatch/test_slo_size_mismatch tests), the test
        # silently passed if the PUT unexpectedly succeeded.
        self.fail("Expected ResponseError but didn't get it")
def test_slo_client_etag(self):
file_item = self.env.container.file("manifest-a-b-etag")
etag_a = md5(b'a' * 1024 * 1024, usedforsecurity=False).hexdigest()
etag_b = md5(b'b' * 1024 * 1024, usedforsecurity=False).hexdigest()
file_item.write(
json.dumps([{
'size_bytes': 1024 * 1024,
'etag': etag_a,
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}, {
'size_bytes': 1024 * 1024,
'etag': etag_b,
'path': '/%s/%s' % (self.env.container.name, 'seg_b'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'},
hdrs={'Etag': md5((etag_a + etag_b).encode(),
usedforsecurity=False).hexdigest()})
self.assert_status(201)
def test_slo_unspecified_etag(self):
file_item = self.env.container.file("manifest-a-unspecified-etag")
file_item.write(
json.dumps([{
'size_bytes': 1024 * 1024,
'etag': None,
'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
self.assert_status(201)
def test_slo_unspecified_size(self):
file_item = self.env.container.file("manifest-a-unspecified-size")
file_item.write(
json.dumps([{
'size_bytes': None,
'etag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
self.assert_status(201)
def test_slo_funky_segment(self):
file_item = self.env.container.file("manifest-with-funky-segment")
file_item.write(
json.dumps([{
'path': '/%s/%s' % (self.env.container.name,
'seg_with_%ff_funky_name'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
self.assert_status(201)
self.assertEqual(b'z' * 10, file_item.read())
def test_slo_missing_etag(self):
file_item = self.env.container.file("manifest-a-missing-etag")
file_item.write(
json.dumps([{
'size_bytes': 1024 * 1024,
'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
self.assert_status(201)
def test_slo_missing_size(self):
file_item = self.env.container.file("manifest-a-missing-size")
file_item.write(
json.dumps([{
'etag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
self.assert_status(201)
def test_slo_path_only(self):
file_item = self.env.container.file("manifest-a-path-only")
file_item.write(
json.dumps([{
'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
self.assert_status(201)
def test_slo_typo_etag(self):
file_item = self.env.container.file("manifest-a-typo-etag")
try:
file_item.write(
json.dumps([{
'teag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'size_bytes': 1024 * 1024,
'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
except ResponseError as err:
self.assertEqual(400, err.status)
else:
self.fail("Expected ResponseError but didn't get it")
def test_slo_typo_size(self):
file_item = self.env.container.file("manifest-a-typo-size")
try:
file_item.write(
json.dumps([{
'etag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'siz_bytes': 1024 * 1024,
'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'})
except ResponseError as err:
self.assertEqual(400, err.status)
else:
self.fail("Expected ResponseError but didn't get it")
    def test_slo_overwrite_segment_with_manifest(self):
        """Overwriting an object that the manifest itself references as a
        segment ('seg_b') must be rejected with 400."""
        file_item = self.env.container.file("seg_b")
        with self.assertRaises(ResponseError) as catcher:
            file_item.write(
                json.dumps([
                    {'size_bytes': 1024 * 1024,
                     'etag': md5(b'a' * 1024 * 1024,
                                 usedforsecurity=False).hexdigest(),
                     'path': '/%s/%s' % (self.env.container.name, 'seg_a')},
                    # this entry points at the object being overwritten
                    {'size_bytes': 1024 * 1024,
                     'etag': md5(b'b' * 1024 * 1024,
                                 usedforsecurity=False).hexdigest(),
                     'path': '/%s/%s' % (self.env.container.name, 'seg_b')},
                    {'size_bytes': 1024 * 1024,
                     'etag': md5(b'c' * 1024 * 1024,
                                 usedforsecurity=False).hexdigest(),
                     'path': '/%s/%s' % (self.env.container.name, 'seg_c')},
                ]).encode('ascii'),
                parms={'multipart-manifest': 'put'})
        self.assertEqual(400, catcher.exception.status)
def test_slo_copy(self):
file_item = self.env.container.file("manifest-abcde")
file_item.copy(self.env.container.name, "copied-abcde")
copied = self.env.container.file("copied-abcde")
copied_contents = copied.read(parms={'multipart-manifest': 'get'})
self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
    def test_slo_copy_account(self):
        """COPY an SLO within one account and then across accounts."""
        acct = urllib.parse.unquote(self.env.conn.account_name)
        # same account copy
        file_item = self.env.container.file("manifest-abcde")
        file_item.copy_account(acct, self.env.container.name, "copied-abcde")
        copied = self.env.container.file("copied-abcde")
        copied_contents = copied.read(parms={'multipart-manifest': 'get'})
        self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
        if not tf.skip2:
            # copy to different account
            acct = urllib.parse.unquote(self.env.conn2.account_name)
            dest_cont = self.env.account2.container(Utils.create_name())
            # grant account1 write access so the cross-account COPY can land
            self.assertTrue(dest_cont.create(hdrs={
                'X-Container-Write': self.env.conn.user_acl
            }))
            file_item = self.env.container.file("manifest-abcde")
            file_item.copy_account(acct, dest_cont, "copied-abcde")
            copied = dest_cont.file("copied-abcde")
            copied_contents = copied.read(parms={'multipart-manifest': 'get'})
            self.assertEqual(4 * 1024 * 1024 + 1, len(copied_contents))
    def test_slo_copy_the_manifest(self):
        """COPY with multipart-manifest=get duplicates the manifest object
        itself rather than the concatenated segment data."""
        source = self.env.container.file("manifest-abcde")
        source.initialize(parms={'multipart-manifest': 'get'})
        source_contents = source.read(parms={'multipart-manifest': 'get'})
        source_json = json.loads(source_contents)
        manifest_etag = md5(source_contents, usedforsecurity=False).hexdigest()
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            manifest_etag = '"%s"' % manifest_etag
        self.assertEqual(manifest_etag, source.etag)
        source.initialize()
        self.assertEqual('application/octet-stream', source.content_type)
        # without the query param the object reports its SLO etag, which
        # differs from the etag of the manifest body itself
        self.assertNotEqual(manifest_etag, source.etag)
        slo_etag = source.etag
        self.assertTrue(source.copy(self.env.container.name,
                                    "copied-abcde-manifest-only",
                                    parms={'multipart-manifest': 'get'}))
        copied = self.env.container.file("copied-abcde-manifest-only")
        copied.initialize(parms={'multipart-manifest': 'get'})
        copied_contents = copied.read(parms={'multipart-manifest': 'get'})
        try:
            copied_json = json.loads(copied_contents)
        except ValueError:
            self.fail("COPY didn't copy the manifest (invalid json on GET)")
        self.assertEqual(source_json, copied_json)
        self.assertEqual(manifest_etag, copied.etag)
        copied.initialize()
        self.assertEqual('application/octet-stream', copied.content_type)
        self.assertEqual(slo_etag, copied.etag)
        # verify the listing metadata
        listing = self.env.container.files(parms={'format': 'json'})
        names = {}
        for f_dict in listing:
            if f_dict['name'] in ('manifest-abcde',
                                  'copied-abcde-manifest-only'):
                names[f_dict['name']] = f_dict
        self.assertIn('manifest-abcde', names)
        actual = names['manifest-abcde']
        self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
        self.assertEqual('application/octet-stream', actual['content_type'])
        self.assertEqual(normalize_etag(manifest_etag), actual['hash'])
        self.assertEqual(slo_etag, actual['slo_etag'])
        self.assertIn('copied-abcde-manifest-only', names)
        actual = names['copied-abcde-manifest-only']
        self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
        self.assertEqual('application/octet-stream', actual['content_type'])
        self.assertEqual(normalize_etag(manifest_etag), actual['hash'])
        self.assertEqual(slo_etag, actual['slo_etag'])
        # Test copy manifest including data segments
        source = self.env.container.file("mixed-object-data-manifest")
        source_contents = source.read(parms={'multipart-manifest': 'get'})
        source_json = json.loads(source_contents)
        source.copy(
            self.env.container.name,
            "copied-mixed-object-data-manifest",
            parms={'multipart-manifest': 'get'})
        copied = self.env.container.file("copied-mixed-object-data-manifest")
        copied_contents = copied.read(parms={'multipart-manifest': 'get'})
        try:
            copied_json = json.loads(copied_contents)
        except ValueError:
            self.fail("COPY didn't copy the manifest (invalid json on GET)")
        self.assertEqual(source_contents, copied_contents)
        # inline 'data' segments survive the copy verbatim
        self.assertEqual(copied_json[0], {
            'data': base64.b64encode(b'APRE' * 8).decode('ascii')})
    def test_slo_copy_the_manifest_updating_metadata(self):
        """COPY with multipart-manifest=get can simultaneously update the
        content-type and user metadata of the copied manifest."""
        source = self.env.container.file("manifest-abcde")
        source.content_type = 'application/octet-stream'
        source.sync_metadata({'test': 'original'})
        source.initialize(parms={'multipart-manifest': 'get'})
        source_contents = source.read(parms={'multipart-manifest': 'get'})
        source_json = json.loads(source_contents)
        manifest_etag = md5(source_contents, usedforsecurity=False).hexdigest()
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            manifest_etag = '"%s"' % manifest_etag
        self.assertEqual(manifest_etag, source.etag)
        source.initialize()
        self.assertEqual('application/octet-stream', source.content_type)
        self.assertNotEqual(manifest_etag, source.etag)
        slo_etag = source.etag
        self.assertEqual(source.metadata['test'], 'original')
        # copy the manifest, overriding content-type and user metadata
        self.assertTrue(
            source.copy(self.env.container.name, "copied-abcde-manifest-only",
                        parms={'multipart-manifest': 'get'},
                        hdrs={'Content-Type': 'image/jpeg',
                              'X-Object-Meta-Test': 'updated'}))
        copied = self.env.container.file("copied-abcde-manifest-only")
        copied.initialize(parms={'multipart-manifest': 'get'})
        copied_contents = copied.read(parms={'multipart-manifest': 'get'})
        try:
            copied_json = json.loads(copied_contents)
        except ValueError:
            self.fail("COPY didn't copy the manifest (invalid json on GET)")
        self.assertEqual(source_json, copied_json)
        self.assertEqual(manifest_etag, copied.etag)
        copied.initialize()
        self.assertEqual('image/jpeg', copied.content_type)
        self.assertEqual(slo_etag, copied.etag)
        self.assertEqual(copied.metadata['test'], 'updated')
        # verify the listing metadata
        listing = self.env.container.files(parms={'format': 'json'})
        names = {}
        for f_dict in listing:
            if f_dict['name'] in ('manifest-abcde',
                                  'copied-abcde-manifest-only'):
                names[f_dict['name']] = f_dict
        self.assertIn('manifest-abcde', names)
        actual = names['manifest-abcde']
        self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
        self.assertEqual('application/octet-stream', actual['content_type'])
        # the container listing should have the etag of the manifest contents
        self.assertEqual(normalize_etag(manifest_etag), actual['hash'])
        self.assertEqual(slo_etag, actual['slo_etag'])
        self.assertIn('copied-abcde-manifest-only', names)
        actual = names['copied-abcde-manifest-only']
        self.assertEqual(4 * 1024 * 1024 + 1, actual['bytes'])
        self.assertEqual('image/jpeg', actual['content_type'])
        self.assertEqual(normalize_etag(manifest_etag), actual['hash'])
        self.assertEqual(slo_etag, actual['slo_etag'])
    def test_slo_copy_the_manifest_account(self):
        """Cross-account manifest COPY needs readable segments; a usable
        copy also needs matching segments in the destination account."""
        if tf.skip2:
            raise SkipTest('Account2 not set')
        acct = urllib.parse.unquote(self.env.conn.account_name)
        # same account
        file_item = self.env.container.file("manifest-abcde")
        file_item.copy_account(acct,
                               self.env.container.name,
                               "copied-abcde-manifest-only",
                               parms={'multipart-manifest': 'get'})
        copied = self.env.container.file("copied-abcde-manifest-only")
        copied_contents = copied.read(parms={'multipart-manifest': 'get'})
        try:
            json.loads(copied_contents)
        except ValueError:
            self.fail("COPY didn't copy the manifest (invalid json on GET)")
        # different account
        acct = urllib.parse.unquote(self.env.conn2.account_name)
        dest_cont = self.env.account2.container(Utils.create_name())
        self.assertTrue(dest_cont.create(hdrs={
            'X-Container-Write': self.env.conn.user_acl
        }))
        # manifest copy will fail because there is no read access to segments
        # in destination account
        self.assertRaises(ResponseError, file_item.copy_account,
                          acct, dest_cont, "copied-abcde-manifest-only",
                          parms={'multipart-manifest': 'get'})
        self.assertEqual(400, file_item.conn.response.status)
        resp_body = file_item.conn.response.read()
        # one 403 per segment (manifest-abcde references five segments)
        self.assertEqual(5, resp_body.count(b'403 Forbidden'),
                         'Unexpected response body %r' % resp_body)
        # create segments container in account2 with read access for account1
        segs_container = self.env.account2.container(self.env.container.name)
        self.assertTrue(segs_container.create(hdrs={
            'X-Container-Read': self.env.conn.user_acl
        }))
        # manifest copy will still fail because there are no segments in
        # destination account
        self.assertRaises(ResponseError, file_item.copy_account,
                          acct, dest_cont, "copied-abcde-manifest-only",
                          parms={'multipart-manifest': 'get'})
        self.assertEqual(400, file_item.conn.response.status)
        resp_body = file_item.conn.response.read()
        self.assertEqual(5, resp_body.count(b'404 Not Found'),
                         'Unexpected response body %r' % resp_body)
        # create segments in account2 container with same name as in account1,
        # manifest copy now succeeds
        self.env.create_segments(segs_container)
        self.assertTrue(file_item.copy_account(
            acct, dest_cont, "copied-abcde-manifest-only",
            parms={'multipart-manifest': 'get'}))
        copied = dest_cont.file("copied-abcde-manifest-only")
        copied_contents = copied.read(parms={'multipart-manifest': 'get'})
        try:
            json.loads(copied_contents)
        except ValueError:
            self.fail("COPY didn't copy the manifest (invalid json on GET)")
    def test_slo_put_heartbeating(self):
        """Manifest PUT with heartbeat=on returns an early 202 and a chunked
        body: leading whitespace (keep-alive), then the real outcome in
        text/plain or JSON depending on the Accept header."""
        if 'yield_frequency' not in cluster_info['slo']:
            # old swift?
            raise SkipTest('Swift does not seem to support heartbeating')

        def do_put(headers=None, include_error=False):
            # PUT the five-segment manifest (optionally with one bogus
            # segment) and return the response body after the heartbeat
            # whitespace prologue
            file_item = self.env.container.file("manifest-heartbeat")
            seg_info = self.env.seg_info
            manifest_data = [seg_info['seg_a'], seg_info['seg_b'],
                             seg_info['seg_c'], seg_info['seg_d'],
                             seg_info['seg_e']]
            if include_error:
                manifest_data.append({'path': 'non-existent/segment'})
            resp = file_item.write(
                json.dumps(manifest_data).encode('ascii'),
                parms={'multipart-manifest': 'put', 'heartbeat': 'on'},
                hdrs=headers, return_resp=True)
            self.assertEqual(resp.status, 202)
            self.assertTrue(resp.chunked)
            body_lines = resp.body.split(b'\n', 2)
            self.assertFalse(body_lines[0].strip())  # all whitespace
            self.assertEqual(b'\r', body_lines[1])
            return body_lines[2]

        # plain-text success report (default Accept)
        body_lines = do_put().decode('utf8').split('\n')
        self.assertIn('Response Status: 201 Created', body_lines)
        self.assertIn('Etag', [line.split(':', 1)[0] for line in body_lines])
        self.assertIn('Last Modified', [line.split(':', 1)[0]
                                        for line in body_lines])
        # explicit text/plain gives the same shape
        body_lines = do_put(
            {'Accept': 'text/plain'}).decode('utf8').split('\n')
        self.assertIn('Response Status: 201 Created', body_lines)
        self.assertIn('Etag', [line.split(':', 1)[0] for line in body_lines])
        self.assertIn('Last Modified', [line.split(':', 1)[0]
                                        for line in body_lines])
        # JSON success report
        body = do_put({'Accept': 'application/json'})
        try:
            resp = json.loads(body)
        except ValueError:
            self.fail('Expected JSON, got %r' % body)
        self.assertIn('Etag', resp)
        del resp['Etag']
        self.assertIn('Last Modified', resp)
        del resp['Last Modified']
        self.assertEqual(resp, {
            'Response Status': '201 Created',
            'Response Body': '',
            'Errors': [],
        })
        # plain-text failure report: per-segment errors, no Etag
        body_lines = do_put(include_error=True).decode('utf8').split('\n')
        self.assertIn('Response Status: 400 Bad Request', body_lines)
        self.assertIn('Response Body: Bad Request', body_lines)
        self.assertNotIn('Etag', [line.split(':', 1)[0]
                                  for line in body_lines])
        self.assertNotIn('Last Modified', [line.split(':', 1)[0]
                                           for line in body_lines])
        self.assertEqual(body_lines[-3:], [
            'Errors:',
            'non-existent/segment, 404 Not Found',
            '',
        ])
        # JSON failure report
        body = do_put({'Accept': 'application/json'}, include_error=True)
        try:
            resp = json.loads(body)
        except ValueError:
            self.fail('Expected JSON, got %r' % body)
        self.assertNotIn('Etag', resp)
        self.assertNotIn('Last Modified', resp)
        self.assertEqual(resp, {
            'Response Status': '400 Bad Request',
            'Response Body': 'Bad Request\nThe server could not comply with '
                             'the request since it is either malformed or '
                             'otherwise incorrect.',
            'Errors': [
                ['non-existent/segment', '404 Not Found'],
            ],
        })
        # bad client Etag surfaces as 422 in the heartbeat report
        body = do_put({'Accept': 'application/json', 'ETag': 'bad etag'})
        try:
            resp = json.loads(body)
        except ValueError:
            self.fail('Expected JSON, got %r' % body)
        self.assertNotIn('Etag', resp)
        self.assertNotIn('Last Modified', resp)
        self.assertEqual(resp, {
            'Response Status': '422 Unprocessable Entity',
            'Response Body': 'Unprocessable Entity\nUnable to process the '
                             'contained instructions',
            'Errors': [],
        })
def _make_manifest(self):
file_item = self.env.container.file("manifest-post")
seg_info = self.env.seg_info
file_item.write(
json.dumps([seg_info['seg_a'], seg_info['seg_b'],
seg_info['seg_c'], seg_info['seg_d'],
seg_info['seg_e']]).encode('ascii'),
parms={'multipart-manifest': 'put'})
return file_item
    def test_slo_post_the_manifest_metadata_update(self):
        """POSTing user metadata leaves the object a valid SLO manifest."""
        file_item = self._make_manifest()
        # sanity check, check the object is an SLO manifest
        file_item.info()
        file_item.header_fields([('slo', 'x-static-large-object')])
        # POST a user metadata (i.e. x-object-meta-post)
        file_item.sync_metadata({'post': 'update'})
        updated = self.env.container.file("manifest-post")
        updated.info()
        updated.header_fields([('user-meta', 'x-object-meta-post')])  # sanity
        updated.header_fields([('slo', 'x-static-large-object')])
        updated_contents = updated.read(parms={'multipart-manifest': 'get'})
        try:
            json.loads(updated_contents)
        except ValueError:
            self.fail("Unexpected content on GET, expected a json body")
    def test_slo_post_the_manifest_metadata_update_with_qs(self):
        """POST ignores any multipart-manifest query value; the object stays
        a valid SLO manifest with updated user metadata."""
        # multipart-manifest query should be ignored on post
        for verb in ('put', 'get', 'delete'):
            file_item = self._make_manifest()
            # sanity check, check the object is an SLO manifest
            file_item.info()
            file_item.header_fields([('slo', 'x-static-large-object')])
            # POST a user metadata (i.e. x-object-meta-post)
            file_item.sync_metadata(metadata={'post': 'update'},
                                    parms={'multipart-manifest': verb})
            updated = self.env.container.file("manifest-post")
            updated.info()
            updated.header_fields(
                [('user-meta', 'x-object-meta-post')])  # sanity
            updated.header_fields([('slo', 'x-static-large-object')])
            updated_contents = updated.read(
                parms={'multipart-manifest': 'get'})
            try:
                json.loads(updated_contents)
            except ValueError:
                self.fail(
                    "Unexpected content on GET, expected a json body")
def test_slo_get_the_manifest(self):
manifest = self.env.container.file("manifest-abcde")
got_body = manifest.read(parms={'multipart-manifest': 'get'})
self.assertEqual('application/json; charset=utf-8',
manifest.content_type)
try:
json.loads(got_body)
except ValueError:
self.fail("GET with multipart-manifest=get got invalid json")
    def test_slo_get_the_manifest_with_details_from_server(self):
        """multipart-manifest=get returns server-style entries carrying
        name/hash/bytes for each referenced segment."""
        manifest = self.env.container.file("manifest-db")
        got_body = manifest.read(parms={'multipart-manifest': 'get'})
        self.assertEqual('application/json; charset=utf-8',
                         manifest.content_type)
        try:
            value = json.loads(got_body)
        except ValueError:
            self.fail("GET with multipart-manifest=get got invalid json")
        # manifest-db lists seg_d then seg_b
        self.assertEqual(len(value), 2)
        self.assertEqual(value[0]['bytes'], 1024 * 1024)
        self.assertEqual(
            value[0]['hash'],
            md5(b'd' * 1024 * 1024, usedforsecurity=False).hexdigest())
        expected_name = '/%s/seg_d' % self.env.container.name
        if six.PY2:
            expected_name = expected_name.decode("utf-8")
        self.assertEqual(value[0]['name'], expected_name)
        self.assertEqual(value[1]['bytes'], 1024 * 1024)
        self.assertEqual(
            value[1]['hash'],
            md5(b'b' * 1024 * 1024, usedforsecurity=False).hexdigest())
        expected_name = '/%s/seg_b' % self.env.container.name
        if six.PY2:
            expected_name = expected_name.decode("utf-8")
        self.assertEqual(value[1]['name'], expected_name)
    def test_slo_get_raw_the_manifest_with_details_from_server(self):
        """format=raw returns client-style entries (size_bytes/etag/path)
        that can be PUT back unmodified as a new manifest."""
        manifest = self.env.container.file("manifest-db")
        got_body = manifest.read(parms={'multipart-manifest': 'get',
                                        'format': 'raw'})
        self.assert_etag(
            md5(got_body, usedforsecurity=False).hexdigest())
        # raw format should have the actual manifest object content-type
        self.assertEqual('application/octet-stream', manifest.content_type)
        try:
            value = json.loads(got_body)
        except ValueError:
            msg = "GET with multipart-manifest=get&format=raw got invalid json"
            self.fail(msg)
        # raw entries use the client-side key names only
        self.assertEqual(
            set(value[0].keys()), set(('size_bytes', 'etag', 'path')))
        self.assertEqual(len(value), 2)
        self.assertEqual(value[0]['size_bytes'], 1024 * 1024)
        self.assertEqual(
            value[0]['etag'],
            md5(b'd' * 1024 * 1024, usedforsecurity=False).hexdigest())
        expected_name = '/%s/seg_d' % self.env.container.name
        if six.PY2:
            expected_name = expected_name.decode("utf-8")
        self.assertEqual(value[0]['path'], expected_name)
        self.assertEqual(value[1]['size_bytes'], 1024 * 1024)
        self.assertEqual(
            value[1]['etag'],
            md5(b'b' * 1024 * 1024, usedforsecurity=False).hexdigest())
        expected_name = '/%s/seg_b' % self.env.container.name
        if six.PY2:
            expected_name = expected_name.decode("utf-8")
        self.assertEqual(value[1]['path'], expected_name)
        # the raw body round-trips through a manifest PUT
        file_item = self.env.container.file("manifest-from-get-raw")
        file_item.write(got_body, parms={'multipart-manifest': 'put'})
        file_contents = file_item.read()
        self.assertEqual(2 * 1024 * 1024, len(file_contents))
def test_slo_head_the_manifest(self):
manifest = self.env.container.file("manifest-abcde")
got_info = manifest.info(parms={'multipart-manifest': 'get'})
self.assertEqual('application/json; charset=utf-8',
got_info['content_type'])
def test_slo_if_match_get(self):
manifest = self.env.container.file("manifest-abcde")
etag = manifest.info()['etag']
self.assertRaises(ResponseError, manifest.read,
hdrs={'If-Match': 'not-%s' % etag})
self.assert_status(412)
manifest.read(hdrs={'If-Match': etag})
self.assert_status(200)
    def test_slo_if_none_match_put(self):
        """Manifest PUT supports If-None-Match: '*' only; any other value
        is a 400, while '*' gives 201 then 412 once the object exists."""
        file_item = self.env.container.file("manifest-if-none-match")
        manifest = json.dumps([{
            'size_bytes': 1024 * 1024,
            'etag': None,
            'path': '/%s/%s' % (self.env.container.name, 'seg_a')}])
        # a non-'*' If-None-Match value is rejected outright
        self.assertRaises(ResponseError, file_item.write,
                          manifest.encode('ascii'),
                          parms={'multipart-manifest': 'put'},
                          hdrs={'If-None-Match': '"not-star"'})
        self.assert_status(400)
        # '*' succeeds while the object does not yet exist
        file_item.write(manifest.encode('ascii'),
                        parms={'multipart-manifest': 'put'},
                        hdrs={'If-None-Match': '*'})
        self.assert_status(201)
        # ...and is a 412 precondition failure once it does
        self.assertRaises(ResponseError, file_item.write,
                          manifest.encode('ascii'),
                          parms={'multipart-manifest': 'put'},
                          hdrs={'If-None-Match': '*'})
        self.assert_status(412)
def test_slo_if_none_match_get(self):
manifest = self.env.container.file("manifest-abcde")
etag = manifest.info()['etag']
self.assertRaises(ResponseError, manifest.read,
hdrs={'If-None-Match': etag})
self.assert_status(304)
manifest.read(hdrs={'If-None-Match': "not-%s" % etag})
self.assert_status(200)
def test_slo_if_match_head(self):
manifest = self.env.container.file("manifest-abcde")
etag = manifest.info()['etag']
self.assertRaises(ResponseError, manifest.info,
hdrs={'If-Match': 'not-%s' % etag})
self.assert_status(412)
manifest.info(hdrs={'If-Match': etag})
self.assert_status(200)
def test_slo_if_none_match_head(self):
manifest = self.env.container.file("manifest-abcde")
etag = manifest.info()['etag']
self.assertRaises(ResponseError, manifest.info,
hdrs={'If-None-Match': etag})
self.assert_status(304)
manifest.info(hdrs={'If-None-Match': "not-%s" % etag})
self.assert_status(200)
    def test_slo_referer_on_segment_container(self):
        """Referer-based read ACLs must be satisfied on both the manifest's
        container and the segments' container before a full read works."""
        if tf.skip3:
            raise SkipTest('Username3 not set')
        # First the account2 (test3) should fail
        headers = {'X-Auth-Token': self.env.conn3.storage_token,
                   'Referer': 'http://blah.example.com'}
        slo_file = self.env.container2.file('manifest-abcde')
        self.assertRaises(ResponseError, slo_file.read,
                          hdrs=headers)
        self.assert_status(403)
        # Now set the referer on the slo container only
        referer_metadata = {'X-Container-Read': '.r:*.example.com,.rlistings'}
        self.env.container2.update_metadata(referer_metadata)
        # manifest container is readable, but segment reads still fail -> 409
        self.assertRaises(ResponseError, slo_file.read,
                          hdrs=headers)
        self.assert_status(409)
        # Finally set the referer on the segment container
        self.env.container.update_metadata(referer_metadata)
        contents = slo_file.read(hdrs=headers)
        # spot-check bytes at segment boundaries of the a..e concatenation
        self.assertEqual(4 * 1024 * 1024 + 1, len(contents))
        self.assertEqual(b'a', contents[:1])
        self.assertEqual(b'a', contents[1024 * 1024 - 1:1024 * 1024])
        self.assertEqual(b'b', contents[1024 * 1024:1024 * 1024 + 1])
        self.assertEqual(b'd', contents[-2:-1])
        self.assertEqual(b'e', contents[-1:])
    def test_slo_data_segments(self):
        """Ranged read across manifests that mix inline 'data' segments with
        object segments; verify the exact byte layout via run-length
        grouping."""
        # len('APRE' * 8) == 32
        # len('APOS' * 16) == 64
        # len('BPOS' * 32) == 128
        # len('CPRE' * 64) == 256
        # len(a_pre + seg_a + post_a) == 32 + 1024 ** 2 + 64
        # len(seg_b + post_b) == 1024 ** 2 + 128
        # len(c_pre + seg_c) == 256 + 1024 ** 2
        # len(total) == 3146208
        for file_name in ("mixed-object-data-manifest",
                          "nested-data-manifest"):
            file_item = self.env.container.file(file_name)
            # offset=28 starts inside the trailing 'APRE' of the prologue
            file_contents = file_item.read(size=3 * 1024 ** 2 + 456,
                                           offset=28)
            self.assertEqual([
                (b'A', 1),
                (b'P', 1),
                (b'R', 1),
                (b'E', 1),
                (b'a', 1024 * 1024),
            ] + [
                (b'A', 1),
                (b'P', 1),
                (b'O', 1),
                (b'S', 1),
            ] * 16 + [
                (b'b', 1024 * 1024),
            ] + [
                (b'B', 1),
                (b'P', 1),
                (b'O', 1),
                (b'S', 1),
            ] * 32 + [
                (b'C', 1),
                (b'P', 1),
                (b'R', 1),
                (b'E', 1),
            ] * 64 + [
                (b'c', 1024 * 1024),
            ] + [
                (b'C', 1),
                (b'P', 1),
                (b'O', 1),
                (b'S', 1),
            ], group_file_contents(file_contents))
class TestSloUTF8(Base2, TestSlo):
    # NOTE(review): re-runs the whole TestSlo suite via the Base2 mixin —
    # presumably with UTF-8 names per the class name; confirm in Base2.
    pass
| swift-master | test/functional/test_slo.py |
#!/usr/bin/python
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import json
import unittest
from uuid import uuid4
import time
from unittest import SkipTest
from xml.dom import minidom
import six
from six.moves import range
from test.functional import check_response, retry, requires_acls, \
requires_policies, requires_bulk
import test.functional as tf
from swift.common.utils import md5
def setUpModule():
    # Module-level fixture: initialize functional-test package state.
    tf.setup_package()
def tearDownModule():
    # Module-level fixture: undo whatever setup_package() created.
    tf.teardown_package()
class TestObject(unittest.TestCase):
    def setUp(self):
        """Create a same-named container in both accounts and one object."""
        if tf.skip or tf.skip2:
            raise SkipTest
        if tf.in_process:
            tf.skip_if_no_xattrs()
        self.container = uuid4().hex
        self.containers = []
        self._create_container(self.container)
        self._create_container(self.container, use_account=2)
        self.obj = uuid4().hex

        def put(url, token, parsed, conn):
            # PUT a small object with body 'test' into account1's container
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, self.container, self.obj), 'test',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
    def _create_container(self, name=None, headers=None, use_account=1):
        """Create a container (random name if not given) and return its name.

        The name is recorded in self.containers so tearDown can clean up.
        """
        if not name:
            name = uuid4().hex
        self.containers.append(name)
        headers = headers or {}

        def put(url, token, parsed, conn, name):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('PUT', parsed.path + '/' + name, '',
                         new_headers)
            return check_response(conn)
        resp = retry(put, name, use_account=use_account)
        resp.read()
        # 202 means the container already existed
        self.assertIn(resp.status, (201, 202))
        # With keystoneauth we need the accounts to have had the project
        # domain id persisted as sysmeta prior to testing ACLs. This may
        # not be the case if, for example, the account was created using
        # a request with reseller_admin role, when project domain id may
        # not have been known. So we ensure that the project domain id is
        # in sysmeta by making a POST to the accounts using an admin role.

        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(post, use_account=use_account)
        resp.read()
        self.assertEqual(resp.status, 204)
        return name
    def tearDown(self):
        """Delete every object in the containers this test created, then
        delete the containers themselves."""
        if tf.skip:
            raise SkipTest

        # get list of objects in container
        def get(url, token, parsed, conn, container):
            conn.request(
                'GET', parsed.path + '/' + container + '?format=json', '',
                {'X-Auth-Token': token})
            return check_response(conn)

        # delete an object
        def delete(url, token, parsed, conn, container, obj):
            if six.PY2:
                obj_name = obj['name'].encode('utf8')
            else:
                obj_name = obj['name']
            path = '/'.join([parsed.path, container, obj_name])
            conn.request('DELETE', path, '', {'X-Auth-Token': token})
            return check_response(conn)

        # loop until the listing comes back empty (or the container is gone)
        for container in self.containers:
            while True:
                resp = retry(get, container)
                body = resp.read()
                if resp.status == 404:
                    break
                self.assertEqual(resp.status // 100, 2, resp.status)
                objs = json.loads(body)
                if not objs:
                    break
                for obj in objs:
                    resp = retry(delete, container, obj)
                    resp.read()
                    # 404 is acceptable: the object may already be gone
                    self.assertIn(resp.status, (204, 404))

        # delete the container
        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', parsed.path + '/' + name, '',
                         {'X-Auth-Token': token})
            return check_response(conn)

        for container in self.containers:
            resp = retry(delete, container)
            resp.read()
            self.assertIn(resp.status, (204, 404))
    def test_metadata(self):
        """Exercise object user metadata via PUT and POST.

        Verifies: PUT replaces the metadata set it sends, POST updates user
        metadata, sysmeta/transient-sysmeta request headers are never
        reflected back on GET, and non-ASCII values round-trip UTF-8
        encoded.
        """
        obj = 'test_metadata'
        # req_metadata is rebound before each phase; the closures below
        # read it at call time
        req_metadata = {}

        def put(url, token, parsed, conn):
            headers = {'X-Auth-Token': token}
            headers.update(req_metadata)
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, self.container, obj
            ), '', headers)
            return check_response(conn)

        def get(url, token, parsed, conn):
            conn.request(
                'GET',
                '%s/%s/%s' % (parsed.path, self.container, obj),
                '',
                {'X-Auth-Token': token})
            return check_response(conn)

        def post(url, token, parsed, conn):
            headers = {'X-Auth-Token': token}
            headers.update(req_metadata)
            conn.request('POST', '%s/%s/%s' % (
                parsed.path, self.container, obj
            ), '', headers)
            return check_response(conn)

        def metadata(resp):
            # collect every response header with 'meta' in its name
            metadata = {}
            for k, v in resp.headers.items():
                if 'meta' in k.lower():
                    metadata[k] = v
            return metadata

        # empty put
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get)
        self.assertEqual(b'', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {})
        # empty post
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 202)
        resp = retry(get)
        self.assertEqual(b'', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {})
        # metadata put: header names come back title-cased
        req_metadata = {
            'x-object-meta-Color': 'blUe',
            'X-Object-Meta-food': 'PizZa',
        }
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get)
        self.assertEqual(b'', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Color': 'blUe',
            'X-Object-Meta-Food': 'PizZa',
        })
        # metadata post: replaces the full user-metadata set
        req_metadata = {'X-Object-Meta-color': 'oraNge'}
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 202)
        resp = retry(get)
        self.assertEqual(b'', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Color': 'oraNge'
        })
        # sysmeta put: only the plain user meta is reflected back
        req_metadata = {
            'X-Object-Meta-Color': 'Red',
            'X-Object-Sysmeta-Color': 'Green',
            'X-Object-Transient-Sysmeta-Color': 'Blue',
        }
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get)
        self.assertEqual(b'', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Color': 'Red',
        })
        # sysmeta post: likewise filtered from the response
        req_metadata = {
            'X-Object-Meta-Food': 'Burger',
            'X-Object-Meta-Animal': 'Cat',
            'X-Object-Sysmeta-Animal': 'Cow',
            'X-Object-Transient-Sysmeta-Food': 'Burger',
        }
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 202)
        resp = retry(get)
        self.assertEqual(b'', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Food': 'Burger',
            'X-Object-Meta-Animal': 'Cat',
        })
        # non-ascii put: value comes back UTF-8 encoded
        req_metadata = {
            'X-Object-Meta-Foo': u'B\u00e2r',
        }
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(get)
        self.assertEqual(b'', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Foo': 'B\xc3\xa2r',
        })
        # non-ascii post
        req_metadata = {
            'X-Object-Meta-Foo': u'B\u00e5z',
        }
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 202)
        resp = retry(get)
        self.assertEqual(b'', resp.read())
        self.assertEqual(resp.status, 200)
        self.assertEqual(metadata(resp), {
            'X-Object-Meta-Foo': 'B\xc3\xa5z',
        })
def test_if_none_match(self):
def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s/%s' % (
parsed.path, self.container, 'if_none_match_test'), '',
{'X-Auth-Token': token})
return check_response(conn)
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, 'if_none_match_test'), '',
{'X-Auth-Token': token,
'Content-Length': '0',
'If-None-Match': '*'})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 412)
resp = retry(delete)
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, 'if_none_match_test'), '',
{'X-Auth-Token': token,
'Content-Length': '0',
'If-None-Match': 'somethingelse'})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 400)
def test_too_small_x_timestamp(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'too_small_x_timestamp'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Timestamp': '-1'})
return check_response(conn)
def head(url, token, parsed, conn):
conn.request('HEAD', '%s/%s/%s' % (parsed.path, self.container,
'too_small_x_timestamp'),
'', {'X-Auth-Token': token,
'Content-Length': '0'})
return check_response(conn)
ts_before = time.time()
time.sleep(0.05)
resp = retry(put)
body = resp.read()
time.sleep(0.05)
ts_after = time.time()
if resp.status == 400:
# shunt_inbound_x_timestamp must be false
self.assertIn(
'X-Timestamp should be a UNIX timestamp float value', body)
else:
self.assertEqual(resp.status, 201)
self.assertEqual(body, b'')
resp = retry(head)
resp.read()
self.assertGreater(float(resp.headers['x-timestamp']), ts_before)
self.assertLess(float(resp.headers['x-timestamp']), ts_after)
def test_too_big_x_timestamp(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'too_big_x_timestamp'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Timestamp': '99999999999.9999999999'})
return check_response(conn)
def head(url, token, parsed, conn):
conn.request('HEAD', '%s/%s/%s' % (parsed.path, self.container,
'too_big_x_timestamp'),
'', {'X-Auth-Token': token,
'Content-Length': '0'})
return check_response(conn)
ts_before = time.time()
time.sleep(0.05)
resp = retry(put)
body = resp.read()
time.sleep(0.05)
ts_after = time.time()
if resp.status == 400:
# shunt_inbound_x_timestamp must be false
self.assertIn(
'X-Timestamp should be a UNIX timestamp float value', body)
else:
self.assertEqual(resp.status, 201)
self.assertEqual(body, b'')
resp = retry(head)
resp.read()
self.assertGreater(float(resp.headers['x-timestamp']), ts_before)
self.assertLess(float(resp.headers['x-timestamp']), ts_after)
def test_x_delete_after(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'x_delete_after'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-After': '2'})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
def get(url, token, parsed, conn):
conn.request(
'GET',
'%s/%s/%s' % (parsed.path, self.container, 'x_delete_after'),
'',
{'X-Auth-Token': token})
return check_response(conn)
resp = retry(get)
resp.read()
count = 0
while resp.status == 200 and count < 10:
resp = retry(get)
resp.read()
count += 1
time.sleep(0.5)
self.assertEqual(resp.status, 404)
# To avoid an error when the object deletion in tearDown(),
# the object is added again.
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
    def test_x_delete_at(self):
        """An object PUT with an absolute X-Delete-At a few seconds ahead
        expires (404), then is re-created so tearDown() can delete it."""
        def put(url, token, parsed, conn):
            # Expiry time computed fresh on every (re)try: local wall clock
            # plus 3 seconds, whole-second resolution.
            dt = datetime.datetime.now()
            epoch = time.mktime(dt.timetuple())
            delete_time = str(int(epoch) + 3)
            conn.request(
                'PUT',
                '%s/%s/%s' % (parsed.path, self.container, 'x_delete_at'),
                '',
                {'X-Auth-Token': token,
                 'Content-Length': '0',
                 'X-Delete-At': delete_time})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        def get(url, token, parsed, conn):
            conn.request(
                'GET',
                '%s/%s/%s' % (parsed.path, self.container, 'x_delete_at'),
                '',
                {'X-Auth-Token': token})
            return check_response(conn)
        # Poll for up to ~10 seconds waiting for the object to expire.
        resp = retry(get)
        resp.read()
        count = 0
        while resp.status == 200 and count < 10:
            resp = retry(get)
            resp.read()
            count += 1
            time.sleep(1)
        self.assertEqual(resp.status, 404)
        # To avoid an error when the object deletion in tearDown(),
        # the object is added again.
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
def test_non_integer_x_delete_after(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'non_integer_x_delete_after'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-After': '*'})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEqual(resp.status, 400)
self.assertEqual(body, b'Non-integer X-Delete-After')
def test_non_integer_x_delete_at(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'non_integer_x_delete_at'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-At': '*'})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEqual(resp.status, 400)
self.assertEqual(body, b'Non-integer X-Delete-At')
def test_x_delete_at_in_the_past(self):
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/%s' % (parsed.path, self.container,
'x_delete_at_in_the_past'),
'', {'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-At': '0'})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEqual(resp.status, 400)
self.assertEqual(body, b'X-Delete-At in past')
def test_x_delete_at_in_the_far_future(self):
def put(url, token, parsed, conn):
path = '%s/%s/%s' % (parsed.path, self.container,
'x_delete_at_in_the_far_future')
conn.request('PUT', path, '', {
'X-Auth-Token': token,
'Content-Length': '0',
'X-Delete-At': '1' * 100})
return check_response(conn)
resp = retry(put)
body = resp.read()
self.assertEqual(resp.status, 201, 'Got %s: %s' % (resp.status, body))
def head(url, token, parsed, conn):
path = '%s/%s/%s' % (parsed.path, self.container,
'x_delete_at_in_the_far_future')
conn.request('HEAD', path, '', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(head)
body = resp.read()
self.assertEqual(resp.status, 200, 'Got %s: %s' % (resp.status, body))
self.assertEqual(resp.headers['x-delete-at'], '9' * 10)
    def test_copy_object(self):
        """Server-side copy within one container: PUT + X-Copy-From,
        COPY + Destination, and COPY with a Range header."""
        if tf.skip:
            raise SkipTest
        source = '%s/%s' % (self.container, self.obj)
        dest = '%s/%s' % (self.container, 'test_copy')
        # get contents of source
        def get_source(url, token, parsed, conn):
            conn.request('GET',
                         '%s/%s' % (parsed.path, source),
                         '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get_source)
        source_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(source_contents, b'test')
        # copy source to dest with X-Copy-From
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s' % (parsed.path, dest), '',
                         {'X-Auth-Token': token,
                          'Content-Length': '0',
                          'X-Copy-From': source})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # contents of dest should be the same as source
        def get_dest(url, token, parsed, conn):
            conn.request('GET',
                         '%s/%s' % (parsed.path, dest),
                         '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get_dest)
        dest_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(dest_contents, source_contents)
        # delete the copy
        def delete(url, token, parsed, conn):
            conn.request('DELETE', '%s/%s' % (parsed.path, dest), '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # verify dest does not exist
        resp = retry(get_dest)
        resp.read()
        self.assertEqual(resp.status, 404)
        # copy source to dest with COPY
        def copy(url, token, parsed, conn):
            conn.request('COPY', '%s/%s' % (parsed.path, source), '',
                         {'X-Auth-Token': token,
                          'Destination': dest})
            return check_response(conn)
        resp = retry(copy)
        resp.read()
        self.assertEqual(resp.status, 201)
        # contents of dest should be the same as source
        resp = retry(get_dest)
        dest_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(dest_contents, source_contents)
        # copy source to dest with COPY and range
        def copy(url, token, parsed, conn):
            conn.request('COPY', '%s/%s' % (parsed.path, source), '',
                         {'X-Auth-Token': token,
                          'Destination': dest,
                          'Range': 'bytes=1-2'})
            return check_response(conn)
        resp = retry(copy)
        resp.read()
        self.assertEqual(resp.status, 201)
        # a ranged COPY writes only the selected bytes (here b'es')
        resp = retry(get_dest)
        dest_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(dest_contents, source_contents[1:3])
        # delete the copy
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
    def test_copy_between_accounts(self):
        """Cross-account server-side copy: X-Copy-From-Account on PUT and
        Destination-Account on COPY, gated by container read/write ACLs."""
        if tf.skip2:
            raise SkipTest
        source = '%s/%s' % (self.container, self.obj)
        dest = '%s/%s' % (self.container, 'test_copy')
        # get contents of source
        def get_source(url, token, parsed, conn):
            conn.request('GET',
                         '%s/%s' % (parsed.path, source),
                         '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get_source)
        source_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(source_contents, b'test')
        # account name of account 1, extracted from its storage URL path
        acct = tf.parsed[0].path.split('/', 2)[2]
        # copy source to dest with X-Copy-From-Account
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s' % (parsed.path, dest), '',
                         {'X-Auth-Token': token,
                          'Content-Length': '0',
                          'X-Copy-From-Account': acct,
                          'X-Copy-From': source})
            return check_response(conn)
        # try to put, will not succeed
        # user does not have permissions to read from source
        # NOTE(review): several responses below are never read() before the
        # next request — confirm connection reuse is safe here
        resp = retry(put, use_account=2)
        self.assertEqual(resp.status, 403)
        # add acl to allow reading from source
        def post(url, token, parsed, conn):
            conn.request('POST', '%s/%s' % (parsed.path, self.container), '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': tf.swift_test_perm[1]})
            return check_response(conn)
        resp = retry(post)
        self.assertEqual(resp.status, 204)
        # retry previous put, now should succeed
        resp = retry(put, use_account=2)
        self.assertEqual(resp.status, 201)
        # contents of dest should be the same as source
        def get_dest(url, token, parsed, conn):
            conn.request('GET',
                         '%s/%s' % (parsed.path, dest),
                         '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get_dest, use_account=2)
        dest_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(dest_contents, source_contents)
        # delete the copy
        def delete(url, token, parsed, conn):
            conn.request('DELETE', '%s/%s' % (parsed.path, dest), '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete, use_account=2)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # verify dest does not exist
        resp = retry(get_dest, use_account=2)
        resp.read()
        self.assertEqual(resp.status, 404)
        # account name of account 2 (the copy destination)
        acct_dest = tf.parsed[1].path.split('/', 2)[2]
        # copy source to dest with COPY
        def copy(url, token, parsed, conn):
            conn.request('COPY', '%s/%s' % (parsed.path, source), '',
                         {'X-Auth-Token': token,
                          'Destination-Account': acct_dest,
                          'Destination': dest})
            return check_response(conn)
        # try to copy, will not succeed
        # user does not have permissions to write to destination
        resp = retry(copy)
        resp.read()
        self.assertEqual(resp.status, 403)
        # add acl to allow write to destination
        def post(url, token, parsed, conn):
            conn.request('POST', '%s/%s' % (parsed.path, self.container), '',
                         {'X-Auth-Token': token,
                          'X-Container-Write': tf.swift_test_perm[0]})
            return check_response(conn)
        resp = retry(post, use_account=2)
        self.assertEqual(resp.status, 204)
        # now copy will succeed
        resp = retry(copy)
        resp.read()
        self.assertEqual(resp.status, 201)
        # contents of dest should be the same as source
        resp = retry(get_dest, use_account=2)
        dest_contents = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(dest_contents, source_contents)
        # delete the copy
        resp = retry(delete, use_account=2)
        resp.read()
        self.assertIn(resp.status, (204, 404))
    def test_public_object(self):
        """Anonymous GET works only while the container has the '.r:*'
        read ACL set."""
        if tf.skip:
            raise SkipTest
        # unauthenticated request: no X-Auth-Token header is sent
        def get(url, token, parsed, conn):
            conn.request('GET',
                         '%s/%s/%s' % (parsed.path, self.container, self.obj))
            return check_response(conn)
        # retry() raises 'No result after ...' when every attempt is
        # rejected, so a raised error here means anonymous access failed
        try:
            resp = retry(get)
            raise Exception('Should not have been able to GET')
        except Exception as err:
            self.assertTrue(str(err).startswith('No result after '))
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.container, '',
                         {'X-Auth-Token': token,
                          'X-Container-Read': '.r:*'})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # with '.r:*' set, the anonymous GET now succeeds
        resp = retry(get)
        resp.read()
        self.assertEqual(resp.status, 200)
        # clear the read ACL again
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path + '/' + self.container, '',
                         {'X-Auth-Token': token, 'X-Container-Read': ''})
            return check_response(conn)
        resp = retry(post)
        resp.read()
        self.assertEqual(resp.status, 204)
        # anonymous access is rejected once more
        try:
            resp = retry(get)
            raise Exception('Should not have been able to GET')
        except Exception as err:
            self.assertTrue(str(err).startswith('No result after '))
    def test_private_object(self):
        """A third account with read/write ACLs on a shared container can
        write and copy within it, but can never copy *from* the private
        container."""
        if tf.skip or tf.skip3:
            raise SkipTest
        # Ensure we can't access the object with the third account
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/%s' % (
                parsed.path, self.container, self.obj), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # create a shared container writable by account3
        shared_container = uuid4().hex
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s' % (
                parsed.path, shared_container), '',
                {'X-Auth-Token': token,
                 'X-Container-Read': tf.swift_test_perm[2],
                 'X-Container-Write': tf.swift_test_perm[2]})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # verify third account can not copy from private container
        def copy(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, shared_container, 'private_object'), '',
                {'X-Auth-Token': token,
                 'Content-Length': '0',
                 'X-Copy-From': '%s/%s' % (self.container, self.obj)})
            return check_response(conn)
        resp = retry(copy, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # verify third account can write "obj1" to shared container
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), 'test',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
        # verify third account can copy "obj1" to shared container
        def copy2(url, token, parsed, conn):
            conn.request('COPY', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), '',
                {'X-Auth-Token': token,
                 'Destination': '%s/%s' % (shared_container, 'obj1')})
            return check_response(conn)
        resp = retry(copy2, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
        # verify third account STILL can not copy from private container
        def copy3(url, token, parsed, conn):
            conn.request('COPY', '%s/%s/%s' % (
                parsed.path, self.container, self.obj), '',
                {'X-Auth-Token': token,
                 'Destination': '%s/%s' % (shared_container,
                                           'private_object')})
            return check_response(conn)
        resp = retry(copy3, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # clean up "obj1"
        def delete(url, token, parsed, conn):
            conn.request('DELETE', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # clean up shared_container
        def delete(url, token, parsed, conn):
            conn.request('DELETE',
                         parsed.path + '/' + shared_container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
    def test_container_write_only(self):
        """With only a write ACL, a third account can PUT, POST, and DELETE
        in the shared container, but cannot COPY within it (copy needs read
        access to the source)."""
        if tf.skip or tf.skip3:
            raise SkipTest
        # Ensure we can't access the object with the third account
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/%s' % (
                parsed.path, self.container, self.obj), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # create a shared container writable (but not readable) by account3
        shared_container = uuid4().hex
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s' % (
                parsed.path, shared_container), '',
                {'X-Auth-Token': token,
                 'X-Container-Write': tf.swift_test_perm[2]})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # verify third account can write "obj1" to shared container
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), 'test',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 201)
        # verify third account cannot copy "obj1" to shared container
        def copy(url, token, parsed, conn):
            conn.request('COPY', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), '',
                {'X-Auth-Token': token,
                 'Destination': '%s/%s' % (shared_container, 'obj2')})
            return check_response(conn)
        resp = retry(copy, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # verify third account can POST to "obj1" in shared container
        def post(url, token, parsed, conn):
            conn.request('POST', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), '',
                {'X-Auth-Token': token,
                 'X-Object-Meta-Color': 'blue'})
            return check_response(conn)
        resp = retry(post, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 202)
        # verify third account can DELETE from shared container
        def delete(url, token, parsed, conn):
            conn.request('DELETE', '%s/%s/%s' % (
                parsed.path, shared_container, 'obj1'), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete, use_account=3)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # clean up shared_container
        def delete(url, token, parsed, conn):
            conn.request('DELETE',
                         parsed.path + '/' + shared_container, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
@requires_acls
def test_read_only(self):
if tf.skip3:
raise SkipTest
def get_listing(url, token, parsed, conn):
conn.request('GET', '%s/%s' % (parsed.path, self.container), '',
{'X-Auth-Token': token})
return check_response(conn)
def post_account(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('POST', parsed.path, '', new_headers)
return check_response(conn)
def get(url, token, parsed, conn, name):
conn.request('GET', '%s/%s/%s' % (
parsed.path, self.container, name), '',
{'X-Auth-Token': token})
return check_response(conn)
def put(url, token, parsed, conn, name):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, name), 'test',
{'X-Auth-Token': token})
return check_response(conn)
def delete(url, token, parsed, conn, name):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, name), '',
{'X-Auth-Token': token})
return check_response(conn)
# cannot list objects
resp = retry(get_listing, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# cannot get object
resp = retry(get, self.obj, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# grant read-only access
acl_user = tf.swift_test_user[2]
acl = {'read-only': [acl_user]}
headers = {'x-account-access-control': json.dumps(acl)}
resp = retry(post_account, headers=headers, use_account=1)
resp.read()
self.assertEqual(resp.status, 204)
# can list objects
resp = retry(get_listing, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200)
self.assertIn(self.obj, listing.split('\n'))
# can get object
resp = retry(get, self.obj, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(body, b'test')
# can not put an object
obj_name = str(uuid4())
resp = retry(put, obj_name, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 403)
# can not delete an object
resp = retry(delete, self.obj, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 403)
# sanity with account1
resp = retry(get_listing, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200)
self.assertNotIn(obj_name, listing.split('\n'))
self.assertIn(self.obj, listing.split('\n'))
    @requires_acls
    def test_read_write(self):
        """An account granted the 'read-write' account ACL can list, GET,
        PUT, and DELETE objects in the container."""
        if tf.skip3:
            raise SkipTest
        def get_listing(url, token, parsed, conn):
            conn.request('GET', '%s/%s' % (parsed.path, self.container), '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        def post_account(url, token, parsed, conn, headers):
            new_headers = dict({'X-Auth-Token': token}, **headers)
            conn.request('POST', parsed.path, '', new_headers)
            return check_response(conn)
        def get(url, token, parsed, conn, name):
            conn.request('GET', '%s/%s/%s' % (
                parsed.path, self.container, name), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        def put(url, token, parsed, conn, name):
            conn.request('PUT', '%s/%s/%s' % (
                parsed.path, self.container, name), 'test',
                {'X-Auth-Token': token})
            return check_response(conn)
        def delete(url, token, parsed, conn, name):
            conn.request('DELETE', '%s/%s/%s' % (
                parsed.path, self.container, name), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        # cannot list objects
        resp = retry(get_listing, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # cannot get object
        resp = retry(get, self.obj, use_account=3)
        resp.read()
        self.assertEqual(resp.status, 403)
        # grant read-write access
        acl_user = tf.swift_test_user[2]
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': json.dumps(acl)}
        resp = retry(post_account, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        # can list objects
        resp = retry(get_listing, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertIn(self.obj, listing.split('\n'))
        # can get object
        resp = retry(get, self.obj, use_account=3)
        body = resp.read()
        self.assertEqual(resp.status, 200)
        self.assertEqual(body, b'test')
        # can put an object
        obj_name = str(uuid4())
        resp = retry(put, obj_name, use_account=3)
        body = resp.read()
        self.assertEqual(resp.status, 201)
        # can delete an object
        resp = retry(delete, self.obj, use_account=3)
        body = resp.read()
        self.assertIn(resp.status, (204, 404))
        # sanity with account1
        resp = retry(get_listing, use_account=3)
        listing = resp.read()
        if not six.PY2:
            listing = listing.decode('utf8')
        self.assertEqual(resp.status, 200)
        self.assertIn(obj_name, listing.split('\n'))
        self.assertNotIn(self.obj, listing.split('\n'))
@requires_acls
def test_admin(self):
if tf.skip3:
raise SkipTest
def get_listing(url, token, parsed, conn):
conn.request('GET', '%s/%s' % (parsed.path, self.container), '',
{'X-Auth-Token': token})
return check_response(conn)
def post_account(url, token, parsed, conn, headers):
new_headers = dict({'X-Auth-Token': token}, **headers)
conn.request('POST', parsed.path, '', new_headers)
return check_response(conn)
def get(url, token, parsed, conn, name):
conn.request('GET', '%s/%s/%s' % (
parsed.path, self.container, name), '',
{'X-Auth-Token': token})
return check_response(conn)
def put(url, token, parsed, conn, name):
conn.request('PUT', '%s/%s/%s' % (
parsed.path, self.container, name), 'test',
{'X-Auth-Token': token})
return check_response(conn)
def delete(url, token, parsed, conn, name):
conn.request('DELETE', '%s/%s/%s' % (
parsed.path, self.container, name), '',
{'X-Auth-Token': token})
return check_response(conn)
# cannot list objects
resp = retry(get_listing, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# cannot get object
resp = retry(get, self.obj, use_account=3)
resp.read()
self.assertEqual(resp.status, 403)
# grant admin access
acl_user = tf.swift_test_user[2]
acl = {'admin': [acl_user]}
headers = {'x-account-access-control': json.dumps(acl)}
resp = retry(post_account, headers=headers, use_account=1)
resp.read()
self.assertEqual(resp.status, 204)
# can list objects
resp = retry(get_listing, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200)
self.assertIn(self.obj, listing.split('\n'))
# can get object
resp = retry(get, self.obj, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 200)
self.assertEqual(body, b'test')
# can put an object
obj_name = str(uuid4())
resp = retry(put, obj_name, use_account=3)
body = resp.read()
self.assertEqual(resp.status, 201)
# can delete an object
resp = retry(delete, self.obj, use_account=3)
body = resp.read()
self.assertIn(resp.status, (204, 404))
# sanity with account1
resp = retry(get_listing, use_account=3)
listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200)
self.assertIn(obj_name, listing.split('\n'))
self.assertNotIn(self.obj, listing)
    def test_manifest(self):
        """End-to-end DLO (X-Object-Manifest) behavior: segment upload,
        manifest GET concatenation, range requests, cross-account ACL
        checks, repointing the manifest across containers, and cleanup."""
        if tf.skip:
            raise SkipTest
        # Data for the object segments
        segments1 = [b'one', b'two', b'three', b'four', b'five']
        segments2 = [b'six', b'seven', b'eight']
        segments3 = [b'nine', b'ten', b'eleven']
        # Upload the first set of segments
        def put(url, token, parsed, conn, objnum):
            conn.request('PUT', '%s/%s/segments1/%s' % (
                parsed.path, self.container, str(objnum)), segments1[objnum],
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments1)):
            resp = retry(put, objnum)
            resp.read()
            self.assertEqual(resp.status, 201)
        # Upload the manifest
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token,
                    'X-Object-Manifest': '%s/segments1/' % self.container,
                    'Content-Type': 'text/jibberish', 'Content-Length': '0'})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # Get the manifest (should get all the segments as the body)
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), b''.join(segments1))
        self.assertEqual(resp.status, 200)
        self.assertEqual(resp.getheader('content-type'), 'text/jibberish')
        # Get with a range at the start of the second segment
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token, 'Range': 'bytes=3-'})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), b''.join(segments1[1:]))
        self.assertEqual(resp.status, 206)
        # Get with a range in the middle of the second segment
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token, 'Range': 'bytes=5-'})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), b''.join(segments1)[5:])
        self.assertEqual(resp.status, 206)
        # Get with a full start and stop range
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token, 'Range': 'bytes=5-10'})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), b''.join(segments1)[5:11])
        self.assertEqual(resp.status, 206)
        # Upload the second set of segments
        def put(url, token, parsed, conn, objnum):
            conn.request('PUT', '%s/%s/segments2/%s' % (
                parsed.path, self.container, str(objnum)), segments2[objnum],
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments2)):
            resp = retry(put, objnum)
            resp.read()
            self.assertEqual(resp.status, 201)
        # Get the manifest (should still be the first segments of course)
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), b''.join(segments1))
        self.assertEqual(resp.status, 200)
        # Update the manifest
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/manifest' % (
                parsed.path, self.container), '', {
                    'X-Auth-Token': token,
                    'X-Object-Manifest': '%s/segments2/' % self.container,
                    'Content-Length': '0'})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # Get the manifest (should be the second set of segments now)
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), b''.join(segments2))
        self.assertEqual(resp.status, 200)
        if not tf.skip3:
            # Ensure we can't access the manifest with the third account
            def get(url, token, parsed, conn):
                conn.request('GET', '%s/%s/manifest' % (
                    parsed.path, self.container), '', {'X-Auth-Token': token})
                return check_response(conn)
            resp = retry(get, use_account=3)
            resp.read()
            self.assertEqual(resp.status, 403)
            # Grant access to the third account
            def post(url, token, parsed, conn):
                conn.request('POST', '%s/%s' % (parsed.path, self.container),
                             '', {'X-Auth-Token': token,
                                  'X-Container-Read': tf.swift_test_perm[2]})
                return check_response(conn)
            resp = retry(post)
            resp.read()
            self.assertEqual(resp.status, 204)
            # The third account should be able to get the manifest now
            def get(url, token, parsed, conn):
                conn.request('GET', '%s/%s/manifest' % (
                    parsed.path, self.container), '', {'X-Auth-Token': token})
                return check_response(conn)
            resp = retry(get, use_account=3)
            self.assertEqual(resp.read(), b''.join(segments2))
            self.assertEqual(resp.status, 200)
        # Create another container for the third set of segments
        acontainer = uuid4().hex
        def put(url, token, parsed, conn):
            conn.request('PUT', parsed.path + '/' + acontainer, '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # Upload the third set of segments in the other container
        def put(url, token, parsed, conn, objnum):
            conn.request('PUT', '%s/%s/segments3/%s' % (
                parsed.path, acontainer, str(objnum)), segments3[objnum],
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments3)):
            resp = retry(put, objnum)
            resp.read()
            self.assertEqual(resp.status, 201)
        # Update the manifest
        def put(url, token, parsed, conn):
            conn.request('PUT', '%s/%s/manifest' % (
                parsed.path, self.container), '',
                {'X-Auth-Token': token,
                 'X-Object-Manifest': '%s/segments3/' % acontainer,
                 'Content-Length': '0'})
            return check_response(conn)
        resp = retry(put)
        resp.read()
        self.assertEqual(resp.status, 201)
        # Get the manifest to ensure it's the third set of segments
        def get(url, token, parsed, conn):
            conn.request('GET', '%s/%s/manifest' % (
                parsed.path, self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(get)
        self.assertEqual(resp.read(), b''.join(segments3))
        self.assertEqual(resp.status, 200)
        if not tf.skip3:
            # Ensure we can't access the manifest with the third account
            # (because the segments are in a protected container even if the
            # manifest itself is not).
            def get(url, token, parsed, conn):
                conn.request('GET', '%s/%s/manifest' % (
                    parsed.path, self.container), '', {'X-Auth-Token': token})
                return check_response(conn)
            resp = retry(get, use_account=3)
            resp.read()
            self.assertEqual(resp.status, 403)
            # Grant access to the third account
            def post(url, token, parsed, conn):
                conn.request('POST', '%s/%s' % (parsed.path, acontainer),
                             '', {'X-Auth-Token': token,
                                  'X-Container-Read': tf.swift_test_perm[2]})
                return check_response(conn)
            resp = retry(post)
            resp.read()
            self.assertEqual(resp.status, 204)
            # The third account should be able to get the manifest now
            def get(url, token, parsed, conn):
                conn.request('GET', '%s/%s/manifest' % (
                    parsed.path, self.container), '', {'X-Auth-Token': token})
                return check_response(conn)
            resp = retry(get, use_account=3)
            self.assertEqual(resp.read(), b''.join(segments3))
            self.assertEqual(resp.status, 200)
        # Delete the manifest
        def delete(url, token, parsed, conn, objnum):
            conn.request('DELETE', '%s/%s/manifest' % (
                parsed.path,
                self.container), '', {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete, objnum)
        resp.read()
        self.assertIn(resp.status, (204, 404))
        # Delete the third set of segments
        def delete(url, token, parsed, conn, objnum):
            conn.request('DELETE', '%s/%s/segments3/%s' % (
                parsed.path, acontainer, str(objnum)), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments3)):
            resp = retry(delete, objnum)
            resp.read()
            self.assertIn(resp.status, (204, 404))
        # Delete the second set of segments
        def delete(url, token, parsed, conn, objnum):
            conn.request('DELETE', '%s/%s/segments2/%s' % (
                parsed.path, self.container, str(objnum)), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments2)):
            resp = retry(delete, objnum)
            resp.read()
            self.assertIn(resp.status, (204, 404))
        # Delete the first set of segments
        def delete(url, token, parsed, conn, objnum):
            conn.request('DELETE', '%s/%s/segments1/%s' % (
                parsed.path, self.container, str(objnum)), '',
                {'X-Auth-Token': token})
            return check_response(conn)
        for objnum in range(len(segments1)):
            resp = retry(delete, objnum)
            resp.read()
            self.assertIn(resp.status, (204, 404))
        # Delete the extra container
        def delete(url, token, parsed, conn):
            conn.request('DELETE', '%s/%s' % (parsed.path, acontainer), '',
                         {'X-Auth-Token': token})
            return check_response(conn)
        resp = retry(delete)
        resp.read()
        self.assertIn(resp.status, (204, 404))
def test_delete_content_type(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/hi' % (parsed.path, self.container),
'there', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s/hi' % (parsed.path, self.container),
'', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(delete)
resp.read()
self.assertIn(resp.status, (204, 404))
self.assertEqual(resp.getheader('Content-Type'),
'text/html; charset=UTF-8')
def test_delete_if_delete_at_bad(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
conn.request('PUT',
'%s/%s/hi-delete-bad' % (parsed.path, self.container),
'there', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
resp.read()
self.assertEqual(resp.status, 201)
def delete(url, token, parsed, conn):
conn.request('DELETE', '%s/%s/hi' % (parsed.path, self.container),
'', {'X-Auth-Token': token,
'X-If-Delete-At': 'bad'})
return check_response(conn)
resp = retry(delete)
resp.read()
self.assertEqual(resp.status, 400)
def test_null_name(self):
if tf.skip:
raise SkipTest
def put(url, token, parsed, conn):
conn.request('PUT', '%s/%s/abc%%00def' % (
parsed.path,
self.container), 'test', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(put)
if (tf.web_front_end == 'apache2'):
self.assertEqual(resp.status, 404)
else:
self.assertEqual(resp.read(), b'Invalid UTF8 or contains NULL')
self.assertEqual(resp.status, 412)
    def test_cors(self):
        """Exercise CORS preflight and actual requests against a container.

        Covers the wildcard ('*') allow-origin case and an explicitly
        restricted origin, in both strict and non-strict cors modes.
        """
        if tf.skip:
            raise SkipTest
        try:
            strict_cors = tf.cluster_info['swift']['strict_cors_mode']
        except KeyError:
            raise SkipTest("cors mode is unknown")

        def put_cors_cont(url, token, parsed, conn, orig):
            # Set the container's allowed CORS origin to ``orig``.
            conn.request(
                'PUT', '%s/%s' % (parsed.path, self.container),
                '', {'X-Auth-Token': token,
                     'X-Container-Meta-Access-Control-Allow-Origin': orig})
            return check_response(conn)

        def put_obj(url, token, parsed, conn, obj):
            conn.request(
                'PUT', '%s/%s/%s' % (parsed.path, self.container, obj),
                'test', {'X-Auth-Token': token, 'X-Object-Meta-Color': 'red'})
            return check_response(conn)

        def check_cors(url, token, parsed, conn,
                       method, obj, headers):
            # OPTIONS preflights are sent unauthenticated, like a browser.
            if method != 'OPTIONS':
                headers['X-Auth-Token'] = token
            conn.request(
                method, '%s/%s/%s' % (parsed.path, self.container, obj),
                '', headers)
            return conn.getresponse()

        resp = retry(put_cors_cont, '*')
        resp.read()
        self.assertEqual(resp.status // 100, 2)

        resp = retry(put_obj, 'cat')
        resp.read()
        self.assertEqual(resp.status // 100, 2)

        # A preflight without Access-Control-Request-Method is rejected.
        resp = retry(check_cors,
                     'OPTIONS', 'cat', {'Origin': 'http://m.com'})
        self.assertEqual(resp.status, 401)

        resp = retry(check_cors,
                     'OPTIONS', 'cat',
                     {'Origin': 'http://m.com',
                      'Access-Control-Request-Method': 'GET'})
        self.assertEqual(resp.status, 200)
        resp.read()
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('access-control-allow-origin'),
                         '*')
        # Just a pre-flight; this doesn't show up yet
        self.assertNotIn('access-control-expose-headers', headers)

        resp = retry(check_cors,
                     'GET', 'cat', {'Origin': 'http://m.com'})
        self.assertEqual(resp.status, 200)
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('access-control-allow-origin'),
                         '*')
        self.assertIn('x-object-meta-color', headers.get(
            'access-control-expose-headers').split(', '))

        resp = retry(check_cors,
                     'GET', 'cat', {'Origin': 'http://m.com',
                                    'X-Web-Mode': 'True'})
        self.assertEqual(resp.status, 200)
        headers = dict((k.lower(), v) for k, v in resp.getheaders())
        self.assertEqual(headers.get('access-control-allow-origin'),
                         '*')
        self.assertIn('x-object-meta-color', headers.get(
            'access-control-expose-headers').split(', '))

        ####################

        # Now restrict the allowed origin to http://secret.com only.
        resp = retry(put_cors_cont, 'http://secret.com')
        resp.read()
        self.assertEqual(resp.status // 100, 2)

        resp = retry(check_cors,
                     'OPTIONS', 'cat',
                     {'Origin': 'http://m.com',
                      'Access-Control-Request-Method': 'GET'})
        resp.read()
        self.assertEqual(resp.status, 401)

        if strict_cors:
            # Strict mode: a non-matching Origin gets no CORS headers back.
            resp = retry(check_cors,
                         'GET', 'cat', {'Origin': 'http://m.com'})
            resp.read()
            self.assertEqual(resp.status, 200)
            headers = dict((k.lower(), v) for k, v in resp.getheaders())
            self.assertNotIn('access-control-allow-origin', headers)

            resp = retry(check_cors,
                         'GET', 'cat', {'Origin': 'http://secret.com'})
            resp.read()
            self.assertEqual(resp.status, 200)
            headers = dict((k.lower(), v) for k, v in resp.getheaders())
            self.assertEqual(headers.get('access-control-allow-origin'),
                             'http://secret.com')
        else:
            # Non-strict mode echoes the request Origin back.
            resp = retry(check_cors,
                         'GET', 'cat', {'Origin': 'http://m.com'})
            resp.read()
            self.assertEqual(resp.status, 200)
            headers = dict((k.lower(), v) for k, v in resp.getheaders())
            self.assertEqual(headers.get('access-control-allow-origin'),
                             'http://m.com')
    @requires_policies
    def test_cross_policy_copy(self):
        """Objects can be server-side copied across storage policies."""
        # create container in first policy
        policy = self.policies.select()
        container = self._create_container(
            headers={'X-Storage-Policy': policy['name']})
        obj = uuid4().hex

        # create a container in second policy
        other_policy = self.policies.exclude(name=policy['name']).select()
        other_container = self._create_container(
            headers={'X-Storage-Policy': other_policy['name']})
        other_obj = uuid4().hex

        def put_obj(url, token, parsed, conn, container, obj):
            # to keep track of things, use the original path as the body
            content = '%s/%s' % (container, obj)
            path = '%s/%s' % (parsed.path, content)
            conn.request('PUT', path, content, {'X-Auth-Token': token})
            return check_response(conn)

        # create objects
        for c, o in zip((container, other_container), (obj, other_obj)):
            resp = retry(put_obj, c, o)
            resp.read()
            self.assertEqual(resp.status, 201)

        def put_copy_from(url, token, parsed, conn, container, obj, source):
            dest_path = '%s/%s/%s' % (parsed.path, container, obj)
            conn.request('PUT', dest_path, '',
                         {'X-Auth-Token': token,
                          'Content-Length': '0',
                          'X-Copy-From': source})
            return check_response(conn)

        # Each tuple is (dest container, dest obj, source path) — both
        # directions are exercised so the copy crosses policies both ways.
        copy_requests = (
            (container, other_obj, '%s/%s' % (other_container, other_obj)),
            (other_container, obj, '%s/%s' % (container, obj)),
        )

        # copy objects
        for c, o, source in copy_requests:
            resp = retry(put_copy_from, c, o, source)
            resp.read()
            self.assertEqual(resp.status, 201)

        def get_obj(url, token, parsed, conn, container, obj):
            path = '%s/%s/%s' % (parsed.path, container, obj)
            conn.request('GET', path, '', {'X-Auth-Token': token})
            return check_response(conn)

        # validate contents, contents should be source
        validate_requests = copy_requests
        for c, o, body in validate_requests:
            resp = retry(get_obj, c, o)
            self.assertEqual(resp.status, 200)
            if not six.PY2:
                body = body.encode('utf8')
            self.assertEqual(body, resp.read())
    @requires_bulk
    def test_bulk_delete(self):
        """A single-object bulk delete should report success and no errors."""

        def bulk_delete(url, token, parsed, conn):
            # try to bulk delete the object that was created during test setup
            conn.request('DELETE', '%s/%s/%s?bulk-delete' % (
                parsed.path, self.container, self.obj),
                '%s/%s' % (self.container, self.obj),
                {'X-Auth-Token': token,
                 'Accept': 'application/xml',
                 'Expect': '100-continue',
                 'Content-Type': 'text/plain'})
            return check_response(conn)
        resp = retry(bulk_delete)
        self.assertEqual(resp.status, 200)
        body = resp.read()
        tree = minidom.parseString(body)
        self.assertEqual(tree.documentElement.tagName, 'delete')

        errors = tree.getElementsByTagName('errors')
        self.assertEqual(len(errors), 1)
        # Collect any non-whitespace children of <errors>; expect none.
        errors = [c.data if c.nodeType == c.TEXT_NODE else c.childNodes[0].data
                  for c in errors[0].childNodes
                  if c.nodeType != c.TEXT_NODE or c.data.strip()]
        self.assertEqual(errors, [])

        final_status = tree.getElementsByTagName('response_status')
        self.assertEqual(len(final_status), 1)
        self.assertEqual(len(final_status[0].childNodes), 1)
        self.assertEqual(final_status[0].childNodes[0].data, '200 OK')
    def test_etag_quoter(self):
        """Account/container flags control whether ETags are RFC-quoted."""
        if tf.skip:
            raise SkipTest
        if 'etag_quoter' not in tf.cluster_info:
            raise SkipTest("etag-quoter middleware is not enabled")

        def do_head(expect_quoted=None):
            # HEAD the setup object and check its ETag quoting.
            def head(url, token, parsed, conn):
                conn.request('HEAD', '%s/%s/%s' % (
                    parsed.path, self.container, self.obj), '',
                    {'X-Auth-Token': token})
                return check_response(conn)
            resp = retry(head)
            resp.read()
            self.assertEqual(resp.status, 200)
            if expect_quoted is None:
                # Fall back to the cluster-wide default behaviour.
                expect_quoted = tf.cluster_info.get('etag_quoter', {}).get(
                    'enable_by_default', False)
            expected_etag = md5(b'test', usedforsecurity=False).hexdigest()
            if expect_quoted:
                expected_etag = '"%s"' % expected_etag
            self.assertEqual(resp.headers['etag'], expected_etag)

        def _post(enable_flag, container_path):
            # POST the Rfc-Compliant-Etags flag at account or container level.
            def post(url, token, parsed, conn):
                if container_path:
                    path = '%s/%s' % (parsed.path, self.container)
                    hdr = 'X-Container-Rfc-Compliant-Etags'
                else:
                    path = parsed.path
                    hdr = 'X-Account-Rfc-Compliant-Etags'
                headers = {hdr: enable_flag, 'X-Auth-Token': token}
                conn.request('POST', path, '', headers)
                return check_response(conn)
            resp = retry(post)
            resp.read()
            self.assertEqual(resp.status, 204)

        def post_account(enable_flag):
            return _post(enable_flag, False)

        def post_container(enable_flag):
            return _post(enable_flag, True)

        do_head()
        post_container('t')
        do_head(expect_quoted=True)
        try:
            post_account('t')
            # An unset container flag defers to the account-level setting.
            post_container('')
            do_head(expect_quoted=True)
            post_container('f')
            do_head(expect_quoted=False)
        finally:
            # Don't leave a dirty account
            post_account('')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/test_object.py |
# Copyright (c) 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from castellan.tests.unit.key_manager.mock_key_manager import MockKeyManager
class MockSwiftKeyManager(MockKeyManager):
    """Mocking key manager for Swift functional tests.

    This mock key manager implementation extends the Castellan mock key
    manager with support for a pre-existing key that the Swift proxy server
    can use as the root encryption secret. The actual key material bytes
    for the root encryption secret changes each time this mock key manager
    is instantiated, meaning that data written earlier is no longer
    accessible once the proxy server is restarted.

    To use this mock key manager instead of the default Barbican key
    manager, set the following property in the [kms_keymaster] section in
    the keymaster.conf configuration file pointed to using the
    keymaster_config_path property in the [filter:kms_keymaster] section in
    the proxy-server.conf file:

    api_class = test.functional.mock_swift_key_manager.MockSwiftKeyManager

    In case of a Python import error, make sure that the swift directory
    under which this mock key manager resides is early in the sys.path,
    e.g., by setting it in the PYTHONPATH environment variable before
    starting the proxy server.

    This key manager is not suitable for use in production deployments.
    """

    def __init__(self, configuration=None):
        super(MockSwiftKeyManager, self).__init__(configuration)
        # NOTE: these explanatory notes were previously written as bare
        # triple-quoted string literals, which are executable expression
        # statements evaluated and discarded at runtime; plain comments
        # express the same intent with no runtime artifact.
        #
        # Create a new, random symmetric key for use as the encryption root
        # secret.
        existing_key = self._generate_key(algorithm='AES', length=256)
        # Store the key under the UUID 'mock_key_manager_existing_key', from
        # where it can be retrieved by the proxy server. In the kms_keymaster
        # configuration, set the following property to use this key:
        #     key_id = mock_key_manager_existing_key
        self.keys['mock_key_manager_existing_key'] = existing_key
| swift-master | test/functional/mock_swift_key_manager.py |
#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import io
import locale
import random
import six
from six.moves import urllib
import time
import unittest
import uuid
from copy import deepcopy
import eventlet
from swift.common.http import is_success, is_client_error
from swift.common.swob import normalize_etag
from swift.common.utils import md5
from email.utils import parsedate
if six.PY2:
from email.parser import FeedParser
else:
from email.parser import BytesFeedParser as FeedParser
import mock
from test.functional import normalized_urls, load_constraint, cluster_info
from test.functional import check_response, retry
import test.functional as tf
from test.functional.swift_test_client import Account, Connection, File, \
ResponseError, SkipTest
def setUpModule():
    """Module-level fixture: initialise the functional-test environment."""
    tf.setup_package()
def tearDownModule():
    """Module-level fixture: tear down the functional-test environment."""
    tf.teardown_package()
class Utils(object):
    """Random test-name generators (ASCII and UTF-8 flavours)."""

    @classmethod
    def create_ascii_name(cls, length=None):
        # ``length`` is accepted for signature parity with the UTF-8
        # variant but is ignored: a uuid4 hex string is always 32 chars.
        return uuid.uuid4().hex

    @classmethod
    def create_utf8_name(cls, length=None):
        length = 15 if length is None else int(length)
        utf8_chars = (u'\uF10F\uD20D\uB30B\u9409\u8508\u5605\u3703\u1801'
                      u'\u0900\uF110\uD20E\uB30C\u940A\u8509\u5606\u3704'
                      u'\u1802\u0901\uF111\uD20F\uB30D\u940B\u850A\u5607'
                      u'\u3705\u1803\u0902\uF112\uD210\uB30E\u940C\u850B'
                      u'\u5608\u3706\u1804\u0903\u03A9\u2603')
        ustr = u''.join(random.choice(utf8_chars) for _ in range(length))
        # On py2 callers expect encoded bytes; on py3, native text.
        return ustr.encode('utf-8') if six.PY2 else ustr

    # Default factory; Base2 swaps this to create_utf8_name for UTF-8 runs.
    create_name = create_ascii_name
class BaseEnv(object):
    # Shared connection/account state, populated by setUp().
    account = conn = None

    @classmethod
    def setUp(cls):
        """Authenticate and start each run from an empty account."""
        cls.conn = Connection(tf.config)
        cls.conn.authenticate()
        cls.account = Account(cls.conn, tf.config.get('account',
                                                      tf.config['username']))
        cls.account.delete_containers()

    @classmethod
    def tearDown(cls):
        """No-op by default; subclasses may override to clean up."""
        pass
class Base(unittest.TestCase):
    """Common plumbing and assertion helpers for the functional tests."""

    env = BaseEnv

    @classmethod
    def setUpClass(cls):
        cls.env.setUp()

    @classmethod
    def tearDownClass(cls):
        cls.env.tearDown()

    def setUp(self):
        if tf.in_process:
            tf.skip_if_no_xattrs()

    def assert_body(self, body):
        """The last response body must equal ``body`` (bytes-compared)."""
        expected = body if isinstance(body, bytes) else body.encode('utf-8')
        response_body = self.env.conn.response.read()
        self.assertEqual(response_body, expected,
                         'Body returned: %s' % (response_body))

    def assert_status(self, status_or_statuses):
        """Accept either a single status code or an iterable of them."""
        status = self.env.conn.response.status
        matched = status == status_or_statuses
        if not matched and hasattr(status_or_statuses, '__iter__'):
            matched = status in status_or_statuses
        self.assertTrue(matched,
                        'Status returned: %d Expected: %s' %
                        (self.env.conn.response.status, status_or_statuses))

    def assert_header(self, header_name, expected_value):
        """The last response must carry ``header_name`` with this value."""
        try:
            actual_value = self.env.conn.response.getheader(header_name)
        except KeyError:
            self.fail(
                'Expected header name %r not found in response.' % header_name)
        self.assertEqual(expected_value, actual_value)

    def assert_etag(self, unquoted_value):
        """Check the etag header, quoting it if the cluster does so."""
        quoted_by_default = tf.cluster_info.get(
            'etag_quoter', {}).get('enable_by_default')
        if quoted_by_default:
            expected = '"%s"' % unquoted_value
        else:
            expected = unquoted_value
        self.assert_header('etag', expected)
class Base2(object):
    """Mixin that reruns a test class with UTF-8 (instead of ASCII) names."""

    @classmethod
    def setUpClass(cls):
        # Swap the name factory before the environment creates anything.
        Utils.create_name = Utils.create_utf8_name
        super(Base2, cls).setUpClass()

    @classmethod
    def tearDownClass(cls):
        Utils.create_name = Utils.create_ascii_name
class TestAccountEnv(BaseEnv):
    """Environment with ten freshly created, randomly named containers."""

    @classmethod
    def setUp(cls):
        super(TestAccountEnv, cls).setUp()
        cls.containers = []
        for _ in range(10):
            container = cls.account.container(Utils.create_name())
            if not container.create():
                raise ResponseError(cls.conn.response)
            cls.containers.append(container)
class TestAccountDev(Base):
    # Smoke-test shell: exercises only the environment setup/teardown.
    env = TestAccountEnv
class TestAccountDevUTF8(Base2, TestAccountDev):
    # Same as TestAccountDev, but with UTF-8 container names.
    pass
class TestAccount(Base):
    """Functional tests for account-level requests and container listings."""
    env = TestAccountEnv

    def testNoAuthToken(self):
        # Requests without a token must be rejected.
        self.assertRaises(ResponseError, self.env.account.info,
                          cfg={'no_auth_token': True})
        self.assert_status([401, 412])

        self.assertRaises(ResponseError, self.env.account.containers,
                          cfg={'no_auth_token': True})
        self.assert_status([401, 412])

    def testInvalidUTF8Path(self):
        # Build a byte sequence that is guaranteed not to be valid UTF-8.
        valid_utf8 = Utils.create_utf8_name()
        if six.PY2:
            invalid_utf8 = valid_utf8[::-1]
        else:
            invalid_utf8 = (valid_utf8.encode('utf8')[::-1]).decode(
                'utf-8', 'surrogateescape')
        container = self.env.account.container(invalid_utf8)
        self.assertFalse(container.create(cfg={'no_path_quote': True}))
        self.assert_status(412)
        self.assert_body('Invalid UTF8 or contains NULL')

    def testVersionOnlyPath(self):
        # A bare /v1 path (no account) is a bad URL.
        self.env.account.conn.make_request('PUT',
                                           cfg={'version_only_path': True})
        self.assert_status(412)
        self.assert_body('Bad URL')

    def testInvalidPath(self):
        was_path = self.env.account.conn.storage_path
        if (normalized_urls):
            self.env.account.conn.storage_path = '/'
        else:
            self.env.account.conn.storage_path = "/%s" % was_path
        try:
            self.env.account.conn.make_request('GET')
            self.assert_status(404)
        finally:
            # Always restore the connection's storage path for later tests.
            self.env.account.conn.storage_path = was_path

    def testPUTError(self):
        if load_constraint('allow_account_management'):
            raise SkipTest("Allow account management is enabled")
        self.env.account.conn.make_request('PUT')
        self.assert_status([403, 405])

    def testAccountHead(self):
        # Account stats are updated asynchronously, so poll a few times.
        try_count = 0
        while try_count < 5:
            try_count += 1

            info = self.env.account.info()
            for field in ['object_count', 'container_count', 'bytes_used']:
                self.assertGreaterEqual(info[field], 0)

            if info['container_count'] == len(self.env.containers):
                break

            if try_count < 5:
                time.sleep(1)

        self.assertEqual(info['container_count'], len(self.env.containers))
        self.assert_status(204)

    def testContainerSerializedInfo(self):
        # Fill each container with random objects, then verify the
        # serialized account listings report sane counts/bytes and the
        # right content type per format.
        container_info = {}
        for container in self.env.containers:
            info = {'bytes': 0}
            info['count'] = random.randint(10, 30)
            for i in range(info['count']):
                file_item = container.file(Utils.create_name())
                bytes = random.randint(1, 32768)
                file_item.write_random(bytes)
                info['bytes'] += bytes

            container_info[container.name] = info

        for format_type in ['json', 'xml']:
            for a in self.env.account.containers(
                    parms={'format': format_type}):
                self.assertGreaterEqual(a['count'], 0)
                self.assertGreaterEqual(a['bytes'], 0)

            headers = dict((k.lower(), v)
                           for k, v in self.env.conn.response.getheaders())
            if format_type == 'json':
                self.assertEqual(headers['content-type'],
                                 'application/json; charset=utf-8')
            elif format_type == 'xml':
                self.assertEqual(headers['content-type'],
                                 'application/xml; charset=utf-8')

    def testListingLimit(self):
        limit = load_constraint('account_listing_limit')
        for lim in (1, 100, limit / 2, limit - 1, limit, limit + 1, limit * 2):
            p = {'limit': lim}

            if lim <= limit:
                self.assertLessEqual(len(self.env.account.containers(parms=p)),
                                     lim)
                self.assert_status(200)
            else:
                # Over-limit requests are rejected outright.
                self.assertRaises(ResponseError,
                                  self.env.account.containers, parms=p)
                self.assert_status(412)

    def testContainerListing(self):
        a = sorted([c.name for c in self.env.containers])

        for format_type in [None, 'json', 'xml']:
            b = self.env.account.containers(parms={'format': format_type})

            if isinstance(b[0], dict):
                b = [x['name'] for x in b]

            self.assertEqual(a, b)

    def testListDelimiter(self):
        delimiter = '-'
        containers = ['test', delimiter.join(['test', 'bar']),
                      delimiter.join(['test', 'foo'])]
        for c in containers:
            cont = self.env.account.container(c)
            self.assertTrue(cont.create())

        results = self.env.account.containers(parms={'delimiter': delimiter})
        expected = ['test', 'test-']
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)

        results = self.env.account.containers(parms={'delimiter': delimiter,
                                                     'reverse': 'yes'})
        expected.reverse()
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)

    def testListMultiCharDelimiter(self):
        delimiter = '-&'
        containers = ['test', delimiter.join(['test', 'bar']),
                      delimiter.join(['test', 'foo'])]
        for c in containers:
            cont = self.env.account.container(c)
            self.assertTrue(cont.create())

        results = self.env.account.containers(parms={'delimiter': delimiter})
        expected = ['test', 'test-&']
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)

        results = self.env.account.containers(parms={'delimiter': delimiter,
                                                     'reverse': 'yes'})
        expected.reverse()
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)

    def testListDelimiterAndPrefix(self):
        delimiter = 'a'
        containers = ['bar', 'bazar']
        for c in containers:
            cont = self.env.account.container(c)
            self.assertTrue(cont.create())

        results = self.env.account.containers(parms={'delimiter': delimiter,
                                                     'prefix': 'ba'})
        expected = ['bar', 'baza']
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)

        results = self.env.account.containers(parms={'delimiter': delimiter,
                                                     'prefix': 'ba',
                                                     'reverse': 'yes'})
        expected.reverse()
        results = [r for r in results if r in expected]
        self.assertEqual(expected, results)

    def testContainerListingLastModified(self):
        expected = {}
        for container in self.env.containers:
            res = container.info()
            expected[container.name] = time.mktime(
                parsedate(res['last_modified']))

        for format_type in ['json', 'xml']:
            actual = {}
            containers = self.env.account.containers(
                parms={'format': format_type})
            if isinstance(containers[0], dict):
                for container in containers:
                    self.assertIn('name', container)  # sanity
                    self.assertIn('last_modified', container)  # sanity
                    # ceil by hand (wants easier way!)
                    datetime_str, micro_sec_str = \
                        container['last_modified'].split('.')

                    timestamp = time.mktime(
                        time.strptime(datetime_str,
                                      "%Y-%m-%dT%H:%M:%S"))
                    if int(micro_sec_str):
                        timestamp += 1
                    actual[container['name']] = timestamp

            self.assertEqual(expected, actual)

    def testInvalidAuthToken(self):
        hdrs = {'X-Auth-Token': 'bogus_auth_token'}
        self.assertRaises(ResponseError, self.env.account.info, hdrs=hdrs)
        self.assert_status(401)

    def testLastContainerMarker(self):
        # Using the last container name as the marker yields an empty page.
        for format_type in [None, 'json', 'xml']:
            containers = self.env.account.containers(parms={
                'format': format_type})
            self.assertEqual(len(containers), len(self.env.containers))
            self.assert_status(200)

            marker = (containers[-1] if format_type is None
                      else containers[-1]['name'])
            containers = self.env.account.containers(
                parms={'format': format_type, 'marker': marker})
            self.assertEqual(len(containers), 0)
            if format_type is None:
                self.assert_status(204)
            else:
                self.assert_status(200)

    def testMarkerLimitContainerList(self):
        for format_type in [None, 'json', 'xml']:
            for marker in ['0', 'A', 'I', 'R', 'Z', 'a', 'i', 'r', 'z',
                           'abc123', 'mnop', 'xyz']:
                limit = random.randint(2, 9)
                containers = self.env.account.containers(
                    parms={'format': format_type,
                           'marker': marker,
                           'limit': limit})
                self.assertLessEqual(len(containers), limit)
                if containers:
                    if isinstance(containers[0], dict):
                        containers = [x['name'] for x in containers]
                    # Everything returned must sort after the marker.
                    self.assertGreater(locale.strcoll(containers[0], marker),
                                       0)

    def testContainersOrderedByName(self):
        for format_type in [None, 'json', 'xml']:
            containers = self.env.account.containers(
                parms={'format': format_type})
            if isinstance(containers[0], dict):
                containers = [x['name'] for x in containers]
            self.assertEqual(sorted(containers, key=locale.strxfrm),
                             containers)

    def testQuotedWWWAuthenticateHeader(self):
        # check that the www-authenticate header value with the swift realm
        # is correctly quoted.
        conn = Connection(tf.config)
        conn.authenticate()
        inserted_html = '<b>Hello World'
        hax = 'AUTH_haxx"\nContent-Length: %d\n\n%s' % (len(inserted_html),
                                                        inserted_html)
        quoted_hax = urllib.parse.quote(hax)
        conn.connection.request('GET', '/v1/' + quoted_hax, None, {})
        resp = conn.connection.getresponse()
        resp_headers = {}
        for h, v in resp.getheaders():
            h = h.lower()
            if h in resp_headers:
                # py2 would do this for us, but py3 apparently keeps them
                # separate? Not sure which I like more...
                resp_headers[h] += ',' + v
            else:
                resp_headers[h] = v
        self.assertIn('www-authenticate', resp_headers)
        actual = resp_headers['www-authenticate']
        expected = 'Swift realm="%s"' % quoted_hax
        # other middleware e.g. auth_token may also set www-authenticate
        # headers in which case actual values will be a comma separated list.
        # check that expected value is among the actual values
        self.assertIn(expected, actual)
class TestAccountUTF8(Base2, TestAccount):
    # Same as TestAccount, but with UTF-8 container names.
    pass
class TestAccountNoContainers(Base):
    """Listing an empty account should return no containers."""

    def testGetRequest(self):
        for fmt in (None, 'json', 'xml'):
            listing = self.env.account.containers(parms={'format': fmt})
            self.assertFalse(listing)
            # A plain-text listing of nothing is a 204; the structured
            # formats still return 200 with an empty document.
            expected_status = 204 if fmt is None else 200
            self.assert_status(expected_status)
class TestAccountNoContainersUTF8(Base2, TestAccountNoContainers):
    # Same as TestAccountNoContainers, but with UTF-8 names.
    pass
class TestAccountSortingEnv(BaseEnv):
    """Environment with names chosen to exercise byte-order sorting."""

    @classmethod
    def setUp(cls):
        super(TestAccountSortingEnv, cls).setUp()
        postfix = Utils.create_name()
        bases = ('a1', 'a2', 'A3', 'b1', 'B2', 'a10', 'b10', 'zz')
        cls.cont_items = ['%s%s' % (base, postfix) for base in bases]
        for name in cls.cont_items:
            container = cls.account.container(name)
            if not container.create():
                raise ResponseError(cls.conn.response)
class TestAccountSorting(Base):
    """Tests for listing order and the 'reverse' query parameter."""
    env = TestAccountSortingEnv

    def testAccountContainerListSorting(self):
        # name (byte order) sorting.
        cont_list = sorted(self.env.cont_items)
        # Any unrecognised value is treated as "not reversed".
        for reverse in ('false', 'no', 'off', '', 'garbage'):
            cont_listing = self.env.account.containers(
                parms={'reverse': reverse})
            self.assert_status(200)
            self.assertEqual(cont_list, cont_listing,
                             'Expected %s but got %s with reverse param %r'
                             % (cont_list, cont_listing, reverse))

    def testAccountContainerListSortingReverse(self):
        # name (byte order) sorting.
        cont_list = sorted(self.env.cont_items)
        cont_list.reverse()
        for reverse in ('true', '1', 'yes', 'on', 't', 'y'):
            cont_listing = self.env.account.containers(
                parms={'reverse': reverse})
            self.assert_status(200)
            self.assertEqual(cont_list, cont_listing,
                             'Expected %s but got %s with reverse param %r'
                             % (cont_list, cont_listing, reverse))

    def testAccountContainerListSortingByPrefix(self):
        cont_list = sorted(c for c in self.env.cont_items if c.startswith('a'))
        cont_list.reverse()
        cont_listing = self.env.account.containers(parms={
            'reverse': 'on', 'prefix': 'a'})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)

    def testAccountContainerListSortingByMarkersExclusive(self):
        first_item = self.env.cont_items[3]  # 'b1' + postfix
        last_item = self.env.cont_items[4]  # 'B2' + postfix

        cont_list = sorted(c for c in self.env.cont_items
                           if last_item < c < first_item)
        cont_list.reverse()
        cont_listing = self.env.account.containers(parms={
            'reverse': 'on', 'marker': first_item, 'end_marker': last_item})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)

    def testAccountContainerListSortingByMarkersInclusive(self):
        first_item = self.env.cont_items[3]  # 'b1' + postfix
        last_item = self.env.cont_items[4]  # 'B2' + postfix

        cont_list = sorted(c for c in self.env.cont_items
                           if last_item <= c <= first_item)
        cont_list.reverse()
        # Nudge each marker by one byte so both ends become inclusive.
        cont_listing = self.env.account.containers(parms={
            'reverse': 'on', 'marker': first_item + '\x00',
            'end_marker': last_item[:-1] + chr(ord(last_item[-1]) - 1)})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)

    def testAccountContainerListSortingByReversedMarkers(self):
        # Markers given in the wrong order for a reversed listing yield
        # an empty result.
        cont_listing = self.env.account.containers(parms={
            'reverse': 'on', 'marker': 'B', 'end_marker': 'b1'})
        self.assert_status(204)
        self.assertEqual([], cont_listing)
class TestContainerEnv(BaseEnv):
    """Environment with one container holding ten random 128-byte files."""

    @classmethod
    def setUp(cls):
        super(TestContainerEnv, cls).setUp()
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        cls.file_count = 10
        cls.file_size = 128
        cls.files = []
        for _ in range(cls.file_count):
            file_item = cls.container.file(Utils.create_name())
            file_item.write_random(cls.file_size)
            cls.files.append(file_item.name)
class TestContainerDev(Base):
    # Smoke-test shell: exercises only the environment setup/teardown.
    env = TestContainerEnv
class TestContainerDevUTF8(Base2, TestContainerDev):
    # Same as TestContainerDev, but with UTF-8 names.
    pass
class TestContainer(Base):
env = TestContainerEnv
def testContainerNameLimit(self):
limit = load_constraint('max_container_name_length')
for lim in (limit - 100, limit - 10, limit - 1, limit,
limit + 1, limit + 10, limit + 100):
cont = self.env.account.container('a' * lim)
if lim <= limit:
self.assertTrue(cont.create())
self.assert_status((201, 202))
else:
self.assertFalse(cont.create())
self.assert_status(400)
    def testFileThenContainerDelete(self):
        """Deleting a container's only file, then the container, succeeds."""
        cont = self.env.account.container(Utils.create_name())
        self.assertTrue(cont.create())
        file_item = cont.file(Utils.create_name())
        self.assertTrue(file_item.write_random())

        self.assertTrue(file_item.delete())
        self.assert_status(204)
        self.assertNotIn(file_item.name, cont.files())

        self.assertTrue(cont.delete())
        self.assert_status(204)
        self.assertNotIn(cont.name, self.env.account.containers())
    def testFileListingLimitMarkerPrefix(self):
        """Exercise limit/marker/prefix combinations on object listings."""
        cont = self.env.account.container(Utils.create_name())
        self.assertTrue(cont.create())

        files = sorted([Utils.create_name() for x in range(10)])
        for f in files:
            file_item = cont.file(f)
            self.assertTrue(file_item.write_random())

        for i in range(len(files)):
            f = files[i]
            for j in range(1, len(files) - i):
                # limit + marker returns the j names following f.
                self.assertEqual(cont.files(parms={'limit': j, 'marker': f}),
                                 files[i + 1: i + j + 1])
            self.assertEqual(cont.files(parms={'marker': f}), files[i + 1:])
            self.assertEqual(cont.files(parms={'marker': f, 'prefix': f}), [])
            self.assertEqual(cont.files(parms={'prefix': f}), [f])
    def testPrefixAndLimit(self):
        """Prefix filtering and limit interact correctly on listings."""
        load_constraint('container_listing_limit')
        cont = self.env.account.container(Utils.create_name())
        self.assertTrue(cont.create())

        prefix_file_count = 10
        limit_count = 2
        prefixs = ['alpha/', 'beta/', 'kappa/']
        prefix_files = {}

        for prefix in prefixs:
            prefix_files[prefix] = []

            for i in range(prefix_file_count):
                file_item = cont.file(prefix + Utils.create_name())
                file_item.write()
                prefix_files[prefix].append(file_item.name)

        for format_type in [None, 'json', 'xml']:
            for prefix in prefixs:
                files = cont.files(parms={'prefix': prefix,
                                          'format': format_type})
                if isinstance(files[0], dict):
                    files = [x.get('name', x.get('subdir')) for x in files]
                self.assertEqual(files, sorted(prefix_files[prefix]))

        for format_type in [None, 'json', 'xml']:
            for prefix in prefixs:
                files = cont.files(parms={'limit': limit_count,
                                          'prefix': prefix,
                                          'format': format_type})
                if isinstance(files[0], dict):
                    files = [x.get('name', x.get('subdir')) for x in files]
                self.assertEqual(len(files), limit_count)

                for file_item in files:
                    self.assertTrue(file_item.startswith(prefix))
    def testListDelimiter(self):
        """Single-character delimiter rolls names up to the first delimiter
        occurrence; reverse=yes returns the rolled-up listing reversed."""
        cont = self.env.account.container(Utils.create_name())
        self.assertTrue(cont.create())
        delimiter = '-'
        files = ['test', delimiter.join(['test', 'bar']),
                 delimiter.join(['test', 'foo'])]
        for f in files:
            file_item = cont.file(f)
            self.assertTrue(file_item.write_random())
        for format_type in [None, 'json', 'xml']:
            # no delimiter: all three names come back
            results = cont.files(parms={'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['test', 'test-bar', 'test-foo'])
            # delimiter: 'test-bar'/'test-foo' collapse to subdir 'test-'
            results = cont.files(parms={'delimiter': delimiter,
                                        'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['test', 'test-'])
            results = cont.files(parms={'delimiter': delimiter,
                                        'format': format_type,
                                        'reverse': 'yes'})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['test-', 'test'])
    def testListMultiCharDelimiter(self):
        """Multi-character delimiters are honoured; a name sharing only a
        partial delimiter ('test-'baz') must not be rolled up."""
        cont = self.env.account.container(Utils.create_name())
        self.assertTrue(cont.create())
        delimiter = '-&'
        files = ['test', delimiter.join(['test', 'bar']),
                 delimiter.join(['test', 'foo']), "test-'baz"]
        for f in files:
            file_item = cont.file(f)
            self.assertTrue(file_item.write_random())
        for format_type in [None, 'json', 'xml']:
            # no delimiter: all four names come back
            results = cont.files(parms={'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['test', 'test-&bar', 'test-&foo',
                                       "test-'baz"])
            # delimiter: only the names containing the full '-&' collapse
            results = cont.files(parms={'delimiter': delimiter,
                                        'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['test', 'test-&', "test-'baz"])
            results = cont.files(parms={'delimiter': delimiter,
                                        'format': format_type,
                                        'reverse': 'yes'})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ["test-'baz", 'test-&', 'test'])
def testListDelimiterAndPrefix(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
delimiter = 'a'
files = ['bar', 'bazar']
for f in files:
file_item = cont.file(f)
self.assertTrue(file_item.write_random())
results = cont.files(parms={'delimiter': delimiter, 'prefix': 'ba'})
self.assertEqual(results, ['bar', 'baza'])
results = cont.files(parms={'delimiter': delimiter,
'prefix': 'ba',
'reverse': 'yes'})
self.assertEqual(results, ['baza', 'bar'])
def testLeadingDelimiter(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
delimiter = '/'
files = ['test', delimiter.join(['', 'test', 'bar']),
delimiter.join(['', 'test', 'bar', 'foo'])]
for f in files:
file_item = cont.file(f)
self.assertTrue(file_item.write_random())
results = cont.files(parms={'delimiter': delimiter})
self.assertEqual(results, [delimiter, 'test'])
def testCreate(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
self.assert_status(201)
self.assertIn(cont.name, self.env.account.containers())
def testContainerFileListOnContainerThatDoesNotExist(self):
for format_type in [None, 'json', 'xml']:
container = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, container.files,
parms={'format': format_type})
self.assert_status(404)
    def testUtf8Container(self):
        """Valid UTF-8 container names work end to end; invalid UTF-8 is
        rejected with 412 on both PUT and GET."""
        valid_utf8 = Utils.create_utf8_name()
        if six.PY2:
            invalid_utf8 = valid_utf8[::-1]
        else:
            # reversing the byte sequence breaks multi-byte sequences;
            # surrogateescape keeps the bogus bytes representable in str
            invalid_utf8 = (valid_utf8.encode('utf8')[::-1]).decode(
                'utf-8', 'surrogateescape')
        container = self.env.account.container(valid_utf8)
        # no_path_quote sends the raw name so the server sees the bytes
        self.assertTrue(container.create(cfg={'no_path_quote': True}))
        self.assertIn(container.name, self.env.account.containers())
        self.assertEqual(container.files(), [])
        self.assertTrue(container.delete())
        container = self.env.account.container(invalid_utf8)
        self.assertFalse(container.create(cfg={'no_path_quote': True}))
        self.assert_status(412)
        self.assertRaises(ResponseError, container.files,
                          cfg={'no_path_quote': True})
        self.assert_status(412)
def testCreateOnExisting(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
self.assert_status(201)
self.assertTrue(cont.create())
self.assert_status(202)
    def testSlashInName(self):
        """An unquoted '/' in a container name must be rejected with 404,
        since the path parses as container/object."""
        if six.PY2:
            cont_name = list(Utils.create_name().decode('utf-8'))
        else:
            cont_name = list(Utils.create_name())
        # splice a slash somewhere in the middle of the name
        cont_name[random.randint(2, len(cont_name) - 2)] = '/'
        cont_name = ''.join(cont_name)
        if six.PY2:
            cont_name = cont_name.encode('utf-8')
        cont = self.env.account.container(cont_name)
        # no_path_quote leaves the '/' unescaped in the request path
        self.assertFalse(cont.create(cfg={'no_path_quote': True}),
                         'created container with name %s' % (cont_name))
        self.assert_status(404)
        self.assertNotIn(cont.name, self.env.account.containers())
def testDelete(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
self.assert_status(201)
self.assertTrue(cont.delete())
self.assert_status(204)
self.assertNotIn(cont.name, self.env.account.containers())
def testDeleteOnContainerThatDoesNotExist(self):
cont = self.env.account.container(Utils.create_name())
self.assertFalse(cont.delete())
self.assert_status(404)
def testDeleteOnContainerWithFiles(self):
cont = self.env.account.container(Utils.create_name())
self.assertTrue(cont.create())
file_item = cont.file(Utils.create_name())
file_item.write_random(self.env.file_size)
self.assertIn(file_item.name, cont.files())
self.assertFalse(cont.delete())
self.assert_status(409)
def testFileCreateInContainerThatDoesNotExist(self):
file_item = File(self.env.conn, self.env.account, Utils.create_name(),
Utils.create_name())
self.assertRaises(ResponseError, file_item.write)
self.assert_status(404)
    def testLastFileMarker(self):
        """Using the last object name as marker yields an empty listing:
        204 for plain text, 200 (with empty body) for json/xml."""
        for format_type in [None, 'json', 'xml']:
            files = self.env.container.files(parms={'format': format_type})
            self.assertEqual(len(files), len(self.env.files))
            self.assert_status(200)
            # plain listings return names; json/xml return dict entries
            marker = files[-1] if format_type is None else files[-1]['name']
            files = self.env.container.files(
                parms={'format': format_type, 'marker': marker})
            self.assertEqual(len(files), 0)
            if format_type is None:
                self.assert_status(204)
            else:
                self.assert_status(200)
def testContainerFileList(self):
for format_type in [None, 'json', 'xml']:
files = self.env.container.files(parms={'format': format_type})
self.assert_status(200)
if isinstance(files[0], dict):
files = [x['name'] for x in files]
for file_item in self.env.files:
self.assertIn(file_item, files)
for file_item in files:
self.assertIn(file_item, self.env.files)
    def _testContainerFormattedFileList(self, format_type):
        """Verify every json/xml listing entry against a HEAD of the
        corresponding object (etag, content type, size)."""
        expected = {}
        for name in self.env.files:
            expected[name] = self.env.container.file(name).info()
        file_list = self.env.container.files(parms={'format': format_type})
        self.assert_status(200)
        for actual in file_list:
            name = actual['name']
            self.assertIn(name, expected)
            if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
                # clusters that quote etags by default return them quoted
                self.assertEqual(expected[name]['etag'],
                                 '"%s"' % actual['hash'])
            else:
                self.assertEqual(expected[name]['etag'], actual['hash'])
            self.assertEqual(
                expected[name]['content_type'], actual['content_type'])
            self.assertEqual(
                expected[name]['content_length'], actual['bytes'])
            # each name must appear exactly once
            expected.pop(name)
        self.assertFalse(expected)  # sanity check: all objects were listed
    def testContainerJsonFileList(self):
        """JSON listing entries match per-object HEAD metadata."""
        self._testContainerFormattedFileList('json')
    def testContainerXmlFileList(self):
        """XML listing entries match per-object HEAD metadata."""
        self._testContainerFormattedFileList('xml')
def testMarkerLimitFileList(self):
for format_type in [None, 'json', 'xml']:
for marker in ['0', 'A', 'I', 'R', 'Z', 'a', 'i', 'r', 'z',
'abc123', 'mnop', 'xyz']:
limit = random.randint(2, self.env.file_count - 1)
files = self.env.container.files(parms={'format': format_type,
'marker': marker,
'limit': limit})
if not files:
continue
if isinstance(files[0], dict):
files = [x['name'] for x in files]
self.assertLessEqual(len(files), limit)
if files:
if isinstance(files[0], dict):
files = [x['name'] for x in files]
self.assertGreater(locale.strcoll(files[0], marker), 0)
    def testFileOrder(self):
        """Listings come back sorted by name in every format."""
        for format_type in [None, 'json', 'xml']:
            files = self.env.container.files(parms={'format': format_type})
            if isinstance(files[0], dict):
                files = [x['name'] for x in files]
            # strxfrm makes the comparison locale-aware
            self.assertEqual(sorted(files, key=locale.strxfrm), files)
def testContainerInfo(self):
info = self.env.container.info()
self.assert_status(204)
self.assertEqual(info['object_count'], self.env.file_count)
self.assertEqual(info['bytes_used'],
self.env.file_count * self.env.file_size)
def testContainerInfoOnContainerThatDoesNotExist(self):
container = self.env.account.container(Utils.create_name())
self.assertRaises(ResponseError, container.info)
self.assert_status(404)
def testContainerFileListWithLimit(self):
for format_type in [None, 'json', 'xml']:
files = self.env.container.files(parms={'format': format_type,
'limit': 2})
self.assertEqual(len(files), 2)
    def testContainerExistenceCachingProblem(self):
        """A cached 404 for a container must be invalidated by its PUT."""
        cont = self.env.account.container(Utils.create_name())
        # GET before creation populates a (cached) 404
        self.assertRaises(ResponseError, cont.files)
        self.assertTrue(cont.create())
        self.assertEqual(cont.files(), [])
        cont = self.env.account.container(Utils.create_name())
        self.assertRaises(ResponseError, cont.files)
        self.assertTrue(cont.create())
        # NB: no GET! Make sure the PUT cleared the cached 404
        file_item = cont.file(Utils.create_name())
        file_item.write_random()
    def testContainerLastModified(self):
        """Container Last-Modified changes on container POST/PUT but not on
        object PUT/POST within the container."""
        container = self.env.account.container(Utils.create_name())
        self.assertTrue(container.create())
        info = container.info()
        t0 = info['last_modified']
        # last modified header is in date format which supports in second
        # so we need to wait to increment a sec in the header.
        eventlet.sleep(1)
        # POST container change last modified timestamp
        self.assertTrue(
            container.update_metadata({'x-container-meta-japan': 'mitaka'}))
        info = container.info()
        t1 = info['last_modified']
        self.assertNotEqual(t0, t1)
        eventlet.sleep(1)
        # PUT container (overwrite) also change last modified
        self.assertTrue(container.create())
        info = container.info()
        t2 = info['last_modified']
        self.assertNotEqual(t1, t2)
        eventlet.sleep(1)
        # PUT object doesn't change container last modified timestamp
        obj = container.file(Utils.create_name())
        self.assertTrue(
            obj.write(b"aaaaa", hdrs={'Content-Type': 'text/plain'}))
        info = container.info()
        t3 = info['last_modified']
        self.assertEqual(t2, t3)
        # POST object also doesn't change container last modified timestamp
        self.assertTrue(
            obj.sync_metadata({'us': 'austin'}))
        info = container.info()
        t4 = info['last_modified']
        self.assertEqual(t2, t4)
class TestContainerUTF8(Base2, TestContainer):
    """Re-run all TestContainer tests under the Base2 variant (presumably
    UTF-8 resource names -- see Base2)."""
    pass
class TestContainerSortingEnv(BaseEnv):
    """Fixture: one container holding names of mixed case and length so
    sorting/reverse listing semantics can be exercised."""
    @classmethod
    def setUp(cls):
        super(TestContainerSortingEnv, cls).setUp()
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        # mixed case and digit lengths: byte order != natural/case order
        cls.file_items = ('a1', 'a2', 'A3', 'b1', 'B2', 'a10', 'b10', 'zz')
        cls.files = list()
        cls.file_size = 128
        for name in cls.file_items:
            file_item = cls.container.file(name)
            file_item.write_random(cls.file_size)
            cls.files.append(file_item.name)
class TestContainerSorting(Base):
    """Listing-order tests: 'reverse' flips the listing, markers apply
    against the reversed order, and non-truthy values mean ascending."""
    env = TestContainerSortingEnv
    def testContainerFileListSortingReversed(self):
        """Every truthy 'reverse' value returns the listing reversed."""
        file_list = list(sorted(self.env.file_items))
        file_list.reverse()
        for reverse in ('true', '1', 'yes', 'on', 't', 'y'):
            cont_files = self.env.container.files(parms={'reverse': reverse})
            self.assert_status(200)
            self.assertEqual(file_list, cont_files,
                             'Expected %s but got %s with reverse param %r'
                             % (file_list, cont_files, reverse))
    def testContainerFileSortingByPrefixReversed(self):
        """prefix filtering still applies when the listing is reversed."""
        cont_list = sorted(c for c in self.env.file_items if c.startswith('a'))
        cont_list.reverse()
        cont_listing = self.env.container.files(parms={
            'reverse': 'on', 'prefix': 'a'})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)
    def testContainerFileSortingByMarkersExclusiveReversed(self):
        """With reverse, marker > end_marker; both bounds are exclusive."""
        first_item = self.env.file_items[3]  # 'b1'
        last_item = self.env.file_items[4]  # 'B2'
        cont_list = sorted(c for c in self.env.file_items
                           if last_item < c < first_item)
        cont_list.reverse()
        cont_listing = self.env.container.files(parms={
            'reverse': 'on', 'marker': first_item, 'end_marker': last_item})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)
    def testContainerFileSortingByMarkersInclusiveReversed(self):
        """Nudging each marker by one code point makes the bounds
        effectively inclusive."""
        first_item = self.env.file_items[3]  # 'b1'
        last_item = self.env.file_items[4]  # 'B2'
        cont_list = sorted(c for c in self.env.file_items
                           if last_item <= c <= first_item)
        cont_list.reverse()
        cont_listing = self.env.container.files(parms={
            'reverse': 'on', 'marker': first_item + '\x00',
            'end_marker': last_item[:-1] + chr(ord(last_item[-1]) - 1)})
        self.assert_status(200)
        self.assertEqual(cont_list, cont_listing)
    def testContainerFileSortingByReversedMarkersReversed(self):
        """Markers in the wrong order for reverse yield nothing (204)."""
        cont_listing = self.env.container.files(parms={
            'reverse': 'on', 'marker': 'B', 'end_marker': 'b1'})
        self.assert_status(204)
        self.assertEqual([], cont_listing)
    def testContainerFileListSorting(self):
        """Falsy or unrecognised 'reverse' values leave the order ascending."""
        file_list = list(sorted(self.env.file_items))
        cont_files = self.env.container.files()
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        # Let's try again but with reverse specifically turned off
        cont_files = self.env.container.files(parms={'reverse': 'off'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': 'false'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': 'no'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': ''})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        # Let's try again but with incorrect reverse values
        cont_files = self.env.container.files(parms={'reverse': 'foo'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': 'hai'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
        cont_files = self.env.container.files(parms={'reverse': 'o=[]::::>'})
        self.assert_status(200)
        self.assertEqual(file_list, cont_files)
class TestContainerPathsEnv(BaseEnv):
    """Fixture: one container populated with pseudo-directory markers
    (names ending in '/', Content-Type application/directory) and files,
    both with and without a leading '/'."""
    @classmethod
    def setUp(cls):
        super(TestContainerPathsEnv, cls).setUp()
        cls.file_size = 8
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        cls.files = [
            '/file1',
            '/file A',
            '/dir1/',
            '/dir2/',
            '/dir1/file2',
            '/dir1/subdir1/',
            '/dir1/subdir2/',
            '/dir1/subdir1/file2',
            '/dir1/subdir1/file3',
            '/dir1/subdir1/file4',
            '/dir1/subdir1/subsubdir1/',
            '/dir1/subdir1/subsubdir1/file5',
            '/dir1/subdir1/subsubdir1/file6',
            '/dir1/subdir1/subsubdir1/file7',
            '/dir1/subdir1/subsubdir1/file8',
            '/dir1/subdir1/subsubdir2/',
            '/dir1/subdir1/subsubdir2/file9',
            '/dir1/subdir1/subsubdir2/file0',
            'file1',
            'dir1/',
            'dir2/',
            'dir1/file2',
            'dir1/subdir1/',
            'dir1/subdir2/',
            'dir1/subdir1/file2',
            'dir1/subdir1/file3',
            'dir1/subdir1/file4',
            'dir1/subdir1/subsubdir1/',
            'dir1/subdir1/subsubdir1/file5',
            'dir1/subdir1/subsubdir1/file6',
            'dir1/subdir1/subsubdir1/file7',
            'dir1/subdir1/subsubdir1/file8',
            'dir1/subdir1/subsubdir2/',
            'dir1/subdir1/subsubdir2/file9',
            'dir1/subdir1/subsubdir2/file0',
            'dir1/subdir with spaces/',
            'dir1/subdir with spaces/file B',
            'dir1/subdir+with{whatever/',
            'dir1/subdir+with{whatever/file D',
        ]
        stored_files = set()
        for f in cls.files:
            file_item = cls.container.file(f)
            if f.endswith('/'):
                # directory marker: zero-byte application/directory object
                file_item.write(hdrs={'Content-Type': 'application/directory'})
            else:
                # NOTE(review): non-directory entries are also written with
                # an application/directory content type -- confirm intended
                file_item.write_random(cls.file_size,
                                       hdrs={'Content-Type':
                                             'application/directory'})
            if (normalized_urls):
                # collapse repeated '/' the way URL normalization would,
                # preserving a single trailing slash
                nfile = '/'.join(filter(None, f.split('/')))
                if (f[-1] == '/'):
                    nfile += '/'
                stored_files.add(nfile)
            else:
                stored_files.add(f)
        cls.stored_files = sorted(stored_files)
class TestContainerPaths(Base):
    """Tests for the legacy 'path' listing parameter against the
    pseudo-directory fixture in TestContainerPathsEnv."""
    env = TestContainerPathsEnv
    def testTraverseContainer(self):
        """Recursively walking via path= visits exactly the stored entries
        under the chosen root ('' vs '/')."""
        found_files = []
        found_dirs = []
        def recurse_path(path, count=0):
            # guard against a listing loop making this walk unbounded
            if count > 10:
                raise ValueError('too deep recursion')
            for file_item in self.env.container.files(parms={'path': path}):
                self.assertTrue(file_item.startswith(path))
                if file_item.endswith('/'):
                    recurse_path(file_item, count + 1)
                    found_dirs.append(file_item)
                else:
                    found_files.append(file_item)
        recurse_path('')
        for file_item in self.env.stored_files:
            if file_item.startswith('/'):
                # a walk rooted at '' must not see leading-slash entries
                self.assertNotIn(file_item, found_dirs)
                self.assertNotIn(file_item, found_files)
            elif file_item.endswith('/'):
                self.assertIn(file_item, found_dirs)
                self.assertNotIn(file_item, found_files)
            else:
                self.assertIn(file_item, found_files)
                self.assertNotIn(file_item, found_dirs)
        found_files = []
        found_dirs = []
        recurse_path('/')
        for file_item in self.env.stored_files:
            if not file_item.startswith('/'):
                # a walk rooted at '/' must not see non-leading-slash entries
                self.assertNotIn(file_item, found_dirs)
                self.assertNotIn(file_item, found_files)
            elif file_item.endswith('/'):
                self.assertIn(file_item, found_dirs)
                self.assertNotIn(file_item, found_files)
            else:
                self.assertIn(file_item, found_files)
                self.assertNotIn(file_item, found_dirs)
    def testContainerListing(self):
        """Full listings match the fixture; formatted entries carry sane
        bytes/last_modified and application/directory for markers."""
        for format_type in (None, 'json', 'xml'):
            files = self.env.container.files(parms={'format': format_type})
            if isinstance(files[0], dict):
                files = [str(x['name']) for x in files]
            self.assertEqual(files, self.env.stored_files)
        for format_type in ('json', 'xml'):
            for file_item in self.env.container.files(parms={'format':
                                                             format_type}):
                self.assertGreaterEqual(int(file_item['bytes']), 0)
                self.assertIn('last_modified', file_item)
                if file_item['name'].endswith('/'):
                    self.assertEqual(file_item['content_type'],
                                     'application/directory')
    def testStructure(self):
        """path= listings return the direct children of each directory."""
        def assert_listing(path, file_list):
            files = self.env.container.files(parms={'path': path})
            self.assertEqual(sorted(file_list, key=locale.strxfrm), files)
        if not normalized_urls:
            assert_listing('/', ['/dir1/', '/dir2/', '/file1', '/file A'])
            assert_listing('/dir1',
                           ['/dir1/file2', '/dir1/subdir1/', '/dir1/subdir2/'])
            assert_listing('/dir1/',
                           ['/dir1/file2', '/dir1/subdir1/', '/dir1/subdir2/'])
            assert_listing('/dir1/subdir1',
                           ['/dir1/subdir1/subsubdir2/', '/dir1/subdir1/file2',
                            '/dir1/subdir1/file3', '/dir1/subdir1/file4',
                            '/dir1/subdir1/subsubdir1/'])
            assert_listing('/dir1/subdir2', [])
            assert_listing('', ['file1', 'dir1/', 'dir2/'])
        else:
            assert_listing('', ['file1', 'dir1/', 'dir2/', 'file A'])
        assert_listing('dir1', ['dir1/file2', 'dir1/subdir1/',
                                'dir1/subdir2/', 'dir1/subdir with spaces/',
                                'dir1/subdir+with{whatever/'])
        assert_listing('dir1/subdir1',
                       ['dir1/subdir1/file4', 'dir1/subdir1/subsubdir2/',
                        'dir1/subdir1/file2', 'dir1/subdir1/file3',
                        'dir1/subdir1/subsubdir1/'])
        assert_listing('dir1/subdir1/subsubdir1',
                       ['dir1/subdir1/subsubdir1/file7',
                        'dir1/subdir1/subsubdir1/file5',
                        'dir1/subdir1/subsubdir1/file8',
                        'dir1/subdir1/subsubdir1/file6'])
        assert_listing('dir1/subdir1/subsubdir1/',
                       ['dir1/subdir1/subsubdir1/file7',
                        'dir1/subdir1/subsubdir1/file5',
                        'dir1/subdir1/subsubdir1/file8',
                        'dir1/subdir1/subsubdir1/file6'])
        assert_listing('dir1/subdir with spaces/',
                       ['dir1/subdir with spaces/file B'])
class TestFileEnv(BaseEnv):
    """Fixture for object tests: one container in the primary account and,
    when a second account is configured, a second authenticated connection
    for cross-account copy/ACL tests."""
    @classmethod
    def setUp(cls):
        super(TestFileEnv, cls).setUp()
        if not tf.skip2:
            # creating another account and connection
            # for account to account copy tests
            config2 = deepcopy(tf.config)
            config2['account'] = tf.config['account2']
            config2['username'] = tf.config['username2']
            config2['password'] = tf.config['password2']
            cls.conn2 = Connection(config2)
            cls.conn2.authenticate()
            cls.account2 = cls.conn2.get_account()
            cls.account2.delete_containers()
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)
        cls.file_size = 128
        # With keystoneauth we need the accounts to have had the project
        # domain id persisted as sysmeta prior to testing ACLs. This may
        # not be the case if, for example, the account was created using
        # a request with reseller_admin role, when project domain id may
        # not have been known. So we ensure that the project domain id is
        # in sysmeta by making a POST to the accounts using an admin role.
        cls.account.update_metadata()
        if not tf.skip2:
            cls.account2.update_metadata()
class TestFileDev(Base):
    """Object (file) tests run against the TestFileEnv fixture."""
    env = TestFileEnv
class TestFileDevUTF8(Base2, TestFileDev):
    """Re-run TestFileDev under the Base2 variant (presumably UTF-8
    resource names -- see Base2)."""
    pass
class TestFile(Base):
env = TestFileEnv
    def testGetResponseHeaders(self):
        """GET responses carry exactly the expected headers: object meta,
        etag, content type/length, and echoed Connection; X-Delete-After
        is translated to X-Delete-At and never returned itself."""
        obj_data = b'test_body'
        def do_test(put_hdrs, get_hdrs, expected_hdrs, unexpected_hdrs):
            # PUT an object with put_hdrs, GET it with get_hdrs, then
            # verify the GET response headers against the expectations
            filename = Utils.create_name()
            file_item = self.env.container.file(filename)
            resp = file_item.write(
                data=obj_data, hdrs=put_hdrs, return_resp=True)
            # put then get an object
            resp.read()
            read_data = file_item.read(hdrs=get_hdrs)
            self.assertEqual(obj_data, read_data)  # sanity check
            resp_headers = file_item.conn.response.getheaders()
            # check the *list* of all header (name, value) pairs rather than
            # constructing a dict in case of repeated names in the list
            errors = []
            for k, v in resp_headers:
                if k.lower() in unexpected_hdrs:
                    errors.append('Found unexpected header %s: %s' % (k, v))
            for k, v in expected_hdrs.items():
                matches = [hdr for hdr in resp_headers if hdr[0].lower() == k]
                if not matches:
                    errors.append('Missing expected header %s' % k)
                for (got_k, got_v) in matches:
                    # The Connection: header is parsed by cluster's LB and may
                    # be returned in either original lowercase or camel-cased.
                    if k == 'connection':
                        got_v = got_v.lower()
                    if got_v != v:
                        errors.append('Expected %s but got %s for %s' %
                                      (v, got_v, k))
            if errors:
                self.fail(
                    'Errors in response headers:\n  %s' % '\n  '.join(errors))
        put_headers = {'X-Object-Meta-Fruit': 'Banana',
                       'X-Delete-After': '10000',
                       'Content-Type': 'application/test'}
        expected_headers = {'content-length': str(len(obj_data)),
                            'x-object-meta-fruit': 'Banana',
                            'accept-ranges': 'bytes',
                            'content-type': 'application/test',
                            'etag': md5(
                                obj_data, usedforsecurity=False).hexdigest(),
                            'last-modified': mock.ANY,
                            'date': mock.ANY,
                            'x-delete-at': mock.ANY,
                            'x-trans-id': mock.ANY,
                            'x-openstack-request-id': mock.ANY}
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            expected_headers['etag'] = '"%s"' % expected_headers['etag']
        unexpected_headers = ['connection', 'x-delete-after']
        do_test(put_headers, {}, expected_headers, unexpected_headers)
        get_headers = {'Connection': 'keep-alive'}
        expected_headers['connection'] = 'keep-alive'
        unexpected_headers = ['x-delete-after']
        do_test(put_headers, get_headers, expected_headers, unexpected_headers)
    def testCopy(self):
        """Object COPY within and across containers: metadata merges by
        default, sysmeta-adjacent headers update when supplied, and
        X-Fresh-Metadata replaces user metadata entirely."""
        # makes sure to test encoded characters
        source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
        file_item = self.env.container.file(source_filename)
        metadata = {}
        metadata[Utils.create_ascii_name()] = Utils.create_name()
        put_headers = {'Content-Type': 'application/test',
                       'Content-Encoding': 'gzip',
                       'Content-Disposition': 'attachment; filename=myfile'}
        file_item.metadata = metadata
        data = file_item.write_random(hdrs=put_headers)
        # the allowed headers are configurable in object server, so we cannot
        # assert that content-encoding and content-disposition get *copied*
        # unless they were successfully set on the original PUT, so populate
        # expected_headers by making a HEAD on the original object
        file_item.initialize()
        self.assertEqual('application/test', file_item.content_type)
        resp_headers = dict(file_item.conn.response.getheaders())
        expected_headers = {}
        for k, v in put_headers.items():
            if k.lower() in resp_headers:
                expected_headers[k] = v
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        # copy both from within and across containers
        for cont in (self.env.container, dest_cont):
            # copy both with and without initial slash
            for prefix in ('', '/'):
                dest_filename = Utils.create_name()
                extra_hdrs = {'X-Object-Meta-Extra': 'fresh'}
                self.assertTrue(file_item.copy(
                    '%s%s' % (prefix, cont), dest_filename, hdrs=extra_hdrs))
                # verify container listing for copy
                listing = cont.files(parms={'format': 'json'})
                for obj in listing:
                    if obj['name'] == dest_filename:
                        break
                else:
                    self.fail('Failed to find %s in listing' % dest_filename)
                self.assertEqual(file_item.size, obj['bytes'])
                self.assertEqual(normalize_etag(file_item.etag), obj['hash'])
                self.assertEqual(file_item.content_type, obj['content_type'])
                file_copy = cont.file(dest_filename)
                self.assertEqual(data, file_copy.read())
                self.assertTrue(file_copy.initialize())
                expected_metadata = dict(metadata)
                # new metadata should be merged with existing
                expected_metadata['extra'] = 'fresh'
                self.assertDictEqual(expected_metadata, file_copy.metadata)
                resp_headers = dict(file_copy.conn.response.getheaders())
                for k, v in expected_headers.items():
                    self.assertIn(k.lower(), resp_headers)
                    self.assertEqual(v, resp_headers[k.lower()])
                # repeat copy with updated content-type, content-encoding and
                # content-disposition, which should get updated
                extra_hdrs = {
                    'X-Object-Meta-Extra': 'fresher',
                    'Content-Type': 'application/test-changed',
                    'Content-Encoding': 'not_gzip',
                    'Content-Disposition': 'attachment; filename=notmyfile'}
                self.assertTrue(file_item.copy(
                    '%s%s' % (prefix, cont), dest_filename, hdrs=extra_hdrs))
                self.assertIn(dest_filename, cont.files())
                file_copy = cont.file(dest_filename)
                self.assertEqual(data, file_copy.read())
                self.assertTrue(file_copy.initialize())
                expected_metadata['extra'] = 'fresher'
                self.assertDictEqual(expected_metadata, file_copy.metadata)
                resp_headers = dict(file_copy.conn.response.getheaders())
                # if k is in expected_headers then we can assert its new value
                for k, v in expected_headers.items():
                    v = extra_hdrs.get(k, v)
                    self.assertIn(k.lower(), resp_headers)
                    self.assertEqual(v, resp_headers[k.lower()])
                # verify container listing for copy
                listing = cont.files(parms={'format': 'json'})
                for obj in listing:
                    if obj['name'] == dest_filename:
                        break
                else:
                    self.fail('Failed to find %s in listing' % dest_filename)
                self.assertEqual(file_item.size, obj['bytes'])
                self.assertEqual(normalize_etag(file_item.etag), obj['hash'])
                self.assertEqual(
                    'application/test-changed', obj['content_type'])
                # repeat copy with X-Fresh-Metadata header - existing user
                # metadata should not be copied, new completely replaces it.
                extra_hdrs = {'Content-Type': 'application/test-updated',
                              'X-Object-Meta-Extra': 'fresher',
                              'X-Fresh-Metadata': 'true'}
                self.assertTrue(file_item.copy(
                    '%s%s' % (prefix, cont), dest_filename, hdrs=extra_hdrs))
                self.assertIn(dest_filename, cont.files())
                file_copy = cont.file(dest_filename)
                self.assertEqual(data, file_copy.read())
                self.assertTrue(file_copy.initialize())
                self.assertEqual('application/test-updated',
                                 file_copy.content_type)
                expected_metadata = {'extra': 'fresher'}
                self.assertDictEqual(expected_metadata, file_copy.metadata)
                resp_headers = dict(file_copy.conn.response.getheaders())
                for k in ('Content-Disposition', 'Content-Encoding'):
                    self.assertNotIn(k.lower(), resp_headers)
                # verify container listing for copy
                listing = cont.files(parms={'format': 'json'})
                for obj in listing:
                    if obj['name'] == dest_filename:
                        break
                else:
                    self.fail('Failed to find %s in listing' % dest_filename)
                self.assertEqual(file_item.size, obj['bytes'])
                self.assertEqual(normalize_etag(file_item.etag), obj['hash'])
                self.assertEqual(
                    'application/test-updated', obj['content_type'])
    def testCopyRange(self):
        """COPY with a Range header creates a new object containing only
        the requested byte range, with a matching etag and size."""
        # makes sure to test encoded characters
        source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
        file_item = self.env.container.file(source_filename)
        metadata = {Utils.create_ascii_name(): Utils.create_name()}
        data = file_item.write_random(1024)
        file_item.sync_metadata(metadata)
        file_item.initialize()
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        # Range is inclusive on both ends: bytes=100-200 is 101 bytes
        expected_body = data[100:201]
        expected_etag = md5(expected_body, usedforsecurity=False)
        # copy both from within and across containers
        for cont in (self.env.container, dest_cont):
            # copy both with and without initial slash
            for prefix in ('', '/'):
                dest_filename = Utils.create_name()
                file_item.copy('%s%s' % (prefix, cont), dest_filename,
                               hdrs={'Range': 'bytes=100-200'})
                self.assertEqual(201, file_item.conn.response.status)
                # verify container listing for copy
                listing = cont.files(parms={'format': 'json'})
                for obj in listing:
                    if obj['name'] == dest_filename:
                        break
                else:
                    self.fail('Failed to find %s in listing' % dest_filename)
                self.assertEqual(101, obj['bytes'])
                self.assertEqual(expected_etag.hexdigest(), obj['hash'])
                self.assertEqual(file_item.content_type, obj['content_type'])
                # verify copy object
                copy_file_item = cont.file(dest_filename)
                self.assertEqual(expected_body, copy_file_item.read())
                self.assertTrue(copy_file_item.initialize())
                self.assertEqual(metadata, copy_file_item.metadata)
    def testCopyAccount(self):
        """Account-qualified COPY works within one account and, when a
        second account is configured, across accounts via a write ACL."""
        # makes sure to test encoded characters
        source_filename = 'dealde%2Fl04 011e%204c8df/flash.png'
        file_item = self.env.container.file(source_filename)
        metadata = {Utils.create_ascii_name(): Utils.create_name()}
        data = file_item.write_random()
        file_item.sync_metadata(metadata)
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        acct = self.env.conn.account_name
        # copy both from within and across containers
        for cont in (self.env.container, dest_cont):
            # copy both with and without initial slash
            for prefix in ('', '/'):
                dest_filename = Utils.create_name()
                file_item = self.env.container.file(source_filename)
                file_item.copy_account(acct,
                                       '%s%s' % (prefix, cont),
                                       dest_filename)
                self.assertIn(dest_filename, cont.files())
                file_item = cont.file(dest_filename)
                self.assertEqual(data, file_item.read())
                self.assertTrue(file_item.initialize())
                self.assertEqual(metadata, file_item.metadata)
        if not tf.skip2:
            # cross-account: grant this user write access on a container
            # in the second account, then copy into it
            dest_cont = self.env.account2.container(Utils.create_name())
            self.assertTrue(dest_cont.create(hdrs={
                'X-Container-Write': self.env.conn.user_acl
            }))
            acct = self.env.conn2.account_name
            # copy both with and without initial slash
            for prefix in ('', '/'):
                dest_filename = Utils.create_name()
                file_item = self.env.container.file(source_filename)
                file_item.copy_account(acct,
                                       '%s%s' % (prefix, dest_cont),
                                       dest_filename)
                self.assertIn(dest_filename, dest_cont.files())
                file_item = dest_cont.file(dest_filename)
                self.assertEqual(data, file_item.read())
                self.assertTrue(file_item.initialize())
                self.assertEqual(metadata, file_item.metadata)
def testCopy404s(self):
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
file_item.write_random()
dest_cont = self.env.account.container(Utils.create_name())
self.assertTrue(dest_cont.create())
for prefix in ('', '/'):
# invalid source container
source_cont = self.env.account.container(Utils.create_name())
file_item = source_cont.file(source_filename)
self.assertRaises(ResponseError, file_item.copy,
'%s%s' % (prefix, self.env.container),
Utils.create_name())
self.assert_status(404)
self.assertRaises(ResponseError, file_item.copy,
'%s%s' % (prefix, dest_cont),
Utils.create_name())
self.assert_status(404)
# invalid source object
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.copy,
'%s%s' % (prefix, self.env.container),
Utils.create_name())
self.assert_status(404)
self.assertRaises(ResponseError, file_item.copy,
'%s%s' % (prefix, dest_cont),
Utils.create_name())
self.assert_status(404)
# invalid destination container
file_item = self.env.container.file(source_filename)
self.assertRaises(ResponseError, file_item.copy,
'%s%s' % (prefix, Utils.create_name()),
Utils.create_name())
    def testCopyAccount404s(self):
        """Cross-account COPY with bad source/destination gets 404 (or 403
        when writing to a nonexistent container in the foreign account)."""
        if tf.skip2:
            raise SkipTest('Account2 not set')
        acct = self.env.conn.account_name
        acct2 = self.env.conn2.account_name
        source_filename = Utils.create_name()
        file_item = self.env.container.file(source_filename)
        file_item.write_random()
        # destination container in our own account, readable by account2
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create(hdrs={
            'X-Container-Read': self.env.conn2.user_acl
        }))
        # destination container in account2, writable and readable by us
        dest_cont2 = self.env.account2.container(Utils.create_name())
        self.assertTrue(dest_cont2.create(hdrs={
            'X-Container-Write': self.env.conn.user_acl,
            'X-Container-Read': self.env.conn.user_acl
        }))
        # NOTE: the loop variable deliberately rebinds the outer 'acct'
        for acct, cont in ((acct, dest_cont), (acct2, dest_cont2)):
            for prefix in ('', '/'):
                # invalid source container
                source_cont = self.env.account.container(Utils.create_name())
                file_item = source_cont.file(source_filename)
                self.assertRaises(ResponseError, file_item.copy_account,
                                  acct,
                                  '%s%s' % (prefix, self.env.container),
                                  Utils.create_name())
                # there is no such source container but user has
                # permissions to do a GET (done internally via COPY) for
                # objects in his own account.
                self.assert_status(404)
                self.assertRaises(ResponseError, file_item.copy_account,
                                  acct,
                                  '%s%s' % (prefix, cont),
                                  Utils.create_name())
                self.assert_status(404)
                # invalid source object
                file_item = self.env.container.file(Utils.create_name())
                self.assertRaises(ResponseError, file_item.copy_account,
                                  acct,
                                  '%s%s' % (prefix, self.env.container),
                                  Utils.create_name())
                # there is no such source container but user has
                # permissions to do a GET (done internally via COPY) for
                # objects in his own account.
                self.assert_status(404)
                self.assertRaises(ResponseError, file_item.copy_account,
                                  acct,
                                  '%s%s' % (prefix, cont),
                                  Utils.create_name())
                self.assert_status(404)
                # invalid destination container
                file_item = self.env.container.file(source_filename)
                self.assertRaises(ResponseError, file_item.copy_account,
                                  acct,
                                  '%s%s' % (prefix, Utils.create_name()),
                                  Utils.create_name())
                if acct == acct2:
                    # there is no such destination container
                    # and foreign user can have no permission to write there
                    self.assert_status(403)
                else:
                    self.assert_status(404)
def testCopyNoDestinationHeader(self):
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
file_item.write_random()
file_item = self.env.container.file(source_filename)
self.assertRaises(ResponseError, file_item.copy, Utils.create_name(),
Utils.create_name(),
cfg={'no_destination': True})
self.assert_status(412)
def testCopyDestinationSlashProblems(self):
source_filename = Utils.create_name()
file_item = self.env.container.file(source_filename)
file_item.write_random()
# no slash
self.assertRaises(ResponseError, file_item.copy, Utils.create_name(),
Utils.create_name(),
cfg={'destination': Utils.create_name()})
self.assert_status(412)
# too many slashes
self.assertRaises(ResponseError, file_item.copy, Utils.create_name(),
Utils.create_name(),
cfg={'destination': '//%s' % Utils.create_name()})
self.assert_status(412)
    def testCopyFromHeader(self):
        """PUT with X-Copy-From copies data and metadata from the source."""
        source_filename = Utils.create_name()
        file_item = self.env.container.file(source_filename)
        metadata = {}
        for i in range(1):
            metadata[Utils.create_ascii_name()] = Utils.create_name()
        file_item.metadata = metadata
        data = file_item.write_random()
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        # copy both from within and across containers
        for cont in (self.env.container, dest_cont):
            # copy both with and without initial slash
            for prefix in ('', '/'):
                dest_filename = Utils.create_name()
                file_item = cont.file(dest_filename)
                file_item.write(hdrs={'X-Copy-From': '%s%s/%s' % (
                    prefix, self.env.container.name, source_filename)})
                self.assertIn(dest_filename, cont.files())
                file_item = cont.file(dest_filename)
                self.assertEqual(data, file_item.read())
                self.assertTrue(file_item.initialize())
                self.assertEqual(metadata, file_item.metadata)
    def testCopyFromAccountHeader(self):
        """PUT with X-Copy-From-Account copies data/metadata across accounts."""
        if tf.skip2:
            raise SkipTest('Account2 not set')
        acct = self.env.conn.account_name
        # source container readable by account2's user
        src_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(src_cont.create(hdrs={
            'X-Container-Read': self.env.conn2.user_acl
        }))
        source_filename = Utils.create_name()
        file_item = src_cont.file(source_filename)
        metadata = {}
        for i in range(1):
            metadata[Utils.create_ascii_name()] = Utils.create_name()
        file_item.metadata = metadata
        data = file_item.write_random()
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        # destination container in account2 writable by us
        dest_cont2 = self.env.account2.container(Utils.create_name())
        self.assertTrue(dest_cont2.create(hdrs={
            'X-Container-Write': self.env.conn.user_acl
        }))
        for cont in (src_cont, dest_cont, dest_cont2):
            # copy both with and without initial slash
            for prefix in ('', '/'):
                dest_filename = Utils.create_name()
                file_item = cont.file(dest_filename)
                file_item.write(hdrs={'X-Copy-From-Account': acct,
                                      'X-Copy-From': '%s%s/%s' % (
                                          prefix,
                                          src_cont.name,
                                          source_filename)})
                self.assertIn(dest_filename, cont.files())
                file_item = cont.file(dest_filename)
                self.assertEqual(data, file_item.read())
                self.assertTrue(file_item.initialize())
                self.assertEqual(metadata, file_item.metadata)
    def testCopyFromHeader404s(self):
        """PUT with a bad X-Copy-From source or destination returns 404."""
        source_filename = Utils.create_name()
        file_item = self.env.container.file(source_filename)
        file_item.write_random()
        for prefix in ('', '/'):
            # invalid source container
            file_item = self.env.container.file(Utils.create_name())
            copy_from = ('%s%s/%s'
                         % (prefix, Utils.create_name(), source_filename))
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From': copy_from})
            self.assert_status(404)
            # invalid source object
            copy_from = ('%s%s/%s'
                         % (prefix, self.env.container.name,
                            Utils.create_name()))
            file_item = self.env.container.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From': copy_from})
            self.assert_status(404)
            # invalid destination container
            dest_cont = self.env.account.container(Utils.create_name())
            file_item = dest_cont.file(Utils.create_name())
            copy_from = ('%s%s/%s'
                         % (prefix, self.env.container.name, source_filename))
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From': copy_from})
            self.assert_status(404)
    def testCopyFromAccountHeader404s(self):
        """X-Copy-From-Account with bad source/destination gets 403/404."""
        if tf.skip2:
            raise SkipTest('Account2 not set')
        acct = self.env.conn2.account_name
        # source container in account2, readable by the primary user
        src_cont = self.env.account2.container(Utils.create_name())
        self.assertTrue(src_cont.create(hdrs={
            'X-Container-Read': self.env.conn.user_acl
        }))
        source_filename = Utils.create_name()
        file_item = src_cont.file(source_filename)
        file_item.write_random()
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        for prefix in ('', '/'):
            # invalid source container
            file_item = dest_cont.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From-Account': acct,
                                    'X-Copy-From': '%s%s/%s' %
                                    (prefix,
                                     Utils.create_name(),
                                     source_filename)})
            # NOTE(review): 403 (not 404) here — presumably because the
            # unknown container in the foreign account grants us no read
            # ACL, so the permission check fails before existence can be
            # determined; confirm against the copy middleware.
            self.assert_status(403)
            # invalid source object
            file_item = self.env.container.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From-Account': acct,
                                    'X-Copy-From': '%s%s/%s' %
                                    (prefix,
                                     src_cont,
                                     Utils.create_name())})
            self.assert_status(404)
            # invalid destination container
            dest_cont = self.env.account.container(Utils.create_name())
            file_item = dest_cont.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From-Account': acct,
                                    'X-Copy-From': '%s%s/%s' %
                                    (prefix,
                                     src_cont,
                                     source_filename)})
            self.assert_status(404)
    def testCopyFromAccountHeader403s(self):
        """X-Copy-From-Account from an unreadable foreign account always 403s."""
        if tf.skip2:
            raise SkipTest('Account2 not set')
        acct = self.env.conn2.account_name
        src_cont = self.env.account2.container(Utils.create_name())
        self.assertTrue(src_cont.create())  # Primary user has no access
        source_filename = Utils.create_name()
        file_item = src_cont.file(source_filename)
        file_item.write_random()
        dest_cont = self.env.account.container(Utils.create_name())
        self.assertTrue(dest_cont.create())
        for prefix in ('', '/'):
            # invalid source container
            file_item = dest_cont.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From-Account': acct,
                                    'X-Copy-From': '%s%s/%s' %
                                    (prefix,
                                     Utils.create_name(),
                                     source_filename)})
            self.assert_status(403)
            # invalid source object
            file_item = self.env.container.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From-Account': acct,
                                    'X-Copy-From': '%s%s/%s' %
                                    (prefix,
                                     src_cont,
                                     Utils.create_name())})
            self.assert_status(403)
            # invalid destination container
            dest_cont = self.env.account.container(Utils.create_name())
            file_item = dest_cont.file(Utils.create_name())
            self.assertRaises(ResponseError, file_item.write,
                              hdrs={'X-Copy-From-Account': acct,
                                    'X-Copy-From': '%s%s/%s' %
                                    (prefix,
                                     src_cont,
                                     source_filename)})
            self.assert_status(403)
def testNameLimit(self):
limit = load_constraint('max_object_name_length')
for lim in (1, 10, limit // 2, limit - 1, limit, limit + 1, limit * 2):
file_item = self.env.container.file('a' * lim)
if lim <= limit:
self.assertTrue(file_item.write())
self.assert_status(201)
else:
self.assertRaises(ResponseError, file_item.write)
self.assert_status(400)
    def testQuestionMarkInName(self):
        """With path quoting disabled, '?' truncates the stored object name
        (everything after it is treated as a query string)."""
        if Utils.create_name == Utils.create_ascii_name:
            # splice a '?' into the middle of an ASCII name
            file_name = list(Utils.create_name())
            file_name[random.randint(2, len(file_name) - 2)] = '?'
            file_name = "".join(file_name)
        else:
            file_name = Utils.create_name(6) + '?' + Utils.create_name(6)
        file_item = self.env.container.file(file_name)
        self.assertTrue(file_item.write(cfg={'no_path_quote': True}))
        self.assertNotIn(file_name, self.env.container.files())
        self.assertIn(file_name.split('?')[0], self.env.container.files())
    def testDeleteThen404s(self):
        """After deletion every object operation returns 404."""
        file_item = self.env.container.file(Utils.create_name())
        self.assertTrue(file_item.write_random())
        self.assert_status(201)
        self.assertTrue(file_item.delete())
        self.assert_status(204)
        # metadata set locally so sync_metadata has something to POST
        file_item.metadata = {Utils.create_ascii_name(): Utils.create_name()}
        for method in (file_item.info,
                       file_item.read,
                       file_item.sync_metadata,
                       file_item.delete):
            self.assertRaises(ResponseError, method)
            self.assert_status(404)
def testBlankMetadataName(self):
file_item = self.env.container.file(Utils.create_name())
file_item.metadata = {'': Utils.create_name()}
self.assertRaises(ResponseError, file_item.write_random)
self.assert_status(400)
    def testMetadataNumberLimit(self):
        """At most max_meta_count metadata items are accepted per object."""
        number_limit = load_constraint('max_meta_count')
        size_limit = load_constraint('max_meta_overall_size')
        for i in (number_limit - 10, number_limit - 1, number_limit,
                  number_limit + 1, number_limit + 10, number_limit + 100):
            # cap name/value sizes so max_meta_overall_size is not hit first
            j = size_limit // (i * 2)
            metadata = {}
            while len(metadata.keys()) < i:
                key = Utils.create_ascii_name()
                val = Utils.create_name()
                if len(key) > j:
                    key = key[:j]
                    # NB: we'll likely write object metadata that's *not* UTF-8
                    if six.PY2:
                        val = val[:j]
                    else:
                        val = val.encode('utf8')[:j].decode(
                            'utf8', 'surrogateescape')
                metadata[key] = val
            file_item = self.env.container.file(Utils.create_name())
            file_item.metadata = metadata
            if i <= number_limit:
                self.assertTrue(file_item.write())
                self.assert_status(201)
                self.assertTrue(file_item.sync_metadata())
                self.assert_status(202)
            else:
                # PUT with too many items fails...
                self.assertRaises(ResponseError, file_item.write)
                self.assert_status(400)
                # ...the object can still be created without metadata...
                file_item.metadata = {}
                self.assertTrue(file_item.write())
                self.assert_status(201)
                # ...but POSTing the oversized metadata set still fails
                file_item.metadata = metadata
                self.assertRaises(ResponseError, file_item.sync_metadata)
                self.assert_status(400)
def testContentTypeGuessing(self):
file_types = {'wav': 'audio/x-wav', 'txt': 'text/plain',
'zip': 'application/zip'}
container = self.env.account.container(Utils.create_name())
self.assertTrue(container.create())
for i in file_types.keys():
file_item = container.file(Utils.create_name() + '.' + i)
file_item.write(b'', cfg={'no_content_type': True})
file_types_read = {}
for i in container.files(parms={'format': 'json'}):
file_types_read[i['name'].split('.')[1]] = i['content_type']
self.assertEqual(file_types, file_types_read)
    def testRangedGets(self):
        """Byte-range GETs return the right slices, statuses and headers."""
        # We set the file_length to a strange multiple here. This is to check
        # that ranges still work in the EC case when the requested range
        # spans EC segment boundaries. The 1 MiB base value is chosen because
        # that's a common EC segment size. The 1.33 multiple is to ensure we
        # aren't aligned on segment boundaries
        file_length = int(1048576 * 1.33)
        range_size = file_length // 10
        file_item = self.env.container.file(Utils.create_name())
        data = file_item.write_random(file_length)
        for i in range(0, file_length, range_size):
            # simple first-byte/last-byte range
            range_string = 'bytes=%d-%d' % (i, i + range_size - 1)
            hdrs = {'Range': range_string}
            self.assertEqual(
                data[i: i + range_size], file_item.read(hdrs=hdrs),
                range_string)
            # suffix range (last i bytes)
            range_string = 'bytes=-%d' % (i)
            hdrs = {'Range': range_string}
            if i == 0:
                # RFC 2616 14.35.1
                # "If a syntactically valid byte-range-set includes ... at
                # least one suffix-byte-range-spec with a NON-ZERO
                # suffix-length, then the byte-range-set is satisfiable.
                # Otherwise, the byte-range-set is unsatisfiable.
                self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
                self.assert_status(416)
                self.assert_header('content-range', 'bytes */%d' % file_length)
            else:
                self.assertEqual(file_item.read(hdrs=hdrs), data[-i:])
                self.assert_header('content-range', 'bytes %d-%d/%d' % (
                    file_length - i, file_length - 1, file_length))
            self.assert_etag(file_item.md5)
            self.assert_header('accept-ranges', 'bytes')
            # open-ended range (from byte i to the end)
            range_string = 'bytes=%d-' % (i)
            hdrs = {'Range': range_string}
            self.assertEqual(
                file_item.read(hdrs=hdrs), data[i - file_length:],
                range_string)
        # a range entirely past the end of the object is unsatisfiable
        range_string = 'bytes=%d-%d' % (file_length + 1000, file_length + 2000)
        hdrs = {'Range': range_string}
        self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
        self.assert_status(416)
        self.assert_header('content-range', 'bytes */%d' % file_length)
        self.assert_etag(file_item.md5)
        self.assert_header('accept-ranges', 'bytes')
        # a range overlapping the end is truncated to the object length
        range_string = 'bytes=%d-%d' % (file_length - 1000, file_length + 2000)
        hdrs = {'Range': range_string}
        self.assertEqual(file_item.read(hdrs=hdrs), data[-1000:], range_string)
        # missing "bytes=" prefix: not a valid Range header, whole object
        hdrs = {'Range': '0-4'}
        self.assertEqual(file_item.read(hdrs=hdrs), data, '0-4')
        # RFC 2616 14.35.1
        # "If the entity is shorter than the specified suffix-length, the
        # entire entity-body is used."
        range_string = 'bytes=-%d' % (file_length + 10)
        hdrs = {'Range': range_string}
        self.assertEqual(file_item.read(hdrs=hdrs), data, range_string)
    def testMultiRangeGets(self):
        """Multi-range GETs return correct multipart/byteranges responses,
        dropping unsatisfiable ranges and 416ing when none is satisfiable."""
        file_length = 10000
        range_size = file_length // 10
        subrange_size = range_size // 10
        file_item = self.env.container.file(Utils.create_name())
        data = file_item.write_random(
            file_length, hdrs={"Content-Type":
                               "lovecraft/rugose; squamous=true"})
        for i in range(0, file_length, range_size):
            # three disjoint sub-ranges within this window
            range_string = 'bytes=%d-%d,%d-%d,%d-%d' % (
                i, i + subrange_size - 1,
                i + 2 * subrange_size, i + 3 * subrange_size - 1,
                i + 4 * subrange_size, i + 5 * subrange_size - 1)
            hdrs = {'Range': range_string}
            fetched = file_item.read(hdrs=hdrs)
            self.assert_status(206)
            content_type = file_item.content_type
            self.assertTrue(content_type.startswith("multipart/byteranges"))
            self.assertIsNone(file_item.content_range)
            # email.parser.FeedParser wants a message with headers on the
            # front, then two CRLFs, and then a body (like emails have but
            # HTTP response bodies don't). We fake it out by constructing a
            # one-header preamble containing just the Content-Type, then
            # feeding in the response body.
            parser = FeedParser()
            parser.feed(b"Content-Type: %s\r\n\r\n" % content_type.encode())
            parser.feed(fetched)
            root_message = parser.close()
            self.assertTrue(root_message.is_multipart())
            byteranges = root_message.get_payload()
            self.assertEqual(len(byteranges), 3)
            self.assertEqual(byteranges[0]['Content-Type'],
                             "lovecraft/rugose; squamous=true")
            self.assertEqual(
                byteranges[0]['Content-Range'],
                "bytes %d-%d/%d" % (i, i + subrange_size - 1, file_length))
            self.assertEqual(
                byteranges[0].get_payload(decode=True),
                data[i:(i + subrange_size)])
            self.assertEqual(byteranges[1]['Content-Type'],
                             "lovecraft/rugose; squamous=true")
            self.assertEqual(
                byteranges[1]['Content-Range'],
                "bytes %d-%d/%d" % (i + 2 * subrange_size,
                                    i + 3 * subrange_size - 1, file_length))
            self.assertEqual(
                byteranges[1].get_payload(decode=True),
                data[(i + 2 * subrange_size):(i + 3 * subrange_size)])
            self.assertEqual(byteranges[2]['Content-Type'],
                             "lovecraft/rugose; squamous=true")
            self.assertEqual(
                byteranges[2]['Content-Range'],
                "bytes %d-%d/%d" % (i + 4 * subrange_size,
                                    i + 5 * subrange_size - 1, file_length))
            self.assertEqual(
                byteranges[2].get_payload(decode=True),
                data[(i + 4 * subrange_size):(i + 5 * subrange_size)])
        # The first two ranges are satisfiable but the third is not; the
        # result is a multipart/byteranges response containing only the two
        # satisfiable byteranges.
        range_string = 'bytes=%d-%d,%d-%d,%d-%d' % (
            0, subrange_size - 1,
            2 * subrange_size, 3 * subrange_size - 1,
            file_length, file_length + subrange_size - 1)
        hdrs = {'Range': range_string}
        fetched = file_item.read(hdrs=hdrs)
        self.assert_status(206)
        content_type = file_item.content_type
        self.assertTrue(content_type.startswith("multipart/byteranges"))
        self.assertIsNone(file_item.content_range)
        parser = FeedParser()
        parser.feed(b"Content-Type: %s\r\n\r\n" % content_type.encode())
        parser.feed(fetched)
        root_message = parser.close()
        self.assertTrue(root_message.is_multipart())
        byteranges = root_message.get_payload()
        self.assertEqual(len(byteranges), 2)
        self.assertEqual(byteranges[0]['Content-Type'],
                         "lovecraft/rugose; squamous=true")
        self.assertEqual(
            byteranges[0]['Content-Range'],
            "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
        self.assertEqual(byteranges[0].get_payload(decode=True),
                         data[:subrange_size])
        self.assertEqual(byteranges[1]['Content-Type'],
                         "lovecraft/rugose; squamous=true")
        self.assertEqual(
            byteranges[1]['Content-Range'],
            "bytes %d-%d/%d" % (2 * subrange_size, 3 * subrange_size - 1,
                                file_length))
        self.assertEqual(
            byteranges[1].get_payload(decode=True),
            data[(2 * subrange_size):(3 * subrange_size)])
        # The first range is satisfiable but the second is not; the
        # result is either a multipart/byteranges response containing one
        # byterange or a normal, non-MIME 206 response.
        range_string = 'bytes=%d-%d,%d-%d' % (
            0, subrange_size - 1,
            file_length, file_length + subrange_size - 1)
        hdrs = {'Range': range_string}
        fetched = file_item.read(hdrs=hdrs)
        self.assert_status(206)
        content_type = file_item.content_type
        if content_type.startswith("multipart/byteranges"):
            self.assertIsNone(file_item.content_range)
            parser = FeedParser()
            parser.feed(b"Content-Type: %s\r\n\r\n" % content_type.encode())
            parser.feed(fetched)
            root_message = parser.close()
            self.assertTrue(root_message.is_multipart())
            byteranges = root_message.get_payload()
            self.assertEqual(len(byteranges), 1)
            self.assertEqual(byteranges[0]['Content-Type'],
                             "lovecraft/rugose; squamous=true")
            self.assertEqual(
                byteranges[0]['Content-Range'],
                "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
            self.assertEqual(byteranges[0].get_payload(decode=True),
                             data[:subrange_size])
        else:
            self.assertEqual(
                file_item.content_range,
                "bytes %d-%d/%d" % (0, subrange_size - 1, file_length))
            self.assertEqual(content_type, "lovecraft/rugose; squamous=true")
            self.assertEqual(fetched, data[:subrange_size])
        # No byterange is satisfiable, so we get a 416 response.
        range_string = 'bytes=%d-%d,%d-%d' % (
            file_length, file_length + 2,
            file_length + 100, file_length + 102)
        hdrs = {'Range': range_string}
        self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
        self.assert_status(416)
        self.assert_header('content-range', 'bytes */%d' % file_length)
def testRangedGetsWithLWSinHeader(self):
file_length = 10000
file_item = self.env.container.file(Utils.create_name())
data = file_item.write_random(file_length)
for r in ('BYTES=0-999', 'bytes = 0-999', 'BYTES = 0 - 999',
'bytes = 0 - 999', 'bytes=0 - 999', 'bytes=0-999 '):
self.assertEqual(file_item.read(hdrs={'Range': r}), data[0:1000])
    def testFileSizeLimit(self):
        """PUTs around max_file_size respond (success or error) promptly."""
        limit = load_constraint('max_file_size')
        tsecs = 3
        def timeout(seconds, method, *args, **kwargs):
            # return True iff 'method' did NOT finish within 'seconds'
            try:
                with eventlet.Timeout(seconds):
                    method(*args, **kwargs)
            except eventlet.Timeout:
                return True
            else:
                return False
        # This loop will result in fallocate calls for 4x the limit
        # (minus 111 bytes). With fallocate turned on in the object servers,
        # this may fail if you don't have 4x the limit available on your
        # data drives.
        # Note that this test does not actually send any data to the system.
        # All it does is ensure that a response (success or failure) comes
        # back within 3 seconds. For the successful tests (size smaller
        # than limit), the cluster will log a 499.
        for i in (limit - 100, limit - 10, limit - 1, limit, limit + 1,
                  limit + 10, limit + 100):
            file_item = self.env.container.file(Utils.create_name())
            if i <= limit:
                self.assertTrue(timeout(tsecs, file_item.write,
                                        cfg={'set_content_length': i}))
            else:
                self.assertRaises(ResponseError, timeout, tsecs,
                                  file_item.write,
                                  cfg={'set_content_length': i})
def testNoContentLengthForPut(self):
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.write, b'testing',
cfg={'no_content_length': True})
self.assert_status(411)
def testDelete(self):
file_item = self.env.container.file(Utils.create_name())
file_item.write_random(self.env.file_size)
self.assertIn(file_item.name, self.env.container.files())
self.assertTrue(file_item.delete())
self.assertNotIn(file_item.name, self.env.container.files())
    def testBadHeaders(self):
        """Malformed request headers produce the appropriate error statuses."""
        file_length = 100
        # no content type on puts should be ok
        file_item = self.env.container.file(Utils.create_name())
        file_item.write_random(file_length, cfg={'no_content_type': True})
        self.assert_status(201)
        # content length x
        self.assertRaises(ResponseError, file_item.write_random, file_length,
                          hdrs={'Content-Length': 'X'},
                          cfg={'no_content_length': True})
        self.assert_status(400)
        # no content-length
        self.assertRaises(ResponseError, file_item.write_random, file_length,
                          cfg={'no_content_length': True})
        self.assert_status(411)
        # unsupported transfer-encoding -> 501
        self.assertRaises(ResponseError, file_item.write_random, file_length,
                          hdrs={'transfer-encoding': 'gzip,chunked'},
                          cfg={'no_content_length': True})
        self.assert_status(501)
        # bad request types
        # for req in ('LICK', 'GETorHEAD_base', 'container_info',
        #             'best_response'):
        for req in ('LICK', 'GETorHEAD_base'):
            self.env.account.conn.make_request(req)
            self.assert_status(405)
        # bad range headers are ignored and the whole object returned
        self.assertEqual(
            len(file_item.read(hdrs={'Range': 'parsecs=8-12'})),
            file_length)
        self.assert_status(200)
def testMetadataLengthLimits(self):
key_limit = load_constraint('max_meta_name_length')
value_limit = load_constraint('max_meta_value_length')
lengths = [[key_limit, value_limit], [key_limit, value_limit + 1],
[key_limit + 1, value_limit], [key_limit, 0],
[key_limit, value_limit * 10],
[key_limit * 10, value_limit]]
for l in lengths:
metadata = {'a' * l[0]: 'b' * l[1]}
file_item = self.env.container.file(Utils.create_name())
file_item.metadata = metadata
if l[0] <= key_limit and l[1] <= value_limit:
self.assertTrue(file_item.write())
self.assert_status(201)
self.assertTrue(file_item.sync_metadata())
else:
self.assertRaises(ResponseError, file_item.write)
self.assert_status(400)
file_item.metadata = {}
self.assertTrue(file_item.write())
self.assert_status(201)
file_item.metadata = metadata
self.assertRaises(ResponseError, file_item.sync_metadata)
self.assert_status(400)
def testEtagWayoff(self):
file_item = self.env.container.file(Utils.create_name())
hdrs = {'etag': 'reallylonganddefinitelynotavalidetagvalue'}
self.assertRaises(ResponseError, file_item.write_random, hdrs=hdrs)
self.assert_status(422)
def testFileCreate(self):
for i in range(10):
file_item = self.env.container.file(Utils.create_name())
data = file_item.write_random()
self.assert_status(201)
self.assertEqual(data, file_item.read())
self.assert_status(200)
    def testHead(self):
        """HEAD reports content length, etag, content type, last-modified."""
        file_name = Utils.create_name()
        content_type = Utils.create_name()
        file_item = self.env.container.file(file_name)
        file_item.content_type = content_type
        file_item.write_random(self.env.file_size)
        expected_etag = file_item.md5
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            # cluster is configured to return quoted etags
            expected_etag = '"%s"' % expected_etag
        file_item = self.env.container.file(file_name)
        info = file_item.info()
        self.assert_status(200)
        self.assertEqual(info['content_length'], self.env.file_size)
        self.assertEqual(info['etag'], expected_etag)
        self.assertEqual(info['content_type'], content_type)
        self.assertIn('last_modified', info)
def testDeleteOfFileThatDoesNotExist(self):
# in container that exists
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.delete)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file_item = container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.delete)
self.assert_status(404)
def testHeadOnFileThatDoesNotExist(self):
# in container that exists
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.info)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file_item = container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.info)
self.assert_status(404)
    def testMetadataOnPost(self):
        """Metadata POSTed to an object is readable back afterwards."""
        file_item = self.env.container.file(Utils.create_name())
        file_item.write_random(self.env.file_size)
        for i in range(10):
            metadata = {}
            for j in range(10):
                metadata[Utils.create_ascii_name()] = Utils.create_name()
            file_item.metadata = metadata
            self.assertTrue(file_item.sync_metadata())
            self.assert_status(202)
            # re-fetch the object to read metadata from the server
            file_item = self.env.container.file(file_item.name)
            self.assertTrue(file_item.initialize())
            self.assert_status(200)
            self.assertEqual(file_item.metadata, metadata)
def testGetContentType(self):
file_name = Utils.create_name()
content_type = Utils.create_name()
file_item = self.env.container.file(file_name)
file_item.content_type = content_type
file_item.write_random()
file_item = self.env.container.file(file_name)
file_item.read()
self.assertEqual(content_type, file_item.content_type)
def testGetOnFileThatDoesNotExist(self):
# in container that exists
file_item = self.env.container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.read)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file_item = container.file(Utils.create_name())
self.assertRaises(ResponseError, file_item.read)
self.assert_status(404)
def testPostOnFileThatDoesNotExist(self):
# in container that exists
file_item = self.env.container.file(Utils.create_name())
file_item.metadata['Field'] = 'Value'
self.assertRaises(ResponseError, file_item.sync_metadata)
self.assert_status(404)
# in container that does not exist
container = self.env.account.container(Utils.create_name())
file_item = container.file(Utils.create_name())
file_item.metadata['Field'] = 'Value'
self.assertRaises(ResponseError, file_item.sync_metadata)
self.assert_status(404)
def testMetadataOnPut(self):
for i in range(10):
metadata = {}
for j in range(10):
metadata[Utils.create_ascii_name()] = Utils.create_name()
file_item = self.env.container.file(Utils.create_name())
file_item.metadata = metadata
file_item.write_random(self.env.file_size)
file_item = self.env.container.file(file_item.name)
self.assertTrue(file_item.initialize())
self.assert_status(200)
self.assertEqual(file_item.metadata, metadata)
    def testSerialization(self):
        """JSON and XML container listings agree with the objects written."""
        container = self.env.account.container(Utils.create_name())
        self.assertTrue(container.create())
        files = []
        for i in (0, 1, 10, 100, 1000, 10000):
            files.append({'name': Utils.create_name(),
                          'content_type': Utils.create_name(), 'bytes': i})
        write_time = time.time()
        for f in files:
            file_item = container.file(f['name'])
            file_item.content_type = f['content_type']
            file_item.write_random(f['bytes'])
            f['hash'] = file_item.md5
            # track which listing formats each file has been seen in
            f['json'] = False
            f['xml'] = False
        write_time = time.time() - write_time
        for format_type in ['json', 'xml']:
            for file_item in container.files(parms={'format': format_type}):
                found = False
                for f in files:
                    if f['name'] != file_item['name']:
                        continue
                    self.assertEqual(file_item['content_type'],
                                     f['content_type'])
                    self.assertEqual(int(file_item['bytes']), f['bytes'])
                    d = datetime.strptime(
                        file_item['last_modified'].split('.')[0],
                        "%Y-%m-%dT%H:%M:%S")
                    lm = time.mktime(d.timetuple())
                    if 'last_modified' in f:
                        # both formats must report the same timestamp
                        self.assertEqual(f['last_modified'], lm)
                    else:
                        f['last_modified'] = lm
                    f[format_type] = True
                    found = True
                self.assertTrue(
                    found, 'Unexpected file %s found in '
                    '%s listing' % (file_item['name'], format_type))
            headers = dict((h.lower(), v)
                           for h, v in self.env.conn.response.getheaders())
            if format_type == 'json':
                self.assertEqual(headers['content-type'],
                                 'application/json; charset=utf-8')
            elif format_type == 'xml':
                self.assertEqual(headers['content-type'],
                                 'application/xml; charset=utf-8')
        lm_diff = max([f['last_modified'] for f in files]) -\
            min([f['last_modified'] for f in files])
        self.assertLess(lm_diff, write_time + 1,
                        'Diff in last modified times '
                        'should be less than time to write files')
        for f in files:
            for format_type in ['json', 'xml']:
                self.assertTrue(
                    f[format_type], 'File %s not found in %s listing'
                    % (f['name'], format_type))
def testStackedOverwrite(self):
file_item = self.env.container.file(Utils.create_name())
for i in range(1, 11):
data = file_item.write_random(512)
file_item.write(data)
self.assertEqual(file_item.read(), data)
def testZeroByteFile(self):
file_item = self.env.container.file(Utils.create_name())
self.assertTrue(file_item.write(b''))
self.assertIn(file_item.name, self.env.container.files())
self.assertEqual(file_item.read(), b'')
def testEtagResponse(self):
file_item = self.env.container.file(Utils.create_name())
data = io.BytesIO(file_item.write_random(512))
self.assert_etag(File.compute_md5sum(data))
    def testChunkedPut(self):
        """A chunked-transfer PUT (no Content-Length) stores the payload."""
        if (tf.web_front_end == 'apache2'):
            raise SkipTest("Chunked PUT cannot be tested with apache2 web "
                           "front end")
        def chunks(s, length=3):
            # yield successive 'length'-sized pieces of s
            i, j = 0, length
            while i < len(s):
                yield s[i:j]
                i, j = j, j + length
        data = File.random_data(10000)
        etag = File.compute_md5sum(data)
        for i in (1, 10, 100, 1000):
            file_item = self.env.container.file(Utils.create_name())
            for j in chunks(data, i):
                file_item.chunked_write(j)
            # the final empty chunked_write() terminates the upload
            self.assertTrue(file_item.chunked_write())
            self.assertEqual(data, file_item.read())
            info = file_item.info()
            self.assertEqual(normalize_etag(info['etag']), etag)
    def test_POST(self):
        """POSTed content-type/metadata show up in the container listing."""
        # verify consistency between object and container listing metadata
        file_name = Utils.create_name()
        file_item = self.env.container.file(file_name)
        file_item.content_type = 'text/foobar'
        file_item.write_random(1024)
        # sanity check
        file_item = self.env.container.file(file_name)
        file_item.initialize()
        self.assertEqual('text/foobar', file_item.content_type)
        self.assertEqual(1024, file_item.size)
        etag = file_item.etag
        # check container listing is consistent
        listing = self.env.container.files(parms={'format': 'json'})
        for f_dict in listing:
            if f_dict['name'] == file_name:
                break
        else:
            self.fail('Failed to find file %r in listing' % file_name)
        self.assertEqual(1024, f_dict['bytes'])
        self.assertEqual('text/foobar', f_dict['content_type'])
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            self.assertEqual(etag, '"%s"' % f_dict['hash'])
        else:
            self.assertEqual(etag, f_dict['hash'])
        put_last_modified = f_dict['last_modified']
        # now POST updated content-type to each file
        file_item = self.env.container.file(file_name)
        file_item.content_type = 'image/foobarbaz'
        file_item.sync_metadata({'Test': 'blah'})
        # sanity check object metadata
        file_item = self.env.container.file(file_name)
        file_item.initialize()
        self.assertEqual(1024, file_item.size)
        self.assertEqual('image/foobarbaz', file_item.content_type)
        self.assertEqual(etag, file_item.etag)
        self.assertIn('test', file_item.metadata)
        # check for consistency between object and container listing
        listing = self.env.container.files(parms={'format': 'json'})
        for f_dict in listing:
            if f_dict['name'] == file_name:
                break
        else:
            self.fail('Failed to find file %r in listing' % file_name)
        self.assertEqual(1024, f_dict['bytes'])
        self.assertEqual('image/foobarbaz', f_dict['content_type'])
        # the POST must have advanced the listing's last_modified
        self.assertLess(put_last_modified, f_dict['last_modified'])
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            self.assertEqual(etag, '"%s"' % f_dict['hash'])
        else:
            self.assertEqual(etag, f_dict['hash'])
class TestFileUTF8(Base2, TestFile):
    """Repeat the TestFile suite via the Base2 mixin (presumably with
    UTF-8 resource names -- see Base2 for the exact setup)."""
    pass
class TestFileComparisonEnv(BaseEnv):
    """Environment for conditional-request tests: a container of random
    files plus reference timestamps one day in the past and future."""

    @classmethod
    def setUp(cls):
        super(TestFileComparisonEnv, cls).setUp()
        cls.container = cls.account.container(Utils.create_name())
        if not cls.container.create():
            raise ResponseError(cls.conn.response)

        cls.file_count = 20
        cls.file_size = 128
        cls.files = []
        for _ in range(cls.file_count):
            new_file = cls.container.file(Utils.create_name())
            new_file.write_random(cls.file_size)
            cls.files.append(new_file)

        # Timestamps a day either side of "now", in the three HTTP-date
        # formats accepted for conditional requests.
        yesterday = time.gmtime(time.time() - 86400)
        tomorrow = time.gmtime(time.time() + 86400)
        cls.time_old_f1 = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT", yesterday)
        cls.time_old_f2 = time.strftime(
            "%A, %d-%b-%y %H:%M:%S GMT", yesterday)
        cls.time_old_f3 = time.strftime(
            "%a %b %d %H:%M:%S %Y", yesterday)
        cls.time_new = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT", tomorrow)
class TestFileComparison(Base):
    """Conditional GET/HEAD tests: If-Match, If-None-Match,
    If-Modified-Since and If-Unmodified-Since handling."""
    env = TestFileComparisonEnv

    def testIfMatch(self):
        """If-Match with the correct etag succeeds; a bogus etag gets 412."""
        for file_item in self.env.files:
            hdrs = {'If-Match': file_item.md5}
            self.assertTrue(file_item.read(hdrs=hdrs))

            hdrs = {'If-Match': 'bogus'}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_etag(file_item.md5)

    def testIfMatchMultipleEtags(self):
        """If-Match succeeds when any etag in the list matches."""
        for file_item in self.env.files:
            hdrs = {'If-Match': '"bogus1", "%s", "bogus2"' % file_item.md5}
            self.assertTrue(file_item.read(hdrs=hdrs))

            hdrs = {'If-Match': '"bogus1", "bogus2", "bogus3"'}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_etag(file_item.md5)

    def testIfNoneMatch(self):
        """If-None-Match with the object's etag yields 304 Not Modified."""
        for file_item in self.env.files:
            hdrs = {'If-None-Match': 'bogus'}
            self.assertTrue(file_item.read(hdrs=hdrs))

            hdrs = {'If-None-Match': file_item.md5}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(304)
            self.assert_etag(file_item.md5)
            self.assert_header('accept-ranges', 'bytes')

    def testIfNoneMatchMultipleEtags(self):
        """If-None-Match yields 304 when any etag in the list matches."""
        for file_item in self.env.files:
            hdrs = {'If-None-Match': '"bogus1", "bogus2", "bogus3"'}
            self.assertTrue(file_item.read(hdrs=hdrs))

            hdrs = {'If-None-Match':
                    '"bogus1", "bogus2", "%s"' % file_item.md5}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(304)
            self.assert_etag(file_item.md5)
            self.assert_header('accept-ranges', 'bytes')

    def testIfModifiedSince(self):
        """GET/HEAD succeed for an old If-Modified-Since, 304 for a future one."""
        for file_item in self.env.files:
            hdrs = {'If-Modified-Since': self.env.time_old_f1}
            self.assertTrue(file_item.read(hdrs=hdrs))
            self.assertTrue(file_item.info(hdrs=hdrs))

            hdrs = {'If-Modified-Since': self.env.time_new}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(304)
            self.assert_etag(file_item.md5)
            self.assert_header('accept-ranges', 'bytes')
            self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
            self.assert_status(304)
            self.assert_etag(file_item.md5)
            self.assert_header('accept-ranges', 'bytes')

    def testIfUnmodifiedSince(self):
        """GET/HEAD succeed for a future If-Unmodified-Since, 412 for a past one."""
        for file_item in self.env.files:
            hdrs = {'If-Unmodified-Since': self.env.time_new}
            self.assertTrue(file_item.read(hdrs=hdrs))
            self.assertTrue(file_item.info(hdrs=hdrs))

            hdrs = {'If-Unmodified-Since': self.env.time_old_f2}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_etag(file_item.md5)
            self.assertRaises(ResponseError, file_item.info, hdrs=hdrs)
            self.assert_status(412)
            self.assert_etag(file_item.md5)

    def testIfMatchAndUnmodified(self):
        """Combined If-Match + If-Unmodified-Since: both must pass, else 412."""
        for file_item in self.env.files:
            hdrs = {'If-Match': file_item.md5,
                    'If-Unmodified-Since': self.env.time_new}
            self.assertTrue(file_item.read(hdrs=hdrs))

            hdrs = {'If-Match': 'bogus',
                    'If-Unmodified-Since': self.env.time_new}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_etag(file_item.md5)

            hdrs = {'If-Match': file_item.md5,
                    'If-Unmodified-Since': self.env.time_old_f3}
            self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
            self.assert_status(412)
            self.assert_etag(file_item.md5)

    def testLastModified(self):
        """The Last-Modified from a PUT matches the listing value and
        drives conditional GETs correctly."""
        file_name = Utils.create_name()
        content_type = Utils.create_name()

        file_item = self.env.container.file(file_name)
        file_item.content_type = content_type
        resp = file_item.write_random_return_resp(self.env.file_size)
        put_last_modified = resp.getheader('last-modified')
        etag = file_item.md5

        file_item = self.env.container.file(file_name)
        info = file_item.info()
        self.assertIn('last_modified', info)
        last_modified = info['last_modified']
        self.assertEqual(put_last_modified, info['last_modified'])

        hdrs = {'If-Modified-Since': last_modified}
        self.assertRaises(ResponseError, file_item.read, hdrs=hdrs)
        self.assert_status(304)
        self.assert_etag(etag)
        self.assert_header('accept-ranges', 'bytes')

        hdrs = {'If-Unmodified-Since': last_modified}
        self.assertTrue(file_item.read(hdrs=hdrs))
class TestFileComparisonUTF8(Base2, TestFileComparison):
    """Repeat the TestFileComparison suite via the Base2 mixin (presumably
    with UTF-8 resource names -- see Base2 for the exact setup)."""
    pass
class TestServiceToken(unittest.TestCase):
    """Tests for X-Service-Token handling: access to the service account
    requires the user token in X-Auth-Token plus the service token in
    X-Service-Token; either alone must be rejected."""

    def setUp(self):
        if tf.skip_service_tokens:
            raise SkipTest

        if tf.in_process:
            tf.skip_if_no_xattrs()

        # markers telling do_request() which token to place in which header
        self.SET_TO_USERS_TOKEN = 1
        self.SET_TO_SERVICE_TOKEN = 2

        # keystoneauth and tempauth differ in allowing PUT account
        # Even if keystoneauth allows it, the proxy-server uses
        # allow_account_management to decide if accounts can be created
        self.put_account_expect = is_client_error
        if tf.swift_test_auth_version != '1':
            if cluster_info.get('swift').get('allow_account_management'):
                self.put_account_expect = is_success

    def _scenario_generator(self):
        """Yield (method, container, obj) covering account, container and
        object paths; DELETEs come last, deepest path first."""
        paths = ((None, None), ('c', None), ('c', 'o'))
        for path in paths:
            for method in ('PUT', 'POST', 'HEAD', 'GET', 'OPTIONS'):
                yield method, path[0], path[1]
        for path in reversed(paths):
            yield 'DELETE', path[0], path[1]

    def _assert_is_authed_response(self, method, container, object, resp):
        """Assert the response indicates the request was authorized."""
        resp.read()
        expect = is_success
        # account DELETE/PUT are special-cased regardless of auth outcome
        if method == 'DELETE' and not container:
            expect = is_client_error
        if method == 'PUT' and not container:
            expect = self.put_account_expect
        self.assertTrue(expect(resp.status), 'Unexpected %s for %s %s %s'
                        % (resp.status, method, container, object))

    def _assert_not_authed_response(self, method, container, object, resp):
        """Assert the response indicates the request was denied
        (OPTIONS is always allowed through)."""
        resp.read()
        expect = is_client_error
        if method == 'OPTIONS':
            expect = is_success
        self.assertTrue(expect(resp.status), 'Unexpected %s for %s %s %s'
                        % (resp.status, method, container, object))

    def prepare_request(self, method, use_service_account=False,
                        container=None, obj=None, body=None, headers=None,
                        x_auth_token=None,
                        x_service_token=None, dbg=False):
        """
        Setup for making the request

        When retry() calls the do_request() function, it calls it with the
        test user's token, the parsed path, a connection and (optionally)
        a token from the test service user. We save options here so that
        do_request() can make the appropriate request.

        :param method: The operation (e.g. 'HEAD')
        :param use_service_account: Optional. Set True to change the path to
                                    be the service account
        :param container: Optional. Adds a container name to the path
        :param obj: Optional. Adds an object name to the path
        :param body: Optional. Adds a body (string) in the request
        :param headers: Optional. Adds additional headers.
        :param x_auth_token: Optional. Default is SET_TO_USERS_TOKEN. One of:
                     SET_TO_USERS_TOKEN     Put the test user's token in
                                            X-Auth-Token
                     SET_TO_SERVICE_TOKEN   Put the service token in X-Auth-Token
        :param x_service_token: Optional. Default is to not set X-Service-Token
                     to any value. If specified, is one of following:
                     SET_TO_USERS_TOKEN     Put the test user's token in
                                            X-Service-Token
                     SET_TO_SERVICE_TOKEN   Put the service token in
                                            X-Service-Token
        :param dbg: Optional. Set true to check request arguments
        """
        self.method = method
        self.use_service_account = use_service_account
        self.container = container
        self.obj = obj
        self.body = body
        self.headers = headers
        if x_auth_token:
            self.x_auth_token = x_auth_token
        else:
            self.x_auth_token = self.SET_TO_USERS_TOKEN
        self.x_service_token = x_service_token
        self.dbg = dbg

    def do_request(self, url, token, parsed, conn, service_token=''):
        """Issue the request configured by prepare_request(); called by
        retry() with the user's token and (optionally) a service token."""
        if self.use_service_account:
            path = self._service_account(parsed.path)
        else:
            path = parsed.path
        if self.container:
            path += '/%s' % self.container
        if self.obj:
            path += '/%s' % self.obj
        headers = {}
        if self.body:
            headers.update({'Content-Length': len(self.body)})
        if self.x_auth_token == self.SET_TO_USERS_TOKEN:
            headers.update({'X-Auth-Token': token})
        elif self.x_auth_token == self.SET_TO_SERVICE_TOKEN:
            headers.update({'X-Auth-Token': service_token})
        if self.x_service_token == self.SET_TO_USERS_TOKEN:
            headers.update({'X-Service-Token': token})
        elif self.x_service_token == self.SET_TO_SERVICE_TOKEN:
            headers.update({'X-Service-Token': service_token})
        if self.dbg:
            print('DEBUG: conn.request: method:%s path:%s'
                  ' body:%s headers:%s' % (self.method, path, self.body,
                                           headers))
        conn.request(self.method, path, self.body, headers=headers)
        return check_response(conn)

    def _service_account(self, path):
        """Rewrite the account in *path* to the service account: keep the
        project id (text after the first '_', or the whole account name)
        and prepend the configured service prefix."""
        parts = path.split('/', 3)
        account = parts[2]
        try:
            project_id = account[account.index('_') + 1:]
        except ValueError:
            # no '_' in the account name; use it unchanged
            project_id = account
        parts[2] = '%s%s' % (tf.swift_test_service_prefix, project_id)
        return '/'.join(parts)

    def test_user_access_own_auth_account(self):
        # This covers ground tested elsewhere (tests a user doing HEAD
        # on own account). However, if this fails, none of the remaining
        # tests will work
        self.prepare_request('HEAD')
        resp = retry(self.do_request)
        resp.read()
        self.assertIn(resp.status, (200, 204))

    def test_user_cannot_access_service_account(self):
        """A user token alone must not grant access to the service account."""
        for method, container, obj in self._scenario_generator():
            self.prepare_request(method, use_service_account=True,
                                 container=container, obj=obj)
            resp = retry(self.do_request)
            self._assert_not_authed_response(method, container, obj, resp)

    def test_service_user_denied_with_x_auth_token(self):
        """The service token in X-Auth-Token alone must be denied."""
        for method, container, obj in self._scenario_generator():
            self.prepare_request(method, use_service_account=True,
                                 container=container, obj=obj,
                                 x_auth_token=self.SET_TO_SERVICE_TOKEN)
            resp = retry(self.do_request, service_user=5)
            self._assert_not_authed_response(method, container, obj, resp)

    def test_service_user_denied_with_x_service_token(self):
        """The service token in both headers (no user token) must be denied."""
        for method, container, obj in self._scenario_generator():
            self.prepare_request(method, use_service_account=True,
                                 container=container, obj=obj,
                                 x_auth_token=self.SET_TO_SERVICE_TOKEN,
                                 x_service_token=self.SET_TO_SERVICE_TOKEN)
            resp = retry(self.do_request, service_user=5)
            self._assert_not_authed_response(method, container, obj, resp)

    def test_user_plus_service_can_access_service_account(self):
        """User token plus service token grants access to the service account."""
        for method, container, obj in self._scenario_generator():
            self.prepare_request(method, use_service_account=True,
                                 container=container, obj=obj,
                                 x_auth_token=self.SET_TO_USERS_TOKEN,
                                 x_service_token=self.SET_TO_SERVICE_TOKEN)
            resp = retry(self.do_request, service_user=5)
            self._assert_is_authed_response(method, container, obj, resp)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/tests.py |
#!/usr/bin/python
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import json
from uuid import uuid4
from string import ascii_letters
import six
from six.moves import range
from swift.common.middleware.acl import format_acl
from swift.common.utils import distribute_evenly
from test.functional import check_response, retry, requires_acls, \
load_constraint, SkipTest
import test.functional as tf
def setUpModule():
    """Module-level fixture: initialize the functional-test package."""
    tf.setup_package()
def tearDownModule():
    """Module-level fixture: tear down the functional-test package."""
    tf.teardown_package()
class TestAccount(unittest.TestCase):
existing_metadata = None
@classmethod
def get_meta(cls):
    """Return the account's current X-Account-Meta-* headers as a dict."""
    def head(url, token, parsed, conn):
        conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    resp = retry(head)
    resp.read()
    return {header: value
            for header, value in resp.getheaders()
            if header.lower().startswith('x-account-meta')}
@classmethod
def clear_meta(cls, remove_metadata_keys):
    """POST empty values for the given metadata keys, batched so each
    request stays under the per-request header-count limit (90)."""
    def post(url, token, parsed, conn, hdr_keys):
        headers = {'X-Auth-Token': token}
        headers.update((k, '') for k in hdr_keys)
        conn.request('POST', parsed.path, '', headers)
        return check_response(conn)

    keys = list(remove_metadata_keys)
    n_batches = (len(keys) - 1) // 90 + 1
    for batch in distribute_evenly(keys, n_batches):
        resp = retry(post, batch)
        resp.read()
@classmethod
def set_meta(cls, metadata):
    """POST the given metadata headers to the account; no-op when empty."""
    if not metadata:
        return

    def post(url, token, parsed, conn, meta_hdrs):
        headers = {'X-Auth-Token': token}
        headers.update(meta_hdrs)
        conn.request('POST', parsed.path, '', headers)
        return check_response(conn)

    resp = retry(post, metadata)
    resp.read()
@classmethod
def setUpClass(cls):
    # remove and stash any existing account user metadata before tests
    cls.existing_metadata = cls.get_meta()
    cls.clear_meta(cls.existing_metadata.keys())
@classmethod
def tearDownClass(cls):
    # replace any stashed account user metadata
    cls.set_meta(cls.existing_metadata)
def setUp(self):
    """Load the cluster's metadata constraints used by the tests."""
    for constraint in ('max_meta_count', 'max_meta_name_length',
                       'max_meta_overall_size', 'max_meta_value_length'):
        setattr(self, constraint, load_constraint(constraint))
def tearDown(self):
    # clean up any account user metadata created by the test
    self.clear_meta(list(self.get_meta()))
def test_metadata(self):
    """Account metadata round-trip: POST sets and clears
    X-Account-Meta-Test; HEAD and GET reflect the change."""
    if tf.skip:
        raise SkipTest

    def post(url, token, parsed, conn, value):
        conn.request('POST', parsed.path, '',
                     {'X-Auth-Token': token, 'X-Account-Meta-Test': value})
        return check_response(conn)

    def head(url, token, parsed, conn):
        conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    def get(url, token, parsed, conn):
        conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    # an empty value leaves no header visible on HEAD/GET
    resp = retry(post, '')
    resp.read()
    self.assertEqual(resp.status, 204)
    resp = retry(head)
    resp.read()
    self.assertIn(resp.status, (200, 204))
    self.assertIsNone(resp.getheader('x-account-meta-test'))
    resp = retry(get)
    resp.read()
    self.assertIn(resp.status, (200, 204))
    self.assertIsNone(resp.getheader('x-account-meta-test'))
    # a non-empty value is stored and returned on both HEAD and GET
    resp = retry(post, 'Value')
    resp.read()
    self.assertEqual(resp.status, 204)
    resp = retry(head)
    resp.read()
    self.assertIn(resp.status, (200, 204))
    self.assertEqual(resp.getheader('x-account-meta-test'), 'Value')
    resp = retry(get)
    resp.read()
    self.assertIn(resp.status, (200, 204))
    self.assertEqual(resp.getheader('x-account-meta-test'), 'Value')
def test_invalid_acls(self):
    """An ACL header over the max header size is rejected with 400;
    one just under the limit is accepted."""
    if tf.skip:
        raise SkipTest

    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    # needs to be an acceptable header size
    num_keys = 8
    max_key_size = load_constraint('max_header_size') // num_keys
    acl = {'admin': [c * max_key_size for c in ascii_letters[:num_keys]]}

    headers = {'x-account-access-control': format_acl(
        version=2, acl_dict=acl)}
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 400)

    # and again a touch smaller
    acl = {'admin': [c * max_key_size for c
                     in ascii_letters[:num_keys - 1]]}
    headers = {'x-account-access-control': format_acl(
        version=2, acl_dict=acl)}
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)
@requires_acls
def test_invalid_acl_keys(self):
    """Non-JSON ACLs and ACLs with unknown keys are rejected with 400
    and not persisted."""
    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    # needs to be json
    resp = retry(post, headers={'X-Account-Access-Control': 'invalid'},
                 use_account=1)
    resp.read()
    self.assertEqual(resp.status, 400)

    acl_user = tf.swift_test_user[1]
    acl = {'admin': [acl_user], 'invalid_key': 'invalid_value'}
    headers = {'x-account-access-control': format_acl(
        version=2, acl_dict=acl)}

    resp = retry(post, headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 400)
    self.assertIsNone(resp.getheader('X-Account-Access-Control'))
@requires_acls
def test_invalid_acl_values(self):
    """An ACL whose value has the wrong type (string, not list) is
    rejected with 400 and not persisted."""
    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    acl = {'admin': 'invalid_value'}
    headers = {'x-account-access-control': format_acl(
        version=2, acl_dict=acl)}

    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 400)
    self.assertIsNone(resp.getheader('X-Account-Access-Control'))
@requires_acls
def test_read_only_acl(self):
    """A read-only grantee can read account headers and metadata but
    can neither see the ACL nor write metadata."""
    if tf.skip3:
        raise SkipTest

    def get(url, token, parsed, conn):
        conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    # cannot read account
    resp = retry(get, use_account=3)
    resp.read()
    self.assertEqual(resp.status, 403)

    # grant read access
    acl_user = tf.swift_test_user[2]
    acl = {'read-only': [acl_user]}
    headers = {'x-account-access-control': format_acl(
        version=2, acl_dict=acl)}
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)

    # read-only can read account headers
    resp = retry(get, use_account=3)
    resp.read()
    self.assertIn(resp.status, (200, 204))
    # but not acls
    self.assertIsNone(resp.getheader('X-Account-Access-Control'))

    # read-only can not write metadata
    headers = {'x-account-meta-test': 'value'}
    resp = retry(post, headers=headers, use_account=3)
    resp.read()
    self.assertEqual(resp.status, 403)

    # but they can read it (once the owner has set it)
    headers = {'x-account-meta-test': 'value'}
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)
    resp = retry(get, use_account=3)
    resp.read()
    self.assertIn(resp.status, (200, 204))
    self.assertEqual(resp.getheader('X-Account-Meta-Test'), 'value')
@requires_acls
def test_read_write_acl(self):
    """A read-write grantee can read account headers but still cannot
    see the ACL or write account metadata."""
    if tf.skip3:
        raise SkipTest

    def get(url, token, parsed, conn):
        conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    # cannot read account
    resp = retry(get, use_account=3)
    resp.read()
    self.assertEqual(resp.status, 403)

    # grant read-write access
    acl_user = tf.swift_test_user[2]
    acl = {'read-write': [acl_user]}
    headers = {'x-account-access-control': format_acl(
        version=2, acl_dict=acl)}
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)

    # read-write can read account headers
    resp = retry(get, use_account=3)
    resp.read()
    self.assertIn(resp.status, (200, 204))
    # but not acls
    self.assertIsNone(resp.getheader('X-Account-Access-Control'))

    # read-write can not write account metadata
    headers = {'x-account-meta-test': 'value'}
    resp = retry(post, headers=headers, use_account=3)
    resp.read()
    self.assertEqual(resp.status, 403)
@requires_acls
def test_admin_acl(self):
    """An admin grantee can read headers including the ACL, write
    account metadata, and even revoke their own access."""
    if tf.skip3:
        raise SkipTest

    def get(url, token, parsed, conn):
        conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    # cannot read account
    resp = retry(get, use_account=3)
    resp.read()
    self.assertEqual(resp.status, 403)

    # grant admin access
    acl_user = tf.swift_test_user[2]
    acl = {'admin': [acl_user]}
    acl_json_str = format_acl(version=2, acl_dict=acl)
    headers = {'x-account-access-control': acl_json_str}
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)

    # admin can read account headers
    resp = retry(get, use_account=3)
    resp.read()
    self.assertIn(resp.status, (200, 204))
    # including acls
    self.assertEqual(resp.getheader('X-Account-Access-Control'),
                     acl_json_str)

    # admin can write account metadata
    value = str(uuid4())
    headers = {'x-account-meta-test': value}
    resp = retry(post, headers=headers, use_account=3)
    resp.read()
    self.assertEqual(resp.status, 204)
    resp = retry(get, use_account=3)
    resp.read()
    self.assertIn(resp.status, (200, 204))
    self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)

    # admin can even revoke their own access
    headers = {'x-account-access-control': '{}'}
    resp = retry(post, headers=headers, use_account=3)
    resp.read()
    self.assertEqual(resp.status, 204)

    # and again, cannot read account
    resp = retry(get, use_account=3)
    resp.read()
    self.assertEqual(resp.status, 403)
@requires_acls
def test_protected_tempurl(self):
    """X-Account-Meta-Temp-Url-Key is privileged: hidden from read-only
    and read-write grantees, visible and writable for admin grantees."""
    if tf.skip3:
        raise SkipTest

    def get(url, token, parsed, conn):
        conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    # add an account metadata, and temp-url-key to account
    value = str(uuid4())
    headers = {
        'x-account-meta-temp-url-key': 'secret',
        'x-account-meta-test': value,
    }
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)

    # grant read-only access to tester3
    acl_user = tf.swift_test_user[2]
    acl = {'read-only': [acl_user]}
    acl_json_str = format_acl(version=2, acl_dict=acl)
    headers = {'x-account-access-control': acl_json_str}
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)

    # read-only tester3 can read account metadata
    resp = retry(get, use_account=3)
    resp.read()
    self.assertIn(resp.status, (200, 204),
                  'Expected status in (200, 204), got %s' % resp.status)
    self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
    # but not temp-url-key
    self.assertIsNone(resp.getheader('X-Account-Meta-Temp-Url-Key'))

    # grant read-write access to tester3
    acl_user = tf.swift_test_user[2]
    acl = {'read-write': [acl_user]}
    acl_json_str = format_acl(version=2, acl_dict=acl)
    headers = {'x-account-access-control': acl_json_str}
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)

    # read-write tester3 can read account metadata
    resp = retry(get, use_account=3)
    resp.read()
    self.assertIn(resp.status, (200, 204),
                  'Expected status in (200, 204), got %s' % resp.status)
    self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
    # but not temp-url-key
    self.assertIsNone(resp.getheader('X-Account-Meta-Temp-Url-Key'))

    # grant admin access to tester3
    acl_user = tf.swift_test_user[2]
    acl = {'admin': [acl_user]}
    acl_json_str = format_acl(version=2, acl_dict=acl)
    headers = {'x-account-access-control': acl_json_str}
    resp = retry(post, headers=headers, use_account=1)
    resp.read()
    self.assertEqual(resp.status, 204)

    # admin tester3 can read account metadata
    resp = retry(get, use_account=3)
    resp.read()
    self.assertIn(resp.status, (200, 204),
                  'Expected status in (200, 204), got %s' % resp.status)
    self.assertEqual(resp.getheader('X-Account-Meta-Test'), value)
    # including temp-url-key
    self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'),
                     'secret')

    # admin tester3 can even change temp-url-key
    secret = str(uuid4())
    headers = {
        'x-account-meta-temp-url-key': secret,
    }
    resp = retry(post, headers=headers, use_account=3)
    resp.read()
    self.assertEqual(resp.status, 204)
    resp = retry(get, use_account=3)
    resp.read()
    self.assertIn(resp.status, (200, 204),
                  'Expected status in (200, 204), got %s' % resp.status)
    self.assertEqual(resp.getheader('X-Account-Meta-Temp-Url-Key'),
                     secret)
@requires_acls
def test_account_acls(self):
    """End-to-end ACL lifecycle between two users: grant admin, then
    read-write, then read-only to User2 and verify what each level
    permits; always reset the ACL afterwards."""
    if tf.skip2:
        raise SkipTest

    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    def put(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('PUT', parsed.path, '', new_headers)
        return check_response(conn)

    def delete(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('DELETE', parsed.path, '', new_headers)
        return check_response(conn)

    def head(url, token, parsed, conn):
        conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    def get(url, token, parsed, conn):
        conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    try:
        # User1 can POST to their own account (and reset the ACLs)
        resp = retry(post, headers={'X-Account-Access-Control': '{}'},
                     use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIsNone(resp.getheader('X-Account-Access-Control'))

        # User1 can GET their own empty account
        resp = retry(get, use_account=1)
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        self.assertIsNone(resp.getheader('X-Account-Access-Control'))

        # User2 can't GET User1's account
        resp = retry(get, use_account=2, url_account=1)
        resp.read()
        self.assertEqual(resp.status, 403)

        # User1 is swift_owner of their own account, so they can POST an
        # ACL -- let's do this and make User2 (test_user[1]) an admin
        acl_user = tf.swift_test_user[1]
        acl = {'admin': [acl_user]}
        headers = {'x-account-access-control': format_acl(
            version=2, acl_dict=acl)}
        resp = retry(post, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)

        # User1 can see the new header
        resp = retry(get, use_account=1)
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        data_from_headers = resp.getheader('x-account-access-control')
        expected = json.dumps(acl, separators=(',', ':'))
        self.assertEqual(data_from_headers, expected)

        # Now User2 should be able to GET the account and see the ACL
        resp = retry(head, use_account=2, url_account=1)
        resp.read()
        data_from_headers = resp.getheader('x-account-access-control')
        self.assertEqual(data_from_headers, expected)

        # Revoke User2's admin access, grant User2 read-write access
        acl = {'read-write': [acl_user]}
        headers = {'x-account-access-control': format_acl(
            version=2, acl_dict=acl)}
        resp = retry(post, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)

        # User2 can still GET the account, but not see the ACL
        # (since it's privileged data)
        resp = retry(head, use_account=2, url_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIsNone(resp.getheader('x-account-access-control'))

        # User2 can PUT and DELETE a container
        resp = retry(put, use_account=2, url_account=1,
                     resource='%(storage_url)s/mycontainer', headers={})
        resp.read()
        self.assertEqual(resp.status, 201)
        resp = retry(delete, use_account=2, url_account=1,
                     resource='%(storage_url)s/mycontainer', headers={})
        resp.read()
        self.assertEqual(resp.status, 204)

        # Revoke User2's read-write access, grant User2 read-only access
        acl = {'read-only': [acl_user]}
        headers = {'x-account-access-control': format_acl(
            version=2, acl_dict=acl)}
        resp = retry(post, headers=headers, use_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)

        # User2 can still GET the account, but not see the ACL
        # (since it's privileged data)
        resp = retry(head, use_account=2, url_account=1)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIsNone(resp.getheader('x-account-access-control'))

        # User2 can't PUT a container
        resp = retry(put, use_account=2, url_account=1,
                     resource='%(storage_url)s/mycontainer', headers={})
        resp.read()
        self.assertEqual(resp.status, 403)

    finally:
        # Make sure to clean up even if tests fail -- User2 should not
        # have access to User1's account in other functional tests!
        resp = retry(post, headers={'X-Account-Access-Control': '{}'},
                     use_account=1)
        resp.read()
@requires_acls
def test_swift_account_acls(self):
    """Valid JSON ACLs round-trip through POST/GET; an invalid ACL never
    replaces previously stored valid data."""
    if tf.skip:
        raise SkipTest

    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    def head(url, token, parsed, conn):
        conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    def get(url, token, parsed, conn):
        conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    try:
        # User1 can POST to their own account
        resp = retry(post, headers={'X-Account-Access-Control': '{}'})
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIsNone(resp.getheader('X-Account-Access-Control'))

        # User1 can GET their own empty account
        resp = retry(get)
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        self.assertIsNone(resp.getheader('X-Account-Access-Control'))

        # User1 can POST non-empty data
        acl_json = '{"admin":["bob"]}'
        resp = retry(post, headers={'X-Account-Access-Control': acl_json})
        resp.read()
        self.assertEqual(resp.status, 204)

        # User1 can GET the non-empty data
        resp = retry(get)
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        self.assertEqual(resp.getheader('X-Account-Access-Control'),
                         acl_json)

        # POST non-JSON ACL should fail
        resp = retry(post, headers={'X-Account-Access-Control': 'yuck'})
        resp.read()
        # resp.status will be 400 if tempauth or some other ACL-aware
        # auth middleware rejects it, or 200 (but silently swallowed by
        # core Swift) if ACL-unaware auth middleware approves it.

        # A subsequent GET should show the old, valid data, not the garbage
        resp = retry(get)
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        self.assertEqual(resp.getheader('X-Account-Access-Control'),
                         acl_json)
    finally:
        # Make sure to clean up even if tests fail -- User2 should not
        # have access to User1's account in other functional tests!
        resp = retry(post, headers={'X-Account-Access-Control': '{}'})
        resp.read()
def test_swift_prohibits_garbage_account_acls(self):
    """Even without ACL-aware auth middleware, a garbage (non-JSON) ACL
    must never be persisted or returned."""
    if tf.skip:
        raise SkipTest

    def post(url, token, parsed, conn, headers):
        new_headers = dict({'X-Auth-Token': token}, **headers)
        conn.request('POST', parsed.path, '', new_headers)
        return check_response(conn)

    def get(url, token, parsed, conn):
        conn.request('GET', parsed.path, '', {'X-Auth-Token': token})
        return check_response(conn)

    try:
        # User1 can POST to their own account
        resp = retry(post, headers={'X-Account-Access-Control': '{}'})
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIsNone(resp.getheader('X-Account-Access-Control'))

        # User1 can GET their own empty account
        resp = retry(get)
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        self.assertIsNone(resp.getheader('X-Account-Access-Control'))

        # User1 can POST non-empty data
        acl_json = '{"admin":["bob"]}'
        resp = retry(post, headers={'X-Account-Access-Control': acl_json})
        resp.read()
        self.assertEqual(resp.status, 204)
        # If this request is handled by ACL-aware auth middleware, then the
        # ACL will be persisted. If it is handled by ACL-unaware auth
        # middleware, then the header will be thrown out. But the request
        # should return successfully in any case.

        # User1 can GET the non-empty data
        resp = retry(get)
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        # ACL will be set if some ACL-aware auth middleware (e.g. tempauth)
        # propagates it to sysmeta; if no ACL-aware auth middleware does,
        # then X-Account-Access-Control will still be empty.

        # POST non-JSON ACL should fail
        resp = retry(post, headers={'X-Account-Access-Control': 'yuck'})
        resp.read()
        # resp.status will be 400 if tempauth or some other ACL-aware
        # auth middleware rejects it, or 200 (but silently swallowed by
        # core Swift) if ACL-unaware auth middleware approves it.

        # A subsequent GET should either show the old, valid data (if
        # ACL-aware auth middleware is propagating it) or show nothing
        # (if no auth middleware in the pipeline is ACL-aware), but should
        # never return the garbage ACL.
        resp = retry(get)
        resp.read()
        self.assertEqual(resp.status // 100, 2)
        self.assertNotEqual(resp.getheader('X-Account-Access-Control'),
                            'yuck')
    finally:
        # Make sure to clean up even if tests fail -- User2 should not
        # have access to User1's account in other functional tests!
        resp = retry(post, headers={'X-Account-Access-Control': '{}'})
        resp.read()
    def test_unicode_metadata(self):
        """Account metadata supports non-ASCII values everywhere, and
        non-ASCII header *names* only on py2 with an integral front end."""
        if tf.skip:
            raise SkipTest

        def post(url, token, parsed, conn, name, value):
            # POST one metadata header to the account
            conn.request('POST', parsed.path, '',
                         {'X-Auth-Token': token, name: value})
            return check_response(conn)

        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)
        # a non-ASCII (Thai) character in both a header name and a value
        uni_key = u'X-Account-Meta-uni\u0E12'
        uni_value = u'uni\u0E12'
        # Note that py3 has issues with non-ascii header names; see
        # https://bugs.python.org/issue37093
        if (tf.web_front_end == 'integral' and six.PY2):
            # non-ASCII header *name* round-trips only in this configuration
            resp = retry(post, uni_key, '1')
            resp.read()
            self.assertIn(resp.status, (201, 204))
            resp = retry(head)
            resp.read()
            self.assertIn(resp.status, (200, 204))
            self.assertEqual(resp.getheader(uni_key.encode('utf-8')), '1')
        # a non-ASCII header *value* should work everywhere
        resp = retry(post, 'X-Account-Meta-uni', uni_value)
        resp.read()
        self.assertEqual(resp.status, 204)
        resp = retry(head)
        resp.read()
        self.assertIn(resp.status, (200, 204))
        if six.PY2:
            # py2 hands back the raw UTF-8 bytes for header values
            self.assertEqual(resp.getheader('X-Account-Meta-uni'),
                             uni_value.encode('utf8'))
        else:
            self.assertEqual(resp.getheader('X-Account-Meta-uni'),
                             uni_value)
        # See above note about py3 and non-ascii header names
        if (tf.web_front_end == 'integral' and six.PY2):
            # non-ASCII name and non-ASCII value together
            resp = retry(post, uni_key, uni_value)
            resp.read()
            self.assertEqual(resp.status, 204)
            resp = retry(head)
            resp.read()
            self.assertIn(resp.status, (200, 204))
            self.assertEqual(resp.getheader(uni_key.encode('utf-8')),
                             uni_value.encode('utf-8'))
def test_multi_metadata(self):
if tf.skip:
raise SkipTest
def post(url, token, parsed, conn, name, value):
conn.request('POST', parsed.path, '',
{'X-Auth-Token': token, name: value})
return check_response(conn)
def head(url, token, parsed, conn):
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
return check_response(conn)
resp = retry(post, 'X-Account-Meta-One', '1')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-account-meta-one'), '1')
resp = retry(post, 'X-Account-Meta-Two', '2')
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(head)
resp.read()
self.assertIn(resp.status, (200, 204))
self.assertEqual(resp.getheader('x-account-meta-one'), '1')
self.assertEqual(resp.getheader('x-account-meta-two'), '2')
def test_bad_metadata(self):
if tf.skip:
raise SkipTest
def post(url, token, parsed, conn, extra_headers):
headers = {'X-Auth-Token': token}
headers.update(extra_headers)
conn.request('POST', parsed.path, '', headers)
return check_response(conn)
resp = retry(post,
{'X-Account-Meta-' + (
'k' * self.max_meta_name_length): 'v'})
resp.read()
self.assertEqual(resp.status, 204)
# Clear it, so the value-length checking doesn't accidentally trip
# the overall max
resp = retry(post,
{'X-Account-Meta-' + (
'k' * self.max_meta_name_length): ''})
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(
post,
{'X-Account-Meta-' + ('k' * (
self.max_meta_name_length + 1)): 'v'})
resp.read()
self.assertEqual(resp.status, 400)
resp = retry(post,
{'X-Account-Meta-Too-Long': (
'k' * self.max_meta_value_length)})
resp.read()
self.assertEqual(resp.status, 204)
resp = retry(
post,
{'X-Account-Meta-Too-Long': 'k' * (
self.max_meta_value_length + 1)})
resp.read()
self.assertEqual(resp.status, 400)
def test_bad_metadata2(self):
if tf.skip:
raise SkipTest
def post(url, token, parsed, conn, extra_headers):
headers = {'X-Auth-Token': token}
headers.update(extra_headers)
conn.request('POST', parsed.path, '', headers)
return check_response(conn)
headers = {}
for x in range(self.max_meta_count):
headers['X-Account-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 204)
headers = {}
for x in range(self.max_meta_count + 1):
headers['X-Account-Meta-%d' % x] = 'v'
resp = retry(post, headers)
resp.read()
self.assertEqual(resp.status, 400)
    def test_bad_metadata3(self):
        """The *aggregate* metadata size limit is enforced across POSTs.

        The backend accounts 4 bytes of overhead plus the value length per
        item; this test fills the account to just under the overall limit,
        then checks that one more byte tips it over.
        """
        if tf.skip:
            raise SkipTest
        if tf.in_process:
            tf.skip_if_no_xattrs()

        def post(url, token, parsed, conn, extra_headers):
            headers = {'X-Auth-Token': token}
            headers.update(extra_headers)
            conn.request('POST', parsed.path, '', headers)
            return check_response(conn)

        headers = {}
        header_value = 'k' * self.max_meta_value_length
        size = 0
        x = 0
        # fill with full-size items while at least one more full item
        # (4 bytes overhead + value) still fits under the overall limit
        while size < (self.max_meta_overall_size - 4
                      - self.max_meta_value_length):
            size += 4 + self.max_meta_value_length
            headers['X-Account-Meta-%04d' % x] = header_value
            x += 1
        # top up to one byte *below* the overall limit with a final item
        if self.max_meta_overall_size - size > 1:
            headers['X-Account-Meta-k'] = \
                'v' * (self.max_meta_overall_size - size - 1)
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 204)
        # this POST includes metadata size that is over limit
        headers['X-Account-Meta-k'] = \
            'x' * (self.max_meta_overall_size - size)
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
        # this POST would be ok and the aggregate backend metadata
        # size is on the border
        headers = {'X-Account-Meta-k':
                   'y' * (self.max_meta_overall_size - size - 1)}
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 204)
        # this last POST would be ok by itself but takes the aggregate
        # backend metadata size over limit
        headers = {'X-Account-Meta-k':
                   'z' * (self.max_meta_overall_size - size)}
        resp = retry(post, headers)
        resp.read()
        self.assertEqual(resp.status, 400)
class TestAccountInNonDefaultDomain(unittest.TestCase):
    """Checks specific to accounts living in a non-default auth domain."""

    def setUp(self):
        if tf.skip or tf.skip2 or tf.skip_if_not_v3:
            raise SkipTest('AUTH VERSION 3 SPECIFIC TEST')

    def test_project_domain_id_header(self):
        """A HEAD on such an account must expose the project domain id."""
        def post(url, token, parsed, conn):
            conn.request('POST', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)

        def head(url, token, parsed, conn):
            conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
            return check_response(conn)

        # POST first so the account is auto-created if it does not exist yet
        resp = retry(post, use_account=4)
        resp.read()
        self.assertEqual(resp.status, 204)
        # the HEAD response must carry the project domain id header
        resp = retry(head, use_account=4)
        resp.read()
        self.assertEqual(resp.status, 204)
        self.assertIn('X-Account-Project-Domain-Id', resp.headers)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/test_account.py |
#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hashlib
import hmac
import json
import time
import six
from copy import deepcopy
from six.moves.urllib.parse import quote, unquote
from unittest import SkipTest
import test.functional as tf
from swift.common.swob import normalize_etag
from swift.common.utils import MD5_OF_EMPTY_STRING, config_true_value, md5
from swift.common.middleware.versioned_writes.object_versioning import \
DELETE_MARKER_CONTENT_TYPE
from test.functional.tests import Base, Base2, BaseEnv, Utils
from test.functional import cluster_info
from test.functional.swift_test_client import Connection, \
ResponseError
from test.functional.test_tempurl import TestContainerTempurlEnv, \
TestTempurlEnv
def setUpModule():
    """Initialize the functional-test package before any test in this module."""
    tf.setup_package()
def tearDownModule():
    """Release functional-test package resources after all tests have run."""
    tf.teardown_package()
class TestObjectVersioningEnv(BaseEnv):
    """Shared environment for the object-versioning tests.

    Creates a versioning-enabled container and an unversioned container,
    sets up second/third accounts (when configured) for ACL tests, and
    records which of the custom PUT headers the cluster actually echoes
    back, so tests can assert on them later.
    """
    account2 = None
    versions_header_key = 'X-Versions-Enabled'

    @classmethod
    def setUp(cls):
        super(TestObjectVersioningEnv, cls).setUp()
        # NOTE: the original code built and authenticated a second
        # Connection for account2 here *and again* below; the first copy
        # was unconditionally superseded, so it has been removed.
        if six.PY2:
            # avoid getting a prefix that stops halfway through an encoded
            # character
            prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
        else:
            prefix = Utils.create_name()[:10]

        cls.container = cls.account.container(prefix + "-objs")
        container_headers = {cls.versions_header_key: 'True'}
        if not cls.container.create(hdrs=container_headers):
            raise ResponseError(cls.conn.response)

        cls.unversioned_container = cls.account.container(
            prefix + "-unversioned")
        if not cls.unversioned_container.create():
            raise ResponseError(cls.conn.response)

        if not tf.skip2:
            # setup another account to test ACLs
            config2 = deepcopy(tf.config)
            config2['account'] = tf.config['account2']
            config2['username'] = tf.config['username2']
            config2['password'] = tf.config['password2']
            cls.conn2 = Connection(config2)
            cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate()
            cls.account2 = cls.conn2.get_account()
            cls.account2.delete_containers()

        if not tf.skip3:
            # setup another account with no access to anything to test ACLs
            config3 = deepcopy(tf.config)
            config3['account'] = tf.config['account']
            config3['username'] = tf.config['username3']
            config3['password'] = tf.config['password3']
            cls.conn3 = Connection(config3)
            cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate()
            cls.account3 = cls.conn3.get_account()

        # the allowed headers are configurable in object server, so we cannot
        # assert that content-encoding or content-disposition get *copied* to
        # the object version unless they were set on the original PUT, so
        # populate expected_headers by making a HEAD on the original object
        precheck_container = cls.account.container('header-precheck-cont')
        if not precheck_container.create():
            raise ResponseError(cls.conn.response)
        test_obj = precheck_container.file('test_allowed_headers')
        put_headers = {'Content-Type': 'text/jibberish01',
                       'Content-Encoding': 'gzip',
                       'Content-Disposition': 'attachment; filename=myfile'}
        test_obj.write(b"aaaaa", hdrs=put_headers)
        test_obj.initialize()
        resp_headers = {
            h.lower(): v for h, v in test_obj.conn.response.getheaders()}
        cls.expected_headers = {}
        # keep only the headers the cluster echoed back on the HEAD
        for k, v in put_headers.items():
            if k.lower() in resp_headers:
                cls.expected_headers[k] = v
        precheck_container.delete_recursive()

    @classmethod
    def tearDown(cls):
        if cls.account:
            cls.account.delete_containers()
        if cls.account2:
            cls.account2.delete_containers()
class TestObjectVersioningBase(Base):
    """Common setup/teardown and assertions for object-versioning tests."""
    env = TestObjectVersioningEnv

    def setUp(self):
        super(TestObjectVersioningBase, self).setUp()
        if 'object_versioning' not in tf.cluster_info:
            raise SkipTest("Object Versioning not enabled")
        # cached lazily by the account_name property in subclasses
        self._account_name = None
        # make sure versioning is enabled,
        # since it gets disabled in tearDown
        self.env.container.update_metadata(
            hdrs={self.env.versions_header_key: 'True'})

    def _tear_down_files(self, container):
        """Best-effort removal of every object and version in *container*."""
        try:
            # only delete files and not containers
            # as they were configured in self.env
            # get rid of any versions so they aren't restored
            container.update_metadata(
                hdrs={self.env.versions_header_key: 'False'})
            # get rid of originals
            container.delete_files()
            # delete older versions
            listing_parms = {'versions': None, 'format': 'json'}
            for obj_info in container.files(parms=listing_parms):
                prev_version = container.file(obj_info['name'])
                prev_version.delete(
                    parms={'version-id': obj_info['version_id']})
        except ResponseError:
            # cleanup is best-effort; failures here must not fail the test
            pass

    def tearDown(self):
        super(TestObjectVersioningBase, self).tearDown()
        self._tear_down_files(self.env.container)

    def assertTotalVersions(self, container, count):
        """Assert *container* holds exactly *count* versions in total."""
        listing_parms = {'versions': None}
        self.assertEqual(count, len(container.files(parms=listing_parms)))

    def assertContentTypes(self, container, expected_content_types):
        """Assert the oldest-first version listing has these content types."""
        listing_parms = {'versions': None,
                         'format': 'json',
                         'reverse': 'true'}
        self.assertEqual(expected_content_types, [
            o['content_type']
            for o in container.files(parms=listing_parms)])
class TestObjectVersioning(TestObjectVersioningBase):
@property
def account_name(self):
if not self._account_name:
self._account_name = unquote(
self.env.conn.storage_path.rsplit('/', 1)[-1])
return self._account_name
def test_disable_version(self):
# sanity
self.assertTrue(
config_true_value(self.env.container.info()['versions_enabled']))
# disable it
self.env.container.update_metadata(
hdrs={self.env.versions_header_key: 'False'})
self.assertFalse(
config_true_value(self.env.container.info()['versions_enabled']))
# enabled it back
self.env.container.update_metadata(
hdrs={self.env.versions_header_key: 'True'})
self.assertTrue(
config_true_value(self.env.container.info()['versions_enabled']))
    def test_account_list_containers(self):
        """Enabling versioning on containers does not disturb account
        listings or listing limits.

        NOTE: this test deletes and then recreates the shared env
        containers, so the trailing restore steps are load-bearing.
        """
        cont_listing = self.env.account.containers()
        self.assertEqual(cont_listing, [self.env.container.name,
                                        self.env.unversioned_container.name])
        self.env.account.delete_containers()
        prefix = Utils.create_name()

        def get_name(i):
            return prefix + '-%02d' % i

        num_container = [15, 20]
        # create 20 containers with sortable, zero-padded names
        for i in range(num_container[1]):
            name = get_name(i)
            container = self.env.account.container(name)
            container.create()
        limit = 5
        cont_listing = self.env.account.containers(parms={'limit': limit})
        self.assertEqual(cont_listing, [get_name(i) for i in range(limit)])
        # enable versioning on the last 5 containers...
        for i in range(num_container[0], num_container[1]):
            name = get_name(i)
            container = self.env.account.container(name)
            container.update_metadata(
                hdrs={self.env.versions_header_key: 'True'})
        # ...and check the limited listing is unchanged
        cont_listing = self.env.account.containers(parms={'limit': limit})
        self.assertEqual(cont_listing, [get_name(i) for i in range(limit)])
        # we're in charge of getting everything back to normal
        self.env.account.delete_containers()
        self.env.container.create()
        self.env.unversioned_container.create()
def assert_previous_version(self, object_name, version_id, content,
content_type, expected_headers={},
not_expected_header_keys=[],
check_env_expected_headers=False):
'''
Find previous version of an object using the ?versions API
then, assert object data and metadata using ?version-id API
'''
prev_version = self.env.container.file(object_name)
prev_version.initialize(parms={'version-id': version_id})
self.assertEqual(content, prev_version.read(
parms={'version-id': version_id}))
self.assertEqual(content_type, prev_version.content_type)
# make sure the new obj metadata did not leak to the prev. version
resp_headers = {
h.lower(): v for h, v in prev_version.conn.response.getheaders()}
for k in not_expected_header_keys:
self.assertNotIn(k, resp_headers)
for k, v in expected_headers.items():
self.assertIn(k.lower(), resp_headers)
self.assertEqual(v, resp_headers[k.lower()])
# also check env expected_headers
if check_env_expected_headers:
for k, v in self.env.expected_headers.items():
self.assertIn(k.lower(), resp_headers)
self.assertEqual(v, resp_headers[k.lower()])
    def test_expiry(self):
        """Expired objects 404 on plain GET/HEAD, but their versioned data
        remains retrievable via ?version-id."""
        # sanity
        container = self.env.container
        self.assertTrue(
            config_true_value(self.env.container.info()['versions_enabled']))
        # obj1 expires one second after the PUT
        versioned_obj1 = container.file(Utils.create_name())
        put_headers = {'Content-Type': 'text/blah-blah-blah',
                       'X-Delete-After': '1',
                       'X-Object-Meta-Color': 'blue'}
        resp = versioned_obj1.write(b"aaaaa", hdrs=put_headers,
                                    return_resp=True)
        version_id1 = resp.getheader('x-object-version-id')
        # obj2 gets its expiry via a later POST instead
        versioned_obj2 = container.file(Utils.create_name())
        resp = versioned_obj2.write(b"aaaaa", hdrs={}, return_resp=True)
        version_id2 = resp.getheader('x-object-version-id')
        # swift_test_client's File API doesn't really allow for POSTing
        # arbitrary headers, so...
        def post(url, token, parsed, conn):
            conn.request('POST', '%s/%s/%s' % (parsed.path, container,
                                               versioned_obj2.name),
                         '', {'X-Auth-Token': token,
                              'Content-Length': '0',
                              'X-Object-Meta-Color': 'red',
                              'X-Delete-After': '1'})
            return tf.check_response(conn)
        resp = tf.retry(post)
        resp.read()
        self.assertEqual(resp.status, 202)
        # wait out the one-second X-Delete-After
        time.sleep(1)
        # Links have expired
        with self.assertRaises(ResponseError) as cm:
            versioned_obj1.info()
        self.assertEqual(404, cm.exception.status)
        with self.assertRaises(ResponseError) as cm:
            versioned_obj2.info()
        self.assertEqual(404, cm.exception.status)
        # But data are still there
        versioned_obj1.initialize(parms={'version-id': version_id1})
        self.assertEqual('text/blah-blah-blah', versioned_obj1.content_type)
        self.assertEqual('blue', versioned_obj1.metadata['color'])
        versioned_obj2.initialize(parms={'version-id': version_id2})
        self.assertEqual('application/octet-stream',
                         versioned_obj2.content_type)
        self.assertEqual('red', versioned_obj2.metadata['color'])
        # Note that links may still show up in listings, depending on how
        # aggressive the object-expirer is. When doing a version-aware
        # listing, though, we'll only ever have the two entries.
        self.assertTotalVersions(container, 2)
    def test_get_if_match(self):
        """If-Match is honored for both unversioned and versioned objects."""
        body = b'data'
        oname = Utils.create_name()
        obj = self.env.unversioned_container.file(oname)
        resp = obj.write(body, return_resp=True)
        etag = resp.getheader('etag')
        self.assertEqual(
            md5(body, usedforsecurity=False).hexdigest(),
            normalize_etag(etag))
        # un-versioned object is cool with if-match
        self.assertEqual(body, obj.read(hdrs={'if-match': etag}))
        with self.assertRaises(ResponseError) as cm:
            obj.read(hdrs={'if-match': 'not-the-etag'})
        self.assertEqual(412, cm.exception.status)
        # same name, same body, but in the versioned container
        v_obj = self.env.container.file(oname)
        resp = v_obj.write(body, return_resp=True)
        self.assertEqual(resp.getheader('etag'), etag)
        # versioned object is fine with if-match, too
        self.assertEqual(body, v_obj.read(hdrs={
            'if-match': normalize_etag(etag)}))
        # works quoted, too
        self.assertEqual(body, v_obj.read(hdrs={
            'if-match': '"%s"' % normalize_etag(etag)}))
        with self.assertRaises(ResponseError) as cm:
            v_obj.read(hdrs={'if-match': 'not-the-etag'})
        self.assertEqual(412, cm.exception.status)
    def test_container_acls(self):
        """Read ACLs grant plain GETs but never version-id GETs."""
        if tf.skip3:
            raise SkipTest('Username3 not set')
        obj = self.env.container.file(Utils.create_name())
        resp = obj.write(b"data", return_resp=True)
        version_id = resp.getheader('x-object-version-id')
        self.assertIsNotNone(version_id)
        # user3 has no access at all yet
        with self.assertRaises(ResponseError) as cm:
            obj.read(hdrs={'X-Auth-Token': self.env.conn3.storage_token})
        self.assertEqual(403, cm.exception.status)
        # Container ACLs work more or less like they always have
        self.env.container.update_metadata(
            hdrs={'X-Container-Read': self.env.conn3.user_acl})
        self.assertEqual(b"data", obj.read(hdrs={
            'X-Auth-Token': self.env.conn3.storage_token}))
        # But the version-specific GET still requires a swift owner
        with self.assertRaises(ResponseError) as cm:
            obj.read(hdrs={'X-Auth-Token': self.env.conn3.storage_token},
                     parms={'version-id': version_id})
        self.assertEqual(403, cm.exception.status)
        # If it's pointing to a symlink that points elsewhere, that still needs
        # to be authed
        tgt_name = Utils.create_name()
        self.env.unversioned_container.file(tgt_name).write(b'link')
        sym_tgt_header = quote(unquote('%s/%s' % (
            self.env.unversioned_container.name, tgt_name)))
        obj.write(hdrs={'X-Symlink-Target': sym_tgt_header})
        # So, user1's good...
        self.assertEqual(b'link', obj.read())
        # ...but user3 can't
        with self.assertRaises(ResponseError) as cm:
            obj.read(hdrs={'X-Auth-Token': self.env.conn3.storage_token})
        self.assertEqual(403, cm.exception.status)
        # unless we add the acl to the unversioned_container
        self.env.unversioned_container.update_metadata(
            hdrs={'X-Container-Read': self.env.conn3.user_acl})
        self.assertEqual(b'link', obj.read(
            hdrs={'X-Auth-Token': self.env.conn3.storage_token}))
    def _test_overwriting_setup(self, obj_name=None):
        """Exercise the full overwrite/copy/delete life cycle on one object.

        Writes several versions, checks that older versions are preserved
        with their original metadata, that POST does not version, that a
        cross-container copy versions the destination, and that repeated
        DELETEs stack delete markers.  Returns the file object and the
        expected (oldest-first) content-type listing.

        NOTE: every version count asserted below depends on the exact
        sequence of writes/deletes above it; do not reorder.
        """
        # sanity
        container = self.env.container
        self.assertTrue(
            config_true_value(self.env.container.info()['versions_enabled']))
        expected_content_types = []
        self.assertTotalVersions(container, 0)
        obj_name = obj_name or Utils.create_name()
        versioned_obj = container.file(obj_name)
        put_headers = {'Content-Type': 'text/jibberish01',
                       'Content-Encoding': 'gzip',
                       'Content-Disposition': 'attachment; filename=myfile'}
        resp = versioned_obj.write(b"aaaaa", hdrs=put_headers,
                                   return_resp=True)
        v1_version_id = resp.getheader('x-object-version-id')
        expected_content_types.append('text/jibberish01')
        self.assertContentTypes(container, expected_content_types)
        obj_info = versioned_obj.info()
        self.assertEqual('text/jibberish01', obj_info['content_type'])
        self.assertTotalVersions(container, 1)
        # second PUT with different content type and metadata
        resp = versioned_obj.write(
            b"bbbbb",
            hdrs={'Content-Type': 'text/jibberish02',
                  'X-Object-Meta-Foo': 'Bar'},
            return_resp=True)
        v2_version_id = resp.getheader('x-object-version-id')
        versioned_obj.initialize()
        self.assertEqual(versioned_obj.content_type, 'text/jibberish02')
        self.assertEqual(versioned_obj.metadata['foo'], 'Bar')
        # Content-Location must point at the current version id
        resp_headers = {
            h.lower(): v for h, v in versioned_obj.conn.response.getheaders()}
        content_location = quote('/v1/%s/%s/%s' % (
            self.account_name, container.name, obj_name
        )) + '?version-id=%s' % (v2_version_id,)
        self.assertEqual(content_location, resp_headers['content-location'])
        expected_content_types.append('text/jibberish02')
        self.assertContentTypes(container, expected_content_types)
        # the old version got saved off
        self.assertTotalVersions(container, 2)
        self.assert_previous_version(
            obj_name, v1_version_id, b'aaaaa', 'text/jibberish01',
            not_expected_header_keys=['X-Object-Meta-Foo'],
            check_env_expected_headers=True)
        # check that POST does not create a new version
        versioned_obj.sync_metadata(metadata={'fu': 'baz'})
        self.assertTotalVersions(container, 2)
        self.assert_previous_version(
            obj_name, v2_version_id, b'bbbbb', 'text/jibberish02',
            expected_headers={'X-Object-Meta-Fu': 'baz'})
        # if we overwrite it again, there are three versions
        resp = versioned_obj.write(b"ccccc", return_resp=True)
        v3_version_id = resp.getheader('x-object-version-id')
        expected_content_types.append('text/jibberish02')
        self.assertContentTypes(container, expected_content_types)
        self.assertTotalVersions(self.env.container, 3)
        # versioned_obj keeps the newest content
        self.assertEqual(b"ccccc", versioned_obj.read())
        # test copy from a different container
        src_container = self.env.account.container(Utils.create_name())
        self.assertTrue(src_container.create())
        src_name = Utils.create_name()
        src_obj = src_container.file(src_name)
        src_obj.write(b"ddddd", hdrs={'Content-Type': 'text/jibberish04'})
        src_obj.copy(container.name, obj_name)
        expected_content_types.append('text/jibberish04')
        self.assertContentTypes(container, expected_content_types)
        self.assertEqual(b"ddddd", versioned_obj.read())
        versioned_obj.initialize()
        self.assertEqual(versioned_obj.content_type, 'text/jibberish04')
        # make sure versions container has the previous version
        self.assertTotalVersions(self.env.container, 4)
        self.assert_previous_version(
            obj_name, v3_version_id, b'ccccc', 'text/jibberish02')
        # test delete
        # at first, delete will succeed with 204
        versioned_obj.delete()
        expected_content_types.append(
            'application/x-deleted;swift_versions_deleted=1')
        # after that, any time the delete doesn't restore the old version
        # and we will get 404 NotFound
        for x in range(3):
            with self.assertRaises(ResponseError) as cm:
                versioned_obj.delete()
            self.assertEqual(404, cm.exception.status)
            expected_content_types.append(
                'application/x-deleted;swift_versions_deleted=1')
        # finally, we have 4 versioned items and 4 delete markers total in
        # the versions container
        self.assertTotalVersions(self.env.container, 8)
        self.assertContentTypes(self.env.container, expected_content_types)
        # update versioned_obj
        versioned_obj.write(b"eeee", hdrs={'Content-Type': 'text/thanksgiving',
                            'X-Object-Meta-Bar': 'foo'})
        # verify the PUT object is kept successfully
        obj_info = versioned_obj.info()
        self.assertEqual('text/thanksgiving', obj_info['content_type'])
        # 8 plus one more write
        self.assertTotalVersions(self.env.container, 9)
        # update versioned_obj
        versioned_obj.write(b"ffff", hdrs={'Content-Type': 'text/teriyaki',
                            'X-Object-Meta-Food': 'chickin'})
        # verify the PUT object is kept successfully
        obj_info = versioned_obj.info()
        self.assertEqual('text/teriyaki', obj_info['content_type'])
        # 9 plus one more write
        self.assertTotalVersions(self.env.container, 10)
        versioned_obj.delete()
        with self.assertRaises(ResponseError) as cm:
            versioned_obj.read()
        self.assertEqual(404, cm.exception.status)
        # 10 plus delete marker
        self.assertTotalVersions(self.env.container, 11)
        return (versioned_obj, expected_content_types)
def test_overwriting(self):
versioned_obj, expected_content_types = \
self._test_overwriting_setup()
    def test_make_old_version_latest(self):
        """PUT with ?version-id promotes an old version to be the latest."""
        obj_name = Utils.create_name()
        versioned_obj = self.env.container.file(obj_name)
        # write three distinct versions, remembering each version id
        versions = [{
            'content_type': 'text/jibberish01',
            'body': b'aaaaa',
        }, {
            'content_type': 'text/jibberish02',
            'body': b'bbbbbb',
        }, {
            'content_type': 'text/jibberish03',
            'body': b'ccccccc',
        }]
        for version in versions:
            resp = versioned_obj.write(version['body'], hdrs={
                'Content-Type': version['content_type']}, return_resp=True)
            version['version_id'] = resp.getheader('x-object-version-id')
        # versions listing is newest-first, so reverse our write order
        expected = [{
            'name': obj_name,
            'content_type': version['content_type'],
            'version_id': version['version_id'],
            'hash': md5(version['body'], usedforsecurity=False).hexdigest(),
            'bytes': len(version['body'],)
        } for version in reversed(versions)]
        for item, is_latest in zip(expected, (True, False, False)):
            item['is_latest'] = is_latest
        versions_listing = self.env.container.files(parms={
            'versions': 'true', 'format': 'json'})
        # last_modified is unpredictable; drop it before comparing
        for item in versions_listing:
            item.pop('last_modified')
        self.assertEqual(expected, versions_listing)
        # promote the middle version back to latest
        versioned_obj.write(b'', parms={
            'version-id': versions[1]['version_id']})
        self.assertEqual(b'bbbbbb', versioned_obj.read())
        # only the is_latest flags should have moved
        for item, is_latest in zip(expected, (False, True, False)):
            item['is_latest'] = is_latest
        versions_listing = self.env.container.files(parms={
            'versions': 'true', 'format': 'json'})
        for item in versions_listing:
            item.pop('last_modified')
        self.assertEqual(expected, versions_listing)
def test_overwriting_with_url_encoded_object_name(self):
obj_name = Utils.create_name() + '%25ff'
versioned_obj, expected_content_types = \
self._test_overwriting_setup(obj_name)
    def _test_versioning_dlo_setup(self):
        """Build a versioned DLO: segments, a plain file, then a manifest.

        Returns (object name, manifest file) with the container holding 9
        versions total: 3 segments x 2 versions, plus 3 manifest-object
        versions (plain, DLO manifest, plain again).
        """
        if tf.in_process:
            tf.skip_if_no_xattrs()
        container = self.env.container
        obj_name = Utils.create_name()
        # write each segment twice so segments themselves are versioned
        for i in ('1', '2', '3'):
            time.sleep(.01)  # guarantee that the timestamp changes
            obj_name_seg = 'segs_' + obj_name + '/' + i
            versioned_obj = container.file(obj_name_seg)
            versioned_obj.write(i.encode('ascii'))
            # immediately overwrite
            versioned_obj.write((i + i).encode('ascii'))
        # three objects 2 versions each
        self.assertTotalVersions(self.env.container, 6)
        man_file = container.file(obj_name)
        # write a normal file first
        resp = man_file.write(
            b'old content', hdrs={'Content-Type': 'text/jibberish01'},
            return_resp=True)
        v1_version_id = resp.getheader('x-object-version-id')
        self.assertEqual(b'old content', man_file.read())
        # guarantee that the timestamp changes
        time.sleep(.01)
        # overwrite with a dlo manifest
        dlo_prefix = quote(unquote('%s/segs_%s/' % (
            self.env.container.name, obj_name)))
        resp = man_file.write(
            b'', hdrs={'Content-Type': 'text/jibberish02',
                       'X-Object-Manifest': dlo_prefix},
            return_resp=True)
        v2_version_id = resp.getheader('x-object-version-id')
        self.assertTotalVersions(self.env.container, 8)
        # reading the manifest concatenates the latest segment bodies
        self.assertEqual(b'112233', man_file.read())
        self.assert_previous_version(
            obj_name, v1_version_id, b'old content', 'text/jibberish01')
        # overwrite the manifest with a normal file
        man_file.write(b'new content')
        self.assertTotalVersions(self.env.container, 9)
        self.assertEqual(b'new content', man_file.read())
        # new most-recent archive is the dlo
        self.assert_previous_version(
            obj_name, v2_version_id, b'112233', 'text/jibberish02',
            expected_headers={'X-Object-Manifest': dlo_prefix})
        return obj_name, man_file
def test_versioning_dlo(self):
obj_name, man_file = \
self._test_versioning_dlo_setup()
man_file.delete()
with self.assertRaises(ResponseError) as cm:
man_file.read()
self.assertEqual(404, cm.exception.status)
# 9 plus one more write
self.assertTotalVersions(self.env.container, 10)
expected = [b'old content', b'112233', b'new content']
bodies = []
listing_parms = {'versions': None, 'format': 'json',
'reverse': 'true', 'prefix': obj_name}
for obj_info in self.env.container.files(parms=listing_parms)[:3]:
bodies.append(man_file.read(
parms={'version-id': obj_info['version_id']}))
self.assertEqual(expected, bodies)
    def _check_overwriting_symlink(self):
        """Overwrite a symlink with another symlink; verify both versions.

        Returns (symlink file, first target file).
        """
        # sanity
        container = self.env.container
        self.assertTrue(
            config_true_value(self.env.container.info()['versions_enabled']))
        tgt_a_name = Utils.create_name()
        tgt_b_name = Utils.create_name()
        # running tally of versions the container should hold
        expected_count = 0
        tgt_a = container.file(tgt_a_name)
        tgt_a.write(b'aaaaa', hdrs={'Content-Type': 'text/jibberish01'})
        expected_count += 1
        tgt_b = container.file(tgt_b_name)
        tgt_b.write(b"bbbbb")
        expected_count += 1
        symlink_name = Utils.create_name()
        sym_tgt_header = quote(unquote('%s/%s' % (container.name, tgt_a_name)))
        sym_headers_a = {'X-Symlink-Target': sym_tgt_header}
        symlink = container.file(symlink_name)
        resp = symlink.write(b'', hdrs=sym_headers_a, return_resp=True)
        v1_version_id = resp.getheader('x-object-version-id')
        expected_count += 1
        # reading the link returns the first target's data
        self.assertEqual(b"aaaaa", symlink.read())
        # repoint the link at the second target; the old link is versioned
        sym_headers_b = {'X-Symlink-Target': '%s/%s' % (container.name,
                                                        tgt_b_name)}
        symlink.write(b"", hdrs=sym_headers_b)
        expected_count += 1
        self.assertEqual(b"bbbbb", symlink.read())
        self.assertTotalVersions(container, expected_count)
        # the archived link still resolves to the first target's data
        self.assert_previous_version(
            symlink_name, v1_version_id, b'aaaaa', 'text/jibberish01')
        return symlink, tgt_a
def test_overwriting_symlink(self):
if 'symlink' not in cluster_info:
raise SkipTest("Symlinks not enabled")
symlink, target = self._check_overwriting_symlink()
# test delete
symlink.delete()
with self.assertRaises(ResponseError) as cm:
symlink.read()
self.assertEqual(404, cm.exception.status)
def _setup_symlink(self):
tgt_name = 'target-' + Utils.create_name()
target = self.env.container.file(tgt_name)
target.write(b'target object data',
hdrs={'Content-Type': 'text/jibberish01'})
symlink = self.env.container.file('symlink')
resp = symlink.write(b'', hdrs={
'Content-Type': 'application/symlink',
'X-Symlink-Target': '%s/%s' % (
self.env.container.name, target.name)},
return_resp=True)
symlink_version_id = resp.getheader('x-object-version-id')
return symlink, symlink_version_id, target
    def _check_copy_destination_symlink(self):
        """Overwrite a symlink, then restore it via a ?version-id copy.

        Returns (symlink file, target file) with 4 versions in the
        container: target, original link, overwrite, restored link.
        """
        symlink, sym_version_id, target = self._setup_symlink()
        self.assertEqual(b'target object data', symlink.read())
        symlink.write(b'this is not a symlink')
        # target, symlink, and new 'not a symlink' overwritten by write
        self.assertTotalVersions(self.env.container, 3)
        self.assert_previous_version(
            symlink.name, sym_version_id,
            b'target object data', 'text/jibberish01')
        # the symlink is still a symlink
        prev_version = self.env.container.file(symlink.name)
        prev_version.initialize(parms={'version-id': sym_version_id})
        self.assertEqual('application/symlink',
                         prev_version.info(parms={
                             'version-id': sym_version_id,
                             'symlink': 'get'})['content_type'])
        # copying the old version onto itself restores the link as latest
        prev_version.copy(self.env.container.name, symlink.name,
                          parms={'version-id': sym_version_id,
                                 'symlink': 'get'})
        self.assertEqual(b'target object data', symlink.read())
        self.assertTotalVersions(self.env.container, 4)
        return symlink, target
def test_copy_destination_restore_symlink(self):
if 'symlink' not in cluster_info:
raise SkipTest("Symlinks not enabled")
symlink, target = self._check_copy_destination_symlink()
symlink.delete()
with self.assertRaises(ResponseError) as cm:
symlink.read()
self.assertEqual(404, cm.exception.status)
# symlink & target, plus overwrite and restore, then delete marker
self.assertTotalVersions(self.env.container, 5)
    def test_versioned_staticlink(self):
        """Static links are versioned alongside their targets.

        Overwriting the target makes the existing static link's etag
        check fail (409 on read) until a fresh static link is written
        with the new target etag.
        """
        tgt_name = 'target-' + Utils.create_name()
        link_name = 'staticlink-' + Utils.create_name()
        target = self.env.container.file(tgt_name)
        staticlink = self.env.container.file(link_name)
        target_resp = target.write(b'target object data', hdrs={
            'Content-Type': 'text/jibberish01'}, return_resp=True)
        # static link: X-Symlink-Target-Etag pins the target's etag
        staticlink.write(b'', hdrs={
            'X-Symlink-Target': '%s/%s' % (
                self.env.container.name, target.name),
            'X-Symlink-Target-Etag': target_resp.getheader('etag'),
        }, cfg={'no_content_type': True})
        self.assertEqual(b'target object data', staticlink.read())
        listing_parms = {'format': 'json', 'versions': 'true'}
        prev_versions = self.env.container.files(parms=listing_parms)
        # the link entry is 0 bytes but carries the target's
        # content type (no explicit Content-Type was sent with it)
        expected = [{
            'name': link_name,
            'bytes': 0,
            'content_type': 'text/jibberish01',
            'is_latest': True,
        }, {
            'name': tgt_name,
            'bytes': 18,
            'content_type': 'text/jibberish01',
            'is_latest': True,
        }]
        self.assertEqual(expected, [{
            k: i[k] for k in (
                'name', 'bytes', 'content_type', 'is_latest',
            )} for i in prev_versions])
        # overwrite the target; the pinned etag no longer matches
        target_resp = target.write(b'updated target data', hdrs={
            'Content-Type': 'text/jibberish02'}, return_resp=True)
        with self.assertRaises(ResponseError) as caught:
            staticlink.read()
        self.assertEqual(409, caught.exception.status)
        # write a new static link pinned to the updated target etag
        staticlink.write(b'', hdrs={
            'X-Symlink-Target': '%s/%s' % (
                self.env.container.name, target.name),
            'X-Symlink-Target-Etag': target_resp.getheader('etag'),
        }, cfg={'no_content_type': True})
        self.assertEqual(b'updated target data', staticlink.read())
        listing_parms = {'format': 'json', 'versions': 'true'}
        prev_versions = self.env.container.files(parms=listing_parms)
        # both link versions and both target versions are listed,
        # newest first for each name
        expected = [{
            'name': link_name,
            'bytes': 0,
            'content_type': 'text/jibberish02',
            'is_latest': True,
        }, {
            'name': link_name,
            'bytes': 0,
            'content_type': 'text/jibberish01',
            'is_latest': False,
        }, {
            'name': tgt_name,
            'bytes': 19,
            'content_type': 'text/jibberish02',
            'is_latest': True,
        }, {
            'name': tgt_name,
            'bytes': 18,
            'content_type': 'text/jibberish01',
            'is_latest': False,
        }]
        self.assertEqual(expected, [{
            k: i[k] for k in (
                'name', 'bytes', 'content_type', 'is_latest',
            )} for i in prev_versions])
def test_link_to_versioned_object(self):
# setup target object
tgt_name = 'target-' + Utils.create_name()
target = self.env.container.file(tgt_name)
target_resp = target.write(b'target object data', hdrs={
'Content-Type': 'text/jibberish01'}, return_resp=True)
# setup dynamic link object from a non-versioned container
link_container_name = 'link-container-' + Utils.create_name()
link_name = 'link-' + Utils.create_name()
link_cont = self.env.account.container(link_container_name)
self.assertTrue(link_cont.create())
link = link_cont.file(link_name)
self.assertTrue(link.write(b'', hdrs={
'X-Symlink-Target': '%s/%s' % (
self.env.container.name, tgt_name),
}, cfg={'no_content_type': True}))
self.assertEqual(b'target object data', link.read())
# setup static link object from a non-versioned container
staticlink_name = 'staticlink-' + Utils.create_name()
staticlink = link_cont.file(staticlink_name)
self.assertTrue(staticlink.write(b'', hdrs={
'X-Symlink-Target': '%s/%s' % (
self.env.container.name, tgt_name),
'X-Symlink-Target-Etag': target_resp.getheader('etag'),
}, cfg={'no_content_type': True}))
self.assertEqual(b'target object data', link.read())
def test_versioned_post(self):
# first we'll create a versioned object
obj_name = Utils.create_name()
obj = self.env.container.file(obj_name)
resp = obj.write(b'version1', hdrs={
'Content-Type': 'text/jibberish10'
}, return_resp=True)
v1_version_id = resp.getheader('x-object-version-id')
# send post request
obj.post(hdrs={'Content-Type': 'text/updated20'})
# head request should show updated content-type
obj_info = obj.info()
self.assertEqual(obj_info['content_type'], 'text/updated20')
listing_parms = {'format': 'json', 'versions': None}
prev_versions = self.env.container.files(parms=listing_parms)
self.assertEqual(1, len(prev_versions))
for pv in prev_versions:
pv.pop('last_modified')
self.assertEqual(prev_versions, [{
'name': obj_name,
'bytes': 8,
'content_type': 'text/updated20',
'hash': '966634ebf2fc135707d6753692bf4b1e',
'version_id': v1_version_id,
'is_latest': True,
}])
def test_unversioned_post(self):
# first we'll create a versioned object
obj_name = Utils.create_name()
obj = self.env.container.file(obj_name)
resp = obj.write(b'version1', hdrs={
'Content-Type': 'text/jibberish10'
}, return_resp=True)
v1_version_id = resp.getheader('x-object-version-id')
# now, turn off versioning
self.env.container.update_metadata(
hdrs={self.env.versions_header_key: 'False'})
obj.post(hdrs={'Content-Type': 'text/updated20'})
# head request should show updated content-type
obj_info = obj.info()
self.assertEqual(obj_info['content_type'], 'text/updated20')
listing_parms = {'format': 'json', 'versions': None}
prev_versions = self.env.container.files(parms=listing_parms)
self.assertEqual(1, len(prev_versions))
for pv in prev_versions:
pv.pop('last_modified')
self.assertEqual(prev_versions, [{
'name': obj_name,
'bytes': 8,
'content_type': 'text/updated20',
'hash': '966634ebf2fc135707d6753692bf4b1e',
'is_latest': True,
'version_id': v1_version_id,
'is_latest': True,
}])
    def test_unversioned_overwrite_and_delete(self):
        """Once versioning is disabled, overwrites and deletes create no
        new versions or delete markers; the existing archive version is
        left untouched.
        """
        # first we'll create a versioned object
        obj_name = Utils.create_name()
        obj = self.env.container.file(obj_name)
        resp = obj.write(b'version1', hdrs={
            'Content-Type': 'text/jibberish18'
        }, return_resp=True)
        v1_version_id = resp.getheader('x-object-version-id')
        self.assertTotalVersions(self.env.container, 1)
        # now, turn off versioning, and delete source obj
        self.env.container.update_metadata(
            hdrs={self.env.versions_header_key: 'False'})
        obj.delete()
        # no delete markers, archive listing is unchanged
        self.assertTotalVersions(self.env.container, 1)
        # sanity, object is gone
        self.assertRaises(ResponseError, obj.read)
        self.assertEqual(404, obj.conn.response.status)
        # but, archive version is unmodified
        self.assert_previous_version(obj_name, v1_version_id, b'version1',
                                     'text/jibberish18')
        # a new overwrite will not have a version-id
        resp = obj.write(b'version2', hdrs={
            'Content-Type': 'text/jibberish19'
        }, return_resp=True)
        self.assertIsNone(resp.getheader('x-object-version-id'))
        self.assertTotalVersions(self.env.container, 2)
        # a second unversioned write replaces version2 in place:
        # the total stays at 2
        resp = obj.write(b'version3', hdrs={
            'Content-Type': 'text/jibberish20'
        }, return_resp=True)
        self.assertIsNone(resp.getheader('x-object-version-id'))
        self.assertTotalVersions(self.env.container, 2)
        # deleting the unversioned object removes it outright
        obj.delete()
        self.assertTotalVersions(self.env.container, 1)
        # a repeated delete of the now-missing object is a no-op
        obj.delete(tolerate_missing=True)
        self.assertTotalVersions(self.env.container, 1)
    def test_versioned_overwrite_from_old_version(self):
        """Restoring an old version via COPY creates a brand-new
        version (new version-id) with the old version's data, content
        type and etag; the old version itself remains in the archive.
        """
        versioned_obj_name = Utils.create_name()
        obj = self.env.container.file(versioned_obj_name)
        resp = obj.write(b'version1', hdrs={
            'Content-Type': 'text/jibberish32'
        }, return_resp=True)
        v1_version_id = resp.getheader('x-object-version-id')
        v1_etag = normalize_etag(resp.getheader('etag'))
        resp = obj.write(b'version2', hdrs={
            'Content-Type': 'text/jibberish33'
        }, return_resp=True)
        v2_version_id = resp.getheader('x-object-version-id')
        v2_etag = normalize_etag(resp.getheader('etag'))
        # sanity
        self.assertEqual(b'version2', obj.read())
        self.assertTotalVersions(self.env.container, 2)
        # reverse listing: oldest version first
        listing_parms = {'format': 'json', 'reverse': 'true', 'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(2, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': versioned_obj_name,
            'bytes': 8,
            'content_type': 'text/jibberish32',
            'hash': v1_etag,
            'version_id': v1_version_id,
            'is_latest': False,
        }, {
            'name': versioned_obj_name,
            'bytes': 8,
            'content_type': 'text/jibberish33',
            'hash': v2_etag,
            'version_id': v2_version_id,
            'is_latest': True,
        }])
        # restore old version1 back in place with a copy request
        # should get a new version-id
        old_version_obj = self.env.container.file(versioned_obj_name)
        resp = old_version_obj.copy(self.env.container.name,
                                    versioned_obj_name,
                                    parms={'version-id': v1_version_id},
                                    return_resp=True)
        v3_version_id = resp.getheader('x-object-version-id')
        listing_parms = {'format': 'json', 'reverse': 'true', 'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(3, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        # v3 shares v1's data/etag/content type but has its own id and
        # is now the latest
        self.assertEqual(prev_versions, [{
            'name': versioned_obj_name,
            'bytes': 8,
            'content_type': 'text/jibberish32',
            'hash': v1_etag,
            'version_id': v1_version_id,
            'is_latest': False,
        }, {
            'name': versioned_obj_name,
            'bytes': 8,
            'content_type': 'text/jibberish33',
            'hash': v2_etag,
            'version_id': v2_version_id,
            'is_latest': False,
        }, {
            'name': versioned_obj_name,
            'bytes': 8,
            'content_type': 'text/jibberish32',
            'hash': v1_etag,
            'version_id': v3_version_id,
            'is_latest': True,
        }])
        self.assertEqual(b'version1', obj.read())
        obj_info = obj.info()
        self.assertEqual('text/jibberish32', obj_info['content_type'])
        self.assertEqual(v1_etag, normalize_etag(obj_info['etag']))
def test_delete_with_version_api_old_object(self):
versioned_obj_name = Utils.create_name()
obj = self.env.container.file(versioned_obj_name)
resp = obj.write(b'version1', hdrs={
'Content-Type': 'text/jibberish32'
}, return_resp=True)
v1_version_id = resp.getheader('x-object-version-id')
obj.write(b'version2', hdrs={'Content-Type': 'text/jibberish33'})
# sanity
self.assertEqual(b'version2', obj.read())
self.assertTotalVersions(self.env.container, 2)
obj.delete(parms={'version-id': v1_version_id})
self.assertEqual(b'version2', obj.read())
self.assertTotalVersions(self.env.container, 1)
def test_delete_with_version_api_current_object(self):
versioned_obj_name = Utils.create_name()
obj = self.env.container.file(versioned_obj_name)
obj.write(b'version1', hdrs={'Content-Type': 'text/jibberish32'})
resp = obj.write(b'version2', hdrs={
'Content-Type': 'text/jibberish33'
}, return_resp=True)
v2_version_id = resp.getheader('x-object-version-id')
# sanity
self.assertEqual(b'version2', obj.read())
self.assertTotalVersions(self.env.container, 2)
obj.delete(parms={'version-id': v2_version_id})
with self.assertRaises(ResponseError) as cm:
obj.read()
self.assertEqual(404, cm.exception.status)
self.assertTotalVersions(self.env.container, 1)
    def test_delete_delete_marker_with_version_api(self):
        """A delete marker can itself be deleted via ?version-id=."""
        versioned_obj_name = Utils.create_name()
        obj = self.env.container.file(versioned_obj_name)
        obj.write(b'version1', hdrs={'Content-Type': 'text/jibberish32'})
        obj.delete()
        # the DELETE response carries the delete marker's version-id
        resp_headers = {
            h.lower(): v for h, v in obj.conn.response.getheaders()}
        self.assertIn('x-object-version-id', resp_headers)
        dm_version_id = resp_headers['x-object-version-id']
        # sanity: HEAD of the delete marker fails, but the error
        # response still reports its version-id and marker content type
        with self.assertRaises(ResponseError) as cm:
            obj.info(parms={'version-id': dm_version_id})
        resp_headers = {
            h.lower(): v for h, v in cm.exception.headers}
        self.assertEqual(dm_version_id,
                         resp_headers['x-object-version-id'])
        self.assertEqual(DELETE_MARKER_CONTENT_TYPE,
                         resp_headers['content-type'])
        # deleting the marker by id echoes the same version-id back
        obj.delete(parms={'version-id': dm_version_id})
        resp_headers = {
            h.lower(): v for h, v in obj.conn.response.getheaders()}
        self.assertEqual(dm_version_id,
                         resp_headers['x-object-version-id'])
def test_delete_with_version_api_last_object(self):
versioned_obj_name = Utils.create_name()
obj = self.env.container.file(versioned_obj_name)
resp = obj.write(b'version1', hdrs={
'Content-Type': 'text/jibberish1'
}, return_resp=True)
v1_version_id = resp.getheader('x-object-version-id')
# sanity
self.assertEqual(b'version1', obj.read())
self.assertTotalVersions(self.env.container, 1)
# delete
obj.delete(parms={'version-id': v1_version_id})
with self.assertRaises(ResponseError) as cm:
obj.read()
self.assertEqual(404, cm.exception.status)
self.assertTotalVersions(self.env.container, 0)
def test_delete_with_version_api_null_version(self):
versioned_obj_name = Utils.create_name()
obj = self.env.container.file(versioned_obj_name)
obj.write(b'version1', hdrs={'Content-Type': 'text/jibberish32'})
obj.write(b'version2', hdrs={'Content-Type': 'text/jibberish33'})
# sanity
self.assertEqual(b'version2', obj.read())
self.assertTotalVersions(self.env.container, 2)
obj.delete(parms={'version-id': 'null'})
with self.assertRaises(ResponseError) as caught:
obj.read()
self.assertEqual(404, caught.exception.status)
# no versions removed
self.assertTotalVersions(self.env.container, 2)
def test_delete_with_version_api_old_object_disabled(self):
versioned_obj_name = Utils.create_name()
obj = self.env.container.file(versioned_obj_name)
resp = obj.write(b'version1', hdrs={
'Content-Type': 'text/jibberish32'
}, return_resp=True)
v1_version_id = resp.getheader('x-object-version-id')
obj.write(b'version2', hdrs={'Content-Type': 'text/jibberish33'})
# disabled versioning
self.env.container.update_metadata(
hdrs={self.env.versions_header_key: 'False'})
# sanity
self.assertEqual(b'version2', obj.read())
self.assertTotalVersions(self.env.container, 2)
obj.delete(parms={'version-id': v1_version_id})
self.assertEqual(b'version2', obj.read())
self.assertTotalVersions(self.env.container, 1)
def test_delete_with_version_api_current_object_disabled(self):
versioned_obj_name = Utils.create_name()
obj = self.env.container.file(versioned_obj_name)
obj.write(b'version1', hdrs={'Content-Type': 'text/jibberish32'})
resp = obj.write(b'version2', hdrs={
'Content-Type': 'text/jibberish33'
}, return_resp=True)
v2_version_id = resp.getheader('x-object-version-id')
# disabled versioning
self.env.container.update_metadata(
hdrs={self.env.versions_header_key: 'False'})
# sanity
self.assertEqual(b'version2', obj.read())
self.assertTotalVersions(self.env.container, 2)
obj.delete(parms={'version-id': v2_version_id})
with self.assertRaises(ResponseError) as cm:
obj.read()
self.assertEqual(404, cm.exception.status)
self.assertTotalVersions(self.env.container, 1)
def test_delete_with_version_api_old_object_current_unversioned(self):
versioned_obj_name = Utils.create_name()
obj = self.env.container.file(versioned_obj_name)
resp = obj.write(b'version1', hdrs={
'Content-Type': 'text/jibberish32'
}, return_resp=True)
v1_version_id = resp.getheader('x-object-version-id')
# disabled versioning
self.env.container.update_metadata(
hdrs={self.env.versions_header_key: 'False'})
# write unversioned object (i.e., version-id='null')
obj.write(b'version2', hdrs={'Content-Type': 'text/jibberish33'})
# sanity
self.assertEqual(b'version2', obj.read())
self.assertTotalVersions(self.env.container, 2)
obj.delete(parms={'version-id': v1_version_id})
self.assertEqual(b'version2', obj.read())
self.assertTotalVersions(self.env.container, 1)
class TestObjectVersioningUTF8(Base2, TestObjectVersioning):
    # Re-runs TestObjectVersioning with Base2 mixed in -- presumably
    # Base2 switches name generation to UTF-8; confirm its definition.
    pass
class TestContainerOperations(TestObjectVersioningBase):
    def _prep_object_versions(self):
        """Create three objects with known version histories.

        * obj1 ('c...'): three versions plus a delete marker (currently
          deleted); a PUT with a bad ETag (422) is verified not to add
          a version
        * obj2 ('b...'): a single version
        * obj3 ('a...'): never existed, just a delete marker

        Name prefixes 'a' < 'b' < 'c' give listings a known order.
        Returns six {'name', 'id'} dicts:
        (obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1).
        """
        # object with multiple versions and currently deleted
        obj1_v1 = {}
        obj1_v1['name'] = 'c' + Utils.create_name()
        obj = self.env.container.file(obj1_v1['name'])
        # v1
        resp = obj.write(b'version1', hdrs={
            'Content-Type': 'text/jibberish11',
            'ETag': md5(b'version1', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj1_v1['id'] = resp.getheader('x-object-version-id')
        # v2
        resp = obj.write(b'version2', hdrs={
            'Content-Type': 'text/jibberish12',
            'ETag': md5(b'version2', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj1_v2 = {}
        obj1_v2['name'] = obj1_v1['name']
        obj1_v2['id'] = resp.getheader('x-object-version-id')
        # v3
        resp = obj.write(b'version3', hdrs={
            'Content-Type': 'text/jibberish13',
            'ETag': md5(b'version3', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj1_v3 = {}
        obj1_v3['name'] = obj1_v1['name']
        obj1_v3['id'] = resp.getheader('x-object-version-id')
        # a failed write (etag mismatch) must not create a version
        with self.assertRaises(ResponseError) as cm:
            obj.write(b'version4', hdrs={
                'Content-Type': 'text/jibberish11',
                'ETag': 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
            })
        self.assertEqual(422, cm.exception.status)
        # v4: the delete marker
        obj.delete()
        resp_headers = {
            h.lower(): v for h, v in obj.conn.response.getheaders()}
        obj1_v4 = {}
        obj1_v4['name'] = obj1_v1['name']
        obj1_v4['id'] = resp_headers.get('x-object-version-id')
        # object with just a single version
        obj2_v1 = {}
        obj2_v1['name'] = 'b' + Utils.create_name()
        obj = self.env.container.file(obj2_v1['name'])
        resp = obj.write(b'version1', hdrs={
            'Content-Type': 'text/jibberish20',
            'ETag': '966634ebf2fc135707d6753692bf4b1e',
        }, return_resp=True)
        obj2_v1['id'] = resp.getheader('x-object-version-id')
        # object never existed, just a delete marker
        obj3_v1 = {}
        obj3_v1['name'] = 'a' + Utils.create_name()
        obj = self.env.container.file(obj3_v1['name'])
        obj.delete(tolerate_missing=True)
        self.assertEqual(obj.conn.response.status, 404)
        resp_headers = {
            h.lower(): v for h, v in obj.conn.response.getheaders()}
        obj3_v1['id'] = resp_headers.get('x-object-version-id')
        return (obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1)
def _prep_unversioned_objects(self):
objs = (
'deleted' + Utils.create_name(),
'in' + Utils.create_name(),
'order' + Utils.create_name(),
)
# object with multiple writes and currently deleted
obj = self.env.unversioned_container.file(objs[0])
obj.write(b'data', hdrs={
'Content-Type': 'text/jibberish11',
'ETag': md5(b'data', usedforsecurity=False).hexdigest(),
})
obj.delete()
obj = self.env.unversioned_container.file(objs[1])
obj.write(b'first', hdrs={
'Content-Type': 'text/blah-blah-blah',
'ETag': md5(b'first', usedforsecurity=False).hexdigest(),
})
obj = self.env.unversioned_container.file(objs[2])
obj.write(b'second', hdrs={
'Content-Type': 'text/plain',
'ETag': md5(b'second', usedforsecurity=False).hexdigest(),
})
return objs
    def test_list_all_versions(self):
        """A ?versions listing shows every version: names ascending,
        and within a name the newest version first.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list all versions in container
        listing_parms = {'format': 'json', 'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(6, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        # delete markers appear as 0-byte application/x-deleted entries
        self.assertEqual(prev_versions, [{
            'name': obj3_v1['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj3_v1['id'],
        }, {
            'name': obj2_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish20',
            'hash': '966634ebf2fc135707d6753692bf4b1e',
            'is_latest': True,
            'version_id': obj2_v1['id'],
        }, {
            'name': obj1_v4['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj1_v4['id'],
        }, {
            'name': obj1_v3['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }, {
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }])
    def test_list_all_versions_reverse(self):
        """reverse=true produces the exact mirror image of the normal
        ?versions listing order.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list all versions in container in reverse order
        listing_parms = {'format': 'json', 'reverse': 'true', 'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(6, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }, {
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v3['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }, {
            'name': obj1_v4['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj1_v4['id'],
        }, {
            'name': obj2_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish20',
            'hash': '966634ebf2fc135707d6753692bf4b1e',
            'is_latest': True,
            'version_id': obj2_v1['id'],
        }, {
            'name': obj3_v1['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj3_v1['id'],
        }])
    def test_list_versions_prefix(self):
        """A prefix ?versions listing returns every version of the
        matching name, newest (the delete marker) first.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list all versions for a given object
        listing_parms = {'format': 'json',
                         'versions': None, 'prefix': obj1_v1['name']}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(4, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj1_v4['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj1_v4['id'],
        }, {
            'name': obj1_v3['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }, {
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }])
    def test_list_versions_prefix_reverse(self):
        """Prefix plus reverse=true lists the matching object's versions
        oldest first.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list all versions for a given object in reverse order
        listing_parms = {'format': 'json', 'reverse': 'true',
                         'versions': None, 'prefix': obj1_v1['name']}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(4, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj1_v4['id'],
        }])
    def test_list_limit(self):
        """limit=N truncates the ?versions listing to the first N
        entries of the normal order.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list all versions in container
        listing_parms = {'format': 'json',
                         'limit': 3,
                         'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(3, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj3_v1['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj3_v1['id'],
        }, {
            'name': obj2_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish20',
            'hash': '966634ebf2fc135707d6753692bf4b1e',
            'is_latest': True,
            'version_id': obj2_v1['id'],
        }, {
            'name': obj1_v4['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj1_v4['id'],
        }])
    def test_list_limit_marker(self):
        """marker=<name> skips all versions up to and including that
        name; limit then truncates the remainder.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list all versions in container
        listing_parms = {'format': 'json',
                         'limit': 2,
                         'marker': obj2_v1['name'],
                         'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(2, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj1_v4['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj1_v4['id'],
        }, {
            'name': obj1_v3['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }])
    def test_list_version_marker(self):
        """marker + version_marker resumes the listing just after the
        given version of the given name.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list all versions starting with version_marker
        listing_parms = {'format': 'json',
                         'marker': obj1_v3['name'],
                         'version_marker': obj1_v3['id'],
                         'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(2, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }])
    def test_list_version_marker_reverse(self):
        """With reverse=true, marker + version_marker resumes just after
        the given version in the reversed order.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list all versions starting with version_marker in reverse order
        listing_parms = {'format': 'json',
                         'marker': obj1_v3['name'],
                         'version_marker': obj1_v3['id'],
                         'reverse': 'true',
                         'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(3, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj1_v4['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj1_v4['id'],
        }, {
            'name': obj2_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish20',
            'hash': '966634ebf2fc135707d6753692bf4b1e',
            'is_latest': True,
            'version_id': obj2_v1['id'],
        }, {
            'name': obj3_v1['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj3_v1['id'],
        }])
    def test_list_prefix_version_marker(self):
        """prefix combines with marker + version_marker: resume after
        the given version, limited to names under the prefix.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list versions with prefix starting with version_marker
        listing_parms = {'format': 'json',
                         'prefix': obj1_v3['name'],
                         'marker': obj1_v3['name'],
                         'version_marker': obj1_v3['id'],
                         'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(2, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }])
    def test_list_prefix_version_marker_reverse(self):
        """With reverse=true plus prefix, only the delete marker follows
        v3 of obj1 in the reversed, prefix-limited order.
        """
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        # list versions with prefix starting with version_marker
        # in reverse order
        listing_parms = {'format': 'json',
                         'prefix': obj1_v3['name'],
                         'marker': obj1_v3['name'],
                         'version_marker': obj1_v3['id'],
                         'reverse': 'true',
                         'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(1, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj1_v4['name'],
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': True,
            'version_id': obj1_v4['id'],
        }])
def test_unacceptable(self):
def do_test(format):
with self.assertRaises(ResponseError) as caught:
self.env.container.files(parms={
'format': format, 'versions': None})
self.assertEqual(caught.exception.status, 406)
do_test('plain')
do_test('xml')
def do_test(accept):
with self.assertRaises(ResponseError) as caught:
self.env.container.files(hdrs={'Accept': accept},
parms={'versions': None})
self.assertEqual(caught.exception.status, 406)
do_test('text/plain')
do_test('text/xml')
do_test('application/xml')
do_test('foo/bar')
    def testFileListingLimitMarkerPrefix(self):
        """limit/marker/prefix behave consistently between the normal
        listing and the ?versions listing, where every name appears
        twice (each object was written twice).
        """
        cont = self.env.container
        files = ['apple', 'banana', 'cacao', 'date', 'elderberry']
        for f in files:
            file_item = cont.file(f)
            self.assertTrue(file_item.write_random())
            # immediately overwrite, so each name has two versions
            self.assertTrue(file_item.write_random())
            time.sleep(.01)  # guarantee that the timestamp changes
        # sanity
        for i in range(len(files)):
            f = files[i]
            for j in range(1, len(files) - i):
                self.assertEqual(cont.files(parms={'limit': j, 'marker': f}),
                                 files[i + 1: i + j + 1])
            self.assertEqual(cont.files(parms={'marker': f}), files[i + 1:])
            self.assertEqual(cont.files(parms={'marker': f, 'prefix': f}), [])
            self.assertEqual(cont.files(parms={'prefix': f}), [f])
        # repeat items in files list
        versions = [f2 for f1 in files for f2 in (f1,) * 2]
        # now list versions too
        # v tracks the extra offset from duplicated names (v == i each
        # pass), so i + v + 2 indexes the first entry after both copies
        # of the marker name f
        v = 0
        for i in range(len(files)):
            f = files[i]
            for j in range(1, len(files) - i):
                self.assertEqual(versions[i + v + 2: i + j + v + 2], [
                    item['name'] for item in cont.files(parms={
                        'limit': j, 'marker': f, 'versions': None})])
            self.assertEqual(versions[v + i + 2:], [
                item['name'] for item in cont.files(parms={
                    'marker': f, 'versions': None})])
            self.assertEqual(cont.files(parms={'marker': f, 'prefix': f,
                                               'versions': None}), [])
            self.assertEqual([f, f], [
                item['name'] for item in cont.files(parms={
                    'prefix': f, 'versions': None})])
            v = v + 1
    def testPrefixAndLimit(self):
        """prefix and limit work for ?versions listings, where each of
        the 10 objects per prefix has two versions.
        """
        cont = self.env.container
        prefix_file_count = 10
        limit_count = 2
        prefixs = ['apple/', 'banana/', 'cacao/']
        prefix_files = {}
        for prefix in prefixs:
            prefix_files[prefix] = []
            for i in range(prefix_file_count):
                file_item = cont.file(prefix + Utils.create_name())
                self.assertTrue(file_item.write_random())
                # write twice so each name has two versions
                self.assertTrue(file_item.write_random())
                prefix_files[prefix].append(file_item.name)
                time.sleep(.01)  # guarantee that the timestamp changes
        # each name duplicated, as it will appear in a versions listing
        versions_prefix_files = {}
        for prefix in prefixs:
            versions_prefix_files[prefix] = [f2 for f1 in prefix_files[prefix]
                                             for f2 in (f1,) * 2]
        # sanity
        for format_type in [None, 'json', 'xml']:
            for prefix in prefixs:
                files = cont.files(parms={'prefix': prefix,
                                          'format': format_type})
                if isinstance(files[0], dict):
                    files = [x.get('name', x.get('subdir')) for x in files]
                self.assertEqual(files, sorted(prefix_files[prefix]))
        # list versions
        for format_type in [None, 'json']:
            for prefix in prefixs:
                files = cont.files(parms={'prefix': prefix,
                                          'versions': None,
                                          'format': format_type})
                if isinstance(files[0], dict):
                    files = [x.get('name', x.get('subdir')) for x in files]
                self.assertEqual(files, sorted(versions_prefix_files[prefix]))
        # list versions with a limit
        for format_type in [None, 'json']:
            for prefix in prefixs:
                files = cont.files(parms={'limit': limit_count,
                                          'versions': None,
                                          'prefix': prefix,
                                          'format': format_type})
                if isinstance(files[0], dict):
                    files = [x.get('name', x.get('subdir')) for x in files]
                self.assertEqual(len(files), limit_count)
                for file_item in files:
                    self.assertTrue(file_item.startswith(prefix))
    def testListDelimiter(self):
        """Delimiter rollup works in both latest-only and versions listings.

        A delete marker ('del-baz') must be hidden from the latest-only
        listing but visible (rolled up as 'del-') in a versions listing;
        an object written while versioning is off ('off-xyz') appears in
        both with a null version-id.
        """
        cont = self.env.container
        delimiter = '-'
        files = ['test', delimiter.join(['test', 'bar']),
                 delimiter.join(['test', 'foo'])]
        for f in files:
            file_item = cont.file(f)
            self.assertTrue(file_item.write_random())
        # object with no current version, just a delete marker
        del_file = 'del-baz'
        obj = self.env.container.file(del_file)
        obj.delete(tolerate_missing=True)
        self.assertEqual(obj.conn.response.status, 404)
        # now, turn off versioning and write an un-versioned obj
        self.env.container.update_metadata(
            hdrs={self.env.versions_header_key: 'False'})
        # a new write will not have a version-id
        off_file = 'off-xyz'
        obj = self.env.container.file(off_file)
        resp = obj.write(b'unversioned', return_resp=True)
        self.assertIsNone(resp.getheader('x-object-version-id'))
        # sanity
        # list latest, delete marker should not show up
        for format_type in [None, 'json', 'xml']:
            results = cont.files(parms={'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['off-xyz', 'test', 'test-bar',
                                       'test-foo'])
            results = cont.files(parms={'delimiter': delimiter,
                                        'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['off-', 'test', 'test-'])
            results = cont.files(parms={'delimiter': delimiter,
                                        'format': format_type,
                                        'reverse': 'yes'})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['test-', 'test', 'off-'])
        # list versions, we should see the delete marker here
        for format_type in [None, 'json']:
            results = cont.files(parms={'versions': None,
                                        'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['del-baz', 'off-xyz', 'test',
                                       'test-bar', 'test-foo'])
            results = cont.files(parms={'delimiter': delimiter,
                                        'versions': None,
                                        'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['del-', 'off-', 'test', 'test-'])
            results = cont.files(parms={'delimiter': delimiter,
                                        'versions': None,
                                        'format': format_type,
                                        'reverse': 'yes'})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['test-', 'test', 'off-', 'del-'])
    def testListMultiCharDelimiter(self):
        """Same as testListDelimiter but with a multi-character delimiter.

        Uses '-&' to verify multi-char delimiter rollup in both the
        latest-only and the ``?versions`` listings, including delete
        markers and objects written while versioning is disabled.
        """
        cont = self.env.container
        delimiter = '-&'
        files = ['test', delimiter.join(['test', 'bar']),
                 delimiter.join(['test', 'foo'])]
        for f in files:
            file_item = cont.file(f)
            self.assertTrue(file_item.write_random())
        # object with no current version, just a delete marker
        del_file = 'del-&baz'
        obj = self.env.container.file(del_file)
        obj.delete(tolerate_missing=True)
        self.assertEqual(obj.conn.response.status, 404)
        # now, turn off versioning and write an un-versioned obj
        self.env.container.update_metadata(
            hdrs={self.env.versions_header_key: 'False'})
        # a new write will not have a version-id
        off_file = 'off-&xyz'
        obj = self.env.container.file(off_file)
        resp = obj.write(b'unversioned', return_resp=True)
        self.assertIsNone(resp.getheader('x-object-version-id'))
        # sanity
        # list latest, delete marker should not show up
        for format_type in [None, 'json', 'xml']:
            results = cont.files(parms={'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['off-&xyz', 'test', 'test-&bar',
                                       'test-&foo'])
            results = cont.files(parms={'delimiter': delimiter,
                                        'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['off-&', 'test', 'test-&'])
            results = cont.files(parms={'delimiter': delimiter,
                                        'format': format_type,
                                        'reverse': 'yes'})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['test-&', 'test', 'off-&'])
        # list versions, we should see the delete marker here
        for format_type in [None, 'json']:
            results = cont.files(parms={'versions': None,
                                        'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['del-&baz', 'off-&xyz', 'test',
                                       'test-&bar', 'test-&foo'])
            results = cont.files(parms={'delimiter': delimiter,
                                        'versions': None,
                                        'format': format_type})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['del-&', 'off-&', 'test', 'test-&'])
            results = cont.files(parms={'delimiter': delimiter,
                                        'versions': None,
                                        'format': format_type,
                                        'reverse': 'yes'})
            if isinstance(results[0], dict):
                results = [x.get('name', x.get('subdir')) for x in results]
            self.assertEqual(results, ['test-&', 'test', 'off-&', 'del-&'])
    def test_bytes_count(self):
        """Container stats reflect only current versions, not all versions.

        After one unversioned object and the versioned fixtures from
        ``_prep_object_versions`` (defined elsewhere in this class), the
        container reports 2 objects / ``32 + obj.size`` bytes even though
        7 versions exist in total.  NOTE(review): the 32 presumably is the
        byte total of the current versions created by the helper — confirm
        against ``_prep_object_versions``.
        """
        container = self.env.container
        # first store a non-versioned object
        # disable versioning
        container.update_metadata(
            hdrs={self.env.versions_header_key: 'False'})
        self.assertFalse(
            config_true_value(container.info()['versions_enabled']))
        obj = container.file(Utils.create_name())
        self.assertTrue(obj.write(b'not-versioned'))
        self.assertTotalVersions(container, 1)
        # enable versioning
        container.update_metadata(
            hdrs={self.env.versions_header_key: 'True'})
        self.assertTrue(
            config_true_value(container.info()['versions_enabled']))
        obj1_v1, obj1_v2, obj1_v3, obj1_v4, obj2_v1, obj3_v1 = \
            self._prep_object_versions()
        self.assertEqual(int(container.info()['bytes_used']), 32 + obj.size)
        self.assertEqual(int(container.info()['object_count']), 2)
        self.assertTotalVersions(container, 7)
    def test_container_quota_bytes(self):
        """Container byte quotas apply to writes on versioned containers."""
        if 'container_quotas' not in tf.cluster_info:
            raise SkipTest('Container quotas not enabled')
        if tf.in_process:
            tf.skip_if_no_xattrs()
        container = self.env.container
        # write two versions of 5 bytes each
        obj = container.file(Utils.create_name())
        self.assertTrue(obj.write(b'aaaaa'))
        self.assertTrue(obj.write(b'bbbbb'))
        self.assertTotalVersions(container, 2)
        # set X-Container-Meta-Quota-Bytes to 10
        container.update_metadata(
            hdrs={'X-Container-Meta-Quota-Bytes': '10'})
        self.assertEqual(container.info()['container_quota_bytes'], '10')
        # a third 5-byte write would exceed the quota -> 413
        with self.assertRaises(ResponseError) as cm:
            obj.write(b'ccccc')
        self.assertEqual(413, cm.exception.status)
        # reset container quota so later tests are unaffected
        container.update_metadata(
            hdrs={'X-Container-Meta-Quota-Bytes': ''})
    def test_list_unversioned_container(self):
        """``?versions`` listings also work on never-versioned containers.

        Every row should carry ``version_id: null`` and ``is_latest: True``;
        prefix, marker, delimiter and reverse params behave as usual.
        """
        _obj1, obj2, obj3 = self._prep_unversioned_objects()
        # _obj1 got deleted, so won't show up at all
        item2 = {
            'name': obj2,
            'bytes': 5,
            'content_type': 'text/blah-blah-blah',
            'hash': md5(b'first', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null',
        }
        item3 = {
            'name': obj3,
            'bytes': 6,
            'content_type': 'text/plain',
            'hash': md5(b'second', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null',
        }
        # version-aware listing works for unversioned containers
        listing_parms = {'format': 'json',
                         'versions': None}
        listing = self.env.unversioned_container.files(parms=listing_parms)
        for item in listing:
            item.pop('last_modified')
        self.assertEqual(listing, [item2, item3])
        # ... with a prefix
        listing_parms = {'format': 'json',
                         'prefix': obj2[:2],
                         'versions': None}
        listing = self.env.unversioned_container.files(parms=listing_parms)
        for item in listing:
            item.pop('last_modified')
        self.assertEqual(listing, [item2])
        # ... with a marker
        listing_parms = {'format': 'json',
                         'marker': obj2,
                         'versions': None}
        listing = self.env.unversioned_container.files(parms=listing_parms)
        for item in listing:
            item.pop('last_modified')
        self.assertEqual(listing, [item3])
        # ... with a delimiter (subdir rows have no last_modified)
        listing_parms = {'format': 'json',
                         'delimiter': 'er',
                         'versions': None}
        listing = self.env.unversioned_container.files(parms=listing_parms)
        for item in listing:
            if 'name' in item:
                item.pop('last_modified')
        self.assertEqual(listing, [item2, {'subdir': 'order'}])
        # ... reversed
        listing_parms = {'format': 'json',
                         'reverse': 'true',
                         'versions': None}
        listing = self.env.unversioned_container.files(parms=listing_parms)
        for item in listing:
            item.pop('last_modified')
        self.assertEqual(listing, [item3, item2])
    def test_is_latest(self):
        """Exactly one row per object is marked ``is_latest`` in listings.

        Builds four versions (v3 being a delete marker), checks the
        versions listing is newest-first with only the head marked latest,
        then disables versioning and verifies a new null-version write
        becomes the latest while all old versions remain listed.
        """
        obj = self.env.container.file(Utils.create_name())
        # v1
        resp = obj.write(b'version1', hdrs={
            'Content-Type': 'text/jibberish11',
            'ETag': md5(b'version1', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj_v1 = resp.getheader('x-object-version-id')
        # v2
        resp = obj.write(b'version2', hdrs={
            'Content-Type': 'text/jibberish12',
            'ETag': md5(b'version2', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj_v2 = resp.getheader('x-object-version-id')
        # v3 - a delete marker; its version-id comes back in the response
        obj.delete()
        resp_headers = {
            h.lower(): v for h, v in obj.conn.response.getheaders()}
        obj_v3 = resp_headers.get('x-object-version-id')
        # v4
        resp = obj.write(b'version4', hdrs={
            'Content-Type': 'text/jibberish14',
            'ETag': md5(b'version4', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj_v4 = resp.getheader('x-object-version-id')
        listing_parms = {'format': 'json', 'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(4, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        # newest first; only v4 is is_latest
        self.assertEqual(prev_versions, [{
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish14',
            'hash': md5(b'version4', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': obj_v4,
        }, {
            'name': obj.name,
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': False,
            'version_id': obj_v3,
        }, {
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish12',
            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v2,
        }, {
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish11',
            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v1,
        }])
        self.env.container.update_metadata(
            hdrs={self.env.versions_header_key: 'False'})
        # v5 - non-versioned; listed with version_id 'null'
        obj.write(b'version5', hdrs={
            'Content-Type': 'text/jibberish15',
            'ETag': md5(b'version5', usedforsecurity=False).hexdigest(),
        })
        listing_parms = {'format': 'json', 'versions': None}
        prev_versions = self.env.container.files(parms=listing_parms)
        self.assertEqual(5, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
        self.assertEqual(prev_versions, [{
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish15',
            'hash': md5(b'version5', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null',
        }, {
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish14',
            'hash': md5(b'version4', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v4,
        }, {
            'name': obj.name,
            'bytes': 0,
            'content_type': 'application/x-deleted;swift_versions_deleted=1',
            'hash': MD5_OF_EMPTY_STRING,
            'is_latest': False,
            'version_id': obj_v3,
        }, {
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish12',
            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v2,
        }, {
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish11',
            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v1,
        }])
class TestContainerOperationsUTF8(Base2, TestContainerOperations):
    """Repeat TestContainerOperations via the Base2 mixin."""
    pass
class TestDeleteContainer(TestObjectVersioningBase):
    """Verify a versioned container can only be deleted once empty."""

    def tearDown(self):
        # do nothing since the test deletes all data + container itself
        pass

    def test_delete_container(self):
        """Container DELETE fails while any versions (or delete markers)
        remain, and succeeds only after versioning is disabled and all
        versions are removed."""
        # sanity
        container = self.env.container
        self.assertTrue(
            config_true_value(container.info()['versions_enabled']))
        self.assertTotalVersions(container, 0)

        # write an object to be versioned; create_name must be *called*
        # so the object gets a generated string name (passing the bare
        # function object was a bug)
        obj = container.file(Utils.create_name())
        obj.write(b"foo")
        self.assertTotalVersions(container, 1)

        # delete object and attempt to delete container
        obj.delete()
        self.assertTotalVersions(container, 2)

        # expect failure because versioning is enabled and
        # old versions still exist
        self.assertFalse(container.delete())

        # disable versioning
        container.update_metadata(
            hdrs={self.env.versions_header_key: 'False'})
        self.assertFalse(
            config_true_value(container.info()['versions_enabled']))

        # expect failure because old versions still exist
        self.assertFalse(container.delete())

        # delete older versions
        self._tear_down_files(container)
        self.assertTotalVersions(container, 0)

        # and finally delete container
        self.assertTrue(container.delete())
class TestSloWithVersioning(TestObjectVersioningBase):
    """Exercise interactions between SLO manifests and object versioning."""

    def setUp(self):
        """Create a versioned container, a segments container, and two
        1 MiB segments ('a' and 'b')."""
        super(TestSloWithVersioning, self).setUp()
        if 'slo' not in cluster_info:
            raise SkipTest("SLO not enabled")
        if tf.in_process:
            tf.skip_if_no_xattrs()
        # create a container with versioning
        self.env.versions_header_key = 'X-Versions-Enabled'
        self.container = self.env.account.container(Utils.create_name())
        container_headers = {self.env.versions_header_key: 'True'}
        if not self.container.create(hdrs=container_headers):
            raise ResponseError(self.conn.response)
        self.segments_container = self.env.account.container(
            Utils.create_name())
        if not self.segments_container.create():
            raise ResponseError(self.conn.response)
        # create some segments
        self.seg_info = {}
        for letter, size in (('a', 1024 * 1024),
                             ('b', 1024 * 1024)):
            seg_name = letter
            file_item = self.segments_container.file(seg_name)
            file_item.write((letter * size).encode('ascii'))
            self.seg_info[seg_name] = {
                'size_bytes': size,
                'etag': file_item.md5,
                'path': '/%s/%s' % (self.segments_container.name, seg_name)}

    @property
    def account_name(self):
        # lazily derive the bare account name from the storage path
        if not self._account_name:
            self._account_name = self.env.account.conn.storage_path.rsplit(
                '/', 1)[-1]
        return self._account_name

    def _create_manifest(self, seg_name):
        """PUT a single-segment SLO manifest; return (file, version-id)."""
        # create a manifest in the versioning container
        file_item = self.container.file("my-slo-manifest")
        resp = file_item.write(
            json.dumps([self.seg_info[seg_name]]).encode('ascii'),
            parms={'multipart-manifest': 'put'},
            return_resp=True)
        version_id = resp.getheader('x-object-version-id')
        return file_item, version_id

    def _assert_is_manifest(self, file_item, seg_name, version_id=None):
        """Assert that (the given version of) file_item reads back as an
        SLO manifest referencing seg_name."""
        if version_id:
            read_params = {'multipart-manifest': 'get',
                           'version-id': version_id}
        else:
            read_params = {'multipart-manifest': 'get'}
        manifest_body = file_item.read(parms=read_params)
        resp_headers = {
            h.lower(): v for h, v in file_item.conn.response.getheaders()}
        self.assertIn('x-static-large-object', resp_headers)
        self.assertEqual('application/json; charset=utf-8',
                         file_item.content_type)
        try:
            manifest = json.loads(manifest_body)
        except ValueError:
            self.fail("GET with multipart-manifest=get got invalid json")
        self.assertEqual(1, len(manifest))
        key_map = {'etag': 'hash', 'size_bytes': 'bytes', 'path': 'name'}
        for k_client, k_slo in key_map.items():
            self.assertEqual(self.seg_info[seg_name][k_client],
                             manifest[0][k_slo])

    def _assert_is_object(self, file_item, seg_data, version_id=None):
        """Assert that (the given version of) file_item reads back as the
        assembled 1 MiB large object filled with seg_data."""
        if version_id:
            file_contents = file_item.read(parms={'version-id': version_id})
        else:
            file_contents = file_item.read()
        self.assertEqual(1024 * 1024, len(file_contents))
        self.assertEqual(seg_data, file_contents[:1])
        self.assertEqual(seg_data, file_contents[-1:])

    def tearDown(self):
        self._tear_down_files(self.container)

    def test_slo_manifest_version(self):
        """Overwrite, delete, restore and purge SLO manifest versions."""
        file_item, v1_version_id = self._create_manifest('a')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, b'a')
        # upload new manifest
        file_item, v2_version_id = self._create_manifest('b')
        # sanity check: read the manifest, then the large object
        self._assert_is_manifest(file_item, 'b')
        self._assert_is_object(file_item, b'b')
        # we wrote two versions
        self.assertTotalVersions(self.container, 2)
        # check the version 1 is still a manifest
        self._assert_is_manifest(file_item, 'a', v1_version_id)
        self._assert_is_object(file_item, b'a', v1_version_id)
        # listing looks good
        file_info = file_item.info()
        manifest_info = file_item.info(parms={'multipart-manifest': 'get'})
        obj_list = self.container.files(parms={'format': 'json'})
        for o in obj_list:
            o.pop('last_modified')
            # TODO: add symlink_path back in expected
            o.pop('symlink_path')
        expected = {
            'bytes': file_info['content_length'],
            'content_type': 'application/octet-stream',
            'hash': normalize_etag(manifest_info['etag']),
            'name': 'my-slo-manifest',
            'slo_etag': file_info['etag'],
            'version_symlink': True,
        }
        self.assertEqual([expected], obj_list)
        # delete the newest manifest
        file_item.delete()
        # expect to have 3 versions now, last one being a delete-marker
        self.assertTotalVersions(self.container, 3)
        # restore version 1
        file_item.copy(self.container.name, file_item.name,
                       parms={'multipart-manifest': 'get',
                              'version-id': v1_version_id})
        self.assertTotalVersions(self.container, 4)
        self._assert_is_manifest(file_item, 'a')
        self._assert_is_object(file_item, b'a')
        # versioned container listing still looks slo-like
        file_info = file_item.info()
        manifest_info = file_item.info(parms={'multipart-manifest': 'get'})
        obj_list = self.container.files(parms={'format': 'json'})
        for o in obj_list:
            o.pop('last_modified')
            # TODO: add symlink_path back in expected
            o.pop('symlink_path')
        expected = {
            'bytes': file_info['content_length'],
            'content_type': 'application/octet-stream',
            'hash': normalize_etag(manifest_info['etag']),
            'name': 'my-slo-manifest',
            'slo_etag': file_info['etag'],
            'version_symlink': True,
        }
        self.assertEqual([expected], obj_list)
        # purge version 1 *and* its segments
        status = file_item.conn.make_request(
            'DELETE', file_item.path,
            hdrs={'Accept': 'application/json'},
            parms={'multipart-manifest': 'delete',
                   'version-id': v1_version_id})
        body = file_item.conn.response.read()
        self.assertEqual(status, 200, body)
        resp = json.loads(body)
        self.assertEqual(resp['Response Status'], '200 OK')
        self.assertEqual(resp['Errors'], [])
        self.assertEqual(resp['Number Deleted'], 2)
        self.assertTotalVersions(self.container, 3)
        # Since we included the ?multipart-manifest=delete, segments
        # got cleaned up and now the current version is busted
        with self.assertRaises(ResponseError) as caught:
            file_item.read()
        self.assertEqual(409, caught.exception.status)

    def test_links_to_slo(self):
        """Soft and hard symlinks to an SLO manifest report its info."""
        file_item, v1_version_id = self._create_manifest('a')
        slo_info = file_item.info()
        symlink_name = Utils.create_name()
        sym_tgt_header = quote(unquote('%s/%s' % (
            self.container.name, file_item.name)))
        symlink = self.container.file(symlink_name)
        # symlink to the slo
        sym_headers = {'X-Symlink-Target': sym_tgt_header}
        symlink.write(b'', hdrs=sym_headers)
        self.assertEqual(slo_info, symlink.info())
        # hardlink to the slo
        sym_headers['X-Symlink-Target-Etag'] = slo_info['x_manifest_etag']
        symlink.write(b'', hdrs=sym_headers)
        self.assertEqual(slo_info, symlink.info())
class TestVersionsLocationWithVersioning(TestObjectVersioningBase):
    """Tests for legacy versioned-writes (X-Versions-Location) containers.

    Subclasses override ``location_header_key`` to run the same tests
    against X-History-Location.
    """
    # create a container with versioned writes
    location_header_key = 'X-Versions-Location'

    def setUp(self):
        """Create a '<prefix>-objs' container whose old versions are sent
        to a sibling '<prefix>-versions' container."""
        super(TestVersionsLocationWithVersioning, self).setUp()
        if six.PY2:
            # avoid getting a prefix that stops halfway through an encoded
            # character
            prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
        else:
            prefix = Utils.create_name()[:10]
        self.versions_container = self.env.account.container(
            prefix + "-versions")
        if not self.versions_container.create():
            raise ResponseError(self.conn.response)
        self.container = self.env.account.container(prefix + "-objs")
        container_headers = {
            self.location_header_key: quote(self.versions_container.name)}
        if not self.container.create(hdrs=container_headers):
            raise ResponseError(self.conn.response)

    def _prep_object_versions(self):
        """Write three versions of one object; return its File handle."""
        # object with multiple versions
        object_name = Utils.create_name()
        obj = self.container.file(object_name)
        # v1
        obj.write(b'version1', hdrs={
            'Content-Type': 'text/jibberish11',
            'ETag': md5(b'version1', usedforsecurity=False).hexdigest(),
        })
        # v2
        obj.write(b'version2', hdrs={
            'Content-Type': 'text/jibberish12',
            'ETag': md5(b'version2', usedforsecurity=False).hexdigest(),
        })
        # v3
        obj.write(b'version3', hdrs={
            'Content-Type': 'text/jibberish13',
            'ETag': md5(b'version3', usedforsecurity=False).hexdigest(),
        })
        return obj

    def test_list_with_versions_param(self):
        """?versions listings on legacy containers: every row is 'null'
        version-id and is_latest; old copies live in the versions
        container under the '%03x<name>/' prefix scheme."""
        obj = self._prep_object_versions()
        obj_name = obj.name
        listing_parms = {'format': 'json', 'versions': None}
        current_versions = self.container.files(parms=listing_parms)
        self.assertEqual(1, len(current_versions))
        for pv in current_versions:
            pv.pop('last_modified')
        self.assertEqual(current_versions, [{
            'name': obj_name,
            'bytes': 8,
            'content_type': 'text/jibberish13',
            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null'
        }])
        prev_versions = self.versions_container.files(parms=listing_parms)
        self.assertEqual(2, len(prev_versions))
        for pv in prev_versions:
            pv.pop('last_modified')
            name = pv.pop('name')
            self.assertTrue(name.startswith('%03x%s/' % (len(obj_name),
                                                         obj_name)))
        self.assertEqual(prev_versions, [{
            'bytes': 8,
            'content_type': 'text/jibberish11',
            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null',
        }, {
            'bytes': 8,
            'content_type': 'text/jibberish12',
            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null'
        }])

    def test_delete_with_null_version_id(self):
        """DELETE with ?version-id=null: X-Versions-Location restores the
        previous copy; X-History-Location leaves a 404."""
        obj = self._prep_object_versions()
        # sanity
        self.assertEqual(b'version3', obj.read())
        obj.delete(parms={'version-id': 'null'})
        if self.location_header_key == 'X-Versions-Location':
            self.assertEqual(b'version2', obj.read())
        else:
            with self.assertRaises(ResponseError) as caught:
                obj.read()
            self.assertEqual(404, caught.exception.status)
class TestHistoryLocationWithVersioning(TestVersionsLocationWithVersioning):
    """Repeat the legacy-versioning tests using X-History-Location."""
    # create a container with versioned writes
    location_header_key = 'X-History-Location'
class TestVersioningAccountTempurl(TestObjectVersioningBase):
    """Account-scoped TempURL access against a versioned container."""
    env = TestTempurlEnv
    digest_name = 'sha256'

    def setUp(self):
        """Skip unless TempURL (with our digest) is enabled; pre-compute
        a GET tempurl for the env's object."""
        self.env.versions_header_key = 'X-Versions-Enabled'
        super(TestVersioningAccountTempurl, self).setUp()
        if self.env.tempurl_enabled is False:
            raise SkipTest("TempURL not enabled")
        elif self.env.tempurl_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected tempurl_enabled to be True/False, got %r" %
                (self.env.tempurl_enabled,))
        if self.digest_name not in cluster_info['tempurl'].get(
                'allowed_digests', ['sha1']):
            raise SkipTest("tempurl does not support %s signatures" %
                           self.digest_name)
        self.digest = getattr(hashlib, self.digest_name)
        self.expires = int(time.time()) + 86400
        self.obj_tempurl_parms = self.tempurl_parms(
            'GET', self.expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key)

    def tempurl_parms(self, method, expires, path, key):
        """Return the temp_url_sig/temp_url_expires query params for a
        '<method>\\n<expires>\\n<path>' HMAC signature."""
        path = unquote(path)
        if not six.PY2:
            method = method.encode('utf8')
            path = path.encode('utf8')
            key = key.encode('utf8')
        sig = hmac.new(
            key,
            b'%s\n%d\n%s' % (method, expires, path),
            self.digest).hexdigest()
        return {'temp_url_sig': sig, 'temp_url_expires': str(expires)}

    def test_PUT(self):
        """A tempurl PUT overwrite creates a new version."""
        obj = self.env.obj
        # give out a signature which allows a PUT to obj
        expires = int(time.time()) + 86400
        put_parms = self.tempurl_parms(
            'PUT', expires, self.env.conn.make_path(obj.path),
            self.env.tempurl_key)
        # try to overwrite existing object
        resp = obj.write(b"version2", parms=put_parms,
                         cfg={'no_auth_token': True},
                         return_resp=True)
        resp_headers = {
            h.lower(): v for h, v in resp.getheaders()}
        self.assertIn('x-object-version-id', resp_headers)

    def test_GET_latest(self):
        """A tempurl GET always returns the latest version."""
        obj = self.env.obj
        expires = int(time.time()) + 86400
        get_parms = self.tempurl_parms(
            'GET', expires, self.env.conn.make_path(obj.path),
            self.env.tempurl_key)
        # get v1 object (i.e. version-id=null, no symlinks involved)
        contents = obj.read(parms=get_parms, cfg={'no_auth_token': True})
        self.assert_status([200])
        self.assertEqual(contents, b"obj contents")
        # give out a signature which allows a PUT to obj
        expires = int(time.time()) + 86400
        put_parms = self.tempurl_parms(
            'PUT', expires, self.env.conn.make_path(obj.path),
            self.env.tempurl_key)
        # try to overwrite existing object
        resp = obj.write(b"version2", parms=put_parms,
                         cfg={'no_auth_token': True},
                         return_resp=True)
        resp_headers = {
            h.lower(): v for h, v in resp.getheaders()}
        self.assertIn('x-object-version-id', resp_headers)
        # get v2 object
        contents = obj.read(parms=get_parms, cfg={'no_auth_token': True})
        self.assert_status([200])
        self.assertEqual(contents, b"version2")

    def test_GET_version_id(self):
        # N.B.: The test is not intended to imply the desired behavior
        # of a tempurl GET with version-id. Currently version-id is simply
        # ignored as the latest version is always returned. In the future,
        # users should be able to create a tempurl with version-id as a
        # parameter.
        # overwrite object a couple more times
        obj = self.env.obj
        resp = obj.write(b"version2", return_resp=True)
        v2_version_id = resp.getheader('x-object-version-id')
        obj.write(b"version3!!!")
        expires = int(time.time()) + 86400
        get_parms = self.tempurl_parms(
            'GET', expires, self.env.conn.make_path(obj.path),
            self.env.tempurl_key)
        get_parms['version-id'] = v2_version_id
        # version-id is ignored: latest content is returned
        contents = obj.read(parms=get_parms, cfg={'no_auth_token': True})
        self.assert_status([200])
        self.assertEqual(contents, b"version3!!!")
class TestVersioningContainerTempurl(TestObjectVersioningBase):
    """Container-scoped TempURL access against a versioned container."""
    env = TestContainerTempurlEnv
    digest_name = 'sha256'

    def setUp(self):
        """Skip unless TempURL (with our digest) is enabled; pre-compute
        a GET tempurl for the env's object."""
        self.env.versions_header_key = 'X-Versions-Enabled'
        super(TestVersioningContainerTempurl, self).setUp()
        if self.env.tempurl_enabled is False:
            raise SkipTest("TempURL not enabled")
        elif self.env.tempurl_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected tempurl_enabled to be True/False, got %r" %
                (self.env.tempurl_enabled,))
        if self.digest_name not in cluster_info['tempurl'].get(
                'allowed_digests', ['sha1']):
            raise SkipTest("tempurl does not support %s signatures" %
                           self.digest_name)
        self.digest = getattr(hashlib, self.digest_name)
        expires = int(time.time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(self.env.obj.path),
            self.env.tempurl_key)
        self.obj_tempurl_parms = {'temp_url_sig': sig,
                                  'temp_url_expires': str(expires)}

    def tempurl_sig(self, method, expires, path, key):
        """Return the hex HMAC signature over
        '<method>\\n<expires>\\n<path>'."""
        path = unquote(path)
        if not six.PY2:
            method = method.encode('utf8')
            path = path.encode('utf8')
            key = key.encode('utf8')
        return hmac.new(
            key,
            b'%s\n%d\n%s' % (method, expires, path),
            self.digest).hexdigest()

    def test_PUT(self):
        """A container-scoped tempurl PUT overwrite creates a version."""
        obj = self.env.obj
        # give out a signature which allows a PUT to new_obj
        expires = int(time.time()) + 86400
        sig = self.tempurl_sig(
            'PUT', expires, self.env.conn.make_path(obj.path),
            self.env.tempurl_key)
        put_parms = {'temp_url_sig': sig,
                     'temp_url_expires': str(expires)}
        # try to overwrite existing object
        resp = obj.write(b"version2", parms=put_parms,
                         cfg={'no_auth_token': True},
                         return_resp=True)
        resp_headers = {
            h.lower(): v for h, v in resp.getheaders()}
        self.assertIn('x-object-version-id', resp_headers)

    def test_GET_latest(self):
        """A container-scoped tempurl GET follows the versioning symlink
        into the (differently scoped) versions container."""
        obj = self.env.obj
        expires = int(time.time()) + 86400
        sig = self.tempurl_sig(
            'GET', expires, self.env.conn.make_path(obj.path),
            self.env.tempurl_key)
        get_parms = {'temp_url_sig': sig,
                     'temp_url_expires': str(expires)}
        # get v1 object (i.e. version-id=null, no symlinks involved)
        contents = obj.read(parms=get_parms, cfg={'no_auth_token': True})
        self.assert_status([200])
        self.assertEqual(contents, b"obj contents")
        # overwrite existing object
        obj.write(b"version2")
        # get v2 object (reading from versions container)
        # versioning symlink allows us to bypass the normal
        # container-tempurl-key scoping
        contents = obj.read(parms=get_parms, cfg={'no_auth_token': True})
        self.assert_status([200])
        self.assertEqual(contents, b"version2")
        # HEAD works, too
        obj.info(parms=get_parms, cfg={'no_auth_token': True})
        self.assert_status([200])
| swift-master | test/functional/test_object_versioning.py |
#!/usr/bin/python -u
# Copyright (c) 2010-2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import six
from unittest import SkipTest
from six.moves.urllib.parse import unquote
from swift.common.utils import quote
from swift.common.swob import str_to_wsgi
import test.functional as tf
from test.functional.tests import Utils, Base, Base2, BaseEnv
from test.functional.swift_test_client import Account, Connection, \
ResponseError
def setUpModule():
    """Module-level setup hook: initialize the functional-test package."""
    tf.setup_package()
def tearDownModule():
    """Module-level teardown hook: clean up the functional-test package."""
    tf.teardown_package()
def requires_domain_remap(func):
    """Decorator: skip the wrapped test unless domain_remap is usable.

    Requires both the domain_remap middleware to be enabled in the
    cluster and a storage_domain to be configured in test.conf.
    """
    @functools.wraps(func)
    def inner(*args, **kwargs):
        if 'domain_remap' not in tf.cluster_info:
            raise SkipTest('Domain Remap is not enabled')
        # domain_remap middleware does not advertise its storage_domain
        # values in swift /info responses, so a storage_domain must be
        # configured in test.conf for these tests to succeed
        storage_domain = tf.config.get('storage_domain')
        if not storage_domain:
            raise SkipTest('Domain Remap storage_domain not configured in %s' %
                           tf.config['__file__'])
        return func(*args, **kwargs)
    return inner
class TestStaticWebEnv(BaseEnv):
    """Shared fixture for staticweb tests: a world-readable container
    populated with index/error/css objects and a small 'directory' tree."""
    static_web_enabled = None  # tri-state: None initially, then True/False

    @classmethod
    def setUp(cls):
        cls.conn = Connection(tf.config)
        cls.conn.authenticate()
        # probe for the staticweb middleware exactly once
        if cls.static_web_enabled is None:
            cls.static_web_enabled = 'staticweb' in tf.cluster_info
        if not cls.static_web_enabled:
            return
        cls.account = Account(
            cls.conn, tf.config.get('account', tf.config['username']))
        cls.account.delete_containers()
        cls.container = cls.account.container(Utils.create_name())
        # anonymously readable and listable, as staticweb requires
        if not cls.container.create(
                hdrs={'X-Container-Read': '.r:*,.rlistings'}):
            raise ResponseError(cls.conn.response)
        # logical tree; trailing '/' marks a pseudo-directory object and
        # '%' in names exercises quoting
        objects = ['index',
                   'error',
                   'listings_css',
                   'dir/',
                   'dir/obj',
                   'dir/some sub%dir/',
                   'dir/some sub%dir/obj']
        cls.objects = {}
        # sorted() guarantees each parent dir is created before children
        for item in sorted(objects):
            if '/' in item.rstrip('/'):
                parent, _ = item.rstrip('/').rsplit('/', 1)
                path = '%s/%s' % (cls.objects[parent + '/'].name,
                                  Utils.create_name())
            else:
                path = Utils.create_name()
            if item[-1] == '/':
                cls.objects[item] = cls.container.file(path)
                cls.objects[item].write(hdrs={
                    'Content-Type': 'application/directory'})
            else:
                cls.objects[item] = cls.container.file(path)
                cls.objects[item].write(('%s contents' % item).encode('utf8'))
class TestStaticWeb(Base):
env = TestStaticWebEnv
set_up = False
    def setUp(self):
        """Skip unless the env detected the staticweb middleware."""
        super(TestStaticWeb, self).setUp()
        if self.env.static_web_enabled is False:
            raise SkipTest("Static Web not enabled")
        elif self.env.static_web_enabled is not True:
            # just some sanity checking
            raise Exception(
                "Expected static_web_enabled to be True/False, got %r" %
                (self.env.static_web_enabled,))
    @property
    def domain_remap_acct(self):
        # the storage_domain option in test.conf must be set to one of the
        # domain_remap middleware storage_domain values
        return '.'.join((self.env.account.conn.account_name,
                         tf.config.get('storage_domain')))
    @property
    def domain_remap_cont(self):
        # the storage_domain option in test.conf must be set to one of the
        # domain_remap middleware storage_domain values
        return '.'.join(
            (self.env.container.name, self.env.account.conn.account_name,
             tf.config.get('storage_domain')))
def _set_staticweb_headers(self, index=False, listings=False,
listings_css=False, error=False):
objects = self.env.objects
headers = {}
if index:
headers['X-Container-Meta-Web-Index'] = objects['index'].name
else:
headers['X-Remove-Container-Meta-Web-Index'] = 'true'
if listings:
headers['X-Container-Meta-Web-Listings'] = 'true'
else:
headers['X-Remove-Container-Meta-Web-Listings'] = 'true'
if listings_css:
headers['X-Container-Meta-Web-Listings-Css'] = \
objects['listings_css'].name
else:
headers['X-Remove-Container-Meta-Web-Listings-Css'] = 'true'
if error:
headers['X-Container-Meta-Web-Error'] = objects['error'].name
else:
headers['X-Remove-Container-Meta-Web-Error'] = 'true'
self.assertTrue(self.env.container.update_metadata(hdrs=headers))
def _test_redirect_with_slash(self, host, path, anonymous=False):
self._set_staticweb_headers(listings=True)
self.env.account.conn.make_request(
'GET', path,
hdrs={'X-Web-Mode': str(not anonymous), 'Host': host},
cfg={'no_auth_token': anonymous, 'absolute_path': True})
self.assert_status(301)
expected = '%s://%s%s/' % (
self.env.account.conn.storage_scheme, host, path)
self.assertEqual(self.env.conn.response.getheader('location'),
expected)
def _test_redirect_slash_direct(self, anonymous):
host = self.env.account.conn.storage_netloc
path = '%s/%s' % (self.env.account.conn.storage_path,
quote(self.env.container.name))
self._test_redirect_with_slash(host, path, anonymous=anonymous)
path = '%s/%s/%s' % (self.env.account.conn.storage_path,
quote(self.env.container.name),
quote(self.env.objects['dir/'].name))
self._test_redirect_with_slash(host, path, anonymous=anonymous)
def test_redirect_slash_auth_direct(self):
self._test_redirect_slash_direct(False)
def test_redirect_slash_anon_direct(self):
self._test_redirect_slash_direct(True)
@requires_domain_remap
def _test_redirect_slash_remap_acct(self, anonymous):
host = self.domain_remap_acct
path = '/%s' % quote(self.env.container.name)
self._test_redirect_with_slash(host, path, anonymous=anonymous)
path = '/%s/%s' % (quote(self.env.container.name),
quote(self.env.objects['dir/'].name))
self._test_redirect_with_slash(host, path, anonymous=anonymous)
def test_redirect_slash_auth_remap_acct(self):
self._test_redirect_slash_remap_acct(False)
def test_redirect_slash_anon_remap_acct(self):
self._test_redirect_slash_remap_acct(True)
@requires_domain_remap
def _test_redirect_slash_remap_cont(self, anonymous):
host = self.domain_remap_cont
path = '/%s' % self.env.objects['dir/'].name
self._test_redirect_with_slash(host, path, anonymous=anonymous)
def test_redirect_slash_auth_remap_cont(self):
self._test_redirect_slash_remap_cont(False)
def test_redirect_slash_anon_remap_cont(self):
self._test_redirect_slash_remap_cont(True)
def _test_get_path(self, host, path, anonymous=False, expected_status=200,
expected_in=[], expected_not_in=[]):
self.env.account.conn.make_request(
'GET', str_to_wsgi(path),
hdrs={'X-Web-Mode': str(not anonymous), 'Host': host},
cfg={'no_auth_token': anonymous, 'absolute_path': True})
self.assert_status(expected_status)
body = self.env.account.conn.response.read()
if not six.PY2:
body = body.decode('utf8')
for string in expected_in:
self.assertIn(string, body)
for string in expected_not_in:
self.assertNotIn(string, body)
def _test_listing(self, host, path, title=None, links=[], notins=[],
css=None, anonymous=False):
self._set_staticweb_headers(listings=True,
listings_css=(css is not None))
if title is None:
title = unquote(path)
expected_in = ['Listing of %s' % title] + [
'<a href="{0}">{1}</a>'.format(quote(link), link)
for link in links]
expected_not_in = notins
if css:
expected_in.append('<link rel="stylesheet" type="text/css" '
'href="%s" />' % quote(css))
self._test_get_path(host, path, anonymous=anonymous,
expected_in=expected_in,
expected_not_in=expected_not_in)
def _test_listing_direct(self, anonymous, listings_css):
objects = self.env.objects
host = self.env.account.conn.storage_netloc
path = '%s/%s/' % (self.env.account.conn.storage_path,
quote(self.env.container.name))
css = objects['listings_css'].name if listings_css else None
self._test_listing(host, path, anonymous=True, css=css,
links=[objects['index'].name,
objects['dir/'].name + '/'],
notins=[objects['dir/obj'].name])
path = '%s/%s/%s/' % (self.env.account.conn.storage_path,
quote(self.env.container.name),
quote(objects['dir/'].name))
css = '../%s' % objects['listings_css'].name if listings_css else None
self._test_listing(
host, path, anonymous=anonymous, css=css,
links=[objects['dir/obj'].name.split('/')[-1],
objects['dir/some sub%dir/'].name.split('/')[-1] + '/'],
notins=[objects['index'].name,
objects['dir/some sub%dir/obj'].name])
def test_listing_auth_direct_without_css(self):
self._test_listing_direct(False, False)
def test_listing_anon_direct_without_css(self):
self._test_listing_direct(True, False)
def test_listing_auth_direct_with_css(self):
self._test_listing_direct(False, True)
def test_listing_anon_direct_with_css(self):
self._test_listing_direct(True, True)
@requires_domain_remap
def _test_listing_remap_acct(self, anonymous, listings_css):
objects = self.env.objects
host = self.domain_remap_acct
path = '/%s/' % self.env.container.name
css = objects['listings_css'].name if listings_css else None
title = '%s/%s/' % (self.env.account.conn.storage_path,
self.env.container.name)
self._test_listing(host, path, title=title, anonymous=anonymous,
css=css,
links=[objects['index'].name,
objects['dir/'].name + '/'],
notins=[objects['dir/obj'].name])
path = '/%s/%s/' % (self.env.container.name, objects['dir/'].name)
css = '../%s' % objects['listings_css'].name if listings_css else None
title = '%s/%s/%s/' % (self.env.account.conn.storage_path,
self.env.container.name,
objects['dir/'])
self._test_listing(
host, path, title=title, anonymous=anonymous, css=css,
links=[objects['dir/obj'].name.split('/')[-1],
objects['dir/some sub%dir/'].name.split('/')[-1] + '/'],
notins=[objects['index'].name,
objects['dir/some sub%dir/obj'].name])
def test_listing_auth_remap_acct_without_css(self):
self._test_listing_remap_acct(False, False)
def test_listing_anon_remap_acct_without_css(self):
self._test_listing_remap_acct(True, False)
def test_listing_auth_remap_acct_with_css(self):
self._test_listing_remap_acct(False, True)
def test_listing_anon_remap_acct_with_css(self):
self._test_listing_remap_acct(True, True)
@requires_domain_remap
def _test_listing_remap_cont(self, anonymous, listings_css):
objects = self.env.objects
host = self.domain_remap_cont
path = '/'
css = objects['listings_css'].name if listings_css else None
title = '%s/%s/' % (self.env.account.conn.storage_path,
self.env.container.name)
self._test_listing(host, path, title=title, anonymous=anonymous,
css=css,
links=[objects['index'].name,
objects['dir/'].name + '/'],
notins=[objects['dir/obj'].name])
path = '/%s/' % objects['dir/'].name
css = '../%s' % objects['listings_css'].name if listings_css else None
title = '%s/%s/%s/' % (self.env.account.conn.storage_path,
self.env.container.name,
objects['dir/'])
self._test_listing(
host, path, title=title, anonymous=anonymous, css=css,
links=[objects['dir/obj'].name.split('/')[-1],
objects['dir/some sub%dir/'].name.split('/')[-1] + '/'],
notins=[objects['index'].name,
objects['dir/some sub%dir/obj'].name])
def test_listing_auth_remap_cont_without_css(self):
self._test_listing_remap_cont(False, False)
def test_listing_anon_remap_cont_without_css(self):
self._test_listing_remap_cont(True, False)
def test_listing_auth_remap_cont_with_css(self):
self._test_listing_remap_cont(False, True)
def test_listing_anon_remap_cont_with_css(self):
self._test_listing_remap_cont(True, True)
def _test_index(self, host, path, anonymous=False, expected_status=200):
self._set_staticweb_headers(index=True)
if expected_status == 200:
expected_in = ['index contents']
expected_not_in = ['Listing']
else:
expected_in = []
expected_not_in = []
self._test_get_path(host, path, anonymous=anonymous,
expected_status=expected_status,
expected_in=expected_in,
expected_not_in=expected_not_in)
def _test_index_direct(self, anonymous):
objects = self.env.objects
host = self.env.account.conn.storage_netloc
path = '%s/%s/' % (self.env.account.conn.storage_path,
quote(self.env.container.name))
self._test_index(host, path, anonymous=anonymous)
path = '%s/%s/%s/' % (self.env.account.conn.storage_path,
quote(self.env.container.name),
quote(objects['dir/'].name))
self._test_index(host, path, anonymous=anonymous, expected_status=404)
def test_index_auth_direct(self):
self._test_index_direct(False)
def test_index_anon_direct(self):
self._test_index_direct(True)
@requires_domain_remap
def _test_index_remap_acct(self, anonymous):
objects = self.env.objects
host = self.domain_remap_acct
path = '/%s/' % self.env.container.name
self._test_index(host, path, anonymous=anonymous)
path = '/%s/%s/' % (self.env.container.name, objects['dir/'].name)
self._test_index(host, path, anonymous=anonymous, expected_status=404)
def test_index_auth_remap_acct(self):
self._test_index_remap_acct(False)
def test_index_anon_remap_acct(self):
self._test_index_remap_acct(True)
@requires_domain_remap
def _test_index_remap_cont(self, anonymous):
objects = self.env.objects
host = self.domain_remap_cont
path = '/'
self._test_index(host, path, anonymous=anonymous)
path = '/%s/' % objects['dir/'].name
self._test_index(host, path, anonymous=anonymous, expected_status=404)
def test_index_auth_remap_cont(self):
self._test_index_remap_cont(False)
def test_index_anon_remap_cont(self):
self._test_index_remap_cont(True)
class TestStaticWebUTF8(Base2, TestStaticWeb):
    # Re-runs TestStaticWeb against the UTF-8 environment (via Base2),
    # minus the container-level domain-remap redirect tests, which cannot
    # work because a UTF-8 container name is not a valid DNS host label.
    def test_redirect_slash_auth_remap_cont(self):
        self.skipTest("Can't remap UTF8 containers")
    def test_redirect_slash_anon_remap_cont(self):
        self.skipTest("Can't remap UTF8 containers")
| swift-master | test/functional/test_staticweb.py |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import binascii
import unittest
import os
import boto
# For an issue with venv and distutils, disable pylint message here
# pylint: disable-msg=E0611,F0401
from distutils.version import StrictVersion
import six
from six.moves import urllib, zip, zip_longest
import test.functional as tf
from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement
from swift.common.middleware.s3api.utils import MULTIUPLOAD_SUFFIX, mktime, \
S3Timestamp
from swift.common.utils import md5
from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection
from test.functional.s3api.utils import get_error_code, get_error_msg, \
calculate_md5
from test.functional.swift_test_client import Connection as SwiftConnection
def setUpModule():
    """Set up the functional-test package (config, accounts) once per module."""
    tf.setup_package()
def tearDownModule():
    """Tear down the functional-test package resources once per module."""
    tf.teardown_package()
class TestS3ApiMultiUpload(S3ApiBase):
def setUp(self):
super(TestS3ApiMultiUpload, self).setUp()
if not tf.cluster_info['s3api'].get('allow_multipart_uploads', False):
self.skipTest('multipart upload is not enebled')
self.min_segment_size = int(tf.cluster_info['s3api'].get(
'min_segment_size', 5242880))
def _gen_comp_xml(self, etags, step=1):
elem = Element('CompleteMultipartUpload')
for i, etag in enumerate(etags):
elem_part = SubElement(elem, 'Part')
SubElement(elem_part, 'PartNumber').text = str(i * step + 1)
SubElement(elem_part, 'ETag').text = etag
return tostring(elem)
    def _initiate_multi_uploads_result_generator(self, bucket, keys,
                                                 headers=None, trials=1):
        """Create *bucket* and lazily initiate a multipart upload per key.

        ``headers`` is an optional list of per-key request headers,
        zipped (longest) against ``keys``; each initiation is repeated
        ``trials`` times.  Yields each raw (status, headers, body)
        response tuple.
        """
        if headers is None:
            headers = [None] * len(keys)
        self.conn.make_request('PUT', bucket)
        query = 'uploads'
        for key, key_headers in zip_longest(keys, headers):
            for i in range(trials):
                status, resp_headers, body = \
                    self.conn.make_request('POST', bucket, key,
                                           headers=key_headers, query=query)
                yield status, resp_headers, body
def _upload_part(self, bucket, key, upload_id, content=None, part_num=1):
query = 'partNumber=%s&uploadId=%s' % (part_num, upload_id)
content = content if content else b'a' * self.min_segment_size
with self.quiet_boto_logging():
status, headers, body = self.conn.make_request(
'PUT', bucket, key, body=content, query=query)
return status, headers, body
    def _upload_part_copy(self, src_bucket, src_obj, dst_bucket, dst_key,
                          upload_id, part_num=1, src_range=None,
                          src_version_id=None):
        """Upload one part via UploadPartCopy from src_bucket/src_obj.

        Optionally restricts the copy with an X-Amz-Copy-Source-Range
        header and/or a source versionId.  Returns
        (status, headers, body, etag), with etag parsed out of the
        CopyPartResult body (surrounding quotes stripped).
        """
        src_path = '%s/%s' % (src_bucket, src_obj)
        if src_version_id:
            src_path += '?versionId=%s' % src_version_id
        query = 'partNumber=%s&uploadId=%s' % (part_num, upload_id)
        req_headers = {'X-Amz-Copy-Source': src_path}
        if src_range:
            req_headers['X-Amz-Copy-Source-Range'] = src_range
        status, headers, body = \
            self.conn.make_request('PUT', dst_bucket, dst_key,
                                   headers=req_headers,
                                   query=query)
        elem = fromstring(body, 'CopyPartResult')
        etag = elem.find('ETag').text.strip('"')
        return status, headers, body, etag
def _complete_multi_upload(self, bucket, key, upload_id, xml):
query = 'uploadId=%s' % upload_id
status, headers, body = \
self.conn.make_request('POST', bucket, key, body=xml,
query=query)
return status, headers, body
    def test_object_multi_upload(self):
        """End-to-end multipart upload flow.

        Initiates uploads for three keys, lists uploads, uploads a part,
        copies a part from another object, lists parts, completes the
        upload (and retries the completion), copies a part using the
        completed MU as source, aborts the remaining uploads, then
        verifies the final object's headers/conditionals and the bucket
        listing.
        """
        bucket = 'bucket'
        keys = [u'obj1\N{SNOWMAN}', u'obj2\N{SNOWMAN}', 'obj3']
        bad_content_md5 = base64.b64encode(b'a' * 16).strip().decode('ascii')
        headers = [{'Content-Type': 'foo/bar', 'x-amz-meta-baz': 'quux',
                    'Content-Encoding': 'gzip', 'Content-Language': 'en-US',
                    'Expires': 'Thu, 01 Dec 1994 16:00:00 GMT',
                    'Cache-Control': 'no-cache',
                    'Content-Disposition': 'attachment'},
                   {'Content-MD5': bad_content_md5},
                   {'Etag': 'nonsense'}]
        uploads = []
        results_generator = self._initiate_multi_uploads_result_generator(
            bucket, keys, headers=headers)
        # Initiate Multipart Upload
        for expected_key, (status, headers, body) in \
                zip(keys, results_generator):
            self.assertEqual(status, 200, body)
            self.assertCommonResponseHeaders(headers)
            self.assertIn('content-type', headers)
            self.assertEqual(headers['content-type'], 'application/xml')
            self.assertIn('content-length', headers)
            self.assertEqual(headers['content-length'], str(len(body)))
            elem = fromstring(body, 'InitiateMultipartUploadResult')
            self.assertEqual(elem.find('Bucket').text, bucket)
            key = elem.find('Key').text
            if six.PY2:
                expected_key = expected_key.encode('utf-8')
            self.assertEqual(expected_key, key)
            upload_id = elem.find('UploadId').text
            self.assertIsNotNone(upload_id)
            self.assertNotIn((key, upload_id), uploads)
            uploads.append((key, upload_id))
        self.assertEqual(len(uploads), len(keys))  # sanity
        # List Multipart Uploads
        expected_uploads_list = [uploads]
        for upload in uploads:
            expected_uploads_list.append([upload])
        for expected_uploads in expected_uploads_list:
            query = 'uploads'
            if len(expected_uploads) == 1:
                query += '&' + urllib.parse.urlencode(
                    {'prefix': expected_uploads[0][0]})
            status, headers, body = \
                self.conn.make_request('GET', bucket, query=query)
            self.assertEqual(status, 200)
            self.assertCommonResponseHeaders(headers)
            self.assertTrue('content-type' in headers)
            self.assertEqual(headers['content-type'], 'application/xml')
            self.assertTrue('content-length' in headers)
            self.assertEqual(headers['content-length'], str(len(body)))
            elem = fromstring(body, 'ListMultipartUploadsResult')
            self.assertEqual(elem.find('Bucket').text, bucket)
            self.assertIsNone(elem.find('KeyMarker').text)
            if len(expected_uploads) > 1:
                self.assertEqual(elem.find('NextKeyMarker').text,
                                 expected_uploads[-1][0])
            else:
                self.assertIsNone(elem.find('NextKeyMarker').text)
            self.assertIsNone(elem.find('UploadIdMarker').text)
            if len(expected_uploads) > 1:
                self.assertEqual(elem.find('NextUploadIdMarker').text,
                                 expected_uploads[-1][1])
            else:
                self.assertIsNone(elem.find('NextUploadIdMarker').text)
            self.assertEqual(elem.find('MaxUploads').text, '1000')
            self.assertTrue(elem.find('EncodingType') is None)
            self.assertEqual(elem.find('IsTruncated').text, 'false')
            self.assertEqual(len(elem.findall('Upload')),
                             len(expected_uploads))
            for (expected_key, expected_upload_id), u in \
                    zip(expected_uploads, elem.findall('Upload')):
                key = u.find('Key').text
                upload_id = u.find('UploadId').text
                self.assertEqual(expected_key, key)
                self.assertEqual(expected_upload_id, upload_id)
                self.assertEqual(u.find('Initiator/ID').text,
                                 self.conn.user_id)
                self.assertEqual(u.find('Initiator/DisplayName').text,
                                 self.conn.user_id)
                self.assertEqual(u.find('Owner/ID').text, self.conn.user_id)
                self.assertEqual(u.find('Owner/DisplayName').text,
                                 self.conn.user_id)
                self.assertEqual(u.find('StorageClass').text, 'STANDARD')
                self.assertTrue(u.find('Initiated').text is not None)
        # Upload Part
        key, upload_id = uploads[0]
        content = b'a' * self.min_segment_size
        etag = md5(content, usedforsecurity=False).hexdigest()
        status, headers, body = \
            self._upload_part(bucket, key, upload_id, content)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers, etag)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '0')
        expected_parts_list = [(headers['etag'],
                                mktime(headers['last-modified']))]
        # Upload Part Copy
        key, upload_id = uploads[1]
        src_bucket = 'bucket2'
        src_obj = 'obj3'
        src_content = b'b' * self.min_segment_size
        etag = md5(src_content, usedforsecurity=False).hexdigest()
        # prepare src obj
        self.conn.make_request('PUT', src_bucket)
        with self.quiet_boto_logging():
            self.conn.make_request('PUT', src_bucket, src_obj,
                                   body=src_content)
        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj)
        self.assertCommonResponseHeaders(headers)
        status, headers, body, resp_etag = \
            self._upload_part_copy(src_bucket, src_obj, bucket,
                                   key, upload_id)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue('etag' not in headers)
        elem = fromstring(body, 'CopyPartResult')
        copy_resp_last_modified = elem.find('LastModified').text
        self.assertIsNotNone(copy_resp_last_modified)
        self.assertEqual(resp_etag, etag)
        # Check last-modified timestamp
        key, upload_id = uploads[1]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)
        self.assertEqual(200, status)
        elem = fromstring(body, 'ListPartsResult')
        listing_last_modified = [p.find('LastModified').text
                                 for p in elem.iterfind('Part')]
        # There should be *exactly* one parts in the result
        self.assertEqual(listing_last_modified, [copy_resp_last_modified])
        # List Parts
        key, upload_id = uploads[0]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(elem.find('Bucket').text, bucket)
        self.assertEqual(elem.find('Key').text, key)
        self.assertEqual(elem.find('UploadId').text, upload_id)
        self.assertEqual(elem.find('Initiator/ID').text, self.conn.user_id)
        self.assertEqual(elem.find('Initiator/DisplayName').text,
                         self.conn.user_id)
        self.assertEqual(elem.find('Owner/ID').text, self.conn.user_id)
        self.assertEqual(elem.find('Owner/DisplayName').text,
                         self.conn.user_id)
        self.assertEqual(elem.find('StorageClass').text, 'STANDARD')
        self.assertEqual(elem.find('PartNumberMarker').text, '0')
        self.assertEqual(elem.find('NextPartNumberMarker').text, '1')
        self.assertEqual(elem.find('MaxParts').text, '1000')
        self.assertEqual(elem.find('IsTruncated').text, 'false')
        self.assertEqual(len(elem.findall('Part')), 1)
        # etags will be used to generate xml for Complete Multipart Upload
        etags = []
        for (expected_etag, expected_date), p in \
                zip(expected_parts_list, elem.findall('Part')):
            last_modified = p.find('LastModified').text
            self.assertIsNotNone(last_modified)
            last_modified_from_xml = S3Timestamp.from_s3xmlformat(
                last_modified)
            self.assertEqual(expected_date, float(last_modified_from_xml))
            self.assertEqual(expected_etag, p.find('ETag').text)
            self.assertEqual(self.min_segment_size, int(p.find('Size').text))
            etags.append(p.find('ETag').text)
        # Complete Multipart Upload
        key, upload_id = uploads[0]
        xml = self._gen_comp_xml(etags)
        status, headers, body = \
            self._complete_multi_upload(bucket, key, upload_id, xml)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertIn('content-type', headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        if 'content-length' in headers:
            self.assertEqual(headers['content-length'], str(len(body)))
        else:
            self.assertIn('transfer-encoding', headers)
            self.assertEqual(headers['transfer-encoding'], 'chunked')
        lines = body.split(b'\n')
        self.assertTrue(lines[0].startswith(b'<?xml'), body)
        self.assertTrue(lines[0].endswith(b'?>'), body)
        elem = fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual(
            '%s/bucket/obj1%%E2%%98%%83' %
            tf.config['s3_storage_url'].rstrip('/'),
            elem.find('Location').text)
        self.assertEqual(elem.find('Bucket').text, bucket)
        self.assertEqual(elem.find('Key').text, key)
        concatted_etags = b''.join(
            etag.strip('"').encode('ascii') for etag in etags)
        # MU etag is md5-of-part-md5s with a "-<part count>" suffix
        exp_etag = '"%s-%s"' % (
            md5(binascii.unhexlify(concatted_etags),
                usedforsecurity=False).hexdigest(), len(etags))
        etag = elem.find('ETag').text
        self.assertEqual(etag, exp_etag)
        exp_size = self.min_segment_size * len(etags)
        status, headers, body = \
            self.conn.make_request('HEAD', bucket, key)
        self.assertEqual(status, 200)
        self.assertEqual(headers['content-length'], str(exp_size))
        self.assertEqual(headers['content-type'], 'foo/bar')
        self.assertEqual(headers['content-encoding'], 'gzip')
        self.assertEqual(headers['content-language'], 'en-US')
        self.assertEqual(headers['content-disposition'], 'attachment')
        self.assertEqual(headers['expires'], 'Thu, 01 Dec 1994 16:00:00 GMT')
        self.assertEqual(headers['cache-control'], 'no-cache')
        self.assertEqual(headers['x-amz-meta-baz'], 'quux')
        swift_etag = '"%s"' % md5(
            concatted_etags, usedforsecurity=False).hexdigest()
        # TODO: GET via swift api, check against swift_etag
        # Should be safe to retry
        status, headers, body = \
            self._complete_multi_upload(bucket, key, upload_id, xml)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertIn('content-type', headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        if 'content-length' in headers:
            self.assertEqual(headers['content-length'], str(len(body)))
        else:
            self.assertIn('transfer-encoding', headers)
            self.assertEqual(headers['transfer-encoding'], 'chunked')
        lines = body.split(b'\n')
        self.assertTrue(lines[0].startswith(b'<?xml'), body)
        self.assertTrue(lines[0].endswith(b'?>'), body)
        elem = fromstring(body, 'CompleteMultipartUploadResult')
        self.assertEqual(
            '%s/bucket/obj1%%E2%%98%%83' %
            tf.config['s3_storage_url'].rstrip('/'),
            elem.find('Location').text)
        self.assertEqual(elem.find('Bucket').text, bucket)
        self.assertEqual(elem.find('Key').text, key)
        self.assertEqual(elem.find('ETag').text, exp_etag)
        status, headers, body = \
            self.conn.make_request('HEAD', bucket, key)
        self.assertEqual(status, 200)
        self.assertEqual(headers['content-length'], str(exp_size))
        self.assertEqual(headers['content-type'], 'foo/bar')
        self.assertEqual(headers['x-amz-meta-baz'], 'quux')
        # Upload Part Copy -- MU as source
        key, upload_id = uploads[1]
        status, headers, body, resp_etag = \
            self._upload_part_copy(bucket, keys[0], bucket,
                                   key, upload_id, part_num=2)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertIn('content-type', headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertIn('content-length', headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertNotIn('etag', headers)
        elem = fromstring(body, 'CopyPartResult')
        last_modified = elem.find('LastModified').text
        self.assertIsNotNone(last_modified)
        exp_content = b'a' * self.min_segment_size
        etag = md5(exp_content, usedforsecurity=False).hexdigest()
        self.assertEqual(resp_etag, etag)
        # Also check that the etag is correct in part listings
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        elem = fromstring(body, 'ListPartsResult')
        self.assertEqual(len(elem.findall('Part')), 2)
        self.assertEqual(elem.findall('Part')[1].find('PartNumber').text, '2')
        self.assertEqual(elem.findall('Part')[1].find('ETag').text,
                         '"%s"' % etag)
        # Abort Multipart Uploads
        # note that uploads[1] has part data while uploads[2] does not
        sw_conn = SwiftConnection(tf.config)
        sw_conn.authenticate()
        for key, upload_id in uploads[1:]:
            query = 'uploadId=%s' % upload_id
            status, headers, body = \
                self.conn.make_request('DELETE', bucket, key, query=query)
            self.assertEqual(status, 204)
            self.assertCommonResponseHeaders(headers)
            self.assertTrue('content-type' in headers)
            self.assertEqual(headers['content-type'],
                             'text/html; charset=UTF-8')
            self.assertTrue('content-length' in headers)
            self.assertEqual(headers['content-length'], '0')
            # Check if all parts have been deleted
            segments = sw_conn.get_account().container(
                bucket + MULTIUPLOAD_SUFFIX).files(
                    parms={'prefix': '%s/%s' % (key, upload_id)})
            self.assertFalse(segments)
        # Check object
        def check_obj(req_headers, exp_status):
            # HEAD the completed MU object with conditional headers and
            # verify status plus etag/content-length expectations
            status, headers, body = \
                self.conn.make_request('HEAD', bucket, keys[0], req_headers)
            self.assertEqual(status, exp_status)
            self.assertCommonResponseHeaders(headers)
            self.assertIn('content-length', headers)
            if exp_status == 412:
                self.assertNotIn('etag', headers)
                self.assertEqual(headers['content-length'], '0')
            else:
                self.assertIn('etag', headers)
                self.assertEqual(headers['etag'], exp_etag)
                if exp_status == 304:
                    self.assertEqual(headers['content-length'], '0')
                else:
                    self.assertEqual(headers['content-length'], str(exp_size))
        check_obj({}, 200)
        # Sanity check conditionals
        check_obj({'If-Match': 'some other thing'}, 412)
        check_obj({'If-None-Match': 'some other thing'}, 200)
        # More interesting conditional cases
        check_obj({'If-Match': exp_etag}, 200)
        check_obj({'If-Match': swift_etag}, 412)
        check_obj({'If-None-Match': swift_etag}, 200)
        check_obj({'If-None-Match': exp_etag}, 304)
        # Check listings
        status, headers, body = self.conn.make_request('GET', bucket)
        self.assertEqual(status, 200)
        elem = fromstring(body, 'ListBucketResult')
        resp_objects = list(elem.findall('./Contents'))
        self.assertEqual(len(resp_objects), 1)
        o = resp_objects[0]
        if six.PY2:
            expected_key = keys[0].encode('utf-8')
        else:
            expected_key = keys[0]
        self.assertEqual(o.find('Key').text, expected_key)
        self.assertIsNotNone(o.find('LastModified').text)
        self.assertRegex(
            o.find('LastModified').text,
            r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.000Z$')
        self.assertEqual(o.find('ETag').text, exp_etag)
        self.assertEqual(o.find('Size').text, str(exp_size))
        self.assertIsNotNone(o.find('StorageClass').text)
        self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
        self.assertEqual(o.find('Owner/DisplayName').text,
                         self.conn.user_id)
    def test_initiate_multi_upload_error(self):
        """InitiateMultipartUpload error paths: bad signature, missing
        bucket, and an over-long key name."""
        bucket = 'bucket'
        key = 'obj'
        self.conn.make_request('PUT', bucket)
        query = 'uploads'
        auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            auth_error_conn.make_request('POST', bucket, key, query=query)
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        status, resp_headers, body = \
            self.conn.make_request('POST', 'nothing', key, query=query)
        self.assertEqual(get_error_code(body), 'NoSuchBucket')
        status, resp_headers, body = self.conn.make_request(
            'POST', bucket,
            'x' * (tf.cluster_info['swift']['max_object_name_length'] + 1),
            query=query)
        self.assertEqual(get_error_code(body), 'KeyTooLongError')
    def test_list_multi_uploads_error(self):
        """ListMultipartUploads error paths: bad signature and missing
        bucket."""
        bucket = 'bucket'
        self.conn.make_request('PUT', bucket)
        query = 'uploads'
        auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            auth_error_conn.make_request('GET', bucket, query=query)
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        status, headers, body = \
            self.conn.make_request('GET', 'nothing', query=query)
        self.assertEqual(get_error_code(body), 'NoSuchBucket')
    def test_upload_part_error(self):
        """UploadPart error paths: bad signature, missing bucket, unknown
        upload id, and an out-of-range part number."""
        bucket = 'bucket'
        self.conn.make_request('PUT', bucket)
        query = 'uploads'
        key = 'obj'
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, query=query)
        elem = fromstring(body, 'InitiateMultipartUploadResult')
        upload_id = elem.find('UploadId').text
        query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
        auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            auth_error_conn.make_request('PUT', bucket, key, query=query)
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        status, headers, body = \
            self.conn.make_request('PUT', 'nothing', key, query=query)
        self.assertEqual(get_error_code(body), 'NoSuchBucket')
        query = 'partNumber=%s&uploadId=%s' % (1, 'nothing')
        status, headers, body = \
            self.conn.make_request('PUT', bucket, key, query=query)
        self.assertEqual(get_error_code(body), 'NoSuchUpload')
        query = 'partNumber=%s&uploadId=%s' % (0, upload_id)
        status, headers, body = \
            self.conn.make_request('PUT', bucket, key, query=query)
        self.assertEqual(get_error_code(body), 'InvalidArgument')
        err_msg = 'Part number must be an integer between 1 and'
        self.assertTrue(err_msg in get_error_msg(body))
    def test_upload_part_copy_error(self):
        """UploadPartCopy error paths: bad signature, missing destination
        bucket, unknown upload id, and missing source object."""
        src_bucket = 'src'
        src_obj = 'src'
        self.conn.make_request('PUT', src_bucket)
        self.conn.make_request('PUT', src_bucket, src_obj)
        src_path = '%s/%s' % (src_bucket, src_obj)
        bucket = 'bucket'
        self.conn.make_request('PUT', bucket)
        key = 'obj'
        query = 'uploads'
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, query=query)
        elem = fromstring(body, 'InitiateMultipartUploadResult')
        upload_id = elem.find('UploadId').text
        query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
        auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            auth_error_conn.make_request('PUT', bucket, key,
                                         headers={
                                             'X-Amz-Copy-Source': src_path
                                         },
                                         query=query)
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        status, headers, body = \
            self.conn.make_request('PUT', 'nothing', key,
                                   headers={'X-Amz-Copy-Source': src_path},
                                   query=query)
        self.assertEqual(get_error_code(body), 'NoSuchBucket')
        query = 'partNumber=%s&uploadId=%s' % (1, 'nothing')
        status, headers, body = \
            self.conn.make_request('PUT', bucket, key,
                                   headers={'X-Amz-Copy-Source': src_path},
                                   query=query)
        self.assertEqual(get_error_code(body), 'NoSuchUpload')
        src_path = '%s/%s' % (src_bucket, 'nothing')
        query = 'partNumber=%s&uploadId=%s' % (1, upload_id)
        status, headers, body = \
            self.conn.make_request('PUT', bucket, key,
                                   headers={'X-Amz-Copy-Source': src_path},
                                   query=query)
        self.assertEqual(get_error_code(body), 'NoSuchKey')
    def test_list_parts_error(self):
        """ListParts error paths: bad signature, missing bucket, and an
        unknown upload id."""
        bucket = 'bucket'
        self.conn.make_request('PUT', bucket)
        key = 'obj'
        query = 'uploads'
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, query=query)
        elem = fromstring(body, 'InitiateMultipartUploadResult')
        upload_id = elem.find('UploadId').text
        query = 'uploadId=%s' % upload_id
        auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            auth_error_conn.make_request('GET', bucket, key, query=query)
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        status, headers, body = \
            self.conn.make_request('GET', 'nothing', key, query=query)
        self.assertEqual(get_error_code(body), 'NoSuchBucket')
        query = 'uploadId=%s' % 'nothing'
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)
        self.assertEqual(get_error_code(body), 'NoSuchUpload')
def test_abort_multi_upload_error(self):
bucket = 'bucket'
self.conn.make_request('PUT', bucket)
key = 'obj'
query = 'uploads'
status, headers, body = \
self.conn.make_request('POST', bucket, key, query=query)
elem = fromstring(body, 'InitiateMultipartUploadResult')
upload_id = elem.find('UploadId').text
self._upload_part(bucket, key, upload_id)
query = 'uploadId=%s' % upload_id
auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
status, headers, body = \
auth_error_conn.make_request('DELETE', bucket, key, query=query)
self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
status, headers, body = \
self.conn.make_request('DELETE', 'nothing', key, query=query)
self.assertEqual(get_error_code(body), 'NoSuchBucket')
status, headers, body = \
self.conn.make_request('DELETE', bucket, 'nothing', query=query)
self.assertEqual(get_error_code(body), 'NoSuchUpload')
query = 'uploadId=%s' % 'nothing'
status, headers, body = \
self.conn.make_request('DELETE', bucket, key, query=query)
self.assertEqual(get_error_code(body), 'NoSuchUpload')
    def test_complete_multi_upload_error(self):
        """
        Complete Multipart Upload must fail with the expected error codes
        for undersized parts, bad credentials, unknown bucket/upload id,
        and malformed or mismatched completion manifests.
        """
        bucket = 'bucket'
        keys = ['obj', 'obj2']
        self.conn.make_request('PUT', bucket)
        query = 'uploads'
        status, headers, body = \
            self.conn.make_request('POST', bucket, keys[0], query=query)
        elem = fromstring(body, 'InitiateMultipartUploadResult')
        upload_id = elem.find('UploadId').text
        # Upload two parts with empty bodies just to collect real etags.
        etags = []
        for i in range(1, 3):
            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
            status, headers, body = \
                self.conn.make_request('PUT', bucket, keys[0], query=query)
            etags.append(headers['etag'])
        xml = self._gen_comp_xml(etags)
        # part 1 too small
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('POST', bucket, keys[0], body=xml,
                                   query=query)
        self.assertEqual(get_error_code(body), 'EntityTooSmall')
        # invalid credentials
        auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            auth_error_conn.make_request('POST', bucket, keys[0], body=xml,
                                         query=query)
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        # wrong/missing bucket
        status, headers, body = \
            self.conn.make_request('POST', 'nothing', keys[0], query=query)
        self.assertEqual(get_error_code(body), 'NoSuchBucket')
        # wrong upload ID
        query = 'uploadId=%s' % 'nothing'
        status, headers, body = \
            self.conn.make_request('POST', bucket, keys[0], body=xml,
                                   query=query)
        self.assertEqual(get_error_code(body), 'NoSuchUpload')
        # without Part tag in xml
        query = 'uploadId=%s' % upload_id
        xml = self._gen_comp_xml([])
        status, headers, body = \
            self.conn.make_request('POST', bucket, keys[0], body=xml,
                                   query=query)
        self.assertEqual(get_error_code(body), 'MalformedXML')
        # with invalid etag in xml
        invalid_etag = 'invalid'
        xml = self._gen_comp_xml([invalid_etag])
        status, headers, body = \
            self.conn.make_request('POST', bucket, keys[0], body=xml,
                                   query=query)
        self.assertEqual(get_error_code(body), 'InvalidPart')
        # without part in Swift: complete a second upload using an etag
        # from the first upload's parts.
        query = 'uploads'
        status, headers, body = \
            self.conn.make_request('POST', bucket, keys[1], query=query)
        elem = fromstring(body, 'InitiateMultipartUploadResult')
        upload_id = elem.find('UploadId').text
        query = 'uploadId=%s' % upload_id
        xml = self._gen_comp_xml([etags[0]])
        status, headers, body = \
            self.conn.make_request('POST', bucket, keys[1], body=xml,
                                   query=query)
        self.assertEqual(get_error_code(body), 'InvalidPart')
    def test_complete_upload_min_segment_size(self):
        """
        Verify the minimum part-size rule on Complete Multipart Upload:
        non-final parts below the minimum trigger EntityTooSmall, a single
        undersized part is allowed, and only the final part may be short.
        """
        bucket = 'bucket'
        key = 'obj'
        self.conn.make_request('PUT', bucket)
        query = 'uploads'
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, query=query)
        elem = fromstring(body, 'InitiateMultipartUploadResult')
        upload_id = elem.find('UploadId').text
        # multi parts with no body
        etags = []
        for i in range(1, 3):
            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
            status, headers, body = \
                self.conn.make_request('PUT', bucket, key, query=query)
            etags.append(headers['etag'])
        xml = self._gen_comp_xml(etags)
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, body=xml,
                                   query=query)
        self.assertEqual(get_error_code(body), 'EntityTooSmall')
        # multi parts with all parts less than min segment size
        etags = []
        for i in range(1, 3):
            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
            status, headers, body = \
                self.conn.make_request('PUT', bucket, key, query=query,
                                       body='AA')
            etags.append(headers['etag'])
        xml = self._gen_comp_xml(etags)
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, body=xml,
                                   query=query)
        self.assertEqual(get_error_code(body), 'EntityTooSmall')
        # one part and less than min segment size
        etags = []
        query = 'partNumber=1&uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('PUT', bucket, key, query=query,
                                   body='AA')
        etags.append(headers['etag'])
        xml = self._gen_comp_xml(etags)
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, body=xml,
                                   query=query)
        self.assertEqual(status, 200)
        # multi parts with all parts except the first part less than min
        # segment size
        query = 'uploads'
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, query=query)
        elem = fromstring(body, 'InitiateMultipartUploadResult')
        upload_id = elem.find('UploadId').text
        etags = []
        # body_size is indexed by part number, so body_size[0] is unused
        # (the loop below runs i = 1, 2).
        # NOTE(review): part 1 here gets min_segment_size - 1 bytes, i.e.
        # it is also under the minimum despite the comment above — confirm
        # whether the first size was meant to be min_segment_size.
        body_size = [self.min_segment_size, self.min_segment_size - 1, 2]
        for i in range(1, 3):
            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
            status, headers, body = \
                self.conn.make_request('PUT', bucket, key, query=query,
                                       body=b'A' * body_size[i])
            etags.append(headers['etag'])
        xml = self._gen_comp_xml(etags)
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, body=xml,
                                   query=query)
        self.assertEqual(get_error_code(body), 'EntityTooSmall')
        # multi parts with all parts except last part more than min segment
        # size
        query = 'uploads'
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, query=query)
        elem = fromstring(body, 'InitiateMultipartUploadResult')
        upload_id = elem.find('UploadId').text
        etags = []
        body_size = [self.min_segment_size, self.min_segment_size, 2]
        for i in range(1, 3):
            query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
            status, headers, body = \
                self.conn.make_request('PUT', bucket, key, query=query,
                                       body=b'A' * body_size[i])
            etags.append(headers['etag'])
        xml = self._gen_comp_xml(etags)
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('POST', bucket, key, body=xml,
                                   query=query)
        self.assertEqual(status, 200)
def test_complete_upload_with_fewer_etags(self):
bucket = 'bucket'
key = 'obj'
self.conn.make_request('PUT', bucket)
query = 'uploads'
status, headers, body = \
self.conn.make_request('POST', bucket, key, query=query)
elem = fromstring(body, 'InitiateMultipartUploadResult')
upload_id = elem.find('UploadId').text
etags = []
for i in range(1, 4):
query = 'partNumber=%s&uploadId=%s' % (2 * i - 1, upload_id)
status, headers, body = self.conn.make_request(
'PUT', bucket, key, body=b'A' * 1024 * 1024 * 5,
query=query)
etags.append(headers['etag'])
query = 'uploadId=%s' % upload_id
xml = self._gen_comp_xml(etags[:-1], step=2)
status, headers, body = \
self.conn.make_request('POST', bucket, key, body=xml,
query=query)
self.assertEqual(status, 200)
    def test_object_multi_upload_part_copy_range(self):
        """
        Copy a byte range of an existing object into a multipart-upload
        part, verify the copy response and part listing, then abort.
        """
        bucket = 'bucket'
        keys = ['obj1']
        uploads = []
        results_generator = self._initiate_multi_uploads_result_generator(
            bucket, keys)
        # Initiate Multipart Upload
        for expected_key, (status, headers, body) in \
                zip(keys, results_generator):
            self.assertEqual(status, 200)
            self.assertCommonResponseHeaders(headers)
            self.assertTrue('content-type' in headers)
            self.assertEqual(headers['content-type'], 'application/xml')
            self.assertTrue('content-length' in headers)
            self.assertEqual(headers['content-length'], str(len(body)))
            elem = fromstring(body, 'InitiateMultipartUploadResult')
            self.assertEqual(elem.find('Bucket').text, bucket)
            key = elem.find('Key').text
            self.assertEqual(expected_key, key)
            upload_id = elem.find('UploadId').text
            self.assertTrue(upload_id is not None)
            self.assertTrue((key, upload_id) not in uploads)
            uploads.append((key, upload_id))
        self.assertEqual(len(uploads), len(keys))  # sanity
        # Upload Part Copy Range
        key, upload_id = uploads[0]
        src_bucket = 'bucket2'
        src_obj = 'obj4'
        src_content = b'y' * (self.min_segment_size // 2) + b'z' * \
            self.min_segment_size
        src_range = 'bytes=0-%d' % (self.min_segment_size - 1)
        # The expected etag covers only the copied byte range, not the
        # whole source object.
        etag = md5(
            src_content[:self.min_segment_size],
            usedforsecurity=False).hexdigest()
        # prepare src obj
        self.conn.make_request('PUT', src_bucket)
        self.conn.make_request('PUT', src_bucket, src_obj, body=src_content)
        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj)
        self.assertCommonResponseHeaders(headers)
        status, headers, body, resp_etag = \
            self._upload_part_copy(src_bucket, src_obj, bucket,
                                   key, upload_id, 1, src_range)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        # The part etag comes back in the XML body, not the headers.
        self.assertTrue('etag' not in headers)
        elem = fromstring(body, 'CopyPartResult')
        copy_resp_last_modified = elem.find('LastModified').text
        self.assertIsNotNone(copy_resp_last_modified)
        self.assertEqual(resp_etag, etag)
        # Check last-modified timestamp
        key, upload_id = uploads[0]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)
        elem = fromstring(body, 'ListPartsResult')
        listing_last_modified = [p.find('LastModified').text
                                 for p in elem.iterfind('Part')]
        # There should be exactly one part in the result
        self.assertEqual(listing_last_modified, [copy_resp_last_modified])
        # Abort Multipart Upload
        key, upload_id = uploads[0]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('DELETE', bucket, key, query=query)
        # sanity checks
        self.assertEqual(status, 204)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '0')
    def test_object_multi_upload_part_copy_version(self):
        """
        Copy specific object versions (the 'null' version and a real
        version id) into multipart-upload parts, verify the responses and
        the part listing, then abort the upload.
        """
        if 'object_versioning' not in tf.cluster_info:
            self.skipTest('Object Versioning not enabled')
        bucket = 'bucket'
        keys = ['obj1']
        uploads = []
        results_generator = self._initiate_multi_uploads_result_generator(
            bucket, keys)
        # Initiate Multipart Upload
        for expected_key, (status, headers, body) in \
                zip(keys, results_generator):
            self.assertEqual(status, 200)
            self.assertCommonResponseHeaders(headers)
            self.assertTrue('content-type' in headers)
            self.assertEqual(headers['content-type'], 'application/xml')
            self.assertTrue('content-length' in headers)
            self.assertEqual(headers['content-length'], str(len(body)))
            elem = fromstring(body, 'InitiateMultipartUploadResult')
            self.assertEqual(elem.find('Bucket').text, bucket)
            key = elem.find('Key').text
            self.assertEqual(expected_key, key)
            upload_id = elem.find('UploadId').text
            self.assertTrue(upload_id is not None)
            self.assertTrue((key, upload_id) not in uploads)
            uploads.append((key, upload_id))
        self.assertEqual(len(uploads), len(keys))  # sanity
        key, upload_id = uploads[0]
        src_bucket = 'bucket2'
        src_obj = 'obj4'
        src_content = b'y' * (self.min_segment_size // 2) + b'z' * \
            self.min_segment_size
        etags = [md5(src_content, usedforsecurity=False).hexdigest()]
        # prepare null-version src obj (uploaded before versioning is on)
        self.conn.make_request('PUT', src_bucket)
        self.conn.make_request('PUT', src_bucket, src_obj, body=src_content)
        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj)
        self.assertCommonResponseHeaders(headers)
        # Turn on versioning
        elem = Element('VersioningConfiguration')
        SubElement(elem, 'Status').text = 'Enabled'
        xml = tostring(elem)
        status, headers, body = self.conn.make_request(
            'PUT', src_bucket, body=xml, query='versioning')
        self.assertEqual(status, 200)
        src_obj2 = 'obj5'
        src_content2 = b'stub'
        etags.append(md5(src_content2, usedforsecurity=False).hexdigest())
        # prepare src obj w/ real version
        self.conn.make_request('PUT', src_bucket, src_obj2, body=src_content2)
        _, headers, _ = self.conn.make_request('HEAD', src_bucket, src_obj2)
        self.assertCommonResponseHeaders(headers)
        version_id2 = headers['x-amz-version-id']
        status, headers, body, resp_etag = \
            self._upload_part_copy(src_bucket, src_obj, bucket,
                                   key, upload_id, 1,
                                   src_version_id='null')
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        # The part etag comes back in the XML body, not the headers.
        self.assertTrue('etag' not in headers)
        elem = fromstring(body, 'CopyPartResult')
        copy_resp_last_modifieds = [elem.find('LastModified').text]
        self.assertTrue(copy_resp_last_modifieds[0] is not None)
        self.assertEqual(resp_etag, etags[0])
        status, headers, body, resp_etag = \
            self._upload_part_copy(src_bucket, src_obj2, bucket,
                                   key, upload_id, 2,
                                   src_version_id=version_id2)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'application/xml')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue('etag' not in headers)
        elem = fromstring(body, 'CopyPartResult')
        copy_resp_last_modifieds.append(elem.find('LastModified').text)
        self.assertTrue(copy_resp_last_modifieds[1] is not None)
        self.assertEqual(resp_etag, etags[1])
        # Check last-modified timestamp
        key, upload_id = uploads[0]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('GET', bucket, key, query=query)
        elem = fromstring(body, 'ListPartsResult')
        listing_last_modified = [p.find('LastModified').text
                                 for p in elem.iterfind('Part')]
        self.assertEqual(listing_last_modified, copy_resp_last_modifieds)
        # Abort Multipart Upload
        key, upload_id = uploads[0]
        query = 'uploadId=%s' % upload_id
        status, headers, body = \
            self.conn.make_request('DELETE', bucket, key, query=query)
        # sanity checks
        self.assertEqual(status, 204)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-type' in headers)
        self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '0')
class TestS3ApiMultiUploadSigV4(TestS3ApiMultiUpload):
    """Re-run the multipart-upload tests using AWS Signature Version 4."""

    @classmethod
    def setUpClass(cls):
        # Force the test client to sign requests with SigV4.
        os.environ['S3_USE_SIGV4'] = "True"

    @classmethod
    def tearDownClass(cls):
        # Restore default signing for subsequent test modules.
        del os.environ['S3_USE_SIGV4']

    def setUp(self):
        super(TestS3ApiMultiUploadSigV4, self).setUp()

    def test_object_multi_upload_part_copy_range(self):
        """Run the parent copy-range test, skipping on broken boto 2.x."""
        if StrictVersion(boto.__version__) < StrictVersion('3.0'):
            # boto 2 doesn't sort headers properly; see
            # https://github.com/boto/boto/pull/3032
            # or https://github.com/boto/boto/pull/3176
            # or https://github.com/boto/boto/pull/3751
            # or https://github.com/boto/boto/pull/3824
            self.skipTest('This stuff got the issue of boto<=2.x')
        # Previously this override returned here without running anything,
        # so the test passed vacuously for capable clients; actually run
        # the parent test when the client can sign correctly.
        super(TestS3ApiMultiUploadSigV4,
              self).test_object_multi_upload_part_copy_range()

    def test_delete_bucket_multi_upload_object_exisiting(self):
        """
        A bucket holding a completed multipart object cannot be deleted
        until the object is removed via DeleteMultipleObjects.
        (Method name keeps the historical "exisiting" typo so the test id
        stays stable for test selection.)
        """
        bucket = 'bucket'
        keys = ['obj1']
        uploads = []
        results_generator = self._initiate_multi_uploads_result_generator(
            bucket, keys)
        # Initiate Multipart Upload
        for expected_key, (status, _, body) in \
                zip(keys, results_generator):
            self.assertEqual(status, 200)  # sanity
            elem = fromstring(body, 'InitiateMultipartUploadResult')
            key = elem.find('Key').text
            self.assertEqual(expected_key, key)  # sanity
            upload_id = elem.find('UploadId').text
            self.assertTrue(upload_id is not None)  # sanity
            self.assertTrue((key, upload_id) not in uploads)
            uploads.append((key, upload_id))
        self.assertEqual(len(uploads), len(keys))  # sanity
        # Upload Part
        key, upload_id = uploads[0]
        content = b'a' * self.min_segment_size
        status, headers, body = \
            self._upload_part(bucket, key, upload_id, content)
        self.assertEqual(status, 200)
        # Complete Multipart Upload
        key, upload_id = uploads[0]
        etags = [md5(content, usedforsecurity=False).hexdigest()]
        xml = self._gen_comp_xml(etags)
        status, headers, body = \
            self._complete_multi_upload(bucket, key, upload_id, xml)
        self.assertEqual(status, 200)  # sanity
        # GET multipart object
        status, headers, body = \
            self.conn.make_request('GET', bucket, key)
        self.assertEqual(status, 200)  # sanity
        self.assertEqual(content, body)  # sanity
        # DELETE bucket while the object existing
        status, headers, body = \
            self.conn.make_request('DELETE', bucket)
        self.assertEqual(status, 409)  # sanity
        # The object must still be there.
        status, headers, body = \
            self.conn.make_request('GET', bucket, key)
        self.assertEqual(status, 200)  # sanity
        self.assertEqual(content, body)  # sanity
        # Can delete it with DeleteMultipleObjects request
        elem = Element('Delete')
        SubElement(elem, 'Quiet').text = 'true'
        obj_elem = SubElement(elem, 'Object')
        SubElement(obj_elem, 'Key').text = key
        body = tostring(elem, use_s3ns=False)
        status, headers, body = self.conn.make_request(
            'POST', bucket, body=body, query='delete',
            headers={'Content-MD5': calculate_md5(body)})
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        status, headers, body = \
            self.conn.make_request('GET', bucket, key)
        self.assertEqual(status, 404)  # sanity
        # Now we can delete
        status, headers, body = \
            self.conn.make_request('DELETE', bucket)
        self.assertEqual(status, 204)  # sanity
# Allow running this module's tests directly with "python <file>".
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/s3api/test_multi_upload.py |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
import unittest
import os
import test.functional as tf
from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement
from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection
from test.functional.s3api.utils import get_error_code, calculate_md5
def setUpModule():
    # Bring up the functional-test environment once for this module.
    tf.setup_package()
def tearDownModule():
    # Tear the functional-test environment back down.
    tf.teardown_package()
class TestS3ApiMultiDelete(S3ApiBase):
    """Functional tests for the S3 DeleteMultipleObjects (multi-delete) API."""

    def _prepare_test_delete_multi_objects(self, bucket, objects):
        """Create *bucket* and PUT each name in *objects* into it."""
        self.conn.make_request('PUT', bucket)
        for obj in objects:
            self.conn.make_request('PUT', bucket, obj)

    def _gen_multi_delete_xml(self, objects, quiet=None):
        """Build a Delete request body listing *objects*; optional Quiet."""
        elem = Element('Delete')
        if quiet:
            SubElement(elem, 'Quiet').text = quiet
        for key in objects:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = key
        return tostring(elem, use_s3ns=False)

    def _gen_invalid_multi_delete_xml(self, hasObjectTag=False):
        """Build a Delete body with no Object tag, or one with an empty Key."""
        elem = Element('Delete')
        if hasObjectTag:
            obj = SubElement(elem, 'Object')
            SubElement(obj, 'Key').text = ''
        return tostring(elem, use_s3ns=False)

    def _test_delete_multi_objects(self, with_non_ascii=False):
        """
        Exercise multi-delete with one object, several objects, a mix of
        existing and missing objects, and only-missing objects; S3 reports
        missing keys as Deleted too.
        """
        bucket = 'bucket'
        if with_non_ascii:
            put_objects = [u'\N{SNOWMAN}obj%s' % var for var in range(4)]
        else:
            put_objects = ['obj%s' % var for var in range(4)]
        self._prepare_test_delete_multi_objects(bucket, put_objects)
        query = 'delete'
        # Delete an object via MultiDelete API
        req_objects = put_objects[:1]
        xml = self._gen_multi_delete_xml(req_objects)
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue(headers['content-type'] is not None)
        self.assertEqual(headers['content-length'], str(len(body)))
        elem = fromstring(body)
        resp_objects = elem.findall('Deleted')
        self.assertEqual(len(resp_objects), len(req_objects))
        for o in resp_objects:
            key = o.find('Key').text
            if six.PY2:
                key = key.decode('utf-8')
            self.assertTrue(key in req_objects)
        # Delete 2 objects via MultiDelete API
        req_objects = put_objects[1:3]
        xml = self._gen_multi_delete_xml(req_objects)
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(status, 200)
        elem = fromstring(body, 'DeleteResult')
        resp_objects = elem.findall('Deleted')
        self.assertEqual(len(resp_objects), len(req_objects))
        for o in resp_objects:
            key = o.find('Key').text
            if six.PY2:
                key = key.decode('utf-8')
            self.assertTrue(key in req_objects)
        if with_non_ascii:
            fake_objs = [u'\N{SNOWMAN}obj%s' % var for var in range(4, 6)]
        else:
            fake_objs = ['obj%s' % var for var in range(4, 6)]
        # Delete 2 objects via MultiDelete API but one (obj4) doesn't exist.
        req_objects = [put_objects[-1], fake_objs[0]]
        xml = self._gen_multi_delete_xml(req_objects)
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(status, 200)
        elem = fromstring(body, 'DeleteResult')
        resp_objects = elem.findall('Deleted')
        # S3 assumes a NoSuchKey object as deleted.
        self.assertEqual(len(resp_objects), len(req_objects))
        for o in resp_objects:
            key = o.find('Key').text
            if six.PY2:
                key = key.decode('utf-8')
            self.assertTrue(key in req_objects)
        # Delete 2 objects via MultiDelete API but no objects exist
        req_objects = fake_objs[:2]
        xml = self._gen_multi_delete_xml(req_objects)
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(status, 200)
        elem = fromstring(body, 'DeleteResult')
        resp_objects = elem.findall('Deleted')
        self.assertEqual(len(resp_objects), len(req_objects))
        for o in resp_objects:
            key = o.find('Key').text
            if six.PY2:
                key = key.decode('utf-8')
            self.assertTrue(key in req_objects)

    def test_delete_multi_objects(self):
        self._test_delete_multi_objects()

    def test_delete_multi_objects_with_non_ascii(self):
        self._test_delete_multi_objects(with_non_ascii=True)

    def test_delete_multi_objects_error(self):
        """
        Multi-delete must fail with the expected error codes for bad
        credentials, unknown bucket, malformed XML, empty keys, and
        requests exceeding the object-count or size limits.
        """
        bucket = 'bucket'
        put_objects = ['obj']
        self._prepare_test_delete_multi_objects(bucket, put_objects)
        xml = self._gen_multi_delete_xml(put_objects)
        content_md5 = calculate_md5(xml)
        query = 'delete'
        auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            auth_error_conn.make_request('POST', bucket, body=xml,
                                         headers={
                                             'Content-MD5': content_md5
                                         },
                                         query=query)
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        status, headers, body = \
            self.conn.make_request('POST', 'nothing', body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(get_error_code(body), 'NoSuchBucket')
        # without Object tag
        xml = self._gen_invalid_multi_delete_xml()
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(get_error_code(body), 'MalformedXML')
        # without value of Key tag
        xml = self._gen_invalid_multi_delete_xml(hasObjectTag=True)
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(get_error_code(body), 'UserKeyMustBeSpecified')
        max_deletes = int(tf.cluster_info.get('s3api', {}).get(
            'max_multi_delete_objects', 1000))
        # specified number of objects are over max_multi_delete_objects
        # (Default 1000), but xml size is relatively small
        # (Fixed a missing "% var": the list previously repeated the
        # literal name 'obj%s'; only the element count matters here, but
        # distinct names match the clear intent.)
        req_objects = ['obj%s' % var for var in range(max_deletes + 1)]
        xml = self._gen_multi_delete_xml(req_objects)
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(get_error_code(body), 'MalformedXML')
        # specified xml size is large, but number of objects are
        # smaller than max_multi_delete_objects.
        obj = 'a' * 102400
        req_objects = [obj + str(var) for var in range(max_deletes - 1)]
        xml = self._gen_multi_delete_xml(req_objects)
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(get_error_code(body), 'MalformedXML')

    def test_delete_multi_objects_with_quiet(self):
        """Quiet=true suppresses Deleted entries; Quiet=false reports them."""
        bucket = 'bucket'
        put_objects = ['obj']
        query = 'delete'
        # with Quiet true
        quiet = 'true'
        self._prepare_test_delete_multi_objects(bucket, put_objects)
        xml = self._gen_multi_delete_xml(put_objects, quiet)
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(status, 200)
        elem = fromstring(body, 'DeleteResult')
        resp_objects = elem.findall('Deleted')
        self.assertEqual(len(resp_objects), 0)
        # with Quiet false
        quiet = 'false'
        self._prepare_test_delete_multi_objects(bucket, put_objects)
        xml = self._gen_multi_delete_xml(put_objects, quiet)
        content_md5 = calculate_md5(xml)
        status, headers, body = \
            self.conn.make_request('POST', bucket, body=xml,
                                   headers={'Content-MD5': content_md5},
                                   query=query)
        self.assertEqual(status, 200)
        elem = fromstring(body, 'DeleteResult')
        resp_objects = elem.findall('Deleted')
        self.assertEqual(len(resp_objects), 1)
class TestS3ApiMultiDeleteSigV4(TestS3ApiMultiDelete):
    """Repeat the multi-delete tests with AWS Signature Version 4."""

    @classmethod
    def setUpClass(cls):
        # Force the test client to sign requests with SigV4.
        os.environ['S3_USE_SIGV4'] = "True"

    @classmethod
    def tearDownClass(cls):
        # Remove the override so later modules use default signing.
        os.environ.pop('S3_USE_SIGV4')

    def setUp(self):
        super(TestS3ApiMultiDeleteSigV4, self).setUp()
# Allow running this module's tests directly with "python <file>".
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/s3api/test_multi_delete.py |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
import test.functional as tf
from swift.common.middleware.s3api.etree import fromstring
from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection
from test.functional.s3api.utils import get_error_code
def setUpModule():
    # Bring up the functional-test environment once for this module.
    tf.setup_package()
def tearDownModule():
    # Tear the functional-test environment back down.
    tf.teardown_package()
class TestS3ApiService(S3ApiBase):
    """Functional tests for the S3 GET Service (list-all-buckets) API."""
    def setUp(self):
        super(TestS3ApiService, self).setUp()
    def test_service(self):
        """GET Service lists no buckets initially, then the created ones."""
        # GET Service(without bucket)
        status, headers, body = self.conn.make_request('GET')
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue(headers['content-type'] is not None)
        # TODO; requires consideration
        # self.assertEqual(headers['transfer-encoding'], 'chunked')
        elem = fromstring(body, 'ListAllMyBucketsResult')
        buckets = elem.findall('./Buckets/Bucket')
        self.assertEqual(list(buckets), [])
        owner = elem.find('Owner')
        self.assertEqual(self.conn.user_id, owner.find('ID').text)
        self.assertEqual(self.conn.user_id, owner.find('DisplayName').text)
        # GET Service(with Bucket)
        req_buckets = ('bucket', 'bucket2')
        for bucket in req_buckets:
            self.conn.make_request('PUT', bucket)
        status, headers, body = self.conn.make_request('GET')
        self.assertEqual(status, 200)
        elem = fromstring(body, 'ListAllMyBucketsResult')
        resp_buckets = elem.findall('./Buckets/Bucket')
        self.assertEqual(len(list(resp_buckets)), 2)
        for b in resp_buckets:
            self.assertTrue(b.find('Name').text in req_buckets)
            self.assertTrue(b.find('CreationDate') is not None)
    def test_service_error_signature_not_match(self):
        """Bad credentials yield SignatureDoesNotMatch with an XML body."""
        auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = auth_error_conn.make_request('GET')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        self.assertEqual(headers['content-type'], 'application/xml')
    def test_service_error_no_date_header(self):
        """A request with neither Date nor x-amz-date is denied."""
        # Without x-amz-date/Date header, that makes 403 forbidden
        status, headers, body = self.conn.make_request(
            'GET', headers={'Date': '', 'x-amz-date': ''})
        self.assertEqual(status, 403)
        self.assertEqual(get_error_code(body), 'AccessDenied')
        self.assertIn(b'AWS authentication requires a valid Date '
                      b'or x-amz-date header', body)
class TestS3ApiServiceSigV4(TestS3ApiService):
    """Repeat the GET Service tests with AWS Signature Version 4."""

    @classmethod
    def setUpClass(cls):
        # Force the test client to sign requests with SigV4.
        os.environ['S3_USE_SIGV4'] = "True"

    @classmethod
    def tearDownClass(cls):
        # Remove the override so later modules use default signing.
        os.environ.pop('S3_USE_SIGV4')

    def setUp(self):
        super(TestS3ApiServiceSigV4, self).setUp()
# Allow running this module's tests directly with "python <file>".
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/s3api/test_service.py |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
from six.moves.urllib.parse import urlparse
import test.functional as tf
import boto3
from botocore.exceptions import ClientError
from boto.s3.connection import S3Connection, OrdinaryCallingFormat, \
S3ResponseError
import six
import sys
import traceback
# Number of attempts used both for request retries and for the
# cleanup loop in Connection.reset().
RETRY_COUNT = 3
# Optionally quiet the very chatty boto loggers during test runs.
if os.environ.get('SWIFT_TEST_QUIET_BOTO_LOGS'):
    logging.getLogger('boto').setLevel(logging.INFO)
    logging.getLogger('botocore').setLevel(logging.INFO)
    logging.getLogger('boto3').setLevel(logging.INFO)
def setUpModule():
    # Bring up the functional-test environment once for this module.
    tf.setup_package()
def tearDownModule():
    # Tear the functional-test environment back down.
    tf.teardown_package()
class Connection(object):
    """
    Connection class used for S3 functional testing.
    """
    def __init__(self, aws_access_key,
                 aws_secret_key,
                 user_id=None):
        """
        Initialize method.
        :param aws_access_key: a string of aws access key
        :param aws_secret_key: a string of aws secret key
        :param user_id: a string consists of TENANT and USER name used for
                        asserting Owner ID (not required S3Connection)
        In default, Connection class will be initialized as tester user
        behaves as:
        user_test_tester = testing .admin
        """
        self.aws_access_key = aws_access_key
        self.aws_secret_key = aws_secret_key
        self.user_id = user_id or aws_access_key
        # Endpoint host/port come from the configured storage URL.
        parsed = urlparse(tf.config['s3_storage_url'])
        self.host = parsed.hostname
        self.port = parsed.port
        self.conn = \
            S3Connection(aws_access_key, aws_secret_key,
                         is_secure=(parsed.scheme == 'https'),
                         host=self.host, port=self.port,
                         calling_format=OrdinaryCallingFormat())
        self.conn.auth_region_name = tf.config.get('s3_region', 'us-east-1')
    def reset(self):
        """
        Reset all swift environment to keep clean. As a result by calling this
        method, we can assume the backend swift keeps no containers and no
        objects on this connection's account.
        """
        exceptions = []
        # Retry the whole sweep: listings may be stale, so a bucket can
        # appear non-empty on one pass and delete cleanly on the next.
        for i in range(RETRY_COUNT):
            try:
                buckets = self.conn.get_all_buckets()
                if not buckets:
                    break
                for bucket in buckets:
                    if six.PY2 and not isinstance(bucket.name, bytes):
                        bucket.name = bucket.name.encode('utf-8')
                    try:
                        # Cancel in-flight uploads and delete every object
                        # version before attempting the bucket delete.
                        for upload in bucket.list_multipart_uploads():
                            upload.cancel_upload()
                        for obj in bucket.list_versions():
                            if six.PY2:
                                if not isinstance(obj.name, bytes):
                                    obj.name = obj.name.encode('utf-8')
                                if obj.version_id is not None and \
                                        not isinstance(obj.version_id, bytes):
                                    obj.version_id = \
                                        obj.version_id.encode('utf-8')
                            bucket.delete_key(
                                obj.name, version_id=obj.version_id)
                        try:
                            self.conn.delete_bucket(bucket.name)
                        except ClientError as e:
                            err_code = e.response.get('Error', {}).get('Code')
                            if err_code != 'BucketNotEmpty':
                                raise
                            # else, listing consistency issue; try again
                    except S3ResponseError as e:
                        # 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
                        if e.status != 404:
                            raise
            except Exception:
                # Remember the traceback and retry; only give up after
                # RETRY_COUNT consecutive failing sweeps.
                exceptions.append(''.join(
                    traceback.format_exception(*sys.exc_info())))
        if exceptions:
            exceptions.insert(0, 'Too many errors to continue:')
            raise Exception('\n========\n'.join(exceptions))
    def make_request(self, method, bucket='', obj='', headers=None, body=b'',
                     query=None):
        """
        Wrapper method of S3Connection.make_request.
        :param method: a string of HTTP request method
        :param bucket: a string of bucket name
        :param obj: a string of object name
        :param headers: a dictionary of headers
        :param body: a string of data binary sent to S3 as a request body
        :param query: a string of HTTP query argument
        :returns: a tuple of (int(status_code), headers dict, response body)
        """
        response = \
            self.conn.make_request(method, bucket=bucket, key=obj,
                                   headers=headers, data=body,
                                   query_args=query, sender=None,
                                   override_num_retries=RETRY_COUNT,
                                   retry_handler=None)
        return (response.status,
                {h.lower(): v for h, v in response.getheaders()},
                response.read())
    def generate_url_and_headers(self, method, bucket='', obj='',
                                 expires_in=3600):
        """
        Return a presigned URL for *method* on *bucket*/*obj* plus any
        extra headers the caller must send with it.
        """
        url = self.conn.generate_url(expires_in, method, bucket, obj)
        if os.environ.get('S3_USE_SIGV4') == "True":
            # V4 signatures are known-broken in boto, but we can work around it
            if url.startswith('https://') and not tf.config[
                    's3_storage_url'].startswith('https://'):
                url = 'http://' + url[8:]
            if self.port is None:
                return url, {}
            else:
                # NOTE(review): the port appears twice on purpose-looking
                # grounds — presumably to match the Host value boto's broken
                # V4 presigned-URL code signed over, so the signature
                # validates; confirm against boto's behavior before "fixing"
                # the apparent duplication.
                return url, {'Host': '%(host)s:%(port)d:%(port)d' % {
                    'host': self.host, 'port': self.port}}
        return url, {}
def get_boto3_conn(aws_access_key, aws_secret_key):
    """
    Build a boto3 S3 client pointed at the cluster under test, using
    path-style addressing.

    :param aws_access_key: access key string
    :param aws_secret_key: secret key string
    :returns: a boto3 's3' client
    """
    endpoint = tf.config['s3_storage_url']
    client_config = boto3.session.Config(s3={'addressing_style': 'path'})
    return boto3.client(
        's3',
        aws_access_key_id=aws_access_key,
        aws_secret_access_key=aws_secret_key,
        config=client_config,
        region_name=tf.config.get('s3_region', 'us-east-1'),
        use_ssl=endpoint.startswith('https:'),
        endpoint_url=endpoint)
def tear_down_s3(conn):
    """
    Reset all swift environment to keep clean. As a result by calling this
    method, we can assume the backend swift keeps no containers and no
    objects on this connection's account.

    :param conn: a boto3 S3 client, as returned by get_boto3_conn()
    :raises Exception: if the account could not be emptied after RETRY_COUNT
                       attempts; the message aggregates every captured
                       traceback.
    """
    exceptions = []
    for i in range(RETRY_COUNT):
        try:
            resp = conn.list_buckets()
            buckets = [bucket['Name'] for bucket in resp.get('Buckets', [])]
            if not buckets:
                # Account is already clean; don't burn the remaining
                # retries re-listing it (mirrors Connection.reset above).
                break
            for bucket in buckets:
                try:
                    # Abort in-progress multipart uploads first; they keep
                    # a bucket non-empty.
                    resp = conn.list_multipart_uploads(Bucket=bucket)
                    for upload in resp.get('Uploads', []):
                        conn.abort_multipart_upload(
                            Bucket=bucket,
                            Key=upload['Key'],
                            UploadId=upload['UploadId'])
                    resp = conn.list_objects(Bucket=bucket)
                    for obj in resp.get('Contents', []):
                        conn.delete_object(Bucket=bucket, Key=obj['Key'])
                    try:
                        conn.delete_bucket(Bucket=bucket)
                    except ClientError as e:
                        err_code = e.response.get('Error', {}).get('Code')
                        if err_code != 'BucketNotEmpty':
                            raise
                        # else, listing consistency issue; try again
                except ClientError as e:
                    # 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
                    if e.response['ResponseMetadata']['HTTPStatusCode'] != 404:
                        raise
        except Exception:
            # Record the failure and retry; raise the aggregate only after
            # all attempts are exhausted.
            exceptions.append(''.join(
                traceback.format_exception(*sys.exc_info())))
    if exceptions:
        exceptions.insert(0, 'Too many errors to continue:')
        raise Exception('\n========\n'.join(exceptions))
# TODO: make sure where this function is used
def get_admin_connection():
    """
    Return a Connection built from the admin credentials in the test config;
    behaves as:
        user_test_admin = admin .admin
    """
    access_key = tf.config['s3_access_key']
    secret_key = tf.config['s3_secret_key']
    # The admin's owner ID is simply its access key.
    return Connection(access_key, secret_key, user_id=access_key)
| swift-master | test/functional/s3api/s3_test_client.py |
# Copyright (c) 2011-2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import traceback
from contextlib import contextmanager
import logging
from unittest import SkipTest
import test.functional as tf
from test.functional.s3api.s3_test_client import (
Connection, get_boto3_conn, tear_down_s3)
def setUpModule():
    # Standard functional-test hook: verify/prepare the cluster environment
    # once before any test in this module runs.
    tf.setup_package()


def tearDownModule():
    # Release whatever setup_package() acquired once the module finishes.
    tf.teardown_package()
class S3ApiBase(unittest.TestCase):
    """Base class for s3api functional tests using the boto2 client."""

    def __init__(self, method_name):
        super(S3ApiBase, self).__init__(method_name)
        # Remember the test method name for use in failure messages.
        self.method_name = method_name

    @contextmanager
    def quiet_boto_logging(self):
        """Temporarily raise boto's log level to INFO to silence its very
        chatty DEBUG output; DEBUG is restored on exit."""
        try:
            logging.getLogger('boto').setLevel(logging.INFO)
            yield
        finally:
            logging.getLogger('boto').setLevel(logging.DEBUG)

    def setUp(self):
        """Skip unless an s3api user and middleware are configured, then
        connect and reset the account to a clean state."""
        if not tf.config.get('s3_access_key'):
            raise SkipTest('no s3api user configured')
        if 's3api' not in tf.cluster_info:
            raise SkipTest('s3api middleware is not enabled')
        if tf.config.get('account'):
            user_id = '%s:%s' % (tf.config['account'], tf.config['username'])
        else:
            user_id = tf.config['username']
        try:
            self.conn = Connection(
                tf.config['s3_access_key'], tf.config['s3_secret_key'],
                user_id=user_id)
            self.conn.reset()
        except Exception:
            message = '%s got an error during initialize process.\n\n%s' % \
                (self.method_name, traceback.format_exc())
            # TODO: Find a way to make this go to FAIL instead of Error
            self.fail(message)

    def assertCommonResponseHeaders(self, headers, etag=None):
        """
        asserting common response headers with args
        :param headers: a dict of response headers
        :param etag: a string of md5(content).hexdigest() if not given,
                     this won't assert anything about etag. (e.g. DELETE obj)
        """
        # Use the dedicated assert methods for clearer failure messages.
        self.assertIsNotNone(headers['x-amz-id-2'])
        self.assertIsNotNone(headers['x-amz-request-id'])
        self.assertIsNotNone(headers['date'])
        # TODO; requires consideration
        # self.assertTrue(headers['server'] is not None)
        if etag is not None:
            self.assertIn('etag', headers)  # sanity
            self.assertEqual(etag, headers['etag'].strip('"'))
class S3ApiBaseBoto3(S3ApiBase):
    """Variant of S3ApiBase that talks to the cluster via a boto3 client."""

    def setUp(self):
        if not tf.config.get('s3_access_key'):
            raise SkipTest('no s3api user configured')
        if 's3api' not in tf.cluster_info:
            raise SkipTest('s3api middleware is not enabled')
        try:
            conn = get_boto3_conn(
                tf.config['s3_access_key'], tf.config['s3_secret_key'])
            self.conn = conn
            # Dig the effective endpoint/credentials back out of the
            # client's private attributes for use in later assertions.
            self.endpoint_url = conn._endpoint.host
            self.access_key = conn._request_signer._credentials.access_key
            self.region = conn._client_config.region_name
            tear_down_s3(conn)
        except Exception:
            message = '%s got an error during initialize process.\n\n%s' % \
                (self.method_name, traceback.format_exc())
            # TODO: Find a way to make this go to FAIL instead of Error
            self.fail(message)

    def tearDown(self):
        # Leave the account empty for whichever test runs next.
        tear_down_s3(self.conn)
| swift-master | test/functional/s3api/__init__.py |
#!/usr/bin/env python
# Copyright (c) 2022 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import requests
import botocore
import test.functional as tf
from test.functional.s3api import S3ApiBaseBoto3
def setUpModule():
    # Standard functional-test hook: verify/prepare the cluster environment
    # once before any test in this module runs.
    tf.setup_package()


def tearDownModule():
    # Release whatever setup_package() acquired once the module finishes.
    tf.teardown_package()
class TestS3ApiXxeInjection(S3ApiBaseBoto3):
    """
    Verify the s3api XML parsers do not resolve external entities (XXE).

    Each request below embeds a DOCTYPE whose entity tries to read
    /etc/swift/swift.conf; every assertion checks that neither the entity
    name nor the file's contents (e.g. '[swift-hash]') leak into the
    response or stored state.
    """

    def setUp(self):
        super(TestS3ApiXxeInjection, self).setUp()
        self.bucket = 'test-s3api-xxe-injection'

    def _create_bucket(self, **kwargs):
        # Sanity: create the bucket the individual tests operate on.
        resp = self.conn.create_bucket(Bucket=self.bucket, **kwargs)
        response_metadata = resp.pop('ResponseMetadata', {})
        self.assertEqual(200, response_metadata.get('HTTPStatusCode'))

    @staticmethod
    def _clear_data(request, **_kwargs):
        # Strip the request body before signing so the presigned URL stays
        # valid no matter what payload we PUT/POST later.
        request.data = b''

    def _presign_url(self, method, key=None, **kwargs):
        """Return a presigned URL for *method*; the body is excluded from
        the signature via the before-sign event hook."""
        params = {
            'Bucket': self.bucket
        }
        if key:
            params['Key'] = key
        params.update(kwargs)
        try:
            # https://github.com/boto/boto3/issues/2192
            self.conn.meta.events.register(
                'before-sign.s3.*', self._clear_data)
            return self.conn.generate_presigned_url(
                method, Params=params, ExpiresIn=60)
        finally:
            self.conn.meta.events.unregister(
                'before-sign.s3.*', self._clear_data)

    def test_put_bucket_acl(self):
        """XXE payload in an ACL document must not leak into the stored ACL."""
        if not tf.cluster_info['s3api'].get('s3_acl'):
            self.skipTest('s3_acl must be enabled')

        self._create_bucket()

        url = self._presign_url('put_bucket_acl')
        resp = requests.put(url, data="""
<!DOCTYPE foo [<!ENTITY xxe SYSTEM "file:///etc/swift/swift.conf"> ]>
<AccessControlPolicy xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Owner>
<DisplayName>test:tester</DisplayName>
<ID>test:tester</ID>
</Owner>
<AccessControlList>
<Grant>
<Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser">
<DisplayName>name&xxe;</DisplayName>
<ID>id&xxe;</ID>
</Grantee>
<Permission>WRITE</Permission>
</Grant>
</AccessControlList>
</AccessControlPolicy>
""")  # noqa: E501
        self.assertEqual(200, resp.status_code)
        self.assertNotIn(b'xxe', resp.content)
        self.assertNotIn(b'[swift-hash]', resp.content)

        # The entity must have been dropped, leaving just the literal
        # surrounding text ('id'/'name' prefixes) in the stored grantee.
        acl = self.conn.get_bucket_acl(Bucket=self.bucket)
        response_metadata = acl.pop('ResponseMetadata', {})
        self.assertEqual(200, response_metadata.get('HTTPStatusCode'))
        self.assertDictEqual({
            'Owner': {
                'DisplayName': 'test:tester',
                'ID': 'test:tester'
            },
            'Grants': [
                {
                    'Grantee': {
                        'DisplayName': 'id',
                        'ID': 'id',
                        'Type': 'CanonicalUser'
                    },
                    'Permission': 'WRITE'
                }
            ]
        }, acl)

    def test_create_bucket(self):
        """XXE payload in a LocationConstraint must be rejected, not echoed."""
        url = self._presign_url('create_bucket')
        resp = requests.put(url, data="""
<!DOCTYPE foo [<!ENTITY xxe SYSTEM "file:///etc/swift/swift.conf"> ]>
<CreateBucketConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<LocationConstraint>&xxe;</LocationConstraint>
</CreateBucketConfiguration>
""")  # noqa: E501
        self.assertEqual(400, resp.status_code)
        self.assertNotIn(b'xxe', resp.content)
        self.assertNotIn(b'[swift-hash]', resp.content)

        # The bucket must not have been created by the failed request.
        self.assertRaisesRegex(
            botocore.exceptions.ClientError, 'Not Found',
            self.conn.head_bucket, Bucket=self.bucket)

    def test_delete_objects(self):
        """XXE payload in a multi-delete document must be rejected."""
        self._create_bucket()

        url = self._presign_url(
            'delete_objects',
            Delete={
                'Objects': [
                    {
                        'Key': 'string',
                        'VersionId': 'string'
                    }
                ]
            })
        body = """
<!DOCTYPE foo [<!ENTITY xxe SYSTEM "file:///etc/swift/swift.conf"> ]>
<Delete xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Object>
<Key>&xxe;</Key>
</Object>
</Delete>
"""
        body = body.encode('utf-8')
        resp = requests.post(url, data=body)
        self.assertEqual(400, resp.status_code, resp.content)
        self.assertNotIn(b'xxe', resp.content)
        self.assertNotIn(b'[swift-hash]', resp.content)

    def test_complete_multipart_upload(self):
        """XXE payloads in CompleteMultipartUpload must be rejected."""
        self._create_bucket()

        resp = self.conn.create_multipart_upload(
            Bucket=self.bucket, Key='test')
        response_metadata = resp.pop('ResponseMetadata', {})
        self.assertEqual(200, response_metadata.get('HTTPStatusCode'))
        uploadid = resp.get('UploadId')

        try:
            url = self._presign_url(
                'complete_multipart_upload',
                Key='key',
                MultipartUpload={
                    'Parts': [
                        {
                            'ETag': 'string',
                            'PartNumber': 1
                        }
                    ],
                },
                UploadId=uploadid)
            # NOTE(review): '{uploadid}' below is a literal, not a formatted
            # value -- presumably intentional since the request is expected
            # to fail on the &xxe; PartNumber anyway; confirm intent.
            resp = requests.post(url, data="""
<!DOCTYPE foo [<!ENTITY xxe SYSTEM "file:///etc/swift/swift.conf"> ]>
<CompleteMultipartUpload xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Part>
<ETag>"{uploadid}"</ETag>
<PartNumber>&xxe;</PartNumber>
</Part>
</CompleteMultipartUpload>
""")  # noqa: E501
            self.assertEqual(404, resp.status_code)
            self.assertNotIn(b'xxe', resp.content)
            self.assertNotIn(b'[swift-hash]', resp.content)

            resp = requests.post(url, data="""
<!DOCTYPE foo [<!ENTITY xxe SYSTEM "file:///etc/swift/swift.conf"> ]>
<CompleteMultipartUpload xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Part>
<ETag>"&xxe;"</ETag>
<PartNumber>1</PartNumber>
</Part>
</CompleteMultipartUpload>
""")  # noqa: E501
            self.assertEqual(404, resp.status_code)
            self.assertNotIn(b'xxe', resp.content)
            self.assertNotIn(b'[swift-hash]', resp.content)
        finally:
            # Always abort the upload so tearDown can delete the bucket.
            resp = self.conn.abort_multipart_upload(
                Bucket=self.bucket, Key='test', UploadId=uploadid)
            response_metadata = resp.pop('ResponseMetadata', {})
            self.assertEqual(204, response_metadata.get('HTTPStatusCode'))

    def test_put_bucket_versioning(self):
        """XXE payload in a versioning document must be rejected and must
        not alter the bucket's versioning state."""
        if 'object_versioning' not in tf.cluster_info:
            raise tf.SkipTest('S3 versioning requires that Swift object '
                              'versioning be enabled')
        self._create_bucket()

        url = self._presign_url(
            'put_bucket_versioning',
            VersioningConfiguration={
                'Status': 'Enabled'
            })
        resp = requests.put(url, data="""
<!DOCTYPE foo [<!ENTITY xxe SYSTEM "file:///etc/swift/swift.conf"> ]>
<VersioningConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Status>&xxe;</Status>
</VersioningConfiguration>
""")  # noqa: E501
        self.assertEqual(400, resp.status_code)
        self.assertNotIn(b'xxe', resp.content)
        self.assertNotIn(b'[swift-hash]', resp.content)

        versioning = self.conn.get_bucket_versioning(Bucket=self.bucket)
        response_metadata = versioning.pop('ResponseMetadata', {})
        self.assertEqual(200, response_metadata.get('HTTPStatusCode'))
        self.assertDictEqual({}, versioning)
| swift-master | test/functional/s3api/test_xxe_injection.py |
# Copyright (c) 2017 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement
import test.functional as tf
from test.functional.s3api import S3ApiBase
from test.functional.s3api.utils import get_error_code
def setUpModule():
    # Standard functional-test hook: verify/prepare the cluster environment
    # once before any test in this module runs.
    tf.setup_package()


def tearDownModule():
    # Release whatever setup_package() acquired once the module finishes.
    tf.teardown_package()
class TestS3ApiVersioning(S3ApiBase):
    """Functional tests for PUT/GET bucket versioning through the S3 API."""

    def setUp(self):
        super(TestS3ApiVersioning, self).setUp()
        if 'object_versioning' not in tf.cluster_info:
            # Alternatively, maybe we should assert we get 501s...
            self.skipTest('S3 versioning requires that Swift object '
                          'versioning be enabled')
        status, headers, body = self.conn.make_request('PUT', 'bucket')
        self.assertEqual(status, 200)

    def tearDown(self):
        # TODO: is this necessary on AWS? or can you delete buckets while
        # versioning is enabled?
        status, headers, body = self._put_versioning('Suspended')
        self.assertEqual(status, 200)

        status, headers, body = self.conn.make_request('DELETE', 'bucket')
        self.assertEqual(status, 204)
        super(TestS3ApiVersioning, self).tearDown()

    def _put_versioning(self, status_text,
                        root_tag='VersioningConfiguration'):
        """PUT a versioning configuration whose Status text is
        *status_text* (and, for the error tests, an arbitrary root tag);
        returns the (status, headers, body) triple."""
        elem = Element(root_tag)
        SubElement(elem, 'Status').text = status_text
        xml = tostring(elem)
        return self.conn.make_request(
            'PUT', 'bucket', body=xml, query='versioning')

    def _get_versioning_elem(self):
        """GET the bucket's versioning configuration; asserts a 200 and
        returns the parsed XML root element."""
        status, headers, body = self.conn.make_request(
            'GET', 'bucket', query='versioning')
        self.assertEqual(status, 200)
        return fromstring(body)

    def test_versioning_put(self):
        # Versioning not configured: GET returns an empty configuration
        elem = self._get_versioning_elem()
        self.assertEqual(list(elem), [])

        # Enable versioning
        status, headers, body = self._put_versioning('Enabled')
        self.assertEqual(status, 200)
        self.assertEqual(
            self._get_versioning_elem().find('./Status').text, 'Enabled')

        # Suspend versioning
        status, headers, body = self._put_versioning('Suspended')
        self.assertEqual(status, 200)
        self.assertEqual(
            self._get_versioning_elem().find('./Status').text, 'Suspended')

        # Resume versioning
        status, headers, body = self._put_versioning('Enabled')
        self.assertEqual(status, 200)
        self.assertEqual(
            self._get_versioning_elem().find('./Status').text, 'Enabled')

    def test_versioning_immediately_suspend(self):
        # Versioning not configured
        elem = self._get_versioning_elem()
        self.assertEqual(list(elem), [])

        # Suspending versioning before it was ever enabled is allowed
        status, headers, body = self._put_versioning('Suspended')
        self.assertEqual(status, 200)
        self.assertEqual(
            self._get_versioning_elem().find('./Status').text, 'Suspended')

        # Enable versioning
        status, headers, body = self._put_versioning('Enabled')
        self.assertEqual(status, 200)
        self.assertEqual(
            self._get_versioning_elem().find('./Status').text, 'Enabled')

    def test_versioning_put_error(self):
        # Root tag is not VersioningConfiguration
        status, headers, body = self._put_versioning(
            'Enabled', root_tag='foo')
        self.assertEqual(status, 400)
        self.assertEqual(get_error_code(body), 'MalformedXML')

        # Status is not "Enabled" or "Suspended"
        for bad_status in ('...', ''):
            status, headers, body = self._put_versioning(bad_status)
            self.assertEqual(status, 400)
            self.assertEqual(get_error_code(body), 'MalformedXML')
| swift-master | test/functional/s3api/test_versioning.py |
# Copyright (c) 2016 SwiftStack, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import requests
from swift.common.bufferedhttp import http_connect_raw
from swift.common.middleware.s3api.etree import fromstring
import test.functional as tf
from test.functional.s3api import S3ApiBase
from test.functional.s3api.utils import get_error_code, get_error_msg
def setUpModule():
    # Standard functional-test hook: verify/prepare the cluster environment
    # once before any test in this module runs.
    tf.setup_package()


def tearDownModule():
    # Release whatever setup_package() acquired once the module finishes.
    tf.teardown_package()
class TestS3ApiPresignedUrls(S3ApiBase):
    """Functional tests for presigned (query-string-authenticated) URLs."""

    def test_bucket(self):
        """Exercise bucket GET and DELETE through presigned URLs."""
        bucket = 'test-bucket'
        req_objects = ('object', 'object2')
        max_bucket_listing = tf.cluster_info['s3api'].get(
            'max_bucket_listing', 1000)

        # GET Bucket (Without Object)
        status, _junk, _junk = self.conn.make_request('PUT', bucket)
        self.assertEqual(status, 200)

        url, headers = self.conn.generate_url_and_headers('GET', bucket)
        resp = requests.get(url, headers=headers)
        self.assertEqual(resp.status_code, 200,
                         'Got %d %s' % (resp.status_code, resp.content))
        self.assertCommonResponseHeaders(resp.headers)
        self.assertIsNotNone(resp.headers['content-type'])
        self.assertEqual(resp.headers['content-length'],
                         str(len(resp.content)))

        elem = fromstring(resp.content, 'ListBucketResult')
        self.assertEqual(elem.find('Name').text, bucket)
        self.assertIsNone(elem.find('Prefix').text)
        self.assertIsNone(elem.find('Marker').text)
        self.assertEqual(elem.find('MaxKeys').text,
                         str(max_bucket_listing))
        self.assertEqual(elem.find('IsTruncated').text, 'false')
        objects = elem.findall('./Contents')
        self.assertEqual(list(objects), [])

        # GET Bucket (With Object)
        for obj in req_objects:
            status, _junk, _junk = self.conn.make_request('PUT', bucket, obj)
            self.assertEqual(
                status, 200,
                'Got %d response while creating %s' % (status, obj))

        resp = requests.get(url, headers=headers)
        self.assertEqual(resp.status_code, 200,
                         'Got %d %s' % (resp.status_code, resp.content))
        self.assertCommonResponseHeaders(resp.headers)
        self.assertIsNotNone(resp.headers['content-type'])
        self.assertEqual(resp.headers['content-length'],
                         str(len(resp.content)))

        elem = fromstring(resp.content, 'ListBucketResult')
        self.assertEqual(elem.find('Name').text, bucket)
        self.assertIsNone(elem.find('Prefix').text)
        self.assertIsNone(elem.find('Marker').text)
        self.assertEqual(elem.find('MaxKeys').text,
                         str(max_bucket_listing))
        self.assertEqual(elem.find('IsTruncated').text, 'false')
        resp_objects = elem.findall('./Contents')
        self.assertEqual(len(list(resp_objects)), 2)
        for o in resp_objects:
            self.assertIn(o.find('Key').text, req_objects)
            self.assertIsNotNone(o.find('LastModified').text)
            self.assertRegex(
                o.find('LastModified').text,
                r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.000Z$')
            self.assertIsNotNone(o.find('ETag').text)
            self.assertEqual(o.find('Size').text, '0')
            # was assertIsNotNone(text is not None), which asserted a bool
            # and could never fail; assert on the text itself
            self.assertIsNotNone(o.find('StorageClass').text)
            self.assertEqual(o.find('Owner/ID').text, self.conn.user_id)
            self.assertEqual(o.find('Owner/DisplayName').text,
                             self.conn.user_id)

        # DELETE Bucket
        for obj in req_objects:
            self.conn.make_request('DELETE', bucket, obj)

        url, headers = self.conn.generate_url_and_headers('DELETE', bucket)
        resp = requests.delete(url, headers=headers)
        self.assertEqual(resp.status_code, 204,
                         'Got %d %s' % (resp.status_code, resp.content))

    def test_expiration_limits(self):
        """Dispatch to the v2 or v4 variant depending on signing mode."""
        if os.environ.get('S3_USE_SIGV4'):
            self._test_expiration_limits_v4()
        else:
            self._test_expiration_limits_v2()

    def _test_expiration_limits_v2(self):
        bucket = 'test-bucket'

        # Expiration date is too far in the future
        url, headers = self.conn.generate_url_and_headers(
            'GET', bucket, expires_in=2 ** 32)
        resp = requests.get(url, headers=headers)
        self.assertEqual(resp.status_code, 403,
                         'Got %d %s' % (resp.status_code, resp.content))
        self.assertEqual(get_error_code(resp.content),
                         'AccessDenied')
        self.assertIn('Invalid date (should be seconds since epoch)',
                      get_error_msg(resp.content))

    def _test_expiration_limits_v4(self):
        bucket = 'test-bucket'

        # Expiration is negative
        url, headers = self.conn.generate_url_and_headers(
            'GET', bucket, expires_in=-1)
        resp = requests.get(url, headers=headers)
        self.assertEqual(resp.status_code, 400,
                         'Got %d %s' % (resp.status_code, resp.content))
        self.assertEqual(get_error_code(resp.content),
                         'AuthorizationQueryParametersError')
        self.assertIn('X-Amz-Expires must be non-negative',
                      get_error_msg(resp.content))

        # Expiration date is too far in the future
        for exp in (7 * 24 * 60 * 60 + 1,
                    2 ** 63 - 1):
            url, headers = self.conn.generate_url_and_headers(
                'GET', bucket, expires_in=exp)
            resp = requests.get(url, headers=headers)
            self.assertEqual(resp.status_code, 400,
                             'Got %d %s' % (resp.status_code, resp.content))
            self.assertEqual(get_error_code(resp.content),
                             'AuthorizationQueryParametersError')
            self.assertIn('X-Amz-Expires must be less than 604800 seconds',
                          get_error_msg(resp.content))

        # Expiration date is *way* too far in the future, or isn't a number
        for exp in (2 ** 63, 'foo'):
            # was hard-coded to expires_in=2 ** 63, which left the
            # non-numeric ('foo') case unexercised; use the loop variable
            url, headers = self.conn.generate_url_and_headers(
                'GET', bucket, expires_in=exp)
            resp = requests.get(url, headers=headers)
            self.assertEqual(resp.status_code, 400,
                             'Got %d %s' % (resp.status_code, resp.content))
            self.assertEqual(get_error_code(resp.content),
                             'AuthorizationQueryParametersError')
            self.assertEqual('X-Amz-Expires should be a number',
                             get_error_msg(resp.content))

    def test_object(self):
        """Exercise the object lifecycle through presigned URLs."""
        bucket = 'test-bucket'
        obj = 'object'

        status, _junk, _junk = self.conn.make_request('PUT', bucket)
        self.assertEqual(status, 200)

        # HEAD/missing object
        head_url, headers = self.conn.generate_url_and_headers(
            'HEAD', bucket, obj)
        resp = requests.head(head_url, headers=headers)
        self.assertEqual(resp.status_code, 404,
                         'Got %d %s' % (resp.status_code, resp.content))

        # Wrong verb: a HEAD-signed URL must not validate for GET
        resp = requests.get(head_url)
        self.assertEqual(resp.status_code, 403,
                         'Got %d %s' % (resp.status_code, resp.content))
        self.assertEqual(get_error_code(resp.content),
                         'SignatureDoesNotMatch')

        # PUT empty object
        put_url, headers = self.conn.generate_url_and_headers(
            'PUT', bucket, obj)
        resp = requests.put(put_url, data=b'', headers=headers)
        self.assertEqual(resp.status_code, 200,
                         'Got %d %s' % (resp.status_code, resp.content))
        # GET empty object
        get_url, headers = self.conn.generate_url_and_headers(
            'GET', bucket, obj)
        resp = requests.get(get_url, headers=headers)
        self.assertEqual(resp.status_code, 200,
                         'Got %d %s' % (resp.status_code, resp.content))
        self.assertEqual(resp.content, b'')

        # PUT over object
        resp = requests.put(put_url, data=b'foobar', headers=headers)
        self.assertEqual(resp.status_code, 200,
                         'Got %d %s' % (resp.status_code, resp.content))
        # GET non-empty object
        resp = requests.get(get_url, headers=headers)
        self.assertEqual(resp.status_code, 200,
                         'Got %d %s' % (resp.status_code, resp.content))
        self.assertEqual(resp.content, b'foobar')

        # DELETE Object
        delete_url, headers = self.conn.generate_url_and_headers(
            'DELETE', bucket, obj)
        resp = requests.delete(delete_url, headers=headers)
        self.assertEqual(resp.status_code, 204,
                         'Got %d %s' % (resp.status_code, resp.content))

        # Final cleanup
        status, _junk, _junk = self.conn.make_request('DELETE', bucket)
        self.assertEqual(status, 204)

    def test_absolute_form_request(self):
        """Presigned URLs must also work in absolute-form request lines."""
        bucket = 'test-bucket'

        put_url, headers = self.conn.generate_url_and_headers(
            'PUT', bucket)
        resp = http_connect_raw(
            self.conn.host,
            self.conn.port,
            'PUT',
            put_url,  # whole URL, not just the path/query!
            headers=headers,
            ssl=put_url.startswith('https:'),
        ).getresponse()
        self.assertEqual(resp.status, 200,
                         'Got %d %s' % (resp.status, resp.read()))

        delete_url, headers = self.conn.generate_url_and_headers(
            'DELETE', bucket)
        resp = http_connect_raw(
            self.conn.host,
            self.conn.port,
            'DELETE',
            delete_url,  # whole URL, not just the path/query!
            headers=headers,
            ssl=delete_url.startswith('https:'),
        ).getresponse()
        self.assertEqual(resp.status, 204,
                         'Got %d %s' % (resp.status, resp.read()))
class TestS3ApiPresignedUrlsSigV4(TestS3ApiPresignedUrls):
    """Re-run the presigned-URL tests with SigV4 signing forced on."""

    @classmethod
    def setUpClass(cls):
        # Connection.generate_url_and_headers keys off this env var.
        os.environ['S3_USE_SIGV4'] = "True"

    @classmethod
    def tearDownClass(cls):
        # No default: a missing key here would indicate a setup bug.
        os.environ.pop('S3_USE_SIGV4')

    def setUp(self):
        super(TestS3ApiPresignedUrlsSigV4, self).setUp()
| swift-master | test/functional/s3api/test_presigned.py |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base64 import b64encode
from swift.common.middleware.s3api.etree import fromstring
from swift.common.utils import md5
def get_error_code(body):
    """Return the <Code> text from an S3 <Error> XML document."""
    return fromstring(body, 'Error').find('Code').text
def get_error_msg(body):
    """Return the <Message> text from an S3 <Error> XML document."""
    return fromstring(body, 'Error').find('Message').text
def calculate_md5(body):
    """Return the base64-encoded MD5 digest of *body* as an ASCII string,
    suitable for a Content-MD5 header."""
    digest = md5(body, usedforsecurity=False).digest()
    return b64encode(digest).strip().decode('ascii')
| swift-master | test/functional/s3api/utils.py |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import botocore
import datetime
import unittest
import os
from unittest import SkipTest
import test.functional as tf
from swift.common.utils import config_true_value
from test.functional.s3api import S3ApiBaseBoto3
from test.functional.s3api.s3_test_client import get_boto3_conn
from test.functional.swift_test_client import Connection
def setUpModule():
    # Standard functional-test hook: verify/prepare the cluster environment
    # once before any test in this module runs.
    tf.setup_package()


def tearDownModule():
    # Release whatever setup_package() acquired once the module finishes.
    tf.teardown_package()
class TestS3ApiBucket(S3ApiBaseBoto3):
def _validate_object_listing(self, resp_objects, req_objects,
expect_owner=True):
self.assertEqual(len(resp_objects), len(req_objects))
for i, obj in enumerate(resp_objects):
self.assertEqual(obj['Key'], req_objects[i])
self.assertEqual(type(obj['LastModified']), datetime.datetime)
self.assertIn('ETag', obj)
self.assertIn('Size', obj)
self.assertEqual(obj['StorageClass'], 'STANDARD')
if not expect_owner:
self.assertNotIn('Owner', obj)
elif tf.cluster_info['s3api'].get('s3_acl'):
self.assertEqual(obj['Owner']['ID'], self.access_key)
self.assertEqual(obj['Owner']['DisplayName'], self.access_key)
else:
self.assertIn('Owner', obj)
self.assertIn('ID', obj['Owner'])
self.assertIn('DisplayName', obj['Owner'])
def test_bucket(self):
bucket = 'bucket'
max_bucket_listing = int(tf.cluster_info['s3api'].get(
'max_bucket_listing', 1000))
# PUT Bucket
resp = self.conn.create_bucket(Bucket=bucket)
self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
headers = resp['ResponseMetadata']['HTTPHeaders']
self.assertCommonResponseHeaders(headers)
self.assertIn(headers['location'], (
'/' + bucket, # swob won't touch it...
# but webob (which we get because of auth_token) *does*
'%s/%s' % (self.endpoint_url, bucket),
))
self.assertEqual(headers['content-length'], '0')
# GET Bucket(Without Object)
resp = self.conn.list_objects(Bucket=bucket)
self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
headers = resp['ResponseMetadata']['HTTPHeaders']
self.assertCommonResponseHeaders(headers)
self.assertIsNotNone(headers['content-type'])
# TODO; requires consideration
# self.assertEqual(headers['transfer-encoding'], 'chunked')
self.assertEqual(resp['Name'], bucket)
self.assertEqual(resp['Prefix'], '')
self.assertEqual(resp['Marker'], '')
self.assertEqual(resp['MaxKeys'], max_bucket_listing)
self.assertFalse(resp['IsTruncated'])
self.assertNotIn('Contents', bucket)
# GET Bucket(With Object)
req_objects = ['object', 'object2']
for obj in req_objects:
self.conn.put_object(Bucket=bucket, Key=obj, Body=b'')
resp = self.conn.list_objects(Bucket=bucket)
self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertEqual(resp['Name'], bucket)
self.assertEqual(resp['Prefix'], '')
self.assertEqual(resp['Marker'], '')
self.assertEqual(resp['MaxKeys'], max_bucket_listing)
self.assertFalse(resp['IsTruncated'])
self._validate_object_listing(resp['Contents'], req_objects)
# HEAD Bucket
resp = self.conn.head_bucket(Bucket=bucket)
self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
headers = resp['ResponseMetadata']['HTTPHeaders']
self.assertCommonResponseHeaders(headers)
self.assertIsNotNone(headers['content-type'])
# TODO; requires consideration
# self.assertEqual(headers['transfer-encoding'], 'chunked')
# DELETE Bucket
for obj in req_objects:
self.conn.delete_object(Bucket=bucket, Key=obj)
resp = self.conn.delete_bucket(Bucket=bucket)
self.assertEqual(204, resp['ResponseMetadata']['HTTPStatusCode'])
self.assertCommonResponseHeaders(
resp['ResponseMetadata']['HTTPHeaders'])
def test_bucket_listing_with_staticweb(self):
    """Listings must keep working after staticweb is enabled on a bucket."""
    if 'staticweb' not in tf.cluster_info:
        raise SkipTest('Staticweb not enabled')
    bucket = 'bucket'

    def assert_ok(s3_response):
        # Every S3 call in this test is expected to succeed outright.
        self.assertEqual(
            200, s3_response['ResponseMetadata']['HTTPStatusCode'])

    assert_ok(self.conn.create_bucket(Bucket=bucket))
    assert_ok(self.conn.list_objects(Bucket=bucket))
    # Flip the container to a public staticweb listing via the Swift API.
    swift_conn = Connection(tf.config)
    swift_conn.authenticate()
    post_status = swift_conn.make_request('POST', [bucket], hdrs={
        'X-Container-Read': '.r:*,.rlistings',
        'X-Container-Meta-Web-Listings': 'true',
    })
    self.assertEqual(post_status, 204)
    # The S3 listing must be unaffected by the staticweb settings.
    assert_ok(self.conn.list_objects(Bucket=bucket))
def test_put_bucket_error(self):
    """PUT bucket failures: invalid name, bad signature, duplicate create."""
    # Stop botocore from rejecting the bad name before it hits the wire.
    self.conn.meta.events.unregister(
        'before-parameter-build.s3',
        botocore.handlers.validate_bucket_name)

    def assert_create_error(conn, bucket_name, exp_status, exp_code):
        with self.assertRaises(botocore.exceptions.ClientError) as cm:
            conn.create_bucket(Bucket=bucket_name)
        err_resp = cm.exception.response
        self.assertEqual(
            exp_status, err_resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual(exp_code, err_resp['Error']['Code'])

    assert_create_error(self.conn, 'bucket+invalid', 400,
                        'InvalidBucketName')
    auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
    assert_create_error(auth_error_conn, 'bucket', 403,
                        'SignatureDoesNotMatch')
    # Re-creating one's own bucket is a conflict, not a silent success.
    self.conn.create_bucket(Bucket='bucket')
    assert_create_error(self.conn, 'bucket', 409,
                        'BucketAlreadyOwnedByYou')
def test_put_bucket_error_key2(self):
    # With s3_acl enabled, a *different user of the same account* trying
    # to create an existing bucket gets 409 BucketAlreadyExists rather
    # than BucketAlreadyOwnedByYou (which is reserved for the same user).
    if config_true_value(tf.cluster_info['s3api'].get('s3_acl')):
        if 's3_access_key2' not in tf.config or \
                's3_secret_key2' not in tf.config:
            raise SkipTest(
                'Cannot test for BucketAlreadyExists with second user; '
                'need s3_access_key2 and s3_secret_key2 configured')

        self.conn.create_bucket(Bucket='bucket')

        # Other users of the same account get the same 409 error
        conn2 = get_boto3_conn(tf.config['s3_access_key2'],
                               tf.config['s3_secret_key2'])
        with self.assertRaises(botocore.exceptions.ClientError) as ctx:
            conn2.create_bucket(Bucket='bucket')
        self.assertEqual(
            ctx.exception.response['ResponseMetadata']['HTTPStatusCode'],
            409)
        self.assertEqual(
            ctx.exception.response['Error']['Code'], 'BucketAlreadyExists')
def test_put_bucket_error_key3(self):
    """A user without bucket-creation rights gets a plain AccessDenied."""
    if 's3_access_key3' not in tf.config or \
            's3_secret_key3' not in tf.config:
        raise SkipTest('Cannot test for AccessDenied; need '
                       's3_access_key3 and s3_secret_key3 configured')

    self.conn.create_bucket(Bucket='bucket')
    # If the user can't create buckets, they shouldn't even know
    # whether the bucket exists.
    conn3 = get_boto3_conn(tf.config['s3_access_key3'],
                           tf.config['s3_secret_key3'])
    with self.assertRaises(botocore.exceptions.ClientError) as cm:
        conn3.create_bucket(Bucket='bucket')
    err_resp = cm.exception.response
    self.assertEqual(403, err_resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('AccessDenied', err_resp['Error']['Code'])
def test_put_bucket_with_LocationConstraint(self):
    """A LocationConstraint matching the cluster's region is accepted."""
    resp = self.conn.create_bucket(
        Bucket='bucket',
        CreateBucketConfiguration={'LocationConstraint': self.region})
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
def test_get_bucket_error(self):
    """GET bucket failures: invalid name, bad signature, missing bucket."""
    # Disable client-side name validation so the server sees the bad name.
    self.conn.meta.events.unregister(
        'before-parameter-build.s3',
        botocore.handlers.validate_bucket_name)
    self.conn.create_bucket(Bucket='bucket')

    def assert_list_error(conn, bucket_name, expected_code):
        with self.assertRaises(botocore.exceptions.ClientError) as cm:
            conn.list_objects(Bucket=bucket_name)
        self.assertEqual(expected_code,
                         cm.exception.response['Error']['Code'])

    assert_list_error(self.conn, 'bucket+invalid', 'InvalidBucketName')
    auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
    assert_list_error(auth_error_conn, 'bucket', 'SignatureDoesNotMatch')
    assert_list_error(self.conn, 'nothing', 'NoSuchBucket')
def _prepare_test_get_bucket(self, bucket, objects):
    """Ensure *bucket* exists and holds each key in *objects* (empty bodies)."""
    try:
        self.conn.create_bucket(Bucket=bucket)
    except botocore.exceptions.ClientError as err:
        # Re-using a bucket we already own is fine; anything else is not.
        if err.response.get('Error', {}).get('Code') != \
                'BucketAlreadyOwnedByYou':
            raise
    for key in objects:
        self.conn.put_object(Bucket=bucket, Key=key, Body=b'')
def test_blank_params(self):
    # Explicitly-blank listing parameters must be accepted. The response
    # echoes back the blank marker/prefix values but omits a blank
    # delimiter -- except list_object_versions, which echoes it as ''.
    bucket = 'bucket'
    self._prepare_test_get_bucket(bucket, ())

    # v1 listing
    resp = self.conn.list_objects(
        Bucket=bucket, Delimiter='', Marker='', Prefix='')
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertNotIn('Delimiter', resp)
    self.assertIn('Marker', resp)
    self.assertEqual('', resp['Marker'])
    self.assertIn('Prefix', resp)
    self.assertEqual('', resp['Prefix'])

    # v2 listing
    resp = self.conn.list_objects_v2(
        Bucket=bucket, Delimiter='', StartAfter='', Prefix='')
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertNotIn('Delimiter', resp)
    self.assertIn('StartAfter', resp)
    self.assertEqual('', resp['StartAfter'])
    self.assertIn('Prefix', resp)
    self.assertEqual('', resp['Prefix'])

    # versions listing: note the blank Delimiter *is* echoed here
    resp = self.conn.list_object_versions(
        Bucket=bucket, Delimiter='', KeyMarker='', Prefix='')
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertIn('Delimiter', resp)
    self.assertEqual('', resp['Delimiter'])
    self.assertIn('KeyMarker', resp)
    self.assertEqual('', resp['KeyMarker'])
    self.assertIn('Prefix', resp)
    self.assertEqual('', resp['Prefix'])
def test_get_bucket_with_delimiter(self):
    """A '/' delimiter rolls subdirectories up into CommonPrefixes."""
    bucket = 'bucket'
    self._prepare_test_get_bucket(
        bucket,
        ('object', 'object2', 'subdir/object', 'subdir2/object',
         'dir/subdir/object'))
    resp = self.conn.list_objects(Bucket=bucket, Delimiter='/')
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('/', resp['Delimiter'])
    # Only the top-level keys come back as Contents...
    self._validate_object_listing(resp['Contents'], ('object', 'object2'))
    # ...while each first-level "directory" appears once as a prefix.
    self.assertEqual(
        [{'Prefix': p} for p in ('dir/', 'subdir/', 'subdir2/')],
        resp['CommonPrefixes'])
def test_get_bucket_with_multi_char_delimiter(self):
    """Delimiters longer than one character behave like single-char ones."""
    bucket = 'bucket'
    self._prepare_test_get_bucket(
        bucket,
        ('object', 'object2', 'subdir/object', 'subdir2/object',
         'dir/subdir/object'))
    resp = self.conn.list_objects(Bucket=bucket, Delimiter='/obj')
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('/obj', resp['Delimiter'])
    self._validate_object_listing(resp['Contents'], ('object', 'object2'))
    # Every key containing '/obj' is rolled up at that boundary.
    self.assertEqual(
        [{'Prefix': p}
         for p in ('dir/subdir/obj', 'subdir/obj', 'subdir2/obj')],
        resp['CommonPrefixes'])
def test_get_bucket_with_non_ascii_delimiter(self):
    # Non-ASCII delimiters (here U+2603 SNOWMAN) must work both for a
    # plain delimited listing and combined with a Prefix.
    bucket = 'bucket'
    put_objects = (
        'bar',
        'foo',
        u'foobar\N{SNOWMAN}baz',
        u'foo\N{SNOWMAN}bar',
        u'foo\N{SNOWMAN}bar\N{SNOWMAN}baz',
    )
    self._prepare_test_get_bucket(bucket, put_objects)
    # boto3 doesn't always unquote everything it should; see
    # https://github.com/boto/botocore/pull/1901
    # Fortunately, we can just drop the encoding-type=url param
    self.conn.meta.events.unregister(
        'before-parameter-build.s3.ListObjects',
        botocore.handlers.set_list_objects_encoding_type_url)

    delimiter = u'\N{SNOWMAN}'
    expect_objects = ('bar', 'foo')
    expect_prefixes = (u'foobar\N{SNOWMAN}', u'foo\N{SNOWMAN}')
    resp = self.conn.list_objects(Bucket=bucket, Delimiter=delimiter)
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual(resp['Delimiter'], delimiter)
    self._validate_object_listing(resp['Contents'], expect_objects)
    resp_prefixes = resp['CommonPrefixes']
    self.assertEqual(
        resp_prefixes,
        [{'Prefix': p} for p in expect_prefixes])

    # Now narrow the listing with a prefix ending at the delimiter.
    prefix = u'foo\N{SNOWMAN}'
    expect_objects = (u'foo\N{SNOWMAN}bar',)
    expect_prefixes = (u'foo\N{SNOWMAN}bar\N{SNOWMAN}',)
    resp = self.conn.list_objects(
        Bucket=bucket, Delimiter=delimiter, Prefix=prefix)
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual(resp['Delimiter'], delimiter)
    self.assertEqual(resp['Prefix'], prefix)
    self._validate_object_listing(resp['Contents'], expect_objects)
    resp_prefixes = resp['CommonPrefixes']
    self.assertEqual(
        resp_prefixes,
        [{'Prefix': p} for p in expect_prefixes])
def test_get_bucket_with_encoding_type(self):
    """EncodingType=url is accepted and echoed back in the response."""
    bucket = 'bucket'
    self._prepare_test_get_bucket(bucket, ('object', 'object2'))
    resp = self.conn.list_objects(Bucket=bucket, EncodingType='url')
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('url', resp['EncodingType'])
def test_get_bucket_with_marker(self):
    """Marker is exclusive: the listing resumes strictly after it."""
    bucket = 'bucket'
    self._prepare_test_get_bucket(
        bucket,
        ('object', 'object2', 'subdir/object', 'subdir2/object',
         'dir/subdir/object'))
    resp = self.conn.list_objects(Bucket=bucket, Marker='object')
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('object', resp['Marker'])
    # Keys sorting at or before the marker are omitted.
    self._validate_object_listing(
        resp['Contents'], ('object2', 'subdir/object', 'subdir2/object'))
def test_get_bucket_with_max_keys(self):
    """MaxKeys truncates the listing to the first N keys in sort order."""
    bucket = 'bucket'
    self._prepare_test_get_bucket(
        bucket,
        ('object', 'object2', 'subdir/object', 'subdir2/object',
         'dir/subdir/object'))
    resp = self.conn.list_objects(Bucket=bucket, MaxKeys=2)
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual(2, resp['MaxKeys'])
    self._validate_object_listing(
        resp['Contents'], ('dir/subdir/object', 'object'))
def test_get_bucket_with_prefix(self):
    """Prefix restricts the listing to keys that start with it."""
    bucket = 'bucket'
    self._prepare_test_get_bucket(
        bucket,
        ('object', 'object2', 'subdir/object', 'subdir2/object',
         'dir/subdir/object'))
    resp = self.conn.list_objects(Bucket=bucket, Prefix='object')
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('object', resp['Prefix'])
    self._validate_object_listing(resp['Contents'], ('object', 'object2'))
def test_get_bucket_v2_with_start_after(self):
    """v2 listings honor StartAfter the way v1 listings honor Marker."""
    bucket = 'bucket'
    self._prepare_test_get_bucket(
        bucket,
        ('object', 'object2', 'subdir/object', 'subdir2/object',
         'dir/subdir/object'))
    resp = self.conn.list_objects_v2(Bucket=bucket, StartAfter='object')
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual('object', resp['StartAfter'])
    self.assertEqual(3, resp['KeyCount'])
    # v2 omits owner info unless FetchOwner is requested.
    self._validate_object_listing(
        resp['Contents'], ('object2', 'subdir/object', 'subdir2/object'),
        expect_owner=False)
def test_get_bucket_v2_with_fetch_owner(self):
    """FetchOwner=True makes v2 listings include owner details."""
    bucket = 'bucket'
    self._prepare_test_get_bucket(
        bucket,
        ('object', 'object2', 'subdir/object', 'subdir2/object',
         'dir/subdir/object'))
    resp = self.conn.list_objects_v2(Bucket=bucket, FetchOwner=True)
    self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
    self.assertEqual(5, resp['KeyCount'])
    self._validate_object_listing(
        resp['Contents'],
        ('dir/subdir/object', 'object', 'object2', 'subdir/object',
         'subdir2/object'))
def test_get_bucket_v2_with_continuation_token_and_delimiter(self):
    # Page through a delimited v2 listing, 3 entries per page, following
    # NextContinuationToken until the listing is exhausted.
    bucket = 'bucket'
    put_objects = ('object', u'object2-\u062a', 'subdir/object',
                   u'subdir2-\u062a/object', 'dir/subdir/object',
                   'x', 'y', 'z')
    self._prepare_test_get_bucket(bucket, put_objects)

    # Expected pages, in listing order: plain objects plus rolled-up
    # common prefixes ("subdirs").
    expected = [{'objects': ['object', u'object2-\u062a'],
                 'subdirs': ['dir/']},
                {'objects': ['x'],
                 'subdirs': ['subdir/', u'subdir2-\u062a/']},
                {'objects': ['y', 'z'],
                 'subdirs': []}]

    continuation_token = ''
    for i in range(len(expected)):
        resp = self.conn.list_objects_v2(
            Bucket=bucket,
            MaxKeys=3,
            Delimiter='/',
            ContinuationToken=continuation_token)
        self.assertEqual(200, resp['ResponseMetadata']['HTTPStatusCode'])
        self.assertEqual(resp['MaxKeys'], 3)
        # KeyCount counts both Contents and CommonPrefixes entries.
        self.assertEqual(
            resp['KeyCount'],
            len(expected[i]['objects']) + len(expected[i]['subdirs']))
        # Every page except the last must be truncated and carry a token.
        expect_truncated = i < len(expected) - 1
        self.assertEqual(resp['IsTruncated'], expect_truncated)
        if expect_truncated:
            self.assertIsNotNone(resp['NextContinuationToken'])
            continuation_token = resp['NextContinuationToken']
        self._validate_object_listing(resp['Contents'],
                                      expected[i]['objects'],
                                      expect_owner=False)
        resp_subdirs = resp.get('CommonPrefixes', [])
        self.assertEqual(
            resp_subdirs,
            [{'Prefix': p} for p in expected[i]['subdirs']])
def test_head_bucket_error(self):
    """HEAD bucket errors carry only a status -- never an XML body."""
    self.conn.meta.events.unregister(
        'before-parameter-build.s3',
        botocore.handlers.validate_bucket_name)
    self.conn.create_bucket(Bucket='bucket')

    def assert_head_error(conn, bucket_name, expected_status):
        with self.assertRaises(botocore.exceptions.ClientError) as cm:
            conn.head_bucket(Bucket=bucket_name)
        err_resp = cm.exception.response
        self.assertEqual(
            expected_status,
            err_resp['ResponseMetadata']['HTTPStatusCode'])
        # With no body to parse, boto reports the bare status as the code.
        self.assertEqual(str(expected_status), err_resp['Error']['Code'])
        self.assertEqual(
            '0',
            err_resp['ResponseMetadata']['HTTPHeaders']['content-length'])

    assert_head_error(self.conn, 'bucket+invalid', 400)
    auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
    assert_head_error(auth_error_conn, 'bucket', 403)
    assert_head_error(self.conn, 'nothing', 404)
def test_delete_bucket_error(self):
    """DELETE bucket failures: invalid name, bad auth, missing bucket."""
    self.conn.meta.events.unregister(
        'before-parameter-build.s3',
        botocore.handlers.validate_bucket_name)

    def assert_delete_error(conn, bucket_name, expected_code):
        with self.assertRaises(botocore.exceptions.ClientError) as cm:
            conn.delete_bucket(Bucket=bucket_name)
        self.assertEqual(expected_code,
                         cm.exception.response['Error']['Code'])

    assert_delete_error(self.conn, 'bucket+invalid', 'InvalidBucketName')
    auth_error_conn = get_boto3_conn(tf.config['s3_access_key'], 'invalid')
    assert_delete_error(auth_error_conn, 'bucket', 'SignatureDoesNotMatch')
    # No bucket was ever created in this test, so DELETE must 404.
    assert_delete_error(self.conn, 'bucket', 'NoSuchBucket')
def test_bucket_invalid_method_error(self):
    # Mangle the HTTP verb after botocore builds the request so the
    # server sees methods that no S3 controller exposes.
    def _mangle_req_method(request, **kwargs):
        request.method = 'GETPUT'

    def _mangle_req_controller_method(request, **kwargs):
        # A name that *does* exist on the controller, but only as an
        # internal helper -- it must not be reachable as an HTTP verb.
        request.method = '_delete_segments_bucket'

    event_system = self.conn.meta.events
    event_system.register(
        'request-created.s3.CreateBucket',
        _mangle_req_method)
    # non existed verb in the controller
    with self.assertRaises(botocore.exceptions.ClientError) as ctx:
        self.conn.create_bucket(Bucket='bucket')
    self.assertEqual(
        ctx.exception.response['Error']['Code'], 'MethodNotAllowed')

    # Swap in the second mangler for the next request.
    event_system.unregister('request-created.s3.CreateBucket',
                            _mangle_req_method)
    event_system.register('request-created.s3.CreateBucket',
                          _mangle_req_controller_method)
    # the method exists in the controller but deny as MethodNotAllowed
    with self.assertRaises(botocore.exceptions.ClientError) as ctx:
        self.conn.create_bucket(Bucket='bucket')
    self.assertEqual(
        ctx.exception.response['Error']['Code'], 'MethodNotAllowed')
class TestS3ApiBucketSigV4(TestS3ApiBucket):
    """Re-run the TestS3ApiBucket suite using AWS Signature Version 4."""

    @classmethod
    def setUpClass(cls):
        # boto switches to SigV4 when this env var is set; it must be in
        # place before setUp() builds any connection.
        os.environ['S3_USE_SIGV4'] = "True"

    @classmethod
    def tearDownClass(cls):
        del os.environ['S3_USE_SIGV4']

    def setUp(self):
        # Bug fix: the original called super(TestS3ApiBucket, self), which
        # starts the MRO lookup *after* TestS3ApiBucket and so skips any
        # setUp() defined on the parent class. Name this subclass so the
        # parent's setUp actually runs.
        super(TestS3ApiBucketSigV4, self).setUp()
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/s3api/test_bucket.py |
# -*- coding: utf-8 -*-
# Copyright (c) 2015-2021 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
import boto
# For an issue with venv and distutils, disable pylint message here
# pylint: disable-msg=E0611,F0401
from distutils.version import StrictVersion
import calendar
import email.parser
from email.utils import formatdate, parsedate
from time import mktime
import six
import test.functional as tf
from swift.common import utils
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import S3Timestamp
from swift.common.utils import md5, quote
from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection
from test.functional.s3api.utils import get_error_code, calculate_md5, \
get_error_msg
DAY = 86400.0 # 60 * 60 * 24 (sec)
def setUpModule():
    # Module-level fixture: load test config and verify the cluster.
    tf.setup_package()


def tearDownModule():
    tf.teardown_package()
class TestS3ApiObject(S3ApiBase):
def setUp(self):
    """Create the bucket that every test in this class operates inside."""
    super(TestS3ApiObject, self).setUp()
    self.bucket = 'bucket'
    self.conn.make_request('PUT', self.bucket)
def _assertObjectEtag(self, bucket, obj, etag):
    """HEAD the object and check the common headers carry *etag*."""
    status, headers, _junk = self.conn.make_request('HEAD', bucket, obj)
    self.assertEqual(200, status)  # sanity
    self.assertCommonResponseHeaders(headers, etag)
def test_object(self):
    """Full object lifecycle: PUT, copy (plain and URL-encoded source),
    GET, HEAD and DELETE, checking headers and etags at each step."""
    # Key deliberately mixes a %-sign and non-ASCII to exercise escaping.
    obj = u'object name with %-sign 🙂'
    content = b'abc123'
    etag = md5(content, usedforsecurity=False).hexdigest()

    # PUT Object
    status, headers, body = \
        self.conn.make_request('PUT', self.bucket, obj, body=content)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self.assertTrue('content-length' in headers)  # sanity
    self.assertEqual(headers['content-length'], '0')
    self._assertObjectEtag(self.bucket, obj, etag)

    # PUT Object Copy
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_obj'
    self.conn.make_request('PUT', dst_bucket)
    headers = {'x-amz-copy-source': '/%s/%s' % (self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj,
                               headers=headers)
    self.assertEqual(status, 200)

    # PUT Object Copy with URL-encoded Source
    dst_bucket = 'dst-bucket'
    dst_obj = 'dst_obj'
    self.conn.make_request('PUT', dst_bucket)
    headers = {'x-amz-copy-source': quote('/%s/%s' % (self.bucket, obj))}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj,
                               headers=headers)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers)
    self.assertEqual(headers['content-length'], str(len(body)))
    # The copy response body is a CopyObjectResult XML document.
    elem = fromstring(body, 'CopyObjectResult')
    self.assertTrue(elem.find('LastModified').text is not None)
    copy_resp_last_modified_xml = elem.find('LastModified').text
    self.assertTrue(elem.find('ETag').text is not None)
    self.assertEqual(etag, elem.find('ETag').text.strip('"'))
    self._assertObjectEtag(dst_bucket, dst_obj, etag)

    # Check timestamp on Copy in listing:
    status, headers, body = \
        self.conn.make_request('GET', dst_bucket)
    self.assertEqual(status, 200)
    elem = fromstring(body, 'ListBucketResult')
    # The listing's LastModified must agree with the copy response's.
    self.assertEqual(
        elem.find('Contents').find("LastModified").text,
        copy_resp_last_modified_xml)

    # GET Object copy
    status, headers, body = \
        self.conn.make_request('GET', dst_bucket, dst_obj)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers, etag)
    self.assertTrue(headers['last-modified'] is not None)
    # last-modified header must match the copy's XML timestamp too.
    self.assertEqual(
        float(S3Timestamp.from_s3xmlformat(copy_resp_last_modified_xml)),
        calendar.timegm(parsedate(headers['last-modified'])))
    self.assertTrue(headers['content-type'] is not None)
    self.assertEqual(headers['content-length'], str(len(content)))

    # GET Object
    status, headers, body = \
        self.conn.make_request('GET', self.bucket, obj)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers, etag)
    self.assertTrue(headers['last-modified'] is not None)
    self.assertTrue(headers['content-type'] is not None)
    self.assertEqual(headers['content-length'], str(len(content)))

    # HEAD Object
    status, headers, body = \
        self.conn.make_request('HEAD', self.bucket, obj)
    self.assertEqual(status, 200)
    self.assertCommonResponseHeaders(headers, etag)
    self.assertTrue(headers['last-modified'] is not None)
    self.assertTrue('content-type' in headers)
    self.assertEqual(headers['content-length'], str(len(content)))

    # DELETE Object
    status, headers, body = \
        self.conn.make_request('DELETE', self.bucket, obj)
    self.assertEqual(status, 204)
    self.assertCommonResponseHeaders(headers)

    # DELETE Non-Existent Object: S3 DELETE is idempotent, still 204.
    status, headers, body = \
        self.conn.make_request('DELETE', self.bucket, 'does-not-exist')
    self.assertEqual(status, 204)
    self.assertCommonResponseHeaders(headers)
def test_put_object_error(self):
    """PUT object with a bad signature or into a missing bucket fails."""
    auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
    status, headers, body = auth_error_conn.make_request(
        'PUT', self.bucket, 'object')
    self.assertEqual('SignatureDoesNotMatch', get_error_code(body))
    self.assertEqual('application/xml', headers['content-type'])

    status, headers, body = self.conn.make_request(
        'PUT', 'bucket2', 'object')
    self.assertEqual('NoSuchBucket', get_error_code(body))
    self.assertEqual('application/xml', headers['content-type'])
def test_put_object_name_too_long(self):
    """Keys longer than the cluster's max object name are rejected."""
    max_len = tf.cluster_info['swift']['max_object_name_length']
    status, headers, body = self.conn.make_request(
        'PUT', self.bucket, 'x' * (max_len + 1))
    self.assertEqual('KeyTooLongError', get_error_code(body))
    self.assertEqual('application/xml', headers['content-type'])
def test_put_object_copy_error(self):
    # Failure modes for server-side copy (PUT with X-Amz-Copy-Source).
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)
    dst_bucket = 'dst-bucket'
    self.conn.make_request('PUT', dst_bucket)
    dst_obj = 'dst_object'

    # bad signature on an otherwise valid copy
    headers = {'x-amz-copy-source': '/%s/%s' % (self.bucket, obj)}
    auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
    status, headers, body = \
        auth_error_conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
    self.assertEqual(headers['content-type'], 'application/xml')

    # /src/nothing -> /dst/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, 'nothing')}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    self.assertEqual(get_error_code(body), 'NoSuchKey')
    self.assertEqual(headers['content-type'], 'application/xml')

    # /nothing/src -> /dst/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % ('nothing', obj)}
    status, headers, body = \
        self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
    # TODO: source bucket is not check.
    # self.assertEqual(get_error_code(body), 'NoSuchBucket')

    # /src/src -> /nothing/dst
    headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
    status, headers, body = \
        self.conn.make_request('PUT', 'nothing', dst_obj, headers)
    self.assertEqual(get_error_code(body), 'NoSuchBucket')
    self.assertEqual(headers['content-type'], 'application/xml')
def test_get_object_error(self):
    """GET object failures: bad auth, missing key, missing bucket."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
    status, headers, body = auth_error_conn.make_request(
        'GET', self.bucket, obj)
    self.assertEqual('SignatureDoesNotMatch', get_error_code(body))
    self.assertEqual('application/xml', headers['content-type'])

    status, headers, body = self.conn.make_request(
        'GET', self.bucket, 'invalid')
    self.assertEqual('NoSuchKey', get_error_code(body))
    self.assertEqual('application/xml', headers['content-type'])

    status, headers, body = self.conn.make_request('GET', 'invalid', obj)
    self.assertEqual('NoSuchBucket', get_error_code(body))
    self.assertEqual('application/xml', headers['content-type'])
def test_head_object_error(self):
    """HEAD object failures return the right status with an empty body."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)
    auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
    cases = (
        # (connection, bucket, key, expected status)
        (auth_error_conn, self.bucket, obj, 403),
        (self.conn, self.bucket, 'invalid', 404),
        (self.conn, 'invalid', obj, 404),
    )
    for conn, bucket, key, expected_status in cases:
        status, headers, body = conn.make_request('HEAD', bucket, key)
        self.assertEqual(expected_status, status)
        self.assertEqual(b'', body)  # sanity
        self.assertEqual('application/xml', headers['content-type'])
def test_delete_object_error(self):
    """DELETE object failures: bad auth and missing bucket."""
    obj = 'object'
    self.conn.make_request('PUT', self.bucket, obj)

    auth_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
    status, headers, body = auth_error_conn.make_request(
        'DELETE', self.bucket, obj)
    self.assertEqual('SignatureDoesNotMatch', get_error_code(body))
    self.assertEqual('application/xml', headers['content-type'])

    status, headers, body = self.conn.make_request(
        'DELETE', 'invalid', obj)
    self.assertEqual('NoSuchBucket', get_error_code(body))
    self.assertEqual('application/xml', headers['content-type'])
def test_put_object_content_encoding(self):
    """Content-Encoding sent on PUT is stored and returned on HEAD."""
    obj = 'object'
    # The PUT carries no body, so the stored etag is md5 of nothing.
    empty_etag = md5(usedforsecurity=False).hexdigest()
    status, headers, body = self.conn.make_request(
        'PUT', self.bucket, obj, {'Content-Encoding': 'gzip'})
    self.assertEqual(200, status)
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, obj)
    self.assertTrue('content-encoding' in headers)  # sanity
    self.assertEqual('gzip', headers['content-encoding'])
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, empty_etag)
def test_put_object_content_md5(self):
    """A correct Content-MD5 header is accepted on PUT."""
    obj = 'object'
    content = b'abcdefghij'
    expected_etag = md5(content, usedforsecurity=False).hexdigest()
    status, headers, body = self.conn.make_request(
        'PUT', self.bucket, obj,
        {'Content-MD5': calculate_md5(content)}, content)
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def test_put_object_content_type(self):
    """Content-Type sent on PUT round-trips through HEAD."""
    obj = 'object'
    content = b'abcdefghij'
    expected_etag = md5(content, usedforsecurity=False).hexdigest()
    status, headers, body = self.conn.make_request(
        'PUT', self.bucket, obj, {'Content-Type': 'text/plain'}, content)
    self.assertEqual(200, status)
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, obj)
    self.assertEqual('text/plain', headers['content-type'])
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def test_put_object_conditional_requests(self):
    """Conditional PUTs are unsupported: every precondition gets a 501."""
    obj = 'object'
    content = b'abcdefghij'
    preconditions = (
        {'If-None-Match': '*'},
        {'If-Match': '*'},
        {'If-Modified-Since': 'Sat, 27 Jun 2015 00:00:00 GMT'},
        {'If-Unmodified-Since': 'Sat, 27 Jun 2015 00:00:00 GMT'},
    )
    for precondition in preconditions:
        status, headers, body = self.conn.make_request(
            'PUT', self.bucket, obj, precondition, content)
        self.assertEqual(501, status)

    # None of the above should actually have created an object
    status, headers, body = self.conn.make_request(
        'HEAD', self.bucket, obj, {}, '')
    self.assertEqual(404, status)
def test_put_object_expect(self):
    """PUT with 'Expect: 100-continue' completes normally."""
    obj = 'object'
    content = b'abcdefghij'
    expected_etag = md5(content, usedforsecurity=False).hexdigest()
    status, headers, body = self.conn.make_request(
        'PUT', self.bucket, obj, {'Expect': '100-continue'}, content)
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def _test_put_object_headers(self, req_headers, expected_headers=None):
    """PUT an object with *req_headers*, then verify them on HEAD.

    :param req_headers: dict of headers to send with the PUT
    :param expected_headers: headers expected back on the HEAD response;
                             defaults to ``req_headers`` (exact round-trip)
    """
    if expected_headers is None:
        expected_headers = req_headers
    obj = 'object'
    content = b'abcdefghij'
    etag = md5(content, usedforsecurity=False).hexdigest()
    status, headers, body = \
        self.conn.make_request('PUT', self.bucket, obj,
                               req_headers, content)
    self.assertEqual(status, 200)
    status, headers, body = \
        self.conn.make_request('HEAD', self.bucket, obj)
    # Response headers arrive lower-cased, so compare case-insensitively.
    for header, value in expected_headers.items():
        self.assertIn(header.lower(), headers)
        self.assertEqual(headers[header.lower()], value)
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, etag)
def test_put_object_metadata(self):
    """User metadata (x-amz-meta-*) round-trips unchanged."""
    self._test_put_object_headers({
        'X-Amz-Meta-Bar': 'foo',
        'X-Amz-Meta-Bar2': 'foo2',
    })
def test_put_object_weird_metadata(self):
    # Send metadata headers named after ASCII punctuation characters;
    # only the subset that survives header parsing is expected back.
    req_headers = dict(
        ('x-amz-meta-' + c, c)
        for c in '!"#$%&\'()*+-./<=>?@[\\]^`{|}~')
    exp_headers = dict(
        ('x-amz-meta-' + c, c)
        for c in '!#$%&\'(*+-.^`|~')
    self._test_put_object_headers(req_headers, exp_headers)
def test_put_object_underscore_in_metadata(self):
    """Dashes and underscores in metadata names stay distinct headers."""
    # Kept separate from test_put_object_metadata for ease of testing
    # against pre-0.19.0 eventlet.
    self._test_put_object_headers({
        'X-Amz-Meta-Foo-Bar': 'baz',
        'X-Amz-Meta-Foo_Bar': 'also baz',
    })
def test_put_object_content_headers(self):
    """The standard Content-* headers all round-trip."""
    self._test_put_object_headers({
        'Content-Type': 'foo/bar',
        'Content-Encoding': 'baz',
        'Content-Disposition': 'attachment',
        'Content-Language': 'en',
    })
def test_put_object_cache_control(self):
    """Cache-Control, including extension tokens, round-trips."""
    self._test_put_object_headers(
        {'Cache-Control': 'private, some-extension'})
def test_put_object_expires(self):
    """Expires round-trips verbatim; its value is never validated."""
    self._test_put_object_headers(
        {'Expires': 'a valid HTTP-date timestamp'})
def test_put_object_robots_tag(self):
    """An arbitrary X-Robots-Tag header round-trips."""
    self._test_put_object_headers(
        {'X-Robots-Tag': 'googlebot: noarchive'})
def test_put_object_storage_class(self):
    """PUT with X-Amz-Storage-Class: STANDARD is accepted."""
    obj = 'object'
    content = b'abcdefghij'
    expected_etag = md5(content, usedforsecurity=False).hexdigest()
    status, headers, body = self.conn.make_request(
        'PUT', self.bucket, obj,
        {'X-Amz-Storage-Class': 'STANDARD'}, content)
    self.assertEqual(200, status)
    self.assertCommonResponseHeaders(headers)
    self._assertObjectEtag(self.bucket, obj, expected_etag)
def test_put_object_valid_delete_headers(self):
    """PUTs with well-formed X-Delete-At / X-Delete-After succeed."""
    obj = 'object'
    content = b'abcdefghij'
    ts = utils.Timestamp.now()
    delete_at = {'X-Delete-At': str(int(ts) + 70)}
    # NOTE(review): X-Delete-After is a *relative* number of seconds, so
    # int(ts) + 130 means "expire ~54 years from now", not "in 130s".
    # It is still a valid positive integer, so the PUT must succeed;
    # confirm whether a small relative value was intended.
    delete_after = {'X-Delete-After': str(int(ts) + 130)}
    # Fix: the original rebound delete_at/delete_after to the response
    # header tuples, shadowing the request dicts; use a separate name.
    status, resp_headers, body = \
        self.conn.make_request('PUT', self.bucket, obj, delete_at,
                               content)
    self.assertEqual(status, 200)
    status, resp_headers, body = \
        self.conn.make_request('PUT', self.bucket, obj, delete_after,
                               content)
    self.assertEqual(status, 200)
def test_put_object_invalid_x_delete_at(self):
    """Past or non-numeric X-Delete-At values are 400 InvalidArgument."""
    obj = 'object'
    content = b'abcdefghij'
    now = utils.Timestamp.now()
    cases = (
        # (header value, expected error message)
        (str(int(now) - 140), 'X-Delete-At in past'),
        ('test', 'Non-integer X-Delete-At'),
    )
    for value, expected_msg in cases:
        status, headers, body = self.conn.make_request(
            'PUT', self.bucket, obj, {'X-Delete-At': value}, content)
        self.assertEqual(400, status)
        self.assertEqual('InvalidArgument', get_error_code(body))
        self.assertEqual(expected_msg, get_error_msg(body))
def test_put_object_invalid_x_delete_after(self):
obj = 'object'
content = b'abcdefghij'
headers = {'X-Delete-After': 'test'}
status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, headers, content)
self.assertEqual(status, 400)
self.assertEqual(get_error_code(body), 'InvalidArgument')
self.assertEqual(get_error_msg(body), 'Non-integer X-Delete-After')
headers = {'X-Delete-After': '-140'}
status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, headers, content)
self.assertEqual(status, 400)
self.assertEqual(get_error_code(body), 'InvalidArgument')
self.assertEqual(get_error_msg(body), 'X-Delete-After in past')
    def test_put_object_copy_source_params(self):
        """Query strings on X-Amz-Copy-Source: only versionId is accepted."""
        obj = 'object'
        src_headers = {'X-Amz-Meta-Test': 'src'}
        src_body = b'some content'
        dst_bucket = 'dst-bucket'
        dst_obj = 'dst_object'
        self.conn.make_request('PUT', self.bucket, obj, src_headers, src_body)
        self.conn.make_request('PUT', dst_bucket)
        # An unknown query parameter on the copy source is rejected.
        headers = {'X-Amz-Copy-Source': '/%s/%s?nonsense' % (
            self.bucket, obj)}
        status, headers, body = \
            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
        self.assertEqual(status, 400)
        self.assertEqual(get_error_code(body), 'InvalidArgument')
        # ...even when combined with an otherwise-valid versionId.
        headers = {'X-Amz-Copy-Source': '/%s/%s?versionId=null&nonsense' % (
            self.bucket, obj)}
        status, headers, body = \
            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
        self.assertEqual(status, 400)
        self.assertEqual(get_error_code(body), 'InvalidArgument')
        # versionId=null alone is valid and the copy succeeds.
        headers = {'X-Amz-Copy-Source': '/%s/%s?versionId=null' % (
            self.bucket, obj)}
        status, headers, body = \
            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        # The copy carried over both the body and the user metadata.
        status, headers, body = \
            self.conn.make_request('GET', dst_bucket, dst_obj)
        self.assertEqual(status, 200)
        self.assertEqual(headers['x-amz-meta-test'], 'src')
        self.assertEqual(body, src_body)
    def test_put_object_copy_source(self):
        """Server-side copy between and within buckets preserves the etag."""
        obj = 'object'
        content = b'abcdefghij'
        etag = md5(content, usedforsecurity=False).hexdigest()
        self.conn.make_request('PUT', self.bucket, obj, body=content)
        dst_bucket = 'dst-bucket'
        dst_obj = 'dst_object'
        self.conn.make_request('PUT', dst_bucket)
        # /src/src -> /dst/dst
        headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
        status, headers, body = \
            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self._assertObjectEtag(dst_bucket, dst_obj, etag)
        # /src/src -> /src/dst
        headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj)}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, dst_obj, headers)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self._assertObjectEtag(self.bucket, dst_obj, etag)
        # /src/src -> /src/src
        # need changes to copy itself (e.g. metadata)
        headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
                   'X-Amz-Meta-Foo': 'bar',
                   'X-Amz-Metadata-Directive': 'REPLACE'}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, obj, headers)
        self.assertEqual(status, 200)
        self._assertObjectEtag(self.bucket, obj, etag)
        self.assertCommonResponseHeaders(headers)
    def test_put_object_copy_metadata_directive(self):
        """X-Amz-Metadata-Directive controls metadata handling on copy.

        REPLACE takes the metadata from the copy request, COPY keeps the
        source object's metadata (ignoring any sent with the request),
        and any other value is a 400.
        """
        obj = 'object'
        src_headers = {'X-Amz-Meta-Test': 'src'}
        dst_bucket = 'dst-bucket'
        dst_obj = 'dst_object'
        self.conn.make_request('PUT', self.bucket, obj, headers=src_headers)
        self.conn.make_request('PUT', dst_bucket)
        # REPLACE: destination gets the request's metadata.
        headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
                   'X-Amz-Metadata-Directive': 'REPLACE',
                   'X-Amz-Meta-Test': 'dst'}
        status, headers, body = \
            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        status, headers, body = \
            self.conn.make_request('HEAD', dst_bucket, dst_obj)
        self.assertEqual(headers['x-amz-meta-test'], 'dst')
        # COPY: request metadata is ignored, source metadata wins.
        headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
                   'X-Amz-Metadata-Directive': 'COPY',
                   'X-Amz-Meta-Test': 'dst'}
        status, headers, body = \
            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        status, headers, body = \
            self.conn.make_request('HEAD', dst_bucket, dst_obj)
        self.assertEqual(headers['x-amz-meta-test'], 'src')
        # REPLACE with a different key: the old key disappears entirely.
        headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
                   'X-Amz-Meta-Test2': 'dst',
                   'X-Amz-Metadata-Directive': 'REPLACE'}
        status, headers, body = \
            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        status, headers, body = \
            self.conn.make_request('HEAD', dst_bucket, dst_obj)
        self.assertNotIn('x-amz-meta-test', headers)
        self.assertEqual(headers['x-amz-meta-test2'], 'dst')
        # Any directive other than COPY/REPLACE is rejected.
        headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
                   'X-Amz-Metadata-Directive': 'BAD'}
        status, headers, body = \
            self.conn.make_request('PUT', dst_bucket, dst_obj, headers)
        self.assertEqual(status, 400)
def test_put_object_copy_source_if_modified_since(self):
obj = 'object'
dst_bucket = 'dst-bucket'
dst_obj = 'dst_object'
etag = md5(usedforsecurity=False).hexdigest()
self.conn.make_request('PUT', self.bucket, obj)
self.conn.make_request('PUT', dst_bucket)
_, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
src_datetime = mktime(parsedate(headers['last-modified']))
src_datetime = src_datetime - DAY
headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
'X-Amz-Copy-Source-If-Modified-Since':
formatdate(src_datetime)}
status, headers, body = \
self.conn.make_request('PUT', dst_bucket, dst_obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
self._assertObjectEtag(self.bucket, obj, etag)
def test_put_object_copy_source_if_unmodified_since(self):
obj = 'object'
dst_bucket = 'dst-bucket'
dst_obj = 'dst_object'
etag = md5(usedforsecurity=False).hexdigest()
self.conn.make_request('PUT', self.bucket, obj)
self.conn.make_request('PUT', dst_bucket)
_, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
src_datetime = mktime(parsedate(headers['last-modified']))
src_datetime = src_datetime + DAY
headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
'X-Amz-Copy-Source-If-Unmodified-Since':
formatdate(src_datetime)}
status, headers, body = \
self.conn.make_request('PUT', dst_bucket, dst_obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
self._assertObjectEtag(self.bucket, obj, etag)
def test_put_object_copy_source_if_match(self):
obj = 'object'
dst_bucket = 'dst-bucket'
dst_obj = 'dst_object'
etag = md5(usedforsecurity=False).hexdigest()
self.conn.make_request('PUT', self.bucket, obj)
self.conn.make_request('PUT', dst_bucket)
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj)
headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
'X-Amz-Copy-Source-If-Match': etag}
status, headers, body = \
self.conn.make_request('PUT', dst_bucket, dst_obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
self._assertObjectEtag(self.bucket, obj, etag)
def test_put_object_copy_source_if_none_match(self):
obj = 'object'
dst_bucket = 'dst-bucket'
dst_obj = 'dst_object'
etag = md5(usedforsecurity=False).hexdigest()
self.conn.make_request('PUT', self.bucket, obj)
self.conn.make_request('PUT', dst_bucket)
headers = {'X-Amz-Copy-Source': '/%s/%s' % (self.bucket, obj),
'X-Amz-Copy-Source-If-None-Match': 'none-match'}
status, headers, body = \
self.conn.make_request('PUT', dst_bucket, dst_obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
self._assertObjectEtag(self.bucket, obj, etag)
def test_get_object_response_content_type(self):
obj = 'obj'
self.conn.make_request('PUT', self.bucket, obj)
query = 'response-content-type=text/plain'
status, headers, body = \
self.conn.make_request('GET', self.bucket, obj, query=query)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
self.assertEqual(headers['content-type'], 'text/plain')
def test_get_object_response_content_language(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
query = 'response-content-language=en'
status, headers, body = \
self.conn.make_request('GET', self.bucket, obj, query=query)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
self.assertEqual(headers['content-language'], 'en')
def test_get_object_response_cache_control(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
query = 'response-cache-control=private'
status, headers, body = \
self.conn.make_request('GET', self.bucket, obj, query=query)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
self.assertEqual(headers['cache-control'], 'private')
def test_get_object_response_content_disposition(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
query = 'response-content-disposition=inline'
status, headers, body = \
self.conn.make_request('GET', self.bucket, obj, query=query)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
self.assertEqual(headers['content-disposition'], 'inline')
def test_get_object_response_content_encoding(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
query = 'response-content-encoding=gzip'
status, headers, body = \
self.conn.make_request('GET', self.bucket, obj, query=query)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
self.assertEqual(headers['content-encoding'], 'gzip')
    def test_get_object_range(self):
        """Ranged GETs: single ranges return 206, multiple ranges return
        a multipart/byteranges document that is parsed and checked part
        by part.
        """
        obj = 'object'
        content = b'abcdefghij'
        headers = {'x-amz-meta-test': 'swift',
                   'content-type': 'application/octet-stream'}
        self.conn.make_request(
            'PUT', self.bucket, obj, headers=headers, body=content)
        # Closed range in the middle of the object.
        headers = {'Range': 'bytes=1-5'}
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, obj, headers=headers)
        self.assertEqual(status, 206)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '5')
        self.assertTrue('x-amz-meta-test' in headers)
        self.assertEqual('swift', headers['x-amz-meta-test'])
        self.assertEqual(body, b'bcdef')
        self.assertEqual('application/octet-stream', headers['content-type'])
        # Open-ended range from an offset to the end.
        headers = {'Range': 'bytes=5-'}
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, obj, headers=headers)
        self.assertEqual(status, 206)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '5')
        self.assertTrue('x-amz-meta-test' in headers)
        self.assertEqual('swift', headers['x-amz-meta-test'])
        self.assertEqual(body, b'fghij')
        # Suffix range: the last five bytes.
        headers = {'Range': 'bytes=-5'}
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, obj, headers=headers)
        self.assertEqual(status, 206)
        self.assertCommonResponseHeaders(headers)
        self.assertTrue('content-length' in headers)
        self.assertEqual(headers['content-length'], '5')
        self.assertTrue('x-amz-meta-test' in headers)
        self.assertEqual('swift', headers['x-amz-meta-test'])
        self.assertEqual(body, b'fghij')
        # Multiple ranges produce a multipart/byteranges response.
        ranges = ['1-2', '4-5']
        headers = {'Range': 'bytes=%s' % ','.join(ranges)}
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, obj, headers=headers)
        self.assertEqual(status, 206)
        self.assertCommonResponseHeaders(headers)
        self.assertIn('content-length', headers)
        self.assertIn('content-type', headers)  # sanity
        content_type, boundary = headers['content-type'].split(';')
        self.assertEqual('multipart/byteranges', content_type)
        self.assertTrue(boundary.startswith('boundary='))  # sanity
        boundary_str = boundary[len('boundary='):]
        # TODO: Using swift.common.utils.multipart_byteranges_to_document_iters
        # could be easy enough.
        if six.PY2:
            parser = email.parser.FeedParser()
        else:
            parser = email.parser.BytesFeedParser()
        # Feed a synthetic top-level header so the email parser treats the
        # body as a multipart document with the response's boundary.
        parser.feed(
            b"Content-Type: multipart/byterange; boundary=%s\r\n\r\n" %
            boundary_str.encode('ascii'))
        parser.feed(body)
        message = parser.close()
        self.assertTrue(message.is_multipart())  # sanity check
        mime_parts = message.get_payload()
        self.assertEqual(len(mime_parts), len(ranges))  # sanity
        for index, range_value in enumerate(ranges):
            start, end = map(int, range_value.split('-'))
            # go to next section and check sanity
            self.assertTrue(mime_parts[index])
            part = mime_parts[index]
            self.assertEqual(
                'application/octet-stream', part.get_content_type())
            expected_range = 'bytes %s/%s' % (range_value, len(content))
            self.assertEqual(
                expected_range, part.get('Content-Range'))
            # rest
            payload = part.get_payload(decode=True).strip()
            self.assertEqual(content[start:end + 1], payload)
def test_get_object_if_modified_since(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
_, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
src_datetime = mktime(parsedate(headers['last-modified']))
src_datetime = src_datetime - DAY
headers = {'If-Modified-Since': formatdate(src_datetime)}
status, headers, body = \
self.conn.make_request('GET', self.bucket, obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
    def test_get_object_if_unmodified_since(self):
        """Conditional GETs honor both HEAD and listing timestamps."""
        obj = 'object'
        self.conn.make_request('PUT', self.bucket, obj)
        _, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
        src_datetime = mktime(parsedate(headers['last-modified']))
        src_datetime = src_datetime + DAY
        headers = \
            {'If-Unmodified-Since': formatdate(src_datetime)}
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, obj, headers=headers)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        # check we can use the last modified time from the listing...
        status, headers, body = \
            self.conn.make_request('GET', self.bucket)
        elem = fromstring(body, 'ListBucketResult')
        last_modified = elem.find('./Contents/LastModified').text
        listing_datetime = S3Timestamp.from_s3xmlformat(last_modified)
        # Make sure there's no fractions of a second
        self.assertEqual(int(listing_datetime), float(listing_datetime))
        header_datetime = formatdate(int(listing_datetime))
        # The listing time is not older than the object, so the
        # If-Unmodified-Since precondition passes...
        headers = {'If-Unmodified-Since': header_datetime}
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, obj, headers=headers)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        # ...and the same time as If-Modified-Since yields 304.
        headers = {'If-Modified-Since': header_datetime}
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, obj, headers=headers)
        self.assertEqual(status, 304)
        self.assertCommonResponseHeaders(headers)
def test_get_object_if_match(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj)
etag = headers['etag']
headers = {'If-Match': etag}
status, headers, body = \
self.conn.make_request('GET', self.bucket, obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
def test_get_object_if_none_match(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
headers = {'If-None-Match': 'none-match'}
status, headers, body = \
self.conn.make_request('GET', self.bucket, obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
def test_head_object_range(self):
obj = 'object'
content = b'abcdefghij'
self.conn.make_request('PUT', self.bucket, obj, body=content)
headers = {'Range': 'bytes=1-5'}
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
self.assertEqual(headers['content-length'], '5')
self.assertCommonResponseHeaders(headers)
headers = {'Range': 'bytes=5-'}
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
self.assertEqual(headers['content-length'], '5')
self.assertCommonResponseHeaders(headers)
headers = {'Range': 'bytes=-5'}
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
self.assertEqual(headers['content-length'], '5')
self.assertCommonResponseHeaders(headers)
def test_head_object_if_modified_since(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
_, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
dt = mktime(parsedate(headers['last-modified']))
dt = dt - DAY
headers = {'If-Modified-Since': formatdate(dt)}
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
def test_head_object_if_unmodified_since(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
_, headers, _ = self.conn.make_request('HEAD', self.bucket, obj)
dt = mktime(parsedate(headers['last-modified']))
dt = dt + DAY
headers = {'If-Unmodified-Since': formatdate(dt)}
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
def test_head_object_if_match(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj)
etag = headers['etag']
headers = {'If-Match': etag}
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
def test_head_object_if_none_match(self):
obj = 'object'
self.conn.make_request('PUT', self.bucket, obj)
headers = {'If-None-Match': 'none-match'}
status, headers, body = \
self.conn.make_request('HEAD', self.bucket, obj, headers=headers)
self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers)
class TestS3ApiObjectSigV4(TestS3ApiObject):
    """Re-run the object tests with AWS Signature Version 4 auth.

    Tests known to trip the SigV4 signing bug in boto<=2.x are skipped
    on old boto via the decorators below.
    """
    @classmethod
    def setUpClass(cls):
        # Switch the test S3 client into SigV4 mode for this class.
        os.environ['S3_USE_SIGV4'] = "True"
    @classmethod
    def tearDownClass(cls):
        del os.environ['S3_USE_SIGV4']
    def setUp(self):
        super(TestS3ApiObjectSigV4, self).setUp()
    @unittest.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
                     'This stuff got the signing issue of boto<=2.x')
    def test_put_object_metadata(self):
        super(TestS3ApiObjectSigV4, self).test_put_object_metadata()
    @unittest.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
                     'This stuff got the signing issue of boto<=2.x')
    def test_put_object_copy_source_if_modified_since(self):
        super(TestS3ApiObjectSigV4, self).\
            test_put_object_copy_source_if_modified_since()
    @unittest.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
                     'This stuff got the signing issue of boto<=2.x')
    def test_put_object_copy_source_if_unmodified_since(self):
        super(TestS3ApiObjectSigV4, self).\
            test_put_object_copy_source_if_unmodified_since()
    @unittest.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
                     'This stuff got the signing issue of boto<=2.x')
    def test_put_object_copy_source_if_match(self):
        super(TestS3ApiObjectSigV4,
              self).test_put_object_copy_source_if_match()
    @unittest.skipIf(StrictVersion(boto.__version__) < StrictVersion('3.0'),
                     'This stuff got the signing issue of boto<=2.x')
    def test_put_object_copy_source_if_none_match(self):
        super(TestS3ApiObjectSigV4,
              self).test_put_object_copy_source_if_none_match()
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/s3api/test_object.py |
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
import test.functional as tf
from swift.common.middleware.s3api.etree import fromstring
from unittest import SkipTest
from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection
from test.functional.s3api.utils import get_error_code
def setUpModule():
    # unittest module-level hook: initialise the functional-test
    # configuration before any test in this module runs.
    tf.setup_package()
def tearDownModule():
    # unittest module-level hook: undo whatever setUpModule() set up.
    tf.teardown_package()
class TestS3Acl(S3ApiBase):
    """Functional tests for bucket and object ACL requests.

    Uses the primary connection (bucket owner) plus a third,
    reduced-access user (``conn3``) to exercise AccessDenied paths.
    """
    def setUp(self):
        """Create the test bucket and the reduced-access connection.

        Skips the whole class unless the functional-test config provides
        the third user's credentials.
        """
        super(TestS3Acl, self).setUp()
        self.bucket = 'bucket'
        self.obj = 'object'
        if 's3_access_key3' not in tf.config or \
                's3_secret_key3' not in tf.config:
            raise SkipTest(
                'TestS3Acl requires s3_access_key3 and s3_secret_key3 '
                'configured for reduced-access user')
        status, headers, body = self.conn.make_request('PUT', self.bucket)
        self.assertEqual(status, 200, body)
        access_key3 = tf.config['s3_access_key3']
        secret_key3 = tf.config['s3_secret_key3']
        self.conn3 = Connection(access_key3, secret_key3, access_key3)
    def test_acl(self):
        """PUT/GET bucket ACL and GET object ACL round-trip correctly."""
        self.conn.make_request('PUT', self.bucket, self.obj)
        query = 'acl'
        # PUT Bucket ACL
        headers = {'x-amz-acl': 'public-read'}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, headers=headers,
                                   query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        self.assertEqual(headers['content-length'], '0')
        # GET Bucket ACL
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        # TODO: Fix the response that last-modified must be in the response.
        # self.assertTrue(headers['last-modified'] is not None)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue(headers['content-type'] is not None)
        elem = fromstring(body, 'AccessControlPolicy')
        owner = elem.find('Owner')
        # The bucket owner is reported as both ID and DisplayName.
        self.assertEqual(owner.find('ID').text, self.conn.user_id)
        self.assertEqual(owner.find('DisplayName').text, self.conn.user_id)
        acl = elem.find('AccessControlList')
        self.assertTrue(acl.find('Grant') is not None)
        # GET Object ACL
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, self.obj, query=query)
        self.assertEqual(status, 200)
        self.assertCommonResponseHeaders(headers)
        # TODO: Fix the response that last-modified must be in the response.
        # self.assertTrue(headers['last-modified'] is not None)
        self.assertEqual(headers['content-length'], str(len(body)))
        self.assertTrue(headers['content-type'] is not None)
        elem = fromstring(body, 'AccessControlPolicy')
        owner = elem.find('Owner')
        self.assertEqual(owner.find('ID').text, self.conn.user_id)
        self.assertEqual(owner.find('DisplayName').text, self.conn.user_id)
        acl = elem.find('AccessControlList')
        self.assertTrue(acl.find('Grant') is not None)
    def test_put_bucket_acl_error(self):
        """PUT bucket ACL error paths: bad signature, missing bucket,
        and a user without permission."""
        req_headers = {'x-amz-acl': 'public-read'}
        aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            aws_error_conn.make_request('PUT', self.bucket,
                                        headers=req_headers, query='acl')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        status, headers, body = \
            self.conn.make_request('PUT', 'nothing',
                                   headers=req_headers, query='acl')
        self.assertEqual(get_error_code(body), 'NoSuchBucket')
        status, headers, body = \
            self.conn3.make_request('PUT', self.bucket,
                                    headers=req_headers, query='acl')
        self.assertEqual(get_error_code(body), 'AccessDenied')
    def test_get_bucket_acl_error(self):
        """GET bucket ACL error paths: bad signature, missing bucket,
        and a user without permission."""
        aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            aws_error_conn.make_request('GET', self.bucket, query='acl')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        status, headers, body = \
            self.conn.make_request('GET', 'nothing', query='acl')
        self.assertEqual(get_error_code(body), 'NoSuchBucket')
        status, headers, body = \
            self.conn3.make_request('GET', self.bucket, query='acl')
        self.assertEqual(get_error_code(body), 'AccessDenied')
    def test_get_object_acl_error(self):
        """GET object ACL error paths: bad signature, missing key,
        and a user without permission."""
        self.conn.make_request('PUT', self.bucket, self.obj)
        aws_error_conn = Connection(tf.config['s3_access_key'], 'invalid')
        status, headers, body = \
            aws_error_conn.make_request('GET', self.bucket, self.obj,
                                        query='acl')
        self.assertEqual(get_error_code(body), 'SignatureDoesNotMatch')
        status, headers, body = \
            self.conn.make_request('GET', self.bucket, 'nothing', query='acl')
        self.assertEqual(get_error_code(body), 'NoSuchKey')
        status, headers, body = \
            self.conn3.make_request('GET', self.bucket, self.obj, query='acl')
        self.assertEqual(get_error_code(body), 'AccessDenied')
class TestS3AclSigV4(TestS3Acl):
    """Re-run the ACL tests with AWS Signature Version 4 auth."""
    @classmethod
    def setUpClass(cls):
        # Switch the test S3 client into SigV4 mode for this class.
        os.environ['S3_USE_SIGV4'] = "True"
    @classmethod
    def tearDownClass(cls):
        del os.environ['S3_USE_SIGV4']
    def setUp(self):
        super(TestS3AclSigV4, self).setUp()
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| swift-master | test/functional/s3api/test_acl.py |