Skip to content

Commit 0acbafd

Browse files
committed
Merge branch 'release-1.3.22'
* release-1.3.22: Bumping version to 1.3.22 Update CHANGELOG with the latest features Update completer test with new services Update changelog with aws#825 Add changelog entry for aws#834 Fix changelog entry for merge of aws#831 Added test_cancel_after_upload_id to test_tasks Update changelog with fix for aws#549 Disable fix_s3_host when --endpoint-url is given Fixes issue aws#834 Update changelog with bugfix Add validation to ensure we don't mv a file onto itself Let aws.cmd find python.exe on paths with spaces.
2 parents 3f48075 + ecfda6c commit 0acbafd

File tree

14 files changed

+240
-17
lines changed

14 files changed

+240
-17
lines changed

CHANGELOG.rst

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,34 @@
22
CHANGELOG
33
=========
44

5+
6+
1.3.22
7+
======
8+
9+
* feature:``aws cwlogs``: Add support for Amazon CloudWatch Logs
10+
* feature:``aws cognito-sync``: Add support for
11+
Amazon Cognito Service
12+
* feature:``aws cognito-identity``: Add support for
13+
Amazon Cognito Identity Service
14+
* feature:``aws route53``: Update ``aws route53`` command to the
15+
latest version
16+
* feature:``aws ec2``: Update ``aws ec2`` command to the
17+
latest version
18+
* bugfix:``aws s3/s3api``: Fix issue where ``--endpoint-url``
19+
wasn't being used for ``aws s3/s3api`` commands
20+
(`issue 549 <https://github.com/aws/aws-cli/issues/549>`__)
21+
* bugfix:``aws s3 mv``: Fix bug where using the ``aws s3 mv``
22+
command to move a large file onto itself results in the
23+
file being deleted
24+
(`issue 831 <https://github.com/aws/aws-cli/issues/831>`__)
25+
* bugfix:``aws s3``: Fix issue where parts in a multipart
26+
upload are still being uploaded when a part has failed
27+
(`issue 834 <https://github.com/aws/aws-cli/issues/834>`__)
28+
* bugfix:Windows: Fix issue where ``python.exe`` is on a path
29+
that contains spaces
30+
(`issue 825 <https://github.com/aws/aws-cli/pull/825>`__)
31+
32+
533
1.3.21
634
======
735

awscli/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
"""
1818
import os
1919

20-
__version__ = '1.3.21'
20+
__version__ = '1.3.22'
2121

2222
#
2323
# Get our data path to be added to botocore's search path

awscli/customizations/s3/s3.py

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -662,6 +662,23 @@ def add_paths(self, paths):
662662
self.parameters['dest'] = paths[1]
663663
elif len(paths) == 1:
664664
self.parameters['dest'] = paths[0]
665+
self._validate_path_args()
666+
667+
def _validate_path_args(self):
668+
# If we're using a mv command, you can't copy the object onto itself.
669+
params = self.parameters
670+
if self.cmd == 'mv' and self._same_path(params['src'], params['dest']):
671+
raise ValueError("Cannot mv a file onto itself: '%s' - '%s'" % (
672+
params['src'], params['dest']))
673+
674+
def _same_path(self, src, dest):
675+
if not self.parameters['paths_type'] == 's3s3':
676+
return False
677+
elif src == dest:
678+
return True
679+
elif dest.endswith('/'):
680+
src_base = os.path.basename(src)
681+
return src == os.path.join(dest, src_base)
665682

666683
def _normalize_s3_trailing_slash(self, paths):
667684
for i, path in enumerate(paths):

awscli/customizations/s3/tasks.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -560,10 +560,10 @@ def wait_for_parts_to_finish(self):
560560

561561
def wait_for_upload_id(self):
    """Block until the multipart upload id has been set, then return it.

    Wakes up every second so a cancellation that happens without a
    condition notify is still noticed promptly.  Raises
    ``UploadCancelledError`` once the task has been cancelled — even
    if an upload id did arrive — so no further parts are uploaded.
    """
    with self._upload_id_condition:
        while self._upload_id is None:
            if self._state == self._CANCELLED:
                break
            self._upload_id_condition.wait(timeout=1)
        if self._state == self._CANCELLED:
            raise UploadCancelledError("Upload has been cancelled.")
        return self._upload_id
568568

569569
def wait_for_completion(self):
Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,45 @@
1+
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License"). You
4+
# may not use this file except in compliance with the License. A copy of
5+
# the License is located at
6+
#
7+
# http://aws.amazon.com/apache2.0/
8+
#
9+
# or in the "license" file accompanying this file. This file is
10+
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11+
# ANY KIND, either express or implied. See the License for the specific
12+
# language governing permissions and limitations under the License.
13+
"""Disable endpoint url customizations for s3.
14+
15+
There's a customization in botocore such that for S3 operations
16+
we try to fix the S3 endpoint url based on whether a bucket is
17+
dns compatible. We also try to map the endpoint url to the
18+
standard S3 region (s3.amazonaws.com). This normally happens
19+
even if a user provides an --endpoint-url (if the bucket is
20+
DNS compatible).
21+
22+
This customization ensures that if a user specifies
23+
an --endpoint-url, then we turn off the botocore customization
24+
that messes with endpoint url.
25+
26+
"""
27+
from functools import partial
28+
29+
from botocore.handlers import fix_s3_host
30+
31+
32+
def register_s3_endpoint(cli):
    """Hook the --endpoint-url customization into the CLI event system.

    Binds the CLI driver itself as the ``event_handler`` argument so the
    callback can later unregister botocore's endpoint-fixing handler.
    """
    cli.register('top-level-args-parsed',
                 partial(on_top_level_args_parsed, event_handler=cli))
35+
36+
37+
def on_top_level_args_parsed(parsed_args, event_handler, **kwargs):
    """Disable botocore's ``fix_s3_host`` when --endpoint-url is given.

    ``fix_s3_host`` can rewrite the endpoint to the standard S3 region
    endpoint (s3.amazonaws.com) for DNS-compatible buckets.  An
    explicitly supplied ``--endpoint-url`` must win, so for the s3 and
    s3api commands the whole handler is unregistered.
    """
    wants_custom_endpoint = (
        parsed_args.command in ('s3', 's3api') and
        parsed_args.endpoint_url is not None)
    if wants_custom_endpoint:
        event_handler.unregister('before-auth.s3', fix_s3_host)

awscli/handlers.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,7 @@
4646
from awscli.customizations.cloudsearch import initialize as cloudsearch_init
4747
from awscli.customizations.emr.emr import emr_initialize
4848
from awscli.customizations.cloudsearchdomain import register_cloudsearchdomain
49+
from awscli.customizations.s3endpoint import register_s3_endpoint
4950

5051

5152
def awscli_initialize(event_handlers):
@@ -94,3 +95,4 @@ def awscli_initialize(event_handlers):
9495
cloudsearch_init(event_handlers)
9596
emr_initialize(event_handlers)
9697
register_cloudsearchdomain(event_handlers)
98+
register_s3_endpoint(event_handlers)

bin/aws.cmd

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
@echo OFF
22
REM="""
33
setlocal
4-
set PythonExe=
4+
set PythonExe=""
55
set PythonExeFlags=
66

77
for %%i in (cmd bat exe) do (
@@ -16,13 +16,13 @@ for /f "tokens=2 delims==" %%i in ('assoc .py') do (
1616
)
1717
)
1818
)
19-
"%PythonExe%" -x %PythonExeFlags% "%~f0" %*
19+
%PythonExe% -x %PythonExeFlags% "%~f0" %*
2020
goto :EOF
2121

2222
:SetPythonExe
23-
if not [%1]==[""] (
24-
if ["%PythonExe%"]==[""] (
25-
set PythonExe=%~1
23+
if not ["%~1"]==[""] (
24+
if [%PythonExe%]==[""] (
25+
set PythonExe="%~1"
2626
)
2727
)
2828
goto :EOF

doc/source/conf.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@
5252
# The short X.Y version.
5353
version = '1.3.'
5454
# The full version, including alpha/beta/rc tags.
55-
release = '1.3.21'
55+
release = '1.3.22'
5656

5757
# The language for content autogenerated by Sphinx. Refer to documentation
5858
# for a list of supported languages.

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import awscli
77

88

9-
requires = ['botocore>=0.55.0,<0.56.0',
9+
requires = ['botocore>=0.56.0,<0.57.0',
1010
'bcdoc>=0.12.0,<0.13.0',
1111
'six>=1.1.0',
1212
'colorama==0.2.5',

tests/integration/customizations/s3/test_plugin.py

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -247,6 +247,27 @@ def test_mv_to_nonexistent_bucket(self):
247247
p = aws('s3 mv %s s3://bad-noexist-13143242/foo.txt' % (full_path,))
248248
self.assertEqual(p.rc, 1)
249249

250+
def test_cant_move_file_onto_itself_small_file(self):
    # Validation happens client side, so a tiny object is enough to
    # prove the mv is rejected before any copy request is attempted.
    bucket = self.create_bucket()
    self.put_object(bucket, key_name='key.txt', contents='foo')
    result = aws('s3 mv s3://%s/key.txt s3://%s/key.txt' % (bucket, bucket))
    self.assertEqual(result.rc, 255)
    self.assertIn('Cannot mv a file onto itself', result.stderr)
258+
259+
def test_cant_move_large_file_onto_itself(self):
    # A 10MB object goes through the multipart copy path.  The S3 API
    # would happily multipart-copy an object onto itself, but mv is a
    # cp followed by an rm of the source, so it must refuse here too,
    # consistent with the small-file case.
    contents = six.BytesIO(b'a' * (1024 * 1024 * 10))
    bucket = self.create_bucket()
    self.put_object(bucket, key_name='key.txt', contents=contents)
    result = aws('s3 mv s3://%s/key.txt s3://%s/key.txt' % (bucket, bucket))
    self.assertEqual(result.rc, 255)
    self.assertIn('Cannot mv a file onto itself', result.stderr)
270+
250271

251272
class TestRm(BaseS3CLICommand):
252273
@unittest.skipIf(platform.system() not in ['Darwin', 'Linux'],

0 commit comments

Comments
 (0)