diff --git a/awscli/customizations/cloudformation/artifact_exporter.py b/awscli/customizations/cloudformation/artifact_exporter.py
index 20f955305066..80158c4ad73e 100644
--- a/awscli/customizations/cloudformation/artifact_exporter.py
+++ b/awscli/customizations/cloudformation/artifact_exporter.py
@@ -289,7 +289,7 @@ class ResourceWithS3UrlDict(Resource):
     VERSION_PROPERTY = None
 
     def __init__(self, uploader):
-        super(ResourceWithS3UrlDict, self).__init__(uploader)
+        super().__init__(uploader)
 
     def do_export(self, resource_id, resource_dict, parent_dir):
         """
@@ -444,7 +444,7 @@ class CloudFormationStackResource(Resource):
     PROPERTY_NAME = "TemplateURL"
 
     def __init__(self, uploader):
-        super(CloudFormationStackResource, self).__init__(uploader)
+        super().__init__(uploader)
 
     def do_export(self, resource_id, resource_dict, parent_dir):
         """
diff --git a/awscli/customizations/cloudtrail/utils.py b/awscli/customizations/cloudtrail/utils.py
index 995b32fcace4..3ade7383bd22 100644
--- a/awscli/customizations/cloudtrail/utils.py
+++ b/awscli/customizations/cloudtrail/utils.py
@@ -28,7 +28,7 @@ def get_trail_by_arn(cloudtrail_client, trail_arn):
     for trail in trails:
         if trail.get("TrailARN", None) == trail_arn:
             return trail
-    raise ValueError("A trail could not be found for %s" % trail_arn)
+    raise ValueError(f"A trail could not be found for {trail_arn}")
 
 
 def format_date(date):
@@ -45,7 +45,7 @@ def parse_date(date_string):
     try:
         return parser.parse(date_string)
     except ValueError:
-        raise ValueError("Unable to parse date value: %s" % date_string)
+        raise ValueError(f"Unable to parse date value: {date_string}")
 
 
 class PublicKeyProvider:
@@ -94,5 +94,5 @@ def get_public_key(self, signature_generate_time, public_key_fingerprint):
                 return key["Value"]
 
         raise RuntimeError(
-            "No public keys found for key with fingerprint: %s" % public_key_fingerprint
+            f"No public keys found for key with fingerprint: {public_key_fingerprint}"
         )
diff --git a/awscli/customizations/cloudtrail/validation.py b/awscli/customizations/cloudtrail/validation.py
index fb4518535c66..6df84ab9e87a 100644
--- a/awscli/customizations/cloudtrail/validation.py
+++ b/awscli/customizations/cloudtrail/validation.py
@@ -66,7 +66,7 @@ def parse_date(date_string):
         return parser.parse(date_string)
     except ValueError:
         raise ParamValidationError(
-            'Unable to parse date value: %s' % date_string
+            f'Unable to parse date value: {date_string}'
         )
 
 
@@ -77,7 +77,7 @@ def assert_cloudtrail_arn_is_valid(trail_arn):
     pattern = re.compile(r'arn:.+:cloudtrail:.+:\d{12}:trail/.+')
     if not pattern.match(trail_arn):
         raise ParamValidationError(
-            'Invalid trail ARN provided: %s' % trail_arn
+            f'Invalid trail ARN provided: {trail_arn}'
        )
 
 
@@ -202,17 +202,16 @@ class DigestError(ValueError):
 class DigestSignatureError(DigestError):
     """Exception raised when a digest signature is invalid"""
     def __init__(self, bucket, key):
-        message = ('Digest file\ts3://%s/%s\tINVALID: signature verification '
-                   'failed') % (bucket, key)
-        super(DigestSignatureError, self).__init__(message)
+        message = (f'Digest file\ts3://{bucket}/{key}\tINVALID: signature verification '
+                   'failed')
+        super().__init__(message)
 
 
 class InvalidDigestFormat(DigestError):
     """Exception raised when a digest has an invalid format"""
     def __init__(self, bucket, key):
-        message = 'Digest file\ts3://%s/%s\tINVALID: invalid format' % (bucket,
-                                                                        key)
-        super(InvalidDigestFormat, self).__init__(message)
+        message = f'Digest file\ts3://{bucket}/{key}\tINVALID: invalid format'
+        super().__init__(message)
 
 
 class DigestProvider:
@@ -444,8 +443,7 @@ def traverse(self, start_date, end_date=None):
                 key, end_date = self._find_next_digest(
                     digests=digests, bucket=bucket, last_key=key,
                     last_start_date=last_start_date, cb=self._on_invalid,
-                    message='Digest file\ts3://%s/%s\tINVALID: %s'
-                            % (bucket, key, str(e)))
+                    message=f'Digest file\ts3://{bucket}/{key}\tINVALID: {str(e)}')
 
     def _load_digests(self, bucket, prefix, start_date, end_date):
         return self.digest_provider.load_digest_keys_in_range(
@@ -502,16 +500,14 @@ def _load_and_validate_digest(self, public_keys, bucket, key):
         if digest_data['digestS3Bucket'] != bucket \
                 or digest_data['digestS3Object'] != key:
             raise DigestError(
-                ('Digest file\ts3://%s/%s\tINVALID: has been moved from its '
-                 'original location') % (bucket, key))
+                f'Digest file\ts3://{bucket}/{key}\tINVALID: has been moved from its '
+                'original location')
         # Get the public keys in the given time range.
         fingerprint = digest_data['digestPublicKeyFingerprint']
         if fingerprint not in public_keys:
             raise DigestError(
-                ('Digest file\ts3://%s/%s\tINVALID: public key not found in '
-                 'region %s for fingerprint %s') %
-                (bucket, key, self.digest_provider.trail_home_region,
-                 fingerprint))
+                f'Digest file\ts3://{bucket}/{key}\tINVALID: public key not found in '
+                f'region {self.digest_provider.trail_home_region} for fingerprint {fingerprint}')
         public_key_hex = public_keys[fingerprint]['Value']
         self._digest_validator.validate(
             bucket, key, public_key_hex, digest_data, digest)
@@ -523,9 +519,7 @@ def _load_public_keys(self, start_date, end_date):
             start_date, end_date)
         if not public_keys:
             raise RuntimeError(
-                'No public keys found between %s and %s' %
-                (format_display_date(start_date),
-                 format_display_date(end_date)))
+                f'No public keys found between {format_display_date(start_date)} and {format_display_date(end_date)}')
         return public_keys
 
 
@@ -554,9 +548,8 @@ def validate(self, bucket, key, public_key, digest_data, inflated_digest):
             public_key = RSA.new_public_key_from_der_data(decoded_key)
         except RuntimeError:
             raise DigestError(
-                ('Digest file\ts3://%s/%s\tINVALID: Unable to load PKCS #1 key'
-                 ' with fingerprint %s')
-                % (bucket, key, digest_data['digestPublicKeyFingerprint']))
+                ('Digest file\ts3://{}/{}\tINVALID: Unable to load PKCS #1 key'
+                 ' with fingerprint {}').format(bucket, key, digest_data['digestPublicKeyFingerprint']))
 
         to_sign = self._create_string_to_sign(digest_data, inflated_digest)
         signature_bytes = binascii.unhexlify(digest_data['_signature'])
@@ -577,7 +570,7 @@ def _create_string_to_sign(self, digest_data, inflated_digest):
         if previous_signature is None:
             # The value must be 'null' to match the Java implementation.
             previous_signature = 'null'
-        string_to_sign = "%s\n%s/%s\n%s\n%s" % (
+        string_to_sign = "{}\n{}/{}\n{}\n{}".format(
             digest_data['digestEndTime'],
             digest_data['digestS3Bucket'],
             digest_data['digestS3Object'],
@@ -668,7 +661,7 @@ class CloudTrailValidateLogs(BasicCommand):
     ]
 
     def __init__(self, session):
-        super(CloudTrailValidateLogs, self).__init__(session)
+        super().__init__(session)
         self.trail_arn = None
         self.is_verbose = False
         self.start_time = None
@@ -750,8 +743,7 @@ def _call(self):
             self._track_found_times(digest)
             self._valid_digests += 1
             self._write_status(
-                'Digest file\ts3://%s/%s\tvalid'
-                % (digest['digestS3Bucket'], digest['digestS3Object']))
+                'Digest file\ts3://{}/{}\tvalid'.format(digest['digestS3Bucket'], digest['digestS3Object']))
             if not digest['logFiles']:
                 continue
             for log in digest['logFiles']:
@@ -790,8 +782,7 @@ def _download_log(self, log):
                 self._on_log_invalid(log)
             else:
                 self._valid_logs += 1
-                self._write_status('Log file\ts3://%s/%s\tvalid'
-                                   % (log['s3Bucket'], log['s3Object']))
+                self._write_status('Log file\ts3://{}/{}\tvalid'.format(log['s3Bucket'], log['s3Object']))
         except ClientError as e:
             if e.response['Error']['Code'] != 'NoSuchKey':
                 raise
@@ -802,35 +793,29 @@ def _download_log(self, log):
     def _write_status(self, message, is_error=False):
         if is_error:
             if self._is_last_status_double_space:
-                sys.stderr.write("%s\n\n" % message)
+                sys.stderr.write(f"{message}\n\n")
             else:
-                sys.stderr.write("\n%s\n\n" % message)
+                sys.stderr.write(f"\n{message}\n\n")
             self._is_last_status_double_space = True
         elif self.is_verbose:
             self._is_last_status_double_space = False
-            sys.stdout.write("%s\n" % message)
+            sys.stdout.write(f"{message}\n")
 
     def _write_startup_text(self):
         sys.stdout.write(
-            'Validating log files for trail %s between %s and %s\n\n'
-            % (self.trail_arn, format_display_date(self.start_time),
-               format_display_date(self.end_time)))
+            f'Validating log files for trail {self.trail_arn} between {format_display_date(self.start_time)} and {format_display_date(self.end_time)}\n\n')
 
     def _write_summary_text(self):
         if not self._is_last_status_double_space:
             sys.stdout.write('\n')
-        sys.stdout.write('Results requested for %s to %s\n'
-                         % (format_display_date(self.start_time),
-                            format_display_date(self.end_time)))
+        sys.stdout.write(f'Results requested for {format_display_date(self.start_time)} to {format_display_date(self.end_time)}\n')
         if not self._valid_digests and not self._invalid_digests:
             sys.stdout.write('No digests found\n')
             return
         if not self._found_start_time or not self._found_end_time:
             sys.stdout.write('No valid digests found in range\n')
         else:
-            sys.stdout.write('Results found for %s to %s:\n'
-                             % (format_display_date(self._found_start_time),
-                                format_display_date(self._found_end_time)))
+            sys.stdout.write(f'Results found for {format_display_date(self._found_start_time)} to {format_display_date(self._found_end_time)}:\n')
         self._write_ratio(self._valid_digests, self._invalid_digests, 'digest')
         self._write_ratio(self._valid_logs, self._invalid_logs, 'log')
         sys.stdout.write('\n')
@@ -845,13 +830,11 @@ def _write_ratio(self, valid, invalid, name):
 
     def _on_missing_digest(self, bucket, last_key, **kwargs):
         self._invalid_digests += 1
-        self._write_status('Digest file\ts3://%s/%s\tINVALID: not found'
-                           % (bucket, last_key), True)
+        self._write_status(f'Digest file\ts3://{bucket}/{last_key}\tINVALID: not found', True)
 
     def _on_digest_gap(self, **kwargs):
         self._write_status(
-            'No log files were delivered by CloudTrail between %s and %s'
-            % (format_display_date(kwargs['next_end_date']),
+            'No log files were delivered by CloudTrail between {} and {}'.format(format_display_date(kwargs['next_end_date']),
                format_display_date(kwargs['last_start_date'])), True)
 
     def _on_invalid_digest(self, message, **kwargs):
@@ -861,17 +844,14 @@ def _on_invalid_digest(self, message, **kwargs):
     def _on_invalid_log_format(self, log_data):
         self._invalid_logs += 1
         self._write_status(
-            ('Log file\ts3://%s/%s\tINVALID: invalid format'
-             % (log_data['s3Bucket'], log_data['s3Object'])), True)
+            ('Log file\ts3://{}/{}\tINVALID: invalid format'.format(log_data['s3Bucket'], log_data['s3Object'])), True)
 
     def _on_log_invalid(self, log_data):
         self._invalid_logs += 1
         self._write_status(
-            "Log file\ts3://%s/%s\tINVALID: hash value doesn't match"
-            % (log_data['s3Bucket'], log_data['s3Object']), True)
+            "Log file\ts3://{}/{}\tINVALID: hash value doesn't match".format(log_data['s3Bucket'], log_data['s3Object']), True)
 
     def _on_missing_log(self, log_data):
         self._invalid_logs += 1
         self._write_status(
-            'Log file\ts3://%s/%s\tINVALID: not found'
-            % (log_data['s3Bucket'], log_data['s3Object']), True)
+            'Log file\ts3://{}/{}\tINVALID: not found'.format(log_data['s3Bucket'], log_data['s3Object']), True)
diff --git a/awscli/customizations/configservice/getstatus.py b/awscli/customizations/configservice/getstatus.py
index d3a2dd8ce2e9..fe01925065b0 100644
--- a/awscli/customizations/configservice/getstatus.py
+++ b/awscli/customizations/configservice/getstatus.py
@@ -30,7 +30,7 @@ class GetStatusCommand(BasicCommand):
 
     def __init__(self, session):
         self._config_client = None
-        super(GetStatusCommand, self).__init__(session)
+        super().__init__(session)
 
     def _run_main(self, parsed_args, parsed_globals):
         self._setup_client(parsed_globals)
@@ -57,12 +57,12 @@ def _check_configuration_recorders(self):
     def _check_configure_recorder_status(self, configuration_recorder):
         # Get the name of the recorder and print it out.
         name = configuration_recorder['name']
-        sys.stdout.write('name: %s\n' % name)
+        sys.stdout.write(f'name: {name}\n')
 
         # Get the recording status and print it out.
         recording = configuration_recorder['recording']
         recording_map = {False: 'OFF', True: 'ON'}
-        sys.stdout.write('recorder: %s\n' % recording_map[recording])
+        sys.stdout.write(f'recorder: {recording_map[recording]}\n')
 
         # If the recorder is on, get the last status and print it out.
         if recording:
@@ -78,7 +78,7 @@ def _check_delivery_channels(self):
     def _check_delivery_channel_status(self, delivery_channel):
         # Get the name of the delivery channel and print it out.
         name = delivery_channel['name']
-        sys.stdout.write('name: %s\n' % name)
+        sys.stdout.write(f'name: {name}\n')
 
         # Obtain the various delivery statuses.
         stream_delivery = delivery_channel['configStreamDeliveryInfo']
@@ -95,7 +95,7 @@ def _check_delivery_channel_status(self, delivery_channel):
 
     def _check_last_status(self, status, status_name=''):
         last_status = status['lastStatus']
-        sys.stdout.write('last %sstatus: %s\n' % (status_name, last_status))
+        sys.stdout.write(f'last {status_name}status: {last_status}\n')
         if last_status == "FAILURE":
-            sys.stdout.write('error code: %s\n' % status['lastErrorCode'])
-            sys.stdout.write('message: %s\n' % status['lastErrorMessage'])
+            sys.stdout.write('error code: {}\n'.format(status['lastErrorCode']))
+            sys.stdout.write('message: {}\n'.format(status['lastErrorMessage']))
diff --git a/awscli/customizations/configservice/subscribe.py b/awscli/customizations/configservice/subscribe.py
index 0a3c1b2def31..d1c62af6a3ba 100644
--- a/awscli/customizations/configservice/subscribe.py
+++ b/awscli/customizations/configservice/subscribe.py
@@ -59,7 +59,7 @@ def __init__(self, session):
         self._s3_client = None
         self._sns_client = None
         self._config_client = None
-        super(SubscribeCommand, self).__init__(session)
+        super().__init__(session)
 
     def _run_main(self, parsed_args, parsed_globals):
         # Setup the necessary all of the necessary clients.
@@ -139,9 +139,9 @@ def prepare_bucket(self, s3_path):
         bucket_exists = self._check_bucket_exists(bucket)
         if not bucket_exists:
             self._create_bucket(bucket)
-            sys.stdout.write('Using new S3 bucket: %s\n' % bucket)
+            sys.stdout.write(f'Using new S3 bucket: {bucket}\n')
         else:
-            sys.stdout.write('Using existing S3 bucket: %s\n' % bucket)
+            sys.stdout.write(f'Using existing S3 bucket: {bucket}\n')
         return bucket, key
 
     def _check_bucket_exists(self, bucket):
@@ -168,9 +168,9 @@ def prepare_topic(self, sns_topic):
         if not self._check_is_arn(sns_topic):
             response = self._sns_client.create_topic(Name=sns_topic)
             sns_topic_arn = response['TopicArn']
-            sys.stdout.write('Using new SNS topic: %s\n' % sns_topic_arn)
+            sys.stdout.write(f'Using new SNS topic: {sns_topic_arn}\n')
         else:
-            sys.stdout.write('Using existing SNS topic: %s\n' % sns_topic_arn)
+            sys.stdout.write(f'Using existing SNS topic: {sns_topic_arn}\n')
         return sns_topic_arn
 
     def _check_is_arn(self, sns_topic):
diff --git a/awscli/customizations/configure/configure.py b/awscli/customizations/configure/configure.py
index aa8fc88ea9fd..6f7770cf618d 100644
--- a/awscli/customizations/configure/configure.py
+++ b/awscli/customizations/configure/configure.py
@@ -45,7 +45,7 @@ class InteractivePrompter:
     def get_value(self, current_value, config_name, prompt_text=''):
         if config_name in ('aws_access_key_id', 'aws_secret_access_key'):
             current_value = mask_value(current_value)
-        response = compat_input("%s [%s]: " % (prompt_text, current_value))
+        response = compat_input(f"{prompt_text} [{current_value}]: ")
         if not response:
             # If the user hits enter, we return a value of None
             # instead of an empty string. That way we can determine
@@ -98,7 +98,7 @@ class ConfigureCommand(BasicCommand):
     ]
 
     def __init__(self, session, prompter=None, config_writer=None):
-        super(ConfigureCommand, self).__init__(session)
+        super().__init__(session)
         if prompter is None:
             prompter = InteractivePrompter()
         self._prompter = prompter
diff --git a/awscli/customizations/configure/exportcreds.py b/awscli/customizations/configure/exportcreds.py
index 7082f0c5b8ba..bf516865390e 100644
--- a/awscli/customizations/configure/exportcreds.py
+++ b/awscli/customizations/configure/exportcreds.py
@@ -194,7 +194,7 @@ class ConfigureExportCredentialsCommand(BasicCommand):
     _MAX_RECURSION = 4
 
     def __init__(self, session, out_stream=None, error_stream=None, env=None):
-        super(ConfigureExportCredentialsCommand, self).__init__(session)
+        super().__init__(session)
         if out_stream is None:
             out_stream = sys.stdout
         if error_stream is None:
diff --git a/awscli/customizations/configure/get.py b/awscli/customizations/configure/get.py
index 624b8768d793..306604bcf3b4 100644
--- a/awscli/customizations/configure/get.py
+++ b/awscli/customizations/configure/get.py
@@ -34,7 +34,7 @@ class ConfigureGetCommand(BasicCommand):
     ]
 
     def __init__(self, session, stream=None, error_stream=None):
-        super(ConfigureGetCommand, self).__init__(session)
+        super().__init__(session)
         if stream is None:
             stream = sys.stdout
         if error_stream is None:
@@ -53,7 +53,7 @@ def _run_main(self, args, parsed_globals):
         else:
             value = self._get_dotted_config_value(varname)
 
-        LOG.debug('Config value retrieved: %s' % value)
+        LOG.debug(f'Config value retrieved: {value}')
 
         if isinstance(value, str):
             self._stream.write(value)
@@ -63,8 +63,8 @@ def _run_main(self, args, parsed_globals):
             # TODO: add support for this. We would need to print it off in
             # the same format as the config file.
             self._error_stream.write(
-                'varname (%s) must reference a value, not a section or '
-                'sub-section.' % varname
+                f'varname ({varname}) must reference a value, not a section or '
+                'sub-section.'
             )
             return 1
         else:
diff --git a/awscli/customizations/configure/importer.py b/awscli/customizations/configure/importer.py
index 2aeb4627c787..2a35a264771f 100644
--- a/awscli/customizations/configure/importer.py
+++ b/awscli/customizations/configure/importer.py
@@ -60,7 +60,7 @@ class ConfigureImportCommand(BasicCommand):
 
     def __init__(self, session, csv_parser=None, importer=None,
                  out_stream=None):
-        super(ConfigureImportCommand, self).__init__(session)
+        super().__init__(session)
         if csv_parser is None:
             csv_parser = CSVCredentialParser()
         self._csv_parser = csv_parser
@@ -86,7 +86,7 @@ def _import_csv(self, contents):
                 credential, config_path,
                 profile_prefix=self._profile_prefix,
             )
-        import_msg = 'Successfully imported %s profile(s)\n' % len(credentials)
+        import_msg = f'Successfully imported {len(credentials)} profile(s)\n'
         uni_print(import_msg, out_file=self._out_stream)
 
     def _run_main(self, parsed_args, parsed_globals):
diff --git a/awscli/customizations/configure/list.py b/awscli/customizations/configure/list.py
index c1f0f342e058..a1c073b5a961 100644
--- a/awscli/customizations/configure/list.py
+++ b/awscli/customizations/configure/list.py
@@ -51,7 +51,7 @@ class ConfigureListCommand(BasicCommand):
     )
 
     def __init__(self, session, stream=None):
-        super(ConfigureListCommand, self).__init__(session)
+        super().__init__(session)
         if stream is None:
             stream = sys.stdout
         self._stream = stream
diff --git a/awscli/customizations/configure/listprofiles.py b/awscli/customizations/configure/listprofiles.py
index 411fb858ad8c..9d1be3926c5e 100644
--- a/awscli/customizations/configure/listprofiles.py
+++ b/awscli/customizations/configure/listprofiles.py
@@ -26,7 +26,7 @@ class ListProfilesCommand(BasicCommand):
     )
 
     def __init__(self, session, out_stream=None):
-        super(ListProfilesCommand, self).__init__(session)
+        super().__init__(session)
         if out_stream is None:
             out_stream = sys.stdout
 
@@ -34,5 +34,5 @@ def __init__(self, session, out_stream=None):
 
     def _run_main(self, parsed_args, parsed_globals):
         for profile in self._session.available_profiles:
-            uni_print('%s\n' % profile, out_file=self._out_stream)
+            uni_print(f'{profile}\n', out_file=self._out_stream)
         return 0
diff --git a/awscli/customizations/configure/set.py b/awscli/customizations/configure/set.py
index c17f38d955a3..ce2df90236be 100644
--- a/awscli/customizations/configure/set.py
+++ b/awscli/customizations/configure/set.py
@@ -41,7 +41,7 @@ class ConfigureSetCommand(BasicCommand):
                             'aws_session_token']
 
     def __init__(self, session, config_writer=None):
-        super(ConfigureSetCommand, self).__init__(session)
+        super().__init__(session)
         if config_writer is None:
             config_writer = ConfigFileWriter()
         self._config_writer = config_writer
diff --git a/awscli/customizations/configure/sso.py b/awscli/customizations/configure/sso.py
index d10e9a4703ee..251a507b234b 100644
--- a/awscli/customizations/configure/sso.py
+++ b/awscli/customizations/configure/sso.py
@@ -64,7 +64,7 @@ class ValidatorWithDefault(Validator):
 
     def __init__(self, default=None):
-        super(ValidatorWithDefault, self).__init__()
+        super().__init__()
         self._default = default
 
     def _raise_validation_error(self, document, message):
@@ -326,7 +326,7 @@ def _get_previously_used_scopes_to_sso_sessions(self):
 
 class BaseSSOConfigurationCommand(BaseSSOCommand):
     def __init__(self, session, prompter=None, config_writer=None):
-        super(BaseSSOConfigurationCommand, self).__init__(session)
+        super().__init__(session)
         if prompter is None:
             prompter = PTKPrompt()
         self._prompter = prompter
@@ -370,7 +370,7 @@ class ConfigureSSOCommand(BaseSSOConfigurationCommand):
 
     def __init__(self, session, prompter=None, selector=None,
                  config_writer=None, sso_token_cache=None, sso_login=None):
-        super(ConfigureSSOCommand, self).__init__(
+        super().__init__(
             session, prompter=prompter, config_writer=config_writer)
         if selector is None:
             selector = select_menu
diff --git a/awscli/customizations/configure/writer.py b/awscli/customizations/configure/writer.py
index 077196c085e9..b7185c2de1cd 100644
--- a/awscli/customizations/configure/writer.py
+++ b/awscli/customizations/configure/writer.py
@@ -92,7 +92,7 @@ def _write_new_section(self, section_name, new_values, config_filename):
         with open(config_filename, 'a') as f:
             if needs_newline:
                 f.write('\n')
-            f.write('[%s]\n' % section_name)
+            f.write(f'[{section_name}]\n')
             contents = []
             self._insert_new_values(line_number=0,
                                     contents=contents,
@@ -143,7 +143,7 @@ def _update_section_contents(self, contents, section_name, new_values):
                 # out now.
                 if not isinstance(new_values[key_name], dict):
                     option_value = new_values[key_name]
-                    new_line = '%s = %s\n' % (key_name, option_value)
+                    new_line = f'{key_name} = {option_value}\n'
                     contents[j] = new_line
                     del new_values[key_name]
                 else:
@@ -171,7 +171,7 @@ def _update_subattributes(self, index, contents, values, starting_indent):
             key_name = match.group(1).strip()
             if key_name in values:
                 option_value = values[key_name]
-                new_line = '%s%s = %s\n' % (' ' * current_indent,
+                new_line = '{}{} = {}\n'.format(' ' * current_indent,
                                             key_name, option_value)
                 contents[i] = new_line
                 del values[key_name]
@@ -192,20 +192,19 @@ def _insert_new_values(self, line_number, contents, new_values, indent=''):
         for key, value in list(new_values.items()):
             if isinstance(value, dict):
                 subindent = indent + '    '
-                new_contents.append('%s%s =\n' % (indent, key))
+                new_contents.append(f'{indent}{key} =\n')
                 for subkey, subval in list(value.items()):
-                    new_contents.append('%s%s = %s\n' % (subindent, subkey,
-                                                         subval))
+                    new_contents.append(f'{subindent}{subkey} = {subval}\n')
             else:
-                new_contents.append('%s%s = %s\n' % (indent, key, value))
+                new_contents.append(f'{indent}{key} = {value}\n')
             del new_values[key]
         contents.insert(line_number + 1, ''.join(new_contents))
 
     def _matches_section(self, match, section_name):
         parts = section_name.split(' ')
-        unquoted_match = match.group(0) == '[%s]' % section_name
+        unquoted_match = match.group(0) == f'[{section_name}]'
         if len(parts) > 1:
-            quoted_match = match.group(0) == '[%s "%s"]' % (
+            quoted_match = match.group(0) == '[{} "{}"]'.format(
                 parts[0], ' '.join(parts[1:]))
             return unquoted_match or quoted_match
         return unquoted_match