Archive: /home/newsbmcs.com/public_html/static/img/logo/resources.tar

# ========== base.py ==========
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

import logging

import boto3

logger = logging.getLogger(__name__)


class ResourceMeta:
    """
    An object containing metadata about a resource.
    """

    def __init__(
        self,
        service_name,
        identifiers=None,
        client=None,
        data=None,
        resource_model=None,
    ):
        #: (``string``) The service name, e.g. 's3'
        self.service_name = service_name

        if identifiers is None:
            identifiers = []
        #: (``list``) List of identifier names
        self.identifiers = identifiers

        #: (:py:class:`~botocore.client.BaseClient`) Low-level Botocore client
        self.client = client
        #: (``dict``) Loaded resource data attributes
        self.data = data

        # The resource model for that resource
        self.resource_model = resource_model

    def __repr__(self):
        return f'ResourceMeta(\'{self.service_name}\', identifiers={self.identifiers})'

    def __eq__(self, other):
        # Two metas are equal if their components are all equal
        if other.__class__.__name__ != self.__class__.__name__:
            return False

        return self.__dict__ == other.__dict__

    def copy(self):
        """
        Create a copy of this metadata object.
        """
        params = self.__dict__.copy()
        service_name = params.pop('service_name')
        return ResourceMeta(service_name, **params)


class ServiceResource:
    """
    A base class for resources.

    :type client: botocore.client
    :param client: A low-level Botocore client instance
    """

    meta = None
    """
    Stores metadata about this resource instance, such as the
    ``service_name``, the low-level ``client`` and any cached ``data``
    from when the instance was hydrated. For example::

        # Get a low-level client from a resource instance
        client = resource.meta.client
        response = client.operation(Param='foo')

        # Print the resource instance's service short name
        print(resource.meta.service_name)

    See :py:class:`ResourceMeta` for more information.
    """

    def __init__(self, *args, **kwargs):
        # Always work on a copy of meta, otherwise we would affect other
        # instances of the same subclass.
        self.meta = self.meta.copy()

        # Create a default client if none was passed
        if kwargs.get('client') is not None:
            self.meta.client = kwargs.get('client')
        else:
            self.meta.client = boto3.client(self.meta.service_name)

        # Allow setting identifiers as positional arguments in the order
        # in which they were defined in the ResourceJSON.
        for i, value in enumerate(args):
            setattr(self, '_' + self.meta.identifiers[i], value)

        # Allow setting identifiers via keyword arguments. Here we need
        # extra logic to ignore other keyword arguments like ``client``.
        for name, value in kwargs.items():
            if name == 'client':
                continue

            if name not in self.meta.identifiers:
                raise ValueError(f'Unknown keyword argument: {name}')

            setattr(self, '_' + name, value)

        # Validate that all identifiers have been set.
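        # Illustrative example (resource names assumed, not defined in this
        # module): s3.Object('my-bucket', 'my-key') supplies the bucket_name
        # and key identifiers positionally, while
        # s3.Object(bucket_name='my-bucket', key='my-key') goes through the
        # keyword branch above; both end up stored as _bucket_name and _key.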
        for identifier in self.meta.identifiers:
            if getattr(self, identifier) is None:
                raise ValueError(f'Required parameter {identifier} not set')

    def __repr__(self):
        identifiers = []
        for identifier in self.meta.identifiers:
            identifiers.append(
                f'{identifier}={repr(getattr(self, identifier))}'
            )
        return "{}({})".format(
            self.__class__.__name__,
            ', '.join(identifiers),
        )

    def __eq__(self, other):
        # Should be instances of the same resource class
        if other.__class__.__name__ != self.__class__.__name__:
            return False

        # Each of the identifiers should have the same value in both
        # instances, e.g. two buckets need the same name to be equal.
        for identifier in self.meta.identifiers:
            if getattr(self, identifier) != getattr(other, identifier):
                return False

        return True

    def __hash__(self):
        identifiers = []
        for identifier in self.meta.identifiers:
            identifiers.append(getattr(self, identifier))
        return hash((self.__class__.__name__, tuple(identifiers)))


# ========== factory.py ==========
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

import logging
from functools import partial

from ..docs import docstring
from ..exceptions import ResourceLoadException
from .action import ServiceAction, WaiterAction
from .base import ResourceMeta, ServiceResource
from .collection import CollectionFactory
from .model import ResourceModel
from .response import ResourceHandler, build_identifiers

logger = logging.getLogger(__name__)


class ResourceFactory:
    """
    A factory to create new :py:class:`~boto3.resources.base.ServiceResource`
    classes from a :py:class:`~boto3.resources.model.ResourceModel`. There are
    two types of lookups that can be done: one on the service itself (e.g. an
    SQS resource) and another on models contained within the service (e.g. an
    SQS Queue resource).
    """

    def __init__(self, emitter):
        self._collection_factory = CollectionFactory()
        self._emitter = emitter

    def load_from_definition(
        self, resource_name, single_resource_json_definition, service_context
    ):
        """
        Loads a resource from a model, creating a new
        :py:class:`~boto3.resources.base.ServiceResource` subclass
        with the correct properties and methods, named based on the service
        and resource name, e.g. EC2.Instance.

        :type resource_name: string
        :param resource_name: Name of the resource to look up. For services,
                              this should match the ``service_name``.

        :type single_resource_json_definition: dict
        :param single_resource_json_definition:
            The loaded json of a single service resource or resource
            definition.

        :type service_context: :py:class:`~boto3.utils.ServiceContext`
        :param service_context: Context about the AWS service

        :rtype: Subclass of :py:class:`~boto3.resources.base.ServiceResource`
        :return: The service or resource class.
        """
        logger.debug(
            'Loading %s:%s', service_context.service_name, resource_name
        )

        # Using the loaded JSON create a ResourceModel object.
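        # Illustrative sketch of the input (assumed and abridged, not a
        # schema): the JSON definition is a plain dict from boto3's bundled
        # resource data files, roughly of the form
        # {'identifiers': [{'name': 'Url'}], 'shape': '...',
        #  'actions': {...}, 'has': {...}}; ResourceModel turns it into
        # typed model objects used by the loaders below.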
        resource_model = ResourceModel(
            resource_name,
            single_resource_json_definition,
            service_context.resource_json_definitions,
        )

        # Do some renaming of the shape if there was a naming collision
        # that needed to be accounted for.
        shape = None
        if resource_model.shape:
            shape = service_context.service_model.shape_for(
                resource_model.shape
            )
        resource_model.load_rename_map(shape)

        # Set some basic info
        meta = ResourceMeta(
            service_context.service_name, resource_model=resource_model
        )
        attrs = {
            'meta': meta,
        }

        # Create and load all of the attributes of the resource class based
        # on the models.

        # Identifiers
        self._load_identifiers(
            attrs=attrs,
            meta=meta,
            resource_name=resource_name,
            resource_model=resource_model,
        )

        # Load/Reload actions
        self._load_actions(
            attrs=attrs,
            resource_name=resource_name,
            resource_model=resource_model,
            service_context=service_context,
        )

        # Attributes that get auto-loaded
        self._load_attributes(
            attrs=attrs,
            meta=meta,
            resource_name=resource_name,
            resource_model=resource_model,
            service_context=service_context,
        )

        # Collections and their corresponding methods
        self._load_collections(
            attrs=attrs,
            resource_model=resource_model,
            service_context=service_context,
        )

        # References and Subresources
        self._load_has_relations(
            attrs=attrs,
            resource_name=resource_name,
            resource_model=resource_model,
            service_context=service_context,
        )

        # Waiter resource actions
        self._load_waiters(
            attrs=attrs,
            resource_name=resource_name,
            resource_model=resource_model,
            service_context=service_context,
        )

        # Create the name based on the requested service and resource
        cls_name = resource_name
        if service_context.service_name == resource_name:
            cls_name = 'ServiceResource'
        cls_name = service_context.service_name + '.' + cls_name

        base_classes = [ServiceResource]
        if self._emitter is not None:
            self._emitter.emit(
                f'creating-resource-class.{cls_name}',
                class_attributes=attrs,
                base_classes=base_classes,
                service_context=service_context,
            )
        return type(str(cls_name), tuple(base_classes), attrs)

    def _load_identifiers(self, attrs, meta, resource_model, resource_name):
        """
        Populate required identifiers. These are arguments without which
        the resource cannot be used. Identifiers become arguments for
        operations on the resource.
        """
        for identifier in resource_model.identifiers:
            meta.identifiers.append(identifier.name)
            attrs[identifier.name] = self._create_identifier(
                identifier, resource_name
            )

    def _load_actions(
        self, attrs, resource_name, resource_model, service_context
    ):
        """
        Actions on the resource become methods, with the ``load`` method
        being a special case which sets internal data for attributes, and
        ``reload`` is an alias for ``load``.
        """
        if resource_model.load:
            attrs['load'] = self._create_action(
                action_model=resource_model.load,
                resource_name=resource_name,
                service_context=service_context,
                is_load=True,
            )
            attrs['reload'] = attrs['load']

        for action in resource_model.actions:
            attrs[action.name] = self._create_action(
                action_model=action,
                resource_name=resource_name,
                service_context=service_context,
            )

    def _load_attributes(
        self, attrs, meta, resource_name, resource_model, service_context
    ):
        """
        Load resource attributes based on the resource shape. The shape
        name is referenced in the resource JSON, but the shape itself
        is defined in the Botocore service JSON, hence the need for
        access to the ``service_model``.
        """
        if not resource_model.shape:
            return

        shape = service_context.service_model.shape_for(resource_model.shape)
        identifiers = {
            i.member_name: i
            for i in resource_model.identifiers
            if i.member_name
        }
        attributes = resource_model.get_attributes(shape)
        for name, (orig_name, member) in attributes.items():
            if name in identifiers:
                prop = self._create_identifier_alias(
                    resource_name=resource_name,
                    identifier=identifiers[name],
                    member_model=member,
                    service_context=service_context,
                )
            else:
                prop = self._create_autoload_property(
                    resource_name=resource_name,
                    name=orig_name,
                    snake_cased=name,
                    member_model=member,
                    service_context=service_context,
                )
            attrs[name] = prop

    def _load_collections(self, attrs, resource_model, service_context):
        """
        Load resource collections from the model. Each collection becomes
        a :py:class:`~boto3.resources.collection.CollectionManager` instance
        on the resource instance, which allows you to iterate and filter
        through the collection's items.
        """
        for collection_model in resource_model.collections:
            attrs[collection_model.name] = self._create_collection(
                resource_name=resource_model.name,
                collection_model=collection_model,
                service_context=service_context,
            )

    def _load_has_relations(
        self, attrs, resource_name, resource_model, service_context
    ):
        """
        Load related resources, which are defined via a ``has``
        relationship but conceptually come in two forms:

        1. A reference, which is a related resource instance and can be
           ``None``, such as an EC2 instance's ``vpc``.
        2. A subresource, which is a resource constructor that will always
           return a resource instance which shares identifiers/data with
           this resource, such as ``s3.Bucket('name').Object('key')``.
        """
        for reference in resource_model.references:
            # This is a dangling reference, i.e. we have all
            # the data we need to create the resource, so
            # this instance becomes an attribute on the class.
            attrs[reference.name] = self._create_reference(
                reference_model=reference,
                resource_name=resource_name,
                service_context=service_context,
            )

        for subresource in resource_model.subresources:
            # This is a sub-resource class you can create
            # by passing in an identifier, e.g. s3.Bucket(name).
            attrs[subresource.name] = self._create_class_partial(
                subresource_model=subresource,
                resource_name=resource_name,
                service_context=service_context,
            )

        self._create_available_subresources_command(
            attrs, resource_model.subresources
        )

    def _create_available_subresources_command(self, attrs, subresources):
        _subresources = [subresource.name for subresource in subresources]
        _subresources = sorted(_subresources)

        def get_available_subresources(factory_self):
            """
            Returns a list of all the available sub-resources for this
            Resource.

            :returns: A list containing the name of each sub-resource for this
                resource
            :rtype: list of str
            """
            return _subresources

        attrs['get_available_subresources'] = get_available_subresources

    def _load_waiters(
        self, attrs, resource_name, resource_model, service_context
    ):
        """
        Load resource waiters from the model. Each waiter allows you to
        wait until a resource reaches a specific state by polling the state
        of the resource.
        """
        for waiter in resource_model.waiters:
            attrs[waiter.name] = self._create_waiter(
                resource_waiter_model=waiter,
                resource_name=resource_name,
                service_context=service_context,
            )

    def _create_identifier(factory_self, identifier, resource_name):
        """
        Creates a read-only property for identifier attributes.
        """

        def get_identifier(self):
            # The default value is set to ``None`` instead of
            # raising an AttributeError because when resources are
            # instantiated a check is made such that none of the
            # identifiers have a value ``None``. If any are ``None``,
            # a more informative user error than a generic AttributeError
            # is raised.
            return getattr(self, '_' + identifier.name, None)

        get_identifier.__name__ = str(identifier.name)
        get_identifier.__doc__ = docstring.IdentifierDocstring(
            resource_name=resource_name,
            identifier_model=identifier,
            include_signature=False,
        )

        return property(get_identifier)

    def _create_identifier_alias(
        factory_self, resource_name, identifier, member_model, service_context
    ):
        """
        Creates a read-only property that aliases an identifier.
        """

        def get_identifier(self):
            return getattr(self, '_' + identifier.name, None)

        get_identifier.__name__ = str(identifier.member_name)
        get_identifier.__doc__ = docstring.AttributeDocstring(
            service_name=service_context.service_name,
            resource_name=resource_name,
            attr_name=identifier.member_name,
            event_emitter=factory_self._emitter,
            attr_model=member_model,
            include_signature=False,
        )

        return property(get_identifier)

    def _create_autoload_property(
        factory_self,
        resource_name,
        name,
        snake_cased,
        member_model,
        service_context,
    ):
        """
        Creates a new property on the resource to lazy-load its value
        via the resource's ``load`` method (if it exists).
        """

        # The property loader will check to see if this resource has already
        # been loaded and return the cached value if possible. If not, then
        # it first checks to see if it CAN be loaded (raise if not), then
        # calls the load before returning the value.
        def property_loader(self):
            if self.meta.data is None:
                if hasattr(self, 'load'):
                    self.load()
                else:
                    raise ResourceLoadException(
                        f'{self.__class__.__name__} has no load method'
                    )

            return self.meta.data.get(name)

        property_loader.__name__ = str(snake_cased)
        property_loader.__doc__ = docstring.AttributeDocstring(
            service_name=service_context.service_name,
            resource_name=resource_name,
            attr_name=snake_cased,
            event_emitter=factory_self._emitter,
            attr_model=member_model,
            include_signature=False,
        )

        return property(property_loader)

    def _create_waiter(
        factory_self, resource_waiter_model, resource_name, service_context
    ):
        """
        Creates a new wait method for each resource where both a waiter and
        resource model are defined.
        """
        waiter = WaiterAction(
            resource_waiter_model,
            waiter_resource_name=resource_waiter_model.name,
        )

        def do_waiter(self, *args, **kwargs):
            waiter(self, *args, **kwargs)

        do_waiter.__name__ = str(resource_waiter_model.name)
        do_waiter.__doc__ = docstring.ResourceWaiterDocstring(
            resource_name=resource_name,
            event_emitter=factory_self._emitter,
            service_model=service_context.service_model,
            resource_waiter_model=resource_waiter_model,
            service_waiter_model=service_context.service_waiter_model,
            include_signature=False,
        )
        return do_waiter

    def _create_collection(
        factory_self, resource_name, collection_model, service_context
    ):
        """
        Creates a new property on the resource to lazy-load a collection.
        """
        cls = factory_self._collection_factory.load_from_definition(
            resource_name=resource_name,
            collection_model=collection_model,
            service_context=service_context,
            event_emitter=factory_self._emitter,
        )

        def get_collection(self):
            return cls(
                collection_model=collection_model,
                parent=self,
                factory=factory_self,
                service_context=service_context,
            )

        get_collection.__name__ = str(collection_model.name)
        get_collection.__doc__ = docstring.CollectionDocstring(
            collection_model=collection_model, include_signature=False
        )
        return property(get_collection)

    def _create_reference(
        factory_self, reference_model, resource_name, service_context
    ):
        """
        Creates a new property on the resource to lazy-load a reference.
        """
        # References are essentially an action with no request
        # or response, so we can re-use the response handlers to
        # build up resources from identifiers and data members.
        handler = ResourceHandler(
            search_path=reference_model.resource.path,
            factory=factory_self,
            resource_model=reference_model.resource,
            service_context=service_context,
        )

        # Are there any identifiers that need access to data members?
        # This is important when building the resource below since
        # it requires the data to be loaded.
        needs_data = any(
            i.source == 'data' for i in reference_model.resource.identifiers
        )

        def get_reference(self):
            # We need to lazy-evaluate the reference to handle circular
            # references between resources. We do this by loading the class
            # when first accessed.
            # This is using a *response handler* so we need to make sure
            # our data is loaded (if possible) and pass that data into
            # the handler as if it were a response. This allows references
            # to have their data loaded properly.
            if needs_data and self.meta.data is None and hasattr(self, 'load'):
                self.load()
            return handler(self, {}, self.meta.data)

        get_reference.__name__ = str(reference_model.name)
        get_reference.__doc__ = docstring.ReferenceDocstring(
            reference_model=reference_model, include_signature=False
        )
        return property(get_reference)

    def _create_class_partial(
        factory_self, subresource_model, resource_name, service_context
    ):
        """
        Creates a new method which acts as a functools.partial, passing
        along the instance's low-level `client` to the new resource
        class' constructor.
        """
        name = subresource_model.resource.type

        def create_resource(self, *args, **kwargs):
            # We need a new method here because we want access to the
            # instance's client.
            positional_args = []

            # We lazy-load the class to handle circular references.
            json_def = service_context.resource_json_definitions.get(name, {})
            resource_cls = factory_self.load_from_definition(
                resource_name=name,
                single_resource_json_definition=json_def,
                service_context=service_context,
            )

            # Assumes that identifiers are in order, which lets you do
            # e.g. ``sqs.Queue('foo').Message('bar')`` to create a new message
            # linked with the ``foo`` queue and which has a ``bar`` receipt
            # handle. If we did kwargs here then future positional arguments
            # would lead to failure.
            # build_identifiers() (imported from .response) yields the
            # (name, value) pairs for the parent's identifiers, so the child
            # resource created below shares the parent's values.
            identifiers = subresource_model.resource.identifiers
            if identifiers is not None:
                for identifier, value in build_identifiers(identifiers, self):
                    positional_args.append(value)

            return partial(
                resource_cls, *positional_args, client=self.meta.client
            )(*args, **kwargs)

        create_resource.__name__ = str(name)
        create_resource.__doc__ = docstring.SubResourceDocstring(
            resource_name=resource_name,
            sub_resource_model=subresource_model,
            service_model=service_context.service_model,
            include_signature=False,
        )
        return create_resource

    def _create_action(
        factory_self,
        action_model,
        resource_name,
        service_context,
        is_load=False,
    ):
        """
        Creates a new method which makes a request to the underlying
        AWS service.
        """
        # Create the action in this closure but before the ``do_action``
        # method below is invoked, which allows instances of the resource
        # to share the ServiceAction instance.
        action = ServiceAction(
            action_model, factory=factory_self, service_context=service_context
        )

        # A resource's ``load`` method is special because it sets
        # values on the resource instead of returning the response.
        if is_load:
            # We need a new method here because we want access to the
            # instance via ``self``.
            def do_action(self, *args, **kwargs):
                response = action(self, *args, **kwargs)
                self.meta.data = response

            # Create the docstring for the load/reload methods.
            lazy_docstring = docstring.LoadReloadDocstring(
                action_name=action_model.name,
                resource_name=resource_name,
                event_emitter=factory_self._emitter,
                load_model=action_model,
                service_model=service_context.service_model,
                include_signature=False,
            )
        else:
            # We need a new method here because we want access to the
            # instance via ``self``.
            def do_action(self, *args, **kwargs):
                response = action(self, *args, **kwargs)

                if hasattr(self, 'load'):
                    # Clear cached data. It will be reloaded the next
                    # time that an attribute is accessed.
                    # TODO: Make this configurable in the future?
                    self.meta.data = None

                return response

            lazy_docstring = docstring.ActionDocstring(
                resource_name=resource_name,
                event_emitter=factory_self._emitter,
                action_model=action_model,
                service_model=service_context.service_model,
                include_signature=False,
            )

        do_action.__name__ = str(action_model.name)
        do_action.__doc__ = lazy_docstring
        return do_action


# ========== collection.py ==========
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# https://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

import copy
import logging

from botocore import xform_name
from botocore.utils import merge_dicts

from ..docs import docstring
from .action import BatchAction
from .params import create_request_parameters
from .response import ResourceHandler

logger = logging.getLogger(__name__)


class ResourceCollection:
    """
    Represents a collection of resources, which can be iterated through,
    optionally with filtering. Collections automatically handle pagination
    for you.

    See :ref:`guide_collections` for a high-level overview of collections,
    including when remote service requests are performed.
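
    For example, iterating ``s3.Bucket('name').objects.all()`` walks every
    ``Object`` in the bucket, issuing as many paged requests as the service
    requires (the service and names here are illustrative).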

    :type model: :py:class:`~boto3.resources.model.Collection`
    :param model: Collection model

    :type parent: :py:class:`~boto3.resources.base.ServiceResource`
    :param parent: The collection's parent resource

    :type handler: :py:class:`~boto3.resources.response.ResourceHandler`
    :param handler: The resource response handler used to create resource
                    instances
    """

    def __init__(self, model, parent, handler, **kwargs):
        self._model = model
        self._parent = parent
        self._py_operation_name = xform_name(model.request.operation)
        self._handler = handler
        self._params = copy.deepcopy(kwargs)

    def __repr__(self):
        return '{}({}, {})'.format(
            self.__class__.__name__,
            self._parent,
            f'{self._parent.meta.service_name}.{self._model.resource.type}',
        )

    def __iter__(self):
        """
        A generator which yields resource instances after doing the
        appropriate service operation calls and handling any pagination
        on your behalf.

        Page size, item limit, and filter parameters are applied
        if they have previously been set.

            >>> bucket = s3.Bucket('boto3')
            >>> for obj in bucket.objects.all():
            ...     print(obj.key)
            'key1'
            'key2'
        """
        limit = self._params.get('limit', None)

        count = 0
        for page in self.pages():
            for item in page:
                yield item

                # If the limit is set and has been reached, then
                # we stop processing items here.
                count += 1
                if limit is not None and count >= limit:
                    return

    def _clone(self, **kwargs):
        """
        Create a clone of this collection. This is used by the methods
        below to provide a chainable interface that returns copies
        rather than the original. This allows things like:

            >>> base = collection.filter(Param1=1)
            >>> query1 = base.filter(Param2=2)
            >>> query2 = base.filter(Param3=3)
            >>> query1.params
            {'Param1': 1, 'Param2': 2}
            >>> query2.params
            {'Param1': 1, 'Param3': 3}

        :rtype: :py:class:`ResourceCollection`
        :return: A clone of this resource collection
        """
        params = copy.deepcopy(self._params)
        merge_dicts(params, kwargs, append_lists=True)
        clone = self.__class__(
            self._model, self._parent, self._handler, **params
        )
        return clone

    def pages(self):
        """
        A generator which yields pages of resource instances after
        doing the appropriate service operation calls and handling
        any pagination on your behalf. Non-paginated calls will
        return a single page of items.

        Page size, item limit, and filter parameters are applied
        if they have previously been set.

            >>> bucket = s3.Bucket('boto3')
            >>> for page in bucket.objects.pages():
            ...     for obj in page:
            ...         print(obj.key)
            'key1'
            'key2'

        :rtype: list(:py:class:`~boto3.resources.base.ServiceResource`)
        :return: List of resource instances
        """
        client = self._parent.meta.client
        cleaned_params = self._params.copy()
        limit = cleaned_params.pop('limit', None)
        page_size = cleaned_params.pop('page_size', None)
        params = create_request_parameters(self._parent, self._model.request)
        merge_dicts(params, cleaned_params, append_lists=True)

        # Is this a paginated operation? If so, we need to get an
        # iterator for the various pages. If not, then we simply
        # call the operation and return the result as a single
        # page in a list. For non-paginated results, we just ignore
        # the page size parameter.
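        # Operations with a registered botocore paginator take the paginated
        # branch below; anything else falls through to a single request whose
        # response is wrapped in a one-element list of pages.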
        if client.can_paginate(self._py_operation_name):
            logger.debug(
                'Calling paginated %s:%s with %r',
                self._parent.meta.service_name,
                self._py_operation_name,
                params,
            )
            paginator = client.get_paginator(self._py_operation_name)
            pages = paginator.paginate(
                PaginationConfig={'MaxItems': limit, 'PageSize': page_size},
                **params,
            )
        else:
            logger.debug(
                'Calling %s:%s with %r',
                self._parent.meta.service_name,
                self._py_operation_name,
                params,
            )
            pages = [getattr(client, self._py_operation_name)(**params)]

        # Now that we have a page iterator or single page of results
        # we start processing and yielding individual items.
        count = 0
        for page in pages:
            page_items = []
            for item in self._handler(self._parent, params, page):
                page_items.append(item)

                # If the limit is set and has been reached, then
                # we stop processing items here.
                count += 1
                if limit is not None and count >= limit:
                    break

            yield page_items

            # Stop reading pages if we've reached our limit
            if limit is not None and count >= limit:
                break

    def all(self):
        """
        Get all items from the collection, optionally with a custom
        page size and item count limit.

        This method returns an iterable generator which yields
        individual resource instances. Example use::

            # Iterate through items
            >>> for queue in sqs.queues.all():
            ...     print(queue.url)
            'https://url1'
            'https://url2'

            # Convert to list
            >>> queues = list(sqs.queues.all())
            >>> len(queues)
            2
        """
        return self._clone()

    def filter(self, **kwargs):
        """
        Get items from the collection, passing keyword arguments along
        as parameters to the underlying service operation, which are
        typically used to filter the results.

        This method returns an iterable generator which yields
        individual resource instances. Example use::

            # Iterate through items
            >>> for queue in sqs.queues.filter(Param='foo'):
            ...     print(queue.url)
            'https://url1'
            'https://url2'

            # Convert to list
            >>> queues = list(sqs.queues.filter(Param='foo'))
            >>> len(queues)
            2

        :rtype: :py:class:`ResourceCollection`
        """
        return self._clone(**kwargs)

    def limit(self, count):
        """
        Return at most this many resources.

            >>> for bucket in s3.buckets.limit(5):
            ...     print(bucket.name)
            'bucket1'
            'bucket2'
            'bucket3'
            'bucket4'
            'bucket5'

        :type count: int
        :param count: Return no more than this many items
        :rtype: :py:class:`ResourceCollection`
        """
        return self._clone(limit=count)

    def page_size(self, count):
        """
        Fetch at most this many resources per service request.

            >>> for obj in s3.Bucket('boto3').objects.page_size(100):
            ...     print(obj.key)

        :type count: int
        :param count: Fetch this many items per request
        :rtype: :py:class:`ResourceCollection`
        """
        return self._clone(page_size=count)


class CollectionManager:
    """
    A collection manager provides access to resource collection instances,
    which can be iterated and filtered. The manager exposes some
    convenience functions that are also found on resource collections,
    such as :py:meth:`~ResourceCollection.all` and
    :py:meth:`~ResourceCollection.filter`.

    Get all items::

        >>> for bucket in s3.buckets.all():
        ...     print(bucket.name)

    Get only some items via filtering::

        >>> for queue in sqs.queues.filter(QueueNamePrefix='AWS'):
        ...     print(queue.url)

    Get whole pages of items:

        >>> for page in s3.Bucket('boto3').objects.pages():
        ...     for obj in page:
        ...         print(obj.key)

    A collection manager is not iterable. You **must** call one of the
    methods that return a :py:class:`ResourceCollection` before trying
    to iterate, slice, or convert to a list.

    See the :ref:`guide_collections` guide for a high-level overview
    of collections, including when remote service requests are performed.
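
    For example, ``s3.buckets`` on the service resource is such a manager;
    calling ``all()``, ``filter()``, ``limit()`` or ``page_size()`` on it
    returns the iterable :py:class:`ResourceCollection` (the service shown
    is illustrative).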

    :type collection_model: :py:class:`~boto3.resources.model.Collection`
    :param collection_model: Collection model

    :type parent: :py:class:`~boto3.resources.base.ServiceResource`
    :param parent: The collection's parent resource

    :type factory: :py:class:`~boto3.resources.factory.ResourceFactory`
    :param factory: The resource factory to create new resources

    :type service_context: :py:class:`~boto3.utils.ServiceContext`
    :param service_context: Context about the AWS service
    """

    # The class to use when creating an iterator
    _collection_cls = ResourceCollection

    def __init__(self, collection_model, parent, factory, service_context):
        self._model = collection_model
        operation_name = self._model.request.operation
        self._parent = parent

        search_path = collection_model.resource.path
        self._handler = ResourceHandler(
            search_path=search_path,
            factory=factory,
            resource_model=collection_model.resource,
            service_context=service_context,
            operation_name=operation_name,
        )

    def __repr__(self):
        return '{}({}, {})'.format(
            self.__class__.__name__,
            self._parent,
            f'{self._parent.meta.service_name}.{self._model.resource.type}',
        )

    def iterator(self, **kwargs):
        """
        Get a resource collection iterator from this manager.

        :rtype: :py:class:`ResourceCollection`
        :return: An iterable representing the collection of resources
        """
        return self._collection_cls(
            self._model, self._parent, self._handler, **kwargs
        )

    # Set up some methods to proxy ResourceCollection methods
    def all(self):
        return self.iterator()

    all.__doc__ = ResourceCollection.all.__doc__

    def filter(self, **kwargs):
        return self.iterator(**kwargs)

    filter.__doc__ = ResourceCollection.filter.__doc__

    def limit(self, count):
        return self.iterator(limit=count)

    limit.__doc__ = ResourceCollection.limit.__doc__

    def page_size(self, count):
        return self.iterator(page_size=count)

    page_size.__doc__ = ResourceCollection.page_size.__doc__

    def pages(self):
        return self.iterator().pages()

    pages.__doc__ = ResourceCollection.pages.__doc__


class CollectionFactory:
    """
    A factory to create new :py:class:`CollectionManager` and
    :py:class:`ResourceCollection` subclasses from a
    :py:class:`~boto3.resources.model.Collection` model. These subclasses
    include methods to perform batch operations.
    """

    def load_from_definition(
        self, resource_name, collection_model, service_context, event_emitter
    ):
        """
        Loads a collection from a model, creating a new
        :py:class:`CollectionManager` subclass with the correct properties
        and methods, named based on the service and resource name, e.g.
        ec2.InstanceCollectionManager. It also creates a new
        :py:class:`ResourceCollection` subclass which is used by the new
        manager class.

        :type resource_name: string
        :param resource_name: Name of the resource to look up. For services,
                              this should match the ``service_name``.

        :type service_context: :py:class:`~boto3.utils.ServiceContext`
        :param service_context: Context about the AWS service

        :type event_emitter: :py:class:`~botocore.hooks.HierarchialEmitter`
        :param event_emitter: An event emitter

        :rtype: Subclass of :py:class:`CollectionManager`
        :return: The collection class.
        """
        attrs = {}
        collection_name = collection_model.name

        # Create the batch actions for a collection
        self._load_batch_actions(
            attrs,
            resource_name,
            collection_model,
            service_context.service_model,
            event_emitter,
        )
        # Add the documentation to the collection class's methods
        self._load_documented_collection_methods(
            attrs=attrs,
            resource_name=resource_name,
            collection_model=collection_model,
            service_model=service_context.service_model,
            event_emitter=event_emitter,
            base_class=ResourceCollection,
        )

        if service_context.service_name == resource_name:
            cls_name = (
                f'{service_context.service_name}.{collection_name}Collection'
            )
        else:
            cls_name = f'{service_context.service_name}.{resource_name}.{collection_name}Collection'

        collection_cls = type(str(cls_name), (ResourceCollection,), attrs)

        # Add the documentation to the collection manager's methods
        self._load_documented_collection_methods(
            attrs=attrs,
            resource_name=resource_name,
            collection_model=collection_model,
            service_model=service_context.service_model,
            event_emitter=event_emitter,
            base_class=CollectionManager,
        )
        attrs['_collection_cls'] = collection_cls
        cls_name += 'Manager'

        return type(str(cls_name), (CollectionManager,), attrs)

    def _load_batch_actions(
        self,
        attrs,
        resource_name,
        collection_model,
        service_model,
        event_emitter,
    ):
        """
        Batch actions on the collection become methods on both the
        collection manager and iterators.
        """
        for action_model in collection_model.batch_actions:
            snake_cased = xform_name(action_model.name)
            attrs[snake_cased] = self._create_batch_action(
                resource_name,
                snake_cased,
                action_model,
                collection_model,
                service_model,
                event_emitter,
            )

    def _load_documented_collection_methods(
        factory_self,
        attrs,
        resource_name,
        collection_model,
        service_model,
        event_emitter,
        base_class,
    ):
        # The base class already has these methods defined. However
        # the docstrings are generic and not tailored to a particular
        # service or resource. So we override these methods by proxying
        # to the base class's builtin method and adding a docstring
        # that pertains to the resource.

        # A collection's all() method.
        def all(self):
            return base_class.all(self)

        all.__doc__ = docstring.CollectionMethodDocstring(
            resource_name=resource_name,
            action_name='all',
            event_emitter=event_emitter,
            collection_model=collection_model,
            service_model=service_model,
            include_signature=False,
        )
        attrs['all'] = all

        # The collection's filter() method.
        def filter(self, **kwargs):
            return base_class.filter(self, **kwargs)

        filter.__doc__ = docstring.CollectionMethodDocstring(
            resource_name=resource_name,
            action_name='filter',
            event_emitter=event_emitter,
            collection_model=collection_model,
            service_model=service_model,
            include_signature=False,
        )
        attrs['filter'] = filter

        # The collection's limit method.
        def limit(self, count):
            return base_class.limit(self, count)

        limit.__doc__ = docstring.CollectionMethodDocstring(
            resource_name=resource_name,
            action_name='limit',
            event_emitter=event_emitter,
            collection_model=collection_model,
            service_model=service_model,
            include_signature=False,
        )
        attrs['limit'] = limit

        # The collection's page_size method.
        def page_size(self, count):
            return base_class.page_size(self, count)

        page_size.__doc__ = docstring.CollectionMethodDocstring(
            resource_name=resource_name,
            action_name='page_size',
            event_emitter=event_emitter,
            collection_model=collection_model,
            service_model=service_model,
            include_signature=False,
        )
        attrs['page_size'] = page_size

    def _create_batch_action(
        factory_self,
        resource_name,
        snake_cased,
        action_model,
        collection_model,
        service_model,
        event_emitter,
    ):
        """
        Creates a new method which makes a batch operation request
        to the underlying service API.
        """
        action = BatchAction(action_model)

        def batch_action(self, *args, **kwargs):
            return action(self, *args, **kwargs)

        batch_action.__name__ = str(snake_cased)
        batch_action.__doc__ = docstring.BatchActionDocstring(
            resource_name=resource_name,
            event_emitter=event_emitter,
            batch_action_model=action_model,
            service_model=service_model,
            collection_model=collection_model,
            include_signature=False,
        )
        return batch_action


# ========== __pycache__/response.cpython-310.pyc ==========
# Compiled CPython 3.10 bytecode for boto3/resources/response.py (installed at
# /usr/local/CyberCP/lib/python3.10/site-packages/boto3/resources/response.py).
# The binary payload cannot be reproduced as text and the archive is truncated
# here. The readable fragments are the docstrings of all_not_none(), which
# returns True when no element of an iterable is None, and build_identifiers(),
# which maps identifier names to values drawn from the response, request
# parameters, parent identifiers, or loaded data.
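
# Usage sketch (illustrative, not one of the files above): a minimal example
# of how the classes in these modules are reached through boto3's public API.
# The bucket name and key prefix below are placeholder assumptions.
import boto3

s3 = boto3.resource('s3')              # ResourceFactory builds this class
bucket = s3.Bucket('example-bucket')   # sub-resource from _create_class_partial
for obj in bucket.objects.filter(Prefix='logs/').limit(10):
    # bucket.objects is a CollectionManager; filter()/limit() return chained
    # ResourceCollection clones, and obj.key is an identifier property.
    print(obj.key)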