from typing import Any, Optional

import torch
import enum

from torch._C import _to_dlpack as to_dlpack
from torch.types import Device as _Device

__all__ = [
    "DLDeviceType",
    "from_dlpack",
]


class DLDeviceType(enum.IntEnum):
    # Device type codes from the DLPack specification (dlpack.h).
    kDLCPU = 1,
    kDLCUDA = 2,
    kDLCUDAHost = 3,
    kDLOpenCL = 4,
    kDLVulkan = 7,
    kDLMetal = 8,
    kDLVPI = 9,
    kDLROCM = 10,
    kDLROCMHost = 11,
    kDLExtDev = 12,
    kDLCUDAManaged = 13,
    kDLOneAPI = 14,
    kDLWebGPU = 15,
    kDLHexagon = 16,
    kDLMAIA = 17,


torch._C._add_docstr(to_dlpack, r"""to_dlpack(tensor) -> PyCapsule

Returns an opaque object (a "DLPack capsule") representing the tensor.

.. note::
  ``to_dlpack`` is a legacy DLPack interface. The capsule it returns
  cannot be used for anything in Python other than use it as input to
  ``from_dlpack``. The more idiomatic use of DLPack is to call
  ``from_dlpack`` directly on the tensor object - this works when that
  object has a ``__dlpack__`` method, which PyTorch and most other
  libraries indeed have now.

.. warning::
  Only call ``from_dlpack`` once per capsule produced with ``to_dlpack``.
  Behavior when a capsule is consumed multiple times is undefined.

Args:
    tensor: a tensor to be exported

The DLPack capsule shares the tensor's memory.
""")
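

# Illustrative sketch, not part of the upstream module: the capsule produced by
# ``to_dlpack`` is single-use, so it should be handed to ``from_dlpack`` exactly
# once. ``_legacy_roundtrip`` is a hypothetical helper added only to demonstrate
# the legacy capsule flow described in the docstring above.
def _legacy_roundtrip(tensor: "torch.Tensor") -> "torch.Tensor":
    capsule = to_dlpack(tensor)          # opaque "dltensor" PyCapsule aliasing ``tensor``
    result = torch.from_dlpack(capsule)  # consume the capsule exactly once
    # Handing ``capsule`` to ``from_dlpack`` a second time would be undefined behavior.
    return result                        # shares memory with ``tensor``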


def from_dlpack(
    ext_tensor: Any,
    *,
    device: Optional[_Device] = None,
    copy: Optional[bool] = None,
) -> "torch.Tensor":
    """from_dlpack(ext_tensor) -> Tensor

    Converts a tensor from an external library into a ``torch.Tensor``.

    The returned PyTorch tensor will share the memory with the input tensor
    (which may have come from another library). Note that in-place operations
    will therefore also affect the data of the input tensor. This may lead to
    unexpected issues (e.g., other libraries may have read-only flags or
    immutable data structures), so the user should only do this if they know
    for sure that this is fine.

    Args:
        ext_tensor (object with ``__dlpack__`` attribute, or a DLPack capsule):
            The tensor or DLPack capsule to convert.

            If ``ext_tensor`` is a tensor (or ndarray) object, it must support
            the ``__dlpack__`` protocol (i.e., have a ``ext_tensor.__dlpack__``
            method). Otherwise ``ext_tensor`` may be a DLPack capsule, which is
            an opaque ``PyCapsule`` instance, typically produced by a
            ``to_dlpack`` function or method.

        device (torch.device or str or None): An optional PyTorch device
            specifying where to place the new tensor. If None (default), the
            new tensor will be on the same device as ``ext_tensor``.

        copy (bool or None): An optional boolean indicating whether or not to copy
            ``self``. If None, PyTorch will copy only if necessary.

    Examples::

        >>> import torch.utils.dlpack
        >>> t = torch.arange(4)

        # Convert a tensor directly (supported in PyTorch >= 1.10)
        >>> t2 = torch.from_dlpack(t)
        >>> t2[:2] = -1  # show that memory is shared
        >>> t2
        tensor([-1, -1,  2,  3])
        >>> t
        tensor([-1, -1,  2,  3])

        # The old-style DLPack usage, with an intermediate capsule object
        >>> capsule = torch.utils.dlpack.to_dlpack(t)
        >>> capsule
        <capsule object "dltensor" at ...>
        >>> t3 = torch.from_dlpack(capsule)
        >>> t3
        tensor([-1, -1,  2,  3])
        >>> t3[0] = -9  # now we're sharing memory between 3 tensors
        >>> t3
        tensor([-9, -1,  2,  3])
        >>> t2
        tensor([-9, -1,  2,  3])
        >>> t
        tensor([-9, -1,  2,  3])

    
    """
    if hasattr(ext_tensor, "__dlpack__"):
        kwargs: dict[str, Any] = {}
        kwargs["max_version"] = (1, 0)

        if copy is not None:
            kwargs["copy"] = copy

        if device is not None:
            # Normalize str devices so they can be validated and translated.
            if isinstance(device, str):
                device = torch.device(device)
            assert isinstance(device, torch.device), (
                f"from_dlpack: unsupported device type: {type(device)}"
            )
            kwargs["dl_device"] = torch._C._torchDeviceToDLDevice(device)

        ext_device = ext_tensor.__dlpack_device__()
        if ext_device[0] in (DLDeviceType.kDLCUDA, DLDeviceType.kDLROCM):
            # Tell the producer which stream the consumer (PyTorch) will use so
            # it can synchronize before sharing the memory. The DLPack protocol
            # reserves the value 1 for CUDA's default (legacy) stream.
            stream = torch.cuda.current_stream(f"cuda:{ext_device[1]}")
            is_cuda = ext_device[0] == DLDeviceType.kDLCUDA
            stream_ptr = 1 if is_cuda and stream.cuda_stream == 0 else stream.cuda_stream
            kwargs["stream"] = stream_ptr

        try:
            # Prefer the DLPack 1.0 interface (max_version / dl_device / copy).
            dlpack = ext_tensor.__dlpack__(**kwargs)
        except TypeError:
            # Older producers do not accept ``max_version``; retry without it.
            kwargs.pop("max_version")
            dlpack = ext_tensor.__dlpack__(**kwargs)
    else:
        # ``ext_tensor`` is already an opaque DLPack capsule.
        assert device is None and copy is None, (
            "device and copy kwargs not supported when ext_tensor is already a "
            "DLPack capsule."
        )
        dlpack = ext_tensor
    return torch._C._from_dlpack(dlpack)
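

# A minimal usage sketch (assumption: only runs when this file is executed as a
# script, never on import). It shows that ``from_dlpack`` accepts an object
# implementing ``__dlpack__`` and that the resulting tensor aliases its memory.
if __name__ == "__main__":
    t = torch.arange(4)
    shared = from_dlpack(t)   # goes through t.__dlpack__() under the hood
    shared[0] = -1            # an in-place write through the DLPack view...
    assert t[0] == -1         # ...is visible in the original tensor

    # Legacy path: an intermediate capsule, consumed exactly once.
    capsule = to_dlpack(t)
    again = from_dlpack(capsule)
    assert again.data_ptr() == t.data_ptr()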