# Source code for sionna.mimo.precoding

#
# SPDX-FileCopyrightText: Copyright (c) 2021-2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
#
"""Classes and functions related to MIMO transmit precoding"""

import tensorflow as tf
from sionna.utils import matrix_inv

def zero_forcing_precoder(x, h, return_precoding_matrix=False):
    # pylint: disable=line-too-long
    r"""Zero-Forcing (ZF) Precoder

    This function implements ZF precoding for a MIMO link, assuming the
    following model:

    .. math::

        \mathbf{y} = \mathbf{H}\mathbf{G}\mathbf{x} + \mathbf{n}

    where :math:`\mathbf{y}\in\mathbb{C}^K` is the received signal vector,
    :math:`\mathbf{H}\in\mathbb{C}^{K\times M}` is the known channel matrix,
    :math:`\mathbf{G}\in\mathbb{C}^{M\times K}` is the precoding matrix,
    :math:`\mathbf{x}\in\mathbb{C}^K` is the symbol vector to be precoded,
    and :math:`\mathbf{n}\in\mathbb{C}^K` is a noise vector. It is assumed
    that :math:`K\le M`.

    The precoding matrix :math:`\mathbf{G}` is defined as (Eq. 4.37) [BHS2017]_ :

    .. math::

        \mathbf{G} = \mathbf{V}\mathbf{D}

    where

    .. math::

        \mathbf{V} &= \mathbf{H}^{\mathsf{H}}\left(\mathbf{H} \mathbf{H}^{\mathsf{H}}\right)^{-1}\\
        \mathbf{D} &= \mathop{\text{diag}}\left( \lVert \mathbf{v}_{k} \rVert_2^{-1}, k=0,\dots,K-1 \right).

    This ensures that each stream is precoded with a unit-norm vector,
    i.e., :math:`\mathop{\text{tr}}\left(\mathbf{G}\mathbf{G}^{\mathsf{H}}\right)=K`.
    The function returns the precoded vector :math:`\mathbf{G}\mathbf{x}`.

    Input
    -----
    x : [...,K], tf.complex
        1+D tensor containing the symbol vectors to be precoded.

    h : [...,K,M], tf.complex
        2+D tensor containing the channel matrices.

    return_precoding_matrix : bool
        Indicates if the precoding matrices should be returned or not.
        Defaults to False.

    Output
    ------
    x_precoded : [...,M], tf.complex
        Tensor of the same shape and dtype as ``x`` apart from the last
        dimension that has changed from `K` to `M`. It contains the
        precoded symbol vectors.

    g : [...,M,K], tf.complex
        2+D tensor containing the precoding matrices. It is only returned
        if ``return_precoding_matrix=True``.

    Note
    ----
    If you want to use this function in Graph mode with XLA, i.e., within
    a function that is decorated with ``@tf.function(jit_compile=True)``,
    you must set ``sionna.Config.xla_compat=true``.
    See :py:attr:`~sionna.Config.xla_compat`.
    """
    # Compute the pseudo-inverse V = H^H (H H^H)^{-1}.
    # sionna's matrix_inv (rather than tf.linalg.inv) is used for
    # XLA compatibility — see the Note in the docstring.
    g = tf.matmul(h, h, adjoint_b=True)
    g = tf.matmul(h, matrix_inv(g), adjoint_a=True)

    # Normalize each column of G (one precoding vector per stream) to
    # unit norm, so that tr(G G^H) = K.
    norm = tf.sqrt(tf.reduce_sum(tf.abs(g)**2, axis=-2, keepdims=True))
    g = g/tf.cast(norm, g.dtype)

    # Append a singleton dim so `x` acts as a column vector in matmul
    x_precoded = tf.expand_dims(x, -1)

    # Apply the precoder and drop the trailing singleton dimension
    x_precoded = tf.squeeze(tf.matmul(g, x_precoded), -1)

    if return_precoding_matrix:
        return (x_precoded, g)
    else:
        return x_precoded