Commit

upd
yzh119 committed Jan 31, 2024
1 parent 4d07829 commit b36b5bf
Showing 3 changed files with 7 additions and 7 deletions.
4 changes: 2 additions & 2 deletions docs/conf.py
@@ -94,6 +94,6 @@
 }

 # add additional overrides
-# templates_path += [tlcpack_sphinx_addon.get_templates_path()]
-# html_static_path += [tlcpack_sphinx_addon.get_static_path()]
+templates_path += [tlcpack_sphinx_addon.get_templates_path()]
+html_static_path += [tlcpack_sphinx_addon.get_static_path()]
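
For readers unfamiliar with tlcpack_sphinx_addon, here is a minimal sketch of how these two un-commented lines typically sit in a Sphinx conf.py; the import and the initial path lists are assumptions about surrounding code that this diff does not show.

# Hedged sketch of the surrounding docs/conf.py context; only the two `+=`
# lines at the bottom actually appear in this commit.
import tlcpack_sphinx_addon

templates_path = ["_templates"]    # project-local template search path (assumed)
html_static_path = ["_static"]     # project-local static assets (assumed)

# Append the addon's bundled templates and static assets so its theme
# overrides take effect -- these are the lines the commit un-comments.
templates_path += [tlcpack_sphinx_addon.get_templates_path()]
html_static_path += [tlcpack_sphinx_addon.get_static_path()]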

2 changes: 1 addition & 1 deletion python/flashinfer/cascade.py
@@ -289,7 +289,7 @@ def begin_forward(
 The ``num_qo_heads`` must be a multiple of ``num_kv_heads``. If ``num_qo_heads``
 is not equal to ``num_kv_heads``, the function will use
-`grouped query attention<https://arxiv.org/abs/2305.13245>`_.
+`grouped query attention <https://arxiv.org/abs/2305.13245>`_.
 """
 self._batch_decode_wrapper.begin_forward(
 unique_kv_indptr,
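
The docstring above states that ``num_qo_heads`` must be a multiple of ``num_kv_heads``. A small illustrative check (a hypothetical helper, not FlashInfer's internal code) makes the relationship concrete: each key/value head is shared by a group of ``num_qo_heads // num_kv_heads`` query heads, which is the grouped-query-attention setting the link refers to.

# Hypothetical helper illustrating the head-count constraint; not part of FlashInfer.
def gqa_group_size(num_qo_heads: int, num_kv_heads: int) -> int:
    """Return how many query heads share each key/value head."""
    if num_qo_heads % num_kv_heads != 0:
        raise ValueError(
            f"num_qo_heads ({num_qo_heads}) must be a multiple of "
            f"num_kv_heads ({num_kv_heads})"
        )
    # Group size 1 is ordinary multi-head attention; > 1 is grouped query attention.
    return num_qo_heads // num_kv_heads

print(gqa_group_size(32, 8))   # 4 query heads share each KV head (GQA)
print(gqa_group_size(32, 32))  # 1: plain multi-head attention
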
8 changes: 4 additions & 4 deletions python/flashinfer/decode.py
@@ -157,12 +157,12 @@ def batch_decode_with_padded_kv_cache(
 The padded key tensor, shape:
 ``[batch_size, padded_seq_len, num_kv_heads, head_dim]`` if :attr:`kv_layout`
 is ``NHD`` or ``[batch_size, num_kv_heads, padded_seq_len, head_dim]`` if
-:attr:`kv_layout` is ``HND`.
+:attr:`kv_layout` is ``HND``.
 v_padded : torch.Tensor
 The padded value tensor, shape:
 ``[batch_size, padded_seq_len, num_kv_heads, head_dim]`` if :attr:`kv_layout`
 is ``NHD`` or ``[batch_size, num_kv_heads, padded_seq_len, head_dim]`` if
-:attr:`kv_layout` is ``HND`.
+:attr:`kv_layout` is ``HND``.
 kv_layout : str
 The layout of the input k/v tensors, could be either ``NHD`` or ``HND``.
 rotary_mode : str
@@ -228,12 +228,12 @@ def batch_decode_with_padded_kv_cache_return_lse(
 The padded key tensor, shape:
 ``[batch_size, padded_seq_len, num_kv_heads, head_dim]`` if :attr:`kv_layout`
 is ``NHD`` or ``[batch_size, num_kv_heads, padded_seq_len, head_dim]`` if
-:attr:`kv_layout` is ``HND`.
+:attr:`kv_layout` is ``HND``.
 v_padded : torch.Tensor
 The padded value tensor, shape:
 ``[batch_size, padded_seq_len, num_kv_heads, head_dim]`` if :attr:`kv_layout`
 is ``NHD`` or ``[batch_size, num_kv_heads, padded_seq_len, head_dim]`` if
-:attr:`kv_layout` is ``HND`.
+:attr:`kv_layout` is ``HND``.
 kv_layout : str
 The layout of the input k/v tensors, could be either ``NHD`` or ``HND``.
 rotary_mode : str
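
Both docstrings above describe the same padded K/V shapes for the ``NHD`` and ``HND`` layouts. The sketch below shows how the two layouts relate and how a call might look; the query tensor ``q``, its shape, and the positional-argument order are assumptions not confirmed by this diff.

# Hedged usage sketch for the padded-KV-cache decode API documented above.
# Only k_padded / v_padded / kv_layout / rotary_mode appear in this diff; the
# query tensor `q` and the argument order are assumptions.
import torch
import flashinfer

batch_size, padded_seq_len = 4, 128
num_qo_heads, num_kv_heads, head_dim = 32, 8, 128

q = torch.randn(batch_size, num_qo_heads, head_dim,
                dtype=torch.float16, device="cuda")

# NHD layout: [batch_size, padded_seq_len, num_kv_heads, head_dim]
k_nhd = torch.randn(batch_size, padded_seq_len, num_kv_heads, head_dim,
                    dtype=torch.float16, device="cuda")
v_nhd = torch.randn_like(k_nhd)
o_nhd = flashinfer.batch_decode_with_padded_kv_cache(q, k_nhd, v_nhd, kv_layout="NHD")

# HND layout: [batch_size, num_kv_heads, padded_seq_len, head_dim] holds the
# same data with the sequence and head axes swapped.
k_hnd = k_nhd.transpose(1, 2).contiguous()
v_hnd = v_nhd.transpose(1, 2).contiguous()
o_hnd = flashinfer.batch_decode_with_padded_kv_cache(q, k_hnd, v_hnd, kv_layout="HND")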
