Skip to content

Commit

Permalink
Fix all periods
Browse files · Browse the repository at this point in the history
  • Loading branch information
vyasr committed Aug 30, 2023
1 parent 6f105f5 commit c942202
Show file tree
Hide file tree
Showing 12 changed files with 24 additions and 26 deletions.
2 changes: 1 addition & 1 deletion doxygen/Doxyfile
Original file line number Diff line number Diff line change
Expand Up @@ -772,7 +772,7 @@ CITE_BIB_FILES =
# messages are off.
# The default value is: NO.

QUIET = YES
QUIET = NO

# The WARNINGS tag can be used to turn on/off the warning messages that are
# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
Expand Down
2 changes: 1 addition & 1 deletion include/rmm/cuda_device.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ struct cuda_device_id {
using value_type = int; ///< Integer type used for device identifier

/**
* @brief Construct a `cuda_device_id` from the specified integer value.
* @brief Construct a `cuda_device_id` from the specified integer value
*
* @param dev_id The device's integer identifier
*/
Expand Down
8 changes: 4 additions & 4 deletions include/rmm/cuda_stream_view.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,9 @@ class cuda_stream_view {
constexpr cuda_stream_view(std::nullptr_t) = delete; //< Prevent cast from nullptr

/**
* @brief Constructor from a cudaStream_t.
* @brief Constructor from a cudaStream_t
*
* @param stream The underlying stream for this view.
* @param stream The underlying stream for this view
*/
constexpr cuda_stream_view(cudaStream_t stream) noexcept : stream_{stream} {}

Expand All @@ -68,12 +68,12 @@ class cuda_stream_view {
constexpr operator cudaStream_t() const noexcept { return value(); }

/**
* @briefreturn{true if the wrapped stream is the CUDA per-thread default stream.}
* @briefreturn{true if the wrapped stream is the CUDA per-thread default stream}
*/
[[nodiscard]] inline bool is_per_thread_default() const noexcept;

/**
* @briefreturn{true if the wrapped stream is explicitly the CUDA legacy default stream.}
* @briefreturn{true if the wrapped stream is explicitly the CUDA legacy default stream}
*/
[[nodiscard]] inline bool is_default() const noexcept;

Expand Down
2 changes: 1 addition & 1 deletion include/rmm/device_buffer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ class device_buffer {
*
* @param other The `device_buffer` whose contents will be moved.
*
* @return A reference to this `device_buffer`.
* @return A reference to this `device_buffer`
*/
device_buffer& operator=(device_buffer&& other) noexcept
{
Expand Down
7 changes: 3 additions & 4 deletions include/rmm/device_scalar.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,8 @@ class device_scalar {
public:
static_assert(std::is_trivially_copyable<T>::value, "Scalar type must be trivially copyable");

using value_type =
typename device_uvector<T>::value_type; ///< T, the type of the scalar element
using reference = typename device_uvector<T>::reference; ///< value_type&
using value_type = typename device_uvector<T>::value_type; ///< T, the type of the scalar element
using reference = typename device_uvector<T>::reference; ///< value_type&
using const_reference = typename device_uvector<T>::const_reference; ///< const value_type&
using pointer =
typename device_uvector<T>::pointer; ///< The type of the pointer returned by data()
Expand All @@ -55,7 +54,7 @@ class device_scalar {
/**
* @brief Default move assignment operator
*
* @return device_scalar& A reference to the assigned-to object.
* @return device_scalar& A reference to the assigned-to object
*/
device_scalar& operator=(device_scalar&&) noexcept = default;

Expand Down
2 changes: 1 addition & 1 deletion include/rmm/device_uvector.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -504,7 +504,7 @@ class device_uvector {
[[nodiscard]] std::size_t size() const noexcept { return bytes_to_elements(_storage.size()); }

/**
* @briefreturn{The signed number of elements in the vector.}
* @briefreturn{The signed number of elements in the vector}
*/
[[nodiscard]] std::int64_t ssize() const noexcept
{
Expand Down
4 changes: 2 additions & 2 deletions include/rmm/exec_policy.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@
namespace rmm {

/**
* @brief Synchronous execution policy for allocations using thrust.
* @brief Synchronous execution policy for allocations using thrust
*/
using thrust_exec_policy_t =
thrust::detail::execute_with_allocator<rmm::mr::thrust_allocator<char>,
Expand Down Expand Up @@ -60,7 +60,7 @@ class exec_policy : public thrust_exec_policy_t {
#if THRUST_VERSION >= 101600

/**
* @brief Asynchronous execution policy for allocations using thrust.
* @brief Asynchronous execution policy for allocations using thrust
*/
using thrust_exec_policy_nosync_t =
thrust::detail::execute_with_allocator<rmm::mr::thrust_allocator<char>,
Expand Down
4 changes: 2 additions & 2 deletions include/rmm/mr/device/limiting_resource_adaptor.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ class limiting_resource_adaptor final : public device_memory_resource {
* @throws `rmm::logic_error` if `upstream == nullptr`
*
* @param upstream The resource used for allocating/deallocating device memory
* @param allocation_limit Maximum memory allowed for this allocator.
* @param alignment Alignment in bytes for the start of each allocated buffer.
* @param allocation_limit Maximum memory allowed for this allocator
* @param alignment Alignment in bytes for the start of each allocated buffer
*/
limiting_resource_adaptor(Upstream* upstream,
std::size_t allocation_limit,
Expand Down
6 changes: 3 additions & 3 deletions include/rmm/mr/device/owning_wrapper.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -117,12 +117,12 @@ class owning_wrapper : public device_memory_resource {
}

/**
* @briefreturn{A constant reference to the wrapped resource.}
* @briefreturn{A constant reference to the wrapped resource}
*/
[[nodiscard]] Resource const& wrapped() const noexcept { return *wrapped_; }

/**
* @briefreturn{A reference to the wrapped resource.}
* @briefreturn{A reference to the wrapped resource}
*/
[[nodiscard]] Resource& wrapped() noexcept { return *wrapped_; }

Expand All @@ -135,7 +135,7 @@ class owning_wrapper : public device_memory_resource {
}

/**
* @briefreturn{true if the wrapped resource supports get_mem_info, false otherwise.}
* @briefreturn{true if the wrapped resource supports get_mem_info, false otherwise}
*/
[[nodiscard]] bool supports_get_mem_info() const noexcept override
{
Expand Down
2 changes: 1 addition & 1 deletion include/rmm/mr/device/per_device_resource.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ inline std::mutex& map_lock()

// This symbol must have default visibility, see: https://github.com/rapidsai/rmm/issues/826
/**
* @briefreturn{Reference to the map from device id -> resource.}
* @briefreturn{Reference to the map from device id -> resource}
*/
RMM_EXPORT inline auto& get_map()
{
Expand Down
5 changes: 3 additions & 2 deletions include/rmm/mr/device/polymorphic_allocator.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,8 @@ template <typename Allocator>
class stream_allocator_adaptor {
public:
using value_type =
typename std::allocator_traits<Allocator>::value_type; ///< The value type of objects allocated by this allocator
typename std::allocator_traits<Allocator>::value_type; ///< The value type of objects allocated
///< by this allocator

stream_allocator_adaptor() = delete;

Expand Down Expand Up @@ -213,7 +214,7 @@ class stream_allocator_adaptor {
void deallocate(value_type* ptr, std::size_t num) { alloc_.deallocate(ptr, num, stream()); }

/**
* @briefreturn{The stream on which calls to the underlying allocator are made.}
* @briefreturn{The stream on which calls to the underlying allocator are made}
*/
[[nodiscard]] cuda_stream_view stream() const noexcept { return stream_; }

Expand Down
6 changes: 2 additions & 4 deletions include/rmm/mr/device/statistics_resource_adaptor.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,7 @@ class statistics_resource_adaptor final : public device_memory_resource {
int64_t total{0}; ///< Sum of all added values

/**
* @brief Add `val` to the current value and update the peak value if
* necessary.
* @brief Add `val` to the current value and update the peak value if necessary
*
* @param val Value to add
* @return Reference to this object
Expand All @@ -68,8 +67,7 @@ class statistics_resource_adaptor final : public device_memory_resource {
}

/**
* @brief Subtract `val` from the current value and update the peak value if
* necessary.
* @brief Subtract `val` from the current value and update the peak value if necessary
*
* @param val Value to subtract
* @return Reference to this object
Expand Down

0 comments on commit c942202

Please sign in to comment.