Skip to content

Commit 958c271

Browse files
author
Krzysztof Parzyszek
authored
[Fix] Remove duplicated words from comments, NFC (#15873)
Removed instances of accidentally repeated words from comments. There are cases where duplicated words appear legitimately; those cases remain unmodified.
1 parent e754bc2 commit 958c271

File tree

29 files changed

+32
-32
lines changed

29 files changed

+32
-32
lines changed

docs/Doxyfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -306,7 +306,7 @@ EXTENSION_MAPPING =
306306

307307
# When enabled doxygen tries to link words that correspond to documented
308308
# classes, or namespaces to their corresponding documentation. Such a link can
309-
# be prevented in individual cases by by putting a % sign in front of the word
309+
# be prevented in individual cases by putting a % sign in front of the word
310310
# or globally by setting AUTOLINK_SUPPORT to NO.
311311
# The default value is: YES.
312312

docs/conf.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -244,15 +244,15 @@ def install_request_hook(gallery_conf, fname):
244244
# Installs the latest dev build of TVM from PyPI, with CUDA enabled. To use this,
245245
# you must request a Google Colab instance with a GPU by going to Runtime ->
246246
# Change runtime type -> Hardware accelerator -> GPU. If you wish to build from
247-
# source, see see https://tvm.apache.org/docs/install/from_source.html
247+
# source, see https://tvm.apache.org/docs/install/from_source.html
248248
pip install tlcpack-nightly-cu113 --pre -f https://tlcpack.ai/wheels"""
249249

250250
INSTALL_TVM_CUDA_FIXED = f"""\
251251
%%shell
252252
# Installs TVM version {version} from PyPI, with CUDA enabled. To use this,
253253
# you must request a Google Colab instance with a GPU by going to Runtime ->
254254
# Change runtime type -> Hardware accelerator -> GPU. If you wish to build from
255-
# source, see see https://tvm.apache.org/docs/install/from_source.html
255+
# source, see https://tvm.apache.org/docs/install/from_source.html
256256
pip install apache-tvm-cu113=={version} -f https://tlcpack.ai/wheels"""
257257

258258

include/tvm/runtime/logging.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@
113113
* in a function, or 'continue' or 'break' in a loop)
114114
* The default behavior when quit_on_assertion is false, is to 'return false'. If this is not
115115
* desirable, the macro caller can pass one more last parameter to COND_X to tell COND_X what
116-
* to do when when quit_on_assertion is false and the assertion fails.
116+
* to do when quit_on_assertion is false and the assertion fails.
117117
*
118118
* Rationale: These macros were designed to implement functions that have two behaviors
119119
* in a concise way. Those behaviors are quitting on assertion failures, or trying to

include/tvm/runtime/ndarray.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -275,7 +275,7 @@ class NDArray::ContainerBase {
275275
protected:
276276
/*!
277277
* \brief The shape container,
278-
* can be used used for shape data.
278+
* can be used for shape data.
279279
*/
280280
ShapeTuple shape_;
281281
};

include/tvm/runtime/packed_func.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,7 @@ class PackedFuncSubObj : public PackedFuncObj {
133133
* The arguments are passed by packed format.
134134
*
135135
* This is an useful unified interface to call generated functions,
136-
* It is the unified function function type of TVM.
136+
* It is the unified function type of TVM.
137137
* It corresponds to TVMFunctionHandle in C runtime API.
138138
*/
139139
class PackedFunc : public ObjectRef {

include/tvm/tir/expr.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1030,7 +1030,7 @@ class CommReducer : public ObjectRef {
10301030
TVM_DEFINE_OBJECT_REF_METHODS(CommReducer, ObjectRef, CommReducerNode);
10311031
};
10321032

1033-
/*! \brief Reduction operator operator */
1033+
/*! \brief Reduction operator */
10341034
class ReduceNode : public PrimExprNode {
10351035
public:
10361036
/*! \brief The commutative combiner */

include/tvm/tir/stmt.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -778,7 +778,7 @@ class SeqStmt : public Stmt {
778778
}
779779

780780
// If the argument is a single SeqStmt argument with no
781-
// flattening or unwrapping required required, then we may
781+
// flattening or unwrapping required, then we may
782782
// return the SeqStmt as-is.
783783
if constexpr (sizeof...(seq_args) == 1) {
784784
if (auto opt = Flattener::AsSeqStmt(std::forward<Args>(seq_args)...)) {

python/tvm/relay/op/contrib/clml.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535

3636

3737
def clml_sdk_version():
38-
"""Utility function to get clml version version"""
38+
"""Utility function to get clml version"""
3939

4040
return int(tvm.support.libinfo().get("TVM_CLML_VERSION", 2))
4141

python/tvm/relay/transform/memory_plan.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -287,7 +287,7 @@ def process_alloc_storage(self, dynamic_regions, lhs, call):
287287
dynamic_regions.append(lhs)
288288
else:
289289
# A new scope is created when entering a new region with different
290-
# device device.
290+
# device.
291291
region = self.current_region(dtype)
292292
if region.device and region.device.device_type != dev.device_type:
293293
self.enter_scope()

python/tvm/runtime/ndarray.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -617,7 +617,7 @@ def array(arr, device=cpu(0), mem_scope=None):
617617
The array to be copied from
618618
619619
device : Device, optional
620-
The device device to create the array
620+
The device to create the array
621621
622622
mem_scope : Optional[str]
623623
The memory scope of the array

0 commit comments

Comments
 (0)