author     Markus S. Wamser <github-dev@mail2013.wamser.eu>    2020-05-15 13:53:23 +0200
committer  Jérôme Forissier <jerome@forissier.org>             2020-05-15 17:22:56 +0200
commit     055830db2999dd9a0fc6a7bbe28393f2a82176ec (patch)
tree       f9cc34d1d466bb1e16dcbeba22186c24909310ef
parent     6af446abf4e6821b355b19b59d01f1bfbdb0b0fe (diff)
core: simple typo fixes in comments in core/arch tree
* changed "the the" to "the" in thread.h
* changed "the the" to "to the" in wait_queue.c
* changed "Optinally" to "Optionally" in generic_entry_a32.S

Signed-off-by: Markus S. Wamser <github-dev@mail2013.wamser.eu>
Reviewed-by: Etienne Carriere <etienne.carriere@linaro.org>
Reviewed-by: Jens Wiklander <jens.wiklander@linaro.org>
-rw-r--r--  core/arch/arm/include/kernel/thread.h     2
-rw-r--r--  core/arch/arm/kernel/generic_entry_a32.S  4
-rw-r--r--  core/arch/arm/kernel/wait_queue.c         2
3 files changed, 4 insertions, 4 deletions
diff --git a/core/arch/arm/include/kernel/thread.h b/core/arch/arm/include/kernel/thread.h
index 1dd60377..ff1a3038 100644
--- a/core/arch/arm/include/kernel/thread.h
+++ b/core/arch/arm/include/kernel/thread.h
@@ -320,7 +320,7 @@ void thread_set_foreign_intr(bool enable);
void thread_restore_foreign_intr(void);
/*
- * Defines the bits for the exception mask used the the
+ * Defines the bits for the exception mask used by the
* thread_*_exceptions() functions below.
* These definitions are compatible with both ARM32 and ARM64.
*/
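
For context, a sketch of how the exception mask described in the corrected comment is typically consumed: thread_mask_exceptions() returns the previous mask state, which thread_unmask_exceptions() later restores. The helper and its critical-section body below are illustrative assumptions, not code from this commit.

#include <kernel/thread.h>

/*
 * Illustrative only: mask foreign interrupts around a critical
 * section using the exception-mask bits defined in thread.h.
 */
static void update_state_unpreempted(void)
{
	uint32_t exceptions = thread_mask_exceptions(THREAD_EXCP_FOREIGN_INTR);

	/* ... work that must not be preempted by foreign interrupts ... */

	thread_unmask_exceptions(exceptions);
}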
diff --git a/core/arch/arm/kernel/generic_entry_a32.S b/core/arch/arm/kernel/generic_entry_a32.S
index ff361fea..6fac6768 100644
--- a/core/arch/arm/kernel/generic_entry_a32.S
+++ b/core/arch/arm/kernel/generic_entry_a32.S
@@ -141,8 +141,8 @@ END_FUNC reset_vect_table
* - Disable data and instruction cache.
* - MMU is expected off and exceptions trapped in ARM mode.
* - Enable or disable alignment checks upon platform configuration.
- * - Optinally enable write-implies-execute-never.
- * - Optinally enable round robin strategy for cache replacement.
+ * - Optionally enable write-implies-execute-never.
+ * - Optionally enable round robin strategy for cache replacement.
*
* Clobbers r0.
*/
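
The two "Optionally enable ..." steps correspond to SCTLR bits on ARMv7-A: WXN is SCTLR bit 19 and RR (round-robin cache replacement) is bit 14. Below is a minimal C sketch of the idea, assuming hypothetical enable flags and the MRC/MCR accessors shown; the real work in generic_entry_a32.S happens in assembly at boot, before the MMU is on.

#include <stdbool.h>
#include <stdint.h>

#define SCTLR_RR	(UINT32_C(1) << 14)	/* Round-robin cache replacement */
#define SCTLR_WXN	(UINT32_C(1) << 19)	/* Write implies execute-never */

static inline uint32_t read_sctlr(void)
{
	uint32_t sctlr = 0;

	/* SCTLR is coprocessor register c1, c0, 0 on ARMv7-A. */
	asm volatile("mrc p15, 0, %0, c1, c0, 0" : "=r"(sctlr));
	return sctlr;
}

static inline void write_sctlr(uint32_t sctlr)
{
	asm volatile("mcr p15, 0, %0, c1, c0, 0" : : "r"(sctlr));
	asm volatile("isb");
}

/* Illustrative only: the enable flags stand in for platform config. */
static void set_optional_sctlr_bits(bool enable_wxn, bool enable_rr)
{
	uint32_t sctlr = read_sctlr();

	if (enable_wxn)
		sctlr |= SCTLR_WXN;
	if (enable_rr)
		sctlr |= SCTLR_RR;
	write_sctlr(sctlr);
}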
diff --git a/core/arch/arm/kernel/wait_queue.c b/core/arch/arm/kernel/wait_queue.c
index c9b1b060..e293d82d 100644
--- a/core/arch/arm/kernel/wait_queue.c
+++ b/core/arch/arm/kernel/wait_queue.c
@@ -160,7 +160,7 @@ void wq_promote_condvar(struct wait_queue *wq, struct condvar *cv,
/*
* Find condvar waiter(s) and promote each to an active waiter.
* This is a bit unfair to eventual other active waiters as a
- * condvar waiter is added the the queue when waiting for the
+ * condvar waiter is added to the queue when waiting for the
* condvar.
*/
SLIST_FOREACH(wqe, wq, link) {
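
The promotion pattern the corrected comment describes, as a self-contained sketch: walk the queue and turn each entry blocked on the given condvar into a plain active waiter. The struct layout and field names below are assumptions for illustration, not OP-TEE's actual definitions.

#include <stdbool.h>
#include <stddef.h>
#include <sys/queue.h>

struct condvar;

struct wait_queue_elem {
	struct condvar *cv;	/* Condvar waited on, or NULL if active */
	SLIST_ENTRY(wait_queue_elem) link;
};

SLIST_HEAD(wait_queue, wait_queue_elem);

static void promote_condvar_waiters(struct wait_queue *wq,
				    struct condvar *cv, bool only_one)
{
	struct wait_queue_elem *wqe = NULL;

	SLIST_FOREACH(wqe, wq, link) {
		if (wqe->cv != cv)
			continue;
		/*
		 * Promote: detaching the entry from the condvar makes
		 * it an ordinary active waiter in the queue.
		 */
		wqe->cv = NULL;
		if (only_one)
			break;
	}
}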