
Commit

fix typo (#21324)
xuwei-k authored Aug 6, 2024
1 parent 976133a commit fd45847
Showing 41 changed files with 47 additions and 47 deletions.

@@ -129,7 +129,7 @@ class ClassfileWriters(frontendAccess: PostProcessorFrontendAccess) {
if (file.isInstanceOf[JarArchive]) {
val jarCompressionLevel = compilerSettings.jarCompressionLevel
// Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where
-// created using `AbstractFile.bufferedOutputStream`instead of JarWritter
+// created using `AbstractFile.bufferedOutputStream`instead of JarWriter
val jarFile = file.underlyingSource.getOrElse{
throw new IllegalStateException("No underlying source for jar")
}

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/CompilationUnit.scala
@@ -87,7 +87,7 @@ class CompilationUnit protected (val source: SourceFile, val info: CompilationUn
*/
val depRecorder: sbt.DependencyRecorder = sbt.DependencyRecorder()

-/** Suspends the compilation unit by thowing a SuspendException
+/** Suspends the compilation unit by throwing a SuspendException
* and recording the suspended compilation unit
*/
def suspend(hint: => String)(using Context): Nothing =

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/Compiler.scala
@@ -110,7 +110,7 @@ class Compiler {
new LetOverApply, // Lift blocks from receivers of applications
new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify.
List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements.
-List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types
+List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types
new PureStats, // Remove pure stats from blocks
new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations
new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference

@@ -32,7 +32,7 @@ abstract class TreeMapWithTrackedStats extends TreeMapWithImplicits:
case _ => tree
end updateTracked

-/** Process a list of trees and give the priority to trakced trees */
+/** Process a list of trees and give the priority to tracked trees */
private final def withUpdatedTrackedTrees(stats: List[Tree])(using Context) =
val trackedTrees = TreeMapWithTrackedStats.trackedTrees
stats.mapConserve:
@@ -67,7 +67,7 @@ end TreeMapWithTrackedStats
object TreeMapWithTrackedStats:
private val TrackedTrees = new Property.Key[mutable.Map[Symbol, tpd.MemberDef]]

-/** Fetch the tracked trees in the cuurent context */
+/** Fetch the tracked trees in the current context */
private def trackedTrees(using Context): mutable.Map[Symbol, MemberDef] =
ctx.property(TrackedTrees).get


4 changes: 2 additions & 2 deletions compiler/src/dotty/tools/dotc/cc/CaptureOps.scala
@@ -21,7 +21,7 @@ private val Captures: Key[CaptureSet] = Key()

object ccConfig:

-/** If true, allow mappping capture set variables under captureChecking with maps that are neither
+/** If true, allow mapping capture set variables under captureChecking with maps that are neither
* bijective nor idempotent. We currently do now know how to do this correctly in all
* cases, though.
*/
@@ -35,7 +35,7 @@ object ccConfig:

/** If enabled, use a special path in recheckClosure for closures
* that are eta expansions. This can improve some error messages but
-* currently leads to unsoundess for handlng reach capabilities.
+* currently leads to unsoundess for handling reach capabilities.
* TODO: The unsoundness needs followin up.
*/
inline val handleEtaExpansionsSpecially = false

4 changes: 2 additions & 2 deletions compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala
@@ -39,7 +39,7 @@ object CheckCaptures:
/** A class describing environments.
* @param owner the current owner
* @param kind the environment's kind
-* @param captured the caputure set containing all references to tracked free variables outside of boxes
+* @param captured the capture set containing all references to tracked free variables outside of boxes
* @param outer0 the next enclosing environment
*/
case class Env(
@@ -509,7 +509,7 @@ class CheckCaptures extends Recheck, SymTransformer:
override def recheckApply(tree: Apply, pt: Type)(using Context): Type =
val meth = tree.fun.symbol

-// Unsafe box/unbox handlng, only for versions < 3.3
+// Unsafe box/unbox handling, only for versions < 3.3
def mapArgUsing(f: Type => Type) =
val arg :: Nil = tree.args: @unchecked
val argType0 = f(recheckStart(arg, pt))

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/core/Annotations.scala
@@ -43,7 +43,7 @@ object Annotations {
def argumentConstantString(i: Int)(using Context): Option[String] =
for (case Constant(s: String) <- argumentConstant(i)) yield s

-/** The tree evaluaton is in progress. */
+/** The tree evaluation is in progress. */
def isEvaluating: Boolean = false

/** The tree evaluation has finished. */

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/core/Contexts.scala
@@ -265,7 +265,7 @@ object Contexts {
/** SourceFile with given path, memoized */
def getSource(path: String): SourceFile = getSource(path.toTermName)

-/** AbstraFile with given path name, memoized */
+/** AbstractFile with given path name, memoized */
def getFile(name: TermName): AbstractFile = base.files.get(name) match
case Some(file) =>
file

@@ -29,7 +29,7 @@ object CommentPickler:
def traverse(x: Any): Unit = x match
case x: untpd.Tree @unchecked =>
x match
-case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d.
+case x: tpd.MemberDef @unchecked => // at this point all MemberDefs are t(y)p(e)d.
for comment <- docString(x) do pickleComment(addrOfTree(x), comment)
case _ =>
val limit = x.productArity

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -684,7 +684,7 @@ object Scanners {
if !r.isOutermost
&& closingRegionTokens.contains(token)
&& !(token == CASE && r.prefix == MATCH)
-&& next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.sala
+&& next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.scala
=>
insert(OUTDENT, offset)
case _ =>

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/transform/CtxLazy.scala
@@ -11,7 +11,7 @@ import scala.compiletime.uninitialized
* with a different context.
*
* A typical use case is a lazy val in a phase object which exists once per root context where
-* the expression intiializing the lazy val depends only on the root context, but not any changes afterwards.
+* the expression initializing the lazy val depends only on the root context, but not any changes afterwards.
*/
class CtxLazy[T](expr: Context ?=> T) {
private var myValue: T = uninitialized
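
As a rough, hedged illustration of the memoize-once pattern this CtxLazy comment describes (not the compiler's actual implementation, and using no dotty APIs), a minimal sketch might look like this:

```scala
// Illustration only, not dotty's CtxLazy: evaluate the initializer with the
// context supplied on first access, cache the result, and ignore the context
// on every later call.
final class OncePerContext[Ctx, T](init: Ctx => T):
  private var forced = false
  private var cached: T = null.asInstanceOf[T]
  def apply(ctx: Ctx): T =
    if !forced then
      cached = init(ctx)
      forced = true
    cached
```

The real class avoids the null cast by initializing `myValue` with `uninitialized`, as the hunk above shows.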

@@ -13,7 +13,7 @@ import NameKinds.SuperAccessorName

object ElimErasedValueType {
val name: String = "elimErasedValueType"
-val description: String = "expand erased value types to their underlying implmementation types"
+val description: String = "expand erased value types to their underlying implementation types"

def elimEVT(tp: Type)(using Context): Type = tp match {
case ErasedValueType(_, underlying) =>

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala
@@ -94,7 +94,7 @@ class ExpandSAMs extends MiniPhase:
* }
* ```
*
-* is expanded to an anomymous class:
+* is expanded to an anonymous class:
*
* ```
* val x: PartialFunction[A, B] = {

4 changes: 2 additions & 2 deletions compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala
@@ -101,7 +101,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase =>
val parentCls = parent.tpe.classSymbol.asClass
parent match
// if we are in a regular class and first parent is also a regular class,
-// make sure we have a contructor
+// make sure we have a constructor
case parent: TypeTree
if !cls.is(Trait) && !parentCls.is(Trait) && !defn.NotRuntimeClasses.contains(parentCls) =>
New(parent.tpe, Nil).withSpan(impl.span)
@@ -454,7 +454,7 @@ object ExplicitOuter {
val enclClass = ctx.owner.lexicallyEnclosingClass.asClass
val outerAcc = atPhaseNoLater(lambdaLiftPhase) {
// lambdalift mangles local class names, which means we cannot
-// reliably find outer acessors anymore
+// reliably find outer accessors anymore
tree match
case tree: This if tree.symbol == enclClass && !enclClass.is(Trait) =>
outerParamAccessor(enclClass)

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/transform/Pickler.scala
@@ -44,7 +44,7 @@ object Pickler {
*/
inline val ParallelPickling = true

-/**A holder for syncronization points and reports when writing TASTy asynchronously.
+/**A holder for synchronization points and reports when writing TASTy asynchronously.
* The callbacks should only be called once.
*/
class AsyncTastyHolder private (

@@ -51,7 +51,7 @@ trait ReifiedReflect:
.select(defn.Quotes_reflect_TypeApply_apply)
.appliedTo(fn, argTrees)

-/** Create tree for `quotes.reflect.Assing(<lhs>, <rhs>)` */
+/** Create tree for `quotes.reflect.Assign(<lhs>, <rhs>)` */
def Assign(lhs: Tree, rhs: Tree)(using Context) =
self.select(defn.Quotes_reflect_Assign)
.select(defn.Quotes_reflect_Assign_apply)

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/transform/SelectStatic.scala
@@ -15,7 +15,7 @@ import dotty.tools.dotc.transform.MegaPhase.*
* Otherwise, the backend needs to be aware that some qualifiers need to be
* dropped.
*
-* A tranformation similar to what this phase does seems to be performed by
+* A transformation similar to what this phase does seems to be performed by
* flatten in nsc.
*
* The side effects of the qualifier of a dropped `Select` is normally

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/typer/Synthesizer.scala
@@ -455,7 +455,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context):
MirrorSource.reduce(mirroredType) match
case Right(msrc) => msrc match
case MirrorSource.Singleton(_, tref) =>
-val singleton = tref.termSymbol // prefer alias name over the orignal name
+val singleton = tref.termSymbol // prefer alias name over the original name
val singletonPath = tpd.singleton(tref).withSpan(span)
if tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object.
val mirrorType = formal.constrained_& {

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/dotc/typer/Typer.scala
@@ -1282,7 +1282,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer
* For example, both `@Annot(5)` and `@Annot({5, 6}) are viable calls of the constructor
* of annotation defined as `@interface Annot { int[] value() }`
* We assume that calling `typedNamedArg` in context of Java implies that we are dealing
-* with annotation contructor, as named arguments are not allowed anywhere else in Java.
+* with annotation constructor, as named arguments are not allowed anywhere else in Java.
* Under explicit nulls, the pt could be nullable. We need to strip `Null` type first.
*/
val arg1 = pt.stripNull() match {

2 changes: 1 addition & 1 deletion compiler/src/dotty/tools/io/FileWriters.scala
@@ -226,7 +226,7 @@ object FileWriters {
if (file.isInstanceOf[JarArchive]) {
val jarCompressionLevel = ctx.settings.jarCompressionLevel
// Writing to non-empty JAR might be an undefined behaviour, e.g. in case if other files where
-// created using `AbstractFile.bufferedOutputStream`instead of JarWritter
+// created using `AbstractFile.bufferedOutputStream`instead of JarWriter
val jarFile = file.underlyingSource.getOrElse{
throw new IllegalStateException("No underlying source for jar")
}

2 changes: 1 addition & 1 deletion docs/_docs/contributing/architecture/phases.md
@@ -63,7 +63,7 @@ Finally are [staging], which ensures that quotes conform to the
trees to embedded TASTy strings.

### `transformPhases`
-These phases are concerned with tranformation into lower-level forms
+These phases are concerned with transformation into lower-level forms
suitable for the runtime system, with two sub-groupings:
- High-level transformations: All phases from [firstTransform] to [erasure].
Most of these phases transform syntax trees, expanding high-level constructs

2 changes: 1 addition & 1 deletion docs/_docs/internals/overall-structure.md
@@ -160,7 +160,7 @@ phases. The current list of phases is specified in class [Compiler] as follows:
new LetOverApply, // Lift blocks from receivers of applications
new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify.
List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements.
-List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types
+List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types
new PureStats, // Remove pure stats from blocks
new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations
new ArrayApply, // Optimize `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]`

2 changes: 1 addition & 1 deletion docs/_docs/reference/experimental/runtimeChecked.md
@@ -124,7 +124,7 @@ As an escape hatch in 3.2 we recommended to use a type ascription of `: @uncheck
|which may result in a MatchError at runtime.
```

-However, `: @unchecked` is syntactically awkward, and is also a misnomer - in fact in this case the the pattern _is_ fully checked, but the necessary checks occur at runtime. The `runtimeChecked` method is intended to replace `@unchecked` for this purpose.
+However, `: @unchecked` is syntactically awkward, and is also a misnomer - in fact in this case the pattern _is_ fully checked, but the necessary checks occur at runtime. The `runtimeChecked` method is intended to replace `@unchecked` for this purpose.

The `@unchecked` annotation is still retained for silencing warnings on unsound type tests.
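
A minimal sketch of the contrast described in this runtimeChecked.md excerpt (illustration only, not part of the commit; `runtimeChecked` is experimental and its availability depends on the Scala 3 version and flags in use):

```scala
// Hedged sketch: both bindings deconstruct a non-empty list with a
// non-exhaustive pattern; only the treatment of the missing case differs.
val xs: List[Int] = List(1, 2, 3)

// Older escape hatch: ascribe @unchecked to silence the exhaustivity warning.
val a :: rest1 = (xs: @unchecked)

// Replacement described in the page: the pattern is still checked, but at
// runtime, throwing a MatchError if it does not match.
val b :: rest2 = xs.runtimeChecked
```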


@@ -583,7 +583,7 @@ class CompletionArgSuite extends BaseCompletionSuite:
|""".stripMargin
)

-@Test def `contructor-param` =
+@Test def `constructor-param` =
check(
"""|class Foo (xxx: Int)
|
@@ -595,7 +595,7 @@
|""".stripMargin
)

-@Test def `contructor-param2` =
+@Test def `constructor-param2` =
check(
"""|class Foo ()
|

2 changes: 1 addition & 1 deletion scaladoc/src/dotty/tools/scaladoc/tasty/BasicSupport.scala
@@ -48,7 +48,7 @@ trait BasicSupport:
"scala.transient",
"scala.volatile",
"scala.annotation.experimental",
"scala.annotation.contructorOnly",
"scala.annotation.constructorOnly",
"scala.annotation.static",
"scala.annotation.targetName",
"scala.annotation.threadUnsafe",

@@ -146,7 +146,7 @@ abstract class TreeInterpreter[Q <: Quotes & Singleton](using val q: Q) {
}

case Assign(lhs, rhs) =>
log("<interpretAssing>", tree)(localValue(lhs.symbol).update(eval(rhs)))
log("<interpretAssign>", tree)(localValue(lhs.symbol).update(eval(rhs)))

case If(cond, thenp, elsep) => log("interpretIf", tree)(interpretIf(cond, thenp, elsep))
case While(cond, body) => log("interpretWhile", tree)(interpretWhile(cond, body))

2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/CompilationUnit.scala
@@ -66,7 +66,7 @@ class CompilationUnit protected (val source: SourceFile) {
/** Can this compilation unit be suspended */
def isSuspendable: Boolean = true

-/** Suspends the compilation unit by thowing a SuspendException
+/** Suspends the compilation unit by throwing a SuspendException
* and recording the suspended compilation unit
*/
def suspend()(using Context): Nothing =

2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/Compiler.scala
@@ -107,7 +107,7 @@ class Compiler {
new LetOverApply, // Lift blocks from receivers of applications
new ArrayConstructors) :: // Intercept creation of (non-generic) arrays and intrinsify.
List(new Erasure) :: // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements.
-List(new ElimErasedValueType, // Expand erased value types to their underlying implmementation types
+List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types
new PureStats, // Remove pure stats from blocks
new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations
new EtaReduce, // Reduce eta expansions of pure paths to the underlying function reference

2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/config/Config.scala
@@ -248,7 +248,7 @@ object Config {
*/
inline val printCaptureSetsAsPrefix = true

-/** If true, allow mappping capture set variables under captureChecking with maps that are neither
+/** If true, allow mapping capture set variables under captureChecking with maps that are neither
* bijective nor idempotent. We currently do now know how to do this correctly in all
* cases, though.
*/

2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/core/Contexts.scala
@@ -252,7 +252,7 @@ object Contexts {
/** SourceFile with given path, memoized */
def getSource(path: String): SourceFile = getSource(path.toTermName)

-/** AbstraFile with given path name, memoized */
+/** AbstractFile with given path name, memoized */
def getFile(name: TermName): AbstractFile = base.files.get(name) match
case Some(file) =>
file

@@ -27,7 +27,7 @@ class CommentPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Addr, docStr
private def traverse(x: Any): Unit = x match
case x: untpd.Tree @unchecked =>
x match
-case x: tpd.MemberDef @unchecked => // at this point all MembderDefs are t(y)p(e)d.
+case x: tpd.MemberDef @unchecked => // at this point all MemberDefs are t(y)p(e)d.
for comment <- docString(x) do pickleComment(addrOfTree(x), comment)
case _ =>
val limit = x.productArity

2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/parsing/Scanners.scala
@@ -667,7 +667,7 @@ object Scanners {
if !r.isOutermost
&& closingRegionTokens.contains(token)
&& !(token == CASE && r.prefix == MATCH)
-&& next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.sala
+&& next.token == EMPTY // can be violated for ill-formed programs, e.g. neg/i12605.scala
=>
insert(OUTDENT, offset)
case _ =>

2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/transform/CtxLazy.scala
@@ -9,7 +9,7 @@ import core.Contexts._
* with a different context.
*
* A typical use case is a lazy val in a phase object which exists once per root context where
-* the expression intiializing the lazy val depends only on the root context, but not any changes afterwards.
+* the expression initializing the lazy val depends only on the root context, but not any changes afterwards.
*/
class CtxLazy[T](expr: Context ?=> T) {
private var myValue: T = _

@@ -13,7 +13,7 @@ import NameKinds.SuperAccessorName

object ElimErasedValueType {
val name: String = "elimErasedValueType"
-val description: String = "expand erased value types to their underlying implmementation types"
+val description: String = "expand erased value types to their underlying implementation types"

def elimEVT(tp: Type)(using Context): Type = tp match {
case ErasedValueType(_, underlying) =>

2 changes: 1 addition & 1 deletion tests/pos-with-compiler-cc/dotc/transform/ExpandSAMs.scala
@@ -88,7 +88,7 @@ class ExpandSAMs extends MiniPhase:
* }
* ```
*
-* is expanded to an anomymous class:
+* is expanded to an anonymous class:
*
* ```
* val x: PartialFunction[A, B] = {

4 changes: 2 additions & 2 deletions tests/pos-with-compiler-cc/dotc/transform/ExplicitOuter.scala
@@ -100,7 +100,7 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase =>
val parentCls = parent.tpe.classSymbol.asClass
parent match
// if we are in a regular class and first parent is also a regular class,
-// make sure we have a contructor
+// make sure we have a constructor
case parent: TypeTree
if !cls.is(Trait) && !parentCls.is(Trait) && !defn.NotRuntimeClasses.contains(parentCls) =>
New(parent.tpe, Nil).withSpan(impl.span)
@@ -459,7 +459,7 @@ object ExplicitOuter {
val enclClass = ctx.owner.lexicallyEnclosingClass.asClass
val outerAcc = atPhaseNoLater(lambdaLiftPhase) {
// lambdalift mangles local class names, which means we cannot
-// reliably find outer acessors anymore
+// reliably find outer accessors anymore
tree match
case tree: This if tree.symbol == enclClass && !enclClass.is(Trait) =>
outerParamAccessor(enclClass)
