WIP: cross build with sbt 2.x

agboom committed Oct 10, 2024
1 parent fa20613 commit 0ae7c53
Showing 24 changed files with 690 additions and 380 deletions.
2 changes: 1 addition & 1 deletion .scalafmt.conf
@@ -1,7 +1,7 @@
# Version https://scalameta.org/scalafmt/docs/configuration.html#version
version = 3.8.3
# Dialect https://scalameta.org/scalafmt/docs/configuration.html#scala-dialects
runner.dialect = scala212
runner.dialect = scala3

style = IntelliJ
maxColumn = 120
16 changes: 11 additions & 5 deletions build.sbt
@@ -3,12 +3,18 @@ organization := "com.github.sbt"
homepage := Some(url("https://github.com/sbt/sbt-native-packager"))

Global / onChangedBuildSource := ReloadOnSourceChanges
Global / scalaVersion := "2.12.20"
Global / scalaVersion := "3.3.4"

// crossBuildingSettings
crossSbtVersions := Vector("1.1.6")
crossScalaVersions := Seq("2.12.20", "3.3.4")

Compile / scalacOptions ++= Seq("-deprecation")
(pluginCrossBuild / sbtVersion) := {
scalaBinaryVersion.value match {
case "2.12" => "1.1.6"
case _ => "2.0.0-M2"
}
}

Compile / scalacOptions ++= Seq("-deprecation", "-rewrite", "-source", "3.0-migration")
javacOptions ++= Seq("-source", "1.8", "-target", "1.8")

// put jdeb on the classpath for scripted tests
@@ -37,7 +43,7 @@ libraryDependencies ++= {
// scala version depended libraries
libraryDependencies ++= {
scalaBinaryVersion.value match {
case "2.10" => Nil
case "2.10" | "3" => Nil
case _ =>
Seq(
// Do NOT upgrade these dependencies to 2.x or newer! sbt-native-packager is a sbt-plugin
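
With crossScalaVersions and the pluginCrossBuild / sbtVersion mapping above, the plugin is cross-built per Scala version: the 2.12 axis targets sbt 1.x and the 3.x axis targets sbt 2.0.0-M2. A minimal sketch of how such a build is typically driven from the sbt shell (the concrete tasks are illustrative, not part of this commit):

    // build and test against every entry in crossScalaVersions
    > +compile
    > +test

    // or pin one axis explicitly, e.g. the sbt 2.x / Scala 3 side
    > ++3.3.4
    > compile
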
54 changes: 54 additions & 0 deletions src/main/scala-sbt-2.0.0-M2/com/typesafe/sbt/packager/Compat.scala
@@ -0,0 +1,54 @@
package com.typesafe.sbt.packager

import sbt.{librarymanagement as lm, PathFinder, File}
import sbt.internal.{librarymanagement as ilm, BuildDependencies as InternalBuildDependencies}
import sbt.util.CacheStore

import java.nio.file.Path

object Compat {
val IvyActions = ilm.IvyActions
type IvySbt = ilm.IvySbt
type IvyScala = sbt.librarymanagement.ScalaModuleInfo
val IvyScala = sbt.librarymanagement.ScalaModuleInfo

type UpdateConfiguration = lm.UpdateConfiguration

/**
* Used in
* - [[com.typesafe.sbt.packager.archetypes.JavaAppPackaging]]
*/
type BuildDependencies = InternalBuildDependencies

/**
* Alias for `scala.sys.process.Process`.
*/
type Process = sys.process.Process

/**
* Used in
* - [[com.typesafe.sbt.packager.docker.DockerPlugin]]
*/
type ProcessLogger = sys.process.ProcessLogger

/**
* Used in
* - [[com.typesafe.sbt.packager.Stager]]
* @param file
* the file backing the cache
* @return
* a CacheStore wrapping the given file
*/
implicit def fileToCacheStore(file: java.io.File): CacheStore = CacheStore(file)

type CompatFile = xsbti.HashedVirtualFileRef

def fromFile(file: File, extracted: sbt.Extracted): CompatFile = {
extracted.get(sbt.Keys.fileConverter).toVirtualFile(Path.of(file.toURI))
}

def toFile(file: CompatFile, extracted: sbt.Extracted): File = {
extracted.get(sbt.Keys.fileConverter).toPath(file).toFile
}

val moduleKey = sbt.Keys.moduleIDStr
val artifactKey = sbt.Keys.artifactStr
}
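
The two directions fromFile / toFile go through sbt's shared fileConverter, which is why an Extracted is threaded through. A hedged sketch of how downstream plugin code might round-trip between java.io.File and the sbt 2.x CompatFile alias (the task key and the zip path are illustrative assumptions, not part of this commit):

    import sbt.*
    import sbt.Keys.*
    import com.typesafe.sbt.packager.Compat

    // hypothetical task that exposes a packaged file as a HashedVirtualFileRef
    val demoPackageRef = taskKey[Compat.CompatFile]("illustrative only")

    demoPackageRef := {
      val extracted = Project.extract(state.value)
      val artifact: File = target.value / "example.zip" // assumed output path
      val ref: Compat.CompatFile = Compat.fromFile(artifact, extracted)
      val back: File = Compat.toFile(ref, extracted)     // resolves to a plain File again
      ref
    }
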
@@ -0,0 +1,160 @@
package com.typesafe.sbt.packager

import sbt.*
import sbt.io.*
import sbt.librarymanagement.LibraryManagementCodec.*
import sjsonnew.support.scalajson.unsafe.*
import sjsonnew.JsonFormat
import Compat.*


/** A set of helper methods to simplify the writing of mappings */
object MappingsHelper extends Mapper {

/**
* It lightens the build file if one wants to give a string instead of a file.
*
* @example
* {{{
* Universal / mappings ++= directory("extra")
* }}}
*
* @param sourceDir
* the source directory, given as a string
* @return
* mappings
*/
def directory(sourceDir: String): Seq[(File, String)] =
directory(file(sourceDir))

/**
* It lightens the build file if one wants to give a string instead of a file.
*
* @example
* {{{
* Universal / mappings ++= contentOf("extra")
* }}}
*
* @param sourceDir
* the source directory, given as a string
* @return
* mappings
*/
def contentOf(sourceDir: String): Seq[(File, String)] =
contentOf(file(sourceDir))

/**
* Create mappings from your classpath, for example if you want to add additional dependencies, like test or model.
*
* @example
* Add all test artifacts to a separate test folder
* {{{
* Universal / mappings ++= fromClasspath((Test / managedClasspath).value, "test", extracted)
* }}}
*
* @param entries
* the classpath entries to map
* @param target
* target folder inside the package
* @param extracted
* the extracted build state, used to resolve classpath file references
* @return
* a list of mappings
*/
def fromClasspath(entries: Def.Classpath, target: String, extracted: Extracted): Seq[(File, String)] =
fromClasspath(entries, target, extracted, _ => true)

/**
* Create mappings from your classpath, for example if you want to add additional dependencies, like test or model.
* You can also filter which artifacts should end up in the mappings.
*
* @example
* Filter all osgi bundles
* {{{
* Universal / mappings ++= fromClasspath(
* (Runtime / managedClasspath).value,
* "osgi",
* artifact => artifact.`type` == "bundle"
* )
* }}}
*
* @param entries
* the classpath entries from which mappings are created
* @param target
* folder, e.g. `model`. Must not end with a slash
* @param extracted
* the extracted build state, used to resolve classpath file references
* @param includeArtifact
* function to determine if an artifact should result in a mapping
* @param includeOnNoArtifact
* defaults to false; entries without artifact metadata are included only if this is true
*/
def fromClasspath(
entries: Def.Classpath,
target: String,
extracted: Extracted,
includeArtifact: Artifact => Boolean,
includeOnNoArtifact: Boolean = false
): Seq[(File, String)] =
// TODO: test: https://github.com/sbt/sbt/blob/78ac6d38097dac7eed75e857edb2262d05ce219e/main/src/main/scala/sbt/Defaults.scala#L4566
entries
.filter(attr =>
attr
.get(sbt.Keys.artifactStr)
.map(artifactFromStr)
.map(includeArtifact)
.getOrElse(includeOnNoArtifact)
)
.map { attribute =>
val file = Compat.toFile(attribute.data, extracted)
file -> s"$target/${file.getName}"
}

private def artifactFromStr(str: String): Artifact = {
val format: JsonFormat[Artifact] = summon[JsonFormat[Artifact]]
val json = Parser.parseFromString(str).get
Converter.fromJsonUnsafe(json)(format)
}

private def artifactToStr(art: Artifact): String = {
val format: JsonFormat[Artifact] = summon[JsonFormat[Artifact]]
CompactPrinter(Converter.toJsonUnsafe(art)(format))
}

/**
* Get the mappings for the given files relative to the given directories.
*/
def relative(files: Seq[File], dirs: Seq[File]): Seq[(File, String)] =
(files --- dirs) pair (relativeTo(dirs) | flat)

/**
* Constructs a jar name from its components (ModuleID / Artifact).
*/
def makeJarName(
org: String,
name: String,
revision: String,
artifactName: String,
artifactClassifier: Option[String]
): String =
org + "." +
name + "-" +
Option(artifactName.replace(name, "")).filterNot(_.isEmpty).map(_ + "-").getOrElse("") +
revision +
artifactClassifier.filterNot(_.isEmpty).map("-" + _).getOrElse("") +
".jar"

// Determines a nicer filename for an attributed jar file, using the
// ivy metadata if available.
def getJarFullFilename(dep: Attributed[CompatFile], extracted: Extracted): String = {
val filename: Option[String] = for {
moduleStr <- dep.metadata.get(sbt.Keys.moduleIDStr)
artifactStr <- dep.metadata.get(sbt.Keys.artifactStr)
} yield {
val module = Classpaths.moduleIdJsonKeyFormat.read(moduleStr)
val artifact = artifactFromStr(artifactStr)
makeJarName(module.organization, module.name, module.revision, artifact.name, artifact.classifier)
}
filename.getOrElse(toFile(dep.data, extracted).getName)
}

def getArtifact(file: Attributed[CompatFile]): Option[Artifact] =
file.get(sbt.Keys.artifactStr).map(artifactFromStr)

def setArtifact(artifact: Artifact, attr: Attributed[CompatFile]): Attributed[CompatFile] =
attr.put(sbt.Keys.artifactStr, artifactToStr(artifact))

}
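
Since fromClasspath and getJarFullFilename now need an Extracted to resolve virtual file references, callers have to materialise one, typically from the state task. A minimal, hypothetical sketch of how a build might use the new signature (the task key and target folder name are illustrative only):

    import sbt.*
    import sbt.Keys.*
    import com.typesafe.sbt.packager.MappingsHelper

    // hypothetical task computing extra package mappings from the test classpath
    val extraMappings = taskKey[Seq[(File, String)]]("illustrative only")

    extraMappings := {
      val extracted = Project.extract(state.value)
      MappingsHelper.fromClasspath((Test / managedClasspath).value, "test", extracted)
    }
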
@@ -0,0 +1,110 @@
package com.typesafe.sbt.packager

import sbt.*
import sbt.Keys.*
import sbt.librarymanagement.{IvyFileConfiguration, PublishConfiguration}
import Compat.*

/**
* - TODO write tests for the SettingsHelper
* - TODO document methods properly
* - TODO document the sbt internal stuff that is used
*/
object SettingsHelper {
def addPackage(
config: Configuration,
packageTask: TaskKey[CompatFile],
extension: String,
classifier: Option[String] = None
): Seq[Setting[?]] =
inConfig(config)(
addArtifact(
name apply (Artifact(
_,
extension,
extension,
classifier = classifier,
configurations = Vector.empty,
url = None
)),
packageTask
)
)

def makeDeploymentSettings(
config: Configuration,
packageTask: TaskKey[CompatFile],
extension: String,
classifier: Option[String] = None
): Seq[Setting[?]] =
// Why do we need the ivyPublishSettings and jvmPublishSettings?
inConfig(config)(Classpaths.ivyPublishSettings ++ Classpaths.jvmPublishSettings) ++ inConfig(config)(
Seq(
artifacts := Seq.empty,
packagedArtifacts := Map.empty,
projectID := ModuleID(organization.value, name.value, version.value),
// Custom module settings to skip the ivy XmlModuleDescriptorParser
moduleSettings := ModuleDescriptorConfiguration(projectID.value, projectInfo.value)
.withScalaModuleInfo(scalaModuleInfo.value),
ivyModule := {
val ivy = ivySbt.value
new ivy.Module(moduleSettings.value)
},
// Where have these settings gone?
// -------------------------------
// deliverLocalConfiguration := Classpaths.deliverConfig(crossTarget.value, logging = ivyLoggingLevel.value)
// deliverConfiguration := deliverLocalConfiguration.value,
// -------------------------------
publishConfiguration := {
val converter = fileConverter.value
PublishConfiguration()
.withResolverName(Classpaths.getPublishTo(publishTo.value).name)
.withArtifacts(packagedArtifacts.value.toVector.map {
case (artifact, virtualFile) => artifact -> converter.toPath(virtualFile).toFile
})
.withChecksums(checksums.value.toVector)
.withOverwrite(isSnapshot.value)
.withLogging(UpdateLogging.DownloadOnly)
},
publishLocalConfiguration := {
val converter = fileConverter.value
PublishConfiguration()
.withResolverName("local")
.withArtifacts(packagedArtifacts.value.toVector.map {
case (artifact, virtualFile) => artifact -> converter.toPath(virtualFile).toFile
})
.withChecksums(checksums.value.toVector)
.withOverwrite(isSnapshot.value)
.withLogging(UpdateLogging.DownloadOnly)
},
publishM2Configuration := {
val converter = fileConverter.value
PublishConfiguration()
.withResolverName(Resolver.mavenLocal.name)
.withArtifacts(packagedArtifacts.value.toVector.map {
case (artifact, virtualFile) => artifact -> converter.toPath(virtualFile).toFile
})
.withChecksums(checksums.value.toVector)
.withOverwrite(isSnapshot.value)
.withLogging(UpdateLogging.DownloadOnly)
}
)
) ++ addPackage(config, packageTask, extension, classifier) ++ addResolver(config)

/**
* sbt looks in the `otherResolvers` setting for resolvers defined in `publishTo`. If a user scopes `publishTo`,
* e.g.
*
* {{{
* // publish the rpm to the target folder
* Rpm / publishTo := Some(Resolver.file("target-resolver", target.value / "rpm-repo" ))
* }}}
*
* then the resolver must also be present in `otherResolvers`.
*
* @param config
* the ivy configuration to look for resolvers
*/
private def addResolver(config: Configuration): Seq[Setting[?]] =
Seq(otherResolvers ++= (config / publishTo).value.toSeq)
}
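
Usage stays the same as in the plugin's existing documentation; the difference is that the packaging tasks passed in are now expected to be TaskKey[CompatFile]. A short sketch of the established pattern in a build.sbt (the Universal packaging tasks are the usual ones from sbt-native-packager):

    import com.typesafe.sbt.packager.SettingsHelper

    // publish the universal zip as the module's deployable artifact
    SettingsHelper.makeDeploymentSettings(Universal, Universal / packageBin, "zip")

    // optionally attach the tgz as an additional artifact of the same module
    SettingsHelper.addPackage(Universal, Universal / packageZipTarball, "tgz")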