Merge pull request #1520 from adpi2/fix-1493
Fix indexing sbt 2.x plugin from artifact reference
adpi2 authored Jan 3, 2025
2 parents f2c5c80 + 9ad37b5 commit 3b6724c
Showing 3 changed files with 46 additions and 42 deletions.
@@ -76,9 +76,10 @@ class MavenCentralClientImpl()(implicit val system: ActorSystem)
   }
 
   override def getPomFile(ref: Artifact.Reference): Future[Option[(String, Instant)]] = {
+    val pomUri = getPomUri(ref)
     val future = for {
-      response <- getHttpResponse(ref)
-      res <- getPomFileWithLastModifiedTime(response)
+      response <- queueRequest(HttpRequest(uri = pomUri))
+      res <- getPomFileWithLastModifiedTime(response, pomUri)
     } yield res
     future.recoverWith {
       case NonFatal(exception) =>
@@ -87,22 +88,22 @@ class MavenCentralClientImpl()(implicit val system: ActorSystem)
     }
   }
 
-  private def getPomFileWithLastModifiedTime(response: HttpResponse): Future[Option[(String, Instant)]] =
+  private def getPomFileWithLastModifiedTime(response: HttpResponse, uri: String): Future[Option[(String, Instant)]] =
     response match {
       case _ @HttpResponse(StatusCodes.OK, headers: Seq[model.HttpHeader], entity, _) =>
         val lastModified = headers.find(_.is("last-modified")).map(header => parseDate(header.value))
         Unmarshaller
           .stringUnmarshaller(entity)
           .map(page => lastModified.map(page -> _))
-      case _ => Future.successful(None)
+      case _ =>
+        logger.warn(s"Cannot get $uri: ${response.status}")
+        Future.successful(None)
     }
 
-  private def getHttpResponse(ref: Artifact.Reference): Future[HttpResponse] = {
+  private def getPomUri(ref: Artifact.Reference): String = {
     val groupIdUrl: String = ref.groupId.value.replace('.', '/')
-    val pomUrl = getPomFileName(ref.artifactId, ref.version)
-    val uri = s"$baseUri/${groupIdUrl}/${ref.artifactId.value}/${ref.version.value}/$pomUrl"
-    val request = HttpRequest(uri = uri)
-    queueRequest(request)
+    val pomFileName = getPomFileName(ref.artifactId, ref.version)
+    s"$baseUri/${groupIdUrl}/${ref.artifactId.value}/${ref.version.value}/$pomFileName"
   }
 
   // Wed, 04 Nov 2020 23:36:02 GMT
@@ -111,7 +112,7 @@ class MavenCentralClientImpl()(implicit val system: ActorSystem)
 
   private def getPomFileName(artifactId: Artifact.ArtifactId, version: Version): String =
     artifactId.binaryVersion.platform match {
-      case SbtPlugin(_) => s"${artifactId.name.value}-${version.value}.pom"
-      case _ => s"${artifactId.value}-${version.value}.pom"
+      case SbtPlugin(Version.Major(1) | Version.Minor(0, 13)) => s"${artifactId.name.value}-${version.value}.pom"
+      case _ => s"${artifactId.value}-${version.value}.pom"
     }
 }
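
Note on the last hunk above, which is the core of the fix: the legacy POM file name (the plugin name without its binary-version suffix) is now used only for sbt 0.13 and 1.x plugins, while sbt 2.x plugins fall through to the default case and keep the full artifact id, presumably because they are published under the standard Maven layout. A self-contained sketch of that naming rule follows; the artifact ids, the `_sbt2_3` suffix, and the helper names are hypothetical examples, not taken from the Scaladex codebase.

object PomNameSketch extends App {
  // The directory path always uses the full artifact id; only the POM file name differs.
  // Legacy sbt plugins (0.13 and 1.x) drop the cross-version suffix in the file name,
  // whereas sbt 2.x plugins (and everything else) keep it.
  def pomFileName(fullArtifactId: String, baseName: String, version: String, legacySbtPlugin: Boolean): String =
    if (legacySbtPlugin) s"$baseName-$version.pom"
    else s"$fullArtifactId-$version.pom"

  // sbt 1.x plugin: .../sbt-example_2.12_1.0/1.2.3/sbt-example-1.2.3.pom
  println(pomFileName("sbt-example_2.12_1.0", "sbt-example", "1.2.3", legacySbtPlugin = true))
  // sbt 2.x plugin: .../sbt-example_sbt2_3/1.2.3/sbt-example_sbt2_3-1.2.3.pom
  println(pomFileName("sbt-example_sbt2_3", "sbt-example", "1.2.3", legacySbtPlugin = false))
}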
@@ -44,7 +44,7 @@ class MavenCentralService(
       missingVersions = versions.map(Artifact.Reference(groupId, artifactId, _)).filterNot(knownRefs)
       _ = if (missingVersions.nonEmpty)
         logger.warn(s"${missingVersions.size} artifacts are missing for ${groupId.value}:${artifactId.value}")
-      missingPomFiles <- missingVersions.map(ref => mavenCentralClient.getPomFile(ref).map(_.map(ref -> _))).sequence
+      missingPomFiles <- missingVersions.mapSync(ref => mavenCentralClient.getPomFile(ref).map(_.map(ref -> _)))
       publishResult <- missingPomFiles.flatten.mapSync {
         case (mavenRef, (pomFile, creationDate)) =>
           publishProcess.publishPom(mavenRef.toString(), pomFile, creationDate, None)
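
The service-level change replaces `.map(...).sequence`, which starts every `getPomFile` request at once, with the project's `mapSync` helper, so the missing POM files are fetched one at a time. Assuming `mapSync` is the usual sequential traversal over Futures, a minimal sketch of that pattern (the name and signature here are illustrative, not the actual Scaladex extension):

import scala.concurrent.{ExecutionContext, Future}

object MapSyncSketch {
  // Sequential traversal: each Future is created only after the previous one completes,
  // unlike `.map(f).sequence`, which kicks off all of them concurrently.
  def mapSync[A, B](items: Seq[A])(f: A => Future[B])(implicit ec: ExecutionContext): Future[Seq[B]] =
    items.foldLeft(Future.successful(Vector.empty[B])) { (acc, item) =>
      for {
        done <- acc
        next <- f(item)
      } yield done :+ next
    }
}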
63 changes: 33 additions & 30 deletions in project/Postgres.scala
@@ -24,40 +24,43 @@ object Postgres extends AutoPlugin {
 
   import autoImport._
 
-  def settings(config: Configuration, defaultPort: Int, database: String): Seq[Setting[_]] = Seq(
-    config / startPostgres := {
-      import sbt.util.CacheImplicits._
-      val dataFolder = Keys.baseDirectory.value / s".postgresql-${config.name}"
-      val streams = Keys.streams.value
-      val logger = streams.log
+  def settings(config: Configuration, defaultPort: Int, database: String): Seq[Setting[_]] = inConfig(config)(
+    Seq(
+      startPostgres := {
+        import sbt.util.CacheImplicits._
+        val dataFolder = Keys.baseDirectory.value / s".postgresql-${config.name}"
+        val streams = Keys.streams.value
+        val logger = streams.log
 
-      if (canConnect(defaultPort, database)) {
-        logger.info(s"Postgres is available on port $defaultPort")
-        defaultPort
-      } else {
-        // we cache the container to reuse it after a reload
-        val store = streams.cacheStoreFactory.make("container")
-        val tracker = util.Tracked.lastOutput[Unit, (String, Int)](store) {
-          case (_, None) =>
-            startContainer(dataFolder, database, logger)
-          case (_, Some((containerId, port))) =>
-            if (canConnect(port, database)) {
-              logger.info(s"Postgres container already started on port $port")
-              (containerId, port)
-            } else {
-              Docker.kill(containerId)
-              startContainer(dataFolder, database, logger)
-            }
-        }
-        tracker(())._2
-      }
-    },
-    Keys.clean := {
-      Keys.clean.value
-      val dataFolder = Keys.baseDirectory.value / s".postgresql-${config.name}"
-      containers.get(dataFolder.toPath).foreach(_.close())
-      containers.remove(dataFolder.toPath)
-    }
+        if (canConnect(defaultPort, database)) {
+          logger.info(s"Postgres is available on port $defaultPort")
+          defaultPort
+        } else {
+          // we cache the container to reuse it after a reload
+          val store = streams.cacheStoreFactory.make(s"container-${config.id}")
+          val tracker = util.Tracked.lastOutput[Unit, (String, Int)](store) {
+            case (_, None) =>
+              startContainer(dataFolder, database, logger)
+            case (_, Some((containerId, port))) =>
+              if (canConnect(port, database)) {
+                logger.info(s"Postgres container already started on port $port")
+                (containerId, port)
+              } else {
+                logger.info(s"Cannot connect to Postgres on port $port")
+                Docker.kill(containerId)
+                startContainer(dataFolder, database, logger)
+              }
+          }
+          tracker(())._2
+        }
+      },
+      Keys.clean := {
+        Keys.clean.value
+        val dataFolder = Keys.baseDirectory.value / s".postgresql-${config.name}"
+        containers.get(dataFolder.toPath).foreach(_.close())
+        containers.remove(dataFolder.toPath)
+      }
+    )
   )
 
   private def startContainer(dataFolder: File, database: String, logger: Logger): (String, Int) = {
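
In project/Postgres.scala the settings are now wrapped in `inConfig(config)(...)`, which rescopes every setting in the sequence to that configuration instead of prefixing each key with `config /`; the cached container entry is also keyed by `container-${config.id}`, so two configurations no longer share a store entry, and a failed connection to a cached container is logged before it is killed and restarted. A rough build.sbt sketch of the scoping idea, with a placeholder key and body:

import sbt._
import sbt.Keys._

// Hypothetical task key; the real plugin defines startPostgres in its autoImport.
lazy val startPostgres = taskKey[Int]("Start (or reuse) a Postgres instance and return its port")

lazy val example = (project in file("."))
  .settings(
    // Equivalent to writing `Test / startPostgres := ...` by hand for each setting in the Seq.
    inConfig(Test)(
      Seq(
        startPostgres := 5432 // placeholder body; the real task starts or reuses a Docker container
      )
    )
  )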
