From 0e6e6e7d2210cf8ccb578bd092557334d3daebb8 Mon Sep 17 00:00:00 2001 From: frcroth Date: Tue, 18 Apr 2023 09:45:36 +0200 Subject: [PATCH] Write wkurl into annotation nml file (#6964) Co-authored-by: Philipp Otto --- CHANGELOG.unreleased.md | 3 +- app/controllers/AnnotationIOController.scala | 58 ++++++++++++------ app/models/annotation/AnnotationService.scala | 6 +- .../annotation/AnnotationUploadService.scala | 12 ++-- app/models/annotation/nml/NmlParser.scala | 8 ++- app/models/annotation/nml/NmlResults.scala | 5 +- app/models/annotation/nml/NmlWriter.scala | 9 +++ conf/messages | 12 ++-- frontend/javascripts/libs/window.ts | 4 +- .../oxalis/model/helpers/nml_helpers.ts | 3 + frontend/javascripts/test/libs/nml.spec.ts | 1 + .../test-bundle/test/libs/nml.spec.js.md | 8 +-- .../test-bundle/test/libs/nml.spec.js.snap | Bin 1169 -> 1193 bytes package.json | 7 ++- test/backend/NMLUnitTestSuite.scala | 28 ++++++--- 15 files changed, 112 insertions(+), 52 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index bd8d8b92f5..5a4659205f 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -20,7 +20,8 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Changed - Moved the view mode selection in the toolbar next to the position field. [#6949](https://github.com/scalableminds/webknossos/pull/6949) -- Redesigned welcome toast for new, anonymous users with new branding. [#6961](https://github.com/scalableminds/webknossos/pull/6961) +- Redesigned welcome toast for new, anonymous users with new branding. [#6961](https://github.com/scalableminds/webknossos/pull/6961) +- When saving annotations, the URL of the webknossos instance is stored in the resulting NML file. [#6964](https://github.com/scalableminds/webknossos/pull/6964) ### Fixed - Fixed unintended dependencies between segments of different volume layers which used the same segment id. 
Now, using the same segment id for segments in different volume layers should work without any problems. [#6960](https://github.com/scalableminds/webknossos/pull/6960) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 160ae90630..5de33ca4a9 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -43,7 +43,7 @@ import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData} -import utils.ObjectId +import utils.{ObjectId, WkConf} import scala.concurrent.{ExecutionContext, Future} @@ -62,6 +62,7 @@ class AnnotationIOController @Inject()( temporaryFileCreator: TemporaryFileCreator, annotationService: AnnotationService, analyticsService: AnalyticsService, + conf: WkConf, sil: Silhouette[WkEnv], provider: AnnotationInformationProvider, annotationUploadService: AnnotationUploadService)(implicit ec: ExecutionContext, val materializer: Materializer) @@ -102,9 +103,9 @@ Expects: val attachedFiles = request.body.files.map(f => (f.ref.path.toFile, f.filename)) val parsedFiles = annotationUploadService.extractFromFiles(attachedFiles, useZipName = true, overwritingDataSetName) - val parsedFilesWraped = + val parsedFilesWrapped = annotationUploadService.wrapOrPrefixTrees(parsedFiles.parseResults, shouldCreateGroupForEachFile) - val parseResultsFiltered: List[NmlParseResult] = parsedFilesWraped.filter(_.succeeded) + val parseResultsFiltered: List[NmlParseResult] = parsedFilesWrapped.filter(_.succeeded) if (parseResultsFiltered.isEmpty) { returnError(parsedFiles) @@ -113,13 +114,15 @@ Expects: parseSuccesses <- Fox.serialCombined(parseResultsFiltered)(r => r.toSuccessBox) name = nameForUploaded(parseResultsFiltered.map(_.fileName)) description = descriptionForNMLs(parseResultsFiltered.map(_.description)) + wkUrl = 
wkUrlsForNMLs(parseResultsFiltered.map(_.wkUrl)) _ <- assertNonEmpty(parseSuccesses) skeletonTracings = parseSuccesses.flatMap(_.skeletonTracing) // Create a list of volume layers for each uploaded (non-skeleton-only) annotation. // This is what determines the merging strategy for volume layers volumeLayersGroupedRaw = parseSuccesses.map(_.volumeLayers).filter(_.nonEmpty) dataSet <- findDataSetForUploadedAnnotations(skeletonTracings, - volumeLayersGroupedRaw.flatten.map(_.tracing)) + volumeLayersGroupedRaw.flatten.map(_.tracing), + wkUrl) volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataSet) tracingStoreClient <- tracingStoreService.clientFor(dataSet) mergedVolumeLayers <- mergeAndSaveVolumeLayers(volumeLayersGrouped, @@ -198,19 +201,31 @@ Expects: private def findDataSetForUploadedAnnotations( skeletonTracings: List[SkeletonTracing], - volumeTracings: List[VolumeTracing])(implicit mp: MessagesProvider, ctx: DBAccessContext): Fox[DataSet] = + volumeTracings: List[VolumeTracing], + wkUrl: String)(implicit mp: MessagesProvider, ctx: DBAccessContext): Fox[DataSet] = for { dataSetName <- assertAllOnSameDataSet(skeletonTracings, volumeTracings) ?~> "nml.file.differentDatasets" organizationNameOpt <- assertAllOnSameOrganization(skeletonTracings, volumeTracings) ?~> "nml.file.differentDatasets" organizationIdOpt <- Fox.runOptional(organizationNameOpt) { organizationDAO.findOneByName(_)(GlobalAccessContext).map(_._id) - } ?~> Messages("organization.notFound", organizationNameOpt.getOrElse("")) ~> NOT_FOUND + } ?~> (if (wkUrl.nonEmpty && conf.Http.uri != wkUrl) { + Messages("organization.notFound.wrongHost", organizationNameOpt.getOrElse(""), wkUrl, conf.Http.uri) + } else { Messages("organization.notFound", organizationNameOpt.getOrElse("")) }) ~> + NOT_FOUND organizationId <- Fox.fillOption(organizationIdOpt) { dataSetDAO.getOrganizationForDataSet(dataSetName)(GlobalAccessContext) } ?~> Messages("dataSet.noAccess", dataSetName) ~> 
FORBIDDEN - dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organizationId) ?~> Messages( - "dataSet.noAccess", - dataSetName) ~> FORBIDDEN + dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organizationId) ?~> (if (wkUrl.nonEmpty && conf.Http.uri != wkUrl) { + Messages( + "dataSet.noAccess.wrongHost", + dataSetName, + wkUrl, + conf.Http.uri) + } else { + Messages( + "dataSet.noAccess", + dataSetName) + }) ~> FORBIDDEN } yield dataSet private def nameForUploaded(fileNames: Seq[String]) = @@ -222,6 +237,9 @@ Expects: private def descriptionForNMLs(descriptions: Seq[Option[String]]) = if (descriptions.size == 1) descriptions.headOption.flatten.getOrElse("") else "" + private def wkUrlsForNMLs(wkUrls: Seq[Option[String]]) = + if (wkUrls.toSet.size == 1) wkUrls.headOption.flatten.getOrElse("") else "" + private def returnError(zipParseResult: NmlResults.MultiNmlParseResult)(implicit messagesProvider: MessagesProvider) = if (zipParseResult.containsFailure) { val errors = zipParseResult.parseResults.flatMap { @@ -369,6 +387,7 @@ Expects: dataSet.scale, None, organizationName, + conf.Http.uri, dataSet.name, Some(user), taskOpt) @@ -394,15 +413,18 @@ Expects: } user <- userService.findOneById(annotation._user, useCache = true) taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne) - nmlStream = nmlWriter.toNmlStream(fetchedSkeletonLayers ::: fetchedVolumeLayers, - Some(annotation), - dataset.scale, - None, - organizationName, - dataset.name, - Some(user), - taskOpt, - skipVolumeData) + nmlStream = nmlWriter.toNmlStream( + fetchedSkeletonLayers ::: fetchedVolumeLayers, + Some(annotation), + dataset.scale, + None, + organizationName, + conf.Http.uri, + dataset.name, + Some(user), + taskOpt, + skipVolumeData + ) temporaryFile = temporaryFileCreator.create() zipper = ZipIO.startZip(new BufferedOutputStream(new FileOutputStream(new File(temporaryFile.path.toString)))) _ <- zipper.addFileFromEnumerator(name + ".nml", nmlStream) diff --git 
a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 72f888e293..a66eb42db1 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -52,7 +52,7 @@ import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.iteratee.Enumerator import play.api.libs.json.{JsNull, JsObject, JsValue, Json} -import utils.ObjectId +import utils.{ObjectId, WkConf} import java.io.{BufferedOutputStream, File, FileOutputStream} import javax.inject.Inject @@ -104,7 +104,8 @@ class AnnotationService @Inject()( nmlWriter: NmlWriter, temporaryFileCreator: TemporaryFileCreator, meshDAO: MeshDAO, - meshService: MeshService + meshService: MeshService, + conf: WkConf, )(implicit ec: ExecutionContext, val materializer: Materializer) extends BoxImplicits with FoxImplicits @@ -639,6 +640,7 @@ class AnnotationService @Inject()( scaleOpt, Some(name + "_data.zip"), organizationName, + conf.Http.uri, datasetName, Some(user), taskOpt) diff --git a/app/models/annotation/AnnotationUploadService.scala b/app/models/annotation/AnnotationUploadService.scala index 90f16e81a6..29a113fbf7 100644 --- a/app/models/annotation/AnnotationUploadService.scala +++ b/app/models/annotation/AnnotationUploadService.scala @@ -38,8 +38,8 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend isTaskUpload: Boolean, basePath: Option[String] = None)(implicit m: MessagesProvider): NmlParseResult = NmlParser.parse(name, inputStream, overwritingDataSetName, isTaskUpload, basePath) match { - case Full((skeletonTracing, uploadedVolumeLayers, description)) => - NmlParseSuccess(name, skeletonTracing, uploadedVolumeLayers, description) + case Full((skeletonTracing, uploadedVolumeLayers, description, wkUrl)) => + NmlParseSuccess(name, skeletonTracing, uploadedVolumeLayers, description, wkUrl) case Failure(msg, _, chain) => 
NmlParseFailure(name, msg + chain.map(_ => formatChain(chain)).getOrElse("")) case Empty => NmlParseEmpty(name) } @@ -82,8 +82,8 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend if (parseResults.length > 1) { parseResults.map { - case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description) => - NmlParseSuccess(name, Some(renameTrees(name, skeletonTracing)), uploadedVolumeLayers, description) + case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description, wkUrl) => + NmlParseSuccess(name, Some(renameTrees(name, skeletonTracing)), uploadedVolumeLayers, description, wkUrl) case r => r } } else { @@ -104,8 +104,8 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend } parseResults.map { - case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description) => - NmlParseSuccess(name, Some(wrapTreesInGroup(name, skeletonTracing)), uploadedVolumeLayers, description) + case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description, wkUrl) => + NmlParseSuccess(name, Some(wrapTreesInGroup(name, skeletonTracing)), uploadedVolumeLayers, description, wkUrl) case r => r } } diff --git a/app/models/annotation/nml/NmlParser.scala b/app/models/annotation/nml/NmlParser.scala index 5d7b68fd16..cc94cb16d1 100755 --- a/app/models/annotation/nml/NmlParser.scala +++ b/app/models/annotation/nml/NmlParser.scala @@ -34,7 +34,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener overwritingDataSetName: Option[String], isTaskUpload: Boolean, basePath: Option[String] = None)( - implicit m: MessagesProvider): Box[(Option[SkeletonTracing], List[UploadedVolumeLayer], String)] = + implicit m: MessagesProvider): Box[(Option[SkeletonTracing], List[UploadedVolumeLayer], String, Option[String])] = try { val data = XML.load(nmlInputStream) for { @@ -52,6 +52,7 @@ object NmlParser extends LazyLogging with 
ProtoGeometryImplicits with ColorGener } yield { val dataSetName = overwritingDataSetName.getOrElse(parseDataSetName(parameters \ "experiment")) val description = parseDescription(parameters \ "experiment") + val wkUrl = parseWkUrl(parameters \ "experiment") val organizationName = if (overwritingDataSetName.isDefined) None else parseOrganizationName(parameters \ "experiment") val activeNodeId = parseActiveNode(parameters \ "activeNode") @@ -115,7 +116,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener ) ) - (skeletonTracingOpt, volumeLayers, description) + (skeletonTracingOpt, volumeLayers, description, wkUrl) } } catch { case e: org.xml.sax.SAXParseException if e.getMessage.startsWith("Premature end of file") => @@ -232,6 +233,9 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener private def parseDescription(nodes: NodeSeq): String = nodes.headOption.map(node => getSingleAttribute(node, "description")).getOrElse(DEFAULT_DESCRIPTION) + private def parseWkUrl(nodes: NodeSeq): Option[String] = + nodes.headOption.map(node => getSingleAttribute(node, "wkUrl")) + private def parseOrganizationName(nodes: NodeSeq): Option[String] = nodes.headOption.flatMap(node => getSingleAttributeOpt(node, "organization")) diff --git a/app/models/annotation/nml/NmlResults.scala b/app/models/annotation/nml/NmlResults.scala index e4fbf08966..a89b6ffac4 100644 --- a/app/models/annotation/nml/NmlResults.scala +++ b/app/models/annotation/nml/NmlResults.scala @@ -14,6 +14,7 @@ object NmlResults extends LazyLogging { def fileName: String def description: Option[String] = None + def wkUrl: Option[String] = None def succeeded: Boolean @@ -32,11 +33,13 @@ object NmlResults extends LazyLogging { case class NmlParseSuccess(fileName: String, skeletonTracing: Option[SkeletonTracing], volumeLayers: List[UploadedVolumeLayer], - _description: String) + _description: String, + _wkUrl: Option[String]) extends NmlParseResult { def succeeded 
= true override def description: Option[String] = Some(_description) + override def wkUrl: Option[String] = _wkUrl override def withName(name: String): NmlParseResult = this.copy(fileName = name) } diff --git a/app/models/annotation/nml/NmlWriter.scala b/app/models/annotation/nml/NmlWriter.scala index 9ff95f17d5..a6fa66dea2 100644 --- a/app/models/annotation/nml/NmlWriter.scala +++ b/app/models/annotation/nml/NmlWriter.scala @@ -22,6 +22,7 @@ case class NmlParameters( dataSetName: String, organizationName: String, description: Option[String], + wkUrl: String, scale: Option[Vec3Double], createdTimestamp: Long, editPosition: Vec3IntProto, @@ -40,6 +41,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { scale: Option[Vec3Double], volumeFilename: Option[String], organizationName: String, + wkUrl: String, datasetName: String, annotationOwner: Option[User], annotationTask: Option[Task], @@ -53,6 +55,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { scale, volumeFilename, organizationName, + wkUrl, datasetName, annotationOwner, annotationTask, @@ -66,6 +69,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { scale: Option[Vec3Double], volumeFilename: Option[String], organizationName: String, + wkUrl: String, datasetName: String, annotationOwner: Option[User], annotationTask: Option[Task], @@ -82,6 +86,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { volumeLayers, annotation: Option[Annotation], organizationName, + wkUrl, datasetName, scale) _ = writeParameters(parameters) @@ -103,6 +108,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { volumeLayers: List[FetchedAnnotationLayer], annotation: Option[Annotation], organizationName: String, + wkUrl: String, datasetName: String, scale: Option[Vec3Double]): Fox[NmlParameters] = for { @@ -113,6 +119,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) 
extends FoxImplicits { datasetName, organizationName, annotation.map(_.description), + wkUrl, scale, s.createdTimestamp, s.editPosition, @@ -127,6 +134,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { datasetName, organizationName, annotation.map(_.description), + wkUrl, scale, v.createdTimestamp, v.editPosition, @@ -154,6 +162,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { writer.writeAttribute("name", parameters.dataSetName) writer.writeAttribute("organization", parameters.organizationName) parameters.description.foreach(writer.writeAttribute("description", _)) + writer.writeAttribute("wkUrl", parameters.wkUrl) } Xml.withinElementSync("scale") { writer.writeAttribute("x", parameters.scale.map(_.x).getOrElse(-1).toString) diff --git a/conf/messages b/conf/messages index d3f99045ec..e060dbda0c 100644 --- a/conf/messages +++ b/conf/messages @@ -33,6 +33,7 @@ team.inUse.projects=Team is referenced by {0} projects organization.create.forbidden=You are not allowed to create a new organization organization.create.failed=Failed to create a new organization organization.notFound=Organization {0} could not be found +organization.notFound.wrongHost=Organization {0} could not be found. Please check whether you are on the correct WEBKNOSSOS instance. The uploaded file indicates {1} while this instance is {2}. organization.list.failed=Failed to retrieve list of organizations. organization.name.invalid=This organization name contains illegal characters. Please only use letters and numbers. organization.name.alreadyInUse=This name is already claimed by a different organization and not available anymore. Please choose a different name. @@ -46,7 +47,7 @@ user.notFound=User not found user.noAdmin=Access denied. Only admin users can execute this operation. user.deactivated=Your account has not been activated by an admin yet. Please contact your organization’s admin for help. 
user.noSelfDeactivate=You cannot deactivate yourself. Please contact an admin to do it for you. -user.lastAdmin=This user is the last remaining admin in your organzation. You cannot remove admin privileges from this account. +user.lastAdmin=This user is the last remaining admin in your organization. You cannot remove admin privileges from this account. user.lastOwner=Cannot deactivate the organization owner. Please talk to the WEBKNOSSOS team to transfer organization ownership. user.email.alreadyInUse=This email address is already in use @@ -67,17 +68,18 @@ oidc.disabled=OIDC is disabled oidc.configuration.invalid=OIDC configuration is invalid braintracing.new=An account on braintracing.org was created for you. You can use the same credentials as on WEBKNOSSOS to login. -braintracing.error=We could not atomatically create an account for you on braintracing.org. Please do it on your own. +braintracing.error=We could not automatically create an account for you on braintracing.org. Please do it on your own. braintracing.exists=Great, you already have an account on braintracing.org. Please double check that you have uploaded all requested information. dataSet=Dataset dataSet.notFound=Dataset {0} does not exist or could not be accessed dataSet.notFoundConsiderLogin=Dataset {0} does not exist or could not be accessed. You may need to log in. dataSet.notFoundForAnnotation=The Dataset for this annotation does not exist or could not be accessed. -dataSet.noAccess=Could not access DataSet {0}. Does your team have access? -dataSet.noAccessById=Could not access the corresponding DataSet. This is likely because you are not a member of a team that has access to it. +dataSet.noAccess=Could not access dataset {0}. Does your team have access? +dataSet.noAccess.wrongHost=Could not access dataset {0}. Please check whether you are on the correct WEBKNOSSOS instance. The uploaded file indicates {1} while this instance is {2}. 
+dataSet.noAccessById=Could not access the corresponding dataset. This is likely because you are not a member of a team that has access to it. dataSet.notImported=Dataset {0} is not imported -dataSet.name.invalid.characters=Dataset name is invalid. Please use only letters, digits, dots, underscores, hypens. +dataSet.name.invalid.characters=Dataset name is invalid. Please use only letters, digits, dots, underscores, hyphens. dataSet.name.invalid.startsWithDot=Dataset name is invalid. Please use a name that does not start with a dot. dataSet.name.invalid.lessThanThreeCharacters=Dataset name is invalid. Please use at least three characters. dataSet.name.alreadyTaken=This name is already being used by a different dataset. Please choose a different name. diff --git a/frontend/javascripts/libs/window.ts b/frontend/javascripts/libs/window.ts index 68a134086e..b512632601 100644 --- a/frontend/javascripts/libs/window.ts +++ b/frontend/javascripts/libs/window.ts @@ -32,13 +32,13 @@ export const document = const dummyLocation = { ancestorOrigins: [], hash: "", - host: "", + host: "localhost", hostname: "", href: "", origin: "", pathname: "", port: "", - protocol: "", + protocol: "http:", search: "", reload: () => {}, diff --git a/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts b/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts index 292a9fa4f5..b77757bd8f 100644 --- a/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts +++ b/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts @@ -27,6 +27,8 @@ import messages from "messages"; import * as Utils from "libs/utils"; import type { BoundingBoxType, Vector3 } from "oxalis/constants"; import Constants from "oxalis/constants"; +import { location } from "libs/window"; + // NML Defaults const DEFAULT_COLOR: Vector3 = [1, 0, 0]; const TASK_BOUNDING_BOX_COLOR: Vector3 = [0, 1, 0]; @@ -224,6 +226,7 @@ function serializeParameters( name: state.dataset.name, description: annotation.description, organization: 
state.dataset.owningOrganization, + wkUrl: `${location.protocol}//${location.host}`, }), serializeTag("scale", { x: state.dataset.dataSource.scale[0], diff --git a/frontend/javascripts/test/libs/nml.spec.ts b/frontend/javascripts/test/libs/nml.spec.ts index 656f14c958..0fc966499f 100644 --- a/frontend/javascripts/test/libs/nml.spec.ts +++ b/frontend/javascripts/test/libs/nml.spec.ts @@ -10,6 +10,7 @@ import EdgeCollection from "oxalis/model/edge_collection"; import { findGroup } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; import mock from "mock-require"; import test from "ava"; + const TIMESTAMP = 123456789; const buildInfo = { webknossos: { diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/libs/nml.spec.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/libs/nml.spec.js.md index ca81a97bb7..0f46424d07 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/libs/nml.spec.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/libs/nml.spec.js.md @@ -1,8 +1,8 @@ -# Snapshot report for `public/test-bundle/test/libs/nml.spec.js` +# Snapshot report for `public-test/test-bundle/test/libs/nml.spec.js` The actual snapshot is saved in `nml.spec.js.snap`. -Generated by [AVA](https://ava.li). +Generated by [AVA](https://avajs.dev). ## nml @@ -14,7 +14,7 @@ Generated by [AVA](https://ava.li). ␊ - ␊ +