Posted to commits@daffodil.apache.org by gi...@apache.org on 2022/07/11 00:12:17 UTC

[daffodil-vscode] branch update/scalafmt-core-2.7.5 created (now 20d200e)

This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a change to branch update/scalafmt-core-2.7.5
in repository https://gitbox.apache.org/repos/asf/daffodil-vscode.git


      at 20d200e  Reformat with scalafmt 2.7.5

This branch includes the following new commits:

     new 4ac203d  Update scalafmt-core to 2.7.5
     new 20d200e  Reformat with scalafmt 2.7.5

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.



[daffodil-vscode] 02/02: Reformat with scalafmt 2.7.5

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch update/scalafmt-core-2.7.5
in repository https://gitbox.apache.org/repos/asf/daffodil-vscode.git

commit 20d200e22ca411def48dc80b521b9c96f159b034
Author: github-actions[bot] <41...@users.noreply.github.com>
AuthorDate: Mon Jul 11 00:12:13 2022 +0000

    Reformat with scalafmt 2.7.5
---
 build.sbt                                          |  3 +-
 project/Rat.scala                                  |  3 +-
 .../Compiler.scala                                 |  9 +-
 .../org.apache.daffodil.debugger.dap/DAPodil.scala | 24 +++---
 .../org.apache.daffodil.debugger.dap/Parse.scala   | 97 ++++++++++++----------
 5 files changed, 72 insertions(+), 64 deletions(-)

diff --git a/build.sbt b/build.sbt
index b0a64b8..448baff 100644
--- a/build.sbt
+++ b/build.sbt
@@ -20,7 +20,7 @@ import play.api.libs.json._
 lazy val packageData = Json.parse(scala.io.Source.fromFile("./package.json").mkString).as[JsObject]
 lazy val daffodilVer = packageData("daffodilVersion").as[String]
 
-lazy val commonSettings = {
+lazy val commonSettings =
   Seq(
     version := {
       val versionRegex = raw"""  "version": "(.*)",""".r
@@ -46,7 +46,6 @@ lazy val commonSettings = {
     scalacOptions --= Seq("-Xcheckinit"),
     startYear := Some(2021)
   )
-}
 
 lazy val ratSettings = Seq(
   ratLicenses := Seq(
diff --git a/project/Rat.scala b/project/Rat.scala
index ec69e68..374c72c 100644
--- a/project/Rat.scala
+++ b/project/Rat.scala
@@ -22,8 +22,7 @@ object Rat {
   lazy val excludes = Seq(
     // git files
     file(".git"),
-    /**
-      * Can't add license headers in JSON files.
+    /** Can't add license headers in JSON files.
       * Adding a license attribute breaks things in some of these fiels as well.
       */
     file("language/dfdl.json"),
diff --git a/server/core/src/main/scala/org.apache.daffodil.debugger.dap/Compiler.scala b/server/core/src/main/scala/org.apache.daffodil.debugger.dap/Compiler.scala
index 5258e98..e586965 100644
--- a/server/core/src/main/scala/org.apache.daffodil.debugger.dap/Compiler.scala
+++ b/server/core/src/main/scala/org.apache.daffodil.debugger.dap/Compiler.scala
@@ -31,11 +31,10 @@ object Compiler {
     new Compiler {
       def compile(schema: Path): IO[DataProcessor] =
         IO.blocking(
-            Daffodil
-              .compiler()
-              .compileFile(schema.toFile())
-          )
-          .ensureOr(pf => CompilationFailed(pf.getDiagnostics))(!_.isError)
+          Daffodil
+            .compiler()
+            .compileFile(schema.toFile())
+        ).ensureOr(pf => CompilationFailed(pf.getDiagnostics))(!_.isError)
           .map(_.onPath("/"))
     }
 
diff --git a/server/core/src/main/scala/org.apache.daffodil.debugger.dap/DAPodil.scala b/server/core/src/main/scala/org.apache.daffodil.debugger.dap/DAPodil.scala
index d661ae2..0c793f5 100644
--- a/server/core/src/main/scala/org.apache.daffodil.debugger.dap/DAPodil.scala
+++ b/server/core/src/main/scala/org.apache.daffodil.debugger.dap/DAPodil.scala
@@ -84,8 +84,8 @@ object DAPSession {
       dispatcher.unsafeRunSync {
         for {
           _ <- Logger[IO].info(show"R> $request")
-          _ <- requests.offer(Some(request)).recoverWith {
-            case t => Logger[IO].error(t)(show"error during handling of request $request")
+          _ <- requests.offer(Some(request)).recoverWith { case t =>
+            Logger[IO].error(t)(show"error during handling of request $request")
           }
         } yield ()
       }
@@ -194,7 +194,7 @@ class DAPodil(
           case Left(errors) =>
             state.set(DAPodil.State.FailedToLaunch(request, errors, None)) *>
               Logger[IO].warn(show"error parsing launch args: ${errors.mkString_(", ")}") *> session
-              .sendResponse(request.respondFailure(Some(show"error parsing launch args: ${errors.mkString_(", ")}")))
+                .sendResponse(request.respondFailure(Some(show"error parsing launch args: ${errors.mkString_(", ")}")))
           case Right(dbgee) =>
             for {
               launched <- hotswap.swap {
@@ -253,8 +253,8 @@ class DAPodil(
             Paths.get(args.source.path).toUri(),
             args.breakpoints.toList.map(bp => DAPodil.Line(bp.line))
           )
-          breakpoints = args.breakpoints.toList.zipWithIndex.map {
-            case (bp, i) => new Types.Breakpoint(i, true, bp.line, "")
+          breakpoints = args.breakpoints.toList.zipWithIndex.map { case (bp, i) =>
+            new Types.Breakpoint(i, true, bp.line, "")
           }
           response = request.respondSuccess(
             new Responses.SetBreakpointsResponseBody(breakpoints.asJava)
@@ -365,7 +365,9 @@ class DAPodil(
             .variables(DAPodil.VariablesReference(args.variablesReference))
             .fold(
               Logger[IO]
-                .warn(show"couldn't find variablesReference ${args.variablesReference} in stack ${data}") *> // TODO: handle better
+                .warn(
+                  show"couldn't find variablesReference ${args.variablesReference} in stack ${data}"
+                ) *> // TODO: handle better
                 session.sendResponse(request.respondFailure())
             )(variables =>
               session.sendResponse(request.respondSuccess(new Responses.VariablesResponseBody(variables.asJava)))
@@ -446,9 +448,8 @@ object DAPodil extends IOApp {
       code <- listen(serverSocket, uri)
         .iterateWhile(_.restart)
         .as(ExitCode.Success)
-        .recoverWith {
-          case _: SocketTimeoutException =>
-            Logger[IO].warn(s"timed out listening for connection on $uri, exiting").as(ExitCode.Error)
+        .recoverWith { case _: SocketTimeoutException =>
+          Logger[IO].warn(s"timed out listening for connection on $uri, exiting").as(ExitCode.Error)
         }
 
     } yield code
@@ -699,9 +700,8 @@ object DAPodil extends IOApp {
       copy(value = value + (uri.normalize -> lines))
 
     def contains(location: Location): Boolean =
-      value.exists {
-        case (uri, lines) =>
-          uri == location.uri && lines.exists(_ == location.line)
+      value.exists { case (uri, lines) =>
+        uri == location.uri && lines.exists(_ == location.line)
       }
   }
 
diff --git a/server/core/src/main/scala/org.apache.daffodil.debugger.dap/Parse.scala b/server/core/src/main/scala/org.apache.daffodil.debugger.dap/Parse.scala
index 6c84f49..2c39616 100644
--- a/server/core/src/main/scala/org.apache.daffodil.debugger.dap/Parse.scala
+++ b/server/core/src/main/scala/org.apache.daffodil.debugger.dap/Parse.scala
@@ -78,9 +78,11 @@ object Parse {
 
             val parse =
               IO.interruptible(true) {
-                  dp.parse(new InputSourceDataInputStream(data), new XMLTextInfosetOutputter(os, true)) // WARNING: parse doesn't close the OutputStream, so closed below
-                }
-                .guaranteeCase(outcome => Logger[IO].debug(s"parse finished: $outcome"))
+                dp.parse(
+                  new InputSourceDataInputStream(data),
+                  new XMLTextInfosetOutputter(os, true)
+                ) // WARNING: parse doesn't close the OutputStream, so closed below
+              }.guaranteeCase(outcome => Logger[IO].debug(s"parse finished: $outcome"))
                 .void
 
             stopper &> parse.guarantee(IO(os.close) *> done.set(true))
@@ -222,7 +224,9 @@ object Parse {
                         )
                         .toEitherNel
                     case invalidType =>
-                      Left(s"invalid 'infosetOutput.type': '$invalidType', must be 'none', 'console', or 'file'").toEitherNel
+                      Left(
+                        s"invalid 'infosetOutput.type': '$invalidType', must be 'none', 'console', or 'file'"
+                      ).toEitherNel
                   }
               }
           }
@@ -242,7 +246,9 @@ object Parse {
       state <- Resource.eval(Queue.bounded[IO, Option[DAPodil.Debugee.State]](10))
       dapEvents <- Resource.eval(Queue.bounded[IO, Option[Events.DebugEvent]](10))
       breakpoints <- Resource.eval(Breakpoints())
-      infoset <- Resource.eval(Queue.bounded[IO, Option[String]](10)) // TODO: it's a bit incongruous to have a separate channel for infoset changes, vs. streaming Parse.Event values
+      infoset <- Resource.eval(
+        Queue.bounded[IO, Option[String]](10)
+      ) // TODO: it's a bit incongruous to have a separate channel for infoset changes, vs. streaming Parse.Event values
       control <- Resource.eval(Control.stopped())
 
       latestData <- Stream.fromQueueNoneTerminated(data).holdResource(DAPodil.Data.empty)
@@ -274,8 +280,12 @@ object Parse {
           parse.run().through(Files[IO].writeAll(path))
       }
 
-      nextFrameId <- Resource.eval(Next.int.map(_.map(DAPodil.Frame.Id.apply)).flatTap(_.next())) // `.flatTap(_.next())`: ids start at 1
-      nextRef <- Resource.eval(Next.int.map(_.map(DAPodil.VariablesReference.apply)).flatTap(_.next())) // `.flatTap(_.next())`: ids start at 1
+      nextFrameId <- Resource.eval(
+        Next.int.map(_.map(DAPodil.Frame.Id.apply)).flatTap(_.next())
+      ) // `.flatTap(_.next())`: ids start at 1
+      nextRef <- Resource.eval(
+        Next.int.map(_.map(DAPodil.VariablesReference.apply)).flatTap(_.next())
+      ) // `.flatTap(_.next())`: ids start at 1
 
       // convert Parse.Event values to DAPodil.Data values
       deliverParseData = Stream
@@ -366,7 +376,11 @@ object Parse {
         Try(Paths.get(URI.create(startElement.schemaLocation.uriString)).toString())
           .fold(
             _ =>
-              new Types.Source(startElement.schemaLocation.uriString, null, 0), // there is no valid path if the location is a schema contained in a jar file; see #76.
+              new Types.Source(
+                startElement.schemaLocation.uriString,
+                null,
+                0
+              ), // there is no valid path if the location is a schema contained in a jar file; see #76.
             path => new Types.Source(path, 0)
           ),
         startElement.schemaLocation.lineNumber
@@ -454,38 +468,36 @@ object Parse {
     state.variableMapForDebugger.qnames.toList
       .groupBy(_.namespace) // TODO: handle NoNamespace or UnspecifiedNamespace as top-level?
       .toList
-      .flatTraverse {
-        case (ns, vs) =>
-          // every namespace is a DAP variable in the current scope, and links to its set of Daffodil-as-DAP variables
-          refs.next.map { ref =>
-            List(scopeRef -> List(new Types.Variable(ns.toString(), "", null, ref.value, null))) ++
-              List(
-                ref -> vs
-                  .sortBy(_.toPrettyString)
-                  .fproduct(state.variableMapForDebugger.find)
-                  .map {
-                    case (name, value) =>
-                      new Types.Variable(
-                        name.toQNameString,
-                        value
-                          .flatMap(v => Option(v.value.value).map(_.toString) orElse Some("null"))
-                          .getOrElse("???"),
-                        value
-                          .map(_.state match {
-                            case VariableDefined      => "default"
-                            case VariableRead         => "read"
-                            case VariableSet          => "set"
-                            case VariableUndefined    => "undefined"
-                            case VariableBeingDefined => "being defined"
-                            case VariableInProcess    => "in process"
-                          })
-                          .getOrElse("???"),
-                        0,
-                        null
-                      )
-                  }
-              )
-          }
+      .flatTraverse { case (ns, vs) =>
+        // every namespace is a DAP variable in the current scope, and links to its set of Daffodil-as-DAP variables
+        refs.next.map { ref =>
+          List(scopeRef -> List(new Types.Variable(ns.toString(), "", null, ref.value, null))) ++
+            List(
+              ref -> vs
+                .sortBy(_.toPrettyString)
+                .fproduct(state.variableMapForDebugger.find)
+                .map { case (name, value) =>
+                  new Types.Variable(
+                    name.toQNameString,
+                    value
+                      .flatMap(v => Option(v.value.value).map(_.toString) orElse Some("null"))
+                      .getOrElse("???"),
+                    value
+                      .map(_.state match {
+                        case VariableDefined      => "default"
+                        case VariableRead         => "read"
+                        case VariableSet          => "set"
+                        case VariableUndefined    => "undefined"
+                        case VariableBeingDefined => "being defined"
+                        case VariableInProcess    => "in process"
+                      })
+                      .getOrElse("???"),
+                    0,
+                    null
+                  )
+                }
+            )
+        }
       }
       .map { refVars =>
         val sv = refVars.foldMap(Map(_)) // combine values of map to accumulate namespaces
@@ -536,9 +548,8 @@ object Parse {
               mark.context.toString()
             )
           ),
-          pstate.mpstate.delimiters.toList.zipWithIndex.map {
-            case (delimiter, i) =>
-              Delimiter(if (i < pstate.mpstate.delimitersLocalIndexStack.top) "remote" else "local", delimiter)
+          pstate.mpstate.delimiters.toList.zipWithIndex.map { case (delimiter, i) =>
+            Delimiter(if (i < pstate.mpstate.delimitersLocalIndexStack.top) "remote" else "local", delimiter)
           }
         )
     }
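
Most of the churn in the 02/02 reformat reflects how scalafmt 2.7.5 lays out a function literal whose body is a single `case`: the `case` now sits on the same line as the opening brace, as in the `recoverWith`, `zipWithIndex.map`, and `flatTraverse` hunks above. A minimal standalone sketch of the before/after style (illustrative names and values only, not taken from the repository):

    // Illustrative only: these names and values are not from daffodil-vscode.
    object CaseLambdaStyle {
      val pairs = List("ns1" -> 1, "ns2" -> 2)

      // Older layout (roughly what scalafmt 2.3.2 produced):
      //   pairs.map {
      //     case (name, count) => s"$name=$count"
      //   }

      // Layout after reformatting with scalafmt 2.7.5:
      val rendered = pairs.map { case (name, count) =>
        s"$name=$count"
      }

      def main(args: Array[String]): Unit =
        rendered.foreach(println)
    }
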


[daffodil-vscode] 01/02: Update scalafmt-core to 2.7.5

Posted by gi...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a commit to branch update/scalafmt-core-2.7.5
in repository https://gitbox.apache.org/repos/asf/daffodil-vscode.git

commit 4ac203dccc9361d12cec71371db7133182de573f
Author: github-actions[bot] <41...@users.noreply.github.com>
AuthorDate: Mon Jul 11 00:12:06 2022 +0000

    Update scalafmt-core to 2.7.5
---
 .scalafmt.conf | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.scalafmt.conf b/.scalafmt.conf
index ecbccc1..e25303c 100644
--- a/.scalafmt.conf
+++ b/.scalafmt.conf
@@ -13,6 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-version = "2.3.2"
+version = "2.7.5"
 maxColumn = 120
 rewrite.rules = [SortImports, RedundantBraces]
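
The only configuration change is the version bump; `maxColumn` and `rewrite.rules` are unchanged. The brace removal around `commonSettings` in build.sbt (first hunk of 02/02) is likely attributable to the RedundantBraces rewrite rule already enabled here, which drops a brace block whose body is a single expression. A small sketch of that rewrite, using hypothetical settings rather than the project's own:

    // Illustrative only; the option values are hypothetical, not from build.sbt.
    object RedundantBracesExample {
      // Before the rewrite (style accepted by scalafmt 2.3.2):
      //   lazy val settings = {
      //     Seq("-deprecation", "-feature")
      //   }

      // After reformatting with RedundantBraces enabled:
      lazy val settings = Seq("-deprecation", "-feature")

      def main(args: Array[String]): Unit =
        settings.foreach(println)
    }
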