Mirror of https://github.com/fluencelabs/aqua.git (synced 2025-03-15 11:40:50 +00:00)
Commit 621e06dd9c (parent 835a7f7672)

aqua-src/import-empty.aqua (new file, +9)
@@ -0,0 +1,9 @@
+-- import.aqua
+module Import.Test
+import foobar from "export.aqua"
+
+use foo as f from "export.aqua" as Exp
+
+use "export.aqua"
+
+export foobar as barfoo

@@ -1,7 +1,5 @@
 package aqua.backend

 object Version {
-
-  // TODO: get version for JS compiler
-  lazy val version = "Unknown (JS)"
+  lazy val version = BuildInfo.version
 }

@@ -2,7 +2,5 @@ package aqua.backend

 object Version {

-  lazy val version = Option(getClass.getPackage.getImplementationVersion)
-    .filter(_.nonEmpty)
-    .getOrElse("Unknown")
+  lazy val version = BuildInfo.version
 }
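
The two Version hunks above are the JS and JVM sides of the cross-built backend (inferred from the removed lines: the JS side was hardcoded, the JVM side read the jar manifest). The manifest lookup only works in a packaged artifact; under sbt or in tests it returns null and the old code degraded to "Unknown". A sketch of the removed behavior, not part of the commit:

object ManifestVersion {
  // Implementation-Version comes from META-INF/MANIFEST.MF of the enclosing
  // jar, so outside a packaged jar it is null and the fallback fires.
  lazy val version: String =
    Option(getClass.getPackage.getImplementationVersion)
      .filter(_.nonEmpty)
      .getOrElse("Unknown")
}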

@@ -146,6 +146,11 @@ lazy val backend = crossProject(JVMPlatform, JSPlatform)
   .crossType(CrossType.Pure)
   .in(file("backend"))
   .settings(commons: _*)
+  .enablePlugins(BuildInfoPlugin)
+  .settings(
+    buildInfoKeys := Seq[BuildInfoKey](version),
+    buildInfoPackage := "aqua.backend"
+  )
   .dependsOn(transform)

 lazy val `backend-air` = crossProject(JVMPlatform, JSPlatform)
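
With BuildInfoPlugin enabled, sbt-buildinfo generates a BuildInfo object at compile time from the listed keys; that is what both Version objects now read. Under these settings the generated source has roughly this shape (a sketch of the plugin's documented behavior, not the verbatim generated file):

package aqua.backend

case object BuildInfo {
  // Baked in from the sbt `version` setting at compile time;
  // "0.0.0-example" is a placeholder, not a value from the commit.
  val version: String = "0.0.0-example"
}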

@@ -12,8 +12,9 @@ object Test extends IOApp.Simple {
   implicit val aio: AquaIO[IO] = new AquaFilesIO[IO]

   override def run: IO[Unit] =
-    IO.println("Start ms: " + System.currentTimeMillis()) *>
-      AquaPathCompiler
+    for {
+      start <- IO(System.currentTimeMillis())
+      _ <- AquaPathCompiler
         .compileFilesTo[IO](
           Path("./aqua-src"),
           List(Path("./aqua")),

@@ -26,6 +27,8 @@ object Test extends IOApp.Simple {
           errs.map(System.err.println): Unit
         case Validated.Valid(res) =>
           res.map(println): Unit
-      } <* IO.println("End ms : " + System.currentTimeMillis())
+      }
+      _ <- IO.println("Compilation ends in : " + (System.currentTimeMillis() - start) + " ms")
+    } yield ()

 }
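
The two Test hunks replace the old `*>` / `<*` bracketing, which printed two raw timestamps, with a for-comprehension that threads the start time through and prints the elapsed duration. The same pattern in self-contained form (cats-effect 3, IO.sleep as a stand-in for the compilation step):

import cats.effect.{IO, IOApp}
import scala.concurrent.duration._

object TimingDemo extends IOApp.Simple {
  // Capture the start, run the work, report elapsed milliseconds.
  override def run: IO[Unit] =
    for {
      start <- IO(System.currentTimeMillis())
      _ <- IO.sleep(100.millis) // stand-in for AquaPathCompiler.compileFilesTo
      _ <- IO.println("Compilation ends in : " + (System.currentTimeMillis() - start) + " ms")
    } yield ()
}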

@@ -2,14 +2,16 @@ package aqua.parser

 import cats.data.{Validated, ValidatedNec}
 import aqua.parser.Ast
+import aqua.parser.Ast.Tree
 import aqua.parser.ParserError
 import aqua.parser.LexerError
 import aqua.parser.expr.RootExpr
 import aqua.parser.head.HeadExpr
+import aqua.parser.lexer.Token
 import aqua.parser.lift.{FileSpan, LiftParser, Span}
 import cats.{Comonad, Eval, ~>}
 import cats.parse.LocationMap
-import cats.parse.Parser0 as P0
+import cats.parse.{Parser as P, Parser0 as P0}
 import cats.Id
 import aqua.parser.lift.LiftParser.LiftErrorOps

@@ -22,7 +24,7 @@ object Parser {
   lazy val idParser = parserSchema[Id]()

   def parserSchema[S[_] : LiftParser : Comonad](): P0[ValidatedNec[ParserError[S], Ast[S]]] =
-    (HeadExpr.ast[S].with1 ~ RootExpr.ast[S]()).map { case (head, bodyMaybe) =>
+    (HeadExpr.ast[S] ~ RootExpr.ast0[S]()).map { case (head, bodyMaybe) =>
       bodyMaybe.map(Ast(head, _))
     }
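
RootExpr.ast0 is a Parser0, i.e. it may succeed without consuming any input, so the .with1 on HeadExpr.ast is dropped: combining two Parser0 values with ~ yields a Parser0, matching parserSchema's P0 result type. A small illustration of the Parser/Parser0 split in cats-parse (hypothetical parsers, not from this codebase):

import cats.parse.{Parser => P, Parser0 => P0}

object Parser0Demo {
  // A Parser must consume at least one character; a Parser0 may match "".
  val word: P[String] = P.charsWhile(_.isLetter)
  val maybeWord: P0[Option[String]] = word.?

  // maybeWord.parseAll("")    == Right(None)
  // maybeWord.parseAll("abc") == Right(Some("abc"))
}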

@@ -2,13 +2,13 @@ package aqua.parser.expr

 import aqua.parser.Ast.Tree
 import aqua.parser.lexer.Token
-import aqua.parser.lexer.Token._
+import aqua.parser.lexer.Token.*
 import aqua.parser.lift.LiftParser
-import aqua.parser.lift.LiftParser._
+import aqua.parser.lift.LiftParser.*
 import aqua.parser.{Expr, ParserError}
-import cats.data.{Chain, NonEmptyChain, Validated, ValidatedNec}
+import cats.data.{Chain, NonEmptyChain, NonEmptyList, Validated, ValidatedNec}
 import cats.free.Cofree
-import cats.parse.{Parser => P}
+import cats.parse.{Parser0 as P0, Parser as P}
 import cats.{Comonad, Eval}
 import cats.~>

@@ -23,16 +23,38 @@ object RootExpr extends Expr.Companion {
   def validChildren: List[Expr.Lexem] =
     ServiceExpr :: AliasExpr :: DataStructExpr :: ConstantExpr :: FuncExpr :: Nil

+  private def gatherResults[F[_]: LiftParser: Comonad](results: NonEmptyList[ValidatedNec[ParserError[F], Tree[F]]]): (Chain[ParserError[F]], Chain[Tree[F]]) = {
+    results.foldLeft[(Chain[ParserError[F]], Chain[Tree[F]])](Chain.empty -> Chain.empty) {
+      case ((errs, trees), Validated.Valid(tree)) => (errs, trees :+ tree)
+      case ((errs, trees), Validated.Invalid(err)) => (errs ++ err.toChain, trees)
+    }
+  }
+
+  private def linesParser[F[_]: LiftParser: Comonad](): P[NonEmptyList[ValidatedNec[ParserError[F], Tree[F]]]] =
+    P.repSep(
+      P.oneOf(RootExpr.validChildren.map(_.ast[F]())),
+      ` \n+`
+    ).surroundedBy(` \n+`.?)
+
+  private def rootToken[F[_]: LiftParser: Comonad]: P0[Token[F]] =
+    P.unit.lift0.map(Token.lift[F, Unit](_))
+
+  private def parserSchema[F[_]: LiftParser: Comonad](): P[(Token[F], (Chain[ParserError[F]], Chain[Tree[F]]))] =
+    rootToken.with1 ~
+      linesParser().map(l => gatherResults(l))
+
+  def empty[F[_] : LiftParser : Comonad](): P0[ValidatedNec[ParserError[F], Tree[F]]] =
+    (rootToken <* (Token.` \n*` *> Token.` `.? *> P.end))
+      .map(point => Validated.validNec(Cofree(RootExpr[F](point), Eval.now(Chain.empty))))
+
+  // Can handle an empty body
+  def ast0[F[_]: LiftParser: Comonad](): P0[ValidatedNec[ParserError[F], Tree[F]]] =
+    // `empty` goes first; if the source is not empty it backtracks, so errors are still reported by `ast`
+    empty().backtrack | ast()
+
   override def ast[F[_]: LiftParser: Comonad](): P[ValidatedNec[ParserError[F], Tree[F]]] =
-    (P.unit.lift0.map(Token.lift[F, Unit](_)).with1 ~
-      P.repSep(
-        P.oneOf(RootExpr.validChildren.map(_.ast[F]())),
-        ` \n+`
-      ).surroundedBy(` \n+`.?)
-        .map(_.foldLeft[(Chain[ParserError[F]], Chain[Tree[F]])](Chain.empty -> Chain.empty) {
-          case ((errs, trees), Validated.Valid(tree)) => (errs, trees :+ tree)
-          case ((errs, trees), Validated.Invalid(err)) => (errs ++ err.toChain, trees)
-        })).map { case (point, (errs, trees)) =>
+    parserSchema()
+      .map { case (point, (errs, trees)) =>
       NonEmptyChain
         .fromChain(errs)
         .fold[ValidatedNec[ParserError[F], Tree[F]]](
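
empty() accepts a source whose body is blank after the header: it still yields a well-formed tree, a single RootExpr head node with no children, and ast0 tries it first with backtrack so non-empty sources fall through to ast(). A minimal sketch of that one-node shape (a stand-in String payload instead of the real RootExpr[F]):

import cats.Eval
import cats.data.Chain
import cats.free.Cofree

object EmptyTreeDemo {
  // The parser's Tree is a Cofree whose children live in a Chain.
  type Tree[A] = Cofree[Chain, A]

  // An empty file parses to a head with no children, mirroring
  // Cofree(RootExpr[F](point), Eval.now(Chain.empty)) in the hunk above.
  val emptyTree: Tree[String] = Cofree("root", Eval.now(Chain.empty))
}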

@@ -71,6 +71,7 @@ object Token {
     (` `.?.void *> (`--` *> P.charsWhile0(_ != '\n')).?.void).with1 *> `\n`

   val ` \n+` : P[Unit] = P.repAs[Unit, Unit](` \n`.backtrack, 1)(Accumulator0.unitAccumulator0)
+  val ` \n*` : P0[Unit] = P.repAs0[Unit, Unit](` \n`.backtrack)(Accumulator0.unitAccumulator0)
   val ` : \n+` : P[Unit] = ` `.?.with1 *> `:` *> ` \n+`
   val `,` : P[Unit] = P.char(',') <* ` `.?
   val `.` : P[Unit] = P.char('.')
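
` \n*` is the zero-or-more counterpart of ` \n+`: repAs0 yields a Parser0, so it also succeeds on zero blank/comment lines, which RootExpr.empty needs in order to match a file that ends right after the header. The same pair in simplified form (a bare '\n' stands in for the real ` \n` token):

import cats.parse.{Accumulator0, Parser => P, Parser0 => P0}

object RepeatDemo {
  val nl: P[Unit] = P.char('\n')

  // One or more repetitions: still a Parser, like ` \n+`.
  val nlPlus: P[Unit] = P.repAs[Unit, Unit](nl.backtrack, 1)(Accumulator0.unitAccumulator0)

  // Zero or more repetitions: a Parser0, like the new ` \n*`.
  val nlStar: P0[Unit] = P.repAs0[Unit, Unit](nl.backtrack)(Accumulator0.unitAccumulator0)
}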

@@ -1,3 +1,4 @@
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.15.0")
 addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.7.0")
 addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.1.0")
+addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0")

@@ -1,26 +1,21 @@
 package aqua.semantics

 import aqua.model.func.raw.FuncOp
-import aqua.model.{AquaContext, Model, ScriptModel}
+import aqua.model.{AquaContext, EmptyModel, Model, ScriptModel}
 import aqua.parser.lexer.Token
 import aqua.parser.{Ast, Expr}
 import aqua.semantics.rules.ReportError
-import aqua.semantics.rules.abilities.{
-  AbilitiesAlgebra,
-  AbilitiesInterpreter,
-  AbilitiesState,
-  AbilityOp
-}
+import aqua.semantics.rules.abilities.{AbilitiesAlgebra, AbilitiesInterpreter, AbilitiesState, AbilityOp}
 import aqua.semantics.rules.names.{NameOp, NamesAlgebra, NamesInterpreter, NamesState}
 import aqua.semantics.rules.types.{TypeOp, TypesAlgebra, TypesInterpreter, TypesState}
 import cats.Eval
 import cats.arrow.FunctionK
 import cats.data.Validated.{Invalid, Valid}
-import cats.data._
+import cats.data.*
 import cats.free.Free
 import cats.kernel.Monoid
-import cats.syntax.apply._
-import cats.syntax.semigroup._
+import cats.syntax.apply.*
+import cats.syntax.semigroup.*
 import monocle.Lens
 import monocle.macros.GenLens
 import scribe.Logging

@@ -96,7 +91,11 @@ object Semantics extends Logging {
         NonEmptyChain
           .fromChain(state.errors)
           .fold[ValidatedNec[SemanticError[S], AquaContext]](Valid(ctx))(Invalid(_))
-      case (state, _) =>
+      case (state, _: EmptyModel) =>
+        NonEmptyChain
+          .fromChain(state.errors)
+          .fold[ValidatedNec[SemanticError[S], AquaContext]](Valid(init))(Invalid(_))
+      case (state, m) =>
         NonEmptyChain
           .fromChain(state.errors)
           .map(Invalid(_))
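
The fix is carried by match order: Scala tries case clauses top to bottom, so matching _: EmptyModel before the catch-all lets an empty but valid file fold its (absent) errors against the initial context, Valid(init), instead of landing in the error branch. A stripped-down sketch with stand-in types (the real ones live in aqua.model):

object MatchOrderDemo {
  trait Model
  final case class ScriptModel(parts: List[String]) extends Model
  final case class EmptyModel(log: String) extends Model

  // The EmptyModel branch must precede the catch-all to take effect.
  def finish(m: Model): String = m match {
    case s: ScriptModel => s"context built from ${s.parts.size} parts"
    case _: EmptyModel => "no declarations: keep the initial context" // new branch
    case other => s"unexpected model: $other" // old catch-all
  }
}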
|
Loading…
x
Reference in New Issue
Block a user