Mirror of https://github.com/fluencelabs/aqua.git, synced 2025-03-15 11:40:50 +00:00
Use global node_modules path to find aqua dependencies (#450)
This commit is contained in: parent 2fa3a09548, commit 04e68b553f
build.sbt | 12
@@ -6,13 +6,13 @@ val baseAquaVersion = settingKey[String]("base aqua version")
 
 val catsV = "2.7.0"
 val catsParseV = "0.3.6"
-val monocleV = "3.0.0-M6"
+val monocleV = "3.1.0"
 val scalaTestV = "3.2.10"
-val fs2V = "3.2.3"
-val catsEffectV = "3.3.1"
+val fs2V = "3.2.5"
+val catsEffectV = "3.3.7"
 val declineV = "2.2.0"
 val circeVersion = "0.14.1"
-val scribeV = "3.6.3"
+val scribeV = "3.6.6"
 
 name := "aqua-hll"
 
@@ -148,8 +148,8 @@ lazy val semantics = crossProject(JVMPlatform, JSPlatform)
   .settings(commons: _*)
   .settings(
     libraryDependencies ++= Seq(
-      "com.github.julien-truffaut" %%% "monocle-core" % monocleV,
-      "com.github.julien-truffaut" %%% "monocle-macro" % monocleV
+      "dev.optics" %%% "monocle-core" % monocleV,
+      "dev.optics" %%% "monocle-macro" % monocleV
     )
   )
   .dependsOn(raw, parser)
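Note on the Monocle change above: from Monocle 3.x the artifacts are published under the dev.optics organization, while the monocle.* packages and API stay the same. A minimal sketch assuming the new coordinates are on the classpath (the Person type is made up for illustration, it is not from this repository):

import monocle.Lens

// Hypothetical model, only to show that lens code compiles unchanged against the dev.optics artifacts.
final case class Person(name: String, age: Int)

val nameLens: Lens[Person, String] =
  Lens[Person, String](_.name)(n => p => p.copy(name = n))

val renamed: Person = nameLens.replace("Aqua")(Person("placeholder", 1))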
@@ -57,18 +57,26 @@ object PlatformOpts extends Logging {
   }
 
   // get path to node modules if there is `aqua-lib` module with `builtin.aqua` in it
-  def getGlobalNodeModulePath: Option[Path] = {
+  def getGlobalNodeModulePath: List[Path] = {
     val meta = Meta.metaUrl
     val req = Module.createRequire(meta)
     Try {
       // this can throw an error
       val pathStr = req.resolve("@fluencelabs/aqua-lib/builtin.aqua").toString
       // hack
-      Path(pathStr).parent.map(_.resolve("../.."))
+      val globalAquaPath = Path(pathStr).parent.flatMap(_.parent.flatMap(_.parent))
+
+      // Also hack. If we found installed `aqua-lib`, it should be in `node_modules` global path.
+      // In global `node_modules` could be installed aqua libs and we must use them,
+      // if they were imported in aqua files
+      val globalNodeModulesPath =
+        globalAquaPath.flatMap(_.parent.flatMap(_.parent.flatMap(_.parent)))
+
+      globalAquaPath.toList ++ globalNodeModulesPath.toList
     }.getOrElse {
       // we don't care about path if there is no builtins, but must write an error
       logger.error("Unexpected. Cannot find 'aqua-lib' dependency with `builtin.aqua` in it")
-      None
+      Nil
     }
 
   }
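A standalone sketch of the parent-walking in the hunk above, not taken from this commit: the install prefix and directory layout below are assumptions, and java.nio stands in for the platform Path type. When the aqua CLI is installed globally, builtin.aqua resolves inside the CLI package's own node_modules; walking three parents up gives that node_modules, and three more give the global node_modules where other user-installed aqua libraries may live.

import java.nio.file.{Path, Paths}

object GlobalNodeModulesSketch {
  // Hypothetical resolved location of builtin.aqua under a global npm prefix (assumption).
  val resolved: Path = Paths.get(
    "/usr/local/lib/node_modules/@fluencelabs/aqua/node_modules/@fluencelabs/aqua-lib/builtin.aqua"
  )

  // Walk n parents up, stopping at the filesystem root.
  def parentN(p: Path, n: Int): Option[Path] =
    (1 to n).foldLeft(Option(p))((acc, _) => acc.flatMap(x => Option(x.getParent)))

  def main(args: Array[String]): Unit = {
    // Three parents up: the node_modules that contains @fluencelabs/aqua-lib.
    val aquaLibRoot = parentN(resolved, 3)
    // Three more parents up: the global node_modules root.
    val globalNodeModules = aquaLibRoot.flatMap(parentN(_, 3))
    aquaLibRoot.foreach(println)       // .../@fluencelabs/aqua/node_modules
    globalNodeModules.foreach(println) // /usr/local/lib/node_modules
  }
}

Both candidates are returned as a List, so downstream code can try each directory as an import root.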
@@ -30,9 +30,9 @@ object Prelude extends Logging {
 
     nodeImportF.map { nodeImport =>
       val imports =
-        nodeImport.toList ++ PlatformOpts.getGlobalNodeModulePath.toList ++ (if (withRunImports)
+        nodeImport.toList ++ PlatformOpts.getGlobalNodeModulePath ++ (if (withRunImports)
           runImports
         else Nil)
 
       new Prelude(imports)
     }
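Since getGlobalNodeModulePath now returns List[Path] instead of Option[Path], the concatenation above drops its .toList call. A type-level sketch with placeholder values (nodeImport, runImports and withRunImports are stand-ins here, not the real definitions from Prelude):

import java.nio.file.Path

val nodeImport: Option[Path] = None            // optional single import root
val globalNodeModulePaths: List[Path] = Nil    // what getGlobalNodeModulePath now yields
val runImports: List[Path] = Nil
val withRunImports: Boolean = true

// Option#toList bridges the optional root; the List concatenates directly.
val imports: List[Path] =
  nodeImport.toList ++ globalNodeModulePaths ++ (if (withRunImports) runImports else Nil)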
@@ -9,8 +9,7 @@ import aqua.parser.lift.LiftParser.LiftErrorOps
 import aqua.parser.lift.{FileSpan, LiftParser, Span}
 import cats.data.{Validated, ValidatedNec}
 import cats.parse.{LocationMap, Parser as P, Parser0 as P0}
-import cats.{Comonad, Eval, Id, ~>}
-
+import cats.{~>, Comonad, Eval, Id}
 
 object Parser extends scribe.Logging {
 
@@ -27,14 +26,16 @@ object Parser extends scribe.Logging {
     parser
   }
 
-  def parse[S[_] : LiftParser : Comonad](p: P0[ValidatedNec[ParserError[S], Ast[S]]])(source: String): ValidatedNec[ParserError[S], Ast[S]] = {
+  def parse[S[_]: LiftParser: Comonad](
+    p: P0[ValidatedNec[ParserError[S], Ast[S]]]
+  )(source: String): ValidatedNec[ParserError[S], Ast[S]] = {
     p.parseAll(source) match {
       case Right(value) => value
       case Left(e) => Validated.invalidNec(LexerError(e.wrapErr))
     }
   }
 
-  def natParser[S[_] : LiftParser : Comonad, K[_] : Comonad](
+  def natParser[S[_]: LiftParser: Comonad, K[_]: Comonad](
     p: P0[ValidatedNec[ParserError[S], Ast[S]]],
     nat: S ~> K
   )(source: String): ValidatedNec[ParserError[K], Ast[K]] =
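The reformatted parse above follows the usual cats-parse pattern: run parseAll and lift the resulting Either into Validated. A self-contained sketch of just that pattern, with a toy digits parser and the raw cats-parse error type standing in for the Ast parser and ParserError used in this file:

import cats.data.{Validated, ValidatedNec}
import cats.parse.{Parser as P}

// Toy parser standing in for the Ast parser.
val digits: P[String] = P.charsWhile(_.isDigit)

def parseDigits(source: String): ValidatedNec[P.Error, String] =
  digits.parseAll(source) match {
    case Right(value) => Validated.validNec(value)
    case Left(e)      => Validated.invalidNec(e)
  }

// parseDigits("123") is Valid("123"); parseDigits("12a") is Invalid with the parse error.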
@@ -1,4 +1,4 @@
 addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.15.0")
-addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.7.1")
+addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.9.0")
 addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.1.0")
-addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0")
+addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0")
@@ -27,7 +27,7 @@ class IfSem[S[_]](val expr: IfExpr[S]) extends AnyVal {
       case Some(lt) =>
         V.valueToRaw(expr.right).flatMap {
           case Some(rt) =>
-            T.ensureTypeMatches(expr.right, lt.`type`, rt.`type`)
+            T.ensureValuesComparable(expr.right, lt.`type`, rt.`type`)
               .map(m => Some(lt -> rt).filter(_ => m))
           case None =>
             None.pure[Alg]
@@ -26,6 +26,8 @@ trait TypesAlgebra[S[_], Alg[_]] {
   def resolveIndex(rootT: Type, op: IntoIndex[S], idx: ValueRaw): Alg[Option[LambdaRaw]]
   def resolveField(rootT: Type, op: IntoField[S]): Alg[Option[LambdaRaw]]
 
+  def ensureValuesComparable(token: Token[S], left: Type, right: Type): Alg[Boolean]
+
   def ensureTypeMatches(token: Token[S], expected: Type, givenType: Type): Alg[Boolean]
 
   def expectNoExport(token: Token[S]): Alg[Unit]
@@ -7,6 +7,7 @@ import aqua.types.{
   ArrayType,
   ArrowType,
   BoxType,
+  LiteralType,
   OptionType,
   ProductType,
   ScalarType,
@@ -140,15 +141,32 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
         report(op, s"Expected $rootT to be a collection type").as(None)
     }
 
+  override def ensureValuesComparable(
+    token: Token[S],
+    left: Type,
+    right: Type
+  ): State[X, Boolean] = {
+    val isComparable = (left, right) match {
+      case (LiteralType(xs, _), LiteralType(ys, _)) =>
+        xs.intersect(ys).nonEmpty
+      case _ =>
+        left.acceptsValueOf(right)
+    }
+
+    if (isComparable) State.pure(true)
+    else
+      report(token, s"Cannot compare '$left' with '$right''")
+        .as(false)
+  }
+
   override def ensureTypeMatches(
     token: Token[S],
     expected: Type,
     givenType: Type
   ): State[X, Boolean] =
-    // TODO in case of two literals, check for types intersection?
     if (expected.acceptsValueOf(givenType)) State.pure(true)
    else
-      report(token, s"Types mismatch, expected: ${expected}, given: ${givenType}")
+      report(token, s"Types mismatch, expected: $expected, given: $givenType")
         .as(false)
 
   override def expectNoExport(token: Token[S]): State[X, Unit] =
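A simplified model of the comparability rule added above, which is what IfSem now uses for the two sides of an equality check: two literal types are comparable when their sets of possible scalar types intersect, even if neither literal type accepts values of the other; any other pair falls back to the usual acceptsValueOf subtyping check. The types below are toy stand-ins, not the real aqua.types definitions.

// Toy type model, for illustration only.
sealed trait Ty { def acceptsValueOf(other: Ty): Boolean = this == other }
final case class Scalar(name: String) extends Ty
final case class Literal(oneOf: Set[Scalar]) extends Ty {
  override def acceptsValueOf(other: Ty): Boolean = other match {
    case Literal(os) => os.subsetOf(oneOf)
    case s: Scalar   => oneOf.contains(s)
  }
}

def comparable(left: Ty, right: Ty): Boolean = (left, right) match {
  // Two literals: enough that some concrete type fits both,
  // e.g. `1` and `300` can both be u16 even if their literal types differ.
  case (Literal(xs), Literal(ys)) => xs.intersect(ys).nonEmpty
  // Otherwise fall back to the subtyping check used by ensureTypeMatches.
  case _ => left.acceptsValueOf(right)
}

// comparable(Literal(Set(Scalar("u8"), Scalar("u16"))), Literal(Set(Scalar("u16")))) == true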