mirror of
https://github.com/fluencelabs/aqua.git
synced 2025-03-15 11:40:50 +00:00
DXJ-82 DXJ-42 Go-to definition for data structs and imports in VSCode extension (#541)
This commit is contained in:
parent
61e8976c3f
commit
d452070b9f
@@ -3,11 +3,11 @@ package aqua.lsp
 import aqua.compiler.*
 import aqua.files.{AquaFileSources, AquaFilesIO, FileModuleId}
 import aqua.io.*
-import aqua.parser.lexer.Token
+import aqua.parser.lexer.{LiteralToken, Token}
 import aqua.parser.lift.FileSpan.F
 import aqua.parser.lift.{FileSpan, Span}
 import aqua.parser.{ArrowReturnError, BlockIndentError, LexerError, ParserError}
-import aqua.semantics.lsp.LspContext
+import aqua.semantics.lsp.{LspContext, TokenInfo}
 import aqua.semantics.{CompilerState, HeaderError, RulesViolated, WrongAST}
 import aqua.{AquaIO, SpanParser}
 import cats.data.{NonEmptyChain, Validated}
@@ -27,7 +27,8 @@ import scala.scalajs.js.{undefined, UndefOr}
 @JSExportAll
 case class CompilationResult(
   errors: js.Array[ErrorInfo],
-  locations: js.Array[TokenLink]
+  locations: js.Array[TokenLink],
+  importLocations: js.Array[TokenImport]
 )
 
 @JSExportAll
@@ -36,6 +37,9 @@ case class TokenLocation(name: String, startLine: Int, startCol: Int, endLine: I
 @JSExportAll
 case class TokenLink(current: TokenLocation, definition: TokenLocation)
 
+@JSExportAll
+case class TokenImport(current: TokenLocation, path: String)
+
 object TokenLocation {
 
   def fromSpan(span: FileSpan): Option[TokenLocation] = {
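
For a sense of the payload shape, here is a minimal sketch of the values the extension now receives. It uses the classes defined above and the file's Scala.js imports; the file name and coordinates are made up, and TokenLocation is assumed to carry a name plus start/end line and column, matching the truncated signature in the hunk header.

    // Hypothetical values; TokenLocation(name, startLine, startCol, endLine, endCol) is assumed.
    val usage      = TokenLocation("main.aqua", 10, 4, 10, 12) // where a struct is used
    val definition = TokenLocation("main.aqua", 2, 5, 2, 13)   // where it is declared
    val link = TokenLink(usage, definition)                    // powers go-to-definition
    val imp  = TokenImport(TokenLocation("main.aqua", 0, 7, 0, 21), "./lib.aqua")
    val result = CompilationResult(js.Array(), js.Array(link), js.Array(imp))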
@@ -168,36 +172,50 @@ object AquaLSP extends App with Logging {
 
       logger.debug("Compilation done.")
 
+      def locationsToJs(
+        locations: List[(Token[FileSpan.F], TokenInfo[FileSpan.F])]
+      ): js.Array[TokenLink] = {
+        locations.flatMap { case (t, tInfo) =>
+          tInfo.definition match {
+            case None => Nil
+            case Some(d) =>
+              val fromOp = TokenLocation.fromSpan(t.unit._1)
+              val toOp = TokenLocation.fromSpan(d.unit._1)
+
+              val link = for {
+                from <- fromOp
+                to <- toOp
+              } yield {
+                TokenLink(from, to)
+              }
+
+              if (link.isEmpty)
+                logger.warn(s"Incorrect coordinates for token '${t.unit._1.name}'")
+
+              link.toList
+          }
+        }.toJSArray
+      }
+
+      def importsToTokenImport(imports: List[LiteralToken[FileSpan.F]]): js.Array[TokenImport] =
+        imports.flatMap { lt =>
+          val (span, str) = lt.valueToken
+          val unquoted = str.substring(1, str.length - 1)
+          TokenLocation.fromSpan(span).map(l => TokenImport(l, unquoted))
+        }.toJSArray
+
       val result = fileRes match {
         case Valid(lsp) =>
           logger.debug("No errors on compilation.")
           CompilationResult(
             List.empty.toJSArray,
-            lsp.locations.flatMap { case (t, tInfo) =>
-              tInfo.definition match {
-                case None => Nil
-                case Some(d) =>
-                  val fromOp = TokenLocation.fromSpan(t.unit._1)
-                  val toOp = TokenLocation.fromSpan(d.unit._1)
-
-                  val link = for {
-                    from <- fromOp
-                    to <- toOp
-                  } yield {
-                    TokenLink(from, to)
-                  }
-
-                  if (link.isEmpty)
-                    logger.warn(s"Incorrect coordinates for token '${t.unit._1.name}'")
-
-                  link.toList
-              }
-            }.toJSArray
+            locationsToJs(lsp.locations),
+            importsToTokenImport(lsp.importTokens)
           )
         case Invalid(e: NonEmptyChain[AquaError[FileModuleId, AquaFileError, FileSpan.F]]) =>
           val errors = e.toNonEmptyList.toList.flatMap(errorToInfo)
           logger.debug("Errors: " + errors.mkString("\n"))
-          CompilationResult(errors.toJSArray, List.empty.toJSArray)
+          CompilationResult(errors.toJSArray, List.empty.toJSArray, List.empty.toJSArray)
       }
       result
     }
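
The extracted helper importsToTokenImport strips the surrounding quotes from each import literal before handing the path to the editor. A tiny standalone sketch of that single step (the literal text is a made-up example):

    val quoted   = "\"./lib.aqua\""                       // literal as it appears in source
    val unquoted = quoted.substring(1, quoted.length - 1) // "./lib.aqua"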
language-server-npm/aqua-lsp-api.d.ts (vendored): 8 changes
@@ -11,6 +11,11 @@ export interface TokenLink {
     definition: TokenLocation
 }
 
+export interface TokenImport {
+    current: TokenLocation,
+    path: string
+}
+
 export interface ErrorInfo {
     start: number,
     end: number,
@@ -20,7 +25,8 @@ export interface ErrorInfo {
 
 export interface CompilationResult {
     errors: ErrorInfo[],
-    locations: TokenLink[]
+    locations: TokenLink[],
+    importLocations: TokenImport[]
 }
 
 export class Compiler {
@@ -1,7 +1,9 @@
 package aqua.semantics
 
+import aqua.parser.lexer.Token
 import aqua.raw.Raw
 import aqua.raw.RawContext
+import aqua.semantics.lsp.{TokenInfo, TokenType}
 import aqua.semantics.rules.abilities.AbilitiesState
 import aqua.semantics.rules.names.NamesState
 import aqua.semantics.rules.types.TypesState
@@ -15,7 +17,9 @@ case class CompilerState[S[_]](
   names: NamesState[S] = NamesState[S](),
   abilities: AbilitiesState[S] = AbilitiesState[S](),
   types: TypesState[S] = TypesState[S]()
-)
+) {
+  lazy val locations: List[(Token[S], TokenInfo[S])] = names.locations ++ abilities.locations ++ types.locations
+}
 
 object CompilerState {
   type St[S[_]] = State[CompilerState[S], Raw]
@@ -1,29 +1,35 @@
 package aqua.semantics
 
-import aqua.parser.lexer.Token
+import aqua.parser.head.{HeadExpr, HeaderExpr, ImportExpr, ImportFromExpr}
+import aqua.parser.lexer.{LiteralToken, Token}
 import aqua.parser.{Ast, Expr}
 import aqua.raw.ops.{FuncOp, SeqGroupTag}
 import aqua.raw.{Raw, RawContext, RawPart}
 import aqua.semantics.header.Picker
 import aqua.semantics.header.Picker.*
-import aqua.semantics.lsp.LspContext
+import aqua.semantics.lsp.{LspContext, TokenDef, TokenInfo, TokenType}
 import aqua.semantics.rules.abilities.{AbilitiesAlgebra, AbilitiesInterpreter, AbilitiesState}
 import aqua.semantics.rules.names.{NamesAlgebra, NamesInterpreter, NamesState}
 import aqua.semantics.rules.types.{TypesAlgebra, TypesInterpreter, TypesState}
 import aqua.semantics.rules.{ReportError, ValuesAlgebra}
 import cats.arrow.FunctionK
 import cats.data.*
+import cats.Reducible
 import cats.data.Validated.{Invalid, Valid}
 import cats.kernel.Monoid
 import cats.syntax.applicative.*
 import cats.syntax.apply.*
 import cats.syntax.flatMap.*
 import cats.syntax.functor.*
+import cats.syntax.foldable.*
+import cats.syntax.reducible.*
+import cats.free.CofreeInstances
 import cats.syntax.semigroup.*
 import cats.{Eval, Monad, Semigroup}
 import monocle.Lens
 import monocle.macros.GenLens
 import scribe.{Logging, log}
+import cats.free.Cofree
 
 sealed trait Semantics[S[_], C] {
 
@@ -54,6 +60,18 @@ class RawSemantics[S[_]](implicit p: Picker[RawContext]) extends Semantics[S, Ra
 
 class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
 
+  def getImportTokens(ast: Ast[S]): List[LiteralToken[S]] = {
+    ast.head.foldLeft[List[LiteralToken[S]]](Nil){ case (l, header) =>
+      header match {
+        case ImportExpr(fn) =>
+          println("import: " + fn)
+          l :+ fn
+        case ImportFromExpr(_, fn) => l :+ fn
+        case _ => l
+      }
+    }
+  }
+
   def process(
     ast: Ast[S],
     init: LspContext[S]
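
The new getImportTokens folds over the parsed header and keeps only the path literals of `import` and `import ... from` expressions. A standalone model of that traversal, with simplified stand-in types for the real header AST (the actual fold runs over a Cofree structure and collects LiteralToken values):

    sealed trait Header
    case class Import(path: String) extends Header
    case class ImportFrom(names: List[String], path: String) extends Header
    case class Other(text: String) extends Header

    def importPaths(header: List[Header]): List[String] =
      header.foldLeft(List.empty[String]) {
        case (acc, Import(p))        => acc :+ p // plain import
        case (acc, ImportFrom(_, p)) => acc :+ p // import ... from
        case (acc, _)                => acc      // everything else is skipped
      }

    // importPaths(List(Import("\"./a.aqua\""), Other("aqua A"))) == List("\"./a.aqua\"")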
@@ -70,6 +88,9 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
       )
     )
 
+    val importTokens = getImportTokens(ast)
+
+
     Semantics
       .interpret(ast, initState, init.raw)
       .map { case (state, ctx) =>
@@ -82,7 +103,8 @@ class LspSemantics[S[_]] extends Semantics[S, LspContext[S]] {
             rootArrows = state.names.rootArrows,
             constants = state.names.constants,
             abDefinitions = state.abilities.definitions,
-            locations = state.names.locations ++ state.abilities.locations
+            locations = state.locations,
+            importTokens = importTokens
           )
         )
       }(Invalid(_))
@@ -1,6 +1,6 @@
 package aqua.semantics.lsp
 
-import aqua.parser.lexer.{Ability, Name, Token}
+import aqua.parser.lexer.{Ability, LiteralToken, Name, Token}
 import aqua.raw.{RawContext, RawPart}
 import aqua.types.ArrowType
 import cats.{Monoid, Semigroup}
@@ -15,7 +15,8 @@ case class LspContext[S[_]](
     Map.empty[String, (Ability[S], List[(Name[S], ArrowType)])],
   rootArrows: Map[String, TokenArrowInfo[S]] = Map.empty[String, TokenArrowInfo[S]],
   constants: Map[String, TokenType[S]] = Map.empty[String, TokenType[S]],
-  locations: List[(Token[S], TokenInfo[S])] = Nil
+  locations: List[(Token[S], TokenInfo[S])] = Nil,
+  importTokens: List[LiteralToken[S]] = Nil
 )
 
 object LspContext {
@@ -14,7 +14,7 @@ trait TypesAlgebra[S[_], Alg[_]] {
 
   def defineField(name: Name[S], `type`: Type): Alg[Boolean]
 
-  def purgeFields(token: Token[S]): Alg[Option[NonEmptyMap[String, Type]]]
+  def purgeFields(token: CustomTypeToken[S]): Alg[Option[NonEmptyMap[String, Type]]]
 
   def defineDataType(
     name: CustomTypeToken[S],
@@ -2,6 +2,7 @@ package aqua.semantics.rules.types
 
 import aqua.parser.lexer.*
 import aqua.raw.value.{IntoFieldRaw, IntoIndexRaw, LambdaRaw, ValueRaw}
+import aqua.semantics.lsp.{TokenDef, TokenTypeInfo}
 import aqua.semantics.rules.{ReportError, StackInterpreter}
 import aqua.types.{
   ArrayType,
@@ -42,13 +43,25 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
 
   override def resolveType(token: TypeToken[S]): State[X, Option[Type]] =
     getState.map(_.resolveTypeToken(token)).flatMap {
-      case Some(t) => State.pure(Some(t))
+      case Some(t) =>
+        val (tt, tokens) = t
+        modify(st =>
+          st.copy(locations = st.locations ++ tokens.map { case (t, td) =>
+            (t, TokenDef(Some(td)))
+          })
+        ).map(_ => Some(tt))
       case None => report(token, s"Unresolved type").as(None)
     }
 
   override def resolveArrowDef(arrowDef: ArrowTypeToken[S]): State[X, Option[ArrowType]] =
     getState.map(_.resolveArrowDef(arrowDef)).flatMap {
-      case Valid(t) => State.pure[X, Option[ArrowType]](Some(t))
+      case Valid(t) =>
+        val (tt, tokens) = t
+        modify(st =>
+          st.copy(locations = st.locations ++ tokens.map { case (t, td) =>
+            (t, TokenDef(Some(td)))
+          })
+        ).map(_ => Some(tt))
       case Invalid(errs) =>
         errs
           .foldLeft[ST[Option[ArrowType]]](State.pure(None)) { case (n, (tkn, hint)) =>
@@ -66,15 +79,23 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
           .as(false)
       }
 
-  override def purgeFields(token: Token[S]): State[X, Option[NonEmptyMap[String, Type]]] =
-    getState
-      .map(_.fields.view.mapValues(_._2))
-      .map(SortedMap.from(_))
-      .map(NonEmptyMap.fromMap(_))
-      .flatMap {
-        case Some(fs) => modify(_.copy(fields = Map.empty)).as(Some(fs))
+  override def purgeFields(
+    token: CustomTypeToken[S]
+  ): State[X, Option[NonEmptyMap[String, Type]]] = {
+    getState.map(_.fields).flatMap { fields =>
+      NonEmptyMap.fromMap(SortedMap.from(fields.view.mapValues(_._2))) match {
+        case Some(fs) =>
+          modify { st =>
+            val tokens = st.fieldsToken
+            val updated = tokens ++ fields.toList.map { case (n, (tt, t)) =>
+              (token.value + "." + n, TokenTypeInfo(Some(tt), t))
+            }
+            st.copy(fields = Map.empty, fieldsToken = updated)
+          }.map(_ => Some(fs))
         case None => report(token, "Cannot define a data type without fields").as(None)
       }
+    }
+  }
 
   override def defineDataType(
     name: CustomTypeToken[S],
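
purgeFields now also remembers, for every field of the struct being defined, a "Struct.field" key pointing at the field's definition token. A simplified standalone sketch of the keying scheme, with strings standing in for the real tokens and TokenTypeInfo values:

    val structName  = "User"                                 // hypothetical struct
    val fields      = Map("name" -> "string", "id" -> "u64") // field name -> type
    val fieldsToken = fields.map { case (n, t) => (structName + "." + n, t) }
    // fieldsToken == Map("User.name" -> "string", "User.id" -> "u64")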
@@ -85,12 +106,12 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
       case Some(_) =>
         report(name, s"Type `${name.value}` was already defined").as(false)
       case None =>
-        modify(st =>
+        modify { st =>
           st.copy(
             strict = st.strict.updated(name.value, StructType(name.value, fields)),
             definitions = st.definitions.updated(name.value, name)
           )
-        )
+        }
           .as(true)
     }
 
@@ -107,8 +128,7 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
     ).as(true)
   }
 
-  // TODO actually it's stateless, exists there just for reporting needs
-  override def resolveField(rootT: Type, op: IntoField[S]): State[X, Option[LambdaRaw]] =
+  override def resolveField(rootT: Type, op: IntoField[S]): State[X, Option[LambdaRaw]] = {
     rootT match {
       case StructType(name, fields) =>
         fields(op.value).fold(
@@ -116,10 +136,19 @@ class TypesInterpreter[S[_], X](implicit lens: Lens[X, TypesState[S]], error: Re
           op,
           s"Field `${op.value}` not found in type `$name`, available: ${fields.toNel.toList.map(_._1).mkString(", ")}"
         ).as(None)
-        )(t => State.pure(Some(IntoFieldRaw(op.value, t))))
+        ) { t =>
+          modify { st =>
+            st.fieldsToken.get(name + "." + op.value) match {
+              case Some(td) => st.copy(locations = st.locations :+ (op, td))
+              case None => st
+            }
+
+          }.as(Some(IntoFieldRaw(op.value, t)))
+        }
       case _ =>
         report(op, s"Expected Struct type to resolve a field, got $rootT").as(None)
     }
+  }
 
   // TODO actually it's stateless, exists there just for reporting needs
   override def resolveIndex(
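
resolveField is the consumer of those keys: when a field access resolves, it looks up "Struct.field" in fieldsToken and, on a hit, appends a (usage token, definition info) pair to locations, which is what makes go-to-definition work on struct fields. A standalone model with strings in place of tokens:

    def recordFieldLocation(
      fieldsToken: Map[String, String],  // "Struct.field" -> definition site
      locations: List[(String, String)], // accumulated (usage, definition) pairs
      struct: String,
      field: String
    ): List[(String, String)] =
      fieldsToken.get(struct + "." + field) match {
        case Some(defSite) => locations :+ ((field, defSite)) // link usage to definition
        case None          => locations                       // unknown field: no link
      }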
@@ -1,91 +1,73 @@
 package aqua.semantics.rules.types
 
 import aqua.raw.value.{IntoFieldRaw, IntoIndexRaw, LambdaRaw, LiteralRaw, ValueRaw}
-import aqua.parser.lexer.{
-  ArrayTypeToken,
-  ArrowTypeToken,
-  BasicTypeToken,
-  CustomTypeToken,
-  IntoField,
-  IntoIndex,
-  LambdaOp,
-  Name,
-  OptionTypeToken,
-  StreamTypeToken,
-  Token,
-  TopBottomToken,
-  TypeToken
-}
-import aqua.types.{
-  ArrayType,
-  ArrowType,
-  BottomType,
-  DataType,
-  OptionType,
-  ProductType,
-  StreamType,
-  StructType,
-  TopType,
-  Type
-}
+import aqua.parser.lexer.{ArrayTypeToken, ArrowTypeToken, BasicTypeToken, CustomTypeToken, IntoField, IntoIndex, LambdaOp, Name, OptionTypeToken, StreamTypeToken, Token, TopBottomToken, TypeToken}
+import aqua.types.{ArrayType, ArrowType, BottomType, DataType, OptionType, ProductType, StreamType, StructType, TopType, Type}
 import cats.data.Validated.{Invalid, Valid}
 import cats.data.{Chain, NonEmptyChain, ValidatedNec}
 import cats.kernel.Monoid
 import aqua.raw.RawContext
+import aqua.semantics.lsp.{TokenInfo, TokenType, TokenTypeInfo}
 
 case class TypesState[S[_]](
   fields: Map[String, (Name[S], Type)] = Map.empty[String, (Name[S], Type)],
   strict: Map[String, Type] = Map.empty[String, Type],
   definitions: Map[String, CustomTypeToken[S]] = Map.empty[String, CustomTypeToken[S]],
-  stack: List[TypesState.Frame[S]] = Nil
+  fieldsToken: Map[String, TokenTypeInfo[S]] = Map.empty[String, TokenTypeInfo[S]],
+  stack: List[TypesState.Frame[S]] = Nil,
+  locations: List[(Token[S], TokenInfo[S])] = Nil
 ) {
   def isDefined(t: String): Boolean = strict.contains(t)
 
-  def resolveTypeToken(tt: TypeToken[S]): Option[Type] =
+  // TODO: an ugly return type, refactoring
+  // Returns type and a token with its definition
+  def resolveTypeToken(tt: TypeToken[S]): Option[(Type, List[(Token[S], CustomTypeToken[S])])] =
     tt match {
       case TopBottomToken(_, isTop) =>
-        Option(if (isTop) TopType else BottomType)
+        Option((if (isTop) TopType else BottomType, Nil))
       case ArrayTypeToken(_, dtt) =>
-        resolveTypeToken(dtt).collect { case it: DataType =>
-          ArrayType(it)
+        resolveTypeToken(dtt).collect { case (it: DataType, t) =>
+          (ArrayType(it), t)
         }
       case StreamTypeToken(_, dtt) =>
-        resolveTypeToken(dtt).collect { case it: DataType =>
-          StreamType(it)
+        resolveTypeToken(dtt).collect { case (it: DataType, t) =>
+          (StreamType(it), t)
         }
      case OptionTypeToken(_, dtt) =>
-        resolveTypeToken(dtt).collect { case it: DataType =>
-          OptionType(it)
+        resolveTypeToken(dtt).collect { case (it: DataType, t) =>
+          (OptionType(it), t)
        }
-      case ctt: CustomTypeToken[S] => strict.get(ctt.value)
-      case btt: BasicTypeToken[S] => Some(btt.value)
+      case ctt: CustomTypeToken[S] => strict.get(ctt.value).map(t => (t, definitions.get(ctt.value).toList.map(ctt -> _)))
+      case btt: BasicTypeToken[S] => Some((btt.value, Nil))
      case ArrowTypeToken(_, args, res) =>
-        val strictArgs = args.map(_._2).map(resolveTypeToken).collect { case Some(dt: DataType) =>
-          dt
+        val strictArgs = args.map(_._2).map(resolveTypeToken).collect { case Some((dt: DataType, t)) =>
+          (dt, t)
        }
-        val strictRes: List[DataType] = res.flatMap(resolveTypeToken).collect { case dt: DataType =>
-          dt
+        val strictRes = res.map(resolveTypeToken).collect { case Some((dt: DataType, t)) =>
+          (dt, t)
        }
-        Option.when(strictRes.length == res.length && strictArgs.length == args.length)(
-          ArrowType(ProductType(strictArgs), ProductType(strictRes))
-        )
+        Option.when(strictRes.length == res.length && strictArgs.length == args.length){
+          val (sArgs, argTokens) = strictArgs.unzip
+          val (sRes, resTokens) = strictRes.unzip
+          (ArrowType(ProductType(sArgs), ProductType(sRes)), argTokens.flatten ++ resTokens.flatten)
+        }
    }
 
-  def resolveArrowDef(ad: ArrowTypeToken[S]): ValidatedNec[(Token[S], String), ArrowType] = {
+  def resolveArrowDef(ad: ArrowTypeToken[S]): ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])] = {
    val resType = ad.res.map(resolveTypeToken)
 
    NonEmptyChain
      .fromChain(Chain.fromSeq(ad.res.zip(resType).collect { case (dt, None) =>
        dt -> "Cannot resolve the result type"
      }))
-      .fold[ValidatedNec[(Token[S], String), ArrowType]] {
+      .fold[ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])]] {
        val (errs, argTypes) = ad.args.map { (argName, tt) =>
          resolveTypeToken(tt)
            .toRight(tt -> s"Type unresolved")
            .map(argName.map(_.value) -> _)
        }
-          .foldLeft[(Chain[(Token[S], String)], Chain[(Option[String], Type)])](
-            (Chain.empty, Chain.empty)
+          .foldLeft[(Chain[(Token[S], String)], Chain[(Option[String], (Type, List[(Token[S], CustomTypeToken[S])]))])](
+            (Chain.empty, Chain.empty[(Option[String], (Type, List[(Token[S], CustomTypeToken[S])]))])
          ) {
            case ((errs, argTypes), Right(at)) => (errs, argTypes.append(at))
            case ((errs, argTypes), Left(e)) => (errs.append(e), argTypes)
@@ -93,13 +75,15 @@ case class TypesState[S[_]](
 
        NonEmptyChain
          .fromChain(errs)
-          .fold[ValidatedNec[(Token[S], String), ArrowType]](
-            Valid(
-              ArrowType(
-                ProductType.maybeLabelled(argTypes.toList),
-                ProductType(resType.flatten)
-              )
-            )
+          .fold[ValidatedNec[(Token[S], String), (ArrowType, List[(Token[S], CustomTypeToken[S])])]](
+            Valid{
+              val (labels, types) = argTypes.toList.unzip
+              val (resTypes, resTokens) = resType.flatten.unzip
+              (ArrowType(
+                ProductType.maybeLabelled(labels.zip(types.map(_._1))),
+                ProductType(resTypes)
+              ), types.map(_._2).flatten ++ resTokens.flatten)
+            }
          )(Invalid(_))
      }(Invalid(_))
  }
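
The net effect of the TypesState change: resolveTypeToken no longer returns just a Type but also every (usage token, definition token) pair it encountered while resolving, and the interpreter merges those pairs into locations. A simplified standalone model of the custom-type case, with strings in place of Type and the token classes:

    def resolveCustom(
      strict: Map[String, String],      // type name -> resolved type
      definitions: Map[String, String], // type name -> definition token
      name: String                      // the usage site being resolved
    ): Option[(String, List[(String, String)])] =
      strict.get(name).map { t =>
        (t, definitions.get(name).toList.map(d => (name, d)))
      }

    // resolveCustom(Map("User" -> "struct"), Map("User" -> "defToken"), "User")
    //   == Some(("struct", List(("User", "defToken"))))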