path: root/compilador/parse
Diffstat (limited to 'compilador/parse')
-rw-r--r--  compilador/parse/base.py    60
-rw-r--r--  compilador/parse/decl.py   102
-rw-r--r--  compilador/parse/expr.py    12
-rw-r--r--  compilador/parse/ident.py   15
-rw-r--r--  compilador/parse/type.py    63
-rw-r--r--  compilador/parse/unit.py    15
6 files changed, 267 insertions, 0 deletions
diff --git a/compilador/parse/base.py b/compilador/parse/base.py
new file mode 100644
index 0000000..a8f6f8a
--- /dev/null
+++ b/compilador/parse/base.py
@@ -0,0 +1,60 @@
+from more_itertools import seekable
+
+from tabla import LexToken, Token
+from errors import Error
+
+class BaseParser:
+    def __init__(self, iterador: seekable):
+        self.iterador: seekable = iterador
+
+    def want(self, *want: Token) -> (LexToken | Error):
+        ''' Requires the next token to have a matching token type. Returns
+        that token, or a syntax error. '''
+        tok: LexToken = self.lex()
+        if len(want) == 0:
+            return tok
+        for w in want:
+            if tok.tipo == w:
+                return tok
+
+        return Error.syntax(tok.tipo, want, tok.numlinea)
+
+    def _try(self, *want: Token) -> (LexToken | None):
+        ''' Looks for a token with a matching type. If found, the token is
+        consumed from the lexer and returned. If not, the token is unlexed
+        and None is returned. '''
+        tok: LexToken = self.lex()
+        if len(want) == 0:
+            return tok
+        for w in want:
+            if tok.tipo == w:
+                return tok
+        self.unlex()
+
+    def peek(self, *want: Token) -> (LexToken | None):
+        ''' Looks at the next token without consuming it. Returns the token
+        if it matches one of the given types (or if no types are given), and
+        None otherwise. '''
+        tok: LexToken = self.iterador.peek()
+        if len(want) == 0:
+            return tok
+        for w in want:
+            if tok.tipo == w:
+                return tok
+
+    def lex(self) -> LexToken:
+        ''' Consumes and returns the next token. '''
+        return next(self.iterador)
+
+    def unlex(self) -> None:
+        ''' Rewinds the token stream by one token, so the most recently
+        consumed token is returned again by the next call to lex().
+        relative_seek() is available in more_itertools >= 10.2. '''
+        self.iterador.relative_seek(-1)
+
+    def synassert(self, cond: bool, msg: str) -> (Error | None):
+        ''' Returns a syntax error carrying msg if cond is false, and None
+        otherwise. '''
+        if not cond:
+            return Error(msg = msg)
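+
+
+# Minimal usage sketch of the helpers above, assuming the lexer yields
+# LexToken objects with .tipo and .numlinea fields (as used in this file);
+# lexed_tokens is a hypothetical name for the lexer output:
+#
+#     toks = seekable(iter(lexed_tokens))
+#     p = BaseParser(toks)
+#     tok = p.want(Token.IDENT)             # Error unless the next token is an identifier
+#     if p._try(Token.EQUAL):               # consume '=' only if it is there
+#         ...                               # parse an initializer
+#     nxt = p.peek(Token.SEMICOLON)         # look ahead without consuming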
diff --git a/compilador/parse/decl.py b/compilador/parse/decl.py
new file mode 100644
index 0000000..73f8580
--- /dev/null
+++ b/compilador/parse/decl.py
@@ -0,0 +1,102 @@
+from typing import List, Optional
+
+from tabla import Token, LexToken
+from parse.base import BaseParser
+from errors import Error
+from parse.type import ParseType
+from parse.ident import ParseIdent
+from parse.expr import ParseExpr, Expr
+from astree.decl import DeclGlobal, DeclFunc, Decl
+
+class ParseDecl:
+    def __init__(self, parser: BaseParser):
+        self.parser = parser
+
+    def decl_global(self) -> (DeclGlobal | Error):
+        # Type
+        _type = ParseType(self.parser)._type()
+        if type(_type) is Error:
+            return _type
+
+        # Identifier
+        ident = ParseIdent(self.parser).ident()
+        if type(ident) is Error:
+            return ident
+
+        # =
+        init: Optional[Expr] = None
+        eq = self.parser._try(Token.EQUAL)
+        if eq:
+            # Expression
+            init = ParseExpr(self.parser).expr()
+            if type(init) is Error:
+                return init
+
+        return DeclGlobal(ident = ident,
+                          _type = _type,
+                          init = init)
+
+    def decl_func(self) -> (DeclFunc | Error):
+        # 'funcion' keyword
+        tok = self.parser.want(Token.FUNCTION)
+        if type(tok) is Error:
+            return tok
+
+        # Type
+        _type = ParseType(self.parser)._type()
+        if type(_type) is Error:
+            return _type
+
+        # Identifier
+        ident = ParseIdent(self.parser).ident()
+        if type(ident) is Error:
+            return ident
+
+        # Prototype
+        proto = ParseType(self.parser).prototype()
+        if type(proto) is Error:
+            return proto
+
+        # ;
+        # semicolon = self.parser.want(Token.SEMICOLON)
+        # if type(semicolon) is Error:
+        #     return semicolon
+        # self.parser.unlex()
+
+        return DeclFunc(ident = ident,
+                        prototype = proto,
+                        body = None)
+
+    # Parses a declaration.
+    def decl(self) -> (Decl | Error):
+        # A declaration starting with a type token is a global; anything
+        # else is parsed as a function declaration.
+        toks = [Token.BOOLEAN, Token.CHAR, Token.INT, Token.STRING, Token.VOID]
+        _next = self.parser.peek(*toks)
+        decl: Optional[Decl] = None
+        if not _next:
+            decl = self.decl_func()
+        else:
+            decl = self.decl_global()
+
+        if type(decl) is Error:
+            return decl
+
+        # ;
+        semicolon = self.parser.want(Token.SEMICOLON)
+        if type(semicolon) is Error:
+            return semicolon
+
+        return decl
+
+    # Parses the declarations of a unit, stopping at EOF.
+    def decls(self) -> (List[Decl] | Error):
+        decls: List[Decl] = []
+        while not self.parser.peek(Token.EOF):
+            # print(self.parser.peek())
+            # print(next(self.parser.iterador))
+            decl = self.decl()
+            if type(decl) is Error:
+                return decl
+            decls.append(decl)
+
+        return decls
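+
+
+# Shape of the declarations accepted above, as inferred from the parse
+# order (keyword spellings live in the lexer and are shown here only for
+# illustration):
+#
+#   decl_global ::= type ident [ '=' expr ]
+#   decl_func   ::= 'funcion' type ident prototype
+#   decl        ::= ( decl_global | decl_func ) ';'
+#   decls       ::= decl* EOF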
diff --git a/compilador/parse/expr.py b/compilador/parse/expr.py
new file mode 100644
index 0000000..a00f3d5
--- /dev/null
+++ b/compilador/parse/expr.py
@@ -0,0 +1,12 @@
+from parse.base import BaseParser
+from errors import Error
+from astree.expr import Expr
+
+class ParseExpr:
+    def __init__(self, parser: BaseParser):
+        self.parser = parser
+
+    def expr(self) -> (Expr | Error | None):
+        # Expression parsing is not implemented yet: a single token is
+        # consumed and None is returned.
+        next(self.parser.iterador)
+        return None
+
diff --git a/compilador/parse/ident.py b/compilador/parse/ident.py
new file mode 100644
index 0000000..5887fa2
--- /dev/null
+++ b/compilador/parse/ident.py
@@ -0,0 +1,15 @@
+
+from tabla import Token, LexToken
+from parse.base import BaseParser
+from astree.ident import Ident
+from errors import Error
+
+class ParseIdent:
+    def __init__(self, parser: BaseParser):
+        self.parser = parser
+
+    def ident(self) -> (Ident | Error):
+        tok: LexToken = self.parser.want(Token.IDENT)
+        if type(tok) is Error:
+            return tok
+        # The identifier is represented by the token's name field.
+        return tok.nombre
diff --git a/compilador/parse/type.py b/compilador/parse/type.py
new file mode 100644
index 0000000..27b83d3
--- /dev/null
+++ b/compilador/parse/type.py
@@ -0,0 +1,63 @@
+from typing import List
+
+from parse.base import BaseParser
+from tabla import LexToken, Token
+from astree.type import Type, BuiltinType, FuncType, FuncParam
+from errors import Error
+
+class ParseType:
+    def __init__(self, parser: BaseParser):
+        self.parser = parser
+
+    def _type(self) -> (Type | Error):
+        types = [Token.BOOLEAN, Token.CHAR, Token.INT, Token.STRING, Token.VOID]
+        tok = self.parser.want(*types)
+        if type(tok) is Error:
+            return tok
+        return BuiltinType(tok.tipo)
+
+    def prototype(self) -> (FuncType | Error):
+        params: List[FuncParam] = []
+
+        # Result type
+        tok = self._type()
+        if type(tok) is Error:
+            return tok
+        _type = tok
+
+        # (
+        tok = self.parser.want(Token.L_PAREN)
+        if type(tok) is Error:
+            return tok
+        while True:
+            # Stop at ')' without consuming it; the closing paren is
+            # consumed by want() after the loop.
+            tok = self.parser.peek(Token.R_PAREN)
+            if tok:
+                break
+
+            # Parameter type
+            tok = self._type()
+            if type(tok) is Error:
+                return tok
+            __type: Type = tok
+
+            # Identifier
+            tok = self.parser.want(Token.IDENT)
+            if type(tok) is Error:
+                return tok
+            name: str = tok.nombre
+
+            params.append(FuncParam(name = name,
+                                    _type = __type))
+
+            # ','  (without a comma the parameter list must end here)
+            tok = self.parser._try(Token.COMMA)
+            if not tok:
+                break
+
+        # )
+        tok = self.parser.want(Token.R_PAREN)
+        if type(tok) is Error:
+            return tok
+
+        return FuncType(result = _type,
+                        params = params)
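+
+
+# Shape accepted by prototype(), as inferred from the calls above (token
+# spellings belong to the lexer and are illustrative only):
+#
+#   prototype ::= type '(' [ type ident { ',' type ident } ] ')'
+#
+# e.g. a token stream for "int (int a, char b)" would produce
+#   FuncType(result = BuiltinType(Token.INT),
+#            params = [FuncParam(name = 'a', _type = BuiltinType(Token.INT)),
+#                      FuncParam(name = 'b', _type = BuiltinType(Token.CHAR))])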
diff --git a/compilador/parse/unit.py b/compilador/parse/unit.py
new file mode 100644
index 0000000..954b8b4
--- /dev/null
+++ b/compilador/parse/unit.py
@@ -0,0 +1,15 @@
+
+from errors import Error
+from astree.unit import Unit
+from parse.base import BaseParser
+from parse.decl import ParseDecl
+
+class ParseUnit:
+    def __init__(self, parser: BaseParser):
+        self.parser: BaseParser = parser
+
+    def unit(self) -> (Unit | Error):
+        decls = ParseDecl(self.parser).decls()
+        if type(decls) is Error:
+            return decls
+        return Unit(decls = decls)
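+
+
+# Minimal end-to-end sketch, assuming the lexer phase produces an iterable
+# of LexToken ending in an EOF token (lexed_tokens is a hypothetical name):
+#
+#     from more_itertools import seekable
+#     from parse.base import BaseParser
+#     from parse.unit import ParseUnit
+#
+#     toks = seekable(iter(lexed_tokens))
+#     unit = ParseUnit(BaseParser(toks)).unit()
+#     if type(unit) is Error:
+#         ...  # report the syntax error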