Commit 92eec492 authored by Moritz Leon Beier

just the assignment

parent b67e08b1

lexer module (C1Token / C1Lexer):

use logos::{Lexer, Logos};
+use crate::ParseResult;

#[derive(Logos, Debug, PartialEq, Copy, Clone)]
pub enum C1Token {
@@ -138,6 +139,9 @@ pub enum C1Token {
    // it can be named anything you wish.
    #[error]
    Error,
+
+   // Virtual token used instead of Option::None once the end of the input is reached.
+   EOF,
}
/// # Overview
@@ -206,8 +210,8 @@ impl<'a> C1Lexer<'a> {
-   /// assert_eq!(lexer.current_token(), Some(C1Token::Identifier));
+   /// assert_eq!(lexer.current_token(), C1Token::Identifier);
    /// assert_eq!(lexer.current_text(), Some("current"));
    /// ```
-   pub fn current_token(&self) -> Option<C1Token> {
-       self.current_token.token_type()
+   pub fn current_token(&self) -> C1Token {
+       self.current_token.token_type().unwrap_or(C1Token::EOF)
    }
    /// Return the C1Token variant of the next token without consuming it.
@@ -221,8 +225,8 @@ impl<'a> C1Lexer<'a> {
-   /// assert_eq!(lexer.peek_token(), Some(C1Token::Identifier));
+   /// assert_eq!(lexer.peek_token(), C1Token::Identifier);
    /// assert_eq!(lexer.peek_text(), Some("next"));
    /// ```
-   pub fn peek_token(&self) -> Option<C1Token> {
-       self.peek_token.token_type()
+   pub fn peek_token(&self) -> C1Token {
+       self.peek_token.token_type().unwrap_or(C1Token::EOF)
    }
    /// Return the text of the current token
@@ -240,6 +244,11 @@ impl<'a> C1Lexer<'a> {
        self.current_token.line_number()
    }

+   /// Return the line number of the current token without the Option wrapper, falling
+   /// back to the lexer's internal line counter once the end of the input is reached.
+   pub fn current_line_number_no_options(&self) -> usize {
+       self.current_token.line_number().unwrap_or(self.logos_line_number)
+   }
+
    /// Return the line number where the next token is located
    pub fn peek_line_number(&self) -> Option<usize> {
        self.peek_token.line_number()
@@ -270,6 +279,16 @@ impl<'a> C1Lexer<'a> {
        self.peek_token = self.next_token();
    }

+   /// Consume the current token if it matches `token`; otherwise return an error
+   /// naming the unexpected token, its line number, and the expected token.
+   pub fn expect(&mut self, token: C1Token) -> ParseResult {
+       if self.current_token() == token {
+           self.current_token = self.peek_token.take();
+           self.peek_token = self.next_token();
+           Ok(())
+       } else {
+           Err(format!(
+               "Unexpected token {:?} in line {} (expected {:?})",
+               self.current_token(),
+               self.current_line_number_no_options(),
+               token
+           ))
+       }
+   }
    /// Private method for reading the next token from the logos::Lexer and extracting the required data
    /// from it
    fn next_token(&mut self) -> Option<TokenData<'a>> {
@@ -301,7 +320,7 @@ struct TokenData<'a> {
    token_line: usize,
}

-/// Hidden trait that makes it possible to implemented the required getter functionality directly for
+/// Hidden trait that makes it possible to implement the required getter functionality directly for
/// Option<TokenData>.
trait TokenDataProvider<'a> {
    /// Return the type of the token, aka. its C1Token variant.
@@ -361,27 +380,27 @@ mod tests {
    #[test]
    fn float_recognition() {
        let lexer = C1Lexer::new("1.2");
-       assert_eq!(lexer.current_token(), Some(C1Token::ConstFloat));
+       assert_eq!(lexer.current_token(), C1Token::ConstFloat);
        let lexer = C1Lexer::new("1.000");
-       assert_eq!(lexer.current_token(), Some(C1Token::ConstFloat));
+       assert_eq!(lexer.current_token(), C1Token::ConstFloat);
        let lexer = C1Lexer::new(".2");
-       assert_eq!(lexer.current_token(), Some(C1Token::ConstFloat));
+       assert_eq!(lexer.current_token(), C1Token::ConstFloat);
        let lexer = C1Lexer::new("1.2e4");
-       assert_eq!(lexer.current_token(), Some(C1Token::ConstFloat));
+       assert_eq!(lexer.current_token(), C1Token::ConstFloat);
        let lexer = C1Lexer::new("1.2e+4");
-       assert_eq!(lexer.current_token(), Some(C1Token::ConstFloat));
+       assert_eq!(lexer.current_token(), C1Token::ConstFloat);
        let lexer = C1Lexer::new("1.2e-10");
-       assert_eq!(lexer.current_token(), Some(C1Token::ConstFloat));
+       assert_eq!(lexer.current_token(), C1Token::ConstFloat);
        let lexer = C1Lexer::new("1.2E-10");
-       assert_eq!(lexer.current_token(), Some(C1Token::ConstFloat));
+       assert_eq!(lexer.current_token(), C1Token::ConstFloat);
        let lexer = C1Lexer::new("33E+2");
-       assert_eq!(lexer.current_token(), Some(C1Token::ConstFloat));
+       assert_eq!(lexer.current_token(), C1Token::ConstFloat);
    }
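
    // Editor's sketch, not part of this commit: with the new signatures the end of the
    // input is reported as C1Token::EOF instead of None, which this test would pin down.
    #[test]
    fn eof_fallback() {
        let lexer = C1Lexer::new("");
        assert_eq!(lexer.current_token(), C1Token::EOF);
        assert_eq!(lexer.peek_token(), C1Token::EOF);
    }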
}

crate root (lib.rs):

@@ -8,5 +8,5 @@ pub use lexer::C1Lexer;
pub use lexer::C1Token;

// You will need a re-export of your C1Parser definition. Here is an example:
-// mod parser;
-// pub use parser::C1Parser;
+mod parser;
+pub use parser::C1Parser;
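
The ParseResult type returned by expect and by the parser below is not shown on this page; judging from the Ok(()) and Err(String) values it is presumably an alias along the following lines (an assumption, not taken from the commit):

// Assumed definition in the crate root, inferred from the Ok(()) / Err(format!(...)) usage.
pub type ParseResult = Result<(), String>;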

parser module (new file):

use crate::{C1Lexer, C1Token, ParseResult};
use crate::C1Token::*;

/// Recursive-descent parser over the token stream produced by C1Lexer.
pub struct C1Parser<'a> {
    lexer: C1Lexer<'a>,
}

// Operator token sets used by the expression productions below.
const COMP_OPS: [C1Token; 6] = [Equal, NotEqual, LessEqual, GreaterEqual, Less, Greater];
const EXPR_OPS: [C1Token; 3] = [Plus, Minus, Or];
const TERM_OPS: [C1Token; 3] = [Asterisk, Slash, And];

impl C1Parser<'_> {
    pub fn parse(text: &str) -> ParseResult {
        C1Parser { lexer: C1Lexer::new(text) }.program()
    }

    // program ::= { functiondefinition }
    fn program(&mut self) -> ParseResult {
        while self.lexer.current_token() != EOF {
            self.functiondefinition()?;
        }
        Ok(())
    }

    // functiondefinition ::= type Identifier "(" ")" "{" statementlist "}"
    fn functiondefinition(&mut self) -> ParseResult {
        self.type_()?;
        self.lexer.expect(Identifier)?;
        self.lexer.expect(LeftParenthesis)?;
        self.lexer.expect(RightParenthesis)?;
        self.lexer.expect(LeftBrace)?;
        self.statementlist()?;
        self.lexer.expect(RightBrace)
    }

    // type ::= KwBoolean | KwVoid | KwFloat | KwInt
    fn type_(&mut self) -> ParseResult {
        let type_tokens = [KwBoolean, KwVoid, KwFloat, KwInt];
        if type_tokens.contains(&self.lexer.current_token()) {
            self.lexer.eat();
            Ok(())
        } else {
            Err(format!(
                "Unexpected token {:?} in line {} (expected type)",
                self.lexer.current_token(),
                self.lexer.current_line_number_no_options()
            ))
        }
    }

    // statementlist ::= { block }
    fn statementlist(&mut self) -> ParseResult {
        // FIRST(block): the tokens that can start a block or statement.
        let block_start = [LeftBrace, KwIf, KwReturn, KwPrintf, Identifier];
        while block_start.contains(&self.lexer.current_token()) {
            self.block()?;
        }
        Ok(())
    }

    // block ::= "{" statementlist "}" | statement
    fn block(&mut self) -> ParseResult {
        if self.lexer.current_token() == LeftBrace {
            self.lexer.eat();
            self.statementlist()?;
            self.lexer.expect(RightBrace)
        } else {
            self.statement()
        }
    }

    // statement ::= ifstatement | returnstatement ";" | printf ";"
    //             | statassignment ";" | functioncall ";"
    fn statement(&mut self) -> ParseResult {
        match self.lexer.current_token() {
            KwIf => self.ifstatement(),
            KwReturn => { self.returnstatement()?; self.lexer.expect(Semicolon) }
            KwPrintf => { self.printf()?; self.lexer.expect(Semicolon) }
            Identifier => {
                // Disambiguate assignment vs. function call via the token after the identifier.
                if self.lexer.peek_token() == Assign {
                    self.statassignment()?;
                } else if self.lexer.peek_token() == LeftParenthesis {
                    self.functioncall()?;
                } else {
                    return Err(format!("Unexpected token in line {} (expected assignment or function call)", self.lexer.current_line_number_no_options()));
                }
                self.lexer.expect(Semicolon)
            }
            _ => Err(format!("Unexpected token in line {} (expected statement)", self.lexer.current_line_number_no_options())),
        }
    }

    // ifstatement ::= KwIf "(" assignment ")" block
    fn ifstatement(&mut self) -> ParseResult {
        self.lexer.expect(KwIf)?;
        self.lexer.expect(LeftParenthesis)?;
        self.assignment()?;
        self.lexer.expect(RightParenthesis)?;
        self.block()
    }

    // assignment ::= Identifier Assign assignment | expr
    fn assignment(&mut self) -> ParseResult {
        // Only treat this as an assignment when an identifier is directly followed by "=";
        // otherwise fall through to a plain expression.
        if self.lexer.current_token() == Identifier && self.lexer.peek_token() == Assign {
            self.lexer.eat();
            self.lexer.expect(Assign)?;
            self.assignment()
        } else {
            self.expr()
        }
    }

    // expr ::= simpexpr { COMP_OP simpexpr }
    fn expr(&mut self) -> ParseResult {
        self.simpexpr()?;
        while COMP_OPS.contains(&self.lexer.current_token()) {
            self.lexer.eat();
            self.simpexpr()?;
        }
        Ok(())
    }

    // simpexpr ::= [ Minus ] term { EXPR_OP term }
    fn simpexpr(&mut self) -> ParseResult {
        if self.lexer.current_token() == Minus {
            self.lexer.eat();
        }
        self.term()?;
        while EXPR_OPS.contains(&self.lexer.current_token()) {
            self.lexer.eat();
            self.term()?;
        }
        Ok(())
    }

    // factor ::= ConstInt | ConstFloat | ConstBoolean | functioncall | Identifier | "(" assignment ")"
    fn factor(&mut self) -> ParseResult {
        match self.lexer.current_token() {
            ConstInt | ConstFloat | ConstBoolean => self.lexer.eat(),
            LeftParenthesis => {
                self.lexer.eat();
                self.assignment()?;
                self.lexer.expect(RightParenthesis)?;
            }
            Identifier => {
                // An identifier followed by "(" is a function call, otherwise a plain variable.
                if self.lexer.peek_token() == LeftParenthesis {
                    self.functioncall()?;
                } else {
                    self.lexer.eat();
                }
            }
            _ => {
                return Err(format!("Unexpected token {:?} in line {} (expected factor)", self.lexer.current_token(), self.lexer.current_line_number_no_options()));
            }
        }
        Ok(())
    }

    // term ::= factor { TERM_OP factor }
    fn term(&mut self) -> ParseResult {
        self.factor()?;
        while TERM_OPS.contains(&self.lexer.current_token()) {
            self.lexer.eat();
            self.factor()?;
        }
        Ok(())
    }
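
    // Worked example of the precedence these three levels give: for the input
    // "1 + 2 * 3 <= 7", expr first parses simpexpr(1 + 2 * 3), i.e. term(1), "+", term(2 * 3),
    // and only then consumes the comparison "<=" followed by simpexpr(7); so "*" binds
    // tighter than "+", which in turn binds tighter than the comparison operators.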

    // statassignment ::= Identifier Assign assignment
    fn statassignment(&mut self) -> ParseResult {
        self.lexer.expect(Identifier)?;
        self.lexer.expect(Assign)?;
        self.assignment()
    }

    // returnstatement ::= KwReturn [ assignment ]
    fn returnstatement(&mut self) -> ParseResult {
        self.lexer.expect(KwReturn)?;
        if self.lexer.current_token() != Semicolon {
            self.assignment()?;
        }
        Ok(())
    }

    // printf ::= KwPrintf "(" assignment ")"
    fn printf(&mut self) -> ParseResult {
        self.lexer.expect(KwPrintf)?;
        self.lexer.expect(LeftParenthesis)?;
        self.assignment()?;
        self.lexer.expect(RightParenthesis)
    }

    // functioncall ::= Identifier "(" ")"
    fn functioncall(&mut self) -> ParseResult {
        self.lexer.expect(Identifier)?;
        self.lexer.expect(LeftParenthesis)?;
        self.lexer.expect(RightParenthesis)
    }
}
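
A couple of smoke tests along these lines could round off the parser module. This is an editor's sketch, not part of the commit; the concrete keyword and operator spellings ("int", "void", "if", "printf", "<=", "=") are assumed to match the lexer rules of the course skeleton.

#[cfg(test)]
mod tests {
    use super::C1Parser;

    #[test]
    fn accepts_a_small_program() {
        // Exercises functiondefinition, statassignment, ifstatement, printf and returnstatement.
        let src = "int f() { x = 1 + 2 * 3; if (x <= 7) { printf(x); } return x; }";
        assert!(C1Parser::parse(src).is_ok());
    }

    #[test]
    fn rejects_a_missing_parenthesis() {
        // expect(RightParenthesis) hits "{" instead, so parse must return an Err.
        assert!(C1Parser::parse("void main( { }").is_err());
    }
}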