Mirror of https://github.com/Alvin-Zilverstand/femcode.git, synced 2026-03-06 11:06:47 +01:00.
feat: Implement booleans (Kawaii/Cringe)
This commit is contained in:
src/lexer.py (14 lines changed — presumably 14 additions and 14 deletions; counts garbled in extraction)
@@ -31,14 +31,12 @@ class Lexer:
|
||||
|
||||
if current_char == '"':
|
||||
self.pos += 1
|
||||
start_string = self.pos
|
||||
string_start = self.pos
|
||||
while self.pos < len(self.text) and self.text[self.pos] != '"':
|
||||
self.pos += 1
|
||||
if self.pos == len(self.text):
|
||||
self.error() # Unterminated string
|
||||
string = self.text[start_string:self.pos]
|
||||
string_value = self.text[string_start:self.pos]
|
||||
self.pos += 1 # Consume closing quote
|
||||
return Token('STRING', string)
|
||||
return Token('STRING', string_value)
|
||||
|
||||
if current_char.isdigit():
|
||||
start_pos = self.pos
|
||||
@@ -121,6 +119,12 @@ class Lexer:
|
||||
if re.match(r'\bPeriodt\b', self.text[self.pos:]):
|
||||
self.pos += len('Periodt')
|
||||
return Token('PERIODT', 'Periodt')
|
||||
if re.match(r'\bKawaii\b', self.text[self.pos:]):
|
||||
self.pos += len('Kawaii')
|
||||
return Token('KAWAII', True)
|
||||
if re.match(r'\bCringe\b', self.text[self.pos:]):
|
||||
self.pos += len('Cringe')
|
||||
return Token('CRINGE', False)
|
||||
|
||||
# Match identifiers
|
||||
match = re.match(r'\b[a-zA-Z_][a-zA-Z0-9_]*\b', self.text[self.pos:])
|
||||
|
||||
@@ -196,25 +196,19 @@ class Parser:
|
||||
if name_token.type != 'ID':
|
||||
raise Exception("Expected function name (ID)")
|
||||
|
||||
print(f"After function name: {self.peek_next_token()}")
|
||||
|
||||
# Parse parameters
|
||||
parameters = []
|
||||
if self.peek_next_token().type == 'LPAREN':
|
||||
self.get_next_token() # Consume '('
|
||||
print(f"After LPAREN: {self.peek_next_token()}")
|
||||
while self.peek_next_token().type != 'RPAREN':
|
||||
param_token = self.get_next_token()
|
||||
if param_token.type != 'ID':
|
||||
raise Exception("Expected parameter name (ID)")
|
||||
parameters.append(param_token.value)
|
||||
print(f"After parameter {param_token.value}: {self.peek_next_token()}")
|
||||
if self.peek_next_token().type == 'COMMA':
|
||||
self.get_next_token() # Consume ','
|
||||
print(f"After COMMA: {self.peek_next_token()}")
|
||||
# No 'elif' here, the loop condition handles the RPAREN
|
||||
self.get_next_token() # Consume ')'
|
||||
print(f"After RPAREN: {self.peek_next_token()}")
|
||||
|
||||
|
||||
if self.peek_next_token().type != 'FEMBOYCORE':
|
||||
raise Exception("Expected 'Femboycore' to start function body")
|
||||
@@ -243,7 +237,7 @@ class Parser:
|
||||
return String(token) # Now returns a String AST node
|
||||
elif token.type == 'ID':
|
||||
# Check for function call
|
||||
if self.peek_next_token().value == '(': # Assuming '(' is the next token for a function call
|
||||
if self.peek_next_token().type == 'LPAREN': # Assuming '(' is the next token for a function call
|
||||
return self.parse_function_call(token)
|
||||
return Variable(token)
|
||||
else:
|
||||
|
||||
Reference in New Issue · Block a user