feature: parser

This commit is contained in:
xkm
2025-12-04 18:20:07 +08:00
parent 43aac96559
commit bb714b68b1
5 changed files with 368 additions and 0 deletions

27
main.go
View File

@@ -1 +1,28 @@
package main
import (
"fmt"
"log"
"os"
"sfdl/parser"
"sfdl/scanner"
)
// main reads a source-file path from the command line, builds a
// scanner and parser over it, and reports success.
//
// The parser signals syntax errors by panicking (see Parser.errorf);
// recover here so the user sees a clean one-line diagnostic and a
// non-zero exit status instead of a Go stack trace.
func main() {
	if len(os.Args) < 2 {
		fmt.Println("usage: sfdl <source-file>")
		return
	}
	filename := os.Args[1]
	sc, err := scanner.NewScannerFromFile(filename)
	if err != nil {
		log.Fatalf("open file failed: %v", err)
	}
	defer func() {
		if r := recover(); r != nil {
			log.Fatalf("%v", r)
		}
	}()
	p := parser.NewParser(sc)
	p.Parse()
	fmt.Println("Parse OK.")
}

View File

@@ -1 +1,52 @@
package parser
import "sfdl/scanner"
// ExprNode is one node of the expression syntax tree built by the
// parser. Which fields are meaningful depends on Op:
//   - CONST_ID: Value holds the numeric literal.
//   - T: the loop parameter; no extra data.
//   - FUNC: Func holds the function, Left its single argument.
//   - binary operators (PLUS, MINUS, MUL, DIV, POWER): Left and Right
//     hold the two operands.
type ExprNode struct {
	Op    scanner.TokenType
	Left  *ExprNode
	Right *ExprNode
	Value float64
	Func  func(float64) float64
}
// NewConstNode builds a leaf node holding the numeric literal v.
func NewConstNode(v float64) *ExprNode {
	node := &ExprNode{Op: scanner.CONST_ID}
	node.Value = v
	return node
}
// NewTNode builds a leaf node representing the loop parameter T.
func NewTNode() *ExprNode {
	node := new(ExprNode)
	node.Op = scanner.T
	return node
}
// NewFuncNode builds a function-application node: fn applied to the
// single argument child.
func NewFuncNode(fn func(float64) float64, child *ExprNode) *ExprNode {
	node := &ExprNode{Op: scanner.FUNC, Func: fn}
	node.Left = child
	return node
}
// NewBinaryNode builds an interior node for the binary operator op
// with the given left and right operand subtrees.
func NewBinaryNode(op scanner.TokenType, left, right *ExprNode) *ExprNode {
	node := new(ExprNode)
	node.Op = op
	node.Left = left
	node.Right = right
	return node
}
// NewUnaryNode applies a unary sign operator to child. Unary plus is
// the identity, and unary minus is rewritten as the binary expression
// (0 - child) so later stages only have to handle binary MINUS. Any
// other operator falls through to child unchanged.
func NewUnaryNode(op scanner.TokenType, child *ExprNode) *ExprNode {
	switch op {
	case scanner.MINUS:
		// -x  →  0 - x
		return NewBinaryNode(scanner.MINUS, NewConstNode(0), child)
	default:
		// Includes PLUS: +x is just x.
		return child
	}
}

View File

@@ -1 +1,197 @@
package parser
import (
"fmt"
"sfdl/scanner"
)
// Parser is a recursive-descent parser over the token stream produced
// by sc. tok is the single-token lookahead: always the next token not
// yet consumed.
type Parser struct {
	sc  *scanner.Scanner
	tok scanner.Token
}
// NewParser wraps sc in a Parser and primes the one-token lookahead.
func NewParser(sc *scanner.Scanner) *Parser {
	parser := &Parser{sc: sc}
	parser.nextToken()
	return parser
}
// nextToken advances the lookahead to the next token from the scanner.
func (p *Parser) nextToken() {
	p.tok = p.sc.GetToken()
}
// errorf aborts parsing by panicking with a formatted message prefixed
// with "Syntax error: ".
func (p *Parser) errorf(format string, args ...interface{}) {
	panic("Syntax error: " + fmt.Sprintf(format, args...))
}
// match consumes the current token when it has type t; otherwise it
// reports a syntax error (which panics).
func (p *Parser) match(t scanner.TokenType) {
	if p.tok.Type == t {
		p.nextToken()
		return
	}
	p.errorf("expect %v, got %v (lexeme=%q)",
		t, p.tok.Type, p.tok.Lexeme)
}
// parseExpression parses: Expression → Term { (PLUS | MINUS) Term },
// folding the terms into a left-associative chain of binary nodes.
func (p *Parser) parseExpression() *ExprNode {
	node := p.parseTerm()
	for {
		op := p.tok.Type
		if op != scanner.PLUS && op != scanner.MINUS {
			return node
		}
		p.match(op)
		node = NewBinaryNode(op, node, p.parseTerm())
	}
}
// parseTerm parses: Term → Factor { (MUL | DIV) Factor }, folding the
// factors into a left-associative chain of binary nodes.
func (p *Parser) parseTerm() *ExprNode {
	node := p.parseFactor()
	for {
		op := p.tok.Type
		if op != scanner.MUL && op != scanner.DIV {
			return node
		}
		p.match(op)
		node = NewBinaryNode(op, node, p.parseFactor())
	}
}
// parseFactor parses: Factor → (PLUS | MINUS) Factor | Component.
// A leading sign applies to the entire factor that follows it.
func (p *Parser) parseFactor() *ExprNode {
	switch op := p.tok.Type; op {
	case scanner.PLUS, scanner.MINUS:
		p.match(op)
		return NewUnaryNode(op, p.parseFactor())
	default:
		return p.parseComponent()
	}
}
// parseComponent parses: Component → Atom [ POWER Component ].
// Recursing into parseComponent for the right-hand side makes the
// POWER operator right-associative.
func (p *Parser) parseComponent() *ExprNode {
	base := p.parseAtom()
	if p.tok.Type != scanner.POWER {
		return base
	}
	p.match(scanner.POWER)
	return NewBinaryNode(scanner.POWER, base, p.parseComponent())
}
// parseAtom parses: Atom → CONST_ID
//
//	| T
//	| FUNC L_BRACKET Expression R_BRACKET
//	| L_BRACKET Expression R_BRACKET
func (p *Parser) parseAtom() *ExprNode {
	switch p.tok.Type {
	case scanner.CONST_ID:
		// Grab the literal's value before advancing past the token.
		value := p.tok.Value
		p.nextToken()
		return NewConstNode(value)
	case scanner.T:
		p.nextToken()
		return NewTNode()
	case scanner.FUNC:
		// Grab the function pointer before the token is consumed.
		fn := p.tok.Func
		p.nextToken()
		p.match(scanner.L_BRACKET)
		arg := p.parseExpression()
		p.match(scanner.R_BRACKET)
		return NewFuncNode(fn, arg)
	case scanner.L_BRACKET:
		// Parenthesized sub-expression.
		p.match(scanner.L_BRACKET)
		inner := p.parseExpression()
		p.match(scanner.R_BRACKET)
		return inner
	}
	p.errorf("unexpected token in Atom: %v (lexeme=%q)", p.tok.Type, p.tok.Lexeme)
	return nil // unreachable: errorf panics
}
// Parse consumes statements until end of input; each statement must be
// terminated by a semicolon.
func (p *Parser) Parse() {
	for p.tok.Type != scanner.NONTOKEN {
		p.parseStatement()
		p.match(scanner.SEMICO)
	}
}
// parseStatement dispatches on the statement's leading keyword.
//
// NOTE(review): the sample programs and scanner tests also contain
// COLOR and SIZE statements ("color is blue;", "size is 10;"), which
// this dispatcher does not handle — confirm whether those are still
// to be implemented.
func (p *Parser) parseStatement() {
	start := p.tok.Type
	switch start {
	case scanner.ORIGIN:
		p.parseOriginStatement()
	case scanner.SCALE:
		p.parseScaleStatement()
	case scanner.ROT:
		p.parseRotStatement()
	case scanner.FOR:
		p.parseForStatement()
	default:
		p.errorf("unknown statement start: %v (lexeme=%q)", start, p.tok.Lexeme)
	}
}
// parseOriginStatement parses: ORIGIN IS ( Expression , Expression ).
func (p *Parser) parseOriginStatement() {
	p.match(scanner.ORIGIN)
	p.match(scanner.IS)
	p.match(scanner.L_BRACKET)
	x := p.parseExpression()
	p.match(scanner.COMMA)
	y := p.parseExpression()
	p.match(scanner.R_BRACKET)
	// todo: hand the expressions to the semantic stage,
	// e.g. semantices.SetOrigin(x, y)
	fmt.Println("Parsed ORIGIN", x, y)
}
// parseScaleStatement parses: SCALE IS ( Expression , Expression ).
func (p *Parser) parseScaleStatement() {
	p.match(scanner.SCALE)
	p.match(scanner.IS)
	p.match(scanner.L_BRACKET)
	x := p.parseExpression()
	p.match(scanner.COMMA)
	y := p.parseExpression()
	p.match(scanner.R_BRACKET)
	fmt.Println("Parsed SCALE", x, y)
}
// parseRotStatement parses: ROT IS Expression.
func (p *Parser) parseRotStatement() {
	p.match(scanner.ROT)
	p.match(scanner.IS)
	angle := p.parseExpression()
	fmt.Println("Parsed ROT", angle)
}
// parseForStatement parses:
//
//	FOR T FROM Expression TO Expression STEP Expression
//	DRAW ( Expression , Expression )
func (p *Parser) parseForStatement() {
	p.match(scanner.FOR)
	p.match(scanner.T)
	p.match(scanner.FROM)
	from := p.parseExpression()
	p.match(scanner.TO)
	to := p.parseExpression()
	p.match(scanner.STEP)
	stepExpr := p.parseExpression()
	p.match(scanner.DRAW)
	p.match(scanner.L_BRACKET)
	x := p.parseExpression()
	p.match(scanner.COMMA)
	y := p.parseExpression()
	p.match(scanner.R_BRACKET)
	fmt.Println("Parsed FOR", from, to, stepExpr, x, y)
	// todo: hand the expressions to the semantic stage,
	// e.g. semantices.DrawLoop(from, to, stepExpr, x, y)
}

88
scanner/scanner_test.go Normal file
View File

@@ -0,0 +1,88 @@
package scanner
import (
"testing"
)
// TestSimpleLine checks the token stream of one ORIGIN statement,
// including the trailing NONTOKEN at end of input.
func TestSimpleLine(t *testing.T) {
	sc := NewScannerFromString("origin is (1, 2);")
	want := []TokenType{
		ORIGIN, IS, L_BRACKET, CONST_ID, COMMA,
		CONST_ID, R_BRACKET, SEMICO, NONTOKEN,
	}
	for i, wt := range want {
		tok := sc.GetToken()
		t.Logf("%s: type %d, value: %f, func: %p", tok.Lexeme, tok.Type, tok.Value, tok.Func)
		if tok.Type != wt {
			t.Fatalf("token %d: expect %v, got %v (lexeme=%q)",
				i, wt, tok.Type, tok.Lexeme)
		}
	}
}
// TestMultiLine scans a realistic multi-statement program (with "--"
// comments and several figures) and checks only the first eleven
// tokens; the rest of the input simply must scan without issue.
func TestMultiLine(t *testing.T) {
	src := `
-- Arrow
color is (255,255,0);
scale is (1, 1);
size is 10;
origin is (450, 450);
rot is pi;
for t from 0 to 400 step 1 draw( t, t );
-- 心形曲线
origin is (200, 200);
rot is pi/2;
size is 5;
-- 圆润型
color is blue;
scale is (50, 50);
for t from -pi to pi step pi/200 draw((2*cos(t) - cos(2*t)), (2*sin(t)-sin(2*t)) );
-- 尖锐型
origin is (200+80, 200+80);
color is red;
rot is pi;
scale is (8, 8);
for t from 0 to 2*pi step pi/200 draw(
16*(sin(t)**3),
13*cos(t) - 5*cos(2*t) - 2*cos(3*t)-cos(4*t)
);
`
	sc := NewScannerFromString(src)
	want := []TokenType{
		COLOR, IS, L_BRACKET, CONST_ID, COMMA,
		CONST_ID, COMMA, CONST_ID, R_BRACKET, SEMICO,
		SCALE,
	}
	for i, wt := range want {
		tok := sc.GetToken()
		t.Logf("%s: type %d, value: %f, func: %p", tok.Lexeme, tok.Type, tok.Value, tok.Func)
		if tok.Type != wt {
			t.Fatalf("token %d: expect %v, got %v (lexeme=%q)",
				i, wt, tok.Type, tok.Lexeme)
		}
	}
}
// TestWrongNumber feeds malformed input ("123.456abc") and drains the
// scanner, logging each token; it asserts nothing beyond termination
// (the scanner must eventually emit NONTOKEN rather than loop forever).
func TestWrongNumber(t *testing.T) {
	s := NewScannerFromString("123.456abc")
	for {
		tok := s.GetToken()
		t.Logf("%s: type %d, value: %f, func: %p", tok.Lexeme, tok.Type, tok.Value, tok.Func)
		if tok.Type == NONTOKEN {
			return
		}
	}
}

6
test.sfdl Normal file
View File

@@ -0,0 +1,6 @@
origin is (100, 300);
rot is 0;
scale is (1, 1);
-- 画一个简单的函数
for t from 0 to 10 step 0.5 draw (t, t*t);