Remove --debug option from the lex command
nihei9 committed Sep 8, 2021
1 parent 6ae619c commit 6332aaf
Showing 2 changed files with 1 addition and 63 deletions.
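
For orientation, here is a minimal sketch (not part of the commit) of what a call site looks like once the logging option is gone: the lexer is built directly from the compiled spec and a source reader. Only driver.NewLexer, spec.CompiledLexSpec, and os.Stdin appear in the diff below; the package and helper names are illustrative.

package lexexample

import (
	"os"

	"github.com/nihei9/maleeni/driver"
	"github.com/nihei9/maleeni/spec"
)

// newLexerFromStdin is an illustrative helper, not code from the repository.
// After this commit there is no EnableLogging option to pass, so the lexer
// is constructed from the compiled spec and the source reader alone.
func newLexerFromStdin(clspec *spec.CompiledLexSpec) (*driver.Lexer, error) {
	return driver.NewLexer(clspec, os.Stdin)
}
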
29 changes: 1 addition & 28 deletions cmd/maleeni/lex.go
@@ -5,15 +5,13 @@ import (
"fmt"
"io/ioutil"
"os"
"time"

"github.com/nihei9/maleeni/driver"
"github.com/nihei9/maleeni/spec"
"github.com/spf13/cobra"
)

var lexFlags = struct {
debug *bool
source *string
output *string
breakOnError *bool
@@ -32,7 +30,6 @@ your lexical specification that is set passively, lexemes in that mode will not
Args: cobra.ExactArgs(1),
RunE: runLex,
}
lexFlags.debug = cmd.Flags().BoolP("debug", "d", false, "enable logging")
lexFlags.source = cmd.Flags().StringP("source", "s", "", "source file path (default stdin)")
lexFlags.output = cmd.Flags().StringP("output", "o", "", "output file path (default stdout)")
lexFlags.breakOnError = cmd.Flags().BoolP("break-on-error", "b", false, "break lexical analysis with exit status 1 immediately when an error token appears.")
@@ -45,30 +42,6 @@ func runLex(cmd *cobra.Command, args []string) (retErr error) {
return fmt.Errorf("Cannot read a compiled lexical specification: %w", err)
}

var opts []driver.LexerOption
if *lexFlags.debug {
fileName := "maleeni-lex.log"
f, err := os.OpenFile(fileName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644)
if err != nil {
return fmt.Errorf("Cannot open the log file %s: %w", fileName, err)
}
defer f.Close()
fmt.Fprintf(f, `maleeni lex starts.
Date time: %v
---
`, time.Now().Format(time.RFC3339))
defer func() {
fmt.Fprintf(f, "---\n")
if retErr != nil {
fmt.Fprintf(f, "maleeni lex failed: %v\n", retErr)
} else {
fmt.Fprintf(f, "maleeni lex succeeded.\n")
}
}()

opts = append(opts, driver.EnableLogging(f))
}

var lex *driver.Lexer
{
src := os.Stdin
@@ -80,7 +53,7 @@ Date time: %v
defer f.Close()
src = f
}
lex, err = driver.NewLexer(clspec, src, opts...)
lex, err = driver.NewLexer(clspec, src)
if err != nil {
return err
}
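
With the CLI flag gone, the driver no longer writes a maleeni-lex.log file. A caller that still wants a trace can wrap the lexer itself; the sketch below is hypothetical and not part of this change, relying only on the public Next method and the Token.Invalid field visible in the diff.

package lexexample

import (
	"fmt"
	"io"

	"github.com/nihei9/maleeni/driver"
)

// loggingLexer is a hypothetical wrapper, not part of maleeni. It forwards
// Next to the underlying lexer and writes a simple trace to w, roughly
// standing in for what the removed --debug option used to log.
type loggingLexer struct {
	lex *driver.Lexer
	w   io.Writer
}

func (l *loggingLexer) Next() (*driver.Token, error) {
	tok, err := l.lex.Next()
	if err != nil {
		fmt.Fprintf(l.w, "error: %v\n", err)
		return nil, err
	}
	fmt.Fprintf(l.w, "token: %v (invalid: %v)\n", tok, tok.Invalid)
	return tok, nil
}
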
35 changes: 0 additions & 35 deletions driver/lexer.go
@@ -7,7 +7,6 @@ import (
"io/ioutil"
"strings"

"github.com/nihei9/maleeni/log"
"github.com/nihei9/maleeni/spec"
)

@@ -146,17 +145,6 @@ func DisableModeTransition() LexerOption {
}
}

func EnableLogging(w io.Writer) LexerOption {
return func(l *Lexer) error {
logger, err := log.NewLogger(w)
if err != nil {
return err
}
l.logger = logger
return nil
}
}

type Lexer struct {
clspec *spec.CompiledLexSpec
src []byte
@@ -168,7 +156,6 @@ type Lexer struct {
tokBuf []*Token
modeStack []spec.LexModeID
passiveModeTran bool
logger log.Logger
}

func NewLexer(clspec *spec.CompiledLexSpec, src io.Reader, opts ...LexerOption) (*Lexer, error) {
@@ -186,65 +173,43 @@ func NewLexer(clspec *spec.CompiledLexSpec, src io.Reader, opts ...LexerOption)
clspec.InitialModeID,
},
passiveModeTran: false,
logger: log.NewNopLogger(),
}
for _, opt := range opts {
err := opt(l)
if err != nil {
return nil, err
}
}
l.logger.Log("Initializing the lexer finished.")

return l, nil
}

func (l *Lexer) Next() (*Token, error) {
l.logger.Log(`lexer#Next():
State:
mode: #%v %v
pointer: %v
token buffer: %v`, l.Mode(), l.clspec.ModeNames[l.Mode()], l.srcPtr, l.tokBuf)

if len(l.tokBuf) > 0 {
tok := l.tokBuf[0]
l.tokBuf = l.tokBuf[1:]
l.logger.Log(` Returns a buffered token:
token: %v
token buffer: %v`, tok, l.tokBuf)
return tok, nil
}

tok, err := l.nextAndTransition()
if err != nil {
l.logger.Log(" Detectes an error: %v", err)
return nil, err
}
l.logger.Log(" Detects a token: %v", tok)
if !tok.Invalid {
l.logger.Log(` Returns a token:
token: %v
token buffer: %v`, tok, l.tokBuf)
return tok, nil
}
errTok := tok
for {
tok, err = l.nextAndTransition()
if err != nil {
l.logger.Log(" Detectes an error: %v", err)
return nil, err
}
l.logger.Log(" Detects a token: %v", tok)
if !tok.Invalid {
break
}
errTok.match = errTok.match.merge(tok.match)
l.logger.Log(" error token: %v", errTok)
}
l.tokBuf = append(l.tokBuf, tok)
l.logger.Log(` Returns a token:
token: %v
token buffer: %v`, errTok, l.tokBuf)

return errTok, nil
}
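
Note that the functional-option mechanism itself survives the commit; only EnableLogging and the logger field are removed. As a hedged sketch, the remaining option visible in this diff, DisableModeTransition, can still be passed through NewLexer's variadic opts parameter; the package and helper names below are illustrative.

package lexexample

import (
	"strings"

	"github.com/nihei9/maleeni/driver"
	"github.com/nihei9/maleeni/spec"
)

// newPassiveLexer is an illustrative helper, not code from the repository.
// EnableLogging is gone, but NewLexer still accepts other LexerOption values,
// such as DisableModeTransition, through its variadic opts parameter.
func newPassiveLexer(clspec *spec.CompiledLexSpec, src string) (*driver.Lexer, error) {
	return driver.NewLexer(clspec, strings.NewReader(src), driver.DisableModeTransition())
}
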
