diff --git a/lexer.go b/lexer.go
index 338a4c1..59d8670 100644
--- a/lexer.go
+++ b/lexer.go
@@ -38,9 +38,9 @@ func eatWhiteSpace(r *strings.Reader) {
 	}
 }
 
-func eatString(r *strings.Reader) string {
+func eatString(r *strings.Reader, delim rune) string {
 	var buf strings.Builder
-	buf.WriteRune('"')
+	buf.WriteRune(delim)
 	escapeCount := 0
 	for r.Len() > 0 {
 		c, _, err := r.ReadRune()
@@ -48,9 +48,9 @@ func eatString(r *strings.Reader) string {
 			panic("Lexing error while eating string")
 		}
 		buf.WriteRune(c)
-		if c == '"' && (escapeCount == 0 || escapeCount%2 == 0) {
+		if c == delim && (escapeCount == 0 || escapeCount%2 == 0) {
 			break
-		} else if c == '\n' || (c != '"' && r.Len() == 0) {
+		} else if c == '\n' || (c != delim && r.Len() == 0) {
 			panic(fmt.Sprintf("Parse error: Unclosed string on line %d ", lexLine))
 		}
 		if c == '\\' {
@@ -136,8 +136,8 @@ Loop:
 		case '\'':
 			t := eatQuote(r)
 			tokens = append(tokens, t...)
-		case '"':
-			currentString = eatString(r)
+		case '"', '`':
+			currentString = eatString(r, c)
 			tokens = append(tokens, currentString)
 			if !list {
 				break Loop
@@ -225,9 +225,9 @@ TokenizationLoop:
 		case '\'':
 			t := eatQuote(reader)
 			tokens = append(tokens, t...)
-		case '"':
-			currentString = eatString(reader)
-			tokens = append(tokens, unescapeString(currentString))
+		case '"', '`':
+			currentString = eatString(reader, c)
+			tokens = append(tokens, currentString)
 		case ';':
 			eatSingleLineComment(reader)
 		case '#':
diff --git a/parser.go b/parser.go
index c2ff24e..92e9e5d 100644
--- a/parser.go
+++ b/parser.go
@@ -101,6 +101,8 @@ func parserRead(tokens *[]string) expression {
 	}
 
 	if strings.HasPrefix(token, "\"") && strings.HasSuffix(token, "\"") {
+		return unescapeString(token[1 : len(token)-1])
+	} else if strings.HasPrefix(token, "`") && strings.HasSuffix(token, "`") {
 		return token[1 : len(token)-1]
 	} else if token == "#t" {
 		return true
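
For reference, a minimal standalone sketch of the parameterized scanner above, adapted to run outside the repo. Two assumptions: strconv.Unquote stands in for the project's unescapeString helper (not shown in this diff), and the escape-counter bookkeeping inside the `if c == '\\'` branch is inferred, since the hunk cuts off there. It demonstrates the intended split: double-quoted strings get escape processing at parse time, while backtick strings are returned verbatim with only their delimiters stripped.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// eatString mirrors the diff's logic: scan runes until an unescaped
// closing delimiter, returning the token with its delimiters intact.
// The caller is expected to have already consumed the opening delimiter.
func eatString(r *strings.Reader, delim rune) string {
	var buf strings.Builder
	buf.WriteRune(delim)
	escapeCount := 0
	for r.Len() > 0 {
		c, _, err := r.ReadRune()
		if err != nil {
			panic("Lexing error while eating string")
		}
		buf.WriteRune(c)
		if c == delim && (escapeCount == 0 || escapeCount%2 == 0) {
			break
		} else if c == '\n' || (c != delim && r.Len() == 0) {
			panic("Parse error: Unclosed string")
		}
		// Assumed bookkeeping: count consecutive backslashes so an
		// escaped delimiter does not terminate the string.
		if c == '\\' {
			escapeCount++
		} else {
			escapeCount = 0
		}
	}
	return buf.String()
}

func main() {
	// Double-quoted: the token keeps the raw escape; the parser unescapes it.
	tok := eatString(strings.NewReader(`a\nb"`), '"')
	unescaped, _ := strconv.Unquote(tok) // stand-in for unescapeString
	fmt.Printf("%q -> %q\n", tok, unescaped)

	// Backtick: delimiters are stripped only; \n stays two literal characters.
	tok = eatString(strings.NewReader("a\\nb`"), '`')
	fmt.Printf("%q -> %q\n", tok, tok[1:len(tok)-1])
}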