Partially fix the frozen lexer issue (runaway tokenizer loop)
This commit is contained in:
parent
50728795b5
commit
7d05f28f5d
|
@ -277,7 +277,6 @@ require_relative File.join("pigeon", "message_serializer.rb")
|
|||
require_relative File.join("pigeon", "draft_serializer.rb")
|
||||
require_relative File.join("pigeon", "message.rb")
|
||||
require_relative File.join("pigeon", "draft.rb")
|
||||
require_relative File.join("pigeon", "string_scanner.rb")
|
||||
require_relative File.join("pigeon", "lexer.rb")
|
||||
require_relative File.join("pigeon", "parser.rb")
|
||||
require_relative File.join("pigeon", "database.rb")
|
||||
|
|
|
@ -1,28 +0,0 @@
|
|||
module Pigeon
  # Minimal StringScanner replacement used by the lexer.
  # Tracks a cursor (@pos) into a frozen input string and advances it
  # as tokens are matched with `scan`.
  class StringScanner
    attr_reader :pos, :matched, :string

    # Upper bound on how many characters a single token may span.
    # Used to cap the lookahead window in `scan`.
    MAX_TOKEN_SIZE = 500

    # @param string [String] the input to tokenize; frozen to guard
    #   against mutation while the scanner holds a cursor into it.
    def initialize(string)
      @string = string.freeze
      @pos = 0
      @matched = ""
    end

    # @return [Boolean] true once the cursor has consumed the whole input.
    #
    # BUG FIX: the original compared `@pos == @string.length - 1` (true one
    # character *before* the end, never at it) and then ended with a debug
    # `puts`, so the method always returned nil — the lexer's loop guard
    # was permanently falsy, producing the runaway loop.
    def eos?
      @pos >= @string.length
    end

    # Attempts to match `regex` within the next MAX_TOKEN_SIZE characters
    # starting at the cursor. On success, advances @pos past the match
    # (including any skipped prefix, as before) and returns the matched
    # text; returns nil on no match.
    #
    # BUG FIX: the original sliced `@string[@pos...MAX_TOKEN_SIZE]`, an
    # absolute end index — the window shrank as @pos grew and vanished
    # once @pos passed MAX_TOKEN_SIZE. Use a length-bounded slice instead.
    def scan(regex)
      @last = regex # retained for debugging parity with the original
      window = @string[@pos, MAX_TOKEN_SIZE]
      match = window && regex.match(window)
      return unless match

      @pos += match.end(0)
      @matched = match[0]
    end
  end
end
|
|
@ -167,14 +167,13 @@ RSpec.describe Pigeon::Message do
|
|||
# Regression spec: a bug noted during development caused a runaway
# loop in the tokenizer when a draft key/value contained "\nVUx0hC3".
it "handles this key: '\\nVUx0hC3'" do
  pending("Known bug- will fix after writing docs.")
  db.delete_current_draft
  db.new_draft(kind: "unit_test")
  db.update_draft("\nVUx0hC3", "n")
  db.update_draft("n", "\nVUx0hC3")
  boom = lambda { Pigeon::Lexer.tokenize(db.publish_draft.render) }
  Timeout.timeout(0.5) do
    expect(boom).to raise_error(Pigeon::Lexer::LexError)
    expect(boom).to raise_error("RUNAWAY LOOP DETECTED")
  end
end
|
||||
|
||||
|
|
Loading…
Reference in New Issue