Don't reuse :TERMINATOR tokens. Add tests for bundle consumption. Found bug when consuming single message (?)

Netscape Navigator 2020-03-20 09:01:02 -05:00
parent d8ec5867c0
commit 33cf84fa7b
2 changed files with 58 additions and 10 deletions
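
In short: the lexer previously emitted the same generic [:TERMINATOR] token at the end of the header, the body, and the footer, so a consumer of the token stream could not tell which section boundary it had just crossed. This commit replaces that shared token with three distinct ones. A rough before/after sketch of one tokenized message (values elided):

    # before: every section boundary looked identical
    [[:AUTHOR, "..."], [:DEPTH, 0], [:TERMINATOR],
     [:BODY_ENTRY, "key1", "..."], [:TERMINATOR],
     [:SIGNATURE, "..."], [:TERMINATOR]]

    # after: each boundary names the section it closes
    [[:AUTHOR, "..."], [:DEPTH, 0], [:HEADER_END],
     [:BODY_ENTRY, "key1", "..."], [:BODY_END],
     [:SIGNATURE, "..."], [:MESSAGE_END]]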

dist/pigeon/lexer.rb

@@ -37,13 +37,14 @@ module Pigeon
     def tokenize
       until scanner.eos?
         puts scanner.matched || "No match"
         case @state
         when HEADER then do_header
         when BODY then do_body
         when FOOTER then do_footer
         end
       end
+      maybe_end_message!
       return tokens
     end

     private
@ -59,8 +60,9 @@ module Pigeon
raise "Syntax error at #{scanner.pos}"
end
def add_terminator!
@tokens << [:TERMINATOR]
# This might be a mistake or uneccessary. NN 20 MAR 2020
def maybe_end_message!
@tokens << [:MESSAGE_END] unless tokens.last.last == :MESSAGE_END
end
def do_header
@@ -90,7 +92,7 @@ module Pigeon
       if scanner.scan(SEPERATOR)
         @state = BODY
-        add_terminator!
+        @tokens << [:HEADER_END]
         return
       end

       flunk!
@@ -105,7 +107,7 @@ module Pigeon
       if scanner.scan(SEPERATOR)
         @state = FOOTER
-        add_terminator!
+        @tokens << [:BODY_END]
         return
       end
@@ -124,10 +126,11 @@ module Pigeon
       if scanner.scan(SEPERATOR)
         @state = HEADER
-        add_terminator!
-      else
-        raise "Parse error at #{scanner.pos}. Did you add two carriage returns?"
+        maybe_end_message!
+        return
       end
+
+      raise "Parse error at #{scanner.pos}. Double carriage return not found."
     end
   end
 end
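
A note on maybe_end_message! as committed: if @tokens is empty when it runs (for example, tokenizing an empty string), tokens.last is nil and tokens.last.last raises NoMethodError instead of skipping the append. A minimal hardening sketch, assuming Ruby 2.3+ safe navigation; this is not part of the commit:

    # sketch only: &. lets the guard survive an empty token list,
    # where tokens.last is nil
    def maybe_end_message!
      @tokens << [:MESSAGE_END] unless tokens.last&.last == :MESSAGE_END
    end

The guard itself is sound for non-empty streams: [:MESSAGE_END].last == :MESSAGE_END, while a token like [:SIGNATURE, "..."] ends in its value and never matches.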


@@ -1,7 +1,52 @@
 require "spec_helper"

 RSpec.describe Pigeon::Lexer do
-  it "tokenizes the bundle" do
-    Pigeon::Lexer.tokenize(File.read("./example.bundle"))
-  end
+  EXPECTED_TOKENS = [
+    [:AUTHOR, "@DYdgK1KUInVtG3lS45hA1HZ-jTuvfLKsxDpXPFCve04=.ed25519"],
+    [:KIND, "scratch_pad"],
+    [:PREV, "NONE"],
+    [:DEPTH, 0],
+    [:HEADER_END],
+    [:BODY_ENTRY, "key1", "\"my_value\\n\""],
+    [:BODY_ENTRY, "key2", "\"my_value2\""],
+    [:BODY_ENTRY, "key3", "\"my_value3\""],
+    [:BODY_ENTRY, "key4", "%jvKh9yoiEJaePzoWCF1nnqpIlPgTk9FHEtqczQbvzGM=.sha256"],
+    [:BODY_ENTRY, "key5", "&29f3933302c49c60841d7620886ce54afc68630242aee6ff683926d2465e6ca3.sha256"],
+    [:BODY_ENTRY, "key6", "@galdahnB3L2DE2cTU0Me54IpIUKVEgKmBwvZVtWJccg=.ed25519"],
+    [:BODY_END],
+    [:SIGNATURE, "DN7yPTE-m433ND3jBL4oM23XGxBKafjq0Dp9ArBQa_TIGU7DmCxTumieuPBN-NKxlx_0N7-c5zjLb5XXVHYPCQ==.sig.ed25519"],
+    [:MESSAGE_END],
+    [:AUTHOR, "@DYdgK1KUInVtG3lS45hA1HZ-jTuvfLKsxDpXPFCve04=.ed25519"],
+    [:KIND, "second_test"],
+    [:PREV, "%ZTBmYWZlMGU0Nzg0ZWZlYjA5NjA0MzdlZWVlNTBiMmY4ODEyZWI1NTZkODcwN2FlMDQxYThmMDExNTNhM2E4NQ==.sha256"],
+    [:DEPTH, 1],
+    [:HEADER_END],
+    [:BODY_ENTRY, "hello", "\"world\""],
+    [:BODY_END],
+    [:SIGNATURE, "AerpDKbKRrcaM9wihwFsPC4YRAfYWie5XFEKAdnxQom7MTvsXd9W39AvHfljJnEePZpsQVdfq2TtBPoQHc-MCw==.sig.ed25519"],
+    [:MESSAGE_END],
+  ]
+
+  let(:message) do
+    draft = Pigeon::Draft.create(kind: "unit_test")
+    hash = Pigeon::Storage.current.set_blob(File.read("./logo.png"))
+    draft["a"] = "bar"
+    draft["b"] = hash
+    Pigeon::Message.publish(draft)
+  end
+
+  it "tokenizes a bundle" do
+    bundle = File.read("./example.bundle")
+    tokens = Pigeon::Lexer.tokenize(bundle)
+    EXPECTED_TOKENS.each_with_index do |item, i|
+      expect(tokens[i]).to eq(EXPECTED_TOKENS[i])
+    end
+  end
+
+  it "tokenizes a single message" do
+    fail([
+      "This currently freezes the lexer.",
+      "Maybe I need to add a scanner.peek call or sth?",
+    ].join(" "))
+    tokens = Pigeon::Lexer.tokenize(message.render)
+  end
 end
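
On the spec that freezes: an until scanner.eos? loop only terminates if every iteration consumes input. If one of the do_* handlers can match zero characters, or takes a path that neither scans nor raises, scanner.pos never advances and the loop spins forever, which fits the "freezes the lexer" symptom when tokenizing a single rendered message. A diagnostic sketch, not part of this commit, using only StringScanner#pos; it turns the silent hang into an error that points at the stuck offset:

    # sketch only: fail loudly when a pass through the state machine
    # consumes no input, instead of looping on the same position forever
    def tokenize
      until scanner.eos?
        pos_before = scanner.pos
        case @state
        when HEADER then do_header
        when BODY then do_body
        when FOOTER then do_footer
        end
        raise "Lexer stuck at #{scanner.pos} in state #{@state}" if scanner.pos == pos_before
      end
      maybe_end_message!
      return tokens
    end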