Test case: Parsing message with duplicate keys

This commit is contained in:
Netscape Navigator 2020-04-02 07:38:00 -05:00
parent 4b3905b676
commit 57a5630659
2 changed files with 20 additions and 1 deletions

View File

@@ -1,5 +1,7 @@
module Pigeon
class Parser
class DuplicateKeyError < StandardError; end
def self.parse(tokens)
self.new(tokens).parse
end
@@ -37,7 +39,7 @@ module Pigeon
def set(key, value, hash = @scratchpad)
if hash[key]
raise "Double entry detected: #{key}"
raise DuplicateKeyError, "Found duplicate keys: #{key}"
else
hash[key] = value
end

View File

@@ -9,6 +9,18 @@ RSpec.describe Pigeon::Lexer do
let(:example_bundle) { File.read("./spec/fixtures/normal.bundle") }
let(:tokens) { Pigeon::Lexer.tokenize(example_bundle) }
BAD_TOKENS = [
[:AUTHOR, "@DYdgK1KUInVtG3lS45hA1HZ-jTuvfLKsxDpXPFCve04=.ed25519"],
[:KIND, "invalid"],
[:PREV, "NONE"],
[:DEPTH, 0],
[:HEADER_END],
[:BODY_ENTRY, "duplicate", "Pigeon does not allow duplicate keys."],
[:BODY_ENTRY, "duplicate", "This key is a duplicate."],
[:SIGNATURE, "DN7yPTE-m433ND3jBL4oM23XGxBKafjq0Dp9ArBQa_TIGU7DmCxTumieuPBN-NKxlx_0N7-c5zjLb5XXVHYPCQ==.sig.ed25519"],
[:MESSAGE_END],
]
it "parses tokens" do
results = Pigeon::Parser.parse(tokens)
expect(results.length).to eq(2)
@@ -24,4 +36,9 @@ RSpec.describe Pigeon::Lexer do
re_bundled = messages.map(&:render).join("\n\n") + "\n"
expect(re_bundled).to eq(example_bundle)
end
it "finds duplicate keys" do
error = Pigeon::Parser::DuplicateKeyError
expect { Pigeon::Parser.parse(BAD_TOKENS) }.to raise_error(error)
end
end