Test case: bundle ingestion
This commit is contained in:
parent
191a73c419
commit
75a4492acf
|
@ -35,10 +35,11 @@ Eg: `pigeon identity show` becomes `./pigeon-cli show`.
|
|||
- [X] Move literals into `Pigeon` module as constants, again.
|
||||
- [X] pigeon message find
|
||||
- [X] pigeon message find-all for local feed.
|
||||
- [ ] Fork detection?
|
||||
- [X] pigeon bundle consume (We are minimally feature complete at this point)
|
||||
- [X] Fix the diagram in the spec document
|
||||
- [ ] 100% test coverage
|
||||
- [ ] Use URNs instead of multihash?
|
||||
- [ ] pigeon bundle consume (We are minimally feature complete at this point)
|
||||
- [ ] Fix the diagram in the spec document
|
||||
- [ ] Fork detection?
|
||||
- [ ] Put all the [HEADER, string, FOOTER].join("") nonsense into Pigeon::Helpers
|
||||
- [ ] Change all the `{40,90}` values in ::Lexer to real length values
|
||||
- [ ] Update the bundles.md document once `bundle consume` works.
|
||||
|
|
|
@ -14,7 +14,9 @@ module Pigeon
|
|||
end
|
||||
|
||||
# Reads a pigeon bundle file from disk and reconstructs its messages.
#
# Pipeline: raw bundle text -> Lexer token stream -> Parser output.
#
# @param file_path [String] path to a bundle file (e.g. "./example.bundle").
# @return the result of Pigeon::Parser.parse — per the spec in this commit,
#   an array of Pigeon::Message objects.
# @raise [Errno::ENOENT] if file_path does not exist.
# @raise [Pigeon::Lexer::LexError] if the bundle text cannot be tokenized.
def self.ingest(file_path)
  # NOTE(review): removed the leftover `raise "???"` WIP placeholder — it made
  # everything below unreachable, so ingest could never return messages and the
  # "ingests and reconstructs a bundle" spec could not pass.
  bundle = File.read(file_path)
  tokens = Pigeon::Lexer.tokenize(bundle)
  Pigeon::Parser.parse(tokens)
end
|
||||
|
||||
private
|
||||
|
|
|
@ -33,7 +33,6 @@ module Pigeon
|
|||
class LexError < StandardError; end
|
||||
|
||||
# Convenience class-level entry point: builds a lexer for the given bundle
# string and runs it, returning the resulting token stream.
def self.tokenize(bundle_string)
|
||||
# TODO: Maybe move #tokenize into the constructor.
|
||||
new(bundle_string).tokenize
|
||||
end
|
||||
|
||||
|
@ -122,7 +121,6 @@ module Pigeon
|
|||
# Reset the lexer to ingest the next entry.
|
||||
# If scanner.eos? == true, it will just terminate.
|
||||
|
||||
# This freezes everything:
|
||||
if scanner.scan(FOOTER_ENTRY)
|
||||
sig = scanner.matched.strip.gsub("signature ", "")
|
||||
@tokens << [:SIGNATURE, sig]
|
||||
|
|
|
@ -6,7 +6,8 @@ RSpec.describe Pigeon::Lexer do
|
|||
Pigeon::LocalIdentity.reset
|
||||
end
|
||||
|
||||
let(:tokens) { Pigeon::Lexer.tokenize(File.read("./example.bundle")) }
|
||||
let(:example_bundle) { File.read("./example.bundle") }
|
||||
let(:tokens) { Pigeon::Lexer.tokenize(example_bundle) }
|
||||
|
||||
it "parses tokens" do
|
||||
results = Pigeon::Parser.parse(tokens)
|
||||
|
@ -15,5 +16,11 @@ RSpec.describe Pigeon::Lexer do
|
|||
expect(results.last).to be_kind_of(Pigeon::Message)
|
||||
end
|
||||
|
||||
it "crashes on forged messages"
|
||||
it "ingests and reconstructs a bundle" do
|
||||
messages = Pigeon::Bundle.ingest("./example.bundle")
|
||||
expect(messages.length).to eq(2)
|
||||
expect(messages.map(&:class).uniq).to eq([Pigeon::Message])
|
||||
re_bundled = messages.map(&:render).join("\n\n") + "\n"
|
||||
expect(re_bundled).to eq(example_bundle)
|
||||
end
|
||||
end
|
||||
|
|
Loading…
Reference in New Issue