(FAILING) Finish all the regexes required for a bundle lexer
This commit is contained in:
parent
f94200750b
commit
decc2f83a0
|
@ -35,9 +35,11 @@ Eg: `pigeon identity show` becomes `./pigeon-cli show`.
|
|||
- [X] Move literals into `Pigeon` module as constants, again.
|
||||
- [X] pigeon message find
|
||||
- [X] pigeon message find-all for local feed.
|
||||
- [ ] pigeon bundle consume
|
||||
- [ ] Change all the `{40,90}` values in ::Lexer to real length values
|
||||
- [ ] Create regexes in ::Lexer using strings and Regexp.new() for cleaner regexes.
|
||||
- [ ] pigeon message find-all for peer feed. I will need to add an index for `author => message_count`
|
||||
- [ ] refactor `Bundle.create` to use `message find-all`.
|
||||
- [ ] pigeon bundle consume
|
||||
- [ ] add parsers and validators for all CLI inputs
|
||||
- [ ] Ensure all disk writes perform verification!
|
||||
- [ ] Remove all `.current` "singletons" / hacks
|
||||
|
|
|
@ -1,5 +1,72 @@
|
|||
module Pigeon
|
||||
class Bundle
|
||||
# Tokenizer for Pigeon bundle files.
# Work in progress: the token regexes are defined, but `tokenize` only
# recognizes the :header state and raises "WIP" immediately.
class Lexer
  # No *_VALUE can be > 128 chars.
  MAX_CHUNK_SIZE = 128
  # TODO: Change all the `{40,90}` values in ::Lexer to real values
  # TODO: Create regexes using string and Regexp.new() for cleaner regexes.

  # Multihash-style values. The dot before the suffix is escaped so it
  # only matches a literal "." (it previously matched any character).
  FEED_VALUE = /@.{40,90}\.ed25519/
  DEPTH_COUNT = /\d{1,7}/
  MESG_VALUE = /%.{40,90}\.sha256/
  BLOB_VALUE = /&.{40,90}\.sha256/
  NULL_VALUE = /NONE/
  STRG_VALUE = /".{1,64}"/
  ALPHANUMERICISH = /[a-zA-Z\d\.]{1,64}/
  # Union of every value type, used for the right-hand side of a body entry.
  ALL_VALUES = [
    FEED_VALUE,
    MESG_VALUE,
    NULL_VALUE,
    STRG_VALUE,
    BLOB_VALUE,
  ].map(&:source).join("|")
  ANY_VALUE = Regexp.new(ALL_VALUES)

  # NOTE(review): "SEPERATOR" misspelling kept — renaming the constant
  # would break any external references.
  SEPERATOR = /\n\n/
  AUTHOR = /author #{FEED_VALUE}/
  DEPTH = /depth #{DEPTH_COUNT}/
  PREV = /prev (#{MESG_VALUE}|#{NULL_VALUE})/
  KIND = /kind #{ALPHANUMERICISH}/
  BODY_ENTRY = /#{ALPHANUMERICISH}:#{ANY_VALUE}\n/

  # FIX: was /signature .*{40,90}\.sig\.ed25519/ — `{40,90}` there
  # quantifies `*` (a nested repeat, rejected by Ruby's regex engine).
  # Quantify `.` directly, matching the pattern of the other values.
  FOOTER = /signature .{40,90}\.sig\.ed25519/

  # Convenience wrapper so callers don't have to instantiate the lexer.
  def self.tokenize(bundle_string)
    new.tokenize(bundle_string)
  end

  # Lexer state stack, lazily initialized.
  def stack
    @stack ||= []
  end

  # Current state; :header when the stack is empty.
  def state
    stack.last || :header
  end

  def push_state(state)
    stack.push(state)
  end

  def pop_state
    stack.pop
  end

  # Placeholder — header scanning not implemented yet.
  def scan_header(scanner)
  end

  # WIP: walks the bundle with a StringScanner. Only the :header state
  # exists so far, and it raises immediately.
  def tokenize(bundle_string)
    scanner = StringScanner.new(bundle_string)
    tokens = []
    until scanner.eos?
      case state
      when :header
        raise "WIP"
      else
        raise "Bad state?"
      end
    end
  end
end
|
||||
|
||||
def self.create(file_path = DEFAULT_BUNDLE_PATH)
|
||||
s = Pigeon::Storage.current
|
||||
last = s.message_count
|
||||
|
@ -14,11 +81,16 @@ module Pigeon
|
|||
end
|
||||
|
||||
# Reads a bundle file from +file_path+ and imports its contents.
# Not implemented yet — the lexer/parser pipeline must land first.
def self.ingest(file_path)
  raise("WIP")
end
|
||||
|
||||
private
|
||||
|
||||
# Placeholder for the bundle parser — consumes lexer tokens.
# Empty for now; implementation to follow the Lexer work.
class Parser
end
|
||||
|
||||
# Placeholder for the bundle interpreter — acts on parsed output.
# Empty for now; implementation to follow the Parser work.
class Interpreter
end
|
||||
|
||||
# No setup required yet; defined explicitly as a private placeholder
# (it sits below the `private` marker, so external construction is
# discouraged).
def initialize
end
|
||||
end
|
||||
|
|
|
@ -0,0 +1,7 @@
|
|||
require "spec_helper"

# Smoke test for the bundle lexer. NOTE(review): this currently fails,
# because Lexer#tokenize raises "WIP" — consistent with the "(FAILING)"
# commit title.
RSpec.describe Pigeon::Bundle::Lexer do
  it "tokenizes the bundle" do
    # No expectations yet — only verifies tokenize runs without raising.
    # TODO: assert on the returned token stream once tokenize is done.
    Pigeon::Bundle::Lexer.tokenize(File.read("./example.bundle"))
  end
end
|
Loading…
Reference in New Issue