WIP, Need to fix saved drafts.

Netscape Navigator 2020-04-17 08:55:18 -05:00
parent 780823ccaf
commit d3ddc21de3
12 changed files with 104 additions and 73 deletions


@@ -56,15 +56,8 @@ Eg: `pigeon identity show` becomes `./pigeon-cli show`.
- [X] Make all methods private except those required for the CLI.
- [X] Add Lipmaa links like the Bamboo folks do.
- [X] Set a max message size.
- [ ] Clean up all singletons / .current hack from storage.rb
- [ ] Clean up all singletons / .current hack from parser.rb
- [ ] Clean up all singletons / .current hack from lexer.rb
- [ ] Clean up all singletons / .current hack from message.rb
- [ ] Clean up all singletons / .current hack from draft_serializer.rb
- [ ] Clean up all singletons / .current hack from message_serializer.rb
- [ ] Clean up all singletons / .current hack from remote_identity.rb
- [ ] Clean up all singletons / .current hack from draft.rb
- [ ] Clean up all singletons / .current hack from local_identity.rb
- [ ] Clean up all singletons / .current hacks
- [ ] Reduce cross cutting where collaborating objects need access to `@db`
- [ ] Update README.md. Needs user manual for new `Pigeon::Database` class.
- [ ] Make the switch to LevelDB, RocksDB, [UNQLite](https://unqlite.org/features.html) or similar (currently using Ruby PStore).
- [ ] Make CLI names consistent with API names. Eg: find vs. read.
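The singleton cleanup item above is the thread running through the rest of this commit: collaborators stop reaching for process-wide `.current` singletons (`Pigeon::Storage.current`, `Pigeon::LocalIdentity.current`) and receive the database explicitly instead. A rough before/after sketch of the pattern; the class body and the "example" kind are illustrative, not the exact Pigeon internals:

# Before: hidden global state behind a .current accessor.
class Storage
  def self.current
    @current ||= new
  end
end

draft = Draft.new(kind: "example", body: {})          # silently depends on Storage.current

# After: the owning Pigeon::Database is injected, as the db:
# keyword arguments elsewhere in this commit do.
draft = Draft.new(kind: "example", body: {}, db: db)  # dependency is explicit and test-friendly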


@@ -8,12 +8,29 @@ module Pigeon
init_local_identity(seed)
end
def find_all; store.find_all; end
def find_all(mhash); store.find_all(mhash); end
def put_blob(b); store.put_blob(b); end
def get_blob(b); store.get_blob(b); end
def set_config(k, v); store.set_config(k, v); end
def get_config(k); store.get_config(k); end
def reset_current_draft; set_config(CURRENT_DRAFT, nil); end
def reset; store.reset; end
def add_peer(p); store.add_peer(p); end
def block_peer(p); store.block_peer(p); end
def remove_peer(p); store.remove_peer(p); end
def all_peers(); store.all_peers(); end
def all_blocks(); store.all_blocks(); end
def message?(multihash); store.message?(multihash); end
def save_message(msg_obj); store.save_message(msg_obj); end
def read_message(multihash); store.read_message(multihash); end
def get_message_count_for(multihash)
store.get_message_count_for(multihash)
end
def get_message_by_depth(multihash, depth)
store.get_message_by_depth(multihash, depth)
end
def create_message(kind, params)
draft = Pigeon::Draft.new(kind: kind, db: self)
@@ -23,8 +40,8 @@ module Pigeon
def create_bundle(file_path = DEFAULT_BUNDLE_PATH)
content = store
.find_all(Pigeon::LocalIdentity.current.multihash)
.map { |multihash| s.read_message(multihash) }
.find_all(local_identity.multihash)
.map { |multihash| store.read_message(multihash) }
.sort_by(&:depth)
.map { |message| message.render }
.join(BUNDLE_MESSAGE_SEPARATOR)
@@ -34,11 +51,12 @@ module Pigeon
def ingest_bundle(file_path = DEFAULT_BUNDLE_PATH)
bundle = File.read(file_path)
tokens = Pigeon::Lexer.tokenize(bundle)
Pigeon::Parser.parse(tokens)
Pigeon::Parser.parse(self, tokens)
end
def create_draft(kind:, body: {})
save_draft(Draft.new(kind: kind, body: body))
draft = Draft.new(kind: kind, body: body, db: self)
save_draft(draft)
end
def save_draft(draft)
@@ -47,7 +65,7 @@ module Pigeon
end
def current_draft
store.get_config(CURRENT_DRAFT) or raise NO_DRAFT_FOUND
store.get_config(CURRENT_DRAFT)
end
private
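Everything above funnels through the single store owned by the database, which is what the spec changes later in this diff lean on. A usage sketch of the facade; the kind and body values are invented:

db = Pigeon::Database.new
db.reset                                   # wipe and re-bootstrap the backing PStore
msg = db.create_message("example_kind", { "temperature" => "22C" })
db.read_message(msg.multihash)             # round-trip the stored message
db.find_all(db.local_identity.multihash)   # multihashes authored by this identity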

dist/pigeon/draft.rb

@@ -6,8 +6,8 @@ module Pigeon
:depth, :body, :author
def discard
if Draft.current&.internal_id == @internal_id
Draft.reset_current
if @db.current_draft&.internal_id == @internal_id
@db.reset_current_draft
end
end
@@ -39,21 +39,26 @@ module Pigeon
# This might be a bad or good idea. Not sure yet.
self.body[key] = value.inspect
end
@db.save_draft(self)
# TODO: You can't store a PStore in a PStore.
# This is terrible and should be fixed:
old_db = @db
@db = nil
old_db.save_draft(self)
@db = old_db
return self.body[key]
end
# Author a new message.
def publish
template = MessageSerializer.new(self)
@author = LocalIdentity.current
@depth = store.get_message_count_for(author.multihash)
@prev = store.get_message_by_depth(author.multihash, @depth - 1)
@author = @db.local_identity
@depth = @db.get_message_count_for(author.multihash)
@prev = @db.get_message_by_depth(author.multihash, @depth - 1)
@lipmaa = Helpers.lipmaa(@depth)
unsigned = template.render_without_signature
@signature = author.sign(unsigned)
tokens = Lexer.tokenize_unsigned(unsigned, signature)
message = Parser.parse(tokens)[0]
message = Parser.parse(@db, tokens)[0]
self.discard
message
end
@@ -61,9 +66,5 @@ module Pigeon
def render_as_draft
DraftSerializer.new(self).render
end
def store
Pigeon::Storage.current
end
end
end
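The `old_db = @db` / `@db = nil` dance above (message.rb's `save!` below does the same thing) exists because the draft now carries a live database handle, and PStore would otherwise try to marshal the PStore through itself. One possible cleanup, offered as a sketch rather than the planned fix, is to keep `@db` out of serialization entirely:

module Pigeon
  class Draft
    # PStore serializes values with Marshal, and Marshal honors these hooks,
    # so the database handle never lands inside the stored draft.
    def marshal_dump
      instance_variables
        .reject { |ivar| ivar == :@db }
        .map { |ivar| [ivar, instance_variable_get(ivar)] }
    end

    def marshal_load(fields)
      fields.each { |ivar, value| instance_variable_set(ivar, value) }
    end
  end
end

With hooks like these, `save_draft(self)` could be called directly; a draft read back via `current_draft` would only need its `@db` re-attached.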


@@ -47,11 +47,13 @@ module Pigeon
private
def save!
return db.read_message(multihash) if db.message?(multihash)
return @db.read_message(multihash) if @db.message?(multihash)
verify_counted_fields
verify_signature
old_db = @db
@db = nil
self.freeze
db.save_message(self)
old_db.save_message(self)
self
end
@@ -68,8 +70,8 @@ module Pigeon
msg = MSG_SIZE_ERROR % key_count
raise MessageSizeError, msg
end
count = db.get_message_count_for(author.multihash)
expected_prev = db.get_message_by_depth(author.multihash, count - 1) || Pigeon::NOTHING
count = @db.get_message_count_for(author.multihash)
expected_prev = @db.get_message_by_depth(author.multihash, count - 1) || Pigeon::NOTHING
assert("depth", count, depth)
# TODO: Re-visit this. Our current verification method
# is probably too strict and won't allow for partial
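To make the counted-field check above concrete (feed state invented for illustration): if an author's feed already holds three messages at depths 0..2, the next message must claim depth 3 and chain to the depth-2 entry.

count = db.get_message_count_for(author.multihash)                    # => 3
expected_prev = db.get_message_by_depth(author.multihash, count - 1)  # multihash at depth 2
# The verification (partly outside the visible hunk) then presumably requires:
#   depth == count          -- the incoming message claims depth 3
#   prev  == expected_prev  -- it points at the depth-2 multihash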

dist/pigeon/parser.rb

@@ -2,12 +2,13 @@ module Pigeon
class Parser
class DuplicateKeyError < StandardError; end
def self.parse(tokens)
self.new(tokens).parse
def self.parse(db, tokens)
self.new(db, tokens).parse
end
def initialize(tokens)
@scratchpad = {}
def initialize(db, tokens)
@db = db
reset_scratchpad
@tokens = tokens
@results = []
end
@@ -32,10 +33,14 @@ module Pigeon
private
def reset_scratchpad
@scratchpad = { db: @db }
end
def finish_this_message!
@scratchpad.freeze
@results.push(Message.ingest(**@scratchpad))
@scratchpad = {}
reset_scratchpad
end
def set(key, value, hash = @scratchpad)
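With the constructor change above, `reset_scratchpad` seeds every message's keyword hash with `db:`, which is how `Message.ingest(**@scratchpad)` receives its database alongside the parsed fields. The calling convention, mirroring the lexer spec further down in this diff:

db = Pigeon::Database.new
tokens = Pigeon::Lexer.tokenize(File.read("./spec/fixtures/normal.bundle"))
messages = Pigeon::Parser.parse(db, tokens)   # each result is a persisted Pigeon::Message
messages.map(&:multihash)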


@@ -6,12 +6,13 @@ module Pigeon
def initialize(path: PIGEON_DB_PATH)
@path = path
bootstrap unless bootstrapped?
store.ultra_safe = true
bootstrap
end
def reset
File.delete(path) if bootstrapped?
@current = nil
File.delete(path) if on_disk?
bootstrap
end
def add_peer(identity)
@@ -45,14 +46,20 @@
end
def set_config(key, value)
write { store[CONF_NS][key] = value }
write do
a = store.fetch(CONF_NS)
raise "FIX SAVED DRAFTS" if value.instance_variable_get(:@db)
a[key] = value
end
end
def put_blob(data)
raw_digest = Digest::SHA256.digest(data)
b64_digest = Helpers.b32_encode(raw_digest)
multihash = [BLOB_SIGIL, b64_digest, BLOB_FOOTER].join("")
write { store[BLOB_NS][multihash] = data }
b32_hash = Helpers.b32_encode(raw_digest)
multihash = [BLOB_SIGIL, b32_hash, BLOB_FOOTER].join("")
write do
store[BLOB_NS][multihash] = data
end
multihash
end
@@ -105,26 +112,20 @@
def bootstrap
write do
# Wait what? Why is there a depth and count
# index??
store[MESSAGE_BY_DEPTH_NS] ||= {}
store[COUNT_INDEX_NS] ||= {}
# TODO: Why is there a depth and count index??
store[BLCK_NS] ||= Set.new
store[BLOB_NS] ||= {}
store[CONF_NS] ||= {}
store[COUNT_INDEX_NS] ||= {}
store[MESG_NS] ||= {}
store[BLCK_NS] ||= Set.new
store[MESSAGE_BY_DEPTH_NS] ||= {}
store[PEER_NS] ||= Set.new
end
store
end
def store
if @store
return @store
else
@store = PStore.new(PIGEON_DB_PATH)
bootstrap
end
@store ||= PStore.new(PIGEON_DB_PATH)
end
def insert_and_update_index(message)
@@ -149,9 +150,6 @@
def write(&blk); transaction(false, &blk); end
def read(&blk); transaction(true, &blk); end
def bootstrapped?
File.file?(path)
end
def on_disk?; File.file?(path); end
end
end
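For context on the `write`/`read` one-liners above: PStore only allows access inside a transaction, so the shared `transaction` helper (outside the visible hunk) presumably reduces to the sketch below, and the `store.ultra_safe = true` added to `initialize` opts into PStore's slower but safer save path.

# Assumed shape of the shared helper; PStore#transaction takes a
# read_only flag and yields with the store open.
def transaction(read_only = false, &blk)
  store.transaction(read_only, &blk)
end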


@@ -6,7 +6,11 @@ RSpec.describe Pigeon::Message do
File.delete(p) if File.file?(p)
end
let(:db) { Pigeon::Database.new }
let(:db) do
db = Pigeon::Database.new
db.reset
db
end
def create_fake_messages
(1..10)


@@ -2,7 +2,9 @@ require "spec_helper"
RSpec.describe Pigeon::Draft do
let(:db) do
Pigeon::Database.new
db = Pigeon::Database.new
db.reset
db
end
let(:message) do


@@ -116,7 +116,9 @@ RSpec.describe Pigeon::Lexer do
].freeze
let(:db) do
Pigeon::Database.new
db = Pigeon::Database.new
db.reset
db
end
let(:message) do


@@ -13,7 +13,9 @@ RSpec.describe Pigeon::Message do
end
let(:db) do
Pigeon::Database.new
db = Pigeon::Database.new
db.reset
db
end
let(:draft) do
@@ -129,7 +131,7 @@ RSpec.describe Pigeon::Message do
it "crashes on forged fields" do
m = "Expected field `depth` to equal 0, got: 10"
expect do
Pigeon::Parser.parse([
Pigeon::Parser.parse(db, [
[:AUTHOR, "@DYdgK1KUInVtG3lS45hA1HZ-jTuvfLKsxDpXPFCve04=.ed25519"],
[:KIND, "invalid"],
[:PREV, "NONE"],


@@ -1,7 +1,11 @@
require "spec_helper"
RSpec.describe Pigeon::Lexer do
let(:db) { Pigeon::Database.new }
let(:db) do
db = Pigeon::Database.new
db.reset
db
end
let(:example_bundle) { File.read("./spec/fixtures/normal.bundle") }
let(:tokens) { Pigeon::Lexer.tokenize(example_bundle) }
@@ -19,7 +23,7 @@ RSpec.describe Pigeon::Lexer do
]
it "parses tokens" do
results = Pigeon::Parser.parse(tokens)
results = Pigeon::Parser.parse(db, tokens)
expect(results.length).to eq(10)
expect(results.first).to be_kind_of(Pigeon::Message)
expect(results.last).to be_kind_of(Pigeon::Message)
@@ -35,6 +39,6 @@ RSpec.describe Pigeon::Lexer do
it "finds duplicate keys" do
error = Pigeon::Parser::DuplicateKeyError
expect { Pigeon::Parser.parse(BAD_TOKENS) }.to raise_error(error)
expect { Pigeon::Parser.parse(db, BAD_TOKENS) }.to raise_error(error)
end
end


@@ -5,12 +5,12 @@ RSpec.describe Pigeon::Storage do
IDS = %w(@ZMWM5PSXRN7RFRMSWW1E3V5DNGC4XGGJTHKCAGB48SNRG4XXE5NG.ed25519
@VF0Q4KXQNY6WCAXF17GAZGDPAX8XKM70SB8N7V0NSD1H370ZCJBG.ed25519)
before(:each) do
let(:db) do
db = Pigeon::Database.new
db.reset
db
end
let(:db) { Pigeon::Database.new }
it "sets a config" do
db.set_config("FOO", "BAR")
value = db.get_config("FOO")
@@ -44,12 +44,12 @@ RSpec.describe Pigeon::Storage do
db.block_peer(IDS[1])
expect(db.all_peers).not_to include(IDS[1])
expect(db.all_blocks).to include(IDS[1])
expect(db.all_blockdb.count).to eq(1)
expect(db.all_blocks.count).to eq(1)
end
it "finds all authored by a particular feed" do
ingested_messages = db.ingest_bundle("./spec/fixtures/normal.bundle")
author = ingested_messagedb.first.author.multihash
author = ingested_messages.first.author.multihash
actual_messages = db.find_all(author)
search_results = db.find_all(author)
end
@@ -64,12 +64,12 @@ RSpec.describe Pigeon::Storage do
"e" => db.put_blob(File.read("./logo.png")),
}),
db.create_message("g", {
"me_myself_and_i" => Pigeon::LocalIdentity.current.multihash,
"me_myself_and_i" => db.local_identity.multihash,
}),
]
me = Pigeon::LocalIdentity.current.multihash
me = db.local_identity.multihash
results = db.find_all(me)
expect(resultdb.length).to eq(3)
expect(results.length).to eq(3)
expect(msgs[0].multihash).to eq(results[0])
expect(msgs[1].multihash).to eq(results[1])
expect(msgs[2].multihash).to eq(results[2])