Ability to use a blob as a key, value, or kind.

Netscape Navigator 2020-04-21 07:31:34 -05:00
parent d53e7392ed
commit 4f38f12a1a
8 changed files with 51 additions and 190 deletions
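In practice this means a blob multihash can now appear anywhere a plain string could: as a message's kind, as a body key, or as a body value, and the message reports those references through the new collect_blobs helper added below. A rough sketch using the same db helpers the updated spec relies on (the blob contents and the db handle are illustrative, not taken from this commit):

msg = db.create_message(db.put_blob("a blob used as the kind"), {
  db.put_blob("a blob used as a key") => "b",
  "c" => db.put_blob("a blob used as a value"),
})
msg.collect_blobs # => the three "&....sha256" blob multihashes, deduplicated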

View File

@@ -206,6 +206,22 @@ module Pigeon
msg
end
def self.mkdir_p(path)
Dir.mkdir(path) unless Dir.exists?(path)
end
def self.split_file_path(b32_hash)
[
b32_hash[0],
b32_hash[1...9],
b32_hash[9...17],
b32_hash[17...25],
b32_hash[25...33],
b32_hash[33...41],
[b32_hash[41...49], ".", b32_hash[49...52]].join(""),
]
end
def self.decode_multihash(string)
if string[SIG_RANGE] == SIG_FOOTER
return b32_decode(string.gsub(SIG_FOOTER, ""))
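The new split_file_path helper shards a 52-character base32 blob hash into one leading character, five 8-character directory segments, and an 8.3-style file name, presumably so no single blob directory grows too large. A quick illustration (the input is a stand-in string, not a real hash):

b32 = ("A".."Z").to_a.join * 2 # 52 stand-in characters
Pigeon::Helpers.split_file_path(b32)
# => ["A", "BCDEFGHI", "JKLMNOPQ", "RSTUVWXY", "ZABCDEFG", "HIJKLMNO", "PQRSTUVW.XYZ"]
# File.join of that array yields A/BCDEFGHI/JKLMNOPQ/RSTUVWXY/ZABCDEFG/HIJKLMNO/PQRSTUVW.XYZ,
# which get_blob then resolves relative to PIGEON_BLOB_PATH.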

View File

@@ -94,12 +94,26 @@ module Pigeon
# === BUNDLES
def create_bundle(file_path = DEFAULT_BUNDLE_PATH)
content = store
.find_all_messages(local_identity.multihash)
.map { |multihash| store.read_message(multihash) }
.sort_by(&:depth)
# Fetch messages for all peers
peers = all_peers + [local_identity.multihash]
messages = peers.map do |peer|
find_all_messages(peer)
.map { |multihash| read_message(multihash) }
.sort_by(&:depth)
end.flatten
# Render messages for all peers.
content = messages
.map { |message| message.render }
.join(BUNDLE_MESSAGE_SEPARATOR)
# Make sure the bundle directory exists.
Helpers.mkdir_p("bundle")
# Get blob references for _all_ peers.
blobs = messages.map(&:collect_blobs).flatten.uniq
# (The collected blobs are not used yet; see the TODO below.)
# Write the bundle to disk.
# TODO: Link the collected blobs into the bundle directory.
File.write(file_path, content + CR)
end
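A rough usage sketch (the path argument is illustrative; create_bundle falls back to DEFAULT_BUNDLE_PATH, and db stands for a database handle like the one the specs build):

db.create_bundle("pigeon_bundle.txt")
# The file now holds every known peer's messages plus our own, each feed
# oldest-first, joined by BUNDLE_MESSAGE_SEPARATOR. The blob references
# gathered into `blobs` are not copied anywhere yet; linking them next to
# the bundle is the remaining TODO.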

View File

@@ -1,160 +0,0 @@
module Pigeon
class Helpers
class VerificationError < StandardError; end
class MessageSizeError < StandardError; end
B32_ENC = {
"00000" => "0", "00001" => "1", "00010" => "2", "00011" => "3",
"00100" => "4", "00101" => "5", "00110" => "6", "00111" => "7",
"01000" => "8", "01001" => "9", "01010" => "A", "01011" => "B",
"01100" => "C", "01101" => "D", "01110" => "E", "01111" => "F",
"10000" => "G", "10001" => "H", "10010" => "J", "10011" => "K",
"10100" => "M", "10101" => "N", "10110" => "P", "10111" => "Q",
"11000" => "R", "11001" => "S", "11010" => "T", "11011" => "V",
"11100" => "W", "11101" => "X", "11110" => "Y", "11111" => "Z",
}.freeze
B32_DEC = {
"0" => 0b00000, "O" => 0b00000, "1" => 0b00001, "I" => 0b00001,
"L" => 0b00001, "2" => 0b00010, "3" => 0b00011, "4" => 0b00100,
"5" => 0b00101, "6" => 0b00110, "7" => 0b00111, "8" => 0b01000,
"9" => 0b01001, "A" => 0b01010, "B" => 0b01011, "C" => 0b01100,
"D" => 0b01101, "E" => 0b01110, "F" => 0b01111, "G" => 0b10000,
"H" => 0b10001, "J" => 0b10010, "K" => 0b10011, "M" => 0b10100,
"N" => 0b10101, "P" => 0b10110, "Q" => 0b10111, "R" => 0b11000,
"S" => 0b11001, "T" => 0b11010, "V" => 0b11011, "W" => 0b11100,
"X" => 0b11101, "Y" => 0b11110, "Z" => 0b11111,
}.freeze
def self.lipmaa(n)
# The original lipmaa function returns -1 for 0
# but that does not mesh well with our serialization
# scheme. Comments welcome on this one.
return 0 if n < 1 # Prevent -1, division by zero, etc.
m, po3, x = 1, 3, n
# find k such that (3^k - 1)/2 >= n
while (m < n)
po3 *= 3
m = (po3 - 1) / 2
end
po3 /= 3
# find longest possible backjump
if (m != n)
while x != 0
m = (po3 - 1) / 2
po3 /= 3
x %= m
end
if (m != po3)
po3 = m
end
end
return n - po3
end
# http://www.crockford.com/wrmg/base32.html
def self.b32_encode(string)
string
.each_byte
.to_a
.map { |x| x.to_s(2).rjust(8, "0") }
.join
.scan(/.{1,5}/)
.map { |x| x.ljust(5, "0") }
.map { |bits| B32_ENC.fetch(bits) }
.join
end
# http://www.crockford.com/wrmg/base32.html
def self.b32_decode(string)
string
.split("")
.map { |x| B32_DEC.fetch(x.upcase) }
.map { |x| x.to_s(2).rjust(5, "0") }
.join("")
.scan(/.{1,8}/)
.map { |x| x.length == 8 ? x.to_i(2).chr : "" }
.join("")
end
def self.verify_string(identity, string_signature, string)
binary_signature = decode_multihash(string_signature)
string_key = identity.multihash
binary_key = decode_multihash(string_key)
verify_key = Ed25519::VerifyKey.new(binary_key)
verify_key.verify(binary_signature, string)
end
def self.assert(field, actual, expected)
unless actual == expected
message = VERFIY_ERROR % [field, actual || "nil", expected || "nil"]
raise VerificationError, message
end
end
def self.publish_draft(db, draft)
author = db.local_identity
mhash = author.multihash
template = MessageSerializer.new(draft)
depth = db.get_message_count_for(mhash)
draft.author = author
draft.depth = depth
draft.prev = db.get_message_by_depth(mhash, depth - 1)
draft.lipmaa = Helpers.lipmaa(depth)
unsigned = template.render_without_signature
draft.signature = author.sign(unsigned)
tokens = Lexer.tokenize_unsigned(unsigned, draft.signature)
message = Parser.parse(db, tokens)[0]
db.reset_draft
message
end
def self.update_draft(db, key, value)
draft = db.current_draft
draft[key] = value
db.save_draft(draft)
return draft.body[key]
end
def self.verify_message(db, msg)
msg_hash = msg.multihash
body = msg.body
key_count = body.count
author = msg.author
signature = msg.signature
return db.read_message(msg_hash) if db.message_saved?(msg_hash)
if key_count > 64
msg = MSG_SIZE_ERROR % key_count
raise MessageSizeError, msg
end
count = db.get_message_count_for(author.multihash)
expected_prev = db.get_message_by_depth(author.multihash, count - 1) || Pigeon::NOTHING
assert("depth", count, msg.depth)
# TODO: Re-visit this. Our current verification method
# is probably too strict and won't allow for partial
# verification of feeds.
assert("lipmaa", Helpers.lipmaa(msg.depth), msg.lipmaa)
assert("prev", msg.prev, expected_prev)
tpl = msg.template.render_without_signature
Helpers.verify_string(author, signature, tpl)
msg.untaint
msg.freeze
msg
end
def self.decode_multihash(string)
if string[SIG_RANGE] == SIG_FOOTER
return b32_decode(string.gsub(SIG_FOOTER, ""))
else
return b32_decode(string[1..-1].gsub(FOOTERS_REGEX, ""))
end
end
end
end
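For reference, the backjump sequence this lipmaa implementation produces, and the Crockford base32 round trip the b32 helpers provide (assuming equivalent class methods remain on Pigeon::Helpers once this file is gone):

(1..13).map { |n| Pigeon::Helpers.lipmaa(n) }
# => [0, 1, 2, 1, 4, 5, 6, 4, 8, 9, 10, 8, 4]
# e.g. message 13 can back-link directly to message 4 instead of only to 12.
Pigeon::Helpers.b32_encode("A") # => "84"
Pigeon::Helpers.b32_decode("84") # => "A"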

View File

@@ -51,7 +51,7 @@ module Pigeon
BLOB_VALUE = /&.{52}\.sha256/
STRG_VALUE = /".{1,128}"/
# If you need other characters (but not spaces) submit an issue.
ALPHANUMERICISH = /[a-zA-Z0-9_\-=\.]{1,90}/
ALPHANUMERICISH = /[a-zA-Z0-9_\-=\.\@\&]{1,90}/
ALL_VALUES = [
FEED_VALUE,
MESG_VALUE,

View File

@@ -36,5 +36,11 @@ module Pigeon
def template
MessageSerializer.new(self)
end
def collect_blobs
([kind] + body.keys + body.values)
.select { |x| x.match? Lexer::BLOB_VALUE }
.uniq
end
end
end

View File

@@ -16,13 +16,11 @@ module Pigeon
end
def add_peer(identity)
path = Helpers.decode_multihash(identity)
write { store[PEER_NS].add(identity) }
identity
end
def remove_peer(identity)
path = Helpers.decode_multihash(identity)
write { store[PEER_NS].delete(identity) }
identity
end
@@ -66,7 +64,7 @@ module Pigeon
end
def get_blob(blob_multihash)
path = File.join(split_file_path(blob_multihash[1..52]))
path = File.join(Helpers.split_file_path(blob_multihash[1..52]))
path = File.join(PIGEON_BLOB_PATH, path)
if File.file?(path)
File.read(path)
@@ -134,24 +132,12 @@ module Pigeon
private
def split_file_path(b32_hash)
[
b32_hash[0],
b32_hash[1...9],
b32_hash[9...17],
b32_hash[17...25],
b32_hash[25...33],
b32_hash[33...41],
[b32_hash[41...49], ".", b32_hash[49...52]].join(""),
]
end
def write_to_disk(b32_hash, data)
p = split_file_path(b32_hash)
p = Helpers.split_file_path(b32_hash)
file_name = p.pop
dir = p.reduce(PIGEON_BLOB_PATH) do |accum, item|
path = File.join(accum, item)
mkdir_p(path)
Helpers.mkdir_p(path)
path
end
full_path = File.join(dir, file_name)
@@ -170,7 +156,7 @@ module Pigeon
store[MESSAGE_BY_DEPTH_NS] ||= {}
store[PEER_NS] ||= Set.new
end
mkdir_p(PIGEON_BLOB_PATH)
Helpers.mkdir_p(PIGEON_BLOB_PATH)
store
end
@@ -201,9 +187,5 @@ module Pigeon
def write(&blk); transaction(false, &blk); end
def read(&blk); transaction(true, &blk); end
def on_disk?; File.file?(path); end
def mkdir_p(path)
Dir.mkdir(path) unless Dir.exists?(path)
end
end
end

View File

@@ -13,10 +13,15 @@ RSpec.describe Pigeon::Message do
end
def create_fake_messages
(1..10)
blobs = [db.create_message(db.put_blob("one"), { "a" => "b" }),
db.create_message("a", { db.put_blob("two") => "b" }),
db.create_message("a", { "b" => db.put_blob("three") })]
normal = (1..10)
.to_a
.map do |n| { "foo" => ["bar", "123", SecureRandom.uuid].sample } end
.map do |d| db.create_message(SecureRandom.uuid, d) end
blobs + normal
end
it "creates a bundle" do

View File

@@ -29,5 +29,3 @@ db.get_config
db.ingest_bundle
db.set_config
db.reset_database
binding.pry