This commit is contained in:
Ehmry - 2019-03-16 11:52:24 +01:00
parent 7a3f231d0b
commit 2ae7687403
5 changed files with 54 additions and 23 deletions

View File

@ -255,7 +255,7 @@ proc processPacket(session: SessionRef; pkt: var FsPacket) =
of READ:
let
node = session.nodes[pkt.handle]
pktBuf = cast[ptr array[maxChunkSize, char]](session.cpp.packetContent pkt)
pktBuf = session.cpp.packetContent pkt
# cast the pointer to an array pointer for indexing
case node.kind
of dirNode:
@ -341,7 +341,7 @@ componentConstructHook = proc(env: GenodeEnv) =
setId = toSetId(e)
break
except ValueError: continue
if setId.isValid:
if setId.isNonZero:
try:
let
#rootPath = args.argString "root"

View File

@ -17,8 +17,6 @@ const
blobHexLen* = 32 * 2
blobVisualLen* = 32 * 3
maxChunkSize* {.deprecated} = blobLeafSize
type
Blake2b256* = Blake2bContext[256]
@ -99,8 +97,6 @@ proc toBlobId*(cbor: CborNode): BlobId =
for i in 0..<digestLen:
result.data[i] = cbor.bytes[i].uint8
{.deprecated: [newCborBytes: toCbor].}
proc toHex*(id: BlobId|SetId): string = hex.encode(id.data)
  ## Return the BlobId or SetId digest encoded as a hexadecimal string.
@ -115,7 +111,6 @@ proc take*(cid: var BlobId; buf: var string) =
proc blobHash*(buf: pointer; len: Natural): BlobId =
## Generate a BlobId for a string of data using the BLAKE2b hash algorithm.
assert(len <= maxChunkSize)
var b: Blake2b256
init(b)
update(b, buf, len)
@ -123,7 +118,6 @@ proc blobHash*(buf: pointer; len: Natural): BlobId =
proc blobHash*(data: string): BlobId =
## Generate a BlobId for a string of data using the BLAKE2b hash algorithm.
assert(data.len <= maxChunkSize)
var b: Blake2b256
init(b)
update(b, data)
@ -136,15 +130,6 @@ proc verify*(cid: BlobId; data: string): bool =
update(b, data)
finish(b) == cid
iterator simpleChunks*(s: Stream; size = maxChunkSize): string =
  ## Yield successive chunks read from `s`, each at most `size` bytes.
  ## The final chunk may be shorter when the stream is exhausted.
  doAssert(size <= maxChunkSize)
  var chunk = newString(size)
  while not s.atEnd:
    chunk.setLen(size)
    let n = s.readData(chunk[0].addr, size)
    chunk.setLen(n)
    yield chunk
func isNonZero*(bh: BlobId): bool =
## Test if a blob hash is not zeroed.
var r: byte
@ -153,15 +138,14 @@ func isNonZero*(bh: BlobId): bool =
r = r or b
r != 0
{.deprecated: [isValid: isNonZero].}
type
Key* = uint64
const
keyBits = sizeof(Key) shl 3
keyBits = sizeof(Key) * 8
keyChunkBits = fastLog2 keyBits
keyChunkMask = not ((not 0.Key) shl (keyChunkBits))
maxDepth = keyBits/keyChunkBits
func toKey*(s: string): Key =
var key: siphash.Key
@ -673,3 +657,38 @@ proc randomApply*(store: BlobStore; trie: BlobSet; seed: int64;
of hotNode:
trie = next
i = rng.rand(countSetBits(trie.bitmap)-1)
proc applyAll*(store: BlobStore; trie: BlobSet; seed: int64;
    f: proc(id: BlobId; size: BiggestInt)) =
  ## Apply `f` to all members of the set in a pseudo-random order
  ## derived from `seed`.
  if trie.table.len == 0: return
  var
    rng = initRand(seed)
    # Explicit traversal stack: one (pending-index bitmask, node) pair
    # per trie level, avoiding recursion.
    path: array[maxDepth.int, tuple[mask: uint64, trie: BlobSet]]
    level = 0
  if trie.kind == coldNode:
    # Cold root must be loaded from the store before traversal.
    path[0].trie = store.load(trie.setId)
  else:
    path[0].trie = trie
  path[0].mask = not (not(0'u64) shl path[0].trie.table.len)
    # set the bits of indexes to hit
  while 0 < level or path[0].mask != 0'u64:
    if path[level].mask == 0'u64:
      # This level is exhausted; pop back to the parent.
      dec level
      continue
    let
      # Draw a random table index; rejection-sample until we hit an
      # index whose mask bit is still set.
      i = rng.rand(path[level].trie.table.high)
      bi = 1'u64 shl i
    if (path[level].mask and bi) == 0'u64:
      # Already visited this index at this level; draw again.
      continue
    path[level].mask = path[level].mask xor bi
    var node = path[level].trie.table[i]
    if node.kind == leafNode:
      f(node.blob, node.size)
    else:
      if node.kind == coldNode:
        # Materialize cold children on demand.
        node = store.load(node.setId)
      inc level
      path[level].mask = not (not(0'u64) shl node.table.len)
      path[level].trie = node

View File

@ -12,7 +12,7 @@ type
# TODO: tables must be purged periodically
rng: Rand
proc newHttpStoreServer*(backend: BlobStore): HttpStoreServer =
proc newHttpStoreServer*(backend: BlobStore; seed = 0'i64): HttpStoreServer =
## Create a new HTTP server for a given store.
randomize()
HttpStoreServer(
@ -20,7 +20,7 @@ proc newHttpStoreServer*(backend: BlobStore): HttpStoreServer =
store: backend,
ingests: initTable[string, IngestStream](),
blobs: initTable[BlobId, BlobStream](),
rng: initRand(random(high int)))
rng: initRand(seed))
func parseRange(range: string): tuple[a: int, b: int] =
## Parse an HTTP byte range string.

View File

@ -69,3 +69,15 @@ suite "store":
echo "randomApply: ", id, " ", size
let stream = store.openBlobStream(id, size, dataBlob)
close stream
test "applyAll":
  # Reload the persisted set, then check that applyAll reaches every
  # member; test blobs are presumably stored with size == i — confirm
  # against the fixture setup earlier in this suite.
  bs = load(client, setId)
  for i in 1..count:
    var found = false
    store.applyAll(bs, i) do (id: BlobId; size: BiggestInt):
      if i == size: found = true
    if not found:
      # Diagnose before failing: was the member really in the set?
      echo i, " not found"
      apply(client, bs, $i) do (id: BlobId; size: BiggestInt):
        echo "but ", i, " really is in the set"
      raiseAssert($i)

View File

@ -5,7 +5,7 @@ import ../src/blobsets
suite "Blob set tests":
var
randomCid = blobHash(newString(maxChunkSize))
randomCid = blobHash("")
# test "zero blob":
# doAssert(randomCid == zeroChunk)