Skip to content
This repository has been archived by the owner on Jun 2, 2022. It is now read-only.

wip: new fbl based file #7

Open
wants to merge 6 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 0 additions & 44 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,47 +1,3 @@
# js-unixfsv2

This library is a full implementation of [`unixfs-v2`](https://github.com/ipfs/unixfs-v2) for JavaScript.

## encoder(path, options={})

Async generator that yields blocks for every file
and directory in the path.

Last block is the root block.

Runs recursively through directories. Accepts any valid
file or directory string path.

```javascript
const { encoder } = require('unixfsv2')
const storage = require('any-key-value-store')

const putBlock = async b => storage.put((await b.cid()).toString(), b.encode())

const storeDirectory = async path => {
  for await (const { block, root } of encoder(path)) {
    await putBlock(block || root.block())
    if (root) return root.block().cid()
  }
}
```

## reader(rootBlock, getBlock)

Returns a new Reader instance for the
root block.

```javascript
const { reader } = require('unixfsv2')
const storage = require('any-key-value-store')
const Block = require('@ipld/block')

const getBlock = async cid => Block.create(await storage.get(cid.toString()), cid)

/* rootBlock would be the same as the last block in
   the encode example. */
const r = reader(rootBlock, getBlock)
```


6 changes: 0 additions & 6 deletions browser.js

This file was deleted.

88 changes: 0 additions & 88 deletions cli.js

This file was deleted.

117 changes: 108 additions & 9 deletions index.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,108 @@
'use strict'
const fs = require('./src/fs')
const createTypes = require('./src/types')
const reader = require('./src/reader')

exports.encoder = fs
exports.reader = reader
exports.createTypes = createTypes
exports.types = fs.types
import { promises as fs } from 'fs'
import * as hamt from 'hamt-utils'
import fbl from '@ipld/fbl'

/* bit of a hack since it buffers the whole
file in memory but i don't want to take
a larger dep that will stream properly.
eventaully, i'll come back and make this properly stream.
*/
const onemeg = async function * (buffer) {
let chunk = buffer
while (chunk.byteLength) {
yield chunk.subarray(0, 1024 * 1000)
chunk = chunk.subarray(1024 * 1000)
}
}

const encode = async function * (Block, path, chunker=onemeg) {
const stat = await fs.stat(path)
if (stat.isDirectory()) {
const files = await fs.readdir(path)
const dir = {}
for (const file of files) {
let last
for await (const block of encode(Block, new URL(file, path + '/sub'), chunker)) {
if (last) yield last
last = block
}
dir[file] = last
}
let last
for await (const block of hamt.from(Block, dir)) {
yield block
last = block
}
yield { content: { d: await last.cid() } }
} else {
let last
for await (const block of fbl.from(chunker(await fs.readFile(path)))) {
yield block
last = block
}
yield { content: { f: await last.cid() } }
}
}

const encoder = async function * (Block, path, chunker) {
let last
for await (const block of encode(Block, path, chunker)) {
if (last) yield last
last = block
}
yield Block.encoder(last, 'dag-cbor')
}

const readFile = async function * (reader, parts, start, end) {
const block = await reader.traverse(parts)
yield * fbl.read(block, reader.get, start, end)
}

const toString = b => (new TextDecoder()).decode(b)

const lsDirectory = async function * (reader, parts) {
const block = await reader.traverse(parts)
const decoded = block.decodeUnsafe()
if (!decoded.content) throw new Error('Not a valid DirEnt')
if (!decoded.content.d) throw new Error('Not a file')
for await (const { key } of hamt.all(decoded.content.d, reader.get, start, end)) {
yield toString(key)
}
}

class Reader {
constructor (head, get) {
this.head = head
this.get = get
}
async traverse (parts) {
let head = await this.get(this.head)
if (!parts.length) {
const { d, f } = head.decodeUnsafe().content
return d || f
}
while (parts.length) {
const key = parts.shift()
const decoded = head.decodeUnsafe()
if (!decoded.content) throw new Error('Not a valid DirEnt')
if (!decoded.content.d) throw new Error('Not a directory')
const dirEnt = await hamt.get(decoded.content.d, key, this.get)
const { d, f } = dirEnt.content
if (f && parts.length) throw new Error(`${key} is not a directory`)
head = await this.get(d || f)
}
return head
}
read (path='', start, end) {
path = path.split('/').filter(x => x)
return readFile(this, path, start, end)
}
ls (path='') {
path = path.split('/').filter(x => x)
return lsDirectory(this, path)
}
}

const reader = (...args) => new Reader(...args)

export { encoder, reader }
27 changes: 8 additions & 19 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,24 +1,15 @@
{
"name": "@ipld/unixfsv2",
"name": "@ipld/fs",
"version": "0.0.0-dev",
"description": "Filesystem in a merkle tree.",
"browser": "./browser.js",
"scripts": {
"lint": "aegir lint",
"pretest": "npm run lint",
"build": "ipld-schema to-json doc/DataLayout.md > src/data-layout.json && ipld-schema to-json doc/README.md > src/schema.json",
"test": "aegir test -t node"
},
"description": "Implementation of UnixFSv2.",
"type": "module",
"scripts": {},
"keywords": [],
"author": "Mikeal Rogers <[email protected]> (http://www.mikealrogers.com)",
"dependencies": {
"@ipld/block": "^2.0.6",
"@ipld/printify": "0.0.0",
"@ipld/schema-gen": "0.0.1",
"bytesish": "^0.4.1",
"mime-types": "^2.1.24",
"rabin-generator": "0.0.1",
"stream-chunker": "^1.2.8"
"@ipld/fbl": "^2.0.1",
"@ipld/block": "^6.0.3",
"hamt-utils": "^0.0.4"
},
"directories": {
"test": "test"
Expand All @@ -33,8 +24,6 @@
},
"homepage": "https://github.com/ipld/js-unixfsv2#readme",
"devDependencies": {
"aegir": "^20.4.1",
"ipld-schema": "^0.3.1",
"tsame": "^2.0.1"
"estest": "^10.3.5"
}
}
Loading