go-dms3, commit 093c8fb0
Authored 10 years ago by Jeromy
Rework package structure for unixfs and subpackage
cc @jbenet
Parent: 15a47010
Showing 19 changed files with 81 additions and 66 deletions.
core/commands/add.go (+1, -1)
core/commands/cat.go (+2, -2)
fuse/ipns/ipns_unix.go (+8, -7)
fuse/readonly/readonly_unix.go (+4, -3)
importer/chunk/rabin.go (+1, -1)
importer/chunk/splitting.go (+10, -2)
importer/importer.go (+4, -5)
importer/importer_test.go (+11, -7)
server/http/ipfs.go (+2, -1)
unixfs/Makefile (+0, -0)
unixfs/data.pb.go (+5, -5)
unixfs/data.proto (+1, -1)
unixfs/format.go (+1, -1)
unixfs/format_test.go (+1, -1)
unixfs/io/dagmodifier.go (+6, -6)
unixfs/io/dagmodifier_test.go (+7, -7)
unixfs/io/dagreader.go (+6, -5)
unixfs/io/dagwriter.go (+5, -5)
unixfs/io/dagwriter_test.go (+6, -6)
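Taken together, the moves are: importer/format becomes the unixfs package, the chunking code (rabin.go, splitting.go) moves to importer/chunk, and the DagReader, DagWriter, and DagModifier all land in unixfs/io. As a quick orientation before the per-file diffs, here is a minimal sketch of what a caller looks like after the change, modeled on the core/commands/cat.go hunk below; the catNode helper and its pre-resolved dag node argument are illustrative assumptions, not code added by this commit.

package commands

import (
	"fmt"
	"io"

	"github.com/jbenet/go-ipfs/core"
	dag "github.com/jbenet/go-ipfs/merkledag"
	uio "github.com/jbenet/go-ipfs/unixfs/io"
)

// catNode streams the file data of an already-resolved dag node to out.
// The reader now comes from unixfs/io; before this commit the same call
// was mdag.NewDagReader in the merkledag package.
func catNode(n *core.IpfsNode, dagnode *dag.Node, out io.Writer) error {
	read, err := uio.NewDagReader(dagnode, n.DAG)
	if err != nil {
		return fmt.Errorf("cat error: %v", err)
	}
	_, err = io.Copy(out, read)
	return err
}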
core/commands/add.go

@@ -10,8 +10,8 @@ import (
 	"github.com/jbenet/go-ipfs/core"
 	"github.com/jbenet/go-ipfs/importer"
-	ft "github.com/jbenet/go-ipfs/importer/format"
 	dag "github.com/jbenet/go-ipfs/merkledag"
+	ft "github.com/jbenet/go-ipfs/unixfs"
 	u "github.com/jbenet/go-ipfs/util"
 )
core/commands/cat.go

@@ -5,7 +5,7 @@ import (
 	"io"

 	"github.com/jbenet/go-ipfs/core"
-	mdag "github.com/jbenet/go-ipfs/merkledag"
+	uio "github.com/jbenet/go-ipfs/unixfs/io"
 )

 func Cat(n *core.IpfsNode, args []string, opts map[string]interface{}, out io.Writer) error {

@@ -15,7 +15,7 @@ func Cat(n *core.IpfsNode, args []string, opts map[string]interface{}, out io.Wr
 		return fmt.Errorf("catFile error: %v", err)
 	}
-	read, err := mdag.NewDagReader(dagnode, n.DAG)
+	read, err := uio.NewDagReader(dagnode, n.DAG)
 	if err != nil {
 		return fmt.Errorf("cat error: %v", err)
 	}
fuse/ipns/ipns_unix.go
View file @
093c8fb0
package
ipns
import
(
"errors"
"io/ioutil"
"os"
"path/filepath"
...
...
@@ -12,10 +13,10 @@ import (
"github.com/jbenet/go-ipfs/core"
ci
"github.com/jbenet/go-ipfs/crypto"
imp
"github.com/jbenet/go-ipfs/importer"
dt
"github.com/jbenet/go-ipfs/importer/dagwriter"
ft
"github.com/jbenet/go-ipfs/importer/format"
"github.com/jbenet/go-ipfs/importer/chunk"
mdag
"github.com/jbenet/go-ipfs/merkledag"
ft
"github.com/jbenet/go-ipfs/unixfs"
uio
"github.com/jbenet/go-ipfs/unixfs/io"
u
"github.com/jbenet/go-ipfs/util"
)
...
...
@@ -204,7 +205,7 @@ type Node struct {
Ipfs
*
core
.
IpfsNode
Nd
*
mdag
.
Node
dagMod
*
dt
.
DagModifier
dagMod
*
uio
.
DagModifier
cached
*
ft
.
PBData
}
...
...
@@ -293,7 +294,7 @@ func (s *Node) ReadDir(intr fs.Intr) ([]fuse.Dirent, fuse.Error) {
// ReadAll reads the object data as file data
func
(
s
*
Node
)
ReadAll
(
intr
fs
.
Intr
)
([]
byte
,
fuse
.
Error
)
{
log
.
Debug
(
"ipns: ReadAll [%s]"
,
s
.
name
)
r
,
err
:=
mdag
.
NewDagReader
(
s
.
Nd
,
s
.
Ipfs
.
DAG
)
r
,
err
:=
uio
.
NewDagReader
(
s
.
Nd
,
s
.
Ipfs
.
DAG
)
if
err
!=
nil
{
return
nil
,
err
}
...
...
@@ -312,7 +313,7 @@ func (n *Node) Write(req *fuse.WriteRequest, resp *fuse.WriteResponse, intr fs.I
if
n
.
dagMod
==
nil
{
// Create a DagModifier to allow us to change the existing dag node
dmod
,
err
:=
dt
.
NewDagModifier
(
n
.
Nd
,
n
.
Ipfs
.
DAG
,
imp
.
DefaultSplitter
)
dmod
,
err
:=
uio
.
NewDagModifier
(
n
.
Nd
,
n
.
Ipfs
.
DAG
,
chunk
.
DefaultSplitter
)
if
err
!=
nil
{
log
.
Error
(
"Error creating dag modifier: %s"
,
err
)
return
err
...
...
@@ -541,7 +542,7 @@ func (n *Node) Rename(req *fuse.RenameRequest, newDir fs.Node, intr fs.Intr) fus
}
default
:
log
.
Critical
(
"Unknown node type for rename target dir!"
)
return
err
return
err
ors
.
New
(
"Unknown fs node type!"
)
}
return
nil
}
...
...
This diff is collapsed.
Click to expand it.
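To make the dagMod change above concrete, this is roughly how the write path now builds its modifier: the type comes from unixfs/io and the default splitter from importer/chunk. The newModifierFor helper is a hypothetical stand-in; the real code inlines the call inside (*Node).Write as shown in the hunk.

package ipns

import (
	"github.com/jbenet/go-ipfs/core"
	"github.com/jbenet/go-ipfs/importer/chunk"
	mdag "github.com/jbenet/go-ipfs/merkledag"
	uio "github.com/jbenet/go-ipfs/unixfs/io"
)

// newModifierFor wires a DagModifier onto an existing dag node the way the
// updated Write handler does on the first write (previously dt.NewDagModifier
// with imp.DefaultSplitter).
func newModifierFor(nd *mdag.Node, ipfs *core.IpfsNode) (*uio.DagModifier, error) {
	return uio.NewDagModifier(nd, ipfs.DAG, chunk.DefaultSplitter)
}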
fuse/readonly/readonly_unix.go

@@ -19,8 +19,9 @@ import (
 	"github.com/jbenet/go-ipfs/Godeps/_workspace/src/bazil.org/fuse"
 	"github.com/jbenet/go-ipfs/Godeps/_workspace/src/bazil.org/fuse/fs"
 	core "github.com/jbenet/go-ipfs/core"
-	ft "github.com/jbenet/go-ipfs/importer/format"
 	mdag "github.com/jbenet/go-ipfs/merkledag"
+	ft "github.com/jbenet/go-ipfs/unixfs"
+	uio "github.com/jbenet/go-ipfs/unixfs/io"
 	u "github.com/jbenet/go-ipfs/util"
 )

@@ -79,7 +80,7 @@ func (*Root) ReadDir(intr fs.Intr) ([]fuse.Dirent, fuse.Error) {
 type Node struct {
 	Ipfs   *core.IpfsNode
 	Nd     *mdag.Node
-	fd     *mdag.DagReader
+	fd     *uio.DagReader
 	cached *ft.PBData
 }

@@ -143,7 +144,7 @@ func (s *Node) ReadDir(intr fs.Intr) ([]fuse.Dirent, fuse.Error) {
 // ReadAll reads the object data as file data
 func (s *Node) ReadAll(intr fs.Intr) ([]byte, fuse.Error) {
 	u.DOut("Read node.\n")
-	r, err := mdag.NewDagReader(s.Nd, s.Ipfs.DAG)
+	r, err := uio.NewDagReader(s.Nd, s.Ipfs.DAG)
 	if err != nil {
 		return nil, err
 	}
importer/rabin.go → importer/chunk/rabin.go

-package importer
+package chunk

 import (
 	"bufio"
importer/splitting.go → importer/chunk/splitting.go

-package importer
+package chunk

-import "io"
+import (
+	"io"
+
+	"github.com/jbenet/go-ipfs/util"
+)
+
+var log = util.Logger("chunk")
+
+var DefaultSplitter = &SizeSplitter{1024 * 512}

 type BlockSplitter interface {
 	Split(r io.Reader) chan []byte
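The BlockSplitter contract is unchanged by the move: Split takes an io.Reader and returns a channel of byte slices, and DefaultSplitter is a 512 KiB SizeSplitter. Below is a small sketch of driving the relocated splitter directly; the chunk size and the zero-filled input are arbitrary choices for illustration.

package main

import (
	"bytes"
	"fmt"

	"github.com/jbenet/go-ipfs/importer/chunk"
)

func main() {
	// Split 3000 bytes into fixed 1 KiB chunks and count what comes out.
	spl := &chunk.SizeSplitter{1024}
	var nchunks, total int
	for blk := range spl.Split(bytes.NewReader(make([]byte, 3000))) {
		nchunks++
		total += len(blk)
	}
	fmt.Printf("%d chunks, %d bytes total\n", nchunks, total)
}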
importer/importer.go

@@ -5,8 +5,9 @@ import (
 	"io"
 	"os"

-	ft "github.com/jbenet/go-ipfs/importer/format"
+	"github.com/jbenet/go-ipfs/importer/chunk"
 	dag "github.com/jbenet/go-ipfs/merkledag"
+	ft "github.com/jbenet/go-ipfs/unixfs"
 	"github.com/jbenet/go-ipfs/util"
 )

@@ -18,18 +19,16 @@ var BlockSizeLimit = int64(1048576) // 1 MB
 // ErrSizeLimitExceeded signals that a block is larger than BlockSizeLimit.
 var ErrSizeLimitExceeded = fmt.Errorf("object size limit exceeded")

-var DefaultSplitter = &SizeSplitter{1024 * 512}
-
 // todo: incremental construction with an ipfs node. dumping constructed
 // objects into the datastore, to avoid buffering all in memory

 // NewDagFromReader constructs a Merkle DAG from the given io.Reader.
 // size required for block construction.
 func NewDagFromReader(r io.Reader) (*dag.Node, error) {
-	return NewDagFromReaderWithSplitter(r, DefaultSplitter)
+	return NewDagFromReaderWithSplitter(r, chunk.DefaultSplitter)
 }

-func NewDagFromReaderWithSplitter(r io.Reader, spl BlockSplitter) (*dag.Node, error) {
+func NewDagFromReaderWithSplitter(r io.Reader, spl chunk.BlockSplitter) (*dag.Node, error) {
 	blkChan := spl.Split(r)
 	first := <-blkChan
 	root := &dag.Node{}
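After the rework, importer.NewDagFromReader keeps its one-argument shape and simply forwards chunk.DefaultSplitter, while callers that care about chunk size pass their own chunk.BlockSplitter to NewDagFromReaderWithSplitter. A brief sketch of both entry points; the zero-filled input is a stand-in for real file data.

package main

import (
	"bytes"
	"fmt"

	imp "github.com/jbenet/go-ipfs/importer"
	"github.com/jbenet/go-ipfs/importer/chunk"
)

func main() {
	data := make([]byte, 1<<20) // 1 MiB of zeroes as sample input

	// Default chunking: forwards chunk.DefaultSplitter (512 KiB blocks).
	root, err := imp.NewDagFromReader(bytes.NewReader(data))
	if err != nil {
		panic(err)
	}

	// Explicit chunking: any chunk.BlockSplitter works, here 4 KiB blocks.
	root4k, err := imp.NewDagFromReaderWithSplitter(bytes.NewReader(data), &chunk.SizeSplitter{4096})
	if err != nil {
		panic(err)
	}

	fmt.Println(root != nil, root4k != nil)
}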
importer/importer_test.go

@@ -9,9 +9,13 @@ import (
 	"os"
 	"testing"

-	dag "github.com/jbenet/go-ipfs/merkledag"
+	"github.com/jbenet/go-ipfs/importer/chunk"
+	uio "github.com/jbenet/go-ipfs/unixfs/io"
 )

+// NOTE:
+// These tests tests a combination of unixfs/io/dagreader and importer/chunk.
+// Maybe split them up somehow?
 func TestBuildDag(t *testing.T) {
 	td := os.TempDir()
 	fi, err := os.Create(td + "/tmpfi")

@@ -34,9 +38,9 @@ func TestBuildDag(t *testing.T) {
 //Test where calls to read are smaller than the chunk size
 func TestSizeBasedSplit(t *testing.T) {
-	bs := &SizeSplitter{512}
+	bs := &chunk.SizeSplitter{512}
 	testFileConsistency(t, bs, 32*512)
-	bs = &SizeSplitter{4096}
+	bs = &chunk.SizeSplitter{4096}
 	testFileConsistency(t, bs, 32*4096)

 	// Uneven offset

@@ -49,7 +53,7 @@ func dup(b []byte) []byte {
 	return o
 }

-func testFileConsistency(t *testing.T, bs BlockSplitter, nbytes int) {
+func testFileConsistency(t *testing.T, bs chunk.BlockSplitter, nbytes int) {
 	buf := new(bytes.Buffer)
 	io.CopyN(buf, rand.Reader, int64(nbytes))
 	should := dup(buf.Bytes())

@@ -57,7 +61,7 @@ func testFileConsistency(t *testing.T, bs BlockSplitter, nbytes int) {
 	if err != nil {
 		t.Fatal(err)
 	}
-	r, err := dag.NewDagReader(nd, nil)
+	r, err := uio.NewDagReader(nd, nil)
 	if err != nil {
 		t.Fatal(err)
 	}

@@ -86,14 +90,14 @@ func arrComp(a, b []byte) error {
 }

 func TestMaybeRabinConsistency(t *testing.T) {
-	testFileConsistency(t, NewMaybeRabin(4096), 256*4096)
+	testFileConsistency(t, chunk.NewMaybeRabin(4096), 256*4096)
 }

 func TestRabinBlockSize(t *testing.T) {
 	buf := new(bytes.Buffer)
 	nbytes := 1024 * 1024
 	io.CopyN(buf, rand.Reader, int64(nbytes))
-	rab := NewMaybeRabin(4096)
+	rab := chunk.NewMaybeRabin(4096)
 	blkch := rab.Split(buf)

 	var blocks [][]byte
server/http/ipfs.go

@@ -6,6 +6,7 @@ import (
 	core "github.com/jbenet/go-ipfs/core"
 	"github.com/jbenet/go-ipfs/importer"
 	dag "github.com/jbenet/go-ipfs/merkledag"
+	uio "github.com/jbenet/go-ipfs/unixfs/io"
 	u "github.com/jbenet/go-ipfs/util"
 )

@@ -33,5 +34,5 @@ func (i *ipfsHandler) AddNodeToDAG(nd *dag.Node) (u.Key, error) {
 }

 func (i *ipfsHandler) NewDagReader(nd *dag.Node) (io.Reader, error) {
-	return dag.NewDagReader(nd, i.node.DAG)
+	return uio.NewDagReader(nd, i.node.DAG)
 }
importer/format/Makefile → unixfs/Makefile

File moved.
importer/format/data.pb.go → unixfs/data.pb.go

@@ -3,7 +3,7 @@
 // DO NOT EDIT!

 /*
-Package format is a generated protocol buffer package.
+Package unixfs is a generated protocol buffer package.

 It is generated from these files:
 	data.proto

@@ -11,9 +11,9 @@ It is generated from these files:
 It has these top-level messages:
 	PBData
 */
-package format
+package unixfs

-import proto "github.com/jbenet/go-ipfs/Godeps/_workspace/src/code.google.com/p/goprotobuf/proto"
+import proto "code.google.com/p/goprotobuf/proto"
 import math "math"

 // Reference imports to suppress errors if they are not otherwise used.

@@ -57,7 +57,7 @@ func (x *PBData_DataType) UnmarshalJSON(data []byte) error {
 }

 type PBData struct {
-	Type       *PBData_DataType `protobuf:"varint,1,req,enum=format.PBData_DataType" json:"Type,omitempty"`
+	Type       *PBData_DataType `protobuf:"varint,1,req,enum=unixfs.PBData_DataType" json:"Type,omitempty"`
 	Data       []byte           `protobuf:"bytes,2,opt" json:"Data,omitempty"`
 	Filesize   *uint64          `protobuf:"varint,3,opt,name=filesize" json:"filesize,omitempty"`
 	Blocksizes []uint64         `protobuf:"varint,4,rep,name=blocksizes" json:"blocksizes,omitempty"`

@@ -97,5 +97,5 @@ func (m *PBData) GetBlocksizes() []uint64 {
 }

 func init() {
-	proto.RegisterEnum("format.PBData_DataType", PBData_DataType_name, PBData_DataType_value)
+	proto.RegisterEnum("unixfs.PBData_DataType", PBData_DataType_name, PBData_DataType_value)
 }
importer/format/data.proto → unixfs/data.proto

-package format;
+package unixfs;

 message PBData {
 	enum DataType {
importer/format/format.go → unixfs/format.go

 // Package format implements a data format for files in the ipfs filesystem
 // It is not the only format in ipfs, but it is the one that the filesystem assumes
-package format
+package unixfs

 import (
 	"errors"
importer/format/format_test.go → unixfs/format_test.go

-package format
+package unixfs

 import (
 	"testing"
importer/dagwriter/dagmodifier.go → unixfs/io/dagmodifier.go

-package dagwriter
+package io

 import (
 	"bytes"

@@ -6,9 +6,9 @@ import (
 	"github.com/jbenet/go-ipfs/Godeps/_workspace/src/code.google.com/p/goprotobuf/proto"
-	imp "github.com/jbenet/go-ipfs/importer"
-	ft "github.com/jbenet/go-ipfs/importer/format"
+	"github.com/jbenet/go-ipfs/importer/chunk"
 	mdag "github.com/jbenet/go-ipfs/merkledag"
+	ft "github.com/jbenet/go-ipfs/unixfs"
 	u "github.com/jbenet/go-ipfs/util"
 )

@@ -20,10 +20,10 @@ type DagModifier struct {
 	curNode *mdag.Node

 	pbdata   *ft.PBData
-	splitter imp.BlockSplitter
+	splitter chunk.BlockSplitter
 }

-func NewDagModifier(from *mdag.Node, serv *mdag.DAGService, spl imp.BlockSplitter) (*DagModifier, error) {
+func NewDagModifier(from *mdag.Node, serv *mdag.DAGService, spl chunk.BlockSplitter) (*DagModifier, error) {
 	pbd, err := ft.FromBytes(from.Data)
 	if err != nil {
 		return nil, err

@@ -172,7 +172,7 @@ func (dm *DagModifier) WriteAt(b []byte, offset uint64) (int, error) {
 // splitBytes uses a splitterFunc to turn a large array of bytes
 // into many smaller arrays of bytes
-func splitBytes(b []byte, spl imp.BlockSplitter) [][]byte {
+func splitBytes(b []byte, spl chunk.BlockSplitter) [][]byte {
 	out := spl.Split(bytes.NewReader(b))
 	var arr [][]byte
 	for blk := range out {
importer/dagwriter/dagmodifier_test.go → unixfs/io/dagmodifier_test.go

-package dagwriter
+package io

 import (
 	"fmt"

@@ -8,9 +8,9 @@ import (
 	"github.com/jbenet/go-ipfs/Godeps/_workspace/src/github.com/op/go-logging"
 	bs "github.com/jbenet/go-ipfs/blockservice"
-	imp "github.com/jbenet/go-ipfs/importer"
-	ft "github.com/jbenet/go-ipfs/importer/format"
+	"github.com/jbenet/go-ipfs/importer/chunk"
 	mdag "github.com/jbenet/go-ipfs/merkledag"
+	ft "github.com/jbenet/go-ipfs/unixfs"
 	u "github.com/jbenet/go-ipfs/util"

 	ds "github.com/jbenet/go-ipfs/Godeps/_workspace/src/github.com/jbenet/datastore.go"

@@ -26,7 +26,7 @@ func getMockDagServ(t *testing.T) *mdag.DAGService {
 }

 func getNode(t *testing.T, dserv *mdag.DAGService, size int64) ([]byte, *mdag.Node) {
-	dw := NewDagWriter(dserv, &imp.SizeSplitter{500})
+	dw := NewDagWriter(dserv, &chunk.SizeSplitter{500})

 	n, err := io.CopyN(dw, u.NewFastRand(), size)
 	if err != nil {

@@ -39,7 +39,7 @@ func getNode(t *testing.T, dserv *mdag.DAGService, size int64) ([]byte, *mdag.No
 	dw.Close()
 	node := dw.GetNode()

-	dr, err := mdag.NewDagReader(node, dserv)
+	dr, err := NewDagReader(node, dserv)
 	if err != nil {
 		t.Fatal(err)
 	}

@@ -76,7 +76,7 @@ func testModWrite(t *testing.T, beg, size uint64, orig []byte, dm *DagModifier)
 		t.Fatal(err)
 	}

-	rd, err := mdag.NewDagReader(nd, dm.dagserv)
+	rd, err := NewDagReader(nd, dm.dagserv)
 	if err != nil {
 		t.Fatal(err)
 	}

@@ -99,7 +99,7 @@ func TestDagModifierBasic(t *testing.T) {
 	dserv := getMockDagServ(t)
 	b, n := getNode(t, dserv, 50000)

-	dagmod, err := NewDagModifier(n, dserv, &imp.SizeSplitter{512})
+	dagmod, err := NewDagModifier(n, dserv, &chunk.SizeSplitter{512})
 	if err != nil {
 		t.Fatal(err)
 	}
merkledag/dagreader.go → unixfs/io/dagreader.go

-package merkledag
+package io

 import (
 	"bytes"

@@ -6,7 +6,8 @@ import (
 	"io"

 	proto "github.com/jbenet/go-ipfs/Godeps/_workspace/src/code.google.com/p/goprotobuf/proto"
-	ft "github.com/jbenet/go-ipfs/importer/format"
+	mdag "github.com/jbenet/go-ipfs/merkledag"
+	ft "github.com/jbenet/go-ipfs/unixfs"
 	u "github.com/jbenet/go-ipfs/util"
 )

@@ -14,15 +15,15 @@ var ErrIsDir = errors.New("this dag node is a directory")
 // DagReader provides a way to easily read the data contained in a dag.
 type DagReader struct {
-	serv     *DAGService
-	node     *Node
+	serv     *mdag.DAGService
+	node     *mdag.Node
 	position int
 	buf      *bytes.Buffer
 }

 // NewDagReader creates a new reader object that reads the data represented by the given
 // node, using the passed in DAGService for data retreival
-func NewDagReader(n *Node, serv *DAGService) (io.Reader, error) {
+func NewDagReader(n *mdag.Node, serv *mdag.DAGService) (io.Reader, error) {
 	pb := new(ft.PBData)
 	err := proto.Unmarshal(n.Data, pb)
 	if err != nil {
importer/dagwriter/dagwriter.go → unixfs/io/dagwriter.go

-package dagwriter
+package io

 import (
-	imp "github.com/jbenet/go-ipfs/importer"
-	ft "github.com/jbenet/go-ipfs/importer/format"
+	"github.com/jbenet/go-ipfs/importer/chunk"
 	dag "github.com/jbenet/go-ipfs/merkledag"
+	ft "github.com/jbenet/go-ipfs/unixfs"
 	"github.com/jbenet/go-ipfs/util"
 )

@@ -15,11 +15,11 @@ type DagWriter struct {
 	totalSize int64
 	splChan   chan []byte
 	done      chan struct{}
-	splitter  imp.BlockSplitter
+	splitter  chunk.BlockSplitter
 	seterr    error
 }

-func NewDagWriter(ds *dag.DAGService, splitter imp.BlockSplitter) *DagWriter {
+func NewDagWriter(ds *dag.DAGService, splitter chunk.BlockSplitter) *DagWriter {
 	dw := new(DagWriter)
 	dw.dagserv = ds
 	dw.splChan = make(chan []byte, 8)
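The DagWriter keeps its io.Writer-style usage after the move; only the splitter argument changes package, and the matching DagReader now lives in the same unixfs/io package. Below is a sketch of the round trip, following the pattern the tests below use; writeAndReadBack is an illustrative helper, and the *dag.DAGService is assumed to be built elsewhere (the tests construct one from a mock datastore).

package io

import (
	"io"

	"github.com/jbenet/go-ipfs/importer/chunk"
	dag "github.com/jbenet/go-ipfs/merkledag"
)

// writeAndReadBack copies nbytes from data into a DagWriter chunked at 4 KiB,
// closes it, takes the resulting root node, and opens that node again with
// the DagReader that now lives alongside the writer.
func writeAndReadBack(dserv *dag.DAGService, data io.Reader, nbytes int64) (io.Reader, error) {
	dw := NewDagWriter(dserv, &chunk.SizeSplitter{4096})
	if _, err := io.CopyN(dw, data, nbytes); err != nil {
		return nil, err
	}
	dw.Close()
	node := dw.GetNode()
	return NewDagReader(node, dserv)
}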
importer/dagwriter/dagwriter_test.go → unixfs/io/dagwriter_test.go

-package dagwriter
+package io

 import (
 	"testing"

@@ -7,7 +7,7 @@ import (
 	ds "github.com/jbenet/go-ipfs/Godeps/_workspace/src/github.com/jbenet/datastore.go"
 	bs "github.com/jbenet/go-ipfs/blockservice"
-	imp "github.com/jbenet/go-ipfs/importer"
+	chunk "github.com/jbenet/go-ipfs/importer/chunk"
 	mdag "github.com/jbenet/go-ipfs/merkledag"
 )

@@ -54,7 +54,7 @@ func TestDagWriter(t *testing.T) {
 		t.Fatal(err)
 	}
 	dag := &mdag.DAGService{bserv}
-	dw := NewDagWriter(dag, &imp.SizeSplitter{4096})
+	dw := NewDagWriter(dag, &chunk.SizeSplitter{4096})

 	nbytes := int64(1024 * 1024 * 2)
 	n, err := io.CopyN(dw, &datasource{}, nbytes)

@@ -69,7 +69,7 @@ func TestDagWriter(t *testing.T) {
 	dw.Close()

 	node := dw.GetNode()
-	read, err := mdag.NewDagReader(node, dag)
+	read, err := NewDagReader(node, dag)
 	if err != nil {
 		t.Fatal(err)
 	}

@@ -88,7 +88,7 @@ func TestMassiveWrite(t *testing.T) {
 		t.Fatal(err)
 	}
 	dag := &mdag.DAGService{bserv}
-	dw := NewDagWriter(dag, &imp.SizeSplitter{4096})
+	dw := NewDagWriter(dag, &chunk.SizeSplitter{4096})

 	nbytes := int64(1024 * 1024 * 1024 * 16)
 	n, err := io.CopyN(dw, &datasource{}, nbytes)

@@ -113,7 +113,7 @@ func BenchmarkDagWriter(b *testing.B) {
 	nbytes := int64(100000)
 	for i := 0; i < b.N; i++ {
 		b.SetBytes(nbytes)
-		dw := NewDagWriter(dag, &imp.SizeSplitter{4096})
+		dw := NewDagWriter(dag, &chunk.SizeSplitter{4096})

 		n, err := io.CopyN(dw, &datasource{}, nbytes)
 		if err != nil {
 			b.Fatal(err)