pax_global_header00006660000000000000000000000064145313222420014510gustar00rootroot0000000000000052 comment=15e0f179cabbce73a594e528685627f1fb77bd99 gopickle-0.3.0/000077500000000000000000000000001453132224200133055ustar00rootroot00000000000000gopickle-0.3.0/.github/000077500000000000000000000000001453132224200146455ustar00rootroot00000000000000gopickle-0.3.0/.github/workflows/000077500000000000000000000000001453132224200167025ustar00rootroot00000000000000gopickle-0.3.0/.github/workflows/go.yml000066400000000000000000000010221453132224200200250ustar00rootroot00000000000000name: Go on: [push, pull_request] jobs: test: name: Test runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - uses: actions/setup-go@v4 with: go-version: 1.17 - name: Get dependencies run: go get -v -t -d ./... - name: Run tests and generate coverage report run: go test -race -coverprofile cover.out -covermode atomic ./... - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 with: file: ./cover.out gopickle-0.3.0/.gitignore000066400000000000000000000004151453132224200152750ustar00rootroot00000000000000# Binaries for programs and plugins *.exe *.exe~ *.dll *.so *.dylib # Test binary, built with `go test -c` *.test # Output of the go coverage tool, specifically when used with LiteIDE *.out # Dependency directories (remove the comment below to include it) # vendor/ gopickle-0.3.0/CHANGELOG.md000066400000000000000000000032401453132224200151150ustar00rootroot00000000000000# Changelog All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [Unreleased] ## [0.2.0] - 2023-01-31 ### Added - Support for unpickling torch `BFloat16Storage` (PyTorch `bfloat16` values are converted to Go `float32`). 
### Changed - Performance improvements (mostly involving better buffered reading, and new default capacities for maps and slices) - `types.Dict` is now implemented as a slice of `DictEntry`, rather than a slice of pointers to it, to reduce extra allocations. - Test data has been regenerated with Python `3.10.9` and PyTorch `1.13.1`. ## [0.1.0] - 2021-01-06 ### Added - More and better documentation - `OrderedDict.MustGet()` - `Dict.MustGet()` - `pytorch.LoadWithUnpickler()` which allows loading PyTorch modules using a custom unpickler. - Handle legacy method `torch.nn.backends.thnn._get_thnn_function_backend` when loading pytorch modules. ### Changed - `FrozenSet` implementation was modified, avoiding confusion with `Set`. - Replace build CI job with tests and coverage - `Dict` has been reimplemented using a slice, instead of a map, because in Go not all types can be map's keys (e.g. slices). - Use Go version `1.15` ### Removed - Unused method `List.Extend` ## [0.0.1-alpha.1] - 2020-05-23 ### Fixed - Modify GitHub Action steps `Build` and `Test` including all sub-packages. ## [0.0.1-alpha.0] - 2020-05-23 ### Added - Initial implementation of `types` package - Initial implementation of `pickle` package - Initial implementation of `pytorch` package gopickle-0.3.0/LICENSE000066400000000000000000000024611453132224200143150ustar00rootroot00000000000000BSD 2-Clause License Copyright (c) 2020, NLP Odyssey Authors All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. gopickle-0.3.0/README.md000066400000000000000000000154541453132224200145750ustar00rootroot00000000000000# GoPickle GoPickle is a Go library for loading Python's data serialized with `pickle` and PyTorch module files. The `pickle` sub-package provides the core functionality for loading data serialized with Python `pickle` module, from a file, string, or byte sequence. All _pickle_ protocols from 0 to 5 are supported. The `pytorch` sub-package implements types and functions for loading PyTorch module files. Both the _modern_ zip-compressed format and the _legacy_ non-tar format are supported. Legacy tar-compressed files and TorchScript archives are _not_ supported. ## Project Status and Contributions This project is currently in **alpha** development stage. While we provide better documentation, tests, and functionalities, above all we'd like to battle-test this library with real data and models. This would tremendously help us find tricky bugs, add more built-in types, or change the library API to make it easier to use. The simplest and most useful and way to contribute is to try this library yourself and give us your feedback: report bugs, suggest improvements, or just tell us your opinion. 
And of course, if you feel like it, please go on with your own pull requests! We can discuss any issue and try to find the best solution for it together. ## Usage ### Pickle Simple usage: ```go import "github.com/nlpodyssey/gopickle/pickle" // ... // from file foo, err := pickle.Load("foo.p") // from string stringDump := "I42\n." bar, err := pickle.Loads(stringDump) // ... ``` Advanced/custom usage: ```go import "github.com/nlpodyssey/gopickle/pickle" var r io.Reader // ... u := pickle.NewUnpickler(r) // Handle custom classes u.FindClass = func(module, name string) (interface{}, error) { if module == "foo" && name == "Bar" { return myFooBarClass, nil } return nil, fmt.Errorf("class not found :(") } // Resolve objects by persistent ID u.PersistentLoad = func(persistentId interface{}) (interface{}, error) { obj := doSomethingWithPersistentId(persistentId) return obj, nil } // Handle custom pickle extensions u.GetExtension = func(code int) (interface{}, error) { obj := doSomethingToResolveExtension(code) return obj, nil } // Handle Out-of-band Buffers // https://docs.python.org/3/library/pickle.html#out-of-band-buffers u.NextBuffer = func() (interface{}, error) { buf := getMyNextBuffer() return buf, nil } // Low-level function to handle pickle protocol 5 READONLY_BUFFER opcode. // By default it is completely ignored (sort of no-op); here you have the // ability to manipulate objects as you need. u.MakeReadOnly = func(obj interface{}) (interface{}, error) { newObj := myReadOnlyTransform(obj) return newObj, nil } data, err := u.Load() // ... ``` ### PyTorch The library currently provides a high-level function for loading a module file: ```go import "github.com/nlpodyssey/gopickle/pytorch" // ... myModel, err := pytorch.Load("module.pt") // ... ``` More features will be provided in the future. 
## How it works ### Pickle Unlike more traditional data serialization formats, (such as JSON or YAML), a "pickle" is a _program_ for a so-called _unpickling machine_, also known as _virtual pickle machine_, or _PM_ for short. A program consists in a sequence of opcodes which instructs the virtual machine about how to build arbitrarily complex Python objects. You can learn more from Python `pickletools` [module documentation](https://github.com/python/cpython/blob/3.8/Lib/pickletools.py). Python PM implementation is straightforward, since it can take advantage of the whole environment provided by a running Python interpreter. For this Go implementation we want to keep things simple, for example avoiding dependencies or foreign bindings, yet we want to provide flexibility, and a way for any user to extend basic functionalities of the library. This Go unpickling machine implementation makes use of a set of types defined in `types`. This sub-package contains Go types representing classes, instances and common interfaces for some of the most commonly used builtin non-scalar types in Python. We chose to provide only minimal functionalities for each type, for the sole purpose of making them easy to be handled by the machine. Since Python's _pickle_ can dump and load _any_ object, the aforementioned types are clearly not always sufficient. You can easily handle the loading of any missing class by explicitly providing a `FindClass` callback to an `Unpickler` object. The implementation of your custom classes can be as simple or as sophisticated as you need. If a certain class is required but is not found, by default a `GenericClass` is used. In some circumstances, this is enough to fully load a _pickle_ program, but on other occasions the pickle program might require a certain class with specific traits: in this case, the `GenericClass` is not enough and an error is returned. 
You should be able to fix this situation providing a custom class implementation, that jas to reflect the same basic behaviour you can observe in the original Python implementation. A similar approach is adopted for other peculiar aspects, such as persistent objects loading, extensions handling, and a couple of protocol-5 opcodes: whenever necessary, you can implement custom behaviours providing one or more callback functions. Once resolved, all representation of classes and objects are casted to `interface{}` type; then the machine looks for specific types or interfaces to be implemented on an object only where strictly necessary. The virtual machine closely follows the original implementation from Python 3.8 - see the [`Unpickler` class](https://github.com/python/cpython/blob/3.8/Lib/pickle.py#L1134). ### PyTorch [PyTorch](https://pytorch.org/) machine learning framework allows you to save and load Python objects which include (or _are_) [Tensors](https://pytorch.org/docs/stable/tensors.html) or other framework-specific elements. Tensors data are handled by the more primitive [Storage](https://pytorch.org/docs/stable/storage.html) classes, which are efficiently serialized as raw sequences of bytes. All the rest is dumped using `pickle`. When serializing, the programmer can choose any available pickle protocol, and whether to use zip compression. The package `pytorch` implements loading functionalities for data files serialized with PyTorch (called _modules_). The Go implementation strictly follows the original Python (and C++) [code](https://github.com/pytorch/pytorch/blob/master/torch/serialization.py#L486). The `pickle` and `types` packages are used to read some parts of a given file. Other specific types are implemented in the `pytorch` module itself, most notably to reflect the content of PyTorch Tensor and Storage objects. ## License GoPickle is licensed under a BSD-style license. 
See [LICENSE](https://github.com/nlpodyssey/gopickle/blob/master/LICENSE) for the full license text. gopickle-0.3.0/go.mod000066400000000000000000000003671453132224200144210ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. module github.com/nlpodyssey/gopickle go 1.17 require golang.org/x/text v0.14.0 gopickle-0.3.0/go.sum000066400000000000000000000060011453132224200144350ustar00rootroot00000000000000github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= gopickle-0.3.0/pickle/000077500000000000000000000000001453132224200145545ustar00rootroot00000000000000gopickle-0.3.0/pickle/pickle.go000066400000000000000000000732271453132224200163650ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. 
// Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package pickle import ( "bufio" "bytes" "encoding/binary" "fmt" "io" "math" "math/big" "os" "strconv" "strings" "github.com/nlpodyssey/gopickle/types" ) const HighestProtocol byte = 5 func Load(filename string) (interface{}, error) { f, err := os.Open(filename) if err != nil { return nil, err } defer f.Close() u := NewUnpickler(bufio.NewReader(f)) return u.Load() } func Loads(s string) (interface{}, error) { sr := strings.NewReader(s) u := NewUnpickler(sr) return u.Load() } type reader interface { io.Reader io.ByteReader } type bytereader struct { io.Reader bytebuf [1]byte } func (r *bytereader) ReadByte() (byte, error) { n, err := r.Read(r.bytebuf[:]) if n == 1 { return r.bytebuf[0], nil } if err == nil { err = io.EOF } return 0, err } type Unpickler struct { r reader proto byte currentFrame *bytes.Reader stack []interface{} metaStack [][]interface{} memo map[int]interface{} FindClass func(module, name string) (interface{}, error) PersistentLoad func(interface{}) (interface{}, error) GetExtension func(code int) (interface{}, error) NextBuffer func() (interface{}, error) MakeReadOnly func(interface{}) (interface{}, error) } func NewUnpickler(ior io.Reader) Unpickler { r, ok := ior.(reader) if !ok { r = &bytereader{Reader: ior} } return Unpickler{ r: r, memo: make(map[int]interface{}, 256+128), } } func (u *Unpickler) Load() (interface{}, error) { u.metaStack = make([][]interface{}, 0, 16) u.stack = make([]interface{}, 0, 16) u.proto = 0 for { opcode, err := u.readOne() if err != nil { return nil, err } opFunc := dispatch[opcode] if opFunc == nil { return nil, fmt.Errorf("unknown opcode: 0x%x '%c'", opcode, opcode) } err = opFunc(u) if err != nil { if p, ok := err.(pickleStop); ok { return p.value, nil } return nil, err } } } type pickleStop struct{ value interface{} } func (p pickleStop) Error() string { return "STOP" } var _ error = pickleStop{} func (u 
*Unpickler) findClass(module, name string) (interface{}, error) { switch module { case "collections": switch name { case "OrderedDict": return &types.OrderedDictClass{}, nil } case "builtins": switch name { case "list": return &types.List{}, nil case "dict": return &types.Dict{}, nil } case "__builtin__": switch name { case "object": return &types.ObjectClass{}, nil } case "array": switch name { case "_array_reconstructor": return &types.Array{}, nil } case "copy_reg": switch name { case "_reconstructor": return &types.Reconstructor{}, nil } } if u.FindClass != nil { return u.FindClass(module, name) } return types.NewGenericClass(module, name), nil } func (u *Unpickler) read(n int) ([]byte, error) { buf := make([]byte, n) if u.currentFrame != nil { m, err := io.ReadFull(u.currentFrame, buf) if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF { return nil, err } if m == 0 && n != 0 { u.currentFrame = nil m, err := io.ReadFull(u.r, buf) return buf[0:m], err } if m < n { return nil, fmt.Errorf("pickle exhausted before end of frame") } return buf[0:m], nil } m, err := io.ReadFull(u.r, buf) return buf[0:m], err } func (u *Unpickler) readOne() (byte, error) { var err error var b byte if u.currentFrame != nil { b, err = u.currentFrame.ReadByte() } else { b, err = u.r.ReadByte() } if err == nil { return b, nil } buf, err := u.read(1) if err != nil { return 0, err } return buf[0], nil } func (u *Unpickler) readLine() ([]byte, error) { if u.currentFrame != nil { line, err := readLine(u.currentFrame) if err != nil { if err == io.EOF && len(line) == 0 { u.currentFrame = nil return readLine(u.r) } return nil, err } if len(line) == 0 { return nil, fmt.Errorf("readLine no data") } if line[len(line)-1] != '\n' { return nil, fmt.Errorf("pickle exhausted before end of frame") } return line, nil } return readLine(u.r) } func readLine(r reader) (line []byte, err error) { line = make([]byte, 0, 32) var b byte for { b, err = r.ReadByte() if err != nil { return } line = 
append(line, b) if b == '\n' { return } } } func (u *Unpickler) loadFrame(frameSize int) error { buf := make([]byte, frameSize) if u.currentFrame != nil { n, err := (*u.currentFrame).Read(buf) if n > 0 || err == nil { return fmt.Errorf( "beginning of a new frame before end of current frame") } } _, err := io.ReadFull(u.r, buf) if err != nil { return err } u.currentFrame = bytes.NewReader(buf) return nil } func (u *Unpickler) append(element interface{}) { u.stack = append(u.stack, element) } func (u *Unpickler) stackLast() (interface{}, error) { if len(u.stack) == 0 { return nil, fmt.Errorf("the stack is empty") } return u.stack[len(u.stack)-1], nil } func (u *Unpickler) stackPop() (interface{}, error) { element, err := u.stackLast() if err != nil { return nil, err } u.stack = u.stack[:len(u.stack)-1] return element, nil } func (u *Unpickler) metaStackLast() ([]interface{}, error) { if len(u.metaStack) == 0 { return nil, fmt.Errorf("the meta stack is empty") } return u.metaStack[len(u.metaStack)-1], nil } func (u *Unpickler) metaStackPop() ([]interface{}, error) { element, err := u.metaStackLast() if err != nil { return nil, err } u.metaStack = u.metaStack[:len(u.metaStack)-1] return element, nil } // Returns a list of items pushed in the stack after last MARK instruction. 
func (u *Unpickler) popMark() ([]interface{}, error) { items := u.stack newStack, err := u.metaStackPop() if err != nil { return nil, err } u.stack = newStack return items, nil } var dispatch [math.MaxUint8]func(*Unpickler) error func init() { // Initialize `dispatch` assigning functions to opcodes // Protocol 0 and 1 dispatch['('] = loadMark dispatch['.'] = loadStop dispatch['0'] = loadPop dispatch['1'] = loadPopMark dispatch['2'] = loadDup dispatch['F'] = loadFloat dispatch['I'] = loadInt dispatch['J'] = loadBinInt dispatch['K'] = loadBinInt1 dispatch['L'] = loadLong dispatch['M'] = loadBinInt2 dispatch['N'] = loadNone dispatch['P'] = loadPersId dispatch['Q'] = loadBinPersId dispatch['R'] = loadReduce dispatch['S'] = loadString dispatch['T'] = loadBinString dispatch['U'] = loadShortBinString dispatch['V'] = loadUnicode dispatch['X'] = loadBinUnicode dispatch['a'] = loadAppend dispatch['b'] = loadBuild dispatch['c'] = loadGlobal dispatch['d'] = loadDict dispatch['}'] = loadEmptyDict dispatch['e'] = loadAppends dispatch['g'] = loadGet dispatch['h'] = loadBinGet dispatch['i'] = loadInst dispatch['j'] = loadLongBinGet dispatch['l'] = loadList dispatch[']'] = loadEmptyList dispatch['o'] = loadObj dispatch['p'] = loadPut dispatch['q'] = loadBinPut dispatch['r'] = loadLongBinPut dispatch['s'] = loadSetItem dispatch['t'] = loadTuple dispatch[')'] = loadEmptyTuple dispatch['u'] = loadSetItems dispatch['G'] = loadBinFloat // Protocol 2 dispatch['\x80'] = loadProto dispatch['\x81'] = loadNewObj dispatch['\x82'] = opExt1 dispatch['\x83'] = opExt2 dispatch['\x84'] = opExt4 dispatch['\x85'] = loadTuple1 dispatch['\x86'] = loadTuple2 dispatch['\x87'] = loadTuple3 dispatch['\x88'] = loadTrue dispatch['\x89'] = loadFalse dispatch['\x8a'] = loadLong1 dispatch['\x8b'] = loadLong4 // Protocol 3 (Python 3.x) dispatch['B'] = loadBinBytes dispatch['C'] = loadShortBinBytes // Protocol 4 dispatch['\x8c'] = loadShortBinUnicode dispatch['\x8d'] = loadBinUnicode8 dispatch['\x8e'] = 
loadBinBytes8 dispatch['\x8f'] = loadEmptySet dispatch['\x90'] = loadAddItems dispatch['\x91'] = loadFrozenSet dispatch['\x92'] = loadNewObjEx dispatch['\x93'] = loadStackGlobal dispatch['\x94'] = loadMemoize dispatch['\x95'] = loadFrame // Protocol 5 dispatch['\x96'] = loadByteArray8 dispatch['\x97'] = loadNextBuffer dispatch['\x98'] = loadReadOnlyBuffer } // identify pickle protocol func loadProto(u *Unpickler) error { proto, err := u.readOne() if err != nil { return err } if proto > HighestProtocol { return fmt.Errorf("unsupported pickle protocol: %d", proto) } u.proto = proto return nil } // indicate the beginning of a new frame func loadFrame(u *Unpickler) error { buf, err := u.read(8) if err != nil { return err } frameSize := binary.LittleEndian.Uint64(buf) if frameSize > math.MaxInt64 { return fmt.Errorf("frame size > max int64: %d", frameSize) } return u.loadFrame(int(frameSize)) } // push persistent object; id is taken from string arg func loadPersId(u *Unpickler) error { if u.PersistentLoad == nil { return fmt.Errorf("unsupported persistent ID encountered") } line, err := u.readLine() if err != nil { return err } pid := string(line[:len(line)-1]) result, err := u.PersistentLoad(pid) if err != nil { return err } u.append(result) return nil } // push persistent object; id is taken from stack func loadBinPersId(u *Unpickler) error { if u.PersistentLoad == nil { return fmt.Errorf("unsupported persistent ID encountered") } pid, err := u.stackPop() if err != nil { return err } result, err := u.PersistentLoad(pid) if err != nil { return err } u.append(result) return nil } // push None (nil) func loadNone(u *Unpickler) error { u.append(nil) return nil } // push False func loadFalse(u *Unpickler) error { u.append(false) return nil } // push True func loadTrue(u *Unpickler) error { u.append(true) return nil } // push integer or bool; decimal string argument func loadInt(u *Unpickler) error { line, err := u.readLine() if err != nil { return err } data := 
string(line[:len(line)-1]) if len(data) == 2 && data[0] == '0' && data[1] == '0' { u.append(false) return nil } if len(data) == 2 && data[0] == '0' && data[1] == '1' { u.append(true) return nil } i, err := strconv.Atoi(data) if err != nil { return err } u.append(i) return nil } // push four-byte signed int func loadBinInt(u *Unpickler) error { buf, err := u.read(4) if err != nil { return err } u.append(decodeInt32(buf)) return nil } // push 1-byte unsigned int func loadBinInt1(u *Unpickler) error { i, err := u.readOne() if err != nil { return err } u.append(int(i)) return nil } // push 2-byte unsigned int func loadBinInt2(u *Unpickler) error { buf, err := u.read(2) if err != nil { return err } u.append(int(binary.LittleEndian.Uint16(buf))) return nil } // push long; decimal string argument func loadLong(u *Unpickler) error { line, err := u.readLine() if err != nil { return err } sub := line[:len(line)-1] if len(sub) == 0 { return fmt.Errorf("invalid long data") } if sub[len(sub)-1] == 'L' { sub = sub[0 : len(sub)-1] } str := string(sub) i, err := strconv.ParseInt(str, 10, 64) if err != nil { if ne, isNe := err.(*strconv.NumError); isNe && ne.Err == strconv.ErrRange { bi, ok := new(big.Int).SetString(str, 10) if !ok { return fmt.Errorf("invalid long data") } u.append(bi) return nil } return err } u.append(int(i)) return nil } // push long from < 256 bytes func loadLong1(u *Unpickler) error { length, err := u.readOne() if err != nil { return err } data, err := u.read(int(length)) if err != nil { return err } u.append(decodeLong(data)) return nil } // push really big long func loadLong4(u *Unpickler) error { buf, err := u.read(4) if err != nil { return err } length := decodeInt32(buf) if length < 0 { return fmt.Errorf("LONG pickle has negative byte count") } data, err := u.read(length) if err != nil { return err } u.append(decodeLong(data)) return nil } func decodeLong(bytes []byte) interface{} { msBitSet := bytes[len(bytes)-1]&0x80 != 0 if len(bytes) > 8 { bi := 
new(big.Int) _ = bytes[len(bytes)-1] for i := len(bytes) - 1; i >= 0; i-- { bi = bi.Lsh(bi, 8) if msBitSet { bi = bi.Or(bi, big.NewInt(int64(^bytes[i]))) } else { bi = bi.Or(bi, big.NewInt(int64(bytes[i]))) } } if msBitSet { bi = bi.Add(bi, big.NewInt(1)) bi = bi.Neg(bi) } return bi } var ux, bitMask uint64 _ = bytes[len(bytes)-1] for i := len(bytes) - 1; i >= 0; i-- { ux = (ux << 8) | uint64(bytes[i]) bitMask = (bitMask << 8) | 0xFF } if msBitSet { return -(int(^ux&bitMask) + 1) } return int(ux) } // push float object; decimal string argument func loadFloat(u *Unpickler) error { line, err := u.readLine() if err != nil { return err } f, err := strconv.ParseFloat(string(line[:len(line)-1]), 64) if err != nil { return err } u.append(f) return nil } // push float; arg is 8-byte float encoding func loadBinFloat(u *Unpickler) error { buf, err := u.read(8) if err != nil { return err } u.append(math.Float64frombits(binary.BigEndian.Uint64(buf))) return nil } // push string; NL-terminated string argument func loadString(u *Unpickler) error { line, err := u.readLine() if err != nil { return err } data := line[:len(line)-1] // Strip outermost quotes if !isQuotedString(data) { return fmt.Errorf("the STRING opcode argument must be quoted") } data = data[1 : len(data)-1] // TODO: decode to string with the desired decoder u.append(string(data)) return nil } func isQuotedString(b []byte) bool { return len(b) >= 2 && b[0] == b[len(b)-1] && (b[0] == '\'' || b[0] == '"') } // push string; counted binary string argument func loadBinString(u *Unpickler) error { // Deprecated BINSTRING uses signed 32-bit length buf, err := u.read(4) if err != nil { return err } length := decodeInt32(buf) if length < 0 { return fmt.Errorf("BINSTRING pickle has negative byte count") } data, err := u.read(length) if err != nil { return err } // TODO: decode to string with the desired decoder u.append(string(data)) return nil } // push bytes; counted binary string argument func loadBinBytes(u *Unpickler) 
error { buf, err := u.read(4) if err != nil { return err } length := int(binary.LittleEndian.Uint32(buf)) buf, err = u.read(length) if err != nil { return err } u.append(buf) return nil } // push Unicode string; raw-unicode-escaped'd argument func loadUnicode(u *Unpickler) error { line, err := u.readLine() if err != nil { return err } u.append(string(line[:len(line)-1])) return nil } // push Unicode string; counted UTF-8 string argument func loadBinUnicode(u *Unpickler) error { buf, err := u.read(4) if err != nil { return err } length := int(binary.LittleEndian.Uint32(buf)) buf, err = u.read(length) if err != nil { return err } u.append(string(buf)) return nil } // push very long string func loadBinUnicode8(u *Unpickler) error { buf, err := u.read(8) if err != nil { return err } length := binary.LittleEndian.Uint64(buf) if length > math.MaxInt64 { return fmt.Errorf("BINUNICODE8 exceeds system's maximum size") } buf, err = u.read(int(length)) if err != nil { return err } u.append(string(buf)) // TODO: decode UTF-8? 
return nil
}

// push very long bytes string
func loadBinBytes8(u *Unpickler) error {
	buf, err := u.read(8)
	if err != nil {
		return err
	}
	length := binary.LittleEndian.Uint64(buf)
	if length > math.MaxInt64 {
		return fmt.Errorf("BINBYTES8 exceeds system's maximum size")
	}
	buf, err = u.read(int(length))
	if err != nil {
		return err
	}
	u.append(buf)
	return nil
}

// push bytearray
func loadByteArray8(u *Unpickler) error {
	buf, err := u.read(8)
	if err != nil {
		return err
	}
	length := binary.LittleEndian.Uint64(buf)
	if length > math.MaxInt64 {
		return fmt.Errorf("BYTEARRAY8 exceeds system's maximum size")
	}
	buf, err = u.read(int(length))
	if err != nil {
		return err
	}
	u.append(types.NewByteArrayFromSlice(buf))
	return nil
}

// push next out-of-band buffer
func loadNextBuffer(u *Unpickler) error {
	if u.NextBuffer == nil {
		return fmt.Errorf("pickle stream refers to out-of-band data but NextBuffer was not given")
	}
	buf, err := u.NextBuffer()
	if err != nil {
		return err
	}
	u.append(buf)
	return nil
}

// make top of stack readonly
func loadReadOnlyBuffer(u *Unpickler) error {
	// Without a MakeReadOnly callback the buffer is left on the stack as-is.
	if u.MakeReadOnly == nil {
		return nil
	}
	buf, err := u.stackPop()
	if err != nil {
		return err
	}
	buf, err = u.MakeReadOnly(buf)
	if err != nil {
		return err
	}
	u.append(buf)
	return nil
}

// push string; counted binary string argument < 256 bytes
func loadShortBinString(u *Unpickler) error {
	length, err := u.readOne()
	if err != nil {
		return err
	}
	data, err := u.read(int(length))
	if err != nil {
		return err
	}
	// TODO: decode to string with the desired decoder
	u.append(string(data))
	return nil
}

// push bytes; counted binary string argument < 256 bytes
func loadShortBinBytes(u *Unpickler) error {
	length, err := u.readOne()
	if err != nil {
		return err
	}
	buf, err := u.read(int(length))
	if err != nil {
		return err
	}
	u.append(buf)
	return nil
}

// push short string; UTF-8 length < 256 bytes
func loadShortBinUnicode(u *Unpickler) error {
	length, err := u.readOne()
	if err != nil {
		return err
	}
	buf, err := u.read(int(length))
	if err != nil {
		return err
	}
	u.append(string(buf))
	return nil
}

// build tuple from topmost stack items
func loadTuple(u *Unpickler) error {
	items, err := u.popMark()
	if err != nil {
		return err
	}
	u.append(types.NewTupleFromSlice(items))
	return nil
}

// push empty tuple
func loadEmptyTuple(u *Unpickler) error {
	u.append(types.NewTupleFromSlice([]interface{}{}))
	return nil
}

// build 1-tuple from stack top
func loadTuple1(u *Unpickler) error {
	value, err := u.stackPop()
	if err != nil {
		return err
	}
	u.append(types.NewTupleFromSlice([]interface{}{value}))
	return nil
}

// build 2-tuple from two topmost stack items
func loadTuple2(u *Unpickler) error {
	second, err := u.stackPop()
	if err != nil {
		return err
	}
	first, err := u.stackPop()
	if err != nil {
		return err
	}
	u.append(types.NewTupleFromSlice([]interface{}{first, second}))
	return nil
}

// build 3-tuple from three topmost stack items
func loadTuple3(u *Unpickler) error {
	third, err := u.stackPop()
	if err != nil {
		return err
	}
	second, err := u.stackPop()
	if err != nil {
		return err
	}
	first, err := u.stackPop()
	if err != nil {
		return err
	}
	u.append(types.NewTupleFromSlice([]interface{}{first, second, third}))
	return nil
}

// push empty list
func loadEmptyList(u *Unpickler) error {
	u.append(types.NewList())
	return nil
}

// push empty dict
func loadEmptyDict(u *Unpickler) error {
	u.append(types.NewDict())
	return nil
}

// push empty set on the stack
func loadEmptySet(u *Unpickler) error {
	u.append(types.NewSet())
	return nil
}

// build frozenset from topmost stack items
func loadFrozenSet(u *Unpickler) error {
	items, err := u.popMark()
	if err != nil {
		return err
	}
	u.append(types.NewFrozenSetFromSlice(items))
	return nil
}

// build list from topmost stack items
func loadList(u *Unpickler) error {
	items, err := u.popMark()
	if err != nil {
		return err
	}
	u.append(types.NewListFromSlice(items))
	return nil
}

// build a dict from stack items
func loadDict(u *Unpickler) error {
	items, err := u.popMark()
	if err != nil {
		return err
	}
	d := types.NewDict()
	// The popped items are alternating key/value pairs.
	itemsLen := len(items)
	for i := 0; i < itemsLen; i += 2 {
		d.Set(items[i], items[i+1])
	}
	u.append(d)
	return nil
}

// build & push class instance
func loadInst(u *Unpickler) error {
	line, err := u.readLine()
	if err != nil {
		return err
	}
	module := string(line[0 : len(line)-1])

	line, err = u.readLine()
	if err != nil {
		return err
	}
	name := string(line[0 : len(line)-1])

	class, err := u.findClass(module, name)
	if err != nil {
		return err
	}

	args, err := u.popMark()
	if err != nil {
		return err
	}
	return u.instantiate(class, args)
}

// build & push class instance
func loadObj(u *Unpickler) error {
	// Stack is ... markobject classobject arg1 arg2 ...
	args, err := u.popMark()
	if err != nil {
		return err
	}
	if len(args) == 0 {
		return fmt.Errorf("OBJ class missing")
	}
	class := args[0]
	args = args[1:]
	return u.instantiate(class, args)
}

// instantiate calls class (a Callable or a PyNewable) with args and pushes
// the resulting value onto the stack.
func (u *Unpickler) instantiate(class interface{}, args []interface{}) error {
	var err error
	var value interface{}
	switch ct := class.(type) {
	case types.Callable:
		value, err = ct.Call(args...)
	case types.PyNewable:
		value, err = ct.PyNew(args...)
	default:
		return fmt.Errorf("cannot instantiate %#v", class)
	}
	if err != nil {
		return err
	}
	u.append(value)
	return nil
}

// build object by applying cls.__new__ to argtuple
func loadNewObj(u *Unpickler) error {
	args, err := u.stackPop()
	if err != nil {
		return err
	}
	argsTuple, argsOk := args.(*types.Tuple)
	if !argsOk {
		return fmt.Errorf("NEWOBJ args must be *Tuple")
	}

	rawClass, err := u.stackPop()
	if err != nil {
		return err
	}
	class, classOk := rawClass.(types.PyNewable)
	if !classOk {
		return fmt.Errorf("NEWOBJ requires a PyNewable object: %#v", rawClass)
	}

	result, err := class.PyNew(*argsTuple...)
	if err != nil {
		return err
	}
	u.append(result)
	return nil
}

// like NEWOBJ but work with keyword only arguments
func loadNewObjEx(u *Unpickler) error {
	kwargs, err := u.stackPop()
	if err != nil {
		return err
	}

	args, err := u.stackPop()
	if err != nil {
		return err
	}
	argsTuple, argsOk := args.(*types.Tuple)
	if !argsOk {
		return fmt.Errorf("NEWOBJ_EX args must be *Tuple")
	}

	rawClass, err := u.stackPop()
	if err != nil {
		return err
	}
	class, classOk := rawClass.(types.PyNewable)
	if !classOk {
		return fmt.Errorf("NEWOBJ_EX requires a PyNewable object")
	}

	// The kwargs dict is passed as a trailing positional argument.
	allArgs := []interface{}(*argsTuple)
	allArgs = append(allArgs, kwargs)

	result, err := class.PyNew(allArgs...)
	if err != nil {
		return err
	}
	u.append(result)
	return nil
}

// push self.find_class(modname, name); 2 string args
func loadGlobal(u *Unpickler) error {
	line, err := u.readLine() // TODO: decode UTF-8?
	if err != nil {
		return err
	}
	module := string(line[0 : len(line)-1])

	line, err = u.readLine() // TODO: decode UTF-8?
	if err != nil {
		return err
	}
	name := string(line[0 : len(line)-1])

	class, err := u.findClass(module, name)
	if err != nil {
		return err
	}
	u.append(class)
	return nil
}

// same as GLOBAL but using names on the stacks
func loadStackGlobal(u *Unpickler) error {
	rawName, err := u.stackPop()
	if err != nil {
		return err
	}
	name, nameOk := rawName.(string)
	if !nameOk {
		return fmt.Errorf("STACK_GLOBAL requires str name: %#v", rawName)
	}

	rawModule, err := u.stackPop()
	if err != nil {
		return err
	}
	module, moduleOk := rawModule.(string)
	if !moduleOk {
		return fmt.Errorf("STACK_GLOBAL requires str module: %#v", rawModule)
	}

	class, err := u.findClass(module, name)
	if err != nil {
		return err
	}
	u.append(class)
	return nil
}

// push object from extension registry; 1-byte index
func opExt1(u *Unpickler) error {
	if u.GetExtension == nil {
		return fmt.Errorf("unsupported extension code encountered")
	}
	i, err := u.readOne()
	if err != nil {
		return err
	}
	obj, err := u.GetExtension(int(i))
	if err != nil {
		return err
	}
	u.append(obj)
	return nil
}

// ditto, but 2-byte index
func opExt2(u *Unpickler) error {
	if u.GetExtension == nil {
		return fmt.Errorf("unsupported extension code encountered")
	}
	buf, err := u.read(2)
	if err != nil {
		return err
	}
	code := int(binary.LittleEndian.Uint16(buf))
	obj, err := u.GetExtension(code)
	if err != nil {
		return err
	}
	u.append(obj)
	return nil
}

// ditto, but 4-byte index
func opExt4(u *Unpickler) error {
	if u.GetExtension == nil {
		return fmt.Errorf("unsupported extension code encountered")
	}
	buf, err := u.read(4)
	if err != nil {
		return err
	}
	code := int(binary.LittleEndian.Uint32(buf))
	obj, err := u.GetExtension(code)
	if err != nil {
		return err
	}
	u.append(obj)
	return nil
}

// apply callable to argtuple, both on stack
func loadReduce(u *Unpickler) error {
	args, err := u.stackPop()
	if err != nil {
		return err
	}
	argsTuple, argsOk := args.(*types.Tuple)
	if !argsOk {
		return fmt.Errorf("REDUCE args must be *Tuple")
	}

	function, err := u.stackPop()
	if err != nil {
		return err
	}
	callable, callableOk := function.(types.Callable)
	if !callableOk {
		return fmt.Errorf("REDUCE requires a Callable object: %#v", function)
	}

	result, err := callable.Call(*argsTuple...)
	if err != nil {
		return err
	}
	u.append(result)
	return nil
}

// discard topmost stack item
func loadPop(u *Unpickler) error {
	// An empty stack means the top item is the mark itself.
	if len(u.stack) == 0 {
		_, err := u.popMark()
		return err
	}
	u.stack = u.stack[:len(u.stack)-1]
	return nil
}

// discard stack top through topmost markobject
func loadPopMark(u *Unpickler) error {
	_, err := u.popMark()
	return err
}

// duplicate top stack item
func loadDup(u *Unpickler) error {
	item, err := u.stackLast()
	if err != nil {
		return err
	}
	u.append(item)
	return nil
}

// push item from memo on stack; index is string arg
func loadGet(u *Unpickler) error {
	line, err := u.readLine()
	if err != nil {
		return err
	}
	i, err := strconv.Atoi(string(line[:len(line)-1]))
	if err != nil {
		return err
	}
	u.append(u.memo[i])
	return nil
}

// push item from memo on stack; index is 1-byte arg
func loadBinGet(u *Unpickler) error {
	i, err := u.readOne()
	if err != nil {
		return err
	}
	u.append(u.memo[int(i)])
	return nil
}

// push item from memo on stack; index is 4-byte arg
func loadLongBinGet(u *Unpickler) error {
	buf, err := u.read(4)
	if err != nil {
		return err
	}
	i := int(binary.LittleEndian.Uint32(buf))
	u.append(u.memo[i])
	return nil
}

// store stack top in memo; index is string arg
func loadPut(u *Unpickler) error {
	line, err := u.readLine()
	if err != nil {
		return err
	}
	i, err := strconv.Atoi(string(line[:len(line)-1]))
	if err != nil {
		return err
	}
	if i < 0 {
		return fmt.Errorf("negative PUT argument")
	}
	u.memo[i], err = u.stackLast()
	return err
}

// store stack top in memo; index is 1-byte arg
func loadBinPut(u *Unpickler) error {
	i, err := u.readOne()
	if err != nil {
		return err
	}
	u.memo[int(i)], err = u.stackLast()
	return err
}

// store stack top in memo; index is 4-byte arg
func loadLongBinPut(u *Unpickler) error {
	buf, err := u.read(4)
	if err != nil {
		return err
	}
	i := int(binary.LittleEndian.Uint32(buf))
	u.memo[i], err = u.stackLast()
	return err
}

// store top of the stack in memo
func loadMemoize(u *Unpickler) error {
	value, err := u.stackLast()
	if err != nil {
		return err
	}
	// MEMOIZE implicitly uses the next free memo index.
	u.memo[len(u.memo)] = value
	return nil
}

// append stack top to list below it
func loadAppend(u *Unpickler) error {
	value, err := u.stackPop()
	if err != nil {
		return err
	}
	obj, err := u.stackPop()
	if err != nil {
		return err
	}
	list, listOk := obj.(types.ListAppender)
	if !listOk {
		return fmt.Errorf("APPEND requires ListAppender")
	}
	list.Append(value)
	u.append(list)
	return nil
}

// extend list on stack by topmost stack slice
func loadAppends(u *Unpickler) error {
	items, err := u.popMark()
	if err != nil {
		return err
	}
	obj, err := u.stackPop()
	if err != nil {
		return err
	}
	list, listOk := obj.(types.ListAppender)
	if !listOk {
		// FIX: message previously said "APPEND requires List", naming the
		// wrong opcode and the wrong interface.
		return fmt.Errorf("APPENDS requires ListAppender")
	}
	for _, item := range items {
		list.Append(item)
	}
	u.append(list)
	return nil
}

// add key+value pair to dict
func loadSetItem(u *Unpickler) error {
	value, err := u.stackPop()
	if err != nil {
		return err
	}
	key, err := u.stackPop()
	if err != nil {
		return err
	}
	// The dict stays on the stack, so only peek at it.
	obj, err := u.stackLast()
	if err != nil {
		return err
	}
	dict, dictOk := obj.(types.DictSetter)
	if !dictOk {
		return fmt.Errorf("SETITEM requires DictSetter")
	}
	dict.Set(key, value)
	return nil
}

// modify dict by adding topmost key+value pairs
func loadSetItems(u *Unpickler) error {
	items, err := u.popMark()
	if err != nil {
		return err
	}
	obj, err := u.stackPop()
	if err != nil {
		return err
	}
	dict, dictOk := obj.(types.DictSetter)
	if !dictOk {
		return fmt.Errorf("SETITEMS requires DictSetter")
	}
	itemsLen := len(items)
	for i := 0; i < itemsLen; i += 2 {
		dict.Set(items[i], items[i+1])
	}
	u.append(dict)
	return nil
}

// modify set by adding topmost stack items
func loadAddItems(u *Unpickler) error {
	items, err := u.popMark()
	if err != nil {
		return err
	}
	obj, err := u.stackPop()
	if err != nil {
		return err
	}
	set, setOk := obj.(types.SetAdder)
	if !setOk {
		return fmt.Errorf("ADDITEMS requires SetAdder")
	}
	for _, item := range items {
		set.Add(item)
	}
	u.append(set)
	return nil
}

// call __setstate__ or __dict__.update()
func loadBuild(u *Unpickler) error {
	state, err := u.stackPop()
	if err != nil {
		return err
	}
	inst, err := u.stackLast()
	if err != nil {
		return err
	}
	// Objects implementing PyStateSettable restore their own state.
	if obj, ok := inst.(types.PyStateSettable); ok {
		return obj.PySetState(state)
	}

	// Otherwise the state may be a (state, slotstate) pair.
	var slotState interface{}
	if tuple, ok := state.(*types.Tuple); ok && tuple.Len() == 2 {
		state = tuple.Get(0)
		slotState = tuple.Get(1)
	}

	if stateDict, ok := state.(*types.Dict); ok {
		instPds, instPdsOk := inst.(types.PyDictSettable)
		if !instPdsOk {
			return fmt.Errorf("BUILD requires a PyDictSettable instance: %#v", inst)
		}
		for _, entry := range *stateDict {
			err := instPds.PyDictSet(entry.Key, entry.Value)
			if err != nil {
				return err
			}
		}
	}

	if slotStateDict, ok := slotState.(*types.Dict); ok {
		instSa, instOk := inst.(types.PyAttrSettable)
		if !instOk {
			return fmt.Errorf(
				"BUILD requires a PyAttrSettable instance: %#v", inst)
		}
		for _, entry := range *slotStateDict {
			sk, keyOk := entry.Key.(string)
			if !keyOk {
				return fmt.Errorf("BUILD requires string slot state keys")
			}
			err := instSa.PySetAttr(sk, entry.Value)
			if err != nil {
				return err
			}
		}
	}
	return nil
}

// push special markobject on stack
func loadMark(u *Unpickler) error {
	u.metaStack = append(u.metaStack, u.stack)
	u.stack = make([]interface{}, 0, 16)
	return nil
}

// every pickle ends with STOP
func loadStop(u *Unpickler) error {
	value, err := u.stackPop()
	if err != nil {
		return err
	}
	// pickleStop is the sentinel "error" that terminates the main loop.
	return pickleStop{value: value}
}

// decodeInt32 interprets b's first 4 bytes as a little-endian
// two's-complement 32-bit integer.
func decodeInt32(b []byte) int {
	ux := binary.LittleEndian.Uint32(b)
	x := int(ux)
	if b[3]&0x80 != 0 {
		x = -(int(^ux) + 1)
	}
	return x
}
gopickle-0.3.0/pickle/pickle_test.go000066400000000000000000000575361453132224200174270ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// pickle_test.go — unit tests for the Unpickler, covering pickle protocols
// 0 through 5: None/bool/int/float primitives, big integers, (short) binary
// strings and bytes, unicode, dict/tuple/list/set/frozenset containers,
// generic objects, collections.OrderedDict, bytearray, findClass resolution,
// and array.array reconstruction via array._array_reconstructor.
// NOTE(review): the original line breaks of this file were lost when the
// archive was flattened; the test code below is kept byte-for-byte.
package pickle import ( "fmt" "math/big" "reflect" "strings" "testing" "github.com/nlpodyssey/gopickle/types" ) func TestNoneP1(t *testing.T) { // pickle.dumps(None, protocol=1) loadsNoErrEqual(t, "N.", nil) } func TestNoneP2(t *testing.T) { // pickle.dumps(None, protocol=2) loadsNoErrEqual(t, "\x80\x02N.", nil) } func TestTrueP1(t *testing.T) { // pickle.dumps(True, protocol=1) loadsNoErrEqual(t, "I01\n.", true) } func TestTrueP2(t *testing.T) { // pickle.dumps(True, protocol=2) loadsNoErrEqual(t, "\x80\x02\x88.", true) } func TestFalseP1(t *testing.T) { // pickle.dumps(False, protocol=1) loadsNoErrEqual(t, "I00\n.", false) } func TestFalseP2(t *testing.T) { // pickle.dumps(False, protocol=2) loadsNoErrEqual(t, "\x80\x02\x89.", false) } func TestIntP0Positive(t *testing.T) { // pickle.dumps(42, protocol=0) loadsNoErrEqual(t, "I42\n.", 42) } func TestIntP0Negative(t *testing.T) { // pickle.dumps(-42, protocol=0) loadsNoErrEqual(t, "I-42\n.", -42) } func TestFloatP0Positive(t *testing.T) { // pickle.dumps(4.2, protocol=0) loadsNoErrEqual(t, "F4.2\n.", 4.2) } func TestFloatP0Negative(t *testing.T) { // pickle.dumps(-4.2, protocol=0) loadsNoErrEqual(t, "F-4.2\n.", -4.2) } func TestBinIntP1Positive(t *testing.T) { // pickle.dumps(100200, protocol=1) loadsNoErrEqual(t, "Jh\x87\x01\x00.", 100200) } func TestBinIntP4Positive(t *testing.T) { // pickle.dumps(70100, protocol=4) loadsNoErrEqual(t, "\x80\x04\x95\x06\x00\x00\x00\x00\x00\x00\x00J\xd4\x11\x01\x00.", 70100) } func TestBinIntP1Negative(t *testing.T) { // pickle.dumps(-100200, protocol=1) loadsNoErrEqual(t, "J\x98x\xfe\xff.", -100200) } func TestBinIntP4Negative(t *testing.T) { // pickle.dumps(-70100, protocol=4) loadsNoErrEqual(t, "\x80\x04\x95\x06\x00\x00\x00\x00\x00\x00\x00J,\xee\xfe\xff.", -70100) } func TestBinInt1P2(t *testing.T) { // pickle.dumps(42, protocol=2) loadsNoErrEqual(t, "\x80\x02K*.", 42) } func TestBinInt2P2(t *testing.T) { // pickle.dumps(300, protocol=2) loadsNoErrEqual(t, "\x80\x02M,\x01.", 
300) } func TestLongP1Positive(t *testing.T) { // pickle.dumps(100200300400, protocol=1) loadsNoErrEqual(t, "L100200300400L\n.", 100200300400) } func TestLongP1Negative(t *testing.T) { // pickle.dumps(-100200300400, protocol=1) loadsNoErrEqual(t, "L-100200300400L\n.", -100200300400) } func TestLongP1BigPositive(t *testing.T) { // pickle.dumps(100200300400500600700, protocol=1) actual := loadsNoErr(t, "L100200300400500600700L\n.") switch v := actual.(type) { case *big.Int: expected := "100200300400500600700" if v.String() != expected { t.Errorf("expected %s, actual %s", expected, v.String()) } default: t.Error("expected big Int", actual) } } func TestLongP1BigNegative(t *testing.T) { // pickle.dumps(-100200300400500600700, protocol=1) actual := loadsNoErr(t, "L-100200300400500600700L\n.") switch v := actual.(type) { case *big.Int: expected := "-100200300400500600700" if v.String() != expected { t.Errorf("expected %s, actual %s", expected, v.String()) } default: t.Error("expected big Int", actual) } } func TestStringPython27P0(t *testing.T) { // pickle.dumps('Café', protocol=0) # Python 2.7 // TODO: the string should be decoded loadsNoErrEqual(t, "S'Caf\\xc3\\xa9'\np0\n.", "Caf\\xc3\\xa9") } func TestBinStringPython27P1(t *testing.T) { // pickle.dumps(b'1234567890'*26, protocol=1) # Python 2.7 loadsNoErrEqual(t, "T\x04\x01\x00\x0012345678901234567890123456789012345678901234567890"+ "123456789012345678901234567890123456789012345678901234567890"+ "123456789012345678901234567890123456789012345678901234567890"+ "123456789012345678901234567890123456789012345678901234567890"+ "123456789012345678901234567890q\x00.", strings.Repeat("1234567890", 26)) } func TestShortBinStringPython27P1(t *testing.T) { // pickle.dumps(b"Café", protocol=1) # Python 2.7 loadsNoErrEqual(t, "U\x05Caf\xc3\xa9q\x00.", "Café") } func TestUnicodePython27P0(t *testing.T) { // pickle.dumps(u"Café", protocol=0) # Python 2.7 loadsNoErrEqual(t, "VCaf\xe9\np0\n.", "Caf\xe9") } func TestBinUnicodeP1(t 
*testing.T) { // pickle.dumps('Café', protocol=1) loadsNoErrEqual(t, "X\x05\x00\x00\x00Caf\xc3\xa9q\x00.", "Café") } func TestShortBinUnicodeP4(t *testing.T) { // pickle.dumps('Café', protocol=4) loadsNoErrEqual(t, "\x80\x04\x95\t\x00\x00\x00\x00\x00\x00\x00\x8c\x05Caf\xc3\xa9\x94.", "Café") } func TestDictP0Empty(t *testing.T) { // pickle.dumps({}, protocol=0) actual := loadsNoErr(t, "(dp0\n.") switch v := actual.(type) { case *types.Dict: if v.Len() != 0 { t.Error("expected empty Dict, actual:", actual) } default: t.Error("expected Dict, actual:", actual) } } func TestDictP0OneKeyValue(t *testing.T) { // pickle.dumps({'a': 1}, protocol=0) actual := loadsNoErr(t, "(dp0\nVa\np1\nI1\ns.") switch v := actual.(type) { case *types.Dict: if x, ok := v.Get("a"); v.Len() != 1 || !ok || x != 1 { t.Error("expected {'a': 1}, actual:", actual) } default: t.Error("expected Dict, actual:", actual) } } func TestEmptyDictP2(t *testing.T) { // pickle.dumps({}, protocol=2) actual := loadsNoErr(t, "\x80\x02}q\x00.") switch v := actual.(type) { case *types.Dict: if v.Len() != 0 { t.Error("expected empty Dict, actual:", actual) } default: t.Error("expected Dict, actual:", actual) } } func TestTupleP0EmptyTuple(t *testing.T) { // pickle.dumps(tuple(), protocol=0) actual := loadsNoErr(t, "(t.") switch v := actual.(type) { case *types.Tuple: if v.Len() != 0 { t.Error("expected empty Tuple, actual:", actual) } default: t.Error("expected Tuple, actual:", actual) } } func TestTupleP0OneItem(t *testing.T) { // pickle.dumps((1,), protocol=0) actual := loadsNoErr(t, "(I1\ntp0\n.") switch v := actual.(type) { case *types.Tuple: if v.Len() != 1 || v.Get(0) != 1 { t.Error("expected (1,), actual:", actual) } default: t.Error("expected Tuple, actual:", actual) } } func TestEmptyTupleP2(t *testing.T) { // pickle.dumps(tuple(), protocol=2) actual := loadsNoErr(t, "\x80\x02).") switch v := actual.(type) { case *types.Tuple: if v.Len() != 0 { t.Error("expected empty Tuple, actual:", actual) } default: 
t.Error("expected Tuple, actual:", actual) } } func TestTuple1P2(t *testing.T) { // pickle.dumps((1,), protocol=2) actual := loadsNoErr(t, "\x80\x02K\x01\x85q\x00.") switch v := actual.(type) { case *types.Tuple: if v.Len() != 1 || v.Get(0) != 1 { t.Error("expected (1,), actual:", actual) } default: t.Error("expected Tuple, actual:", actual) } } func TestTuple2P2(t *testing.T) { // pickle.dumps((1, 2), protocol=2) actual := loadsNoErr(t, "\x80\x02K\x01K\x02\x86q\x00.") switch v := actual.(type) { case *types.Tuple: if v.Len() != 2 || v.Get(0) != 1 || v.Get(1) != 2 { t.Error("expected (1, 2), actual:", actual) } default: t.Error("expected Tuple, actual:", actual) } } func TestTuple3P2(t *testing.T) { // pickle.dumps((1, 2, 3), protocol=2) actual := loadsNoErr(t, "\x80\x02K\x01K\x02K\x03\x87q\x00.") switch v := actual.(type) { case *types.Tuple: if v.Len() != 3 || v.Get(0) != 1 || v.Get(1) != 2 || v.Get(2) != 3 { t.Error("expected (1, 2, 3), actual:", actual) } default: t.Error("expected Tuple, actual:", actual) } } func TestListP0EmptyList(t *testing.T) { // pickle.dumps([], protocol=0) actual := loadsNoErr(t, "(lp0\n.") switch v := actual.(type) { case *types.List: if v.Len() != 0 { t.Error("expected empty List, actual:", actual) } default: t.Error("expected List, actual:", actual) } } func TestEmptyListP2(t *testing.T) { // pickle.dumps([], protocol=2) actual := loadsNoErr(t, "\x80\x02]q\x00.") switch v := actual.(type) { case *types.List: if v.Len() != 0 { t.Error("expected empty List, actual:", actual) } default: t.Error("expected List, actual:", actual) } } func TestListP2OneItem(t *testing.T) { // pickle.dumps([1], protocol=2) actual := loadsNoErr(t, "\x80\x02]q\x00K\x01a.") switch v := actual.(type) { case *types.List: if v.Len() != 1 || v.Get(0) != 1 { t.Error("expected [1], actual:", actual) } default: t.Error("expected List, actual:", actual) } } func TestListP2TwoItems(t *testing.T) { // pickle.dumps([1, 2], protocol=2) actual := loadsNoErr(t, 
"\x80\x02]q\x00(K\x01K\x02e.") switch v := actual.(type) { case *types.List: if v.Len() != 2 || v.Get(0) != 1 || v.Get(1) != 2 { t.Error("expected [1, 2], actual:", actual) } default: t.Error("expected List, actual:", actual) } } func TestBinFloatP2Positive(t *testing.T) { // pickle.dumps(1.2, protocol=2) loadsNoErrEqual(t, "\x80\x02G?\xf3333333.", 1.2) } func TestBinFloaP2tNegative(t *testing.T) { // pickle.dumps(-1.2, protocol=2) loadsNoErrEqual(t, "\x80\x02G\xbf\xf3333333.", -1.2) } func TestLong1P2SmallPositive(t *testing.T) { // pickle.dumps(100200300400, protocol=2) loadsNoErrEqual(t, "\x80\x02\x8a\x05p?gT\x17.", 100200300400) } func TestLong1P2SmallNegative(t *testing.T) { // pickle.dumps(-100200300400, protocol=2) loadsNoErrEqual(t, "\x80\x02\x8a\x05\x90\xc0\x98\xab\xe8.", -100200300400) } func TestLong1P2BigPositive(t *testing.T) { // pickle.dumps(100200300400500600700, protocol=2) actual := loadsNoErr(t, "\x80\x02\x8a\t|\xefD\x8fT\xfa\x8en\x05.") switch v := actual.(type) { case *big.Int: expected := "100200300400500600700" if v.String() != expected { t.Errorf("expected %s, actual %s", expected, v.String()) } default: t.Error("expected big Int", actual) } } func TestLong1P2BigNegative(t *testing.T) { // pickle.dumps(-100200300400500600700, protocol=2) actual := loadsNoErr(t, "\x80\x02\x8a\t\x84\x10\xbbp\xab\x05q\x91\xfa.") switch v := actual.(type) { case *big.Int: expected := "-100200300400500600700" if v.String() != expected { t.Errorf("expected %s, actual %s", expected, v.String()) } default: t.Error("expected big Int", actual) } } func TestBinBytesP3(t *testing.T) { // pickle.dumps(b'1234567890'*26, protocol=3) actual := loadsNoErr(t, "\x80\x03B\x04\x01\x00\x001234567890123456789012345678901234567890"+ "123456789012345678901234567890123456789012345678901234567890"+ "123456789012345678901234567890123456789012345678901234567890"+ "123456789012345678901234567890123456789012345678901234567890"+ "1234567890123456789012345678901234567890q\x00.") switch v := 
actual.(type) { case []byte: expected := []byte(strings.Repeat("1234567890", 26)) if string(v) != string(expected) { t.Errorf("expected %v actual: %v", expected, actual) } default: t.Error("expected []byte, actual:", actual) } } func TestShortBinBytesP3(t *testing.T) { // pickle.dumps(b'ab', protocol=3) actual := loadsNoErr(t, "\x80\x03C\x02abq\x00.") switch v := actual.(type) { case []byte: expected := []byte{'a', 'b'} if string(v) != string(expected) { t.Errorf("expected %v actual: %v", expected, actual) } default: t.Error("expected []byte, actual:", actual) } } func TestEmptySetP4(t *testing.T) { // pickle.dumps(set(), protocol=4) actual := loadsNoErr(t, "\x80\x04\x8f\x94.") switch v := actual.(type) { case *types.Set: if v.Len() != 0 { t.Error("expected empty Set, actual:", actual) } default: t.Error("expected Set, actual:", actual) } } func TestP4SetWithOneItem(t *testing.T) { // pickle.dumps(set([1]), protocol=4) actual := loadsNoErr(t, "\x80\x04\x95\x07\x00\x00\x00\x00\x00\x00\x00\x8f\x94(K\x01\x90.") switch v := actual.(type) { case *types.Set: if v.Len() != 1 || !v.Has(1) { t.Error("expected [1], actual:", actual) } default: t.Error("expected Set, actual:", actual) } } func TestFrozenSetP4EmptyFrozenSet(t *testing.T) { // pickle.dumps(frozenset(), protocol=4) actual := loadsNoErr(t, "\x80\x04\x95\x04\x00\x00\x00\x00\x00\x00\x00(\x91\x94.") switch v := actual.(type) { case *types.FrozenSet: if v.Len() != 0 { t.Error("expected empty FrozenSet, actual:", actual) } default: t.Error("expected FrozenSet, actual:", actual) } } func TestFrozenSetP4OneItem(t *testing.T) { // pickle.dumps(frozenset([1]), protocol=4) actual := loadsNoErr(t, "\x80\x04\x95\x06\x00\x00\x00\x00\x00\x00\x00(K\x01\x91\x94.") switch v := actual.(type) { case *types.FrozenSet: if v.Len() != 1 || !v.Has(1) { t.Error("expected [1], actual:", actual) } default: t.Error("expected FrozenSet, actual:", actual) } } func TestP0GenericObject(t *testing.T) { // class Foo(): pass // pickle.dumps(Foo(), 
protocol=0) actual := loadsNoErr(t, "ccopy_reg\n_reconstructor\np0\n(c__main__\nFoo\n"+ "p1\nc__builtin__\nobject\np2\nNtp3\nRp4\n.") switch v := actual.(type) { case *types.GenericObject: if v.Class.Module != "__main__" || v.Class.Name != "Foo" || len(v.ConstructorArgs) != 0 { t.Errorf("expected __main__.Foo(), actual: %#v", v) } default: t.Error("expected GenericObject, actual:", actual) } } func TestP1GenericObject(t *testing.T) { // class Foo(): pass // pickle.dumps(Foo(), protocol=1) actual := loadsNoErr(t, "ccopy_reg\n_reconstructor\nq\x00(c__main__\nFoo\n"+ "q\x01c__builtin__\nobject\nq\x02Ntq\x03Rq\x04.") switch v := actual.(type) { case *types.GenericObject: if v.Class.Module != "__main__" || v.Class.Name != "Foo" || len(v.ConstructorArgs) != 0 { t.Errorf("expected __main__.Foo(), actual: %#v", v) } default: t.Error("expected GenericObject, actual:", actual) } } func TestP2GenericObject(t *testing.T) { // class Foo(): pass // pickle.dumps(Foo(), protocol=2) actual := loadsNoErr(t, "\x80\x02c__main__\nFoo\nq\x00)\x81q\x01.") switch v := actual.(type) { case *types.GenericObject: if v.Class.Module != "__main__" || v.Class.Name != "Foo" || len(v.ConstructorArgs) != 0 { t.Errorf("expected __main__.Foo(), actual: %#v", v) } default: t.Error("expected GenericObject, actual:", actual) } } func TestP3GenericObject(t *testing.T) { // class Foo(): pass // pickle.dumps(Foo(), protocol=3) actual := loadsNoErr(t, "\x80\x03c__main__\nFoo\nq\x00)\x81q\x01.") switch v := actual.(type) { case *types.GenericObject: if v.Class.Module != "__main__" || v.Class.Name != "Foo" || len(v.ConstructorArgs) != 0 { t.Errorf("expected __main__.Foo(), actual: %#v", v) } default: t.Error("expected GenericObject, actual:", actual) } } func TestP4GenericObject(t *testing.T) { // class Foo(): pass // pickle.dumps(Foo(), protocol=4) actual := loadsNoErr(t, "\x80\x04\x95\x17\x00\x00\x00\x00\x00\x00\x00"+ "\x8c\x08__main__\x94\x8c\x03Foo\x94\x93\x94)\x81\x94.") switch v := actual.(type) { case 
*types.GenericObject: if v.Class.Module != "__main__" || v.Class.Name != "Foo" || len(v.ConstructorArgs) != 0 { t.Errorf("expected __main__.Foo(), actual: %#v", v) } default: t.Error("expected GenericObject, actual:", actual) } } func TestP5GenericObject(t *testing.T) { // class Foo(): pass // pickle.dumps(Foo(), protocol=5) actual := loadsNoErr(t, "\x80\x05\x95\x17\x00\x00\x00\x00\x00\x00\x00"+ "\x8c\x08__main__\x94\x8c\x03Foo\x94\x93\x94)\x81\x94.") switch v := actual.(type) { case *types.GenericObject: if v.Class.Module != "__main__" || v.Class.Name != "Foo" || len(v.ConstructorArgs) != 0 { t.Errorf("expected __main__.Foo(), actual: %#v", v) } default: t.Error("expected GenericObject, actual:", actual) } } func TestP4EmptyOrderedDict(t *testing.T) { // pickle.dumps(collections.OrderedDict(), protocol=4) actual := loadsNoErr(t, "\x80\x04\x95\"\x00\x00\x00\x00\x00\x00\x00"+ "\x8c\x0bcollections\x94\x8c\x0bOrderedDict\x94\x93\x94)R\x94.") switch v := actual.(type) { case *types.OrderedDict: if v.Len() != 0 { t.Error("expected empty OrderedDict, actual:", actual) } default: t.Error("expected OrderedDict, actual:", actual) } } func TestP4OrderedDictWithOneKeyValue(t *testing.T) { // pickle.dumps(collections.OrderedDict({'a': 1}), protocol=4) actual := loadsNoErr(t, "\x80\x04\x95)\x00\x00\x00\x00\x00\x00\x00"+ "\x8c\x0bcollections\x94\x8c\x0bOrderedDict\x94\x93\x94)R\x94"+ "\x8c\x01a\x94K\x01s.") switch v := actual.(type) { case *types.OrderedDict: if x, ok := v.Get("a"); v.Len() != 1 || !ok || x != 1 { t.Error("expected {'a': 1}, actual:", actual) } default: t.Error("expected Dict, actual:", actual) } } func TestP4NestedDicts(t *testing.T) { // pickle.dumps({'a': 1, 'b': {'c': 2}}, protocol=4) actual := loadsNoErr(t, "\x80\x04\x95\x18\x00\x00\x00\x00\x00\x00\x00}"+ "\x94(\x8c\x01a\x94K\x01\x8c\x01b\x94}\x94\x8c\x01c\x94K\x02su.") switch v := actual.(type) { case *types.Dict: if v.Len() != 2 { t.Error("expected two entries, actual:", actual) } if a, ok := v.Get("a"); 
!ok || a != 1 { t.Error("expected 'a' => 1, actual:", actual) } b, bOk := v.Get("b") bDict, bDictOk := b.(*types.Dict) if !bOk || !bDictOk { t.Error("expected 'b' => Dict, actual:", actual) } if c, ok := bDict.Get("c"); bDict.Len() != 1 || !ok || c != 2 { t.Error("expected 'c' => 2, actual:", actual) } default: t.Error("expected Dict, actual:", actual) } } func TestByteArrayP5(t *testing.T) { // pickle.dumps(bytearray(b'ab'), protocol=5) actual := loadsNoErr(t, "\x80\x05\x95\r\x00\x00\x00\x00\x00\x00\x00"+ "\x96\x02\x00\x00\x00\x00\x00\x00\x00ab\x94.") switch v := actual.(type) { case *types.ByteArray: if v.Len() != 2 || v.Get(0) != 'a' || v.Get(1) != 'b' { t.Error("expected b'ab', actual:", actual) } default: t.Error("expected ByteArray, actual:", actual) } } func TestFindClass(t *testing.T) { u := &Unpickler{} v, _ := u.findClass("builtins", "list") actual, _ := fmt.Println(reflect.TypeOf(v)) expected, _ := fmt.Println(reflect.TypeOf(&types.List{})) if actual != expected { t.Errorf("expected %v, actual: %v", expected, actual) } } func TestP4Carray(t *testing.T) { for _, tc := range []struct { name string pkl string want interface{} }{ { // pickle.dumps(array.array("b", [0,1,2,-3], protocol=4) name: "b", pkl: "\x80\x04\x95F\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01b\x94K\x01C\x04\x00\x01\x02\xfd\x94t\x94R\x94.", want: []int8{0, 1, 2, -3}, }, { // pickle.dumps(array.array("h", [0,1,2,-3], protocol=4) name: "h", pkl: "\x80\x04\x95J\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01h\x94K\x04C\x08\x00\x00\x01\x00\x02\x00\xfd\xff\x94t\x94R\x94.", want: []int16{0, 1, 2, -3}, }, { // pickle.dumps(array.array("i", [0,1,2,-3], protocol=4) name: "i", pkl: 
"\x80\x04\x95R\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01i\x94K\x08C\x10\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\xfd\xff\xff\xff\x94t\x94R\x94.", want: []int32{0, 1, 2, -3}, }, { // pickle.dumps(array.array("l", [0,1,2,-3], protocol=4) name: "l", pkl: "\x80\x04\x95b\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01l\x94K\x0cC \x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\xfd\xff\xff\xff\xff\xff\xff\xff\x94t\x94R\x94.", want: []int64{0, 1, 2, -3}, }, { // pickle.dumps(array.array("q", [0,1,2,-3], protocol=4) name: "q", pkl: "\x80\x04\x95b\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01q\x94K\x0cC \x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\xfd\xff\xff\xff\xff\xff\xff\xff\x94t\x94R\x94.", want: []int64{0, 1, 2, -3}, }, { // pickle.dumps(array.array("B", [0,1,2,3], protocol=4) name: "B", pkl: "\x80\x04\x95F\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01B\x94K\x00C\x04\x00\x01\x02\x03\x94t\x94R\x94.", want: []uint8{0, 1, 2, 3}, }, { // pickle.dumps(array.array("H", [0,1,2,3], protocol=4) name: "H", pkl: "\x80\x04\x95J\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01H\x94K\x02C\x08\x00\x00\x01\x00\x02\x00\x03\x00\x94t\x94R\x94.", want: []uint16{0, 1, 2, 3}, }, { // pickle.dumps(array.array("I", [0,1,2,3], protocol=4) name: "I", pkl: 
"\x80\x04\x95R\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01I\x94K\x06C\x10\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x94t\x94R\x94.", want: []uint32{0, 1, 2, 3}, }, { // pickle.dumps(array.array("L", [0,1,2,3], protocol=4) name: "L", pkl: "'\x80\x04\x95b\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01L\x94K\nC \x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x94t\x94R\x94.", want: []uint64{0, 1, 2, 3}, }, { // pickle.dumps(array.array("Q", [0,1,2,3], protocol=4) name: "Q", pkl: "'\x80\x04\x95b\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01Q\x94K\nC \x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x94t\x94R\x94.", want: []uint64{0, 1, 2, 3}, }, { // pickle.dumps(array.array("f", [0,1,2,3], protocol=4) name: "f", pkl: "\x80\x04\x95R\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01f\x94K\x0eC\x10\x00\x00\x00\x00\x00\x00\x80?\x00\x00\x00@\x00\x00@@\x94t\x94R\x94.", want: []float32{0, 1, 2, 3}, }, { // pickle.dumps(array.array("d", [0,1,2,3], protocol=4) name: "d", pkl: "\x80\x04\x95b\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01d\x94K\x10C \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0?\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x08@\x94t\x94R\x94.", want: []float64{0, 1, 2, 3}, }, { // pickle.dumps(array.array("u", "Hello, 世界".encode("utf-32")], protocol=4) name: "u", pkl: 
"\x80\x04\x95f\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01u\x94K\x14C$H\x00\x00\x00e\x00\x00\x00l\x00\x00\x00l\x00\x00\x00o\x00\x00\x00,\x00\x00\x00 \x00\x00\x00\x16N\x00\x00Lu\x00\x00\x94t\x94R\x94.", want: []rune("Hello, 世界"), }, { // pickle.dumps(array.array("u", "".encode("utf-32")], protocol=4) name: "u-empty", pkl: "\x80\x04\x95B\x00\x00\x00\x00\x00\x00\x00\x8c\x05array\x94\x8c\x14_array_reconstructor\x94\x93\x94(\x8c\x05array\x94\x8c\x05array\x94\x93\x94\x8c\x01u\x94K\x14C\x00\x94t\x94R\x94.", want: []rune(""), }, } { t.Run(tc.name, func(t *testing.T) { switch tc.name { case "L", "Q": t.SkipNow() // unknown opcode: 0x27 ''' } got := loadsNoErr(t, tc.pkl) if !reflect.DeepEqual(got, tc.want) { t.Fatalf("got=%v, want=%v", got, tc.want) } }) } } // TODO: test BinPersId // TODO: test Get // TODO: test BinGet // TODO: test LongBinPut // TODO: test LongBinGet // TODO: test Build // TODO: test PersId // TODO: test Pop // TODO: test PopMark // TODO: test Dup // TODO: test Inst // TODO: test Obj // TODO: test Long4 // TODO: test BinUnicode8 // TODO: test BinBytes8 // TODO: test Ext1 // TODO: test Ext2 // TODO: test Ext4 // TODO: test NextBuffer // TODO: test ReadOnlyBuffer // TODO: test NewObjEx func loadsNoErrEqual(t *testing.T, s string, expected interface{}) { actual := loadsNoErr(t, s) if actual != expected { t.Errorf("expected %v, actual: %v", expected, actual) } } func loadsNoErr(t *testing.T, s string) interface{} { result, err := Loads(s) if err != nil { t.Error(err) } return result } gopickle-0.3.0/pytorch/000077500000000000000000000000001453132224200147755ustar00rootroot00000000000000gopickle-0.3.0/pytorch/float_conversion.go000066400000000000000000000034521453132224200207020ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file.
package pytorch // FloatBits16to32 converts the bits representation of a Half Float (16 bits) // number to an IEEE 754 float representation (32 bits) // From http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf func FloatBits16to32(u16 uint16) uint32 { return mantissaTable[offsetTable[u16>>10]+(uint32(u16)&0x3ff)] + exponentTable[u16>>10] } var mantissaTable [2048]uint32 var exponentTable [64]uint32 var offsetTable [64]uint32 func init() { initMantissaTable() initExponentTable() initOffsetTable() } func initMantissaTable() { mantissaTable[0] = 0 for i := uint32(1); i < 1024; i++ { mantissaTable[i] = convertMantissa(i) } for i := uint32(1024); i < 2048; i++ { mantissaTable[i] = 0x38000000 + ((i - 1024) << 13) } } func initExponentTable() { exponentTable[0] = 0 exponentTable[31] = 0x47800000 exponentTable[32] = 0x80000000 exponentTable[63] = 0xC7800000 for i := uint32(1); i < 31; i++ { exponentTable[i] = i << 23 } for i := uint32(33); i < 63; i++ { exponentTable[i] = 0x80000000 + (i-32)<<23 } } func initOffsetTable() { offsetTable[0] = 0 offsetTable[32] = 0 for i := uint32(1); i < 31; i++ { offsetTable[i] = 1024 } for i := uint32(32); i < 64; i++ { offsetTable[i] = 1024 } } func convertMantissa(i uint32) uint32 { var m uint32 = i << 13 // zero pad mantissa bits var e uint32 = 0 // zero exponent for m&0x00800000 != 0 { // while not normalized e -= 0x00800000 // decrement exponent (1 << 23) m <<= 1 // shift mantissa } m &= ^uint32(0x00800000) // clear leading 1 bit e += 0x38800000 // adjust bias ((127-14)<<23) return m | e // return combined number } gopickle-0.3.0/pytorch/limited_buffer_reader.go000066400000000000000000000023051453132224200216260ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
package pytorch import "io" type LimitedBufferReader struct { r io.Reader scalarSize int remainingBytes int buf []byte bufIndex int } func NewLimitedBufferReader( r io.Reader, dataSize, scalarSize, bufferSize int, ) *LimitedBufferReader { size := bufferSize * scalarSize return &LimitedBufferReader{ r: r, scalarSize: scalarSize, remainingBytes: scalarSize * dataSize, buf: make([]byte, size), bufIndex: size, } } func (br *LimitedBufferReader) HasNext() bool { return br.remainingBytes != 0 } func (br *LimitedBufferReader) ReadNext() ([]byte, error) { if br.remainingBytes == 0 { return nil, io.EOF } if br.bufIndex == len(br.buf) { br.bufIndex = 0 if br.remainingBytes < len(br.buf) { br.buf = br.buf[0:br.remainingBytes] } _, err := br.r.Read(br.buf) if err != nil { return nil, err } } result := br.buf[br.bufIndex : br.bufIndex+br.scalarSize] br.bufIndex += br.scalarSize br.remainingBytes -= br.scalarSize return result, nil } gopickle-0.3.0/pytorch/limited_buffer_reader_test.go000066400000000000000000000076711453132224200227000ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
package pytorch import ( "bytes" "io" "testing" ) func TestLimitedBufferReader(t *testing.T) { t.Run("empty input, data size 0", func(t *testing.T) { r := bytes.NewReader([]byte{}) br := NewLimitedBufferReader(r, 0, 2, 3) assertNotHasNext(t, br) assertReadNextEof(t, br) }) t.Run("empty input, data size > 0", func(t *testing.T) { r := bytes.NewReader([]byte{}) br := NewLimitedBufferReader(r, 10, 2, 3) assertHasNext(t, br) assertReadNextEof(t, br) }) t.Run("data size = scalar * buffer", func(t *testing.T) { input := []byte{11, 12, 21, 22, 31, 32, 41, 42, 51, 52, 61, 62} r := bytes.NewReader(input) br := NewLimitedBufferReader(r, 6, 2, 3) assertReadNextValue(t, br, []byte{11, 12}) assertReadNextValue(t, br, []byte{21, 22}) assertReadNextValue(t, br, []byte{31, 32}) assertReadNextValue(t, br, []byte{41, 42}) assertReadNextValue(t, br, []byte{51, 52}) assertReadNextValue(t, br, []byte{61, 62}) assertNotHasNext(t, br) assertReadNextEof(t, br) }) t.Run("data size > scalar * buffer", func(t *testing.T) { input := []byte{11, 12, 21, 22, 31, 32, 41, 42, 51, 52, 61, 62, 71, 72} r := bytes.NewReader(input) br := NewLimitedBufferReader(r, 7, 2, 3) assertReadNextValue(t, br, []byte{11, 12}) assertReadNextValue(t, br, []byte{21, 22}) assertReadNextValue(t, br, []byte{31, 32}) assertReadNextValue(t, br, []byte{41, 42}) assertReadNextValue(t, br, []byte{51, 52}) assertReadNextValue(t, br, []byte{61, 62}) assertReadNextValue(t, br, []byte{71, 72}) assertNotHasNext(t, br) assertReadNextEof(t, br) }) t.Run("data size < scalar * buffer", func(t *testing.T) { input := []byte{11, 12, 21, 22, 31, 32, 41, 42, 51, 52} r := bytes.NewReader(input) br := NewLimitedBufferReader(r, 5, 2, 3) assertReadNextValue(t, br, []byte{11, 12}) assertReadNextValue(t, br, []byte{21, 22}) assertReadNextValue(t, br, []byte{31, 32}) assertReadNextValue(t, br, []byte{41, 42}) assertReadNextValue(t, br, []byte{51, 52}) assertNotHasNext(t, br) assertReadNextEof(t, br) }) t.Run("remaining data in buffer", func(t 
*testing.T) { input := []byte{11, 12, 21, 22, 31, 32, 41, 42, 51, 52, 90, 91, 92} r := bytes.NewReader(input) br := NewLimitedBufferReader(r, 5, 2, 3) assertReadNextValue(t, br, []byte{11, 12}) assertReadNextValue(t, br, []byte{21, 22}) assertReadNextValue(t, br, []byte{31, 32}) assertReadNextValue(t, br, []byte{41, 42}) assertReadNextValue(t, br, []byte{51, 52}) assertNotHasNext(t, br) assertReadNextEof(t, br) rest := make([]byte, 3) n, err := r.Read(rest) if n != 3 || err != nil { t.Errorf("expected 3 bytes and no error, got %d and %#v", n, err) } assertByteSliceEqual(t, rest, []byte{90, 91, 92}) }) } func assertHasNext(t *testing.T, br *LimitedBufferReader) { if !br.HasNext() { t.Errorf("expected HasNext() true, but it is false") } } func assertNotHasNext(t *testing.T, br *LimitedBufferReader) { if br.HasNext() { t.Errorf("expected HasNext() false, but it is true") } } func assertReadNextValue(t *testing.T, br *LimitedBufferReader, val []byte) { assertHasNext(t, br) result, err := br.ReadNext() if err != nil { t.Errorf("expected nil error, actual %#v", err) } assertByteSliceEqual(t, result, val) } func assertByteSliceEqual(t *testing.T, actual, expected []byte) { if len(expected) != len(actual) { t.Errorf("expected %#v, actual %#v", expected, actual) return } for index, expected := range expected { actual := actual[index] if expected != actual { t.Errorf("expected %#v, actual %#v", expected, actual) return } } } func assertReadNextEof(t *testing.T, br *LimitedBufferReader) { result, err := br.ReadNext() if result != nil { t.Errorf("expected nil result, actual %#v", result) } if err != io.EOF { t.Errorf("expected EOF error, actual %#v", err) } } gopickle-0.3.0/pytorch/pytorch.go000066400000000000000000000227251453132224200170240ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
package pytorch import ( "archive/tar" "archive/zip" "errors" "fmt" "io" "math/big" "os" "path" "github.com/nlpodyssey/gopickle/pickle" "github.com/nlpodyssey/gopickle/types" ) const hexMagicNumber = "1950a86a20f9469cfc6c" const protocolVersion = 1001 var ErrInvalidMagicNumber = errors.New("invalid pytorch magic number") var ErrInvalidProtocolVersion = errors.New("invalid pytorch protocol version") func Load(filename string) (interface{}, error) { newUnpickler := func(r io.Reader) pickle.Unpickler { return pickle.NewUnpickler(r) } return LoadWithUnpickler(filename, newUnpickler) } // LoadWithUnpickler is like Load, but it accepts a newUnpickler function which // is used to create new customized pickle.Unpickler instances. func LoadWithUnpickler(filename string, newUnpickler func(r io.Reader) pickle.Unpickler) (interface{}, error) { if !isZipFile(filename) { return loadLegacyFile(filename, newUnpickler) } return loadZipFile(filename, newUnpickler) } func loadZipFile(filename string, newUnpickler func(r io.Reader) pickle.Unpickler) (interface{}, error) { // Open a zip archive for reading. 
r, err := zip.OpenReader(filename) if err != nil { return nil, err } defer r.Close() fileRecords := make(map[string]*zip.File, len(r.File)) for _, f := range r.File { _, recordName := path.Split(f.Name) fileRecords[recordName] = f } if _, isTorchScript := fileRecords["constants.pkl"]; isTorchScript { return nil, fmt.Errorf("TorchScript is not supported") } dataFile, hasDataFile := fileRecords["data.pkl"] if !hasDataFile { return nil, fmt.Errorf("data.pkl not found in zip file") } df, err := dataFile.Open() if err != nil { return nil, err } defer df.Close() loadedStorages := make(map[string]StorageInterface) u := newUnpickler(df) u.FindClass = makePickleFindClass(u.FindClass) u.PersistentLoad = func(savedId interface{}) (interface{}, error) { tuple, tupleOk := savedId.(*types.Tuple) if !tupleOk || tuple.Len() == 0 { return nil, fmt.Errorf("PersistentLoad: non-empty tuple expected, got %#v", savedId) } typename, typenameOk := tuple.Get(0).(string) if !typenameOk { return nil, fmt.Errorf("PersistentLoad: cannot get typename") } if typename != "storage" { return nil, fmt.Errorf("unknown typename for PersistentLoad, expected 'storage' but got '%s'", typename) } if tuple.Len() < 5 { return nil, fmt.Errorf("PersistentLoad: unexpected storage data length") } dataType, dataTypeOk := tuple.Get(1).(StorageClassInterface) key, keyOk := tuple.Get(2).(string) location, locationOk := tuple.Get(3).(string) size, sizeOk := tuple.Get(4).(int) if !dataTypeOk || !keyOk || !locationOk || !sizeOk { return nil, fmt.Errorf("PersistentLoad: unexpected data types") } storage, storageExists := loadedStorages[key] if !storageExists { storage, err = loadTensor(dataType, size, location, key, fileRecords) if err != nil { return nil, err } loadedStorages[key] = storage } return storage, nil } return u.Load() } func loadTensor( dataType StorageClassInterface, size int, location, key string, zipFileRecords map[string]*zip.File, ) (StorageInterface, error) { file, fileOk := zipFileRecords[key] if 
!fileOk { return nil, fmt.Errorf("cannot find zip record '%s'", key) } f, err := file.Open() if err != nil { return nil, err } defer f.Close() storage := dataType.New(size, location) err = storage.SetFromFileWithSize(f, size) return storage, err } func loadLegacyFile(filename string, newUnpickler func(r io.Reader) pickle.Unpickler) (interface{}, error) { f, err := os.Open(filename) if err != nil { return nil, err } defer f.Close() tr := tar.NewReader(f) for { _, err := tr.Next() switch err { case nil: // TODO: ... panic("legacy load from tar not implemented") case io.EOF: break // End of archive case tar.ErrHeader, io.ErrUnexpectedEOF: _, err = f.Seek(0, io.SeekStart) if err != nil { return nil, err } return loadLegacyNoTar(f, newUnpickler) default: return nil, err } } } func loadLegacyNoTar(f *os.File, newUnpickler func(r io.Reader) pickle.Unpickler) (interface{}, error) { if err := readAndCheckMagicNumber(f); err != nil { return nil, err } if err := readAndChecProtocolVersion(f); err != nil { return nil, err } if _, err := unpickle(f); err != nil { // sys info return nil, err } deserializedObjects := make(map[string]StorageInterface) u := newUnpickler(f) u.FindClass = makePickleFindClass(u.FindClass) u.PersistentLoad = func(savedId interface{}) (interface{}, error) { tuple, tupleOk := savedId.(*types.Tuple) if !tupleOk || tuple.Len() == 0 { return nil, fmt.Errorf("PersistentLoad: non-empty tuple expected, got %#v", savedId) } typename, typenameOk := tuple.Get(0).(string) if !typenameOk { return nil, fmt.Errorf("PersistentLoad: cannot get typename") } switch typename { case "storage": if tuple.Len() < 6 { return nil, fmt.Errorf( "PersistentLoad: unexpected storage data length") } dataType, dataTypeOk := tuple.Get(1).(StorageClassInterface) rootKey, rootKeyOk := tuple.Get(2).(string) location, locationOk := tuple.Get(3).(string) size, sizeOk := tuple.Get(4).(int) viewMetadata := tuple.Get(5) if !dataTypeOk || !rootKeyOk || !locationOk || !sizeOk { return nil, 
fmt.Errorf("PersistentLoad: unexpected data types") } storage, storageExists := deserializedObjects[rootKey] if !storageExists { storage = dataType.New(size, location) deserializedObjects[rootKey] = storage } switch vm := viewMetadata.(type) { case nil: return storage, nil case []interface{}: if len(vm) != 3 { return nil, fmt.Errorf( "PersistentLoad: unexpected view metadata length") } panic("viewMetadata not implemented") // TODO: ... // view_key, offset, view_size = view_metadata // if view_key not in deserialized_objects: // deserialized_objects[view_key] = storage[offset:offset + view_size] // return deserialized_objects[view_key] default: return nil, fmt.Errorf("PersistentLoad: unexpected view metadata type") } case "module": if tuple.Len() < 2 { return nil, fmt.Errorf("PersistentLoad: unexpected module data length") } return tuple.Get(1), nil default: return nil, fmt.Errorf("Unexpected saved ID type: %s", typename) } } result, err := u.Load() if err != nil { return nil, err } rawStorageKeys, err := unpickle(f) if err != nil { return nil, err } storageKeys, err := makeStorageKeys(rawStorageKeys) if err != nil { return nil, err } for _, key := range storageKeys { storageObj, ok := deserializedObjects[key] if !ok { return nil, fmt.Errorf("storage object not found for key '%s'", key) } err = storageObj.SetFromFile(f) if err != nil { return nil, err } } return result, nil } func makeStorageKeys(obj interface{}) ([]string, error) { list, ok := obj.(*types.List) if !ok { return nil, fmt.Errorf("invalid storage keys data") } keys := make([]string, len(*list)) for i, rawKey := range *list { key, keyOk := rawKey.(string) if !keyOk { return nil, fmt.Errorf("invalid storage key") } keys[i] = key } return keys, nil } func readAndCheckMagicNumber(r io.Reader) error { obj, err := unpickle(r) if err != nil { return err } if n, ok := obj.(*big.Int); !ok || n.Text(16) != hexMagicNumber { return ErrInvalidMagicNumber } return nil } func readAndChecProtocolVersion(r io.Reader) 
error { obj, err := unpickle(r) if err != nil { return err } if n, ok := obj.(int); !ok || n != protocolVersion { return ErrInvalidProtocolVersion } return nil } func unpickle(r io.Reader) (interface{}, error) { u := pickle.NewUnpickler(r) return u.Load() } func isZipFile(filename string) bool { r, err := zip.OpenReader(filename) if err != nil { return false } r.Close() return true } func makePickleFindClass(fallback func(module, name string) (interface{}, error)) func(module, name string) (interface{}, error) { return func(module, name string) (interface{}, error) { switch module + "." + name { case "torch._utils._rebuild_tensor_v2": return &RebuildTensorV2{}, nil case "torch.FloatStorage": return &FloatStorageClass{}, nil case "torch.HalfStorage": return &HalfStorageClass{}, nil case "torch.DoubleStorage": return &DoubleStorageClass{}, nil case "torch.CharStorage": return &CharStorageClass{}, nil case "torch.ShortStorage": return &ShortStorageClass{}, nil case "torch.IntStorage": return &IntStorageClass{}, nil case "torch.LongStorage": return &LongStorageClass{}, nil case "torch.ByteStorage": return &ByteStorageClass{}, nil case "torch.BoolStorage": return &BoolStorageClass{}, nil case "torch.BFloat16Storage": return &BFloat16StorageClass{}, nil case "torch.nn.backends.thnn._get_thnn_function_backend": // this is for historical pickle deserilaization, it is not used otherwise return getThnnFunctionBackend{}, nil default: if fallback == nil { return nil, fmt.Errorf("class not found: %s %s", module, name) } return fallback(module, name) } } } // getThnnFunctionBackend is for historical pickle deserilaization, it is not used otherwise type getThnnFunctionBackend struct{} var _ types.Callable = &getThnnFunctionBackend{} func (getThnnFunctionBackend) Call(_ ...interface{}) (interface{}, error) { return nil, nil } gopickle-0.3.0/pytorch/pytorch_test.go000066400000000000000000000223301453132224200200530ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. 
All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package pytorch import ( "fmt" "path" "testing" ) func TestFloat16Tensors(t *testing.T) { // Half for _, filename := range makeFilenames("tensor_float16") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*HalfStorage) if !fsOk { t.Fatalf("expected *HalfStorage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertFloat32SliceEqual(t, fs.Data, []float32{1.2, -3.4, 5.6, -7.8}, 0.002) }) } } func TestFloat32Tensors(t *testing.T) { // Float for _, filename := range makeFilenames("tensor_float32") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*FloatStorage) if !fsOk { t.Fatalf("expected *FloatStorage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertFloat32SliceEqual(t, fs.Data, []float32{1.2, -3.4, 5.6, -7.8}, 0.0) }) } } func TestFloat64Tensors(t *testing.T) { // Double for _, filename := range makeFilenames("tensor_float64") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*DoubleStorage) if !fsOk { t.Fatalf("expected *DoubleStorage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertFloat64SliceEqual(t, fs.Data, []float64{1.2, -3.4, 5.6, -7.8}, 0.0) }) } } func TestInt8Tensors(t *testing.T) { // Char for _, filename := range makeFilenames("tensor_int8") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*CharStorage) if !fsOk { t.Fatalf("expected *CharStorage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertInt8SliceEqual(t, 
fs.Data, []int8{1, -2, 3, -4}) }) } } func TestInt16Tensors(t *testing.T) { // Short for _, filename := range makeFilenames("tensor_int16") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*ShortStorage) if !fsOk { t.Fatalf("expected *ShortStorage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertInt16SliceEqual(t, fs.Data, []int16{1, -2, 3, -4}) }) } } func TestInt32Tensors(t *testing.T) { // Int for _, filename := range makeFilenames("tensor_int32") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*IntStorage) if !fsOk { t.Fatalf("expected *IntStorage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertInt32SliceEqual(t, fs.Data, []int32{1, -2, 3, -4}) }) } } func TestInt64Tensors(t *testing.T) { // Long for _, filename := range makeFilenames("tensor_int64") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*LongStorage) if !fsOk { t.Fatalf("expected *LongStorage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertInt64SliceEqual(t, fs.Data, []int64{1, -2, 3, -4}) }) } } func TestUInt8Tensors(t *testing.T) { // Byte for _, filename := range makeFilenames("tensor_uint8") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*ByteStorage) if !fsOk { t.Fatalf("expected *ByteStorage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertUInt8SliceEqual(t, fs.Data, []uint8{1, 10, 100, 255}) }) } } func TestBoolTensors(t *testing.T) { for _, filename := range makeFilenames("tensor_bool") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, 
filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*BoolStorage) if !fsOk { t.Fatalf("expected *ByteStorage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertBoolSliceEqual(t, fs.Data, []bool{true, false, true, false}) }) } } func TestBFloat16Tensors(t *testing.T) { // Float for _, filename := range makeFilenames("tensor_bfloat16") { t.Run(filename, func(t *testing.T) { tensor := loadTensorFromFile(t, filename) assertCommonTensorFields(t, tensor) fs, fsOk := tensor.Source.(*BFloat16Storage) if !fsOk { t.Fatalf("expected *BFloat16Storage, got %#v", tensor.Source) } assertBaseStorageFields(t, fs.BaseStorage, 4, "cpu") assertFloat32SliceEqual(t, fs.Data, []float32{1.2, -3.4, 5.6, -7.8}, 0.013) }) } } func loadTensorFromFile(t *testing.T, filename string) *Tensor { result, err := Load(path.Join("testdata", filename)) if err != nil { t.Fatal(err) } tensor, tensorOk := result.(*Tensor) if !tensorOk { t.Fatalf("expected *Tensor, got %#v", result) } return tensor } func makeFilenames(prefix string) []string { filenames := make([]string, 0, 10) for _, proto := range [...]int{1, 2, 3, 4, 5} { for _, useZip := range [...]bool{false, true} { suffix := fmt.Sprintf("_proto%d", proto) if useZip { suffix += "_zip" } filenames = append(filenames, prefix+suffix+".pt") } } return filenames } func assertIntSliceEqual(t *testing.T, actual, expected []int) { if len(actual) != len(expected) { t.Errorf("expected %v, actual %v", expected, actual) return } for index, actualVal := range actual { if actualVal != expected[index] { t.Errorf("expected %v, actual %v", expected, actual) return } } } func assertFloat32SliceEqual(t *testing.T, actual, expected []float32, eps float32) { if len(actual) != len(expected) { t.Errorf("expected %v, actual %v", expected, actual) return } for index, actualVal := range actual { expectedVal := expected[index] if actualVal < expectedVal-eps || actualVal > expectedVal+eps { t.Errorf("expected %v, actual 
%v", expected, actual) return } } } func assertFloat64SliceEqual(t *testing.T, actual, expected []float64, eps float64) { if len(actual) != len(expected) { t.Errorf("expected %v, actual %v", expected, actual) return } for index, actualVal := range actual { expectedVal := expected[index] if actualVal < expectedVal-eps || actualVal > expectedVal+eps { t.Errorf("expected %v, actual %v", expected, actual) return } } } func assertInt8SliceEqual(t *testing.T, actual, expected []int8) { if len(actual) != len(expected) { t.Errorf("expected %v, actual %v", expected, actual) return } for index, actualVal := range actual { if actualVal != expected[index] { t.Errorf("expected %v, actual %v", expected, actual) return } } } func assertInt16SliceEqual(t *testing.T, actual, expected []int16) { if len(actual) != len(expected) { t.Errorf("expected %v, actual %v", expected, actual) return } for index, actualVal := range actual { if actualVal != expected[index] { t.Errorf("expected %v, actual %v", expected, actual) return } } } func assertInt32SliceEqual(t *testing.T, actual, expected []int32) { if len(actual) != len(expected) { t.Errorf("expected %v, actual %v", expected, actual) return } for index, actualVal := range actual { if actualVal != expected[index] { t.Errorf("expected %v, actual %v", expected, actual) return } } } func assertInt64SliceEqual(t *testing.T, actual, expected []int64) { if len(actual) != len(expected) { t.Errorf("expected %v, actual %v", expected, actual) return } for index, actualVal := range actual { if actualVal != expected[index] { t.Errorf("expected %v, actual %v", expected, actual) return } } } func assertUInt8SliceEqual(t *testing.T, actual, expected []uint8) { if len(actual) != len(expected) { t.Errorf("expected %v, actual %v", expected, actual) return } for index, actualVal := range actual { if actualVal != expected[index] { t.Errorf("expected %v, actual %v", expected, actual) return } } } func assertBoolSliceEqual(t *testing.T, actual, expected 
[]bool) { if len(actual) != len(expected) { t.Errorf("expected %v, actual %v", expected, actual) return } for index, actualVal := range actual { if actualVal != expected[index] { t.Errorf("expected %v, actual %v", expected, actual) return } } } func assertCommonTensorFields(t *testing.T, tensor *Tensor) { assertIntSliceEqual(t, tensor.Size, []int{4}) assertIntSliceEqual(t, tensor.Stride, []int{1}) if tensor.StorageOffset != 0 { t.Errorf("expected StorageOffset 0, got %d", tensor.StorageOffset) } if tensor.RequiresGrad { t.Errorf("expected RequiresGrad false, got True") } } func assertBaseStorageFields(t *testing.T, bs BaseStorage, size int, location string) { if bs.Size != size { t.Errorf("expected storage Size %d, got %d", size, bs.Size) } if bs.Location != location { t.Errorf("expected storage Location %#v, got %#v", location, bs.Location) } } gopickle-0.3.0/pytorch/rebuild_tensor.go000066400000000000000000000027631453132224200203540ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
package pytorch import ( "fmt" "github.com/nlpodyssey/gopickle/types" ) type RebuildTensorV2 struct{} var _ types.Callable = &RebuildTensorV2{} func (r *RebuildTensorV2) Call(args ...interface{}) (interface{}, error) { if len(args) != 6 { return nil, fmt.Errorf("RebuildTensorV2 unexpected args: %#v", args) } storage, storageOk := args[0].(StorageInterface) storageOffset, storageOffsetOk := args[1].(int) size, sizeOk := args[2].(*types.Tuple) stride, strideOk := args[3].(*types.Tuple) requiresGrad, requiresGradOk := args[4].(bool) // arg[5] "backward hooks" is unused if !storageOk || !storageOffsetOk || !sizeOk || !strideOk || !requiresGradOk { return nil, fmt.Errorf("RebuildTensorV2 unexpected args: %#v", args) } tensor := &Tensor{ Source: storage, StorageOffset: storageOffset, RequiresGrad: requiresGrad, } var err error tensor.Size, err = tupleToIntSlice(size) if err != nil { return nil, err } tensor.Stride, err = tupleToIntSlice(stride) if err != nil { return nil, err } return tensor, nil } func tupleToIntSlice(tuple *types.Tuple) ([]int, error) { length := tuple.Len() slice := make([]int, length) for i := 0; i < length; i++ { value, ok := tuple.Get(i).(int) if !ok { return nil, fmt.Errorf("tuple of ints expected: %#v", tuple) } slice[i] = value } return slice, nil } gopickle-0.3.0/pytorch/storage.go000066400000000000000000000212111453132224200167650ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
package pytorch import ( "encoding/binary" "io" "math" ) type StorageClassInterface interface { New(size int, location string) StorageInterface } type StorageInterface interface { SetFromFile(r io.Reader) error SetFromFileWithSize(r io.Reader, size int) error } type BaseStorage struct { Size int Location string } // ----- Half ----- type HalfStorageClass struct{} var _ StorageClassInterface = &HalfStorageClass{} func (f *HalfStorageClass) New(size int, location string) StorageInterface { return &HalfStorage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type HalfStorage struct { BaseStorage Data []float32 } var _ StorageInterface = &HalfStorage{} func (f *HalfStorage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *HalfStorage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]float32, size) br := NewLimitedBufferReader(r, size, 2, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } u16 := binary.LittleEndian.Uint16(bytes) data[i] = math.Float32frombits(FloatBits16to32(u16)) } f.Data = data return nil } // ----- Float ----- type FloatStorageClass struct{} var _ StorageClassInterface = &FloatStorageClass{} func (f *FloatStorageClass) New(size int, location string) StorageInterface { return &FloatStorage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type FloatStorage struct { BaseStorage Data []float32 } var _ StorageInterface = &FloatStorage{} func (f *FloatStorage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *FloatStorage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]float32, size) br := NewLimitedBufferReader(r, size, 4, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } data[i] = math.Float32frombits(binary.LittleEndian.Uint32(bytes)) } f.Data = data return nil } // ----- Double ----- type DoubleStorageClass struct{} var _ StorageClassInterface = 
&DoubleStorageClass{} func (f *DoubleStorageClass) New(size int, location string) StorageInterface { return &DoubleStorage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type DoubleStorage struct { BaseStorage Data []float64 } var _ StorageInterface = &DoubleStorage{} func (f *DoubleStorage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *DoubleStorage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]float64, size) br := NewLimitedBufferReader(r, size, 8, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } data[i] = math.Float64frombits(binary.LittleEndian.Uint64(bytes)) } f.Data = data return nil } // ----- Char ----- type CharStorageClass struct{} var _ StorageClassInterface = &CharStorageClass{} func (f *CharStorageClass) New(size int, location string) StorageInterface { return &CharStorage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type CharStorage struct { BaseStorage Data []int8 } var _ StorageInterface = &CharStorage{} func (f *CharStorage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *CharStorage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]int8, size) br := NewLimitedBufferReader(r, size, 1, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } data[i] = int8(bytes[0]) } f.Data = data return nil } // ----- Short ----- type ShortStorageClass struct{} var _ StorageClassInterface = &ShortStorageClass{} func (f *ShortStorageClass) New(size int, location string) StorageInterface { return &ShortStorage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type ShortStorage struct { BaseStorage Data []int16 } var _ StorageInterface = &ShortStorage{} func (f *ShortStorage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *ShortStorage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]int16, size) 
br := NewLimitedBufferReader(r, size, 2, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } data[i] = int16(binary.LittleEndian.Uint16(bytes)) } f.Data = data return nil } // ----- Int ----- type IntStorageClass struct{} var _ StorageClassInterface = &IntStorageClass{} func (f *IntStorageClass) New(size int, location string) StorageInterface { return &IntStorage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type IntStorage struct { BaseStorage Data []int32 } var _ StorageInterface = &IntStorage{} func (f *IntStorage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *IntStorage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]int32, size) br := NewLimitedBufferReader(r, size, 4, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } data[i] = int32(binary.LittleEndian.Uint32(bytes)) } f.Data = data return nil } // ----- Long ----- type LongStorageClass struct{} var _ StorageClassInterface = &LongStorageClass{} func (f *LongStorageClass) New(size int, location string) StorageInterface { return &LongStorage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type LongStorage struct { BaseStorage Data []int64 } var _ StorageInterface = &LongStorage{} func (f *LongStorage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *LongStorage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]int64, size) br := NewLimitedBufferReader(r, size, 8, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } data[i] = int64(binary.LittleEndian.Uint64(bytes)) } f.Data = data return nil } // ----- Byte ----- type ByteStorageClass struct{} var _ StorageClassInterface = &ByteStorageClass{} func (f *ByteStorageClass) New(size int, location string) StorageInterface { return &ByteStorage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type 
ByteStorage struct { BaseStorage Data []uint8 } var _ StorageInterface = &ByteStorage{} func (f *ByteStorage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *ByteStorage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]uint8, size) br := NewLimitedBufferReader(r, size, 1, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } data[i] = bytes[0] } f.Data = data return nil } // ----- Bool ----- type BoolStorageClass struct{} var _ StorageClassInterface = &BoolStorageClass{} func (f *BoolStorageClass) New(size int, location string) StorageInterface { return &BoolStorage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type BoolStorage struct { BaseStorage Data []bool } var _ StorageInterface = &BoolStorage{} func (f *BoolStorage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *BoolStorage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]bool, size) br := NewLimitedBufferReader(r, size, 1, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } data[i] = bytes[0] == 1 } f.Data = data return nil } // ----- BFloat16 ----- type BFloat16StorageClass struct{} var _ StorageClassInterface = &BFloat16StorageClass{} func (f *BFloat16StorageClass) New(size int, location string) StorageInterface { return &BFloat16Storage{ BaseStorage: BaseStorage{Size: size, Location: location}, Data: nil, } } type BFloat16Storage struct { BaseStorage Data []float32 } var _ StorageInterface = &BFloat16Storage{} func (f *BFloat16Storage) SetFromFile(r io.Reader) error { return setFromFile(f, r) } func (f *BFloat16Storage) SetFromFileWithSize(r io.Reader, size int) error { data := make([]float32, size) br := NewLimitedBufferReader(r, size, 2, 512) for i := 0; i < size; i++ { bytes, err := br.ReadNext() if err != nil { return err } u16 := binary.LittleEndian.Uint16(bytes) data[i] = math.Float32frombits(uint32(u16) << 16) } f.Data 
= data return nil } func setFromFile(s StorageInterface, r io.Reader) error { sizeBuf := make([]byte, 8) _, err := r.Read(sizeBuf) if err != nil { return err } size := int(binary.LittleEndian.Uint64(sizeBuf)) return s.SetFromFileWithSize(r, size) } gopickle-0.3.0/pytorch/tensor.go000066400000000000000000000004761453132224200166450ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package pytorch type Tensor struct { Source StorageInterface StorageOffset int Size []int Stride []int RequiresGrad bool } gopickle-0.3.0/pytorch/testdata/000077500000000000000000000000001453132224200166065ustar00rootroot00000000000000gopickle-0.3.0/pytorch/testdata/generate_fixtures.py000077500000000000000000000023471453132224200227140ustar00rootroot00000000000000#!/usr/bin/env python3 # Copyright 2020 NLP Odyssey Authors. All rights reserved. # Use of this source code is governed by a BSD-style # license that can be found in the LICENSE file. 
import torch FLOAT_DTYPES = [ torch.float16, # or torch.half torch.float32, # or torch.float torch.float64, # or torch.double torch.bfloat16 ] INT_DTYPES = [ torch.int8, torch.int16, # or torch.short torch.int32, # or torch.int torch.int64, # or torch.long ] def main(): for proto in range(1, 6): for use_zip in [False, True]: for dtype in FLOAT_DTYPES: save([1.2, -3.4, 5.6, -7.8], dtype, proto, use_zip) for dtype in INT_DTYPES: save([1, -2, 3, -4], dtype, proto, use_zip) save([1, 10, 100, 255], torch.uint8, proto, use_zip) save([True, False, True, False], torch.bool, proto, use_zip) def save(data, dtype, proto, use_zip): str_dtype = str(dtype)[6:] str_zip = '_zip' if use_zip else '' torch.save( torch.tensor(data, dtype=dtype), f'tensor_{str_dtype}_proto{proto}{str_zip}.pt', pickle_protocol=proto, _use_new_zipfile_serialization=use_zip) if __name__ == '__main__': main() gopickle-0.3.0/pytorch/testdata/requirements.txt000066400000000000000000000000161453132224200220670ustar00rootroot00000000000000torch==1.13.1 gopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto1.pt000066400000000000000000000005511453132224200234700ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BFloat16Storage qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa.?Z@gopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto1_zip.pt000066400000000000000000000015441453132224200243550ustar00rootroot00000000000000PK#?tensor_bfloat16_proto1_zip/data.pklFB;ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BFloat16Storage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PK_МPK!tensor_bfloat16_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZ?Z@PK#PK"(tensor_bfloat16_proto1_zip/versionFB$ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 
PKўgUPK_М#tensor_bfloat16_proto1_zip/data.pklPK#!,tensor_bfloat16_proto1_zip/data/0PKўgU"tensor_bfloat16_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto2.pt000066400000000000000000000005341453132224200234720ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BFloat16Storage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.?Z@gopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto2_zip.pt000066400000000000000000000015441453132224200243560ustar00rootroot00000000000000PK#?tensor_bfloat16_proto2_zip/data.pklFB;ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BFloat16Storage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKEPK!tensor_bfloat16_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZ?Z@PK#PK"(tensor_bfloat16_proto2_zip/versionFB$ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKE#tensor_bfloat16_proto2_zip/data.pklPK#!)tensor_bfloat16_proto2_zip/data/0PKўgU"tensor_bfloat16_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto3.pt000066400000000000000000000005341453132224200234730ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BFloat16Storage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.?Z@gopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto3_zip.pt000066400000000000000000000015441453132224200243570ustar00rootroot00000000000000PK#?tensor_bfloat16_proto3_zip/data.pklFB;ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BFloat16Storage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq 
.PK5}ՙPK!tensor_bfloat16_proto3_zip/data/0FBZZZZZZZZZZZZZZZZZZZZ?Z@PK#PK"(tensor_bfloat16_proto3_zip/versionFB$ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK5}ՙ#tensor_bfloat16_proto3_zip/data.pklPK#!)tensor_bfloat16_proto3_zip/data/0PKўgU"tensor_bfloat16_proto3_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto4.pt000066400000000000000000000005411453132224200234720ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorchBFloat16Storage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.?Z@gopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto4_zip.pt000066400000000000000000000015441453132224200243600ustar00rootroot00000000000000PK#?tensor_bfloat16_proto4_zip/data.pklFB;ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorchBFloat16Storage0cpuKtQKKK collections OrderedDict)RtR.PK&PK!tensor_bfloat16_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZ?Z@PK#PK"(tensor_bfloat16_proto4_zip/versionFB$ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK&#tensor_bfloat16_proto4_zip/data.pklPK#!)tensor_bfloat16_proto4_zip/data/0PKўgU"tensor_bfloat16_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto5.pt000066400000000000000000000005411453132224200234730ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. 
torch._utils_rebuild_tensor_v2((storagetorchBFloat16Storage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.?Z@gopickle-0.3.0/pytorch/testdata/tensor_bfloat16_proto5_zip.pt000066400000000000000000000015441453132224200243610ustar00rootroot00000000000000PK#?tensor_bfloat16_proto5_zip/data.pklFB;ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorchBFloat16Storage0cpuKtQKKK collections OrderedDict)RtR.PKŬPK!tensor_bfloat16_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZ?Z@PK#PK"(tensor_bfloat16_proto5_zip/versionFB$ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKŬ#tensor_bfloat16_proto5_zip/data.pklPK#!)tensor_bfloat16_proto5_zip/data/0PKўgU"tensor_bfloat16_proto5_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_bool_proto1.pt000066400000000000000000000005411453132224200230040ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BoolStorage qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa.gopickle-0.3.0/pytorch/testdata/tensor_bool_proto1_zip.pt000066400000000000000000000015301453132224200236650ustar00rootroot00000000000000PKCtensor_bool_proto1_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BoolStorage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PK=PKtensor_bool_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZPK8PK0tensor_bool_proto1_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK=tensor_bool_proto1_zip/data.pklPK8(tensor_bool_proto1_zip/data/0PKўgUtensor_bool_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_bool_proto2.pt000066400000000000000000000005241453132224200230060ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX 
type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BoolStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_bool_proto2_zip.pt000066400000000000000000000015301453132224200236660ustar00rootroot00000000000000PKCtensor_bool_proto2_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BoolStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKH=PK tensor_bool_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPK8PK0tensor_bool_proto2_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKH=tensor_bool_proto2_zip/data.pklPK8%tensor_bool_proto2_zip/data/0PKўgUtensor_bool_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_bool_proto3.pt000066400000000000000000000005241453132224200230070ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BoolStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_bool_proto3_zip.pt000066400000000000000000000015301453132224200236670ustar00rootroot00000000000000PKCtensor_bool_proto3_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch BoolStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKYfЕPK tensor_bool_proto3_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPK8PK0tensor_bool_proto3_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKYfЕtensor_bool_proto3_zip/data.pklPK8%tensor_bool_proto3_zip/data/0PKўgUtensor_bool_proto3_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_bool_proto4.pt000066400000000000000000000005311453132224200230060ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian 
type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch BoolStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.gopickle-0.3.0/pytorch/testdata/tensor_bool_proto4_zip.pt000066400000000000000000000015301453132224200236700ustar00rootroot00000000000000PKCtensor_bool_proto4_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch BoolStorage0cpuKtQKKK collections OrderedDict)RtR.PKPK tensor_bool_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPK8PK0tensor_bool_proto4_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKtensor_bool_proto4_zip/data.pklPK8%tensor_bool_proto4_zip/data/0PKўgUtensor_bool_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_bool_proto5.pt000066400000000000000000000005311453132224200230070ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch BoolStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.gopickle-0.3.0/pytorch/testdata/tensor_bool_proto5_zip.pt000066400000000000000000000015301453132224200236710ustar00rootroot00000000000000PKCtensor_bool_proto5_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch BoolStorage0cpuKtQKKK collections OrderedDict)RtR.PK2$PK tensor_bool_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPK8PK0tensor_bool_proto5_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK2$tensor_bool_proto5_zip/data.pklPK8%tensor_bool_proto5_zip/data/0PKўgUtensor_bool_proto5_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float16_proto1.pt000066400000000000000000000005451453132224200233310ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch HalfStorage 
qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa.<šEgopickle-0.3.0/pytorch/testdata/tensor_float16_proto1_zip.pt000066400000000000000000000015411453132224200242100ustar00rootroot00000000000000PK"@tensor_float16_proto1_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch HalfStorage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PKҽGPK tensor_float16_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZ<šEPKD PK!)tensor_float16_proto1_zip/versionFB%ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKҽG"tensor_float16_proto1_zip/data.pklPKD  (tensor_float16_proto1_zip/data/0PKўgU!tensor_float16_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float16_proto2.pt000066400000000000000000000005301453132224200233240ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch HalfStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.<šEgopickle-0.3.0/pytorch/testdata/tensor_float16_proto2_zip.pt000066400000000000000000000015411453132224200242110ustar00rootroot00000000000000PK"@tensor_float16_proto2_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch HalfStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKrȕPK tensor_float16_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZ<šEPKD PK!)tensor_float16_proto2_zip/versionFB%ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKrȕ"tensor_float16_proto2_zip/data.pklPKD  %tensor_float16_proto2_zip/data/0PKўgU!tensor_float16_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float16_proto3.pt000066400000000000000000000005301453132224200233250ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX 
type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch HalfStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.<šEgopickle-0.3.0/pytorch/testdata/tensor_float16_proto3_zip.pt000066400000000000000000000015411453132224200242120ustar00rootroot00000000000000PK"@tensor_float16_proto3_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch HalfStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKPK tensor_float16_proto3_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZ<šEPKD PK!)tensor_float16_proto3_zip/versionFB%ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK"tensor_float16_proto3_zip/data.pklPKD  %tensor_float16_proto3_zip/data/0PKўgU!tensor_float16_proto3_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float16_proto4.pt000066400000000000000000000005351453132224200233330ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch HalfStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.<šEgopickle-0.3.0/pytorch/testdata/tensor_float16_proto4_zip.pt000066400000000000000000000015411453132224200242130ustar00rootroot00000000000000PK"@tensor_float16_proto4_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch HalfStorage0cpuKtQKKK collections OrderedDict)RtR.PKjPK tensor_float16_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZ<šEPKD PK!)tensor_float16_proto4_zip/versionFB%ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKj"tensor_float16_proto4_zip/data.pklPKD  %tensor_float16_proto4_zip/data/0PKўgU!tensor_float16_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float16_proto5.pt000066400000000000000000000005351453132224200233340ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. 
torch._utils_rebuild_tensor_v2((storagetorch HalfStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.<šEgopickle-0.3.0/pytorch/testdata/tensor_float16_proto5_zip.pt000066400000000000000000000015411453132224200242140ustar00rootroot00000000000000PK"@tensor_float16_proto5_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch HalfStorage0cpuKtQKKK collections OrderedDict)RtR.PK`PK tensor_float16_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZ<šEPKD PK!)tensor_float16_proto5_zip/versionFB%ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK`"tensor_float16_proto5_zip/data.pklPKD  %tensor_float16_proto5_zip/data/0PKўgU!tensor_float16_proto5_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float32_proto1.pt000066400000000000000000000005561453132224200233310ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch FloatStorage qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa.?Y33@gopickle-0.3.0/pytorch/testdata/tensor_float32_proto1_zip.pt000066400000000000000000000015411453132224200242060ustar00rootroot00000000000000PK"@tensor_float32_proto1_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch FloatStorage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PKPK tensor_float32_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZ?Y33@PKDPK!!tensor_float32_proto1_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK"tensor_float32_proto1_zip/data.pklPKD )tensor_float32_proto1_zip/data/0PKўgU!tensor_float32_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float32_proto2.pt000066400000000000000000000005411453132224200233240ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX 
type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch FloatStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.?Y33@gopickle-0.3.0/pytorch/testdata/tensor_float32_proto2_zip.pt000066400000000000000000000015411453132224200242070ustar00rootroot00000000000000PK"@tensor_float32_proto2_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch FloatStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PK:tPK tensor_float32_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZ?Y33@PKDPK!!tensor_float32_proto2_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK:t"tensor_float32_proto2_zip/data.pklPKD &tensor_float32_proto2_zip/data/0PKўgU!tensor_float32_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float32_proto3.pt000066400000000000000000000005411453132224200233250ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch FloatStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.?Y33@gopickle-0.3.0/pytorch/testdata/tensor_float32_proto3_zip.pt000066400000000000000000000015411453132224200242100ustar00rootroot00000000000000PK"@tensor_float32_proto3_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch FloatStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PK2dPK tensor_float32_proto3_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZ?Y33@PKDPK!!tensor_float32_proto3_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK2d"tensor_float32_proto3_zip/data.pklPKD &tensor_float32_proto3_zip/data/0PKўgU!tensor_float32_proto3_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float32_proto4.pt000066400000000000000000000005461453132224200233330ustar00rootroot00000000000000 lF 
jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch FloatStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.?Y33@gopickle-0.3.0/pytorch/testdata/tensor_float32_proto4_zip.pt000066400000000000000000000015411453132224200242110ustar00rootroot00000000000000PK"@tensor_float32_proto4_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch FloatStorage0cpuKtQKKK collections OrderedDict)RtR.PKPK tensor_float32_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZ?Y33@PKDPK!!tensor_float32_proto4_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK"tensor_float32_proto4_zip/data.pklPKD &tensor_float32_proto4_zip/data/0PKўgU!tensor_float32_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float32_proto5.pt000066400000000000000000000005461453132224200233340ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. 
torch._utils_rebuild_tensor_v2((storagetorch FloatStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.?Y33@gopickle-0.3.0/pytorch/testdata/tensor_float32_proto5_zip.pt000066400000000000000000000015411453132224200242120ustar00rootroot00000000000000PK"@tensor_float32_proto5_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch FloatStorage0cpuKtQKKK collections OrderedDict)RtR.PK[PK tensor_float32_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZ?Y33@PKDPK!!tensor_float32_proto5_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK["tensor_float32_proto5_zip/data.pklPKD &tensor_float32_proto5_zip/data/0PKўgU!tensor_float32_proto5_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float64_proto1.pt000066400000000000000000000005771453132224200233410ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch DoubleStorage qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa.333333?333333 ffffff@333333gopickle-0.3.0/pytorch/testdata/tensor_float64_proto1_zip.pt000066400000000000000000000015411453132224200242130ustar00rootroot00000000000000PK"@tensor_float64_proto1_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch DoubleStorage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PKMnPK tensor_float64_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZZZZ333333?333333 ffffff@333333PKc PK!tensor_float64_proto1_zip/versionFB ZZZZZZZZZZZZZ3 PKўgUPKMn"tensor_float64_proto1_zip/data.pklPKc *tensor_float64_proto1_zip/data/0PKўgU!tensor_float64_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float64_proto2.pt000066400000000000000000000005621453132224200233340ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX 
little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch DoubleStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.333333?333333 ffffff@333333gopickle-0.3.0/pytorch/testdata/tensor_float64_proto2_zip.pt000066400000000000000000000015411453132224200242140ustar00rootroot00000000000000PK"@tensor_float64_proto2_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch DoubleStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PK}_PK tensor_float64_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZ333333?333333 ffffff@333333PKc PK!tensor_float64_proto2_zip/versionFB ZZZZZZZZZZZZZ3 PKўgUPK}_"tensor_float64_proto2_zip/data.pklPKc 'tensor_float64_proto2_zip/data/0PKўgU!tensor_float64_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float64_proto3.pt000066400000000000000000000005621453132224200233350ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch DoubleStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.333333?333333 ffffff@333333gopickle-0.3.0/pytorch/testdata/tensor_float64_proto3_zip.pt000066400000000000000000000015411453132224200242150ustar00rootroot00000000000000PK"@tensor_float64_proto3_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch DoubleStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PK.EPK tensor_float64_proto3_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZ333333?333333 ffffff@333333PKc PK!tensor_float64_proto3_zip/versionFB ZZZZZZZZZZZZZ3 PKўgUPK.E"tensor_float64_proto3_zip/data.pklPKc 
'tensor_float64_proto3_zip/data/0PKўgU!tensor_float64_proto3_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float64_proto4.pt000066400000000000000000000005671453132224200233430ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch DoubleStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.333333?333333 ffffff@333333gopickle-0.3.0/pytorch/testdata/tensor_float64_proto4_zip.pt000066400000000000000000000015411453132224200242160ustar00rootroot00000000000000PK"@tensor_float64_proto4_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch DoubleStorage0cpuKtQKKK collections OrderedDict)RtR.PKPK tensor_float64_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZ333333?333333 ffffff@333333PKc PK!tensor_float64_proto4_zip/versionFB ZZZZZZZZZZZZZ3 PKўgUPK"tensor_float64_proto4_zip/data.pklPKc 'tensor_float64_proto4_zip/data/0PKўgU!tensor_float64_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_float64_proto5.pt000066400000000000000000000005671453132224200233440ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. 
torch._utils_rebuild_tensor_v2((storagetorch DoubleStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.333333?333333 ffffff@333333gopickle-0.3.0/pytorch/testdata/tensor_float64_proto5_zip.pt000066400000000000000000000015411453132224200242170ustar00rootroot00000000000000PK"@tensor_float64_proto5_zip/data.pklFB<ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch DoubleStorage0cpuKtQKKK collections OrderedDict)RtR.PKPK tensor_float64_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZ333333?333333 ffffff@333333PKc PK!tensor_float64_proto5_zip/versionFB ZZZZZZZZZZZZZ3 PKўgUPK"tensor_float64_proto5_zip/data.pklPKc 'tensor_float64_proto5_zip/data/0PKўgU!tensor_float64_proto5_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int16_proto1.pt000066400000000000000000000005461453132224200230170ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ShortStorage qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa.gopickle-0.3.0/pytorch/testdata/tensor_int16_proto1_zip.pt000066400000000000000000000015331453132224200236760ustar00rootroot00000000000000PK Btensor_int16_proto1_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ShortStorage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PKg翙PKtensor_int16_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZPKPK+tensor_int16_proto1_zip/versionFB'ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKg翙 tensor_int16_proto1_zip/data.pklPK)tensor_int16_proto1_zip/data/0PKўgUtensor_int16_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int16_proto2.pt000066400000000000000000000005311453132224200230120ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX 
type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ShortStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_int16_proto2_zip.pt000066400000000000000000000015331453132224200236770ustar00rootroot00000000000000PK Btensor_int16_proto2_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ShortStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKPKtensor_int16_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZPKPK+tensor_int16_proto2_zip/versionFB'ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK tensor_int16_proto2_zip/data.pklPK&tensor_int16_proto2_zip/data/0PKўgUtensor_int16_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int16_proto3.pt000066400000000000000000000005311453132224200230130ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ShortStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_int16_proto3_zip.pt000066400000000000000000000015331453132224200237000ustar00rootroot00000000000000PK Btensor_int16_proto3_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ShortStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKPKtensor_int16_proto3_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZPKPK+tensor_int16_proto3_zip/versionFB'ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK tensor_int16_proto3_zip/data.pklPK&tensor_int16_proto3_zip/data/0PKўgUtensor_int16_proto3_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int16_proto4.pt000066400000000000000000000005361453132224200230210ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian 
type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch ShortStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.gopickle-0.3.0/pytorch/testdata/tensor_int16_proto4_zip.pt000066400000000000000000000015331453132224200237010ustar00rootroot00000000000000PK Btensor_int16_proto4_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch ShortStorage0cpuKtQKKK collections OrderedDict)RtR.PK ؖPKtensor_int16_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZPKPK+tensor_int16_proto4_zip/versionFB'ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK ؖ tensor_int16_proto4_zip/data.pklPK&tensor_int16_proto4_zip/data/0PKўgUtensor_int16_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int16_proto5.pt000066400000000000000000000005361453132224200230220ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch ShortStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.gopickle-0.3.0/pytorch/testdata/tensor_int16_proto5_zip.pt000066400000000000000000000015331453132224200237020ustar00rootroot00000000000000PK Btensor_int16_proto5_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch ShortStorage0cpuKtQKKK collections OrderedDict)RtR.PK(ȖPKtensor_int16_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZPKPK+tensor_int16_proto5_zip/versionFB'ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK(Ȗ tensor_int16_proto5_zip/data.pklPK&tensor_int16_proto5_zip/data/0PKўgUtensor_int16_proto5_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int32_proto1.pt000066400000000000000000000005541453132224200230140ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch 
IntStorage qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa.gopickle-0.3.0/pytorch/testdata/tensor_int32_proto1_zip.pt000066400000000000000000000015331453132224200236740ustar00rootroot00000000000000PK Btensor_int32_proto1_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch IntStorage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PK#ZPKtensor_int32_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZPK'PK#tensor_int32_proto1_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK#Z tensor_int32_proto1_zip/data.pklPK''tensor_int32_proto1_zip/data/0PKўgUtensor_int32_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int32_proto2.pt000066400000000000000000000005371453132224200230160ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch IntStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_int32_proto2_zip.pt000066400000000000000000000015331453132224200236750ustar00rootroot00000000000000PK Btensor_int32_proto2_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch IntStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKPK tensor_int32_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPK'PK#tensor_int32_proto2_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK tensor_int32_proto2_zip/data.pklPK'$tensor_int32_proto2_zip/data/0PKўgUtensor_int32_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int32_proto3.pt000066400000000000000000000005371453132224200230170ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch IntStorage 
qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_int32_proto3_zip.pt000066400000000000000000000015331453132224200236760ustar00rootroot00000000000000PK Btensor_int32_proto3_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch IntStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKzPK tensor_int32_proto3_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPK'PK#tensor_int32_proto3_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKz tensor_int32_proto3_zip/data.pklPK'$tensor_int32_proto3_zip/data/0PKўgUtensor_int32_proto3_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int32_proto4.pt000066400000000000000000000005441453132224200230160ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch IntStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.gopickle-0.3.0/pytorch/testdata/tensor_int32_proto4_zip.pt000066400000000000000000000015331453132224200236770ustar00rootroot00000000000000PK Btensor_int32_proto4_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch IntStorage0cpuKtQKKK collections OrderedDict)RtR.PKaٹPK tensor_int32_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPK'PK#tensor_int32_proto4_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKaٹ tensor_int32_proto4_zip/data.pklPK'$tensor_int32_proto4_zip/data/0PKўgUtensor_int32_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int32_proto5.pt000066400000000000000000000005441453132224200230170ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. 
torch._utils_rebuild_tensor_v2((storagetorch IntStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.gopickle-0.3.0/pytorch/testdata/tensor_int32_proto5_zip.pt000066400000000000000000000015331453132224200237000ustar00rootroot00000000000000PK Btensor_int32_proto5_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch IntStorage0cpuKtQKKK collections OrderedDict)RtR.PKƺ|PK tensor_int32_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPK'PK#tensor_int32_proto5_zip/versionFBZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKƺ| tensor_int32_proto5_zip/data.pklPK'$tensor_int32_proto5_zip/data/0PKўgUtensor_int32_proto5_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int64_proto1.pt000066400000000000000000000005751453132224200230240ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch LongStorage qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa.gopickle-0.3.0/pytorch/testdata/tensor_int64_proto1_zip.pt000066400000000000000000000015331453132224200237010ustar00rootroot00000000000000PK Btensor_int64_proto1_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch LongStorage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PKPKtensor_int64_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZPK& PKtensor_int64_proto1_zip/versionFBZZZZZZZZZZZZZZZ3 PKўgUPK tensor_int64_proto1_zip/data.pklPK& (tensor_int64_proto1_zip/data/0PKўgUtensor_int64_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int64_proto2.pt000066400000000000000000000005601453132224200230170ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch 
LongStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_int64_proto2_zip.pt000066400000000000000000000015331453132224200237020ustar00rootroot00000000000000PK Btensor_int64_proto2_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch LongStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKܚaPKtensor_int64_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZPK& PKtensor_int64_proto2_zip/versionFBZZZZZZZZZZZZZZZ3 PKўgUPKܚa tensor_int64_proto2_zip/data.pklPK& %tensor_int64_proto2_zip/data/0PKўgUtensor_int64_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int64_proto3.pt000066400000000000000000000005601453132224200230200ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch LongStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_int64_proto3_zip.pt000066400000000000000000000015331453132224200237030ustar00rootroot00000000000000PK Btensor_int64_proto3_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch LongStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch LongStorage0cpuKtQKKK collections OrderedDict)RtR.PKܑՕPKtensor_int64_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZPK& PKtensor_int64_proto4_zip/versionFBZZZZZZZZZZZZZZZ3 PKўgUPKܑՕ tensor_int64_proto4_zip/data.pklPK& %tensor_int64_proto4_zip/data/0PKўgUtensor_int64_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int64_proto5.pt000066400000000000000000000005651453132224200230270ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM 
little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch LongStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.gopickle-0.3.0/pytorch/testdata/tensor_int64_proto5_zip.pt000066400000000000000000000015331453132224200237050ustar00rootroot00000000000000PK Btensor_int64_proto5_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch LongStorage0cpuKtQKKK collections OrderedDict)RtR.PK7CPKtensor_int64_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZPK& PKtensor_int64_proto5_zip/versionFBZZZZZZZZZZZZZZZ3 PKўgUPK7C tensor_int64_proto5_zip/data.pklPK& %tensor_int64_proto5_zip/data/0PKўgUtensor_int64_proto5_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int8_proto1.pt000066400000000000000000000005411453132224200227330ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch CharStorage qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa.gopickle-0.3.0/pytorch/testdata/tensor_int8_proto1_zip.pt000066400000000000000000000015301453132224200236140ustar00rootroot00000000000000PKCtensor_int8_proto1_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch CharStorage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PKyPKtensor_int8_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZPKW PK0tensor_int8_proto1_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKytensor_int8_proto1_zip/data.pklPKW (tensor_int8_proto1_zip/data/0PKўgUtensor_int8_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int8_proto2.pt000066400000000000000000000005241453132224200227350ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX 
type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch CharStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_int8_proto2_zip.pt000066400000000000000000000015301453132224200236150ustar00rootroot00000000000000PKCtensor_int8_proto2_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch CharStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PK#uPK tensor_int8_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPKW PK0tensor_int8_proto2_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK#utensor_int8_proto2_zip/data.pklPKW %tensor_int8_proto2_zip/data/0PKўgUtensor_int8_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int8_proto3.pt000066400000000000000000000005241453132224200227360ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch CharStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa.gopickle-0.3.0/pytorch/testdata/tensor_int8_proto3_zip.pt000066400000000000000000000015301453132224200236160ustar00rootroot00000000000000PKCtensor_int8_proto3_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch CharStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PK PK tensor_int8_proto3_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPKW PK0tensor_int8_proto3_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK tensor_int8_proto3_zip/data.pklPKW %tensor_int8_proto3_zip/data/0PKўgUtensor_int8_proto3_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int8_proto4.pt000066400000000000000000000005311453132224200227350ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian 
type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch CharStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.gopickle-0.3.0/pytorch/testdata/tensor_int8_proto4_zip.pt000066400000000000000000000015301453132224200236170ustar00rootroot00000000000000PKCtensor_int8_proto4_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch CharStorage0cpuKtQKKK collections OrderedDict)RtR.PKf*PK tensor_int8_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPKW PK0tensor_int8_proto4_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKf*tensor_int8_proto4_zip/data.pklPKW %tensor_int8_proto4_zip/data/0PKўgUtensor_int8_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_int8_proto5.pt000066400000000000000000000005311453132224200227360ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch CharStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a.gopickle-0.3.0/pytorch/testdata/tensor_int8_proto5_zip.pt000066400000000000000000000015301453132224200236200ustar00rootroot00000000000000PKCtensor_int8_proto5_zip/data.pklFB?ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch CharStorage0cpuKtQKKK collections OrderedDict)RtR.PKXPK tensor_int8_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZZPKW PK0tensor_int8_proto5_zip/versionFB,ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKXtensor_int8_proto5_zip/data.pklPKW %tensor_int8_proto5_zip/data/0PKўgUtensor_int8_proto5_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto1.pt000066400000000000000000000005411453132224200231200ustar00rootroot00000000000000L119547037146038801333356L .M.}q(Xprotocol_versionqMX little_endianqI01 X type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch 
ByteStorage qX94282293535712qXcpuqKNtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .]qX94282293535712qa. dgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto1_zip.pt000066400000000000000000000015331453132224200240040ustar00rootroot00000000000000PK Btensor_uint8_proto1_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ByteStorage qX0qXcpuqKtqQK(Ktq(KtqI00 ccollections OrderedDict q)Rq tq Rq .PK^ϖPKtensor_uint8_proto1_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZ dPK|PK/tensor_uint8_proto1_zip/versionFB+ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK^ϖ tensor_uint8_proto1_zip/data.pklPK|(tensor_uint8_proto1_zip/data/0PKўgUtensor_uint8_proto1_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto2.pt000066400000000000000000000005241453132224200231220ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ByteStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa. 
dgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto2_zip.pt000066400000000000000000000015331453132224200240050ustar00rootroot00000000000000PK Btensor_uint8_proto2_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ByteStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKPKtensor_uint8_proto2_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZ dPK|PK/tensor_uint8_proto2_zip/versionFB+ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPK tensor_uint8_proto2_zip/data.pklPK|%tensor_uint8_proto2_zip/data/0PKўgUtensor_uint8_proto2_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto3.pt000066400000000000000000000005241453132224200231230ustar00rootroot00000000000000 lF jP.M.}q(Xprotocol_versionqMX little_endianqX type_sizesq}q(XshortqKXintqKXlongqKuu.ctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ByteStorage qX94282293536288qXcpuqKNtqQKKqKqccollections OrderedDict q)Rq tq Rq .]qX94282293536288qa. dgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto3_zip.pt000066400000000000000000000015331453132224200240060ustar00rootroot00000000000000PK Btensor_uint8_proto3_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZctorch._utils _rebuild_tensor_v2 q((Xstorageqctorch ByteStorage qX0qXcpuqKtqQKKqKqccollections OrderedDict q)Rq tq Rq .PKZ PKtensor_uint8_proto3_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZ dPK|PK/tensor_uint8_proto3_zip/versionFB+ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKZ tensor_uint8_proto3_zip/data.pklPK|%tensor_uint8_proto3_zip/data/0PKўgUtensor_uint8_proto3_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto4.pt000066400000000000000000000005311453132224200231220ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch ByteStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a. 
dgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto4_zip.pt000066400000000000000000000015331453132224200240070ustar00rootroot00000000000000PK Btensor_uint8_proto4_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch ByteStorage0cpuKtQKKK collections OrderedDict)RtR.PKDPKtensor_uint8_proto4_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZ dPK|PK/tensor_uint8_proto4_zip/versionFB+ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKD tensor_uint8_proto4_zip/data.pklPK|%tensor_uint8_proto4_zip/data/0PKўgUtensor_uint8_proto4_zip/versionPK,-PKPKgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto5.pt000066400000000000000000000005311453132224200231230ustar00rootroot00000000000000 lF jP.M.X}(protocol_versionM little_endian type_sizes}(shortKintKlongKuu. torch._utils_rebuild_tensor_v2((storagetorch ByteStorage94282293536288cpuKNtQKKK collections OrderedDict)RtR.]94282293536288a. dgopickle-0.3.0/pytorch/testdata/tensor_uint8_proto5_zip.pt000066400000000000000000000015331453132224200240100ustar00rootroot00000000000000PK Btensor_uint8_proto5_zip/data.pklFB>ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ torch._utils_rebuild_tensor_v2((storagetorch ByteStorage0cpuKtQKKK collections OrderedDict)RtR.PKo •PKtensor_uint8_proto5_zip/data/0FBZZZZZZZZZZZZZZZZZZZZZZZZZZZ dPK|PK/tensor_uint8_proto5_zip/versionFB+ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ3 PKўgUPKo • tensor_uint8_proto5_zip/data.pklPK|%tensor_uint8_proto5_zip/data/0PKўgUtensor_uint8_proto5_zip/versionPK,-PKPKgopickle-0.3.0/types/000077500000000000000000000000001453132224200144515ustar00rootroot00000000000000gopickle-0.3.0/types/array.go000066400000000000000000000136111453132224200161200ustar00rootroot00000000000000// Copyright 2023 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
package types import ( "encoding/binary" "fmt" "math" "unicode/utf8" "golang.org/x/text/encoding" "golang.org/x/text/encoding/unicode" "golang.org/x/text/encoding/unicode/utf32" ) // Array unpickles array.array values as documented in: // // https://docs.python.org/3/library/array.html type Array struct{} var _ Callable = (*Array)(nil) func (Array) Call(args ...interface{}) (interface{}, error) { if got, want := len(args), 4; got != want { return nil, fmt.Errorf("invalid number of arguments (got=%d, want=%d)", got, want) } typ, ok := args[1].(string) if !ok { return nil, fmt.Errorf("invalid array type argument %T", args[1]) } mi, ok := args[2].(int) if !ok { return nil, fmt.Errorf("invalid array mformat code type %T", args[2]) } if mi >= len(arrayDescriptors) { return nil, fmt.Errorf("invalid array mformat value %d", mi) } descr := arrayDescriptors[mi] raw, ok := args[3].([]byte) if !ok { return nil, fmt.Errorf("invalid array payload type %T", args[3]) } switch typ { case "b": vs := make([]int8, len(raw)) for i := 0; i < len(raw); i++ { vs[i] = int8(raw[i]) } return vs, nil case "B": return raw, nil case "u": vs := make([]rune, 0, utf8.RuneCount(raw)) var enc encoding.Encoding switch descr.Size { case 4: order := unicode.BigEndian if descr.Order == binary.LittleEndian { order = unicode.LittleEndian } enc = unicode.UTF16(order, unicode.IgnoreBOM) case 8: order := utf32.BigEndian if descr.Order == binary.LittleEndian { order = utf32.LittleEndian } enc = utf32.UTF32(order, utf32.IgnoreBOM) default: return nil, fmt.Errorf("invalid machine description size (got=%d, want=4 or 8)", descr.Size) } dec := enc.NewDecoder() raw, err := dec.Bytes(raw) if err != nil { return nil, err } i := 0 loop: for { r, sz := utf8.DecodeRune(raw[i:]) switch r { case utf8.RuneError: if sz == 0 { break loop } return vs, fmt.Errorf("invalid rune") default: vs = append(vs, r) i += sz } } return vs, nil case "h": sz := descr.Size vs := make([]int16, len(raw)/sz) for i := 0; i < len(raw); i += sz 
{ vs[i/sz] = int16(descr.Order.Uint16(raw[i:])) } return vs, nil case "H": sz := descr.Size vs := make([]uint16, len(raw)/sz) for i := 0; i < len(raw); i += sz { vs[i/sz] = descr.Order.Uint16(raw[i:]) } return vs, nil case "i": sz := descr.Size vs := make([]int32, len(raw)/sz) for i := 0; i < len(raw); i += sz { vs[i/sz] = int32(descr.Order.Uint32(raw[i:])) } return vs, nil case "I": sz := descr.Size vs := make([]uint32, len(raw)/sz) for i := 0; i < len(raw); i += sz { vs[i/sz] = descr.Order.Uint32(raw[i:]) } return vs, nil case "l": sz := descr.Size vs := make([]int64, len(raw)/sz) for i := 0; i < len(raw); i += sz { vs[i/sz] = int64(descr.Order.Uint64(raw[i:])) } return vs, nil case "L": sz := descr.Size vs := make([]uint64, len(raw)/sz) for i := 0; i < len(raw); i += sz { vs[i/sz] = descr.Order.Uint64(raw[i:]) } return vs, nil case "q": sz := descr.Size vs := make([]int64, len(raw)/sz) for i := 0; i < len(raw); i += sz { vs[i/sz] = int64(descr.Order.Uint64(raw[i:])) } return vs, nil case "Q": sz := descr.Size vs := make([]uint64, len(raw)/sz) for i := 0; i < len(raw); i += sz { vs[i/sz] = descr.Order.Uint64(raw[i:]) } return vs, nil case "f": sz := descr.Size vs := make([]float32, len(raw)/sz) for i := 0; i < len(raw); i += sz { vs[i/sz] = math.Float32frombits(descr.Order.Uint32(raw[i:])) } return vs, nil case "d": sz := descr.Size vs := make([]float64, len(raw)/sz) for i := 0; i < len(raw); i += sz { vs[i/sz] = math.Float64frombits(descr.Order.Uint64(raw[i:])) } return vs, nil default: return nil, fmt.Errorf("invalid array typecode '%s'", typ) } panic("impossible") } type arrayDescriptor struct { Size int Signed bool Order binary.ByteOrder } var ( arrayDescriptors = []arrayDescriptor{ 0: {Size: 1, Signed: false, Order: binary.LittleEndian}, // 0: UNSIGNED_INT8 1: {Size: 1, Signed: true, Order: binary.LittleEndian}, // 1: SIGNED_INT8 2: {Size: 2, Signed: false, Order: binary.LittleEndian}, // 2: UNSIGNED_INT16_LE 3: {Size: 2, Signed: false, Order: 
binary.BigEndian}, // 3: UNSIGNED_INT16_BE 4: {Size: 2, Signed: true, Order: binary.LittleEndian}, // 4: SIGNED_INT16_LE 5: {Size: 2, Signed: true, Order: binary.BigEndian}, // 5: SIGNED_INT16_BE 6: {Size: 4, Signed: false, Order: binary.LittleEndian}, // 6: UNSIGNED_INT32_LE 7: {Size: 4, Signed: false, Order: binary.BigEndian}, // 7: UNSIGNED_INT32_BE 8: {Size: 4, Signed: true, Order: binary.LittleEndian}, // 8: SIGNED_INT32_LE 9: {Size: 4, Signed: true, Order: binary.BigEndian}, // 9: SIGNED_INT32_BE 10: {Size: 8, Signed: false, Order: binary.LittleEndian}, // 10: UNSIGNED_INT64_LE 11: {Size: 8, Signed: false, Order: binary.BigEndian}, // 11: UNSIGNED_INT64_BE 12: {Size: 8, Signed: true, Order: binary.LittleEndian}, // 12: SIGNED_INT64_LE 13: {Size: 8, Signed: true, Order: binary.BigEndian}, // 13: SIGNED_INT64_BE 14: {Size: 4, Signed: false, Order: binary.LittleEndian}, // 14: IEEE_754_FLOAT_LE 15: {Size: 4, Signed: false, Order: binary.BigEndian}, // 15: IEEE_754_FLOAT_BE 16: {Size: 8, Signed: false, Order: binary.LittleEndian}, // 16: IEEE_754_DOUBLE_LE 17: {Size: 8, Signed: false, Order: binary.BigEndian}, // 17: IEEE_754_DOUBLE_BE 18: {Size: 4, Signed: false, Order: binary.LittleEndian}, // 18: UTF16_LE 19: {Size: 4, Signed: false, Order: binary.BigEndian}, // 19: UTF16_BE 20: {Size: 8, Signed: false, Order: binary.LittleEndian}, // 20: UTF32_LE 21: {Size: 8, Signed: false, Order: binary.BigEndian}, // 21: UTF32_BE } ) gopickle-0.3.0/types/bytearray.go000066400000000000000000000016711453132224200170070ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types // ByteArray represents a Python "bytearray" (builtin type). type ByteArray []byte // NewByteArray makes and returns a new empty ByteArray. 
func NewByteArray() *ByteArray { b := make(ByteArray, 0) return &b } // NewByteArrayFromSlice makes and returns a new ByteArray initialized with // the elements of the given slice. // // The new ByteArray is a simple type cast of the input slice; the slice is // _not_ copied. func NewByteArrayFromSlice(slice []byte) *ByteArray { b := ByteArray(slice) return &b } // Get returns the element of the ByteArray at the given index. // // It panics if the index is out of range. func (b *ByteArray) Get(i int) byte { return (*b)[i] } // Len returns the length of the ByteArray. func (b *ByteArray) Len() int { return len(*b) } gopickle-0.3.0/types/dict.go000066400000000000000000000044361453132224200157320ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types import ( "fmt" "reflect" "strings" ) // DictSetter is implemented by any value that exhibits a dict-like behaviour, // allowing arbitrary key/value pairs to be set. type DictSetter interface { Set(key, value interface{}) } // Dict represents a Python "dict" (builtin type). // // It is implemented as a slice, instead of a map, because in Go not // all types can be map's keys (e.g. slices). type Dict []DictEntry type DictEntry struct { Key interface{} Value interface{} } var _ DictSetter = &Dict{} // NewDict makes and returns a new empty Dict. func NewDict() *Dict { d := make(Dict, 0, 4) return &d } // Set sets into the Dict the given key/value pair. func (d *Dict) Set(key, value interface{}) { *d = append(*d, DictEntry{ Key: key, Value: value, }) } // Get returns the value associated with the given key (if any), and whether // the key is present or not. 
func (d *Dict) Get(key interface{}) (interface{}, bool) { for _, entry := range *d { if reflect.DeepEqual(entry.Key, key) { return entry.Value, true } } return nil, false } // MustGet returns the value associated with the given key, if if it exists, // otherwise it panics. func (d *Dict) MustGet(key interface{}) interface{} { value, ok := d.Get(key) if !ok { panic(fmt.Errorf("key not found in Dict: %#v", key)) } return value } // Len returns the length of the Dict, that is, the amount of key/value pairs // contained by the Dict. func (d *Dict) Len() int { return len(*d) } // Keys returns the keys of the dict func (d *Dict) Keys() []interface{} { out := make([]interface{}, len(*d)) for i, entry := range *d { out[i] = entry.Key } return out } func (*Dict) Call(args ...interface{}) (interface{}, error) { if len(args) == 0 { return NewDict(), nil } if len(args) == 1 { return args[0], nil } return nil, fmt.Errorf("Dict: invalid arguments: %#v", args) } func (d *Dict) String() string { if d == nil { return "nil" } o := new(strings.Builder) o.WriteString("{") for i, e := range *d { if i > 0 { o.WriteString(", ") } fmt.Fprintf(o, "%v: %v", e.Key, e.Value) } o.WriteString("}") return o.String() } gopickle-0.3.0/types/dict_test.go000066400000000000000000000020411453132224200167570ustar00rootroot00000000000000// Copyright 2023 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
package types import ( "bytes" "fmt" "testing" ) func TestDictCall(t *testing.T) { d := NewDict() d.Set("foo", "bar") args := []interface{}{d} result, _ := d.Call(args) resultdict := *result.([]interface{})[0].(*Dict) actual, _ := resultdict.Get("foo") expected := "bar" if actual != expected { t.Errorf("expected %v, actual: %v", expected, actual) } } func TestDictStringer(t *testing.T) { dct := NewDict() dct.Set("empty", NewDict()) dct.Set("one", "un") dct.Set("two", "deux") sub := NewDict() sub.Set("eins", "one") sub.Set("zwei", []string{"two", "deux"}) sub.Set(2, []int{2 * 2, 2 * 3, 2 * 4}) dct.Set("sub", sub) buf := new(bytes.Buffer) fmt.Fprintf(buf, "%v", dct) var ( got = buf.String() want = "{empty: {}, one: un, two: deux, sub: {eins: one, zwei: [two deux], 2: [4 6 8]}}" ) if got != want { t.Fatalf("got= %q\nwant=%q", got, want) } } gopickle-0.3.0/types/doc.go000066400000000000000000000011251453132224200155440ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. /* Package types provides a minimal implementation for commonly used Python classes, objects and functions. Most of the types implemented here are related to Python builtins or the Python standard library. Only a minimal subset of traits is reproduced here for the sole purpose of making them work with the unpickling machine and, more in general, in the context of the packages provided by the GoPickle library. */ package types gopickle-0.3.0/types/frozenset.go000066400000000000000000000020131453132224200170130ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types // FrozenSet represents a Python "frozenset" (builtin type). 
// // It is implemented in Go as a map with empty struct values; the actual set // of generic "interface{}" items is thus represented by all the keys. type FrozenSet map[interface{}]frozenSetEmptyStruct type frozenSetEmptyStruct struct{} // NewFrozenSetFromSlice makes and returns a new FrozenSet initialized // with the elements of the given slice. func NewFrozenSetFromSlice(slice []interface{}) *FrozenSet { f := make(FrozenSet, len(slice)) for _, item := range slice { f[item] = frozenSetEmptyStruct{} } return &f } // Len returns the length of the FrozenSet. func (f *FrozenSet) Len() int { return len(*f) } // Has returns whether the given value is present in the FrozenSet (true) // or not (false). func (f *FrozenSet) Has(v interface{}) bool { _, ok := (*f)[v] return ok } gopickle-0.3.0/types/generic_class.go000066400000000000000000000011711453132224200176010ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types type GenericClass struct { Module string Name string } var _ PyNewable = &GenericClass{} type GenericObject struct { Class *GenericClass ConstructorArgs []interface{} } func NewGenericClass(module, name string) *GenericClass { return &GenericClass{Module: module, Name: name} } func (g *GenericClass) PyNew(args ...interface{}) (interface{}, error) { return &GenericObject{ Class: g, ConstructorArgs: args, }, nil } gopickle-0.3.0/types/interfaces.go000066400000000000000000000046141453132224200171300ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types // Callable is implemented by any value that can be directly called to get a // new value. 
// // It is usually implemented by Python-like functions (returning a value // given some arguments), or classes (typically returning an instance given // some constructor arguments). type Callable interface { // Call mimics a direct invocation on a Python value, such as a function // or class (constructor). Call(args ...interface{}) (interface{}, error) } // PyNewable is implemented by any value that has a Python-like // "__new__" method. // // It is usually implemented by values representing Python classes. type PyNewable interface { // PyNew mimics Python invocation of the "__new__" method, usually // provided by classes. // // See: https://docs.python.org/3/reference/datamodel.html#object.__new__ PyNew(args ...interface{}) (interface{}, error) } // PyStateSettable is implemented by any value that has a Python-like // "__setstate__" method. type PyStateSettable interface { // PySetState mimics Python invocation of the "__setstate__" method. // // See: https://docs.python.org/3/library/pickle.html#object.__setstate__ PySetState(state interface{}) error } // PyDictSettable is implemented by any value that can store dictionary-like // key/value pairs. It reflects Python behavior of setting a key/value pair on // an object's "__dict__" attribute. type PyDictSettable interface { // PyDictSet mimics the setting of a key/value pair on an object's //"__dict__" attribute. // // See: https://docs.python.org/3/library/stdtypes.html#object.__dict__ PyDictSet(key, value interface{}) error } // PyAttrSettable is implemented by any value on which an existing or new // Python-like attribute can be set. In Python this is done with "setattr" // builtin function. type PyAttrSettable interface { // PySetAttr mimics the setting of an arbitrary value to an object's // attribute. // // In Python this is done with "setattr" function, to which object, // attribute name, and value are passed. 
For an easy and clear // implementation, here instead we require this method to be implemented // on the "object" itself. // // See: https://docs.python.org/3/library/functions.html#setattr PySetAttr(key string, value interface{}) error } gopickle-0.3.0/types/list.go000066400000000000000000000032621453132224200157560ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types import ( "fmt" "strings" ) // ListAppender is implemented by any value that exhibits a list-like // behaviour, allowing arbitrary values to be appended. type ListAppender interface { Append(v interface{}) } // List represents a Python "list" (builtin type). type List []interface{} var _ ListAppender = &List{} // NewList makes and returns a new empty List. func NewList() *List { l := make(List, 0, 4) return &l } // NewListFromSlice makes and returns a new List initialized with the elements // of the given slice. // // The new List is a simple type cast of the input slice; the slice is _not_ // copied. func NewListFromSlice(slice []interface{}) *List { l := List(slice) return &l } // Append appends one element to the end of the List. func (l *List) Append(v interface{}) { *l = append(*l, v) } // Get returns the element of the List at the given index. // // It panics if the index is out of range. func (l *List) Get(i int) interface{} { return (*l)[i] } // Len returns the length of the List. 
func (l *List) Len() int { return len(*l) } func (*List) Call(args ...interface{}) (interface{}, error) { if len(args) == 0 { return NewList(), nil } if len(args) == 1 { return args[0], nil } return nil, fmt.Errorf("List: invalid arguments: %#v", args) } func (l *List) String() string { if l == nil { return "nil" } o := new(strings.Builder) o.WriteString("[") for i, v := range *l { if i > 0 { o.WriteString(", ") } fmt.Fprintf(o, "%v", v) } o.WriteString("]") return o.String() } gopickle-0.3.0/types/list_test.go000066400000000000000000000015611453132224200170150ustar00rootroot00000000000000// Copyright 2023 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types import ( "bytes" "fmt" "testing" ) func TestCall(t *testing.T) { list := NewList() list.Append("foo") args := []interface{}{list} result, _ := list.Call(args) actual := (*result.([]interface{})[0].(*List))[0] expected := "foo" if actual != expected { t.Errorf("expected %v, actual: %v", expected, actual) } } func TestListStringer(t *testing.T) { pylist := func(sli ...interface{}) *List { return NewListFromSlice(sli) } lst := pylist(pylist(), pylist(1), pylist(2, 3), pylist(pylist(4, 5), 6)) buf := new(bytes.Buffer) fmt.Fprintf(buf, "%v", lst) got := buf.String() want := "[[], [1], [2, 3], [[4, 5], 6]]" if got != want { t.Fatalf("got= %q\nwant=%q", got, want) } } gopickle-0.3.0/types/object.go000066400000000000000000000011171453132224200162460ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
package types import "fmt" type ObjectClass struct{} var _ PyNewable = &ObjectClass{} func (o *ObjectClass) PyNew(args ...interface{}) (interface{}, error) { if len(args) == 0 { return nil, fmt.Errorf("ObjectClass.PyNew called with no arguments") } switch class := args[0].(type) { case PyNewable: return class.PyNew() default: return nil, fmt.Errorf( "ObjectClass.PyNew unprocessable args: %#v", args) } } gopickle-0.3.0/types/ordered_dict.go000066400000000000000000000073761453132224200174440ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types import ( "container/list" "fmt" ) // OrderedDictClass represent Python "collections.OrderedDict" class. // // This class allows the indirect creation of OrderedDict objects. type OrderedDictClass struct{} var _ Callable = &OrderedDictClass{} // Call returns a new empty OrderedDict. It is equivalent to Python // constructor "collections.OrderedDict()". // // No arguments are supported. func (*OrderedDictClass) Call(args ...interface{}) (interface{}, error) { if len(args) != 0 { return nil, fmt.Errorf( "OrderedDictClass.Call args not supported: %#v", args) } return NewOrderedDict(), nil } // OrderedDict is a minimal and trivial implementation of an ordered map, // which represent a Python "collections.OrderedDict" object. // // It is composed by a simple unordered Map, and a List to keep the order of // the entries. The former is useful for direct key lookups, the latter for // iteration. type OrderedDict struct { // Map associates a key of any type (interface{}) to OrderedDictEntry // pointer values. These values are shared with List. Map map[interface{}]*OrderedDictEntry // List is an ordered list of OrderedDictEntry pointers, which are // also shared with Map. List *list.List // PyDict represents Python "object.__dict__" dictionary of attributes. 
PyDict map[string]interface{} } var _ DictSetter = &OrderedDict{} var _ PyDictSettable = &OrderedDict{} // OrderedDictEntry is a single key/value pair stored in an OrderedDict. // // A pointer to an OrderedDictEntry is always shared between OrderedDict's Map // and List. type OrderedDictEntry struct { // Key of a single OrderedDict's entry. Key interface{} // Value of a single OrderedDict's entry. Value interface{} // ListElement is a pointer to the OrderedDict's List Element which // contains this very OrderedDictEntry. ListElement *list.Element } // NewOrderedDict makes and returns a new empty OrderedDict. func NewOrderedDict() *OrderedDict { return &OrderedDict{ Map: make(map[interface{}]*OrderedDictEntry), List: list.New(), PyDict: make(map[string]interface{}), } } // Set sets into the OrderedDict the given key/value pair. If the key does not // exist yet, the new pair is positioned at the end (back) of the OrderedDict. // If the key already exists, the existing associated value is replaced with the // new one, and the original position is maintained. func (o *OrderedDict) Set(k, v interface{}) { if entry, ok := o.Map[k]; ok { entry.Value = v return } entry := &OrderedDictEntry{ Key: k, Value: v, } entry.ListElement = o.List.PushBack(entry) o.Map[k] = entry } // Get returns the value associated with the given key (if any), and whether // the key is present or not. func (o *OrderedDict) Get(k interface{}) (interface{}, bool) { entry, ok := o.Map[k] if !ok { return nil, false } return entry.Value, true } // MustGet returns the value associated with the given key, if if it exists, // otherwise it panics. func (o *OrderedDict) MustGet(key interface{}) interface{} { value, ok := o.Get(key) if !ok { panic(fmt.Errorf("key not found in OrderedDict: %#v", key)) } return value } // Len returns the length of the OrderedDict, that is, the amount of key/value // pairs contained by the OrderedDict. 
func (o *OrderedDict) Len() int { return len(o.Map) } // PyDictSet mimics the setting of a key/value pair on Python "__dict__" // attribute of the OrderedDict. func (o *OrderedDict) PyDictSet(key, value interface{}) error { sKey, keyOk := key.(string) if !keyOk { return fmt.Errorf( "OrderedDict.PyDictSet() requires string key: %#v", key) } o.PyDict[sKey] = value return nil } gopickle-0.3.0/types/reconstructor.go000066400000000000000000000011521453132224200177130ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types import "fmt" type Reconstructor struct{} var _ Callable = &Reconstructor{} func (r *Reconstructor) Call(args ...interface{}) (interface{}, error) { if len(args) < 2 { return nil, fmt.Errorf("Reconstructor: invalid arguments: %#v", args) } class := args[0] switch base := args[1].(type) { case PyNewable: return base.PyNew(class) default: return nil, fmt.Errorf( "Reconstructor: unprocessable arguments: %#v", args) } } gopickle-0.3.0/types/set.go000066400000000000000000000024721453132224200156000ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types // SetAdder is implemented by any value that exhibits a set-like behaviour, // allowing arbitrary values to be added. type SetAdder interface { Add(v interface{}) } // Set represents a Python "set" (builtin type). // // It is implemented in Go as a map with empty struct values; the actual set // of generic "interface{}" items is thus represented by all the keys. type Set map[interface{}]setEmptyStruct var _ SetAdder = &Set{} type setEmptyStruct struct{} // NewSet makes and returns a new empty Set. 
func NewSet() *Set { s := make(Set, 4) return &s } // NewSetFromSlice makes and returns a new Set initialized with the elements // of the given slice. func NewSetFromSlice(slice []interface{}) *Set { s := make(Set, len(slice)) for _, item := range slice { s[item] = setEmptyStruct{} } return &s } // Len returns the length of the Set. func (s *Set) Len() int { return len(*s) } // Add adds one element to the Set. func (s *Set) Add(v interface{}) { (*s)[v] = setEmptyStruct{} } // Has returns whether the given value is present in the Set (true) // or not (false). func (s *Set) Has(v interface{}) bool { _, ok := (*s)[v] return ok } gopickle-0.3.0/types/tuple.go000066400000000000000000000013201453132224200161250ustar00rootroot00000000000000// Copyright 2020 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package types import ( "fmt" "strings" ) type Tuple []interface{} func NewTupleFromSlice(slice []interface{}) *Tuple { t := Tuple(slice) return &t } func (t *Tuple) Get(i int) interface{} { return (*t)[i] } func (t *Tuple) Len() int { return len(*t) } func (t *Tuple) String() string { if t == nil { return "nil" } o := new(strings.Builder) o.WriteString("(") for i, v := range *t { if i > 0 { o.WriteString(", ") } fmt.Fprintf(o, "%v", v) } if t.Len() == 1 { o.WriteString(",") } o.WriteString(")") return o.String() } gopickle-0.3.0/types/tuple_test.go000066400000000000000000000011211453132224200171630ustar00rootroot00000000000000// Copyright 2023 NLP Odyssey Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
package types import ( "bytes" "fmt" "testing" ) func TestTupleStringer(t *testing.T) { tuple := func(sli ...interface{}) *Tuple { return NewTupleFromSlice(sli) } tup := tuple(tuple(), tuple(1), tuple(2, 3), tuple(tuple(4, 5), 6)) buf := new(bytes.Buffer) fmt.Fprintf(buf, "%v", tup) got := buf.String() want := "((), (1,), (2, 3), ((4, 5), 6))" if got != want { t.Fatalf("got= %q\nwant=%q", got, want) } }