json-patch-5.2.0/.github/workflows/go.yml

name: Go

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2
    - name: Set up Go
      uses: actions/setup-go@v2
      with:
        go-version: 1.15
    - name: Setup
      run: go get
    - name: Test
      run: go test -v ./...

json-patch-5.2.0/LICENSE

Copyright (c) 2014, Evan Phoenix
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the Evan Phoenix nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

json-patch-5.2.0/README.md

# JSON-Patch

`jsonpatch` is a library which provides functionality for both applying [RFC6902 JSON patches](http://tools.ietf.org/html/rfc6902) against documents, as well as for calculating & applying [RFC7396 JSON merge patches](https://tools.ietf.org/html/rfc7396).

[![GoDoc](https://godoc.org/github.com/evanphx/json-patch?status.svg)](http://godoc.org/github.com/evanphx/json-patch)
[![Build Status](https://travis-ci.org/evanphx/json-patch.svg?branch=master)](https://travis-ci.org/evanphx/json-patch)
[![Report Card](https://goreportcard.com/badge/github.com/evanphx/json-patch)](https://goreportcard.com/report/github.com/evanphx/json-patch)

# Get It!

**Latest and greatest**:
```bash
go get -u github.com/evanphx/json-patch/v5
```

**Stable Versions**:
* Version 5: `go get -u gopkg.in/evanphx/json-patch.v5`
* Version 4: `go get -u gopkg.in/evanphx/json-patch.v4`

(previous versions below `v3` are unavailable)

# Use It!
* [Create and apply a merge patch](#create-and-apply-a-merge-patch)
* [Create and apply a JSON Patch](#create-and-apply-a-json-patch)
* [Comparing JSON documents](#comparing-json-documents)
* [Combine merge patches](#combine-merge-patches)

# Configuration

* There is a global configuration variable `jsonpatch.SupportNegativeIndices`. This defaults to `true` and enables the non-standard practice of allowing negative indices to mean indices starting at the end of an array. This functionality can be disabled by setting `jsonpatch.SupportNegativeIndices = false`.

* There is a global configuration variable `jsonpatch.AccumulatedCopySizeLimit`, which limits the total size increase in bytes caused by "copy" operations in a patch. It defaults to 0, which means there is no limit.

These global variables control the behavior of `jsonpatch.Apply`.

An alternative to `jsonpatch.Apply` is `jsonpatch.ApplyWithOptions`, whose behavior is controlled by an `options` parameter of type `*jsonpatch.ApplyOptions`.

The `jsonpatch.ApplyOptions` structure includes the configuration options above and adds two new options: `AllowMissingPathOnRemove` and `EnsurePathExistsOnAdd`.

When `AllowMissingPathOnRemove` is set to `true`, `jsonpatch.ApplyWithOptions` will ignore `remove` operations whose `path` points to a non-existent location in the JSON document. `AllowMissingPathOnRemove` defaults to `false`, which will lead to `jsonpatch.ApplyWithOptions` returning an error when hitting a missing `path` on `remove`.

When `EnsurePathExistsOnAdd` is set to `true`, `jsonpatch.ApplyWithOptions` will make sure that `add` operations produce all the `path` elements that are missing from the target object.

Use `jsonpatch.NewApplyOptions` to create an instance of `jsonpatch.ApplyOptions` whose values are populated from the global configuration variables.

## Create and apply a merge patch

Given both an original JSON document and a modified JSON document, you can create a [Merge Patch](https://tools.ietf.org/html/rfc7396) document.

It can describe the changes needed to convert from the original to the modified JSON document.

Once you have a merge patch, you can apply it to other JSON documents using the `jsonpatch.MergePatch(document, patch)` function.

```go
package main

import (
	"fmt"

	jsonpatch "github.com/evanphx/json-patch"
)

func main() {
	// Let's create a merge patch from these two documents...
	original := []byte(`{"name": "John", "age": 24, "height": 3.21}`)
	target := []byte(`{"name": "Jane", "age": 24}`)

	patch, err := jsonpatch.CreateMergePatch(original, target)
	if err != nil {
		panic(err)
	}

	// Now let's apply the patch against a different JSON document...
	alternative := []byte(`{"name": "Tina", "age": 28, "height": 3.75}`)
	modifiedAlternative, err := jsonpatch.MergePatch(alternative, patch)

	fmt.Printf("patch document: %s\n", patch)
	fmt.Printf("updated alternative doc: %s\n", modifiedAlternative)
}
```

When run, you get the following output:

```bash
$ go run main.go
patch document: {"height":null,"name":"Jane"}
updated alternative doc: {"age":28,"name":"Jane"}
```

## Create and apply a JSON Patch

You can create patch objects using `DecodePatch([]byte)`, which can then be applied against JSON documents.

The following is an example of creating a patch from two operations, and applying it against a JSON document.
```go
package main

import (
	"fmt"

	jsonpatch "github.com/evanphx/json-patch"
)

func main() {
	original := []byte(`{"name": "John", "age": 24, "height": 3.21}`)

	patchJSON := []byte(`[
		{"op": "replace", "path": "/name", "value": "Jane"},
		{"op": "remove", "path": "/height"}
	]`)

	patch, err := jsonpatch.DecodePatch(patchJSON)
	if err != nil {
		panic(err)
	}

	modified, err := patch.Apply(original)
	if err != nil {
		panic(err)
	}

	fmt.Printf("Original document: %s\n", original)
	fmt.Printf("Modified document: %s\n", modified)
}
```

When run, you get the following output:

```bash
$ go run main.go
Original document: {"name": "John", "age": 24, "height": 3.21}
Modified document: {"age":24,"name":"Jane"}
```

## Comparing JSON documents

Due to potential whitespace and ordering differences, one cannot simply compare JSON strings or byte-arrays directly. As such, you can instead use `jsonpatch.Equal(document1, document2)` to determine if two JSON documents are _structurally_ equal. This ignores whitespace differences and key-value ordering.

```go
package main

import (
	"fmt"

	jsonpatch "github.com/evanphx/json-patch"
)

func main() {
	original := []byte(`{"name": "John", "age": 24, "height": 3.21}`)
	similar := []byte(`
		{
			"age": 24,
			"height": 3.21,
			"name": "John"
		}
	`)
	different := []byte(`{"name": "Jane", "age": 20, "height": 3.37}`)

	if jsonpatch.Equal(original, similar) {
		fmt.Println(`"original" is structurally equal to "similar"`)
	}

	if !jsonpatch.Equal(original, different) {
		fmt.Println(`"original" is _not_ structurally equal to "different"`)
	}
}
```

When run, you get the following output:

```bash
$ go run main.go
"original" is structurally equal to "similar"
"original" is _not_ structurally equal to "different"
```

## Combine merge patches

Given two JSON merge patch documents, it is possible to combine them into a single merge patch which can describe both sets of changes. The resulting merge patch can be used such that applying it results in a document structurally similar to the result of merging each merge patch into the document in succession.

```go
package main

import (
	"fmt"

	jsonpatch "github.com/evanphx/json-patch"
)

func main() {
	original := []byte(`{"name": "John", "age": 24, "height": 3.21}`)

	nameAndHeight := []byte(`{"height":null,"name":"Jane"}`)
	ageAndEyes := []byte(`{"age":4.23,"eyes":"blue"}`)

	// Let's combine these merge patch documents...
	combinedPatch, err := jsonpatch.MergeMergePatches(nameAndHeight, ageAndEyes)
	if err != nil {
		panic(err)
	}

	// Apply each patch individually against the original document
	withoutCombinedPatch, err := jsonpatch.MergePatch(original, nameAndHeight)
	if err != nil {
		panic(err)
	}

	withoutCombinedPatch, err = jsonpatch.MergePatch(withoutCombinedPatch, ageAndEyes)
	if err != nil {
		panic(err)
	}

	// Apply the combined patch against the original document
	withCombinedPatch, err := jsonpatch.MergePatch(original, combinedPatch)
	if err != nil {
		panic(err)
	}

	// Do both result in the same thing? They should!
	if jsonpatch.Equal(withCombinedPatch, withoutCombinedPatch) {
		fmt.Println("Both JSON documents are structurally the same!")
	}

	fmt.Printf("combined merge patch: %s", combinedPatch)
}
```

When run, you get the following output:

```bash
$ go run main.go
Both JSON documents are structurally the same!
combined merge patch: {"age":4.23,"eyes":"blue","height":null,"name":"Jane"}
```

# CLI for comparing JSON documents

You can install the command-line program `json-patch`. This program can take multiple JSON patch documents as arguments, and is fed a JSON document from `stdin`.
It will apply the patch(es) against the document and output the modified doc. **patch.1.json** ```json [ {"op": "replace", "path": "/name", "value": "Jane"}, {"op": "remove", "path": "/height"} ] ``` **patch.2.json** ```json [ {"op": "add", "path": "/address", "value": "123 Main St"}, {"op": "replace", "path": "/age", "value": "21"} ] ``` **document.json** ```json { "name": "John", "age": 24, "height": 3.21 } ``` You can then run: ```bash $ go install github.com/evanphx/json-patch/cmd/json-patch $ cat document.json | json-patch -p patch.1.json -p patch.2.json {"address":"123 Main St","age":"21","name":"Jane"} ``` # Help It! Contributions are welcomed! Leave [an issue](https://github.com/evanphx/json-patch/issues) or [create a PR](https://github.com/evanphx/json-patch/compare). Before creating a pull request, we'd ask that you make sure tests are passing and that you have added new tests when applicable. Contributors can run tests using: ```bash go test -cover ./... ``` Builds for pull requests are tested automatically using [TravisCI](https://travis-ci.org/evanphx/json-patch). json-patch-5.2.0/cmd/000077500000000000000000000000001377516326600143475ustar00rootroot00000000000000json-patch-5.2.0/cmd/json-patch/000077500000000000000000000000001377516326600164155ustar00rootroot00000000000000json-patch-5.2.0/cmd/json-patch/file_flag.go000066400000000000000000000013761377516326600206630ustar00rootroot00000000000000package main // Borrowed from Concourse: https://github.com/concourse/atc/blob/master/atccmd/file_flag.go import ( "fmt" "os" "path/filepath" ) // FileFlag is a flag for passing a path to a file on disk. The file is // expected to be a file, not a directory, that actually exists. type FileFlag string // UnmarshalFlag implements go-flag's Unmarshaler interface func (f *FileFlag) UnmarshalFlag(value string) error { stat, err := os.Stat(value) if err != nil { return err } if stat.IsDir() { return fmt.Errorf("path '%s' is a directory, not a file", value) } abs, err := filepath.Abs(value) if err != nil { return err } *f = FileFlag(abs) return nil } // Path is the path to the file func (f FileFlag) Path() string { return string(f) } json-patch-5.2.0/cmd/json-patch/main.go000066400000000000000000000021211377516326600176640ustar00rootroot00000000000000package main import ( "fmt" "io/ioutil" "log" "os" jsonpatch "github.com/evanphx/json-patch" flags "github.com/jessevdk/go-flags" ) type opts struct { PatchFilePaths []FileFlag `long:"patch-file" short:"p" value-name:"PATH" description:"Path to file with one or more operations"` } func main() { var o opts _, err := flags.Parse(&o) if err != nil { log.Fatalf("error: %s\n", err) } patches := make([]jsonpatch.Patch, len(o.PatchFilePaths)) for i, patchFilePath := range o.PatchFilePaths { var bs []byte bs, err = ioutil.ReadFile(patchFilePath.Path()) if err != nil { log.Fatalf("error reading patch file: %s", err) } var patch jsonpatch.Patch patch, err = jsonpatch.DecodePatch(bs) if err != nil { log.Fatalf("error decoding patch file: %s", err) } patches[i] = patch } doc, err := ioutil.ReadAll(os.Stdin) if err != nil { log.Fatalf("error reading from stdin: %s", err) } mdoc := doc for _, patch := range patches { mdoc, err = patch.Apply(mdoc) if err != nil { log.Fatalf("error applying patch: %s", err) } } fmt.Printf("%s", mdoc) } json-patch-5.2.0/errors.go000066400000000000000000000022411377516326600154460ustar00rootroot00000000000000package jsonpatch import "fmt" // AccumulatedCopySizeError is an error type returned when the accumulated size // increase 
caused by copy operations in a patch operation has exceeded the // limit. type AccumulatedCopySizeError struct { limit int64 accumulated int64 } // NewAccumulatedCopySizeError returns an AccumulatedCopySizeError. func NewAccumulatedCopySizeError(l, a int64) *AccumulatedCopySizeError { return &AccumulatedCopySizeError{limit: l, accumulated: a} } // Error implements the error interface. func (a *AccumulatedCopySizeError) Error() string { return fmt.Sprintf("Unable to complete the copy, the accumulated size increase of copy is %d, exceeding the limit %d", a.accumulated, a.limit) } // ArraySizeError is an error type returned when the array size has exceeded // the limit. type ArraySizeError struct { limit int size int } // NewArraySizeError returns an ArraySizeError. func NewArraySizeError(l, s int) *ArraySizeError { return &ArraySizeError{limit: l, size: s} } // Error implements the error interface. func (a *ArraySizeError) Error() string { return fmt.Sprintf("Unable to create array of size %d, limit is %d", a.size, a.limit) } json-patch-5.2.0/go.mod000066400000000000000000000001761377516326600147160ustar00rootroot00000000000000module github.com/evanphx/json-patch go 1.14 require ( github.com/jessevdk/go-flags v1.4.0 github.com/pkg/errors v0.9.1 ) json-patch-5.2.0/go.sum000066400000000000000000000005201377516326600147340ustar00rootroot00000000000000github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= json-patch-5.2.0/merge.go000066400000000000000000000211521377516326600152330ustar00rootroot00000000000000package jsonpatch import ( "bytes" "encoding/json" "fmt" "reflect" ) func merge(cur, patch *lazyNode, mergeMerge bool) *lazyNode { curDoc, err := cur.intoDoc() if err != nil { pruneNulls(patch) return patch } patchDoc, err := patch.intoDoc() if err != nil { return patch } mergeDocs(curDoc, patchDoc, mergeMerge) return cur } func mergeDocs(doc, patch *partialDoc, mergeMerge bool) { for k, v := range *patch { if v == nil { if mergeMerge { (*doc)[k] = nil } else { delete(*doc, k) } } else { cur, ok := (*doc)[k] if !ok || cur == nil { pruneNulls(v) (*doc)[k] = v } else { (*doc)[k] = merge(cur, v, mergeMerge) } } } } func pruneNulls(n *lazyNode) { sub, err := n.intoDoc() if err == nil { pruneDocNulls(sub) } else { ary, err := n.intoAry() if err == nil { pruneAryNulls(ary) } } } func pruneDocNulls(doc *partialDoc) *partialDoc { for k, v := range *doc { if v == nil { delete(*doc, k) } else { pruneNulls(v) } } return doc } func pruneAryNulls(ary *partialArray) *partialArray { newAry := []*lazyNode{} for _, v := range *ary { if v != nil { pruneNulls(v) newAry = append(newAry, v) } } *ary = newAry return ary } var ErrBadJSONDoc = fmt.Errorf("Invalid JSON Document") var ErrBadJSONPatch = fmt.Errorf("Invalid JSON Patch") var errBadMergeTypes = fmt.Errorf("Mismatched JSON Documents") // MergeMergePatches merges two merge patches together, such that // applying this resulting merged merge patch to a document yields the same // as merging each merge patch to the document in succession. func MergeMergePatches(patch1Data, patch2Data []byte) ([]byte, error) { return doMergePatch(patch1Data, patch2Data, true) } // MergePatch merges the patchData into the docData. 
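// A null value in the patch deletes the corresponding key from the document,
// following RFC 7396 merge patch semantics.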
func MergePatch(docData, patchData []byte) ([]byte, error) { return doMergePatch(docData, patchData, false) } func doMergePatch(docData, patchData []byte, mergeMerge bool) ([]byte, error) { doc := &partialDoc{} docErr := json.Unmarshal(docData, doc) patch := &partialDoc{} patchErr := json.Unmarshal(patchData, patch) if _, ok := docErr.(*json.SyntaxError); ok { return nil, ErrBadJSONDoc } if _, ok := patchErr.(*json.SyntaxError); ok { return nil, ErrBadJSONPatch } if docErr == nil && *doc == nil { return nil, ErrBadJSONDoc } if patchErr == nil && *patch == nil { return nil, ErrBadJSONPatch } if docErr != nil || patchErr != nil { // Not an error, just not a doc, so we turn straight into the patch if patchErr == nil { if mergeMerge { doc = patch } else { doc = pruneDocNulls(patch) } } else { patchAry := &partialArray{} patchErr = json.Unmarshal(patchData, patchAry) if patchErr != nil { return nil, ErrBadJSONPatch } pruneAryNulls(patchAry) out, patchErr := json.Marshal(patchAry) if patchErr != nil { return nil, ErrBadJSONPatch } return out, nil } } else { mergeDocs(doc, patch, mergeMerge) } return json.Marshal(doc) } // resemblesJSONArray indicates whether the byte-slice "appears" to be // a JSON array or not. // False-positives are possible, as this function does not check the internal // structure of the array. It only checks that the outer syntax is present and // correct. func resemblesJSONArray(input []byte) bool { input = bytes.TrimSpace(input) hasPrefix := bytes.HasPrefix(input, []byte("[")) hasSuffix := bytes.HasSuffix(input, []byte("]")) return hasPrefix && hasSuffix } // CreateMergePatch will return a merge patch document capable of converting // the original document(s) to the modified document(s). // The parameters can be bytes of either two JSON Documents, or two arrays of // JSON documents. // The merge patch returned follows the specification defined at http://tools.ietf.org/html/draft-ietf-appsawg-json-merge-patch-07 func CreateMergePatch(originalJSON, modifiedJSON []byte) ([]byte, error) { originalResemblesArray := resemblesJSONArray(originalJSON) modifiedResemblesArray := resemblesJSONArray(modifiedJSON) // Do both byte-slices seem like JSON arrays? if originalResemblesArray && modifiedResemblesArray { return createArrayMergePatch(originalJSON, modifiedJSON) } // Are both byte-slices are not arrays? Then they are likely JSON objects... if !originalResemblesArray && !modifiedResemblesArray { return createObjectMergePatch(originalJSON, modifiedJSON) } // None of the above? Then return an error because of mismatched types. return nil, errBadMergeTypes } // createObjectMergePatch will return a merge-patch document capable of // converting the original document to the modified document. func createObjectMergePatch(originalJSON, modifiedJSON []byte) ([]byte, error) { originalDoc := map[string]interface{}{} modifiedDoc := map[string]interface{}{} err := json.Unmarshal(originalJSON, &originalDoc) if err != nil { return nil, ErrBadJSONDoc } err = json.Unmarshal(modifiedJSON, &modifiedDoc) if err != nil { return nil, ErrBadJSONDoc } dest, err := getDiff(originalDoc, modifiedDoc) if err != nil { return nil, err } return json.Marshal(dest) } // createArrayMergePatch will return an array of merge-patch documents capable // of converting the original document to the modified document for each // pair of JSON documents provided in the arrays. // Arrays of mismatched sizes will result in an error. 
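// The two top-level arrays are walked in parallel and each element pair is
// diffed with createObjectMergePatch, so every element must itself be a JSON object.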
func createArrayMergePatch(originalJSON, modifiedJSON []byte) ([]byte, error) { originalDocs := []json.RawMessage{} modifiedDocs := []json.RawMessage{} err := json.Unmarshal(originalJSON, &originalDocs) if err != nil { return nil, ErrBadJSONDoc } err = json.Unmarshal(modifiedJSON, &modifiedDocs) if err != nil { return nil, ErrBadJSONDoc } total := len(originalDocs) if len(modifiedDocs) != total { return nil, ErrBadJSONDoc } result := []json.RawMessage{} for i := 0; i < len(originalDocs); i++ { original := originalDocs[i] modified := modifiedDocs[i] patch, err := createObjectMergePatch(original, modified) if err != nil { return nil, err } result = append(result, json.RawMessage(patch)) } return json.Marshal(result) } // Returns true if the array matches (must be json types). // As is idiomatic for go, an empty array is not the same as a nil array. func matchesArray(a, b []interface{}) bool { if len(a) != len(b) { return false } if (a == nil && b != nil) || (a != nil && b == nil) { return false } for i := range a { if !matchesValue(a[i], b[i]) { return false } } return true } // Returns true if the values matches (must be json types) // The types of the values must match, otherwise it will always return false // If two map[string]interface{} are given, all elements must match. func matchesValue(av, bv interface{}) bool { if reflect.TypeOf(av) != reflect.TypeOf(bv) { return false } switch at := av.(type) { case string: bt := bv.(string) if bt == at { return true } case float64: bt := bv.(float64) if bt == at { return true } case bool: bt := bv.(bool) if bt == at { return true } case nil: // Both nil, fine. return true case map[string]interface{}: bt := bv.(map[string]interface{}) if len(bt) != len(at) { return false } for key := range bt { av, aOK := at[key] bv, bOK := bt[key] if aOK != bOK { return false } if !matchesValue(av, bv) { return false } } return true case []interface{}: bt := bv.([]interface{}) return matchesArray(at, bt) } return false } // getDiff returns the (recursive) difference between a and b as a map[string]interface{}. func getDiff(a, b map[string]interface{}) (map[string]interface{}, error) { into := map[string]interface{}{} for key, bv := range b { av, ok := a[key] // value was added if !ok { into[key] = bv continue } // If types have changed, replace completely if reflect.TypeOf(av) != reflect.TypeOf(bv) { into[key] = bv continue } // Types are the same, compare values switch at := av.(type) { case map[string]interface{}: bt := bv.(map[string]interface{}) dst := make(map[string]interface{}, len(bt)) dst, err := getDiff(at, bt) if err != nil { return nil, err } if len(dst) > 0 { into[key] = dst } case string, float64, bool: if !matchesValue(av, bv) { into[key] = bv } case []interface{}: bt := bv.([]interface{}) if !matchesArray(at, bt) { into[key] = bv } case nil: switch bv.(type) { case nil: // Both nil, fine. 
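// The original value is null but the modified value is not, so record the
// modified value as the replacement.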
default: into[key] = bv } default: panic(fmt.Sprintf("Unknown type:%T in key %s", av, key)) } } // Now add all deleted values as nil for key := range a { _, found := b[key] if !found { into[key] = nil } } return into, nil } json-patch-5.2.0/merge_test.go000066400000000000000000000445621377516326600163040ustar00rootroot00000000000000package jsonpatch import ( "fmt" "strings" "testing" ) func mergePatch(doc, patch string) string { out, err := MergePatch([]byte(doc), []byte(patch)) if err != nil { panic(err) } return string(out) } func TestMergePatchReplaceKey(t *testing.T) { doc := `{ "title": "hello" }` pat := `{ "title": "goodbye" }` res := mergePatch(doc, pat) if !compareJSON(pat, res) { t.Fatalf("Key was not replaced") } } func TestMergePatchIgnoresOtherValues(t *testing.T) { doc := `{ "title": "hello", "age": 18 }` pat := `{ "title": "goodbye" }` res := mergePatch(doc, pat) exp := `{ "title": "goodbye", "age": 18 }` if !compareJSON(exp, res) { t.Fatalf("Key was not replaced") } } func TestMergePatchNilDoc(t *testing.T) { doc := `{ "title": null }` pat := `{ "title": {"foo": "bar"} }` res := mergePatch(doc, pat) exp := `{ "title": {"foo": "bar"} }` if !compareJSON(exp, res) { t.Fatalf("Key was not replaced") } } func TestMergePatchRecursesIntoObjects(t *testing.T) { doc := `{ "person": { "title": "hello", "age": 18 } }` pat := `{ "person": { "title": "goodbye" } }` res := mergePatch(doc, pat) exp := `{ "person": { "title": "goodbye", "age": 18 } }` if !compareJSON(exp, res) { t.Fatalf("Key was not replaced") } } type nonObjectCases struct { doc, pat, res string } func TestMergePatchReplacesNonObjectsWholesale(t *testing.T) { a1 := `[1]` a2 := `[2]` o1 := `{ "a": 1 }` o2 := `{ "a": 2 }` o3 := `{ "a": 1, "b": 1 }` o4 := `{ "a": 2, "b": 1 }` cases := []nonObjectCases{ {a1, a2, a2}, {o1, a2, a2}, {a1, o1, o1}, {o3, o2, o4}, } for _, c := range cases { act := mergePatch(c.doc, c.pat) if !compareJSON(c.res, act) { t.Errorf("whole object replacement failed") } } } func TestMergePatchReturnsErrorOnBadJSON(t *testing.T) { _, err := MergePatch([]byte(`[[[[`), []byte(`1`)) if err == nil { t.Errorf("Did not return an error for bad json: %s", err) } _, err = MergePatch([]byte(`1`), []byte(`[[[[`)) if err == nil { t.Errorf("Did not return an error for bad json: %s", err) } } func TestMergePatchReturnsEmptyArrayOnEmptyArray(t *testing.T) { doc := `{ "array": ["one", "two"] }` pat := `{ "array": [] }` exp := `{ "array": [] }` res, err := MergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Emtpy array did not return not return as empty array") } } var rfcTests = []struct { target string patch string expected string }{ // test cases from https://tools.ietf.org/html/rfc7386#appendix-A {target: `{"a":"b"}`, patch: `{"a":"c"}`, expected: `{"a":"c"}`}, {target: `{"a":"b"}`, patch: `{"b":"c"}`, expected: `{"a":"b","b":"c"}`}, {target: `{"a":"b"}`, patch: `{"a":null}`, expected: `{}`}, {target: `{"a":"b","b":"c"}`, patch: `{"a":null}`, expected: `{"b":"c"}`}, {target: `{"a":["b"]}`, patch: `{"a":"c"}`, expected: `{"a":"c"}`}, {target: `{"a":"c"}`, patch: `{"a":["b"]}`, expected: `{"a":["b"]}`}, {target: `{"a":{"b": "c"}}`, patch: `{"a": {"b": "d","c": null}}`, expected: `{"a":{"b":"d"}}`}, {target: `{"a":[{"b":"c"}]}`, patch: `{"a":[1]}`, expected: `{"a":[1]}`}, {target: `["a","b"]`, patch: `["c","d"]`, expected: `["c","d"]`}, {target: `{"a":"b"}`, patch: `["c"]`, expected: `["c"]`}, // {target: 
`{"a":"foo"}`, patch: `null`, expected: `null`}, // {target: `{"a":"foo"}`, patch: `"bar"`, expected: `"bar"`}, {target: `{"e":null}`, patch: `{"a":1}`, expected: `{"a":1,"e":null}`}, {target: `[1,2]`, patch: `{"a":"b","c":null}`, expected: `{"a":"b"}`}, {target: `{}`, patch: `{"a":{"bb":{"ccc":null}}}`, expected: `{"a":{"bb":{}}}`}, } func TestMergePatchRFCCases(t *testing.T) { for i, c := range rfcTests { out := mergePatch(c.target, c.patch) if !compareJSON(out, c.expected) { t.Errorf("case[%d], patch '%s' did not apply properly to '%s'. expected:\n'%s'\ngot:\n'%s'", i, c.patch, c.target, c.expected, out) } } } var rfcFailTests = ` {"a":"foo"} | null {"a":"foo"} | "bar" ` func TestMergePatchFailRFCCases(t *testing.T) { tests := strings.Split(rfcFailTests, "\n") for _, c := range tests { if strings.TrimSpace(c) == "" { continue } parts := strings.SplitN(c, "|", 2) doc := strings.TrimSpace(parts[0]) pat := strings.TrimSpace(parts[1]) out, err := MergePatch([]byte(doc), []byte(pat)) if err != ErrBadJSONPatch { t.Errorf("error not returned properly: %s, %s", err, string(out)) } } } func TestResembleJSONArray(t *testing.T) { testCases := []struct { input []byte expected bool }{ // Failure cases {input: []byte(``), expected: false}, {input: []byte(`not an array`), expected: false}, {input: []byte(`{"foo": "bar"}`), expected: false}, {input: []byte(`{"fizz": ["buzz"]}`), expected: false}, {input: []byte(`[bad suffix`), expected: false}, {input: []byte(`bad prefix]`), expected: false}, {input: []byte(`][`), expected: false}, // Valid cases {input: []byte(`[]`), expected: true}, {input: []byte(`["foo", "bar"]`), expected: true}, {input: []byte(`[["foo", "bar"]]`), expected: true}, {input: []byte(`[not valid syntax]`), expected: true}, // Valid cases with whitespace {input: []byte(` []`), expected: true}, {input: []byte(`[] `), expected: true}, {input: []byte(` [] `), expected: true}, {input: []byte(` [ ] `), expected: true}, {input: []byte("\t[]"), expected: true}, {input: []byte("[]\n"), expected: true}, {input: []byte("\n\t\r[]"), expected: true}, } for _, test := range testCases { result := resemblesJSONArray(test.input) if result != test.expected { t.Errorf( `expected "%t" but received "%t" for case: "%s"`, test.expected, result, string(test.input), ) } } } func TestCreateMergePatchReplaceKey(t *testing.T) { doc := `{ "title": "hello", "nested": {"one": 1, "two": 2} }` pat := `{ "title": "goodbye", "nested": {"one": 2, "two": 2} }` exp := `{ "title": "goodbye", "nested": {"one": 2} }` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Key was not replaced") } } func TestCreateMergePatchGetArray(t *testing.T) { doc := `{ "title": "hello", "array": ["one", "two"], "notmatch": [1, 2, 3] }` pat := `{ "title": "hello", "array": ["one", "two", "three"], "notmatch": [1, 2, 3] }` exp := `{ "array": ["one", "two", "three"] }` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Array was not added") } } func TestCreateMergePatchGetObjArray(t *testing.T) { doc := `{ "title": "hello", "array": [{"banana": true}, {"evil": false}], "notmatch": [{"one":1}, {"two":2}, {"three":3}] }` pat := `{ "title": "hello", "array": [{"banana": false}, {"evil": true}], "notmatch": [{"one":1}, {"two":2}, {"three":3}] }` exp := `{ "array": [{"banana": false}, {"evil": true}] }` res, 
err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Object array was not added") } } func TestCreateMergePatchDeleteKey(t *testing.T) { doc := `{ "title": "hello", "nested": {"one": 1, "two": 2} }` pat := `{ "title": "hello", "nested": {"one": 1} }` exp := `{"nested":{"two":null}}` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } // We cannot use "compareJSON", since Equals does not report a difference if the value is null if exp != string(res) { t.Fatalf("Key was not removed") } } func TestCreateMergePatchEmptyArray(t *testing.T) { doc := `{ "array": null }` pat := `{ "array": [] }` exp := `{"array":[]}` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } // We cannot use "compareJSON", since Equals does not report a difference if the value is null if exp != string(res) { t.Fatalf("Key was not removed") } } func TestCreateMergePatchNil(t *testing.T) { doc := `{ "title": "hello", "nested": {"one": 1, "two": [{"one":null}, {"two":null}, {"three":null}]} }` pat := doc exp := `{}` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Object array was not added") } } func TestCreateMergePatchObjArray(t *testing.T) { doc := `{ "array": [ {"a": {"b": 2}}, {"a": {"b": 3}} ]}` exp := `{}` res, err := CreateMergePatch([]byte(doc), []byte(doc)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } // We cannot use "compareJSON", since Equals does not report a difference if the value is null if exp != string(res) { t.Fatalf("Array was not empty, was " + string(res)) } } func TestCreateMergePatchSameOuterArray(t *testing.T) { doc := `[{"foo": "bar"}]` pat := doc exp := `[{}]` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Outer array was not unmodified") } } func TestCreateMergePatchModifiedOuterArray(t *testing.T) { doc := `[{"name": "John"}, {"name": "Will"}]` pat := `[{"name": "Jane"}, {"name": "Will"}]` exp := `[{"name": "Jane"}, {}]` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Expected %s but received %s", exp, res) } } func TestCreateMergePatchMismatchedOuterArray(t *testing.T) { doc := `[{"name": "John"}, {"name": "Will"}]` pat := `[{"name": "Jane"}]` _, err := CreateMergePatch([]byte(doc), []byte(pat)) if err == nil { t.Errorf("Expected error due to array length differences but received none") } } func TestCreateMergePatchMismatchedOuterTypes(t *testing.T) { doc := `[{"name": "John"}]` pat := `{"name": "Jane"}` _, err := CreateMergePatch([]byte(doc), []byte(pat)) if err == nil { t.Errorf("Expected error due to mismatched types but received none") } } func TestCreateMergePatchNoDifferences(t *testing.T) { doc := `{ "title": "hello", "nested": {"one": 1, "two": 2} }` pat := doc exp := `{}` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Key was not replaced") } } func TestCreateMergePatchComplexMatch(t *testing.T) { 
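// Diffing a document against itself must produce an empty merge patch.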
doc := `{"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4], "nested": {"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4]} }` empty := `{}` res, err := CreateMergePatch([]byte(doc), []byte(doc)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } // We cannot use "compareJSON", since Equals does not report a difference if the value is null if empty != string(res) { t.Fatalf("Did not get empty result, was:%s", string(res)) } } func TestCreateMergePatchComplexAddAll(t *testing.T) { doc := `{"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4], "nested": {"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4]} }` empty := `{}` res, err := CreateMergePatch([]byte(empty), []byte(doc)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(doc, string(res)) { t.Fatalf("Did not get everything as, it was:\n%s", string(res)) } } // createNestedMap created a series of nested map objects such that the number of // objects is roughly 2^n (precisely, 2^(n+1)-1). func createNestedMap(m map[string]interface{}, depth int, objectCount *int) { if depth == 0 { return } for i := 0; i< 2;i++ { nested := map[string]interface{}{} *objectCount += 1 createNestedMap(nested, depth-1, objectCount) m[fmt.Sprintf("key-%v", i)] = nested } } func TestMatchesValue(t *testing.T) { testcases := []struct { name string a interface{} b interface{} want bool }{ { name: "map empty", a: map[string]interface{}{}, b: map[string]interface{}{}, want: true, }, { name: "map equal keys, equal non-nil value", a: map[string]interface{}{"1": true}, b: map[string]interface{}{"1": true}, want: true, }, { name: "map equal keys, equal nil value", a: map[string]interface{}{"1": nil}, b: map[string]interface{}{"1": nil}, want: true, }, { name: "map different value", a: map[string]interface{}{"1": true}, b: map[string]interface{}{"1": false}, want: false, }, { name: "map different key, matching non-nil value", a: map[string]interface{}{"1": true}, b: map[string]interface{}{"2": true}, want: false, }, { name: "map different key, matching nil value", a: map[string]interface{}{"1": nil}, b: map[string]interface{}{"2": nil}, want: false, }, { name: "map different key, first nil value", a: map[string]interface{}{"1": true}, b: map[string]interface{}{"2": nil}, want: false, }, { name: "map different key, second nil value", a: map[string]interface{}{"1": nil}, b: map[string]interface{}{"2": true}, want: false, }, } for _, tc := range testcases { t.Run(tc.name, func(t *testing.T) { got := matchesValue(tc.a, tc.b) if got != tc.want { t.Fatalf("want %v, got %v", tc.want, got) } }) } } func benchmarkMatchesValueWithDeeplyNestedFields(depth int, b *testing.B) { a := map[string]interface{}{} objCount := 1 createNestedMap(a, depth, &objCount) b.ResetTimer() b.Run(fmt.Sprintf("objectCount=%v", objCount), func(b *testing.B) { for i := 0; i < b.N; i++ { if !matchesValue(a, a) { b.Errorf("Should be equal") } } }) } func BenchmarkMatchesValue1(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(1, b) } func BenchmarkMatchesValue2(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(2, b) } func BenchmarkMatchesValue3(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(3, b) } func BenchmarkMatchesValue4(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(4, b) } func BenchmarkMatchesValue5(b *testing.B) { 
benchmarkMatchesValueWithDeeplyNestedFields(5, b) } func BenchmarkMatchesValue6(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(6, b) } func BenchmarkMatchesValue7(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(7, b) } func BenchmarkMatchesValue8(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(8, b) } func BenchmarkMatchesValue9(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(9, b) } func BenchmarkMatchesValue10(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(10, b) } func TestCreateMergePatchComplexRemoveAll(t *testing.T) { doc := `{"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4], "nested": {"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4]} }` exp := `{"a":null,"f":null,"hello":null,"i":null,"n":null,"nested":null,"pi":null,"t":null}` empty := `{}` res, err := CreateMergePatch([]byte(doc), []byte(empty)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if exp != string(res) { t.Fatalf("Did not get result, was:%s", string(res)) } // FIXME: Crashes if using compareJSON like this: /* if !compareJSON(doc, string(res)) { t.Fatalf("Did not get everything as, it was:\n%s", string(res)) } */ } func TestCreateMergePatchObjectWithInnerArray(t *testing.T) { stateString := `{ "OuterArray": [ { "InnerArray": [ { "StringAttr": "abc123" } ], "StringAttr": "def456" } ] }` patch, err := CreateMergePatch([]byte(stateString), []byte(stateString)) if err != nil { t.Fatal(err) } if string(patch) != "{}" { t.Fatalf("Patch should have been {} but was: %v", string(patch)) } } func TestCreateMergePatchReplaceKeyNotEscape(t *testing.T) { doc := `{ "title": "hello", "nested": {"title/escaped": 1, "two": 2} }` pat := `{ "title": "goodbye", "nested": {"title/escaped": 2, "two": 2} }` exp := `{ "title": "goodbye", "nested": {"title/escaped": 2} }` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Log(string(res)) t.Fatalf("Key was not replaced") } } func TestMergePatchReplaceKeyNotEscaping(t *testing.T) { doc := `{ "obj": { "title/escaped": "hello" } }` pat := `{ "obj": { "title/escaped": "goodbye" } }` exp := `{ "obj": { "title/escaped": "goodbye" } }` res := mergePatch(doc, pat) if !compareJSON(exp, res) { t.Fatalf("Key was not replaced") } } func TestMergeMergePatches(t *testing.T) { cases := []struct { demonstrates string p1 string p2 string exp string }{ { demonstrates: "simple patches are merged normally", p1: `{"add1": 1}`, p2: `{"add2": 2}`, exp: `{"add1": 1, "add2": 2}`, }, { demonstrates: "nulls are kept", p1: `{"del1": null}`, p2: `{"del2": null}`, exp: `{"del1": null, "del2": null}`, }, { demonstrates: "a key added then deleted is kept deleted", p1: `{"add_then_delete": "atd"}`, p2: `{"add_then_delete": null}`, exp: `{"add_then_delete": null}`, }, { demonstrates: "a key deleted then added is kept added", p1: `{"delete_then_add": null}`, p2: `{"delete_then_add": "dta"}`, exp: `{"delete_then_add": "dta"}`, }, { demonstrates: "object overrides array", p1: `[]`, p2: `{"del": null, "add": "a"}`, exp: `{"del": null, "add": "a"}`, }, { demonstrates: "array overrides object", p1: `{"del": null, "add": "a"}`, p2: `[]`, exp: `[]`, }, } for _, c := range cases { out, err := MergeMergePatches([]byte(c.p1), []byte(c.p2)) if err != nil { panic(err) } if !compareJSON(c.exp, string(out)) { t.Logf("Error while trying to demonstrate: %v", 
c.demonstrates) t.Logf("Got %v", string(out)) t.Logf("Expected %v", c.exp) t.Fatalf("Merged merge patch is incorrect") } } } json-patch-5.2.0/patch.go000066400000000000000000000355551377516326600152470ustar00rootroot00000000000000package jsonpatch import ( "bytes" "encoding/json" "fmt" "strconv" "strings" "github.com/pkg/errors" ) const ( eRaw = iota eDoc eAry ) var ( // SupportNegativeIndices decides whether to support non-standard practice of // allowing negative indices to mean indices starting at the end of an array. // Default to true. SupportNegativeIndices bool = true // AccumulatedCopySizeLimit limits the total size increase in bytes caused by // "copy" operations in a patch. AccumulatedCopySizeLimit int64 = 0 ) var ( ErrTestFailed = errors.New("test failed") ErrMissing = errors.New("missing value") ErrUnknownType = errors.New("unknown object type") ErrInvalid = errors.New("invalid state detected") ErrInvalidIndex = errors.New("invalid index referenced") ) type lazyNode struct { raw *json.RawMessage doc partialDoc ary partialArray which int } // Operation is a single JSON-Patch step, such as a single 'add' operation. type Operation map[string]*json.RawMessage // Patch is an ordered collection of Operations. type Patch []Operation type partialDoc map[string]*lazyNode type partialArray []*lazyNode type container interface { get(key string) (*lazyNode, error) set(key string, val *lazyNode) error add(key string, val *lazyNode) error remove(key string) error } func newLazyNode(raw *json.RawMessage) *lazyNode { return &lazyNode{raw: raw, doc: nil, ary: nil, which: eRaw} } func (n *lazyNode) MarshalJSON() ([]byte, error) { switch n.which { case eRaw: return json.Marshal(n.raw) case eDoc: return json.Marshal(n.doc) case eAry: return json.Marshal(n.ary) default: return nil, ErrUnknownType } } func (n *lazyNode) UnmarshalJSON(data []byte) error { dest := make(json.RawMessage, len(data)) copy(dest, data) n.raw = &dest n.which = eRaw return nil } func deepCopy(src *lazyNode) (*lazyNode, int, error) { if src == nil { return nil, 0, nil } a, err := src.MarshalJSON() if err != nil { return nil, 0, err } sz := len(a) ra := make(json.RawMessage, sz) copy(ra, a) return newLazyNode(&ra), sz, nil } func (n *lazyNode) intoDoc() (*partialDoc, error) { if n.which == eDoc { return &n.doc, nil } if n.raw == nil { return nil, ErrInvalid } err := json.Unmarshal(*n.raw, &n.doc) if err != nil { return nil, err } n.which = eDoc return &n.doc, nil } func (n *lazyNode) intoAry() (*partialArray, error) { if n.which == eAry { return &n.ary, nil } if n.raw == nil { return nil, ErrInvalid } err := json.Unmarshal(*n.raw, &n.ary) if err != nil { return nil, err } n.which = eAry return &n.ary, nil } func (n *lazyNode) compact() []byte { buf := &bytes.Buffer{} if n.raw == nil { return nil } err := json.Compact(buf, *n.raw) if err != nil { return *n.raw } return buf.Bytes() } func (n *lazyNode) tryDoc() bool { if n.raw == nil { return false } err := json.Unmarshal(*n.raw, &n.doc) if err != nil { return false } n.which = eDoc return true } func (n *lazyNode) tryAry() bool { if n.raw == nil { return false } err := json.Unmarshal(*n.raw, &n.ary) if err != nil { return false } n.which = eAry return true } func (n *lazyNode) equal(o *lazyNode) bool { if n.which == eRaw { if !n.tryDoc() && !n.tryAry() { if o.which != eRaw { return false } return bytes.Equal(n.compact(), o.compact()) } } if n.which == eDoc { if o.which == eRaw { if !o.tryDoc() { return false } } if o.which != eDoc { return false } if len(n.doc) != len(o.doc) { 
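// Differing key counts mean the two documents cannot be equal.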
return false } for k, v := range n.doc { ov, ok := o.doc[k] if !ok { return false } if (v == nil) != (ov == nil) { return false } if v == nil && ov == nil { continue } if !v.equal(ov) { return false } } return true } if o.which != eAry && !o.tryAry() { return false } if len(n.ary) != len(o.ary) { return false } for idx, val := range n.ary { if !val.equal(o.ary[idx]) { return false } } return true } // Kind reads the "op" field of the Operation. func (o Operation) Kind() string { if obj, ok := o["op"]; ok && obj != nil { var op string err := json.Unmarshal(*obj, &op) if err != nil { return "unknown" } return op } return "unknown" } // Path reads the "path" field of the Operation. func (o Operation) Path() (string, error) { if obj, ok := o["path"]; ok && obj != nil { var op string err := json.Unmarshal(*obj, &op) if err != nil { return "unknown", err } return op, nil } return "unknown", errors.Wrapf(ErrMissing, "operation missing path field") } // From reads the "from" field of the Operation. func (o Operation) From() (string, error) { if obj, ok := o["from"]; ok && obj != nil { var op string err := json.Unmarshal(*obj, &op) if err != nil { return "unknown", err } return op, nil } return "unknown", errors.Wrapf(ErrMissing, "operation, missing from field") } func (o Operation) value() *lazyNode { if obj, ok := o["value"]; ok { return newLazyNode(obj) } return nil } // ValueInterface decodes the operation value into an interface. func (o Operation) ValueInterface() (interface{}, error) { if obj, ok := o["value"]; ok && obj != nil { var v interface{} err := json.Unmarshal(*obj, &v) if err != nil { return nil, err } return v, nil } return nil, errors.Wrapf(ErrMissing, "operation, missing value field") } func isArray(buf []byte) bool { Loop: for _, c := range buf { switch c { case ' ': case '\n': case '\t': continue case '[': return true default: break Loop } } return false } func findObject(pd *container, path string) (container, string) { doc := *pd split := strings.Split(path, "/") if len(split) < 2 { return nil, "" } parts := split[1 : len(split)-1] key := split[len(split)-1] var err error for _, part := range parts { next, ok := doc.get(decodePatchKey(part)) if next == nil || ok != nil { return nil, "" } if isArray(*next.raw) { doc, err = next.intoAry() if err != nil { return nil, "" } } else { doc, err = next.intoDoc() if err != nil { return nil, "" } } } return doc, decodePatchKey(key) } func (d *partialDoc) set(key string, val *lazyNode) error { (*d)[key] = val return nil } func (d *partialDoc) add(key string, val *lazyNode) error { (*d)[key] = val return nil } func (d *partialDoc) get(key string) (*lazyNode, error) { return (*d)[key], nil } func (d *partialDoc) remove(key string) error { _, ok := (*d)[key] if !ok { return errors.Wrapf(ErrMissing, "Unable to remove nonexistent key: %s", key) } delete(*d, key) return nil } // set should only be used to implement the "replace" operation, so "key" must // be an already existing index in "d". 
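// The key is parsed as a plain decimal index and assigned directly; unlike
// add, no bounds checking or negative-index handling is performed here.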
func (d *partialArray) set(key string, val *lazyNode) error { idx, err := strconv.Atoi(key) if err != nil { return err } (*d)[idx] = val return nil } func (d *partialArray) add(key string, val *lazyNode) error { if key == "-" { *d = append(*d, val) return nil } idx, err := strconv.Atoi(key) if err != nil { return errors.Wrapf(err, "value was not a proper array index: '%s'", key) } sz := len(*d) + 1 ary := make([]*lazyNode, sz) cur := *d if idx >= len(ary) { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < 0 { if !SupportNegativeIndices { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < -len(ary) { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } idx += len(ary) } copy(ary[0:idx], cur[0:idx]) ary[idx] = val copy(ary[idx+1:], cur[idx:]) *d = ary return nil } func (d *partialArray) get(key string) (*lazyNode, error) { idx, err := strconv.Atoi(key) if err != nil { return nil, err } if idx >= len(*d) { return nil, errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } return (*d)[idx], nil } func (d *partialArray) remove(key string) error { idx, err := strconv.Atoi(key) if err != nil { return err } cur := *d if idx >= len(cur) { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < 0 { if !SupportNegativeIndices { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < -len(cur) { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } idx += len(cur) } ary := make([]*lazyNode, len(cur)-1) copy(ary[0:idx], cur[0:idx]) copy(ary[idx:], cur[idx+1:]) *d = ary return nil } func (p Patch) add(doc *container, op Operation) error { path, err := op.Path() if err != nil { return errors.Wrapf(ErrMissing, "add operation failed to decode path") } con, key := findObject(doc, path) if con == nil { return errors.Wrapf(ErrMissing, "add operation does not apply: doc is missing path: \"%s\"", path) } err = con.add(key, op.value()) if err != nil { return errors.Wrapf(err, "error in add for path: '%s'", path) } return nil } func (p Patch) remove(doc *container, op Operation) error { path, err := op.Path() if err != nil { return errors.Wrapf(ErrMissing, "remove operation failed to decode path") } con, key := findObject(doc, path) if con == nil { return errors.Wrapf(ErrMissing, "remove operation does not apply: doc is missing path: \"%s\"", path) } err = con.remove(key) if err != nil { return errors.Wrapf(err, "error in remove for path: '%s'", path) } return nil } func (p Patch) replace(doc *container, op Operation) error { path, err := op.Path() if err != nil { return errors.Wrapf(err, "replace operation failed to decode path") } con, key := findObject(doc, path) if con == nil { return errors.Wrapf(ErrMissing, "replace operation does not apply: doc is missing path: %s", path) } _, ok := con.get(key) if ok != nil { return errors.Wrapf(ErrMissing, "replace operation does not apply: doc is missing key: %s", path) } err = con.set(key, op.value()) if err != nil { return errors.Wrapf(err, "error in remove for path: '%s'", path) } return nil } func (p Patch) move(doc *container, op Operation) error { from, err := op.From() if err != nil { return errors.Wrapf(err, "move operation failed to decode from") } con, key := findObject(doc, from) if con == nil { return errors.Wrapf(ErrMissing, "move operation does not apply: doc is missing from path: %s", from) } val, err := con.get(key) if err != nil 
{ return errors.Wrapf(err, "error in move for path: '%s'", key) } err = con.remove(key) if err != nil { return errors.Wrapf(err, "error in move for path: '%s'", key) } path, err := op.Path() if err != nil { return errors.Wrapf(err, "move operation failed to decode path") } con, key = findObject(doc, path) if con == nil { return errors.Wrapf(ErrMissing, "move operation does not apply: doc is missing destination path: %s", path) } err = con.add(key, val) if err != nil { return errors.Wrapf(err, "error in move for path: '%s'", path) } return nil } func (p Patch) test(doc *container, op Operation) error { path, err := op.Path() if err != nil { return errors.Wrapf(err, "test operation failed to decode path") } con, key := findObject(doc, path) if con == nil { return errors.Wrapf(ErrMissing, "test operation does not apply: is missing path: %s", path) } val, err := con.get(key) if err != nil { return errors.Wrapf(err, "error in test for path: '%s'", path) } if val == nil { if op.value().raw == nil { return nil } return errors.Wrapf(ErrTestFailed, "testing value %s failed", path) } else if op.value() == nil { return errors.Wrapf(ErrTestFailed, "testing value %s failed", path) } if val.equal(op.value()) { return nil } return errors.Wrapf(ErrTestFailed, "testing value %s failed", path) } func (p Patch) copy(doc *container, op Operation, accumulatedCopySize *int64) error { from, err := op.From() if err != nil { return errors.Wrapf(err, "copy operation failed to decode from") } con, key := findObject(doc, from) if con == nil { return errors.Wrapf(ErrMissing, "copy operation does not apply: doc is missing from path: %s", from) } val, err := con.get(key) if err != nil { return errors.Wrapf(err, "error in copy for from: '%s'", from) } path, err := op.Path() if err != nil { return errors.Wrapf(ErrMissing, "copy operation failed to decode path") } con, key = findObject(doc, path) if con == nil { return errors.Wrapf(ErrMissing, "copy operation does not apply: doc is missing destination path: %s", path) } valCopy, sz, err := deepCopy(val) if err != nil { return errors.Wrapf(err, "error while performing deep copy") } (*accumulatedCopySize) += int64(sz) if AccumulatedCopySizeLimit > 0 && *accumulatedCopySize > AccumulatedCopySizeLimit { return NewAccumulatedCopySizeError(AccumulatedCopySizeLimit, *accumulatedCopySize) } err = con.add(key, valCopy) if err != nil { return errors.Wrapf(err, "error while adding value during copy") } return nil } // Equal indicates if 2 JSON documents have the same structural equality. func Equal(a, b []byte) bool { ra := make(json.RawMessage, len(a)) copy(ra, a) la := newLazyNode(&ra) rb := make(json.RawMessage, len(b)) copy(rb, b) lb := newLazyNode(&rb) return la.equal(lb) } // DecodePatch decodes the passed JSON document as an RFC 6902 patch. func DecodePatch(buf []byte) (Patch, error) { var p Patch err := json.Unmarshal(buf, &p) if err != nil { return nil, err } return p, nil } // Apply mutates a JSON document according to the patch, and returns the new // document. func (p Patch) Apply(doc []byte) ([]byte, error) { return p.ApplyIndent(doc, "") } // ApplyIndent mutates a JSON document according to the patch, and returns the new // document indented. 
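// An empty indent string produces compact output; Apply calls ApplyIndent
// with an empty indent.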
func (p Patch) ApplyIndent(doc []byte, indent string) ([]byte, error) { var pd container if doc[0] == '[' { pd = &partialArray{} } else { pd = &partialDoc{} } err := json.Unmarshal(doc, pd) if err != nil { return nil, err } err = nil var accumulatedCopySize int64 for _, op := range p { switch op.Kind() { case "add": err = p.add(&pd, op) case "remove": err = p.remove(&pd, op) case "replace": err = p.replace(&pd, op) case "move": err = p.move(&pd, op) case "test": err = p.test(&pd, op) case "copy": err = p.copy(&pd, op, &accumulatedCopySize) default: err = fmt.Errorf("Unexpected kind: %s", op.Kind()) } if err != nil { return nil, err } } if indent != "" { return json.MarshalIndent(pd, "", indent) } return json.Marshal(pd) } // From http://tools.ietf.org/html/rfc6901#section-4 : // // Evaluation of each reference token begins by decoding any escaped // character sequence. This is performed by first transforming any // occurrence of the sequence '~1' to '/', and then transforming any // occurrence of the sequence '~0' to '~'. var ( rfc6901Decoder = strings.NewReplacer("~1", "/", "~0", "~") ) func decodePatchKey(k string) string { return rfc6901Decoder.Replace(k) } json-patch-5.2.0/patch_test.go000066400000000000000000000336311377516326600162770ustar00rootroot00000000000000package jsonpatch import ( "bytes" "encoding/json" "fmt" "reflect" "testing" ) func reformatJSON(j string) string { buf := new(bytes.Buffer) json.Indent(buf, []byte(j), "", " ") return buf.String() } func compareJSON(a, b string) bool { // return Equal([]byte(a), []byte(b)) var objA, objB map[string]interface{} json.Unmarshal([]byte(a), &objA) json.Unmarshal([]byte(b), &objB) // fmt.Printf("Comparing %#v\nagainst %#v\n", objA, objB) return reflect.DeepEqual(objA, objB) } func applyPatch(doc, patch string) (string, error) { obj, err := DecodePatch([]byte(patch)) if err != nil { panic(err) } out, err := obj.Apply([]byte(doc)) if err != nil { return "", err } return string(out), nil } type Case struct { doc, patch, result string } func repeatedA(r int) string { var s string for i := 0; i < r; i++ { s += "A" } return s } var Cases = []Case{ { `{ "foo": "bar"}`, `[ { "op": "add", "path": "/baz", "value": "qux" } ]`, `{ "baz": "qux", "foo": "bar" }`, }, { `{ "foo": [ "bar", "baz" ] }`, `[ { "op": "add", "path": "/foo/1", "value": "qux" } ]`, `{ "foo": [ "bar", "qux", "baz" ] }`, }, { `{ "foo": [ "bar", "baz" ] }`, `[ { "op": "add", "path": "/foo/-1", "value": "qux" } ]`, `{ "foo": [ "bar", "baz", "qux" ] }`, }, { `{ "baz": "qux", "foo": "bar" }`, `[ { "op": "remove", "path": "/baz" } ]`, `{ "foo": "bar" }`, }, { `{ "foo": [ "bar", "qux", "baz" ] }`, `[ { "op": "remove", "path": "/foo/1" } ]`, `{ "foo": [ "bar", "baz" ] }`, }, { `{ "baz": "qux", "foo": "bar" }`, `[ { "op": "replace", "path": "/baz", "value": "boo" } ]`, `{ "baz": "boo", "foo": "bar" }`, }, { `{ "foo": { "bar": "baz", "waldo": "fred" }, "qux": { "corge": "grault" } }`, `[ { "op": "move", "from": "/foo/waldo", "path": "/qux/thud" } ]`, `{ "foo": { "bar": "baz" }, "qux": { "corge": "grault", "thud": "fred" } }`, }, { `{ "foo": [ "all", "grass", "cows", "eat" ] }`, `[ { "op": "move", "from": "/foo/1", "path": "/foo/3" } ]`, `{ "foo": [ "all", "cows", "eat", "grass" ] }`, }, { `{ "foo": [ "all", "grass", "cows", "eat" ] }`, `[ { "op": "move", "from": "/foo/1", "path": "/foo/2" } ]`, `{ "foo": [ "all", "cows", "grass", "eat" ] }`, }, { `{ "foo": "bar" }`, `[ { "op": "add", "path": "/child", "value": { "grandchild": { } } } ]`, `{ "foo": "bar", "child": { "grandchild": { } 
} }`, }, { `{ "foo": ["bar"] }`, `[ { "op": "add", "path": "/foo/-", "value": ["abc", "def"] } ]`, `{ "foo": ["bar", ["abc", "def"]] }`, }, { `{ "foo": "bar", "qux": { "baz": 1, "bar": null } }`, `[ { "op": "remove", "path": "/qux/bar" } ]`, `{ "foo": "bar", "qux": { "baz": 1 } }`, }, { `{ "foo": "bar" }`, `[ { "op": "add", "path": "/baz", "value": null } ]`, `{ "baz": null, "foo": "bar" }`, }, { `{ "foo": ["bar"]}`, `[ { "op": "replace", "path": "/foo/0", "value": "baz"}]`, `{ "foo": ["baz"]}`, }, { `{ "foo": ["bar","baz"]}`, `[ { "op": "replace", "path": "/foo/0", "value": "bum"}]`, `{ "foo": ["bum","baz"]}`, }, { `{ "foo": ["bar","qux","baz"]}`, `[ { "op": "replace", "path": "/foo/1", "value": "bum"}]`, `{ "foo": ["bar", "bum","baz"]}`, }, { `[ {"foo": ["bar","qux","baz"]}]`, `[ { "op": "replace", "path": "/0/foo/0", "value": "bum"}]`, `[ {"foo": ["bum","qux","baz"]}]`, }, { `[ {"foo": ["bar","qux","baz"], "bar": ["qux","baz"]}]`, `[ { "op": "copy", "from": "/0/foo/0", "path": "/0/bar/0"}]`, `[ {"foo": ["bar","qux","baz"], "bar": ["bar", "baz"]}]`, }, { `[ {"foo": ["bar","qux","baz"], "bar": ["qux","baz"]}]`, `[ { "op": "copy", "from": "/0/foo/0", "path": "/0/bar"}]`, `[ {"foo": ["bar","qux","baz"], "bar": ["bar", "qux", "baz"]}]`, }, { `[ { "foo": {"bar": ["qux","baz"]}, "baz": {"qux": "bum"}}]`, `[ { "op": "copy", "from": "/0/foo/bar", "path": "/0/baz/bar"}]`, `[ { "baz": {"bar": ["qux","baz"], "qux":"bum"}, "foo": {"bar": ["qux","baz"]}}]`, }, { `{ "foo": ["bar"]}`, `[{"op": "copy", "path": "/foo/0", "from": "/foo"}]`, `{ "foo": [["bar"], "bar"]}`, }, { `{ "foo": ["bar","qux","baz"]}`, `[ { "op": "remove", "path": "/foo/-2"}]`, `{ "foo": ["bar", "baz"]}`, }, { `{ "foo": []}`, `[ { "op": "add", "path": "/foo/-1", "value": "qux"}]`, `{ "foo": ["qux"]}`, }, { `{ "bar": [{"baz": null}]}`, `[ { "op": "replace", "path": "/bar/0/baz", "value": 1 } ]`, `{ "bar": [{"baz": 1}]}`, }, { `{ "bar": [{"baz": 1}]}`, `[ { "op": "replace", "path": "/bar/0/baz", "value": null } ]`, `{ "bar": [{"baz": null}]}`, }, { `{ "bar": [null]}`, `[ { "op": "replace", "path": "/bar/0", "value": 1 } ]`, `{ "bar": [1]}`, }, { `{ "bar": [1]}`, `[ { "op": "replace", "path": "/bar/0", "value": null } ]`, `{ "bar": [null]}`, }, { fmt.Sprintf(`{ "foo": ["A", %q] }`, repeatedA(48)), // The wrapping quotes around 'A's are included in the copy // size, so each copy operation increases the size by 50 bytes. 
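		// Worked arithmetic for this case: repeatedA(48) is 48 bytes plus the two
		// wrapping quotes, so each copy adds 50 bytes. Two copies accumulate exactly
		// 100 bytes, which does not exceed the AccumulatedCopySizeLimit of 100 that
		// TestAllCases installs via configureGlobals, so the patch applies cleanly.
		// (Compare the corresponding BadCases entry, where 2 * 51 = 102 bytes trips
		// the limit.)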
`[ { "op": "copy", "path": "/foo/-", "from": "/foo/1" }, { "op": "copy", "path": "/foo/-", "from": "/foo/1" }]`, fmt.Sprintf(`{ "foo": ["A", %q, %q, %q] }`, repeatedA(48), repeatedA(48), repeatedA(48)), }, } type BadCase struct { doc, patch string } var MutationTestCases = []BadCase{ { `{ "foo": "bar", "qux": { "baz": 1, "bar": null } }`, `[ { "op": "remove", "path": "/qux/bar" } ]`, }, { `{ "foo": "bar", "qux": { "baz": 1, "bar": null } }`, `[ { "op": "replace", "path": "/qux/baz", "value": null } ]`, }, } var BadCases = []BadCase{ { `{ "foo": "bar" }`, `[ { "op": "add", "path": "/baz/bat", "value": "qux" } ]`, }, { `{ "a": { "b": { "d": 1 } } }`, `[ { "op": "remove", "path": "/a/b/c" } ]`, }, { `{ "a": { "b": { "d": 1 } } }`, `[ { "op": "move", "from": "/a/b/c", "path": "/a/b/e" } ]`, }, { `{ "a": { "b": [1] } }`, `[ { "op": "remove", "path": "/a/b/1" } ]`, }, { `{ "a": { "b": [1] } }`, `[ { "op": "move", "from": "/a/b/1", "path": "/a/b/2" } ]`, }, { `{ "foo": "bar" }`, `[ { "op": "add", "pathz": "/baz", "value": "qux" } ]`, }, { `{ "foo": "bar" }`, `[ { "op": "add", "path": "", "value": "qux" } ]`, }, { `{ "foo": ["bar","baz"]}`, `[ { "op": "replace", "path": "/foo/2", "value": "bum"}]`, }, { `{ "foo": ["bar","baz"]}`, `[ { "op": "add", "path": "/foo/-4", "value": "bum"}]`, }, { `{ "name":{ "foo": "bat", "qux": "bum"}}`, `[ { "op": "replace", "path": "/foo/bar", "value":"baz"}]`, }, { `{ "foo": ["bar"]}`, `[ {"op": "add", "path": "/foo/2", "value": "bum"}]`, }, { `{ "foo": []}`, `[ {"op": "remove", "path": "/foo/-"}]`, }, { `{ "foo": []}`, `[ {"op": "remove", "path": "/foo/-1"}]`, }, { `{ "foo": ["bar"]}`, `[ {"op": "remove", "path": "/foo/-2"}]`, }, { `{}`, `[ {"op":null,"path":""} ]`, }, { `{}`, `[ {"op":"add","path":null} ]`, }, { `{}`, `[ { "op": "copy", "from": null }]`, }, { `{ "foo": ["bar"]}`, `[{"op": "copy", "path": "/foo/6666666666", "from": "/"}]`, }, // Can't copy into an index greater than the size of the array { `{ "foo": ["bar"]}`, `[{"op": "copy", "path": "/foo/2", "from": "/foo/0"}]`, }, // Accumulated copy size cannot exceed AccumulatedCopySizeLimit. { fmt.Sprintf(`{ "foo": ["A", %q] }`, repeatedA(49)), // The wrapping quotes around 'A's are included in the copy // size, so each copy operation increases the size by 51 bytes. `[ { "op": "copy", "path": "/foo/-", "from": "/foo/1" }, { "op": "copy", "path": "/foo/-", "from": "/foo/1" }]`, }, // Can't move into an index greater than or equal to the size of the array { `{ "foo": [ "all", "grass", "cows", "eat" ] }`, `[ { "op": "move", "from": "/foo/1", "path": "/foo/4" } ]`, }, } // This is not thread safe, so we cannot run patch tests in parallel. func configureGlobals(accumulatedCopySizeLimit int64) func() { oldAccumulatedCopySizeLimit := AccumulatedCopySizeLimit AccumulatedCopySizeLimit = accumulatedCopySizeLimit return func() { AccumulatedCopySizeLimit = oldAccumulatedCopySizeLimit } } func TestAllCases(t *testing.T) { defer configureGlobals(int64(100))() for _, c := range Cases { out, err := applyPatch(c.doc, c.patch) if err != nil { t.Errorf("Unable to apply patch: %s", err) } if !compareJSON(out, c.result) { t.Errorf("Patch did not apply. Expected:\n%s\n\nActual:\n%s", reformatJSON(c.result), reformatJSON(out)) } } for _, c := range MutationTestCases { out, err := applyPatch(c.doc, c.patch) if err != nil { t.Errorf("Unable to apply patch: %s", err) } if compareJSON(out, c.doc) { t.Errorf("Patch did not apply. 
Original:\n%s\n\nPatched:\n%s", reformatJSON(c.doc), reformatJSON(out)) } } for _, c := range BadCases { _, err := applyPatch(c.doc, c.patch) if err == nil { t.Errorf("Patch %q should have failed to apply but it did not", c.patch) } } } type TestCase struct { doc, patch string result bool failedPath string } var TestCases = []TestCase{ { `{ "baz": "qux", "foo": [ "a", 2, "c" ] }`, `[ { "op": "test", "path": "/baz", "value": "qux" }, { "op": "test", "path": "/foo/1", "value": 2 } ]`, true, "", }, { `{ "baz": "qux" }`, `[ { "op": "test", "path": "/baz", "value": "bar" } ]`, false, "/baz", }, { `{ "baz": "qux", "foo": ["a", 2, "c"] }`, `[ { "op": "test", "path": "/baz", "value": "qux" }, { "op": "test", "path": "/foo/1", "value": "c" } ]`, false, "/foo/1", }, { `{ "baz": "qux" }`, `[ { "op": "test", "path": "/foo", "value": 42 } ]`, false, "/foo", }, { `{ "baz": "qux" }`, `[ { "op": "test", "path": "/foo", "value": null } ]`, true, "", }, { `{ "foo": null }`, `[ { "op": "test", "path": "/foo", "value": null } ]`, true, "", }, { `{ "foo": {} }`, `[ { "op": "test", "path": "/foo", "value": null } ]`, false, "/foo", }, { `{ "foo": [] }`, `[ { "op": "test", "path": "/foo", "value": null } ]`, false, "/foo", }, { `{ "baz/foo": "qux" }`, `[ { "op": "test", "path": "/baz~1foo", "value": "qux"} ]`, true, "", }, { `{ "foo": [] }`, `[ { "op": "test", "path": "/foo"} ]`, false, "/foo", }, } func TestAllTest(t *testing.T) { for _, c := range TestCases { _, err := applyPatch(c.doc, c.patch) if c.result && err != nil { t.Errorf("Testing failed when it should have passed: %s", err) } else if !c.result && err == nil { t.Errorf("Testing passed when it should have faild: %s", err) } else if !c.result { expected := fmt.Sprintf("testing value %s failed: test failed", c.failedPath) if err.Error() != expected { t.Errorf("Testing failed as expected but invalid message: expected [%s], got [%s]", expected, err) } } } } func TestAdd(t *testing.T) { testCases := []struct { name string key string val lazyNode arr partialArray rejectNegativeIndicies bool err string }{ { name: "should work", key: "0", val: lazyNode{}, arr: partialArray{}, }, { name: "index too large", key: "1", val: lazyNode{}, arr: partialArray{}, err: "Unable to access invalid index: 1: invalid index referenced", }, { name: "negative should work", key: "-1", val: lazyNode{}, arr: partialArray{}, }, { name: "negative too small", key: "-2", val: lazyNode{}, arr: partialArray{}, err: "Unable to access invalid index: -2: invalid index referenced", }, { name: "negative but negative disabled", key: "-1", val: lazyNode{}, arr: partialArray{}, rejectNegativeIndicies: true, err: "Unable to access invalid index: -1: invalid index referenced", }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { SupportNegativeIndices = !tc.rejectNegativeIndicies key := tc.key arr := &tc.arr val := &tc.val err := arr.add(key, val) if err == nil && tc.err != "" { t.Errorf("Expected error but got none! 
%v", tc.err) } else if err != nil && tc.err == "" { t.Errorf("Did not expect error but go: %v", err) } else if err != nil && err.Error() != tc.err { t.Errorf("Expected error %v but got error %v", tc.err, err) } }) } } type EqualityCase struct { name string a, b string equal bool } var EqualityCases = []EqualityCase{ { "ExtraKeyFalse", `{"foo": "bar"}`, `{"foo": "bar", "baz": "qux"}`, false, }, { "StripWhitespaceTrue", `{ "foo": "bar", "baz": "qux" }`, `{"foo": "bar", "baz": "qux"}`, true, }, { "KeysOutOfOrderTrue", `{ "baz": "qux", "foo": "bar" }`, `{"foo": "bar", "baz": "qux"}`, true, }, { "ComparingNullFalse", `{"foo": null}`, `{"foo": "bar"}`, false, }, { "ComparingNullTrue", `{"foo": null}`, `{"foo": null}`, true, }, { "ArrayOutOfOrderFalse", `["foo", "bar", "baz"]`, `["bar", "baz", "foo"]`, false, }, { "ArrayTrue", `["foo", "bar", "baz"]`, `["foo", "bar", "baz"]`, true, }, { "NonStringTypesTrue", `{"int": 6, "bool": true, "float": 7.0, "string": "the_string", "null": null}`, `{"int": 6, "bool": true, "float": 7.0, "string": "the_string", "null": null}`, true, }, { "NestedNullFalse", `{"foo": ["an", "array"], "bar": {"an": "object"}}`, `{"foo": null, "bar": null}`, false, }, { "NullCompareStringFalse", `"foo"`, `null`, false, }, { "NullCompareIntFalse", `6`, `null`, false, }, { "NullCompareFloatFalse", `6.01`, `null`, false, }, { "NullCompareBoolFalse", `false`, `null`, false, }, } func TestEquality(t *testing.T) { for _, tc := range EqualityCases { t.Run(tc.name, func(t *testing.T) { got := Equal([]byte(tc.a), []byte(tc.b)) if got != tc.equal { t.Errorf("Expected Equal(%s, %s) to return %t, but got %t", tc.a, tc.b, tc.equal, got) } got = Equal([]byte(tc.b), []byte(tc.a)) if got != tc.equal { t.Errorf("Expected Equal(%s, %s) to return %t, but got %t", tc.b, tc.a, tc.equal, got) } }) } } json-patch-5.2.0/v5/000077500000000000000000000000001377516326600141365ustar00rootroot00000000000000json-patch-5.2.0/v5/cmd/000077500000000000000000000000001377516326600147015ustar00rootroot00000000000000json-patch-5.2.0/v5/cmd/json-patch/000077500000000000000000000000001377516326600167475ustar00rootroot00000000000000json-patch-5.2.0/v5/cmd/json-patch/file_flag.go000066400000000000000000000013761377516326600212150ustar00rootroot00000000000000package main // Borrowed from Concourse: https://github.com/concourse/atc/blob/master/atccmd/file_flag.go import ( "fmt" "os" "path/filepath" ) // FileFlag is a flag for passing a path to a file on disk. The file is // expected to be a file, not a directory, that actually exists. 
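//
// Illustrative wiring with go-flags (an abbreviated sketch of the opts struct
// used in main.go, not an additional API):
//
//	type opts struct {
//		PatchFilePaths []FileFlag `long:"patch-file" short:"p"`
//	}
//
// go-flags invokes UnmarshalFlag for each --patch-file value, so by the time
// main runs every entry holds an absolute path to an existing regular file.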
type FileFlag string // UnmarshalFlag implements go-flag's Unmarshaler interface func (f *FileFlag) UnmarshalFlag(value string) error { stat, err := os.Stat(value) if err != nil { return err } if stat.IsDir() { return fmt.Errorf("path '%s' is a directory, not a file", value) } abs, err := filepath.Abs(value) if err != nil { return err } *f = FileFlag(abs) return nil } // Path is the path to the file func (f FileFlag) Path() string { return string(f) } json-patch-5.2.0/v5/cmd/json-patch/main.go000066400000000000000000000021241377516326600202210ustar00rootroot00000000000000package main import ( "fmt" "io/ioutil" "log" "os" jsonpatch "github.com/evanphx/json-patch/v5" flags "github.com/jessevdk/go-flags" ) type opts struct { PatchFilePaths []FileFlag `long:"patch-file" short:"p" value-name:"PATH" description:"Path to file with one or more operations"` } func main() { var o opts _, err := flags.Parse(&o) if err != nil { log.Fatalf("error: %s\n", err) } patches := make([]jsonpatch.Patch, len(o.PatchFilePaths)) for i, patchFilePath := range o.PatchFilePaths { var bs []byte bs, err = ioutil.ReadFile(patchFilePath.Path()) if err != nil { log.Fatalf("error reading patch file: %s", err) } var patch jsonpatch.Patch patch, err = jsonpatch.DecodePatch(bs) if err != nil { log.Fatalf("error decoding patch file: %s", err) } patches[i] = patch } doc, err := ioutil.ReadAll(os.Stdin) if err != nil { log.Fatalf("error reading from stdin: %s", err) } mdoc := doc for _, patch := range patches { mdoc, err = patch.Apply(mdoc) if err != nil { log.Fatalf("error applying patch: %s", err) } } fmt.Printf("%s", mdoc) } json-patch-5.2.0/v5/errors.go000066400000000000000000000022411377516326600160000ustar00rootroot00000000000000package jsonpatch import "fmt" // AccumulatedCopySizeError is an error type returned when the accumulated size // increase caused by copy operations in a patch operation has exceeded the // limit. type AccumulatedCopySizeError struct { limit int64 accumulated int64 } // NewAccumulatedCopySizeError returns an AccumulatedCopySizeError. func NewAccumulatedCopySizeError(l, a int64) *AccumulatedCopySizeError { return &AccumulatedCopySizeError{limit: l, accumulated: a} } // Error implements the error interface. func (a *AccumulatedCopySizeError) Error() string { return fmt.Sprintf("Unable to complete the copy, the accumulated size increase of copy is %d, exceeding the limit %d", a.accumulated, a.limit) } // ArraySizeError is an error type returned when the array size has exceeded // the limit. type ArraySizeError struct { limit int size int } // NewArraySizeError returns an ArraySizeError. func NewArraySizeError(l, s int) *ArraySizeError { return &ArraySizeError{limit: l, size: s} } // Error implements the error interface. 
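//
// A caller that wants to react to either limit error specifically can use a
// plain type assertion on the error returned from Apply (a sketch only; the
// variable names are arbitrary, and errors.As is avoided because this module
// targets Go 1.12):
//
//	if _, err := patch.Apply(doc); err != nil {
//		if copyErr, ok := err.(*AccumulatedCopySizeError); ok {
//			// the accumulated copy size limit was exceeded
//			_ = copyErr
//		}
//	}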
func (a *ArraySizeError) Error() string { return fmt.Sprintf("Unable to create array of size %d, limit is %d", a.size, a.limit) } json-patch-5.2.0/v5/go.mod000066400000000000000000000002011377516326600152350ustar00rootroot00000000000000module github.com/evanphx/json-patch/v5 go 1.12 require ( github.com/jessevdk/go-flags v1.4.0 github.com/pkg/errors v0.8.1 ) json-patch-5.2.0/v5/go.sum000066400000000000000000000005201377516326600152660ustar00rootroot00000000000000github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= json-patch-5.2.0/v5/merge.go000066400000000000000000000217651377516326600155770ustar00rootroot00000000000000package jsonpatch import ( "bytes" "encoding/json" "fmt" "reflect" ) func merge(cur, patch *lazyNode, mergeMerge bool) *lazyNode { curDoc, err := cur.intoDoc() if err != nil { pruneNulls(patch) return patch } patchDoc, err := patch.intoDoc() if err != nil { return patch } mergeDocs(curDoc, patchDoc, mergeMerge) return cur } func mergeDocs(doc, patch *partialDoc, mergeMerge bool) { for k, v := range patch.obj { if v == nil { if mergeMerge { idx := -1 for i, key := range doc.keys { if key == k { idx = i break } } if idx == -1 { doc.keys = append(doc.keys, k) } doc.obj[k] = nil } else { _ = doc.remove(k, &ApplyOptions{}) } } else { cur, ok := doc.obj[k] if !ok || cur == nil { pruneNulls(v) _ = doc.set(k, v, &ApplyOptions{}) } else { _ = doc.set(k, merge(cur, v, mergeMerge), &ApplyOptions{}) } } } } func pruneNulls(n *lazyNode) { sub, err := n.intoDoc() if err == nil { pruneDocNulls(sub) } else { ary, err := n.intoAry() if err == nil { pruneAryNulls(ary) } } } func pruneDocNulls(doc *partialDoc) *partialDoc { for k, v := range doc.obj { if v == nil { _ = doc.remove(k, &ApplyOptions{}) } else { pruneNulls(v) } } return doc } func pruneAryNulls(ary *partialArray) *partialArray { newAry := []*lazyNode{} for _, v := range *ary { if v != nil { pruneNulls(v) newAry = append(newAry, v) } } *ary = newAry return ary } var errBadJSONDoc = fmt.Errorf("Invalid JSON Document") var errBadJSONPatch = fmt.Errorf("Invalid JSON Patch") var errBadMergeTypes = fmt.Errorf("Mismatched JSON Documents") // MergeMergePatches merges two merge patches together, such that // applying this resulting merged merge patch to a document yields the same // as merging each merge patch to the document in succession. func MergeMergePatches(patch1Data, patch2Data []byte) ([]byte, error) { return doMergePatch(patch1Data, patch2Data, true) } // MergePatch merges the patchData into the docData. 
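//
// For example (one of the RFC 7396 appendix cases exercised in merge_test.go),
// a null in the patch deletes the corresponding key:
//
//	out, err := MergePatch([]byte(`{"a":"b","b":"c"}`), []byte(`{"a":null}`))
//	// out is {"b":"c"} (modulo insignificant whitespace), err is nil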
func MergePatch(docData, patchData []byte) ([]byte, error) { return doMergePatch(docData, patchData, false) } func doMergePatch(docData, patchData []byte, mergeMerge bool) ([]byte, error) { doc := &partialDoc{} docErr := json.Unmarshal(docData, doc) patch := &partialDoc{} patchErr := json.Unmarshal(patchData, patch) if isSyntaxError(docErr) { return nil, errBadJSONDoc } if isSyntaxError(patchErr) { return nil, errBadJSONPatch } if docErr == nil && doc.obj == nil { return nil, errBadJSONDoc } if patchErr == nil && patch.obj == nil { return nil, errBadJSONPatch } if docErr != nil || patchErr != nil { // Not an error, just not a doc, so we turn straight into the patch if patchErr == nil { if mergeMerge { doc = patch } else { doc = pruneDocNulls(patch) } } else { patchAry := &partialArray{} patchErr = json.Unmarshal(patchData, patchAry) if patchErr != nil { return nil, errBadJSONPatch } pruneAryNulls(patchAry) out, patchErr := json.Marshal(patchAry) if patchErr != nil { return nil, errBadJSONPatch } return out, nil } } else { mergeDocs(doc, patch, mergeMerge) } return json.Marshal(doc) } func isSyntaxError(err error) bool { if _, ok := err.(*json.SyntaxError); ok { return true } if _, ok := err.(*syntaxError); ok { return true } return false } // resemblesJSONArray indicates whether the byte-slice "appears" to be // a JSON array or not. // False-positives are possible, as this function does not check the internal // structure of the array. It only checks that the outer syntax is present and // correct. func resemblesJSONArray(input []byte) bool { input = bytes.TrimSpace(input) hasPrefix := bytes.HasPrefix(input, []byte("[")) hasSuffix := bytes.HasSuffix(input, []byte("]")) return hasPrefix && hasSuffix } // CreateMergePatch will return a merge patch document capable of converting // the original document(s) to the modified document(s). // The parameters can be bytes of either two JSON Documents, or two arrays of // JSON documents. // The merge patch returned follows the specification defined at http://tools.ietf.org/html/draft-ietf-appsawg-json-merge-patch-07 func CreateMergePatch(originalJSON, modifiedJSON []byte) ([]byte, error) { originalResemblesArray := resemblesJSONArray(originalJSON) modifiedResemblesArray := resemblesJSONArray(modifiedJSON) // Do both byte-slices seem like JSON arrays? if originalResemblesArray && modifiedResemblesArray { return createArrayMergePatch(originalJSON, modifiedJSON) } // Are both byte-slices are not arrays? Then they are likely JSON objects... if !originalResemblesArray && !modifiedResemblesArray { return createObjectMergePatch(originalJSON, modifiedJSON) } // None of the above? Then return an error because of mismatched types. return nil, errBadMergeTypes } // createObjectMergePatch will return a merge-patch document capable of // converting the original document to the modified document. func createObjectMergePatch(originalJSON, modifiedJSON []byte) ([]byte, error) { originalDoc := map[string]interface{}{} modifiedDoc := map[string]interface{}{} err := json.Unmarshal(originalJSON, &originalDoc) if err != nil { return nil, errBadJSONDoc } err = json.Unmarshal(modifiedJSON, &modifiedDoc) if err != nil { return nil, errBadJSONDoc } dest, err := getDiff(originalDoc, modifiedDoc) if err != nil { return nil, err } return json.Marshal(dest) } // createArrayMergePatch will return an array of merge-patch documents capable // of converting the original document to the modified document for each // pair of JSON documents provided in the arrays. 
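// For instance, the pairwise behaviour exercised by
// TestCreateMergePatchModifiedOuterArray in merge_test.go is:
//
//	original: [{"name": "John"}, {"name": "Will"}]
//	modified: [{"name": "Jane"}, {"name": "Will"}]
//	patch:    [{"name": "Jane"}, {}]
//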
// Arrays of mismatched sizes will result in an error. func createArrayMergePatch(originalJSON, modifiedJSON []byte) ([]byte, error) { originalDocs := []json.RawMessage{} modifiedDocs := []json.RawMessage{} err := json.Unmarshal(originalJSON, &originalDocs) if err != nil { return nil, errBadJSONDoc } err = json.Unmarshal(modifiedJSON, &modifiedDocs) if err != nil { return nil, errBadJSONDoc } total := len(originalDocs) if len(modifiedDocs) != total { return nil, errBadJSONDoc } result := []json.RawMessage{} for i := 0; i < len(originalDocs); i++ { original := originalDocs[i] modified := modifiedDocs[i] patch, err := createObjectMergePatch(original, modified) if err != nil { return nil, err } result = append(result, json.RawMessage(patch)) } return json.Marshal(result) } // Returns true if the array matches (must be json types). // As is idiomatic for go, an empty array is not the same as a nil array. func matchesArray(a, b []interface{}) bool { if len(a) != len(b) { return false } if (a == nil && b != nil) || (a != nil && b == nil) { return false } for i := range a { if !matchesValue(a[i], b[i]) { return false } } return true } // Returns true if the values matches (must be json types) // The types of the values must match, otherwise it will always return false // If two map[string]interface{} are given, all elements must match. func matchesValue(av, bv interface{}) bool { if reflect.TypeOf(av) != reflect.TypeOf(bv) { return false } switch at := av.(type) { case string: bt := bv.(string) if bt == at { return true } case float64: bt := bv.(float64) if bt == at { return true } case bool: bt := bv.(bool) if bt == at { return true } case nil: // Both nil, fine. return true case map[string]interface{}: bt := bv.(map[string]interface{}) if len(bt) != len(at) { return false } for key := range bt { av, aOK := at[key] bv, bOK := bt[key] if aOK != bOK { return false } if !matchesValue(av, bv) { return false } } return true case []interface{}: bt := bv.([]interface{}) return matchesArray(at, bt) } return false } // getDiff returns the (recursive) difference between a and b as a map[string]interface{}. func getDiff(a, b map[string]interface{}) (map[string]interface{}, error) { into := map[string]interface{}{} for key, bv := range b { av, ok := a[key] // value was added if !ok { into[key] = bv continue } // If types have changed, replace completely if reflect.TypeOf(av) != reflect.TypeOf(bv) { into[key] = bv continue } // Types are the same, compare values switch at := av.(type) { case map[string]interface{}: bt := bv.(map[string]interface{}) dst := make(map[string]interface{}, len(bt)) dst, err := getDiff(at, bt) if err != nil { return nil, err } if len(dst) > 0 { into[key] = dst } case string, float64, bool: if !matchesValue(av, bv) { into[key] = bv } case []interface{}: bt := bv.([]interface{}) if !matchesArray(at, bt) { into[key] = bv } case nil: switch bv.(type) { case nil: // Both nil, fine. 
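			// Otherwise bv is non-nil while av was null: the value changed away
			// from JSON null, so the new value is recorded in the diff.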
default: into[key] = bv } default: panic(fmt.Sprintf("Unknown type:%T in key %s", av, key)) } } // Now add all deleted values as nil for key := range a { _, found := b[key] if !found { into[key] = nil } } return into, nil } json-patch-5.2.0/v5/merge_test.go000066400000000000000000000445731377516326600166400ustar00rootroot00000000000000package jsonpatch import ( "fmt" "strings" "testing" ) func mergePatch(doc, patch string) string { out, err := MergePatch([]byte(doc), []byte(patch)) if err != nil { panic(err) } return string(out) } func TestMergePatchReplaceKey(t *testing.T) { doc := `{ "title": "hello" }` pat := `{ "title": "goodbye" }` res := mergePatch(doc, pat) if !compareJSON(pat, res) { t.Fatalf("Key was not replaced") } } func TestMergePatchIgnoresOtherValues(t *testing.T) { doc := `{ "title": "hello", "age": 18 }` pat := `{ "title": "goodbye" }` res := mergePatch(doc, pat) exp := `{ "title": "goodbye", "age": 18 }` if !compareJSON(exp, res) { t.Fatalf("Key was not replaced") } } func TestMergePatchNilDoc(t *testing.T) { doc := `{ "title": null }` pat := `{ "title": {"foo": "bar"} }` res := mergePatch(doc, pat) exp := `{ "title": {"foo": "bar"} }` if !compareJSON(exp, res) { t.Fatalf("Key was not replaced") } } func TestMergePatchRecursesIntoObjects(t *testing.T) { doc := `{ "person": { "title": "hello", "age": 18 } }` pat := `{ "person": { "title": "goodbye" } }` res := mergePatch(doc, pat) exp := `{ "person": { "title": "goodbye", "age": 18 } }` if !compareJSON(exp, res) { t.Fatalf("Key was not replaced: %s", res) } } type nonObjectCases struct { doc, pat, res string } func TestMergePatchReplacesNonObjectsWholesale(t *testing.T) { a1 := `[1]` a2 := `[2]` o1 := `{ "a": 1 }` o2 := `{ "a": 2 }` o3 := `{ "a": 1, "b": 1 }` o4 := `{ "a": 2, "b": 1 }` cases := []nonObjectCases{ {a1, a2, a2}, {o1, a2, a2}, {a1, o1, o1}, {o3, o2, o4}, } for _, c := range cases { act := mergePatch(c.doc, c.pat) if !compareJSON(c.res, act) { t.Errorf("whole object replacement failed") } } } func TestMergePatchReturnsErrorOnBadJSON(t *testing.T) { _, err := MergePatch([]byte(`[[[[`), []byte(`1`)) if err == nil { t.Errorf("Did not return an error for bad json: %s", err) } _, err = MergePatch([]byte(`1`), []byte(`[[[[`)) if err == nil { t.Errorf("Did not return an error for bad json: %s", err) } } func TestMergePatchReturnsEmptyArrayOnEmptyArray(t *testing.T) { doc := `{ "array": ["one", "two"] }` pat := `{ "array": [] }` exp := `{ "array": [] }` res, err := MergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Emtpy array did not return not return as empty array") } } var rfcTests = []struct { target string patch string expected string }{ // test cases from https://tools.ietf.org/html/rfc7386#appendix-A {target: `{"a":"b"}`, patch: `{"a":"c"}`, expected: `{"a":"c"}`}, {target: `{"a":"b"}`, patch: `{"b":"c"}`, expected: `{"a":"b","b":"c"}`}, {target: `{"a":"b"}`, patch: `{"a":null}`, expected: `{}`}, {target: `{"a":"b","b":"c"}`, patch: `{"a":null}`, expected: `{"b":"c"}`}, {target: `{"a":["b"]}`, patch: `{"a":"c"}`, expected: `{"a":"c"}`}, {target: `{"a":"c"}`, patch: `{"a":["b"]}`, expected: `{"a":["b"]}`}, {target: `{"a":{"b": "c"}}`, patch: `{"a": {"b": "d","c": null}}`, expected: `{"a":{"b":"d"}}`}, {target: `{"a":[{"b":"c"}]}`, patch: `{"a":[1]}`, expected: `{"a":[1]}`}, {target: `["a","b"]`, patch: `["c","d"]`, expected: `["c","d"]`}, {target: `{"a":"b"}`, patch: `["c"]`, expected: `["c"]`}, // {target: 
`{"a":"foo"}`, patch: `null`, expected: `null`}, // {target: `{"a":"foo"}`, patch: `"bar"`, expected: `"bar"`}, {target: `{"e":null}`, patch: `{"a":1}`, expected: `{"a":1,"e":null}`}, {target: `[1,2]`, patch: `{"a":"b","c":null}`, expected: `{"a":"b"}`}, {target: `{}`, patch: `{"a":{"bb":{"ccc":null}}}`, expected: `{"a":{"bb":{}}}`}, } func TestMergePatchRFCCases(t *testing.T) { for i, c := range rfcTests { out := mergePatch(c.target, c.patch) if !compareJSON(out, c.expected) { t.Errorf("case[%d], patch '%s' did not apply properly to '%s'. expected:\n'%s'\ngot:\n'%s'", i, c.patch, c.target, c.expected, out) } } } var rfcFailTests = ` {"a":"foo"} | null {"a":"foo"} | "bar" ` func TestMergePatchFailRFCCases(t *testing.T) { tests := strings.Split(rfcFailTests, "\n") for _, c := range tests { if strings.TrimSpace(c) == "" { continue } parts := strings.SplitN(c, "|", 2) doc := strings.TrimSpace(parts[0]) pat := strings.TrimSpace(parts[1]) out, err := MergePatch([]byte(doc), []byte(pat)) if err != errBadJSONPatch { t.Errorf("error not returned properly: %s, %s", err, string(out)) } } } func TestResembleJSONArray(t *testing.T) { testCases := []struct { input []byte expected bool }{ // Failure cases {input: []byte(``), expected: false}, {input: []byte(`not an array`), expected: false}, {input: []byte(`{"foo": "bar"}`), expected: false}, {input: []byte(`{"fizz": ["buzz"]}`), expected: false}, {input: []byte(`[bad suffix`), expected: false}, {input: []byte(`bad prefix]`), expected: false}, {input: []byte(`][`), expected: false}, // Valid cases {input: []byte(`[]`), expected: true}, {input: []byte(`["foo", "bar"]`), expected: true}, {input: []byte(`[["foo", "bar"]]`), expected: true}, {input: []byte(`[not valid syntax]`), expected: true}, // Valid cases with whitespace {input: []byte(` []`), expected: true}, {input: []byte(`[] `), expected: true}, {input: []byte(` [] `), expected: true}, {input: []byte(` [ ] `), expected: true}, {input: []byte("\t[]"), expected: true}, {input: []byte("[]\n"), expected: true}, {input: []byte("\n\t\r[]"), expected: true}, } for _, test := range testCases { result := resemblesJSONArray(test.input) if result != test.expected { t.Errorf( `expected "%t" but received "%t" for case: "%s"`, test.expected, result, string(test.input), ) } } } func TestCreateMergePatchReplaceKey(t *testing.T) { doc := `{ "title": "hello", "nested": {"one": 1, "two": 2} }` pat := `{ "title": "goodbye", "nested": {"one": 2, "two": 2} }` exp := `{ "title": "goodbye", "nested": {"one": 2} }` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Key was not replaced") } } func TestCreateMergePatchGetArray(t *testing.T) { doc := `{ "title": "hello", "array": ["one", "two"], "notmatch": [1, 2, 3] }` pat := `{ "title": "hello", "array": ["one", "two", "three"], "notmatch": [1, 2, 3] }` exp := `{ "array": ["one", "two", "three"] }` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Array was not added") } } func TestCreateMergePatchGetObjArray(t *testing.T) { doc := `{ "title": "hello", "array": [{"banana": true}, {"evil": false}], "notmatch": [{"one":1}, {"two":2}, {"three":3}] }` pat := `{ "title": "hello", "array": [{"banana": false}, {"evil": true}], "notmatch": [{"one":1}, {"two":2}, {"three":3}] }` exp := `{ "array": [{"banana": false}, {"evil": true}] }` res, 
err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Object array was not added") } } func TestCreateMergePatchDeleteKey(t *testing.T) { doc := `{ "title": "hello", "nested": {"one": 1, "two": 2} }` pat := `{ "title": "hello", "nested": {"one": 1} }` exp := `{"nested":{"two":null}}` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } // We cannot use "compareJSON", since Equals does not report a difference if the value is null if exp != string(res) { t.Fatalf("Key was not removed") } } func TestCreateMergePatchEmptyArray(t *testing.T) { doc := `{ "array": null }` pat := `{ "array": [] }` exp := `{"array":[]}` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } // We cannot use "compareJSON", since Equals does not report a difference if the value is null if exp != string(res) { t.Fatalf("Key was not removed") } } func TestCreateMergePatchNil(t *testing.T) { doc := `{ "title": "hello", "nested": {"one": 1, "two": [{"one":null}, {"two":null}, {"three":null}]} }` pat := doc exp := `{}` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Object array was not added") } } func TestCreateMergePatchObjArray(t *testing.T) { doc := `{ "array": [ {"a": {"b": 2}}, {"a": {"b": 3}} ]}` exp := `{}` res, err := CreateMergePatch([]byte(doc), []byte(doc)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } // We cannot use "compareJSON", since Equals does not report a difference if the value is null if exp != string(res) { t.Fatalf("Array was not empty, was " + string(res)) } } func TestCreateMergePatchSameOuterArray(t *testing.T) { doc := `[{"foo": "bar"}]` pat := doc exp := `[{}]` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Outer array was not unmodified") } } func TestCreateMergePatchModifiedOuterArray(t *testing.T) { doc := `[{"name": "John"}, {"name": "Will"}]` pat := `[{"name": "Jane"}, {"name": "Will"}]` exp := `[{"name": "Jane"}, {}]` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Expected %s but received %s", exp, res) } } func TestCreateMergePatchMismatchedOuterArray(t *testing.T) { doc := `[{"name": "John"}, {"name": "Will"}]` pat := `[{"name": "Jane"}]` _, err := CreateMergePatch([]byte(doc), []byte(pat)) if err == nil { t.Errorf("Expected error due to array length differences but received none") } } func TestCreateMergePatchMismatchedOuterTypes(t *testing.T) { doc := `[{"name": "John"}]` pat := `{"name": "Jane"}` _, err := CreateMergePatch([]byte(doc), []byte(pat)) if err == nil { t.Errorf("Expected error due to mismatched types but received none") } } func TestCreateMergePatchNoDifferences(t *testing.T) { doc := `{ "title": "hello", "nested": {"one": 1, "two": 2} }` pat := doc exp := `{}` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Fatalf("Key was not replaced") } } func TestCreateMergePatchComplexMatch(t *testing.T) { 
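	// Diffing a document against an identical copy of itself should yield an
	// empty merge patch ("{}").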
doc := `{"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4], "nested": {"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4]} }` empty := `{}` res, err := CreateMergePatch([]byte(doc), []byte(doc)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } // We cannot use "compareJSON", since Equals does not report a difference if the value is null if empty != string(res) { t.Fatalf("Did not get empty result, was:%s", string(res)) } } func TestCreateMergePatchComplexAddAll(t *testing.T) { doc := `{"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4], "nested": {"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4]} }` empty := `{}` res, err := CreateMergePatch([]byte(empty), []byte(doc)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(doc, string(res)) { t.Fatalf("Did not get everything as, it was:\n%s", string(res)) } } // createNestedMap created a series of nested map objects such that the number of // objects is roughly 2^n (precisely, 2^(n+1)-1). func createNestedMap(m map[string]interface{}, depth int, objectCount *int) { if depth == 0 { return } for i := 0; i< 2;i++ { nested := map[string]interface{}{} *objectCount += 1 createNestedMap(nested, depth-1, objectCount) m[fmt.Sprintf("key-%v", i)] = nested } } func TestMatchesValue(t *testing.T) { testcases := []struct { name string a interface{} b interface{} want bool }{ { name: "map empty", a: map[string]interface{}{}, b: map[string]interface{}{}, want: true, }, { name: "map equal keys, equal non-nil value", a: map[string]interface{}{"1": true}, b: map[string]interface{}{"1": true}, want: true, }, { name: "map equal keys, equal nil value", a: map[string]interface{}{"1": nil}, b: map[string]interface{}{"1": nil}, want: true, }, { name: "map different value", a: map[string]interface{}{"1": true}, b: map[string]interface{}{"1": false}, want: false, }, { name: "map different key, matching non-nil value", a: map[string]interface{}{"1": true}, b: map[string]interface{}{"2": true}, want: false, }, { name: "map different key, matching nil value", a: map[string]interface{}{"1": nil}, b: map[string]interface{}{"2": nil}, want: false, }, { name: "map different key, first nil value", a: map[string]interface{}{"1": true}, b: map[string]interface{}{"2": nil}, want: false, }, { name: "map different key, second nil value", a: map[string]interface{}{"1": nil}, b: map[string]interface{}{"2": true}, want: false, }, } for _, tc := range testcases { t.Run(tc.name, func(t *testing.T) { got := matchesValue(tc.a, tc.b) if got != tc.want { t.Fatalf("want %v, got %v", tc.want, got) } }) } } func benchmarkMatchesValueWithDeeplyNestedFields(depth int, b *testing.B) { a := map[string]interface{}{} objCount := 1 createNestedMap(a, depth, &objCount) b.ResetTimer() b.Run(fmt.Sprintf("objectCount=%v", objCount), func(b *testing.B) { for i := 0; i < b.N; i++ { if !matchesValue(a, a) { b.Errorf("Should be equal") } } }) } func BenchmarkMatchesValue1(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(1, b) } func BenchmarkMatchesValue2(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(2, b) } func BenchmarkMatchesValue3(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(3, b) } func BenchmarkMatchesValue4(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(4, b) } func BenchmarkMatchesValue5(b *testing.B) { 
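	// Depth 5 yields 2^(5+1)-1 = 63 map objects in total (the root map plus the
	// ones createNestedMap builds), per the formula noted on createNestedMap.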
benchmarkMatchesValueWithDeeplyNestedFields(5, b) } func BenchmarkMatchesValue6(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(6, b) } func BenchmarkMatchesValue7(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(7, b) } func BenchmarkMatchesValue8(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(8, b) } func BenchmarkMatchesValue9(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(9, b) } func BenchmarkMatchesValue10(b *testing.B) { benchmarkMatchesValueWithDeeplyNestedFields(10, b) } func TestCreateMergePatchComplexRemoveAll(t *testing.T) { doc := `{"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4], "nested": {"hello": "world","t": true ,"f": false, "n": null,"i": 123,"pi": 3.1416,"a": [1, 2, 3, 4]} }` exp := `{"a":null,"f":null,"hello":null,"i":null,"n":null,"nested":null,"pi":null,"t":null}` empty := `{}` res, err := CreateMergePatch([]byte(doc), []byte(empty)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if exp != string(res) { t.Fatalf("Did not get result, was:%s", string(res)) } // FIXME: Crashes if using compareJSON like this: /* if !compareJSON(doc, string(res)) { t.Fatalf("Did not get everything as, it was:\n%s", string(res)) } */ } func TestCreateMergePatchObjectWithInnerArray(t *testing.T) { stateString := `{ "OuterArray": [ { "InnerArray": [ { "StringAttr": "abc123" } ], "StringAttr": "def456" } ] }` patch, err := CreateMergePatch([]byte(stateString), []byte(stateString)) if err != nil { t.Fatal(err) } if string(patch) != "{}" { t.Fatalf("Patch should have been {} but was: %v", string(patch)) } } func TestCreateMergePatchReplaceKeyNotEscape(t *testing.T) { doc := `{ "title": "hello", "nested": {"title/escaped": 1, "two": 2} }` pat := `{ "title": "goodbye", "nested": {"title/escaped": 2, "two": 2} }` exp := `{ "title": "goodbye", "nested": {"title/escaped": 2} }` res, err := CreateMergePatch([]byte(doc), []byte(pat)) if err != nil { t.Errorf("Unexpected error: %s, %s", err, string(res)) } if !compareJSON(exp, string(res)) { t.Log(string(res)) t.Fatalf("Key was not replaced") } } func TestMergePatchReplaceKeyNotEscaping(t *testing.T) { doc := `{ "obj": { "title/escaped": "hello" } }` pat := `{ "obj": { "title/escaped": "goodbye" } }` exp := `{ "obj": { "title/escaped": "goodbye" } }` res := mergePatch(doc, pat) if !compareJSON(exp, res) { t.Fatalf("Key was not replaced") } } func TestMergeMergePatches(t *testing.T) { cases := []struct { demonstrates string p1 string p2 string exp string }{ { demonstrates: "simple patches are merged normally", p1: `{"add1": 1}`, p2: `{"add2": 2}`, exp: `{"add1": 1, "add2": 2}`, }, { demonstrates: "nulls are kept", p1: `{"del1": null}`, p2: `{"del2": null}`, exp: `{"del1": null, "del2": null}`, }, { demonstrates: "a key added then deleted is kept deleted", p1: `{"add_then_delete": "atd"}`, p2: `{"add_then_delete": null}`, exp: `{"add_then_delete": null}`, }, { demonstrates: "a key deleted then added is kept added", p1: `{"delete_then_add": null}`, p2: `{"delete_then_add": "dta"}`, exp: `{"delete_then_add": "dta"}`, }, { demonstrates: "object overrides array", p1: `[]`, p2: `{"del": null, "add": "a"}`, exp: `{"del": null, "add": "a"}`, }, { demonstrates: "array overrides object", p1: `{"del": null, "add": "a"}`, p2: `[]`, exp: `[]`, }, } for _, c := range cases { out, err := MergeMergePatches([]byte(c.p1), []byte(c.p2)) if err != nil { panic(err) } if !compareJSON(c.exp, string(out)) { t.Logf("Error while trying to demonstrate: %v", 
c.demonstrates) t.Logf("Got %v", string(out)) t.Logf("Expected %v", c.exp) t.Fatalf("Merged merge patch is incorrect") } } } json-patch-5.2.0/v5/patch.go000066400000000000000000000563141377516326600155750ustar00rootroot00000000000000package jsonpatch import ( "bytes" "encoding/json" "fmt" "strconv" "strings" "github.com/pkg/errors" ) const ( eRaw = iota eDoc eAry ) var ( // SupportNegativeIndices decides whether to support non-standard practice of // allowing negative indices to mean indices starting at the end of an array. // Default to true. SupportNegativeIndices bool = true // AccumulatedCopySizeLimit limits the total size increase in bytes caused by // "copy" operations in a patch. AccumulatedCopySizeLimit int64 = 0 startObject = json.Delim('{') endObject = json.Delim('}') startArray = json.Delim('[') endArray = json.Delim(']') ) var ( ErrTestFailed = errors.New("test failed") ErrMissing = errors.New("missing value") ErrUnknownType = errors.New("unknown object type") ErrInvalid = errors.New("invalid state detected") ErrInvalidIndex = errors.New("invalid index referenced") rawJSONArray = []byte("[]") rawJSONObject = []byte("{}") rawJSONNull = []byte("null") ) type lazyNode struct { raw *json.RawMessage doc *partialDoc ary partialArray which int } // Operation is a single JSON-Patch step, such as a single 'add' operation. type Operation map[string]*json.RawMessage // Patch is an ordered collection of Operations. type Patch []Operation type partialDoc struct { keys []string obj map[string]*lazyNode } type partialArray []*lazyNode type container interface { get(key string, options *ApplyOptions) (*lazyNode, error) set(key string, val *lazyNode, options *ApplyOptions) error add(key string, val *lazyNode, options *ApplyOptions) error remove(key string, options *ApplyOptions) error } // ApplyOptions specifies options for calls to ApplyWithOptions. // Use NewApplyOptions to obtain default values for ApplyOptions. type ApplyOptions struct { // SupportNegativeIndices decides whether to support non-standard practice of // allowing negative indices to mean indices starting at the end of an array. // Default to true. SupportNegativeIndices bool // AccumulatedCopySizeLimit limits the total size increase in bytes caused by // "copy" operations in a patch. AccumulatedCopySizeLimit int64 // AllowMissingPathOnRemove indicates whether to fail "remove" operations when the target path is missing. // Default to false. AllowMissingPathOnRemove bool // EnsurePathExistsOnAdd instructs json-patch to recursively create the missing parts of path on "add" operation. // Default to false. EnsurePathExistsOnAdd bool } // NewApplyOptions creates a default set of options for calls to ApplyWithOptions. 
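//
// Illustrative usage (a sketch; the patch and document literals are arbitrary):
//
//	opts := NewApplyOptions()
//	opts.AllowMissingPathOnRemove = true
//	opts.EnsurePathExistsOnAdd = true
//	patch, err := DecodePatch([]byte(`[{"op": "add", "path": "/a/b", "value": 1}]`))
//	if err != nil {
//		// handle the decode error
//	}
//	out, err := patch.ApplyWithOptions([]byte(`{}`), opts)
//	// out contains {"a": {"b": 1}}, since EnsurePathExistsOnAdd created "/a".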
func NewApplyOptions() *ApplyOptions { return &ApplyOptions{ SupportNegativeIndices: SupportNegativeIndices, AccumulatedCopySizeLimit: AccumulatedCopySizeLimit, AllowMissingPathOnRemove: false, EnsurePathExistsOnAdd: false, } } func newLazyNode(raw *json.RawMessage) *lazyNode { return &lazyNode{raw: raw, doc: nil, ary: nil, which: eRaw} } func newRawMessage(buf []byte) *json.RawMessage { ra := make(json.RawMessage, len(buf)) copy(ra, buf) return &ra } func (n *lazyNode) MarshalJSON() ([]byte, error) { switch n.which { case eRaw: return json.Marshal(n.raw) case eDoc: return json.Marshal(n.doc) case eAry: return json.Marshal(n.ary) default: return nil, ErrUnknownType } } func (n *lazyNode) UnmarshalJSON(data []byte) error { dest := make(json.RawMessage, len(data)) copy(dest, data) n.raw = &dest n.which = eRaw return nil } func (n *partialDoc) MarshalJSON() ([]byte, error) { var buf bytes.Buffer if _, err := buf.WriteString("{"); err != nil { return nil, err } for i, k := range n.keys { if i > 0 { if _, err := buf.WriteString(", "); err != nil { return nil, err } } key, err := json.Marshal(k) if err != nil { return nil, err } if _, err := buf.Write(key); err != nil { return nil, err } if _, err := buf.WriteString(": "); err != nil { return nil, err } value, err := json.Marshal(n.obj[k]) if err != nil { return nil, err } if _, err := buf.Write(value); err != nil { return nil, err } } if _, err := buf.WriteString("}"); err != nil { return nil, err } return buf.Bytes(), nil } type syntaxError struct { msg string } func (err *syntaxError) Error() string { return err.msg } func (n *partialDoc) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(data, &n.obj); err != nil { return err } buffer := bytes.NewBuffer(data) d := json.NewDecoder(buffer) if t, err := d.Token(); err != nil { return err } else if t != startObject { return &syntaxError{fmt.Sprintf("unexpected JSON token in document node: %s", t)} } for d.More() { k, err := d.Token() if err != nil { return err } key, ok := k.(string) if !ok { return &syntaxError{fmt.Sprintf("unexpected JSON token as document node key: %s", k)} } if err := skipValue(d); err != nil { return err } n.keys = append(n.keys, key) } return nil } func skipValue(d *json.Decoder) error { t, err := d.Token() if err != nil { return err } if t != startObject && t != startArray { return nil } for d.More() { if t == startObject { // consume key token if _, err := d.Token(); err != nil { return err } } if err := skipValue(d); err != nil { return err } } end, err := d.Token() if err != nil { return err } if t == startObject && end != endObject { return &syntaxError{msg: "expected close object token"} } if t == startArray && end != endArray { return &syntaxError{msg: "expected close object token"} } return nil } func deepCopy(src *lazyNode) (*lazyNode, int, error) { if src == nil { return nil, 0, nil } a, err := src.MarshalJSON() if err != nil { return nil, 0, err } sz := len(a) return newLazyNode(newRawMessage(a)), sz, nil } func (n *lazyNode) intoDoc() (*partialDoc, error) { if n.which == eDoc { return n.doc, nil } if n.raw == nil { return nil, ErrInvalid } err := json.Unmarshal(*n.raw, &n.doc) if err != nil { return nil, err } n.which = eDoc return n.doc, nil } func (n *lazyNode) intoAry() (*partialArray, error) { if n.which == eAry { return &n.ary, nil } if n.raw == nil { return nil, ErrInvalid } err := json.Unmarshal(*n.raw, &n.ary) if err != nil { return nil, err } n.which = eAry return &n.ary, nil } func (n *lazyNode) compact() []byte { buf := &bytes.Buffer{} if 
n.raw == nil { return nil } err := json.Compact(buf, *n.raw) if err != nil { return *n.raw } return buf.Bytes() } func (n *lazyNode) tryDoc() bool { if n.raw == nil { return false } err := json.Unmarshal(*n.raw, &n.doc) if err != nil { return false } n.which = eDoc return true } func (n *lazyNode) tryAry() bool { if n.raw == nil { return false } err := json.Unmarshal(*n.raw, &n.ary) if err != nil { return false } n.which = eAry return true } func (n *lazyNode) equal(o *lazyNode) bool { if n.which == eRaw { if !n.tryDoc() && !n.tryAry() { if o.which != eRaw { return false } return bytes.Equal(n.compact(), o.compact()) } } if n.which == eDoc { if o.which == eRaw { if !o.tryDoc() { return false } } if o.which != eDoc { return false } if len(n.doc.obj) != len(o.doc.obj) { return false } for k, v := range n.doc.obj { ov, ok := o.doc.obj[k] if !ok { return false } if (v == nil) != (ov == nil) { return false } if v == nil && ov == nil { continue } if !v.equal(ov) { return false } } return true } if o.which != eAry && !o.tryAry() { return false } if len(n.ary) != len(o.ary) { return false } for idx, val := range n.ary { if !val.equal(o.ary[idx]) { return false } } return true } // Kind reads the "op" field of the Operation. func (o Operation) Kind() string { if obj, ok := o["op"]; ok && obj != nil { var op string err := json.Unmarshal(*obj, &op) if err != nil { return "unknown" } return op } return "unknown" } // Path reads the "path" field of the Operation. func (o Operation) Path() (string, error) { if obj, ok := o["path"]; ok && obj != nil { var op string err := json.Unmarshal(*obj, &op) if err != nil { return "unknown", err } return op, nil } return "unknown", errors.Wrapf(ErrMissing, "operation missing path field") } // From reads the "from" field of the Operation. func (o Operation) From() (string, error) { if obj, ok := o["from"]; ok && obj != nil { var op string err := json.Unmarshal(*obj, &op) if err != nil { return "unknown", err } return op, nil } return "unknown", errors.Wrapf(ErrMissing, "operation, missing from field") } func (o Operation) value() *lazyNode { if obj, ok := o["value"]; ok { return newLazyNode(obj) } return nil } // ValueInterface decodes the operation value into an interface. 
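//
// Illustrative sketch (encoding/json decodes JSON numbers into float64):
//
//	var op Operation
//	_ = json.Unmarshal([]byte(`{"op": "test", "path": "/foo", "value": 42}`), &op)
//	v, _ := op.ValueInterface() // v == float64(42)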
func (o Operation) ValueInterface() (interface{}, error) { if obj, ok := o["value"]; ok && obj != nil { var v interface{} err := json.Unmarshal(*obj, &v) if err != nil { return nil, err } return v, nil } return nil, errors.Wrapf(ErrMissing, "operation, missing value field") } func isArray(buf []byte) bool { Loop: for _, c := range buf { switch c { case ' ': case '\n': case '\t': continue case '[': return true default: break Loop } } return false } func findObject(pd *container, path string, options *ApplyOptions) (container, string) { doc := *pd split := strings.Split(path, "/") if len(split) < 2 { return nil, "" } parts := split[1 : len(split)-1] key := split[len(split)-1] var err error for _, part := range parts { next, ok := doc.get(decodePatchKey(part), options) if next == nil || ok != nil { return nil, "" } if isArray(*next.raw) { doc, err = next.intoAry() if err != nil { return nil, "" } } else { doc, err = next.intoDoc() if err != nil { return nil, "" } } } return doc, decodePatchKey(key) } func (d *partialDoc) set(key string, val *lazyNode, options *ApplyOptions) error { found := false for _, k := range d.keys { if k == key { found = true break } } if !found { d.keys = append(d.keys, key) } d.obj[key] = val return nil } func (d *partialDoc) add(key string, val *lazyNode, options *ApplyOptions) error { return d.set(key, val, options) } func (d *partialDoc) get(key string, options *ApplyOptions) (*lazyNode, error) { v, ok := d.obj[key] if !ok { return v, errors.Wrapf(ErrMissing, "unable to get nonexistent key: %s", key) } return v, nil } func (d *partialDoc) remove(key string, options *ApplyOptions) error { _, ok := d.obj[key] if !ok { if options.AllowMissingPathOnRemove { return nil } return errors.Wrapf(ErrMissing, "unable to remove nonexistent key: %s", key) } idx := -1 for i, k := range d.keys { if k == key { idx = i break } } d.keys = append(d.keys[0:idx], d.keys[idx+1:]...) delete(d.obj, key) return nil } // set should only be used to implement the "replace" operation, so "key" must // be an already existing index in "d". 
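//
// When options.SupportNegativeIndices is enabled, a negative key counts back
// from the end of the array ("-1" is the last element); a negative key that is
// out of range, or any negative key when the option is disabled, yields an
// error wrapping ErrInvalidIndex.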
func (d *partialArray) set(key string, val *lazyNode, options *ApplyOptions) error { idx, err := strconv.Atoi(key) if err != nil { return err } if idx < 0 { if !options.SupportNegativeIndices { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < -len(*d) { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } idx += len(*d) } (*d)[idx] = val return nil } func (d *partialArray) add(key string, val *lazyNode, options *ApplyOptions) error { if key == "-" { *d = append(*d, val) return nil } idx, err := strconv.Atoi(key) if err != nil { return errors.Wrapf(err, "value was not a proper array index: '%s'", key) } sz := len(*d) + 1 ary := make([]*lazyNode, sz) cur := *d if idx >= len(ary) { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < 0 { if !options.SupportNegativeIndices { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < -len(ary) { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } idx += len(ary) } copy(ary[0:idx], cur[0:idx]) ary[idx] = val copy(ary[idx+1:], cur[idx:]) *d = ary return nil } func (d *partialArray) get(key string, options *ApplyOptions) (*lazyNode, error) { idx, err := strconv.Atoi(key) if err != nil { return nil, err } if idx < 0 { if !options.SupportNegativeIndices { return nil, errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < -len(*d) { return nil, errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } idx += len(*d) } if idx >= len(*d) { return nil, errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } return (*d)[idx], nil } func (d *partialArray) remove(key string, options *ApplyOptions) error { idx, err := strconv.Atoi(key) if err != nil { return err } cur := *d if idx >= len(cur) { if options.AllowMissingPathOnRemove { return nil } return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < 0 { if !options.SupportNegativeIndices { return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } if idx < -len(cur) { if options.AllowMissingPathOnRemove { return nil } return errors.Wrapf(ErrInvalidIndex, "Unable to access invalid index: %d", idx) } idx += len(cur) } ary := make([]*lazyNode, len(cur)-1) copy(ary[0:idx], cur[0:idx]) copy(ary[idx:], cur[idx+1:]) *d = ary return nil } func (p Patch) add(doc *container, op Operation, options *ApplyOptions) error { path, err := op.Path() if err != nil { return errors.Wrapf(ErrMissing, "add operation failed to decode path") } if options.EnsurePathExistsOnAdd { err = ensurePathExists(doc, path, options) if err != nil { return err } } con, key := findObject(doc, path, options) if con == nil { return errors.Wrapf(ErrMissing, "add operation does not apply: doc is missing path: \"%s\"", path) } err = con.add(key, op.value(), options) if err != nil { return errors.Wrapf(err, "error in add for path: '%s'", path) } return nil } // Given a document and a path to a key, walk the path and create all missing elements // creating objects and arrays as needed. func ensurePathExists(pd *container, path string, options *ApplyOptions) error { doc := *pd var err error var arrIndex int split := strings.Split(path, "/") if len(split) < 2 { return nil } parts := split[1:] for pi, part := range parts { // Have we reached the key part of the path? // If yes, we're done. 
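		// (Only the intermediate containers need to be created here; the final
		// path segment is the key that the subsequent add operation will set.)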
		if pi == len(parts)-1 {
			return nil
		}

		target, ok := doc.get(decodePatchKey(part), options)
		if target == nil || ok != nil {
			// If the current container is an array which has fewer elements than our target index,
			// pad the current container with nulls.
			if arrIndex, err = strconv.Atoi(part); err == nil {
				pa, ok := doc.(*partialArray)

				if ok && arrIndex >= len(*pa)+1 {
					// Pad the array with null values up to the required index.
					for i := len(*pa); i <= arrIndex-1; i++ {
						doc.add(strconv.Itoa(i), newLazyNode(newRawMessage(rawJSONNull)), options)
					}
				}
			}

			// Check if the next part is a numeric index.
			// If yes, then create an array, otherwise, create an object.
			if arrIndex, err = strconv.Atoi(parts[pi+1]); err == nil {
				if arrIndex < 0 {
					if !options.SupportNegativeIndices {
						return errors.Wrapf(ErrInvalidIndex, "Unable to ensure path for invalid index: %d", arrIndex)
					}

					if arrIndex < -1 {
						return errors.Wrapf(ErrInvalidIndex, "Unable to ensure path for negative index other than -1: %d", arrIndex)
					}

					arrIndex = 0
				}

				newNode := newLazyNode(newRawMessage(rawJSONArray))
				doc.add(part, newNode, options)
				doc, _ = newNode.intoAry()

				// Pad the new array with null values up to the required index.
				for i := 0; i < arrIndex; i++ {
					doc.add(strconv.Itoa(i), newLazyNode(newRawMessage(rawJSONNull)), options)
				}
			} else {
				newNode := newLazyNode(newRawMessage(rawJSONObject))

				doc.add(part, newNode, options)
				doc, _ = newNode.intoDoc()
			}
		} else {
			if isArray(*target.raw) {
				doc, err = target.intoAry()
				if err != nil {
					return err
				}
			} else {
				doc, err = target.intoDoc()
				if err != nil {
					return err
				}
			}
		}
	}

	return nil
}

func (p Patch) remove(doc *container, op Operation, options *ApplyOptions) error {
	path, err := op.Path()
	if err != nil {
		return errors.Wrapf(ErrMissing, "remove operation failed to decode path")
	}

	con, key := findObject(doc, path, options)

	if con == nil {
		if options.AllowMissingPathOnRemove {
			return nil
		}
		return errors.Wrapf(ErrMissing, "remove operation does not apply: doc is missing path: \"%s\"", path)
	}

	err = con.remove(key, options)
	if err != nil {
		return errors.Wrapf(err, "error in remove for path: '%s'", path)
	}

	return nil
}

func (p Patch) replace(doc *container, op Operation, options *ApplyOptions) error {
	path, err := op.Path()
	if err != nil {
		return errors.Wrapf(err, "replace operation failed to decode path")
	}

	con, key := findObject(doc, path, options)

	if con == nil {
		return errors.Wrapf(ErrMissing, "replace operation does not apply: doc is missing path: %s", path)
	}

	_, ok := con.get(key, options)
	if ok != nil {
		return errors.Wrapf(ErrMissing, "replace operation does not apply: doc is missing key: %s", path)
	}

	err = con.set(key, op.value(), options)
	if err != nil {
		return errors.Wrapf(err, "error in replace for path: '%s'", path)
	}

	return nil
}

func (p Patch) move(doc *container, op Operation, options *ApplyOptions) error {
	from, err := op.From()
	if err != nil {
		return errors.Wrapf(err, "move operation failed to decode from")
	}

	con, key := findObject(doc, from, options)

	if con == nil {
		return errors.Wrapf(ErrMissing, "move operation does not apply: doc is missing from path: %s", from)
	}

	val, err := con.get(key, options)
	if err != nil {
		return errors.Wrapf(err, "error in move for path: '%s'", key)
	}

	err = con.remove(key, options)
	if err != nil {
		return errors.Wrapf(err, "error in move for path: '%s'", key)
	}

	path, err := op.Path()
	if err != nil {
		return errors.Wrapf(err, "move operation failed to decode path")
	}

	con, key = findObject(doc, path, options)

	if con == nil {
		return errors.Wrapf(ErrMissing, "move operation does not apply: doc is missing destination path: %s", path)
	}

	err = con.add(key, val, options)
	if err != nil {
		return errors.Wrapf(err, "error in move for path: '%s'", path)
	}

	return nil
}

func (p Patch) test(doc *container, op Operation, options *ApplyOptions) error {
	path, err := op.Path()
	if err != nil {
		return errors.Wrapf(err, "test operation failed to decode path")
	}

	con, key := findObject(doc, path, options)

	if con == nil {
		return errors.Wrapf(ErrMissing, "test operation does not apply: doc is missing path: %s", path)
	}

	val, err := con.get(key, options)
	if err != nil && errors.Cause(err) != ErrMissing {
		return errors.Wrapf(err, "error in test for path: '%s'", path)
	}

	if val == nil {
		if op.value().raw == nil {
			return nil
		}
		return errors.Wrapf(ErrTestFailed, "testing value %s failed", path)
	} else if op.value() == nil {
		return errors.Wrapf(ErrTestFailed, "testing value %s failed", path)
	}

	if val.equal(op.value()) {
		return nil
	}

	return errors.Wrapf(ErrTestFailed, "testing value %s failed", path)
}

func (p Patch) copy(doc *container, op Operation, accumulatedCopySize *int64, options *ApplyOptions) error {
	from, err := op.From()
	if err != nil {
		return errors.Wrapf(err, "copy operation failed to decode from")
	}

	con, key := findObject(doc, from, options)

	if con == nil {
		return errors.Wrapf(ErrMissing, "copy operation does not apply: doc is missing from path: %s", from)
	}

	val, err := con.get(key, options)
	if err != nil {
		return errors.Wrapf(err, "error in copy for from: '%s'", from)
	}

	path, err := op.Path()
	if err != nil {
		return errors.Wrapf(ErrMissing, "copy operation failed to decode path")
	}

	con, key = findObject(doc, path, options)

	if con == nil {
		return errors.Wrapf(ErrMissing, "copy operation does not apply: doc is missing destination path: %s", path)
	}

	valCopy, sz, err := deepCopy(val)
	if err != nil {
		return errors.Wrapf(err, "error while performing deep copy")
	}

	(*accumulatedCopySize) += int64(sz)
	if options.AccumulatedCopySizeLimit > 0 && *accumulatedCopySize > options.AccumulatedCopySizeLimit {
		return NewAccumulatedCopySizeError(options.AccumulatedCopySizeLimit, *accumulatedCopySize)
	}

	err = con.add(key, valCopy, options)
	if err != nil {
		return errors.Wrapf(err, "error while adding value during copy")
	}

	return nil
}

// Equal indicates whether two JSON documents are structurally equal.
func Equal(a, b []byte) bool {
	la := newLazyNode(newRawMessage(a))
	lb := newLazyNode(newRawMessage(b))
	return la.equal(lb)
}

// DecodePatch decodes the passed JSON document as an RFC 6902 patch.
func DecodePatch(buf []byte) (Patch, error) {
	var p Patch

	err := json.Unmarshal(buf, &p)
	if err != nil {
		return nil, err
	}

	return p, nil
}

// Apply mutates a JSON document according to the patch, and returns the new
// document.
func (p Patch) Apply(doc []byte) ([]byte, error) {
	return p.ApplyWithOptions(doc, NewApplyOptions())
}

// ApplyWithOptions mutates a JSON document according to the patch and the passed in ApplyOptions.
// It returns the new document.
func (p Patch) ApplyWithOptions(doc []byte, options *ApplyOptions) ([]byte, error) {
	return p.ApplyIndentWithOptions(doc, "", options)
}

// ApplyIndent mutates a JSON document according to the patch, and returns the new
// document indented.
func (p Patch) ApplyIndent(doc []byte, indent string) ([]byte, error) {
	return p.ApplyIndentWithOptions(doc, indent, NewApplyOptions())
}

// ApplyIndentWithOptions mutates a JSON document according to the patch and the passed in ApplyOptions.
// It returns the new document indented.
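//
// A minimal usage sketch (illustrative only; the documents shown here are
// assumptions for the example, not output captured from this library):
//
//	patch, _ := DecodePatch([]byte(`[{"op": "add", "path": "/baz", "value": "qux"}]`))
//	out, _ := patch.ApplyIndentWithOptions([]byte(`{"foo": "bar"}`), "  ", NewApplyOptions())
//	// out now holds the patched document, indented with two spaces.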
func (p Patch) ApplyIndentWithOptions(doc []byte, indent string, options *ApplyOptions) ([]byte, error) { var pd container if doc[0] == '[' { pd = &partialArray{} } else { pd = &partialDoc{} } err := json.Unmarshal(doc, pd) if err != nil { return nil, err } err = nil var accumulatedCopySize int64 for _, op := range p { switch op.Kind() { case "add": err = p.add(&pd, op, options) case "remove": err = p.remove(&pd, op, options) case "replace": err = p.replace(&pd, op, options) case "move": err = p.move(&pd, op, options) case "test": err = p.test(&pd, op, options) case "copy": err = p.copy(&pd, op, &accumulatedCopySize, options) default: err = fmt.Errorf("Unexpected kind: %s", op.Kind()) } if err != nil { return nil, err } } if indent != "" { return json.MarshalIndent(pd, "", indent) } return json.Marshal(pd) } // From http://tools.ietf.org/html/rfc6901#section-4 : // // Evaluation of each reference token begins by decoding any escaped // character sequence. This is performed by first transforming any // occurrence of the sequence '~1' to '/', and then transforming any // occurrence of the sequence '~0' to '~'. var ( rfc6901Decoder = strings.NewReplacer("~1", "/", "~0", "~") ) func decodePatchKey(k string) string { return rfc6901Decoder.Replace(k) } json-patch-5.2.0/v5/patch_test.go000066400000000000000000000541601377516326600166310ustar00rootroot00000000000000package jsonpatch import ( "bytes" "encoding/json" "fmt" "reflect" "testing" ) func reformatJSON(j string) string { buf := new(bytes.Buffer) json.Indent(buf, []byte(j), "", " ") return buf.String() } func compareJSON(a, b string) bool { // return Equal([]byte(a), []byte(b)) var objA, objB interface{} json.Unmarshal([]byte(a), &objA) json.Unmarshal([]byte(b), &objB) // fmt.Printf("Comparing %#v\nagainst %#v\n", objA, objB) return reflect.DeepEqual(objA, objB) } func applyPatch(doc, patch string) (string, error) { obj, err := DecodePatch([]byte(patch)) if err != nil { panic(err) } out, err := obj.Apply([]byte(doc)) if err != nil { return "", err } return string(out), nil } func applyPatchIndented(doc, patch string) (string, error) { obj, err := DecodePatch([]byte(patch)) if err != nil { panic(err) } out, err := obj.ApplyIndent([]byte(doc), " ") if err != nil { return "", err } return string(out), nil } func applyPatchWithOptions(doc, patch string, options *ApplyOptions) (string, error) { obj, err := DecodePatch([]byte(patch)) if err != nil { panic(err) } out, err := obj.ApplyWithOptions([]byte(doc), options) if err != nil { return "", err } return string(out), nil } type Case struct { doc, patch, result string allowMissingPathOnRemove bool ensurePathExistsOnAdd bool } func repeatedA(r int) string { var s string for i := 0; i < r; i++ { s += "A" } return s } var Cases = []Case{ { `{ "foo": "bar"}`, `[ { "op": "add", "path": "/baz", "value": "qux" } ]`, `{ "baz": "qux", "foo": "bar" }`, false, false, }, { `{ "foo": [ "bar", "baz" ] }`, `[ { "op": "add", "path": "/foo/1", "value": "qux" } ]`, `{ "foo": [ "bar", "qux", "baz" ] }`, false, false, }, { `{ "foo": [ "bar", "baz" ] }`, `[ { "op": "add", "path": "/foo/-1", "value": "qux" } ]`, `{ "foo": [ "bar", "baz", "qux" ] }`, false, false, }, { `{ "baz": "qux", "foo": "bar" }`, `[ { "op": "remove", "path": "/baz" } ]`, `{ "foo": "bar" }`, false, false, }, { `{ "foo": [ "bar", "qux", "baz" ] }`, `[ { "op": "remove", "path": "/foo/1" } ]`, `{ "foo": [ "bar", "baz" ] }`, false, false, }, { `{ "foo": [ "bar", "qux", "baz" ] }`, `[ { "op": "remove", "path": "/foo/-1" } ]`, `{ "foo": [ "bar", "qux" ] 
}`, false, false, }, { `{ "foo": [ "bar", "qux", {"a": "abc", "b": "xyz" } ] }`, `[ { "op": "remove", "path": "/foo/-1/a" } ]`, `{ "foo": [ "bar", "qux", {"b": "xyz" } ] }`, false, false, }, { `{ "baz": "qux", "foo": "bar" }`, `[ { "op": "replace", "path": "/baz", "value": "boo" } ]`, `{ "baz": "boo", "foo": "bar" }`, false, false, }, { `{ "foo": { "bar": "baz", "waldo": "fred" }, "qux": { "corge": "grault" } }`, `[ { "op": "move", "from": "/foo/waldo", "path": "/qux/thud" } ]`, `{ "foo": { "bar": "baz" }, "qux": { "corge": "grault", "thud": "fred" } }`, false, false, }, { `{ "foo": [ "all", "grass", "cows", "eat" ] }`, `[ { "op": "move", "from": "/foo/1", "path": "/foo/3" } ]`, `{ "foo": [ "all", "cows", "eat", "grass" ] }`, false, false, }, { `{ "foo": [ "all", "grass", "cows", "eat" ] }`, `[ { "op": "move", "from": "/foo/1", "path": "/foo/2" } ]`, `{ "foo": [ "all", "cows", "grass", "eat" ] }`, false, false, }, { `{ "foo": "bar" }`, `[ { "op": "add", "path": "/child", "value": { "grandchild": { } } } ]`, `{ "foo": "bar", "child": { "grandchild": { } } }`, false, false, }, { `{ "foo": ["bar"] }`, `[ { "op": "add", "path": "/foo/-", "value": ["abc", "def"] } ]`, `{ "foo": ["bar", ["abc", "def"]] }`, false, false, }, { `{ "foo": "bar", "qux": { "baz": 1, "bar": null } }`, `[ { "op": "remove", "path": "/qux/bar" } ]`, `{ "foo": "bar", "qux": { "baz": 1 } }`, false, false, }, { `{ "foo": "bar" }`, `[ { "op": "add", "path": "/baz", "value": null } ]`, `{ "baz": null, "foo": "bar" }`, false, false, }, { `{ "foo": ["bar"]}`, `[ { "op": "replace", "path": "/foo/0", "value": "baz"}]`, `{ "foo": ["baz"]}`, false, false, }, { `{ "foo": ["bar"]}`, `[ { "op": "replace", "path": "/foo/-1", "value": "baz"}]`, `{ "foo": ["baz"]}`, false, false, }, { `{ "foo": [{"bar": "x"}]}`, `[ { "op": "replace", "path": "/foo/-1/bar", "value": "baz"}]`, `{ "foo": [{"bar": "baz"}]}`, false, false, }, { `{ "foo": ["bar","baz"]}`, `[ { "op": "replace", "path": "/foo/0", "value": "bum"}]`, `{ "foo": ["bum","baz"]}`, false, false, }, { `{ "foo": ["bar","qux","baz"]}`, `[ { "op": "replace", "path": "/foo/1", "value": "bum"}]`, `{ "foo": ["bar", "bum","baz"]}`, false, false, }, { `[ {"foo": ["bar","qux","baz"]}]`, `[ { "op": "replace", "path": "/0/foo/0", "value": "bum"}]`, `[ {"foo": ["bum","qux","baz"]}]`, false, false, }, { `[ {"foo": ["bar","qux","baz"], "bar": ["qux","baz"]}]`, `[ { "op": "copy", "from": "/0/foo/0", "path": "/0/bar/0"}]`, `[ {"foo": ["bar","qux","baz"], "bar": ["bar", "qux", "baz"]}]`, false, false, }, { `[ {"foo": ["bar","qux","baz"], "bar": ["qux","baz"]}]`, `[ { "op": "copy", "from": "/0/foo/0", "path": "/0/bar"}]`, `[ {"foo": ["bar","qux","baz"], "bar": "bar"}]`, false, false, }, { `[ { "foo": {"bar": ["qux","baz"]}, "baz": {"qux": "bum"}}]`, `[ { "op": "copy", "from": "/0/foo/bar", "path": "/0/baz/bar"}]`, `[ { "baz": {"bar": ["qux","baz"], "qux":"bum"}, "foo": {"bar": ["qux","baz"]}}]`, false, false, }, { `{ "foo": ["bar"]}`, `[{"op": "copy", "path": "/foo/0", "from": "/foo"}]`, `{ "foo": [["bar"], "bar"]}`, false, false, }, { `{ "foo": null}`, `[{"op": "copy", "path": "/bar", "from": "/foo"}]`, `{ "foo": null, "bar": null}`, false, false, }, { `{ "foo": ["bar","qux","baz"]}`, `[ { "op": "remove", "path": "/foo/-2"}]`, `{ "foo": ["bar", "baz"]}`, false, false, }, { `{ "foo": []}`, `[ { "op": "add", "path": "/foo/-1", "value": "qux"}]`, `{ "foo": ["qux"]}`, false, false, }, { `{ "bar": [{"baz": null}]}`, `[ { "op": "replace", "path": "/bar/0/baz", "value": 1 } ]`, `{ "bar": [{"baz": 1}]}`, false, 
false, }, { `{ "bar": [{"baz": 1}]}`, `[ { "op": "replace", "path": "/bar/0/baz", "value": null } ]`, `{ "bar": [{"baz": null}]}`, false, false, }, { `{ "bar": [null]}`, `[ { "op": "replace", "path": "/bar/0", "value": 1 } ]`, `{ "bar": [1]}`, false, false, }, { `{ "bar": [1]}`, `[ { "op": "replace", "path": "/bar/0", "value": null } ]`, `{ "bar": [null]}`, false, false, }, { fmt.Sprintf(`{ "foo": ["A", %q] }`, repeatedA(48)), // The wrapping quotes around 'A's are included in the copy // size, so each copy operation increases the size by 50 bytes. `[ { "op": "copy", "path": "/foo/-", "from": "/foo/1" }, { "op": "copy", "path": "/foo/-", "from": "/foo/1" }]`, fmt.Sprintf(`{ "foo": ["A", %q, %q, %q] }`, repeatedA(48), repeatedA(48), repeatedA(48)), false, false, }, { `[1, 2, 3]`, `[ { "op": "remove", "path": "/0" } ]`, `[2, 3]`, false, false, }, { `{ "a": { "b": { "d": 1 } } }`, `[ { "op": "remove", "path": "/a/b/c" } ]`, `{ "a": { "b": { "d": 1 } } }`, true, false, }, { `{ "a": { "b": { "d": 1 } } }`, `[ { "op": "remove", "path": "/x/y/z" } ]`, `{ "a": { "b": { "d": 1 } } }`, true, false, }, { `[1, 2, 3]`, `[ { "op": "remove", "path": "/10" } ]`, `[1, 2, 3]`, true, false, }, { `[1, 2, 3]`, `[ { "op": "remove", "path": "/10/x/y/z" } ]`, `[1, 2, 3]`, true, false, }, { `[1, 2, 3]`, `[ { "op": "remove", "path": "/-10" } ]`, `[1, 2, 3]`, true, false, }, { `{}`, `[ { "op": "add", "path": "/a", "value": "hello" } ]`, `{"a": "hello" }`, false, true, }, { `{}`, `[ { "op": "add", "path": "/a/b", "value": "hello" } ]`, `{"a": {"b": "hello" } }`, false, true, }, { `{}`, `[ { "op": "add", "path": "/a/b/c", "value": "hello" } ]`, `{"a": {"b": {"c": "hello" } } }`, false, true, }, { `{"a": {} }`, `[ { "op": "add", "path": "/a/b/c", "value": "hello" } ]`, `{"a": {"b": {"c": "hello" } } }`, false, true, }, { `{"a": {} }`, `[ { "op": "add", "path": "/x/y/z", "value": "hello" } ]`, `{"a": {}, "x" : {"y": {"z": "hello" } } }`, false, true, }, { `{}`, `[ { "op": "add", "path": "/a/0/b", "value": "hello" } ]`, `{"a": [{"b": "hello"}] }`, false, true, }, { `{}`, `[ { "op": "add", "path": "/a/b/0", "value": "hello" } ]`, `{"a": {"b": ["hello"] } }`, false, true, }, { `{}`, `[ { "op": "add", "path": "/a/b/-1", "value": "hello" } ]`, `{"a": {"b": ["hello"] } }`, false, true, }, { `{}`, `[ { "op": "add", "path": "/a/b/-1/c", "value": "hello" } ]`, `{"a": {"b": [ { "c": "hello" } ] } }`, false, true, }, { `{"a": {"b": [ { "c": "whatever" } ] } }`, `[ { "op": "add", "path": "/a/b/-1/c", "value": "hello" } ]`, `{"a": {"b": [ { "c": "hello" } ] } }`, false, true, }, { `{}`, `[ { "op": "add", "path": "/a/b/3", "value": "hello" } ]`, `{"a": {"b": [null, null, null, "hello"] } }`, false, true, }, { `{"a": []}`, `[ { "op": "add", "path": "/a/-1", "value": "hello" } ]`, `{"a": ["hello"]}`, false, true, }, { `{}`, `[ { "op": "add", "path": "/a/0/0", "value": "hello" } ]`, `{"a": [["hello"]]}`, false, true, }, { `{"a": [{}]}`, `[ { "op": "add", "path": "/a/-1/b/c", "value": "hello" } ]`, `{"a": [{"b": {"c": "hello"}}]}`, false, true, }, { `{"a": [{"b": "whatever"}]}`, `[ { "op": "add", "path": "/a/2/b/c", "value": "hello" } ]`, `{"a": [{"b": "whatever"}, null, {"b": {"c": "hello"}}]}`, false, true, }, { `{"a": [{"b": "whatever"}]}`, `[ { "op": "add", "path": "/a/1/b/c", "value": "hello" } ]`, `{"a": [{"b": "whatever"}, {"b": {"c": "hello"}}]}`, false, true, }, } type BadCase struct { doc, patch string } var MutationTestCases = []BadCase{ { `{ "foo": "bar", "qux": { "baz": 1, "bar": null } }`, `[ { "op": "remove", "path": 
"/qux/bar" } ]`, }, { `{ "foo": "bar", "qux": { "baz": 1, "bar": null } }`, `[ { "op": "replace", "path": "/qux/baz", "value": null } ]`, }, } var BadCases = []BadCase{ { `{ "foo": "bar" }`, `[ { "op": "add", "path": "/baz/bat", "value": "qux" } ]`, }, { `{ "a": { "b": { "d": 1 } } }`, `[ { "op": "remove", "path": "/a/b/c" } ]`, }, { `{ "a": { "b": { "d": 1 } } }`, `[ { "op": "move", "from": "/a/b/c", "path": "/a/b/e" } ]`, }, { `{ "a": { "b": [1] } }`, `[ { "op": "remove", "path": "/a/b/1" } ]`, }, { `{ "a": { "b": [1] } }`, `[ { "op": "move", "from": "/a/b/1", "path": "/a/b/2" } ]`, }, { `{ "foo": "bar" }`, `[ { "op": "add", "pathz": "/baz", "value": "qux" } ]`, }, { `{ "foo": "bar" }`, `[ { "op": "add", "path": "", "value": "qux" } ]`, }, { `{ "foo": ["bar","baz"]}`, `[ { "op": "replace", "path": "/foo/2", "value": "bum"}]`, }, { `{ "foo": ["bar","baz"]}`, `[ { "op": "add", "path": "/foo/-4", "value": "bum"}]`, }, { `{ "name":{ "foo": "bat", "qux": "bum"}}`, `[ { "op": "replace", "path": "/foo/bar", "value":"baz"}]`, }, { `{ "foo": ["bar"]}`, `[ {"op": "add", "path": "/foo/2", "value": "bum"}]`, }, { `{ "foo": []}`, `[ {"op": "remove", "path": "/foo/-"}]`, }, { `{ "foo": []}`, `[ {"op": "remove", "path": "/foo/-1"}]`, }, { `{ "foo": ["bar"]}`, `[ {"op": "remove", "path": "/foo/-2"}]`, }, { `{}`, `[ {"op":null,"path":""} ]`, }, { `{}`, `[ {"op":"add","path":null} ]`, }, { `{}`, `[ { "op": "copy", "from": null }]`, }, { `{ "foo": ["bar"]}`, `[{"op": "copy", "path": "/foo/6666666666", "from": "/"}]`, }, // Can't copy into an index greater than the size of the array { `{ "foo": ["bar"]}`, `[{"op": "copy", "path": "/foo/2", "from": "/foo/0"}]`, }, // Accumulated copy size cannot exceed AccumulatedCopySizeLimit. { fmt.Sprintf(`{ "foo": ["A", %q] }`, repeatedA(49)), // The wrapping quotes around 'A's are included in the copy // size, so each copy operation increases the size by 51 bytes. `[ { "op": "copy", "path": "/foo/-", "from": "/foo/1" }, { "op": "copy", "path": "/foo/-", "from": "/foo/1" }]`, }, // Can't move into an index greater than or equal to the size of the array { `{ "foo": [ "all", "grass", "cows", "eat" ] }`, `[ { "op": "move", "from": "/foo/1", "path": "/foo/4" } ]`, }, { `{ "baz": "qux" }`, `[ { "op": "replace", "path": "/foo", "value": "bar" } ]`, }, // Can't copy from non-existent "from" key. { `{ "foo": "bar"}`, `[{"op": "copy", "path": "/qux", "from": "/baz"}]`, }, } // This is not thread safe, so we cannot run patch tests in parallel. func configureGlobals(accumulatedCopySizeLimit int64) func() { oldAccumulatedCopySizeLimit := AccumulatedCopySizeLimit AccumulatedCopySizeLimit = accumulatedCopySizeLimit return func() { AccumulatedCopySizeLimit = oldAccumulatedCopySizeLimit } } func TestAllCases(t *testing.T) { defer configureGlobals(int64(100))() // Test patch.Apply happy-path cases. for i, c := range Cases { t.Run(fmt.Sprintf("Case %d", i), func(t *testing.T) { if !c.allowMissingPathOnRemove && !c.ensurePathExistsOnAdd { out, err := applyPatch(c.doc, c.patch) if err != nil { t.Errorf("Unable to apply patch: %s", err) } if !compareJSON(out, c.result) { t.Errorf("Patch did not apply. Expected:\n%s\n\nActual:\n%s", reformatJSON(c.result), reformatJSON(out)) } } }) } // Test patch.ApplyWithOptions happy-path cases. 
options := NewApplyOptions() for _, c := range Cases { options.AllowMissingPathOnRemove = c.allowMissingPathOnRemove options.EnsurePathExistsOnAdd = c.ensurePathExistsOnAdd out, err := applyPatchWithOptions(c.doc, c.patch, options) if err != nil { t.Errorf("Unable to apply patch: %s", err) } if !compareJSON(out, c.result) { t.Errorf("Patch did not apply. Expected:\n%s\n\nActual:\n%s", reformatJSON(c.result), reformatJSON(out)) } } for _, c := range MutationTestCases { out, err := applyPatch(c.doc, c.patch) if err != nil { t.Errorf("Unable to apply patch: %s", err) } if compareJSON(out, c.doc) { t.Errorf("Patch did not apply. Original:\n%s\n\nPatched:\n%s", reformatJSON(c.doc), reformatJSON(out)) } } for _, c := range BadCases { _, err := applyPatch(c.doc, c.patch) if err == nil { t.Errorf("Patch %q should have failed to apply but it did not", c.patch) } } } type TestCase struct { doc, patch string result bool failedPath string } var TestCases = []TestCase{ { `{ "baz": "qux", "foo": [ "a", 2, "c" ] }`, `[ { "op": "test", "path": "/baz", "value": "qux" }, { "op": "test", "path": "/foo/1", "value": 2 } ]`, true, "", }, { `{ "baz": "qux" }`, `[ { "op": "test", "path": "/baz", "value": "bar" } ]`, false, "/baz", }, { `{ "baz": "qux", "foo": ["a", 2, "c"] }`, `[ { "op": "test", "path": "/baz", "value": "qux" }, { "op": "test", "path": "/foo/1", "value": "c" } ]`, false, "/foo/1", }, { `{ "baz": "qux" }`, `[ { "op": "test", "path": "/foo", "value": 42 } ]`, false, "/foo", }, { `{ "baz": "qux" }`, `[ { "op": "test", "path": "/foo", "value": null } ]`, true, "", }, { `{ "foo": null }`, `[ { "op": "test", "path": "/foo", "value": null } ]`, true, "", }, { `{ "foo": {} }`, `[ { "op": "test", "path": "/foo", "value": null } ]`, false, "/foo", }, { `{ "foo": [] }`, `[ { "op": "test", "path": "/foo", "value": null } ]`, false, "/foo", }, { `{ "baz/foo": "qux" }`, `[ { "op": "test", "path": "/baz~1foo", "value": "qux"} ]`, true, "", }, { `{ "foo": [] }`, `[ { "op": "test", "path": "/foo"} ]`, false, "/foo", }, { `{ "foo": "bar" }`, `[ { "op": "test", "path": "/baz", "value": "bar" } ]`, false, "/baz", }, { `{ "foo": "bar" }`, `[ { "op": "test", "path": "/baz", "value": null } ]`, true, "/baz", }, } func TestAllTest(t *testing.T) { for _, c := range TestCases { _, err := applyPatch(c.doc, c.patch) if c.result && err != nil { t.Errorf("Testing failed when it should have passed: %s", err) } else if !c.result && err == nil { t.Errorf("Testing passed when it should have failed: %s", err) } else if !c.result { expected := fmt.Sprintf("testing value %s failed: test failed", c.failedPath) if err.Error() != expected { t.Errorf("Testing failed as expected but invalid message: expected [%s], got [%s]", expected, err) } } } } func TestAdd(t *testing.T) { testCases := []struct { name string key string val lazyNode arr partialArray rejectNegativeIndicies bool err string }{ { name: "should work", key: "0", val: lazyNode{}, arr: partialArray{}, }, { name: "index too large", key: "1", val: lazyNode{}, arr: partialArray{}, err: "Unable to access invalid index: 1: invalid index referenced", }, { name: "negative should work", key: "-1", val: lazyNode{}, arr: partialArray{}, }, { name: "negative too small", key: "-2", val: lazyNode{}, arr: partialArray{}, err: "Unable to access invalid index: -2: invalid index referenced", }, { name: "negative but negative disabled", key: "-1", val: lazyNode{}, arr: partialArray{}, rejectNegativeIndicies: true, err: "Unable to access invalid index: -1: invalid index referenced", }, } options 
:= NewApplyOptions() for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { key := tc.key arr := &tc.arr val := &tc.val options.SupportNegativeIndices = !tc.rejectNegativeIndicies err := arr.add(key, val, options) if err == nil && tc.err != "" { t.Errorf("Expected error but got none! %v", tc.err) } else if err != nil && tc.err == "" { t.Errorf("Did not expect error but go: %v", err) } else if err != nil && err.Error() != tc.err { t.Errorf("Expected error %v but got error %v", tc.err, err) } }) } } type EqualityCase struct { name string a, b string equal bool } var EqualityCases = []EqualityCase{ { "ExtraKeyFalse", `{"foo": "bar"}`, `{"foo": "bar", "baz": "qux"}`, false, }, { "StripWhitespaceTrue", `{ "foo": "bar", "baz": "qux" }`, `{"foo": "bar", "baz": "qux"}`, true, }, { "KeysOutOfOrderTrue", `{ "baz": "qux", "foo": "bar" }`, `{"foo": "bar", "baz": "qux"}`, true, }, { "ComparingNullFalse", `{"foo": null}`, `{"foo": "bar"}`, false, }, { "ComparingNullTrue", `{"foo": null}`, `{"foo": null}`, true, }, { "ArrayOutOfOrderFalse", `["foo", "bar", "baz"]`, `["bar", "baz", "foo"]`, false, }, { "ArrayTrue", `["foo", "bar", "baz"]`, `["foo", "bar", "baz"]`, true, }, { "NonStringTypesTrue", `{"int": 6, "bool": true, "float": 7.0, "string": "the_string", "null": null}`, `{"int": 6, "bool": true, "float": 7.0, "string": "the_string", "null": null}`, true, }, { "NestedNullFalse", `{"foo": ["an", "array"], "bar": {"an": "object"}}`, `{"foo": null, "bar": null}`, false, }, { "NullCompareStringFalse", `"foo"`, `null`, false, }, { "NullCompareIntFalse", `6`, `null`, false, }, { "NullCompareFloatFalse", `6.01`, `null`, false, }, { "NullCompareBoolFalse", `false`, `null`, false, }, } func TestEquality(t *testing.T) { for _, tc := range EqualityCases { t.Run(tc.name, func(t *testing.T) { got := Equal([]byte(tc.a), []byte(tc.b)) if got != tc.equal { t.Errorf("Expected Equal(%s, %s) to return %t, but got %t", tc.a, tc.b, tc.equal, got) } got = Equal([]byte(tc.b), []byte(tc.a)) if got != tc.equal { t.Errorf("Expected Equal(%s, %s) to return %t, but got %t", tc.b, tc.a, tc.equal, got) } }) } } func TestMaintainOrdering(t *testing.T) { cases := []struct { doc string patch string expected string }{ { `{"z":"1","a":["baz"],"y":3,"b":true,"x":null}`, `[{"op": "add", "path": "/foo", "value": "bar"}]`, `{"z":"1","a":["baz"],"y":3,"b":true,"x":null,"foo":"bar"}`, }, { `{"z":"1","a":["baz"],"y":3,"b":true,"x":null}`, `[{"op": "remove", "path": "/y"}]`, `{"z":"1","a":["baz"],"b":true,"x":null}`, }, { `{"z":"1","a":["baz"],"y":3,"b":true,"x":null}`, `[{"op": "move", "from": "/z", "path": "/a/-"},{"op": "remove", "path": "/y"}]`, `{"a":["baz","1"],"b":true,"x":null}`, }, { `{"z":"1","a":["baz"],"y":3,"b":true,"x":null}`, `[ {"op": "add", "path": "/foo", "value": "bar"}, {"op": "replace", "path": "/b", "value": {"zz":1,"aa":"foo","yy":true,"bb":null}}, {"op": "copy", "from": "/foo", "path": "/b/cc"}, {"op": "move", "from": "/z", "path": "/a/0"}, {"op": "remove", "path": "/y"} ]`, `{"a":["1","baz"],"b":{"zz":1,"aa":"foo","yy":true,"bb":null,"cc":"bar"},"x":null,"foo":"bar"}`, }, } for i, c := range cases { t.Run(fmt.Sprintf("case %d", i), func(t *testing.T) { res, err := applyPatch(c.doc, c.patch) if err != nil { t.Errorf("unexpected error: %+v", err) } else if res != c.expected { t.Errorf("expected:\n%s\ngot:\n%s", c.expected, res) } }) } } func TestMaintainOrderingIndented(t *testing.T) { cases := []struct { doc string patch string expected string }{ { `{"z":"1","a":["baz"],"y":3,"b":true,"x":null}`, `[ {"op": 
"add", "path": "/foo", "value": "bar"}, {"op": "replace", "path": "/b", "value": {"zz":1,"aa":"foo","yy":true,"bb":null}}, {"op": "copy", "from": "/foo", "path": "/b/cc"}, {"op": "move", "from": "/z", "path": "/a/0"}, {"op": "remove", "path": "/y"} ]`, `{ "a": [ "1", "baz" ], "b": { "zz": 1, "aa": "foo", "yy": true, "bb": null, "cc": "bar" }, "x": null, "foo": "bar" }`, }, } for i, c := range cases { t.Run(fmt.Sprintf("case %d", i), func(t *testing.T) { res, err := applyPatchIndented(c.doc, c.patch) if err != nil { t.Errorf("unexpected error: %+v", err) } else if res != c.expected { t.Errorf("expected:\n%s\ngot:\n%s", c.expected, res) } }) } }