diff --git a/.travis.yml b/.travis.yml index f3f60f4..5aa5ebf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,14 +4,19 @@ go: - 1.8.x - 1.9.x - 1.10.x - + - 1.11.x notificaitons: email: recipients: danielfireman@gmail.com on_success: change on_failure: always +env: + - GO111MODULE=on + before_install: + - curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh + - dep ensure - go get github.com/mattn/goveralls script: diff --git a/Gopkg.lock b/Gopkg.lock index ee29e16..db26e8f 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -4,8 +4,8 @@ [[projects]] name = "github.com/frictionlessdata/tableschema-go" packages = ["csv","schema","table"] - revision = "de3a51f8487b68142a2826f6ffe521222a342273" - version = "1.1" + revision = "f9bf38926664ea64d9749ee7024987a1e967004d" + version = "v1.4" [[projects]] branch = "master" diff --git a/README.md b/README.md index a18cc17..d721724 100644 --- a/README.md +++ b/README.md @@ -3,21 +3,17 @@ # datapackage-go A Go library for working with [Data Packages](http://specs.frictionlessdata.io/data-package/). 
- - - [datapackage-go](#datapackage-go) - - [Install](#install) - - [Main Features](#main-features) - - [Loading and validating tabular data package descriptors](#loading-and-validating-tabular-data-package-descriptors) - - [Accessing data package resources](#accessing-data-package-resources) - - [Loading zip bundles](#loading-zip-bundles) - - [Creating a zip bundle with the data package.](#creating-a-zip-bundle-with-the-data-package) - - [CSV dialect support](#csv-dialect-support) - - [Loading multipart resources](#loading-multipart-resources) - - [Loading non-tabular resources](#loading-non-tabular-resources) - - [Manipulating data packages programatically](#manipulating-data-packages-programatically) - - + - [Install](#install) + - [Main Features](#main-features) + - [Loading and validating tabular data package descriptors](#loading-and-validating-tabular-data-package-descriptors) + - [Accessing data package resources](#accessing-data-package-resources) + - [Loading zip bundles](#loading-zip-bundles) + - [Creating a zip bundle with the data package.](#creating-a-zip-bundle-with-the-data-package) + - [CSV dialect support](#csv-dialect-support) + - [Loading multipart resources](#loading-multipart-resources) + - [Loading non-tabular resources](#loading-non-tabular-resources) + - [Manipulating data packages programatically](#manipulating-data-packages-programatically) ## Install @@ -95,7 +91,7 @@ fmt.Printf("+v", cities) // [{City:london Year:2017 Population:8780000} {City:paris Year:2017 Population:2240000} {City:rome Year:2017 Population:2860000}] ``` -Finally, if the data is to big to be loaded at once or if you would like to perform line-by-line processing, you could iterate through the resource contents: +If the data is too big to be loaded at once or if you would like to perform line-by-line processing, you could iterate through the resource contents: ```go iter, _ := resource.Iter(csv.LoadHeaders()) @@ -109,6 +105,16 @@ for iter.Next() { // {City:paris 
Year:2017 Population:2240000} // {City:rome Year:2017 Population:2860000}] ``` + +Or you might want to process specific columns, for instance to perform a statistical analysis: + +```go +var population []float64 +resource.CastColumn("population", &population, csv.LoadHeaders()) +fmt.Println(population) +// Output: [8780000 2240000 2860000] +``` + ### Loading zip bundles It is very common to store the data in zip bundles containing the descriptor and data files. Those are natively supported by our the [datapackage.Load](https://godoc.org/github.com/frictionlessdata/datapackage-go/datapackage#Load) method. For example, lets say we have the following `package.zip` bundle: diff --git a/datapackage/resource.go b/datapackage/resource.go index 7b55529..ec3742e 100644 --- a/datapackage/resource.go +++ b/datapackage/resource.go @@ -343,6 +343,25 @@ func (r *Resource) Cast(out interface{}, opts ...csv.CreationOpts) error { return sch.CastTable(tbl, out) } +// CastColumn casts a column from tabular resource contents. +// The out argument must necessarily be the address for a slice. The slice +// may be nil or previously allocated. +func (r *Resource) CastColumn(name string, out interface{}, opts ...csv.CreationOpts) error { + sch, err := r.GetSchema() + if err != nil { + return err + } + tab, err := r.GetTable(opts...) + if err != nil { + return err + } + col, err := tab.ReadColumn(name) + if err != nil { + return err + } + return sch.CastColumn(col, name, out) +} + // NewResourceWithDefaultRegistry creates a new Resource from the passed-in descriptor. // It uses the default registry to validate the resource descriptor. 
func NewResourceWithDefaultRegistry(d map[string]interface{}) (*Resource, error) { diff --git a/datapackage/resource_test.go b/datapackage/resource_test.go index 58e5f6c..24c3034 100644 --- a/datapackage/resource_test.go +++ b/datapackage/resource_test.go @@ -10,9 +10,27 @@ import ( "testing" "github.com/frictionlessdata/datapackage-go/validator" + "github.com/frictionlessdata/tableschema-go/csv" + "github.com/frictionlessdata/tableschema-go/schema" "github.com/matryer/is" ) +func ExampleResource_CastColumn() { + resStr := ` + { + "name": "col", + "data": "name,age\nfoo,42\nbar,84", + "format": "csv", + "profile": "tabular-data-resource", + "schema": {"fields": [{"name": "name", "type": "string"},{"name": "age", "type": "integer"}]} + }` + res, _ := NewResourceFromString(resStr, validator.MustInMemoryRegistry()) + var ages []float64 + res.CastColumn("age", &ages, csv.LoadHeaders()) + fmt.Println(ages) + // Output: [42 84] +} + func TestNewResourceWithDefaultRegistry(t *testing.T) { res, _ := NewResourceWithDefaultRegistry(r1) fmt.Println(res.Name()) @@ -152,9 +170,9 @@ func TestNew(t *testing.T) { map[string]interface{}{"name": "foo", "path": "foo.csv", "schema": ts.URL}, validator.MustInMemoryRegistry(), ) - is.NoErr(err) + is.NoErr(err) // Resource should be created successfully. 
sch, err := r.GetSchema() - is.Equal(sch.Fields[0].Type, "string") + is.Equal(sch.Fields[0].Type, schema.StringType) }) t.Run("InvalidRemote", func(t *testing.T) { _, err := NewResource( @@ -177,7 +195,7 @@ func TestNew(t *testing.T) { ) is.NoErr(err) sch, err := r.GetSchema() - is.Equal(sch.Fields[0].Type, "string") + is.Equal(sch.Fields[0].Type, schema.StringType) }) t.Run("InvalidLocal", func(t *testing.T) { _, err := NewResource( @@ -457,3 +475,65 @@ func TestResource_RawRead(t *testing.T) { is.Equal(string(contents), "{\"foo\":\"1234\"}") }) } + +func TestResource_ReadColumn(t *testing.T) { + resStr := ` + { + "name": "col", + "data": "name,age\nfoo,42\nbar,84", + "format": "csv", + "profile": "tabular-data-resource", + "schema": {"fields": [{"name": "name", "type": "string"},{"name": "age", "type": "integer"}]} + }` + t.Run("Valid", func(t *testing.T) { + is := is.New(t) + res, err := NewResourceFromString(resStr, validator.MustInMemoryRegistry()) + is.NoErr(err) + var ages []float64 + is.NoErr(res.CastColumn("age", &ages, csv.LoadHeaders())) + is.Equal(float64(42), ages[0]) + is.Equal(float64(84), ages[1]) + }) + t.Run("NoSchema", func(t *testing.T) { + res := NewUncheckedResource(map[string]interface{}{}) + var ages []float64 + if res.CastColumn("age", &ages) == nil { + t.Fatal("want:err got:nil") + } + }) + t.Run("NoData", func(t *testing.T) { + res := NewUncheckedResource(map[string]interface{}{ + "schema": map[string]interface{}{}, + }) + var ages []float64 + if res.CastColumn("age", &ages) == nil { + t.Fatal("want:err got:nil") + } + }) + t.Run("HeaderNotFound", func(t *testing.T) { + is := is.New(t) + res, err := NewResourceFromString(resStr, validator.MustInMemoryRegistry()) + is.NoErr(err) + var ages []float64 + if res.CastColumn("foo", &ages) == nil { + t.Fatal("want:err got:nil") + } + }) + t.Run("FieldNotFound", func(t *testing.T) { + is := is.New(t) + resStr := ` + { + "name": "col", + "data": "name,age\nfoo,42\nbar,84", + "format": "csv", + 
"profile": "tabular-data-resource", + "schema": {"fields": [{"name": "name", "type": "string"},{"name": "Age", "type": "integer"}]} + }` + res, err := NewResourceFromString(resStr, validator.MustInMemoryRegistry()) + is.NoErr(err) + var ages []float64 + if res.CastColumn("age", &ages) == nil { + t.Fatal("want:err got:nil") + } + }) +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..a791c29 --- /dev/null +++ b/go.mod @@ -0,0 +1,8 @@ +module github.com/frictionlessdata/datapackage-go + +require ( + github.com/frictionlessdata/tableschema-go v0.1.5-0.20190521014818-f9bf38926664 + github.com/matryer/is v1.2.0 + github.com/santhosh-tekuri/jsonschema v1.2.4 + github.com/satori/go.uuid v1.2.0 +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..52975bf --- /dev/null +++ b/go.sum @@ -0,0 +1,9 @@ +github.com/frictionlessdata/tableschema-go v0.1.5-0.20190521014818-f9bf38926664 h1:IvuZMJ6dH1ye2bWmM8Yla6jj1xIPBR/nZJlm6P4ZSD4= +github.com/frictionlessdata/tableschema-go v0.1.5-0.20190521014818-f9bf38926664/go.mod h1:OfuE6zbfQdlwx5q9vf5XWXEGJ0LYZcd9ML3zme5rP3k= +github.com/matryer/is v0.0.0-20170112134659-c0323ceb4e99/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= +github.com/matryer/is v1.2.0/go.mod h1:2fLPjFQM9rhQ15aVEtbuwhJinnOqrmgXPNdZsdwlWXA= +github.com/santhosh-tekuri/jsonschema v1.2.4 h1:hNhW8e7t+H1vgY+1QeEQpveR6D4+OwKPXCfD2aieJis= +github.com/santhosh-tekuri/jsonschema v1.2.4/go.mod h1:TEAUOeZSmIxTTuHatJzrvARHiuO9LYd+cIxzgEHCQI4= +github.com/satori/go.uuid v1.1.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= diff --git a/vendor/github.com/frictionlessdata/tableschema-go/.gitignore b/vendor/github.com/frictionlessdata/tableschema-go/.gitignore deleted file mode 100644 index 
ae453ff..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/.gitignore +++ /dev/null @@ -1,18 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.dll -*.so -*.dylib - -# Test binary, build with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 -.glide/ - -**/*.coverprofile - -.vscode \ No newline at end of file diff --git a/vendor/github.com/frictionlessdata/tableschema-go/.travis.yml b/vendor/github.com/frictionlessdata/tableschema-go/.travis.yml deleted file mode 100644 index 7e57cab..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/.travis.yml +++ /dev/null @@ -1,18 +0,0 @@ - -language: go -sudo: false -go: - - 1.8.x - - 1.9.x - -notificaitons: - email: - recipients: danielfireman@gmail.com - on_success: change - on_failure: always - -before_install: - - go get github.com/mattn/goveralls - -script: - - $HOME/gopath/bin/goveralls -service=travis-ci -ignore=examples/infer/main.go,examples/validate/main.go diff --git a/vendor/github.com/frictionlessdata/tableschema-go/CONTRIBUTING.md b/vendor/github.com/frictionlessdata/tableschema-go/CONTRIBUTING.md deleted file mode 100644 index 4c797c8..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/CONTRIBUTING.md +++ /dev/null @@ -1,28 +0,0 @@ -# Contributing to tableschema-go - -Found a problem and would like to fix it? Have that great idea and would love to see it done? Let's do it! - -> Please open an issue before start working - -That could save a lot of time from everyone and we are super happy to answer questions and help you alonge the way. - -This project shares Go's code of conduct [values](https://golang.org/conduct#values) and [unwelcomed behavior](https://golang.org/conduct#unwelcome_behavior). Not sure what those mean or why we need those? 
Please give yourself a few minutes to get acquainted to those topics. - -* Before start coding: - * Fork and pull the latest version of the master branch - * Make sure you have go 1.8+ installed and you're using it - -* Requirements - * Compliance with [these guidelines](https://code.google.com/p/go-wiki/wiki/CodeReviewComments) - * Good unit test coverage - * [Good commit messages](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html) - -* Before sending the PR - -```sh -$ cd $GOPATH/src/github.com/frictionlessdata/tableschema-go -$ ./fmt.sh -$ go test ./.. -``` - -If all tests pass, you're ready to send the PR! :D diff --git a/vendor/github.com/frictionlessdata/tableschema-go/Gopkg.lock b/vendor/github.com/frictionlessdata/tableschema-go/Gopkg.lock deleted file mode 100644 index f8842a8..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/Gopkg.lock +++ /dev/null @@ -1,21 +0,0 @@ -# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. - - -[[projects]] - branch = "master" - name = "github.com/matryer/is" - packages = ["."] - revision = "c0323ceb4e996e4a8795670d1fb6f60e65b82fd2" - -[[projects]] - name = "github.com/satori/go.uuid" - packages = ["."] - revision = "879c5887cd475cd7864858769793b2ceb0d44feb" - version = "v1.1.0" - -[solve-meta] - analyzer-name = "dep" - analyzer-version = 1 - inputs-digest = "1ad488c5d7626ee23a29a6f0e4de915d8668ad0eff6659419ef4cf5f02d1a692" - solver-name = "gps-cdcl" - solver-version = 1 diff --git a/vendor/github.com/frictionlessdata/tableschema-go/Gopkg.toml b/vendor/github.com/frictionlessdata/tableschema-go/Gopkg.toml deleted file mode 100644 index 542aa80..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/Gopkg.toml +++ /dev/null @@ -1,26 +0,0 @@ - -# Gopkg.toml example -# -# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md -# for detailed Gopkg.toml documentation. 
-# -# required = ["github.com/user/thing/cmd/thing"] -# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] -# -# [[constraint]] -# name = "github.com/user/project" -# version = "1.0.0" -# -# [[constraint]] -# name = "github.com/user/project2" -# branch = "dev" -# source = "github.com/myfork/project2" -# -# [[override]] -# name = "github.com/x/y" -# version = "2.4.0" - - -[[constraint]] - name = "github.com/satori/go.uuid" - version = "1.1.0" diff --git a/vendor/github.com/frictionlessdata/tableschema-go/LICENSE b/vendor/github.com/frictionlessdata/tableschema-go/LICENSE deleted file mode 100644 index 38351f0..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017 Frictionless Data - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/vendor/github.com/frictionlessdata/tableschema-go/README.md b/vendor/github.com/frictionlessdata/tableschema-go/README.md deleted file mode 100644 index 00949bb..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/README.md +++ /dev/null @@ -1,251 +0,0 @@ -[![Build Status](https://travis-ci.org/frictionlessdata/tableschema-go.svg?branch=master)](https://travis-ci.org/frictionlessdata/tableschema-go) [![Coverage Status](https://coveralls.io/repos/github/frictionlessdata/tableschema-go/badge.svg?branch=master)](https://coveralls.io/github/frictionlessdata/tableschema-go?branch=master) [![Go Report Card](https://goreportcard.com/badge/github.com/frictionlessdata/tableschema-go)](https://goreportcard.com/report/github.com/frictionlessdata/tableschema-go) [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/frictionlessdata/chat) [![GoDoc](https://godoc.org/github.com/frictionlessdata/tableschema-go?status.svg)](https://godoc.org/github.com/frictionlessdata/tableschema-go) - -# tableschema-go - -[Table schema](http://specs.frictionlessdata.io/table-schema/) tooling in Go. - -# Getting started - -## Installation - -This package uses [semantic versioning 2.0.0](http://semver.org/). - -### Using dep - -```sh -$ dep init -$ dep ensure -add github.com/frictionlessdata/tableschema-go/csv@>=0.1 -``` - - -# Main Features - -## Tabular Data Load - -Have tabular data stored in local files? Remote files? Packages like the [csv](https://godoc.org/github.com/frictionlessdata/tableschema-go/csv) are going to help on loading the data you need and making it ready for processing. - -```go -package main - -import "github.com/frictionlessdata/tableschema-go/csv" - -func main() { - tab, err := csv.NewTable(csv.Remote("myremotetable"), csv.LoadHeaders()) - // Error handling. 
-} -``` - -Supported physical representations: - -* [CSV](https://godoc.org/github.com/frictionlessdata/tableschema-go/csv) - -You would like to use tableschema-go but the physical representation you use is not listed here? No problem! Please create an issue before start contributing. We will be happy to help you along the way. - -## Schema Inference and Configuration - -Got that new dataset and wants to start getting your hands dirty ASAP? No problems, let the [schema package](https://github.com/frictionlessdata/tableschema-go/tree/master/schema) try to infer -the data types based on the table data. - -```go -package main - -import ( - "github.com/frictionlessdata/tableschema-go/csv" - "github.com/frictionlessdata/tableschema-go/schema" -) - -func main() { - tab, _ := csv.NewTable(csv.Remote("myremotetable"), csv.LoadHeaders()) - sch, _ := schema.Infer(tab) - fmt.Printf("%+v", sch) -} -``` - -> Want to go faster? Please give [InferImplicitCasting](https://godoc.org/github.com/frictionlessdata/tableschema-go/schema#InferImplicitCasting) a try and let us know how it goes. - -There might be cases in which the inferred schema is not correct. One of those cases is when your data use strings like "N/A" to represent missing cells. That would usually make our inferential algorithm think the field is a string. - -When that happens, you can manually perform those last minutes tweaks [Schema](https://godoc.org/github.com/frictionlessdata/tableschema-go/schema#Schema). 
- -```go - sch.MissingValues = []string{"N/A"} - sch.GetField("ID").Type = schema.IntegerType -``` - -After all that, you could persist your schema to disk: - -```go -sch.SaveToFile("users_schema.json") -``` - -And use the local schema later: - -```go -sch, _ := sch.LoadFromFile("users_schema.json") -``` - -Finally, if your schema is saved remotely, you can also use it: - -```go -sch, _ := schema.LoadRemote("http://myfoobar/users/schema.json") -``` - -## Processing Tabular Data - -Once you have the data, you would like to process using language data types. [schema.CastTable](https://godoc.org/github.com/frictionlessdata/tableschema-go/schema#example-Schema-CastTable) and [schema.CastRow](https://godoc.org/github.com/frictionlessdata/tableschema-go/schema#example-Schema-CastRow) are your friends on this journey. - -```go -package main - -import ( - "github.com/frictionlessdata/tableschema-go/csv" - "github.com/frictionlessdata/tableschema-go/schema" -) - -type user struct { - ID int - Age int - Name string -} - -func main() { - tab, _ := csv.NewTable(csv.FromFile("users.csv"), csv.LoadHeaders()) - sch, _ := schema.Infer(tab) - var users []user - sch.CastTable(tab, &users) - // Users slice contains the table contents properly raw into - // language types. Each row will be a new user appended to the slice. -} -``` - -If you have a lot of data and can no load everything in memory, you can easily iterate trough it: - -```go -... - iter, _ := sch.Iter() - for iter.Next() { - var u user - sch.CastRow(iter.Row(), &u) - // Variable u is now filled with row contents properly raw - // to language types. - } -... -``` - -> Even better if you could do it regardless the physical representation! 
The [table](https://godoc.org/github.com/frictionlessdata/tableschema-go/table) package declares some interfaces that will help you to achieve this goal: - -* [Table](https://godoc.org/github.com/frictionlessdata/tableschema-go/table#Table) -* [Iterator](https://godoc.org/github.com/frictionlessdata/tableschema-go/table#Iterator) - -### Field - -Class represents field in the schema. - -For example, data values can be castd to native Go types. Decoding a value will check if the value is of the expected type, is in the correct format, and complies with any constraints imposed by a schema. - -```javascript -{ - 'name': 'birthday', - 'type': 'date', - 'format': 'default', - 'constraints': { - 'required': True, - 'minimum': '2015-05-30' - } -} -``` - -The following example will raise exception the passed-in is less than allowed by `minimum` constraints of the field. `Errors` will be returned as well when the user tries to cast values which are not well formatted dates. - -```go -date, err := field.Cast("2014-05-29") -// uh oh, something went wrong -``` - -Values that can't be castd will return an `error`. -Casting a value that doesn't meet the constraints will return an `error`. - -Available types, formats and resultant value of the cast: - -| Type | Formats | Casting result | -| ---- | ------- | -------------- | -| any | default | interface{} | -| object | default | interface{} | -| array | default | []interface{} | -| boolean | default | bool | -| duration | default | time.Time | -| geopoint | default, array, object | [float64, float64] | -| integer | default | int64 | -| number | default | float64 | -| string | default, uri, email, binary | string | -| date | default, any, | time.Time | -| datetime | default, any, | time.Time | -| time | default, any, | time.Time | -| year | default | time.Time | -| yearmonth | default | time.Time | - -## Saving Tabular Data - -Once you're done processing the data, it is time to persist results. 
As an example, let us assume we have a remote table schema called `summary`, which contains two fields: - -* `Date`: of type [date](https://specs.frictionlessdata.io/table-schema/#date) -* `AverageAge`: of type [number](https://specs.frictionlessdata.io/table-schema/#number) - - -```go -import ( - "github.com/frictionlessdata/tableschema-go/csv" - "github.com/frictionlessdata/tableschema-go/schema" -) - - -type summaryEntry struct { - Date time.Time - AverageAge float64 -} - -func WriteSummary(summary []summaryEntry, path string) { - sch, _ := schema.LoadRemote("http://myfoobar/users/summary/schema.json") - - f, _ := os.Create(path) - defer f.Close() - - w := csv.NewWriter(f) - defer w.Flush() - - w.Write([]string{"Date", "AverageAge"}) - for _, summ := range summary{ - row, _ := sch.UncastRow(summ) - w.Write(row) - } -} -``` - -# API Reference and More Examples - -More detailed documentation about API methods and plenty of examples is available at [https://godoc.org/github.com/frictionlessdata/tableschema-go](https://godoc.org/github.com/frictionlessdata/tableschema-go) - -# Contributing - -Found a problem and would like to fix it? Have that great idea and would love to see it in the repository? - -> Please open an issue before start working - -That could save a lot of time from everyone and we are super happy to answer questions and help you alonge the way. Furthermore, feel free to join [frictionlessdata Gitter chat room](https://gitter.im/frictionlessdata/chat) and ask questions. - -This project follows the [Open Knowledge International coding standards](https://github.com/okfn/coding-standards) - -* Before start coding: - * Fork and pull the latest version of the master branch - * Make sure you have go 1.8+ installed and you're using it - * Make sure you [dep](https://github.com/golang/dep) installed - -* Before sending the PR: - -```sh -$ cd $GOPATH/src/github.com/frictionlessdata/tableschema-go -$ dep ensure -$ go test ./.. 
-``` - -And make sure your all tests pass. diff --git a/vendor/github.com/frictionlessdata/tableschema-go/csv/iterator_test.go b/vendor/github.com/frictionlessdata/tableschema-go/csv/iterator_test.go deleted file mode 100644 index 9eb352a..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/csv/iterator_test.go +++ /dev/null @@ -1,55 +0,0 @@ -package csv - -import ( - "testing" - - "github.com/matryer/is" -) - -type iterTestValue struct { - Name string -} - -const ( - dontSkipHeaders = false - skipHeaders = true -) - -func TestNewIterator(t *testing.T) { - t.Run("EmptyString", func(t *testing.T) { - is := is.New(t) - iter := newIterator(stringReadCloser(""), defaultDialect, dontSkipHeaders) - is.True(!iter.Next()) // more iterations than it should - is.NoErr(iter.Err()) - }) -} - -func TestIterator_Next(t *testing.T) { - t.Run("TwoRows", func(t *testing.T) { - is := is.New(t) - iter := newIterator(stringReadCloser("foo\nbar"), defaultDialect, dontSkipHeaders) - is.True(iter.Next()) // want two more iterations - is.True(iter.Next()) // want one more interation - is.True(!iter.Next()) // more iterations than it should - is.NoErr(iter.Err()) - }) - t.Run("TwoRowsSkipHeaders", func(t *testing.T) { - is := is.New(t) - iter := newIterator(stringReadCloser("name\nbar"), defaultDialect, skipHeaders) - is.True(iter.Next()) // want one interation - is.True(!iter.Next()) // more iterations than it should - is.NoErr(iter.Err()) - }) -} - -func TestIterator_Row(t *testing.T) { - t.Run("OneRow", func(t *testing.T) { - is := is.New(t) - iter := newIterator(stringReadCloser("name"), defaultDialect, dontSkipHeaders) - is.True(iter.Next()) // want one iteration - - got := iter.Row() - want := []string{"name"} - is.Equal(want, got) - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/csv/table.go b/vendor/github.com/frictionlessdata/tableschema-go/csv/table.go deleted file mode 100644 index b7fe29f..0000000 --- 
a/vendor/github.com/frictionlessdata/tableschema-go/csv/table.go +++ /dev/null @@ -1,262 +0,0 @@ -package csv - -import ( - "bytes" - "encoding/csv" - "fmt" - "io" - "io/ioutil" - "net/http" - "os" - "strings" - "sync" - "time" - - "github.com/frictionlessdata/tableschema-go/table" -) - -// Table represents a Table backed by a CSV physical representation. -type Table struct { - headers []string - source Source - skipHeaders bool - dialect dialect -} - -// dialect represents CSV dialect configuration options. -// http://frictionlessdata.io/specs/csv-dialect/ -type dialect struct { - // Delimiter specifies the character sequence which should separate fields (aka columns). - delimiter rune - // Specifies how to interpret whitespace which immediately follows a delimiter; - // if false, it means that whitespace immediately after a delimiter should be treated as part of the following field. - skipInitialSpace bool -} - -var defaultDialect = dialect{ - delimiter: ',', - skipInitialSpace: true, -} - -// NewTable creates a table.Table from the CSV table physical representation. -// CreationOpts are executed in the order they are declared. -// If a dialect is not configured via SetDialect, DefautltDialect is used. -func NewTable(source Source, opts ...CreationOpts) (*Table, error) { - t := Table{source: source, dialect: defaultDialect} - for _, opt := range opts { - if err := opt(&t); err != nil { - return nil, err - } - } - return &t, nil -} - -// Iter returns an Iterator to read the table. Iter returns an error -// if the table physical source can not be iterated. -// The iteration process always start at the beginning of the CSV and -// is backed by a new reading. -func (table *Table) Iter() (table.Iterator, error) { - src, err := table.source() - if err != nil { - return nil, err - } - return newIterator(src, table.dialect, table.skipHeaders), nil -} - -// ReadAll reads all rows from the table and return it as strings. 
-func (table *Table) ReadAll() ([][]string, error) { - var r [][]string - iter, err := table.Iter() - if err != nil { - return nil, err - } - defer iter.Close() - for iter.Next() { - r = append(r, iter.Row()) - } - return r, nil -} - -// Headers returns the headers of the tabular data. -func (table *Table) Headers() []string { - return table.headers -} - -// String returns a string version of the table. -func (table *Table) String() string { - var buf bytes.Buffer - w := csv.NewWriter(&buf) - rows, err := table.ReadAll() - if err != nil { - return "" - } - w.WriteAll(rows) - return buf.String() -} - -func newIterator(source io.ReadCloser, dialect dialect, skipHeaders bool) *csvIterator { - r := csv.NewReader(source) - r.Comma = dialect.delimiter - r.TrimLeadingSpace = dialect.skipInitialSpace - return &csvIterator{ - source: source, - reader: r, - skipHeaders: skipHeaders, - } -} - -type csvIterator struct { - reader *csv.Reader - source io.ReadCloser - - current []string - err error - skipHeaders bool -} - -func (i *csvIterator) Next() bool { - if i.err != nil { - return false - } - var err error - i.current, err = i.reader.Read() - if err != io.EOF { - i.err = err - } - if i.skipHeaders { - i.skipHeaders = false - i.Next() - } - return err == nil -} - -func (i *csvIterator) Row() []string { - return i.current -} - -func (i *csvIterator) Err() error { - return i.err -} - -func (i *csvIterator) Close() error { - return i.source.Close() -} - -// CreationOpts defines functional options for creating Tables. -type CreationOpts func(t *Table) error - -// Source defines a table physical data source. -type Source func() (io.ReadCloser, error) - -// FromFile defines a file-based Source. 
-func FromFile(path string) Source { - return func() (io.ReadCloser, error) { - f, err := os.Open(path) - if err != nil { - return nil, err - } - return f, nil - } -} - -var ( - httpClient *http.Client - once sync.Once -) - -const remoteFetchTimeoutSecs = 15 - -// Remote fetches the source schema from a remote URL. -func Remote(url string) Source { - return func() (io.ReadCloser, error) { - once.Do(func() { - httpClient = &http.Client{ - Timeout: remoteFetchTimeoutSecs * time.Second, - } - }) - resp, err := httpClient.Get(url) - if err != nil { - return nil, err - } - defer resp.Body.Close() - body, err := ioutil.ReadAll(resp.Body) - return stringReadCloser(string(body)), nil - } -} - -// FromString defines a string-based source. -func FromString(str string) Source { - return func() (io.ReadCloser, error) { - return stringReadCloser(str), nil - } -} - -func stringReadCloser(s string) io.ReadCloser { - return ioutil.NopCloser(strings.NewReader(s)) -} - -func errorSource() Source { - return func() (io.ReadCloser, error) { - return nil, fmt.Errorf("error source") - } -} - -// LoadHeaders uses the first line of the CSV as table headers. -// The header line will be skipped during iteration -func LoadHeaders() CreationOpts { - return func(reader *Table) error { - reader.skipHeaders = false - iter, err := reader.Iter() - if err != nil { - return err - } - if iter.Next() { - reader.headers = iter.Row() - } - reader.skipHeaders = true - return nil - } -} - -// SetHeaders sets the table headers. -func SetHeaders(headers ...string) CreationOpts { - return func(reader *Table) error { - reader.headers = headers - return nil - } -} - -// Delimiter specifies the character sequence which should separate fields (aka columns). -func Delimiter(d rune) CreationOpts { - return func(t *Table) error { - t.dialect.delimiter = d - return nil - } -} - -// ConsiderInitialSpace configures the CSV parser to treat the whitespace immediately after a delimiter as part of the following field. 
-func ConsiderInitialSpace() CreationOpts { - return func(t *Table) error { - t.dialect.skipInitialSpace = false - return nil - } -} - -func errorOpts(headers ...string) CreationOpts { - return func(_ *Table) error { - return fmt.Errorf("error opts") - } -} - -// NewWriter creates a writer which appends records to a CSV raw file. -// -// As returned by NewWriter, a csv.Writer writes records terminated by a -// newline and uses ',' as the field delimiter. The exported fields can be -// changed to customize the details before the first call to Write or WriteAll. -// -// Comma is the field delimiter. -// -// If UseCRLF is true, the csv.Writer ends each record with \r\n instead of \n. -func NewWriter(w io.Writer) *csv.Writer { - return csv.NewWriter(w) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/csv/table_test.go b/vendor/github.com/frictionlessdata/tableschema-go/csv/table_test.go deleted file mode 100644 index 309e303..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/csv/table_test.go +++ /dev/null @@ -1,135 +0,0 @@ -package csv - -import ( - "bytes" - "fmt" - "net/http" - "net/http/httptest" - "testing" - - "github.com/matryer/is" -) - -type csvRow struct { - Name string -} - -func ExampleTable_Iter() { - table, _ := NewTable(FromString("\"name\"\nfoo\nbar"), LoadHeaders()) - iter, _ := table.Iter() - defer iter.Close() - for iter.Next() { - fmt.Println(iter.Row()) - } - // Output:[foo] - // [bar] -} - -func ExampleTable_ReadAll() { - table, _ := NewTable(FromString("\"name\"\nfoo\nbar"), LoadHeaders()) - rows, _ := table.ReadAll() - fmt.Print(rows) - // Output:[[foo] [bar]] -} - -func ExampleNewWriter() { - var buf bytes.Buffer - w := NewWriter(&buf) - w.Write([]string{"foo", "bar"}) - w.Flush() - fmt.Println(buf.String()) - // Output:foo,bar -} - -func TestRemote(t *testing.T) { - is := is.New(t) - h := func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintf(w, "\"name\"\nfoo\nbar") - } - ts := 
httptest.NewServer(http.HandlerFunc(h)) - defer ts.Close() - table, _ := NewTable(Remote(ts.URL), LoadHeaders()) - got, _ := table.ReadAll() - want := [][]string{{"foo"}, {"bar"}} - is.Equal(want, got) - - t.Run("Error", func(t *testing.T) { - is := is.New(t) - _, err := NewTable(Remote("invalidURL"), LoadHeaders()) - is.True(err != nil) - }) -} - -func TestLoadHeaders(t *testing.T) { - t.Run("EmptyString", func(t *testing.T) { - is := is.New(t) - table, err := NewTable(FromString(""), LoadHeaders()) - is.NoErr(err) - is.Equal(len(table.Headers()), 0) - }) - t.Run("SimpleCase", func(t *testing.T) { - is := is.New(t) - in := `"name" -"bar"` - table, err := NewTable(FromString(in), LoadHeaders()) - is.NoErr(err) - - want := []string{"name"} - is.Equal(want, table.Headers()) - - iter, _ := table.Iter() - iter.Next() - want = []string{"bar"} - is.Equal(want, iter.Row()) - is.True(!iter.Next()) - }) -} - -func TestNewTable(t *testing.T) { - t.Run("ErrorOpts", func(t *testing.T) { - is := is.New(t) - table, err := NewTable(FromString(""), errorOpts()) - is.True(table == nil) - is.True(err != nil) - }) - t.Run("ErrorSource", func(t *testing.T) { - is := is.New(t) - _, err := NewTable(errorSource(), LoadHeaders()) - is.True(err != nil) - }) -} - -func TestSetHeaders(t *testing.T) { - is := is.New(t) - in := "Foo" - table, err := NewTable(FromString(in), SetHeaders("name")) - is.NoErr(err) - want := []string{"name"} - is.Equal(want, table.Headers()) - - iter, _ := table.Iter() - iter.Next() - want = []string{"Foo"} - is.Equal(want, iter.Row()) - is.True(!iter.Next()) -} - -func TestDelimiter(t *testing.T) { - is := is.New(t) - in := "Foo;Bar" - table, err := NewTable(FromString(in), Delimiter(';')) - is.NoErr(err) - contents, err := table.ReadAll() - is.NoErr(err) - is.Equal(contents, [][]string{{"Foo", "Bar"}}) -} - -func TestConsiderInitialSpace(t *testing.T) { - is := is.New(t) - in := " Foo" - table, err := NewTable(FromString(in), ConsiderInitialSpace()) - 
is.NoErr(err) - contents, err := table.ReadAll() - is.NoErr(err) - is.Equal(contents, [][]string{{" Foo"}}) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/examples/infer/data_infer_utf8.csv b/vendor/github.com/frictionlessdata/tableschema-go/examples/infer/data_infer_utf8.csv deleted file mode 100644 index 385002e..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/examples/infer/data_infer_utf8.csv +++ /dev/null @@ -1,5 +0,0 @@ -1,39,Paul -2,23,Jimmy -3,36,Jane -4,28,Judy -5,37,Iñtërnâtiônàlizætiøn \ No newline at end of file diff --git a/vendor/github.com/frictionlessdata/tableschema-go/examples/infer/main.go b/vendor/github.com/frictionlessdata/tableschema-go/examples/infer/main.go deleted file mode 100644 index 7a3049a..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/examples/infer/main.go +++ /dev/null @@ -1,34 +0,0 @@ -package main - -import ( - "fmt" - - "github.com/frictionlessdata/tableschema-go/csv" - "github.com/frictionlessdata/tableschema-go/schema" -) - -type user struct { - ID int - Age int - Name string -} - -func main() { - tab, err := csv.NewTable(csv.FromFile("data_infer_utf8.csv"), csv.SetHeaders("ID", "Age", "Name")) - if err != nil { - panic(err) - } - fmt.Println("## Raw Table ##") - fmt.Println(tab) - sch, err := schema.Infer(tab) - if err != nil { - panic(err) - } - - fmt.Println("## Schema ##") - fmt.Println(sch) - var users []user - sch.CastTable(tab, &users) - - fmt.Printf("\n## Cast Table ##\n%+v\n", users) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/examples/validate/capital.csv b/vendor/github.com/frictionlessdata/tableschema-go/examples/validate/capital.csv deleted file mode 100644 index b45c39d..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/examples/validate/capital.csv +++ /dev/null @@ -1,5 +0,0 @@ -id,capital,url -1,39.00,http://www.test.com -2,23.00,http://www.test.de 
-3,36.00,http://www.test.uk -4,28.00,http://www.test.co.il \ No newline at end of file diff --git a/vendor/github.com/frictionlessdata/tableschema-go/examples/validate/main.go b/vendor/github.com/frictionlessdata/tableschema-go/examples/validate/main.go deleted file mode 100644 index f5049c6..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/examples/validate/main.go +++ /dev/null @@ -1,60 +0,0 @@ -package main - -import ( - "log" - - "github.com/frictionlessdata/tableschema-go/csv" - "github.com/frictionlessdata/tableschema-go/schema" -) - -// Example of how to read, validate and change a schema. -func main() { - // Reading schem. - capitalSchema, err := schema.LoadFromFile("schema.json") - if err != nil { - log.Fatal(err) - } - // Validate schema. - if err := capitalSchema.Validate(); err != nil { - log.Fatal(err) - } - - // Printing schema fields names. - log.Println("Fields:") - for i, f := range capitalSchema.Fields { - log.Printf("%d - %s\n", i, f.Name) - } - - // Working with schema fields. - if capitalSchema.HasField("Capital") { - log.Println("Field capital exists in schema") - } else { - log.Fatalf("Schema must have the field capital") - } - field, _ := capitalSchema.GetField("URL") - if field.TestString("http://new.url.com") { - value, err := field.Cast("http://new.url.com") - log.Printf("URL unmarshal to value: %v\n", value) - if err != nil { - log.Fatalf("Error casting value: %q", err) - } - } else { - log.Fatalf("Value http://new.url.com must fit in field capital.") - } - - // Dealing with tabular data associated with the schema. 
- table, err := csv.NewTable(csv.FromFile("capital.csv"), csv.LoadHeaders()) - capitalRow := struct { - ID int - Capital float64 - URL string - }{} - - iter, _ := table.Iter() - for iter.Next() { - if err := capitalSchema.CastRow(iter.Row(), &capitalRow); err != nil { - log.Fatalf("Couldn't unmarshal row:%v err:%q", iter.Row(), err) - } - log.Printf("Cast Row: %+v\n", capitalRow) - } -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/examples/validate/schema.json b/vendor/github.com/frictionlessdata/tableschema-go/examples/validate/schema.json deleted file mode 100644 index 2dcfd2e..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/examples/validate/schema.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "fields": [ - { - "name": "ID", - "title": "", - "description": "", - "type": "integer", - "format": "default" - }, - { - "name": "Capital", - "title": "", - "description": "", - "type": "number" - }, - { - "name": "URL", - "title": "", - "description": "", - "type": "string" - } - ] -} \ No newline at end of file diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/any.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/any.go deleted file mode 100644 index 1872308..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/any.go +++ /dev/null @@ -1,11 +0,0 @@ -package schema - -import "fmt" - -func castAny(value interface{}) (interface{}, error) { - return value, nil -} - -func uncastAny(value interface{}) (string, error) { - return fmt.Sprintf("%v", value), nil -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/any_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/any_test.go deleted file mode 100644 index bbc2a65..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/any_test.go +++ /dev/null @@ -1,21 +0,0 @@ -package schema - -import ( - "testing" - - "github.com/matryer/is" -) - -func 
TestCastAny(t *testing.T) { - is := is.New(t) - got, err := castAny("foo") - is.NoErr(err) - is.Equal("foo", got) -} - -func TestUncastAny(t *testing.T) { - is := is.New(t) - got, err := uncastAny(10) - is.NoErr(err) - is.Equal("10", got) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/array.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/array.go deleted file mode 100644 index d01bfd8..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/array.go +++ /dev/null @@ -1,18 +0,0 @@ -package schema - -import ( - "encoding/json" - "fmt" -) - -func castArray(value string) (interface{}, error) { - var obj interface{} - if err := json.Unmarshal([]byte(value), &obj); err != nil { - return nil, err - } - arr, ok := obj.([]interface{}) - if !ok { - return nil, fmt.Errorf("%s is not an JSON array", value) - } - return arr, nil -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/boolean.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/boolean.go deleted file mode 100644 index cc19c9b..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/boolean.go +++ /dev/null @@ -1,39 +0,0 @@ -package schema - -import ( - "fmt" - "reflect" -) - -func castBoolean(value string, trueValues, falseValues []string) (bool, error) { - for _, v := range trueValues { - if value == v { - return true, nil - } - } - for _, v := range falseValues { - if value == v { - return false, nil - } - } - return false, fmt.Errorf("invalid boolean value:%s", value) -} - -func uncastBoolean(value interface{}, trueValues, falseValues []string) (string, error) { - switch value.(type) { - case bool: - return fmt.Sprintf("%v", value), nil - case string: - for _, v := range trueValues { - if value == v { - return value.(string), nil - } - } - for _, v := range falseValues { - if value == v { - return value.(string), nil - } - } - } - return "", fmt.Errorf("invalid boolean - value:\"%v\" 
type:%v", value, reflect.ValueOf(value).Type()) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/boolean_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/boolean_test.go deleted file mode 100644 index deeced2..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/boolean_test.go +++ /dev/null @@ -1,78 +0,0 @@ -package schema - -import ( - "testing" - - "github.com/matryer/is" -) - -func TestCastBoolean(t *testing.T) { - data := []struct { - Desc string - TrueValues []string - FalseValues []string - Value string - Expected bool - }{ - {"simple true value", []string{"1"}, []string{"0"}, "1", true}, - {"simple false value", []string{"1"}, []string{"0"}, "0", false}, - {"duplicate value, true wins", []string{"1"}, []string{"1"}, "1", true}, - } - for _, d := range data { - t.Run(d.Desc, func(t *testing.T) { - is := is.New(t) - b, err := castBoolean(d.Value, d.TrueValues, d.FalseValues) - is.NoErr(err) - is.Equal(b, d.Expected) - }) - } -} - -func TestCastBoolean_Error(t *testing.T) { - is := is.New(t) - _, err := castBoolean("foo", defaultTrueValues, defaultFalseValues) - is.True(err != nil) -} - -func TestUncastBoolean(t *testing.T) { - t.Run("Success", func(t *testing.T) { - data := []struct { - desc string - value interface{} - want string - trueValues []string - falseValues []string - }{ - {"True", true, "true", []string{}, []string{}}, - {"False", false, "false", []string{}, []string{}}, - {"TrueFromTrueValues", "0", "0", []string{"0"}, []string{}}, - {"FalseFromFalseValues", "1", "1", []string{}, []string{"1"}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := uncastBoolean(d.value, d.trueValues, d.falseValues) - is.NoErr(err) - is.Equal(d.want, got) - }) - } - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - value interface{} - trueValues []string - falseValues []string - }{ - {"InvalidType", 10, 
[]string{}, []string{}}, - {"NotInTrueOrFalseValues", "1", []string{}, []string{}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := uncastBoolean(d.value, d.trueValues, d.falseValues) - is.True(err != nil) - }) - } - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/date.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/date.go deleted file mode 100644 index 7aed3e6..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/date.go +++ /dev/null @@ -1,28 +0,0 @@ -package schema - -import "time" - -func castDate(format, value string, c Constraints) (time.Time, error) { - y, err := castDateWithoutChecks(format, value) - if err != nil { - return y, err - } - var max, min time.Time - if c.Maximum != "" { - max, err = castDateWithoutChecks(format, c.Maximum) - if err != nil { - return max, err - } - } - if c.Minimum != "" { - min, err = castDateWithoutChecks(format, c.Minimum) - if err != nil { - return min, err - } - } - return checkConstraints(y, max, min, DateType) -} - -func castDateWithoutChecks(format, value string) (time.Time, error) { - return castDefaultOrCustomTime("2006-01-02", format, value) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/date_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/date_test.go deleted file mode 100644 index 1fd4a30..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/date_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package schema - -import ( - "testing" - - "github.com/matryer/is" -) - -func TestCastDate(t *testing.T) { - t.Run("ValidMaximum", func(t *testing.T) { - is := is.New(t) - _, err := castDate("2006-01-02", "2006-01-02", Constraints{Maximum: "2007-01-02"}) - is.NoErr(err) - }) - t.Run("ValidMinimum", func(t *testing.T) { - is := is.New(t) - _, err := castDate("2006-01-02", "2007-01-02", Constraints{Minimum: "2006-01-02"}) - 
is.NoErr(err) - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - date string - constraints Constraints - }{ - {"InvalidDate", "foo", Constraints{}}, - {"DateBiggerThanMaximum", "2006-01-02", Constraints{Maximum: "2005-01-02"}}, - {"InvalidMaximum", "2006-01-02", Constraints{Maximum: "boo"}}, - {"DateSmallerThanMinimum", "2005-01-02", Constraints{Minimum: "2006-01-02"}}, - {"InvalidMinimum", "2006-01-02", Constraints{Minimum: "boo"}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castDate("2006-01-02", d.date, d.constraints) - is.True(err != nil) - }) - } - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/datetime.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/datetime.go deleted file mode 100644 index d9c49e9..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/datetime.go +++ /dev/null @@ -1,139 +0,0 @@ -package schema - -import ( - "fmt" - "strings" - "time" -) - -// Go has a different date formatting style. 
Converting to the one -// used in https://specs.frictionlessdata.io/table-schema/#date -// https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior -var strftimeToGoConversionTable = map[string]string{ - "%d": "02", - "%-d": "2", - "%B": "January", - "%b": "Jan", - "%h": "Jan", - "%m": "01", - "%_m": " 1", - "%-m": "1", - "%Y": "2006", - "%y": "06", - "%H": "15", - "%I": "03", - "%M": "04", - "%S": "05", - "%f": "999999", - "%z": "Z0700", - "%:z": "Z07:00", - "%Z": "MST", - "%p": "PM", -} - -func castYearMonth(value string, c Constraints) (time.Time, error) { - y, err := castYearMonthWithoutChecks(value) - if err != nil { - return y, err - } - var max, min time.Time - if c.Maximum != "" { - max, err = castYearMonthWithoutChecks(c.Maximum) - if err != nil { - return y, err - } - } - if c.Minimum != "" { - min, err = castYearMonthWithoutChecks(c.Minimum) - if err != nil { - return y, err - } - } - return checkConstraints(y, max, min, YearMonthType) -} - -func castYearMonthWithoutChecks(value string) (time.Time, error) { - return time.Parse("2006-01", value) -} - -func castYearWithoutChecks(value string) (time.Time, error) { - return time.Parse("2006", value) -} - -func castYear(value string, c Constraints) (time.Time, error) { - y, err := castYearWithoutChecks(value) - if err != nil { - return y, err - } - var max, min time.Time - if c.Maximum != "" { - max, err = castYearWithoutChecks(c.Maximum) - if err != nil { - return y, err - } - } - if c.Minimum != "" { - min, err = castYearWithoutChecks(c.Minimum) - if err != nil { - return y, err - } - } - return checkConstraints(y, max, min, YearType) -} - -func castDateTime(value string, c Constraints) (time.Time, error) { - dt, err := castDateTimeWithoutChecks(value) - if err != nil { - return dt, err - } - var max, min time.Time - if c.Maximum != "" { - max, err = castDateTimeWithoutChecks(c.Maximum) - if err != nil { - return dt, err - } - } - if c.Minimum != "" { - min, err = 
castDateTimeWithoutChecks(c.Minimum) - if err != nil { - return dt, err - } - } - return checkConstraints(dt, max, min, DateTimeType) -} - -func castDateTimeWithoutChecks(value string) (time.Time, error) { - return time.Parse(time.RFC3339, value) -} - -func checkConstraints(v, max, min time.Time, t string) (time.Time, error) { - if !max.IsZero() && v.After(max) { - return v, fmt.Errorf("constraint check error: %s:%v > maximum:%v", t, v, max) - } - if !min.IsZero() && v.Before(min) { - return v, fmt.Errorf("constraint check error: %s:%v < minimum:%v", t, v, min) - } - return v, nil -} - -func castDefaultOrCustomTime(defaultFormat, format, value string) (time.Time, error) { - switch format { - case "", defaultFieldFormat: - t, err := time.Parse(defaultFormat, value) - if err != nil { - return t, err - } - return t.In(time.UTC), nil - case AnyDateFormat: - return time.Unix(0, 0), fmt.Errorf("any date format not yet supported. Please file an issue at github.com/frictionlessdata/tableschema-go") - } - goFormat := format - for f, s := range strftimeToGoConversionTable { - goFormat = strings.Replace(goFormat, f, s, -1) - } - t, err := time.Parse(goFormat, value) - if err != nil { - return t, err - } - return t.In(time.UTC), nil -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/datetime_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/datetime_test.go deleted file mode 100644 index 8d4e714..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/datetime_test.go +++ /dev/null @@ -1,126 +0,0 @@ -package schema - -import ( - "testing" - - "github.com/matryer/is" -) - -func TestCastDatetime(t *testing.T) { - t.Run("ValidMaximum", func(t *testing.T) { - is := is.New(t) - _, err := castDateTime("2013-01-24T22:01:00+07:00", Constraints{Maximum: "2014-01-24T22:01:00Z"}) - is.NoErr(err) - }) - t.Run("ValidMinimum", func(t *testing.T) { - is := is.New(t) - _, err := castDateTime("2013-01-24T22:01:00Z", 
Constraints{Minimum: "2012-01-24T22:01:00Z"}) - is.NoErr(err) - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - datetime string - constraints Constraints - }{ - { - "InvalidDateTime", - "foo", - Constraints{}, - }, - { - "DateTimeBiggerThanMaximum", - "2013-01-24T22:01:00Z", - Constraints{Maximum: "2013-01-24T01:01:00Z"}, - }, - { - "InvalidMaximum", - "2013-01-24T22:01:00Z", - Constraints{Maximum: "boo"}, - }, - { - "DateTimeSmallerThanMinimum", - "2013-01-24T22:01:00Z", - Constraints{Minimum: "2013-01-24T22:01:01Z"}, - }, - { - "InvalidMinimum", - "2013-01-24T22:01:00Z", - Constraints{Minimum: "boo"}, - }, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castDateTime(d.datetime, d.constraints) - is.True(err != nil) - }) - } - }) -} - -func TestCastYear(t *testing.T) { - t.Run("ValidMaximum", func(t *testing.T) { - is := is.New(t) - _, err := castYear("2006", Constraints{Maximum: "2007"}) - is.NoErr(err) - }) - t.Run("ValidMinimum", func(t *testing.T) { - is := is.New(t) - _, err := castYear("2007", Constraints{Minimum: "2006"}) - is.NoErr(err) - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - year string - constraints Constraints - }{ - {"InvalidYear", "foo", Constraints{}}, - {"YearBiggerThanMaximum", "2006", Constraints{Maximum: "2005"}}, - {"InvalidMaximum", "2005", Constraints{Maximum: "boo"}}, - {"YearSmallerThanMinimum", "2005", Constraints{Minimum: "2006"}}, - {"InvalidMinimum", "2005", Constraints{Minimum: "boo"}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castYear(d.year, d.constraints) - is.True(err != nil) - }) - } - }) -} - -func TestCastYearMonth(t *testing.T) { - t.Run("ValidMaximum", func(t *testing.T) { - is := is.New(t) - _, err := castYearMonth("2006-02", Constraints{Maximum: "2006-03"}) - is.NoErr(err) - }) - t.Run("ValidMinimum", func(t *testing.T) { - is := is.New(t) - _, err := 
castYearMonth("2006-03", Constraints{Minimum: "2006-02"}) - is.NoErr(err) - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - year string - constraints Constraints - }{ - {"InvalidYear", "foo", Constraints{}}, - {"YearBiggerThanMaximum", "2006-02", Constraints{Maximum: "2006-01"}}, - {"InvalidMaximum", "2005-02", Constraints{Maximum: "boo"}}, - {"YearSmallerThanMinimum", "2006-02", Constraints{Minimum: "2006-03"}}, - {"InvalidMinimum", "2005-02", Constraints{Minimum: "boo"}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castYearMonth(d.year, d.constraints) - is.True(err != nil) - }) - } - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/duration.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/duration.go deleted file mode 100644 index be11f59..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/duration.go +++ /dev/null @@ -1,65 +0,0 @@ -package schema - -import ( - "fmt" - "reflect" - "regexp" - "strconv" - "strings" - "time" -) - -var durationRegexp = regexp.MustCompile( - `P(?P\d+Y)?(?P\d+M)?(?P\d+D)?T?(?P\d+H)?(?P\d+M)?(?P\d+\.?\d*S)?`) - -const ( - hoursInYear = time.Duration(24*360) * time.Hour - hoursInMonth = time.Duration(24*30) * time.Hour - hoursInDay = time.Duration(24) * time.Hour -) - -func castDuration(value string) (time.Duration, error) { - matches := durationRegexp.FindStringSubmatch(value) - if len(matches) == 0 { - return 0, fmt.Errorf("Invalid duration:\"%s\"", value) - } - years := parseIntDuration(matches[1], hoursInYear) - months := parseIntDuration(matches[2], hoursInMonth) - days := parseIntDuration(matches[3], hoursInDay) - hours := parseIntDuration(matches[4], time.Hour) - minutes := parseIntDuration(matches[5], time.Minute) - seconds := parseSeconds(matches[6]) - return years + months + days + hours + minutes + seconds, nil -} - -func parseIntDuration(v string, multiplier 
time.Duration) time.Duration { - if len(v) == 0 { - return 0 - } - // Ignoring error here because only digits could come from the regular expression. - d, _ := strconv.Atoi(v[0 : len(v)-1]) - return time.Duration(d) * multiplier -} - -func parseSeconds(v string) time.Duration { - if len(v) == 0 { - return 0 - } - // Ignoring error here because only valid arbitrary precision floats could come from the regular expression. - d, _ := strconv.ParseFloat(v[0:len(v)-1], 64) - return time.Duration(d * 10e8) -} - -func uncastDuration(in interface{}) (string, error) { - v, ok := in.(time.Duration) - if !ok { - return "", fmt.Errorf("invalid duration - value:%v type:%v", in, reflect.ValueOf(in).Type()) - } - y := v / hoursInYear - r := v % hoursInYear - m := r / hoursInMonth - r = r % hoursInMonth - d := r / hoursInDay - r = r % hoursInDay - return strings.ToUpper(fmt.Sprintf("P%dY%dM%dDT%s", y, m, d, r.String())), nil -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/duration_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/duration_test.go deleted file mode 100644 index 2d53b9b..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/duration_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package schema - -import ( - "testing" - "time" - - "github.com/matryer/is" -) - -func TestCastDuration_Success(t *testing.T) { - data := []struct { - desc string - value string - want time.Duration - }{ - {"OnlyP", "P", time.Duration(0)}, - {"OnlyHour", "P2H", time.Duration(2) * time.Hour}, - {"SecondsWithDecimal", "P22.519S", 22519 * time.Millisecond}, - {"HourDefaultZero", "PH", time.Duration(0) * time.Hour}, - {"OnlyPeriod", "P3Y6M4D", 3*hoursInYear + 6*hoursInMonth + 4*hoursInDay}, - {"OnlyTime", "PT12H30M5S", 12*time.Hour + 30*time.Minute + 5*time.Second}, - {"Complex", "P3Y6M4DT12H30M5S", 3*hoursInYear + 6*hoursInMonth + 4*hoursInDay + 12*time.Hour + 30*time.Minute + 5*time.Second}, - {"2Years", "P2Y", (2 * 360 * 
24) * time.Hour}, - {"StringFieldsAreIgnored", "PfooHdddS", time.Duration(0)}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := castDuration(d.value) - is.NoErr(err) - is.Equal(got, d.want) - }) - } -} - -func TestCastDuration_Error(t *testing.T) { - data := []struct { - desc string - value string - }{ - {"WrongStartChar", "C2H"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castDuration(d.value) - is.True(err != nil) - }) - } -} - -func TestUncastDuration(t *testing.T) { - t.Run("Success", func(t *testing.T) { - data := []struct { - desc string - value time.Duration - want string - }{ - {"1Year", 1*hoursInYear + 1*hoursInMonth + 1*hoursInDay + 1*time.Hour + 1*time.Minute + 500*time.Millisecond, "P1Y1M1DT1H1M0.5S"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := uncastDuration(d.value) - is.NoErr(err) - is.Equal(d.want, got) - }) - } - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - value interface{} - }{ - {"InvalidType", 10}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := uncastDuration(d.value) - is.True(err != nil) - }) - } - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/field.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/field.go deleted file mode 100644 index 37257cb..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/field.go +++ /dev/null @@ -1,268 +0,0 @@ -package schema - -import ( - "encoding/json" - "fmt" - "reflect" - "regexp" -) - -// Default for schema fields. -const ( - defaultFieldType = "string" - defaultFieldFormat = "default" -) - -// Default schema variables. -var ( - defaultTrueValues = []string{"yes", "y", "true", "t", "1"} - defaultFalseValues = []string{"no", "n", "false", "f", "0"} - defaultDecimalChar = "." 
- defaultGroupChar = "," - defaultBareNumber = true -) - -// Field types. -const ( - IntegerType = "integer" - StringType = "string" - BooleanType = "boolean" - NumberType = "number" - DateType = "date" - ObjectType = "object" - ArrayType = "array" - DateTimeType = "datetime" - TimeType = "time" - YearMonthType = "yearmonth" - YearType = "year" - DurationType = "duration" - GeoPointType = "geopoint" - AnyType = "any" -) - -// Formats. -const ( - AnyDateFormat = "any" -) - -// Constraints can be used by consumers to list constraints for validating -// field values. -type Constraints struct { - // Required indicates whether this field is allowed to be null. - // Schema.MissingValues define how the string representation can - // represent null values. - Required bool `json:"required,omitempty"` - - // Unique indicates whether this field is allowed to have duplicates. - // This constrain is only relevant for Schema.CastTable - Unique bool `json:"unique,omitempty"` - - Maximum string `json:"maximum,omitempty"` - Minimum string `json:"minimum,omitempty"` - MinLength int `json:"minLength,omitempty"` - MaxLength int `json:"maxLength,omitempty"` - Pattern string `json:"pattern,omitempty"` - compiledPattern *regexp.Regexp - - // Enum indicates that the value of the field must exactly match a value in the enum array. - // The values of the fields could need encoding, depending on the type. - // It applies to all field types. - Enum []interface{} `json:"enum,omitempty"` - // rawEnum keeps the raw version of the enum objects, to make validation faster and easier. - rawEnum map[string]struct{} -} - -// Field describes a single field in the table schema. -// More: https://specs.frictionlessdata.io/table-schema/#field-descriptors -type Field struct { - // Name of the field. It is mandatory and shuold correspond to the name of field/column in the data file (if it has a name). 
- Name string `json:"name"` - Type string `json:"type,omitempty"` - Format string `json:"format,omitempty"` - // A human readable label or title for the field. - Title string `json:"title,omitempty"` - // A description for this field e.g. "The recipient of the funds" - Description string `json:"description,omitempty"` - - // Boolean properties. Define set of the values that represent true and false, respectively. - // https://specs.frictionlessdata.io/table-schema/#boolean - TrueValues []string `json:"trueValues,omitempty"` - FalseValues []string `json:"falseValues,omitempty"` - - // Number/Integer properties. - - // A string whose value is used to represent a decimal point within the number. The default value is ".". - DecimalChar string `json:"decimalChar,omitempty"` - // A string whose value is used to group digits within the number. The default value is null. A common value is "," e.g. "100,000". - GroupChar string `json:"groupChar,omitempty"` - // If true the physical contents of this field must follow the formatting constraints already set out. - // If false the contents of this field may contain leading and/or trailing non-numeric characters which - // are going to be stripped. Default value is true: - BareNumber bool `json:"bareNumber,omitempty"` - - // MissingValues is a map which dictates which string values should be treated as null - // values. - MissingValues map[string]struct{} `json:"-"` - - // Constraints can be used by consumers to list constraints for validating - // field values. - Constraints Constraints -} - -// UnmarshalJSON sets *f to a copy of data. It will respect the default values -// described at: https://specs.frictionlessdata.io/table-schema/ -func (f *Field) UnmarshalJSON(data []byte) error { - // This is neded so it does not call UnmarshalJSON from recursively. 
- type fieldAlias Field - u := &fieldAlias{ - Type: defaultFieldType, - Format: defaultFieldFormat, - TrueValues: defaultTrueValues, - FalseValues: defaultFalseValues, - DecimalChar: defaultDecimalChar, - GroupChar: defaultGroupChar, - BareNumber: defaultBareNumber, - } - if err := json.Unmarshal(data, u); err != nil { - return err - } - *f = Field(*u) - // Transformation/Validation that should be done at creation time. - if f.Constraints.Pattern != "" { - p, err := regexp.Compile(f.Constraints.Pattern) - if err != nil { - return err - } - f.Constraints.compiledPattern = p - } - if f.Constraints.Enum != nil { - f.Constraints.rawEnum = make(map[string]struct{}) - for i := range f.Constraints.Enum { - e, err := f.Uncast(f.Constraints.Enum[i]) - if err != nil { - return err - } - f.Constraints.rawEnum[e] = struct{}{} - } - } - return nil -} - -// Cast casts the passed-in string against field type. Returns an error -// if the value can not be cast or any field constraint can not be satisfied. 
-func (f *Field) Cast(value string) (interface{}, error) { - if f.Constraints.Required { - _, ok := f.MissingValues[value] - if ok { - return nil, fmt.Errorf("%s is required", f.Name) - } - } - var castd interface{} - var err error - switch f.Type { - case IntegerType: - castd, err = castInt(f.BareNumber, value, f.Constraints) - case StringType: - castd, err = castString(f.Format, value, f.Constraints) - case BooleanType: - castd, err = castBoolean(value, f.TrueValues, f.FalseValues) - case NumberType: - castd, err = castNumber(f.DecimalChar, f.GroupChar, f.BareNumber, value, f.Constraints) - case DateType: - castd, err = castDate(f.Format, value, f.Constraints) - case ObjectType: - castd, err = castObject(value) - case ArrayType: - castd, err = castArray(value) - case TimeType: - castd, err = castTime(f.Format, value, f.Constraints) - case YearMonthType: - castd, err = castYearMonth(value, f.Constraints) - case YearType: - castd, err = castYear(value, f.Constraints) - case DateTimeType: - castd, err = castDateTime(value, f.Constraints) - case DurationType: - castd, err = castDuration(value) - case GeoPointType: - castd, err = castGeoPoint(f.Format, value) - case AnyType: - castd, err = castAny(value) - } - if err != nil { - return nil, err - } - if castd == nil { - return nil, fmt.Errorf("invalid field type: %s", f.Type) - } - if len(f.Constraints.rawEnum) > 0 { - rawValue, err := f.Uncast(castd) - if err != nil { - return nil, err - } - if _, ok := f.Constraints.rawEnum[rawValue]; !ok { - return nil, fmt.Errorf("castd value:%s does not match enum constraints:%v", rawValue, f.Constraints.rawEnum) - } - } - return castd, nil -} - -// Uncast uncasts the passed-in value into a string. It returns an error if the -// the type of the passed-in value can not be converted to field type. -func (f *Field) Uncast(in interface{}) (string, error) { - // This indirect avoids the need to custom-case pointer types. 
- inValue := reflect.Indirect(reflect.ValueOf(in)) - inInterface := inValue.Interface() - ok := false - switch f.Type { - case IntegerType: - var a int64 - ok = reflect.TypeOf(inInterface).ConvertibleTo(reflect.ValueOf(a).Type()) - if ok { - inInterface = inValue.Convert(reflect.ValueOf(a).Type()).Interface() - } - case NumberType: - var a float64 - ok = reflect.TypeOf(inInterface).ConvertibleTo(reflect.ValueOf(a).Type()) - if ok { - inInterface = inValue.Convert(reflect.ValueOf(a).Type()).Interface() - } - case BooleanType: - return uncastBoolean(in, f.TrueValues, f.FalseValues) - case DurationType: - return uncastDuration(inInterface) - case GeoPointType: - return uncastGeoPoint(f.Format, in) - case DateType, DateTimeType, TimeType, YearMonthType, YearType: - return uncastTime(inInterface) - case ObjectType: - return uncastObject(inInterface) - case StringType: - _, ok = inInterface.(string) - case ArrayType: - ok = reflect.TypeOf(inInterface).Kind() == reflect.Slice - case AnyType: - return uncastAny(in) - } - if !ok { - return "", fmt.Errorf("can not convert \"%d\" which type is %s to type %s", in, reflect.TypeOf(in), f.Type) - } - return fmt.Sprintf("%v", inInterface), nil -} - -// TestString checks whether the value can be unmarshalled to the field type. -func (f *Field) TestString(value string) bool { - _, err := f.Cast(value) - return err == nil -} - -// asReadField returns the field passed-in as parameter like it's been read as JSON. -// That include setting default values. -// Created for being used in tests. -// IMPORTANT: Not ready for being used in production due to possibly bad performance. 
-func asJSONField(f Field) Field { - var out Field - data, _ := json.Marshal(&f) - json.Unmarshal(data, &out) - return out -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/field_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/field_test.go deleted file mode 100644 index 83997b9..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/field_test.go +++ /dev/null @@ -1,276 +0,0 @@ -package schema - -import ( - "encoding/json" - "fmt" - "testing" - "time" - - "github.com/matryer/is" -) - -func ExampleField_Cast() { - in := `{ - "name": "id", - "type": "string", - "format": "default", - "constraints": { - "required": true, - "minLen": "5", - "maxLen": "10", - "pattern": ".*11$", - "enum":["1234511"] - } - }` - var field Field - json.Unmarshal([]byte(in), &field) - v, err := field.Cast("1234511") - if err != nil { - panic(err) - } - fmt.Println(v) - // Output: 1234511 -} - -func TestDefaultValues(t *testing.T) { - data := []struct { - Desc string - JSON string - Field Field - }{ - { - "Default Values", - `{"name":"n1"}`, - Field{Name: "n1", Type: defaultFieldType, Format: defaultFieldFormat, TrueValues: defaultTrueValues, FalseValues: defaultFalseValues, - DecimalChar: defaultDecimalChar, GroupChar: defaultGroupChar, BareNumber: defaultBareNumber}, - }, - { - "Overrinding default values", - `{"name":"n2","type":"t2","format":"f2","falseValues":["f2"],"trueValues":["t2"]}`, - Field{Name: "n2", Type: "t2", Format: "f2", TrueValues: []string{"t2"}, FalseValues: []string{"f2"}, - DecimalChar: defaultDecimalChar, GroupChar: defaultGroupChar, BareNumber: defaultBareNumber}, - }, - } - for _, d := range data { - t.Run(d.Desc, func(t *testing.T) { - is := is.New(t) - var f Field - is.NoErr(json.Unmarshal([]byte(d.JSON), &f)) - is.Equal(f, d.Field) - }) - } -} - -func TestField_Cast(t *testing.T) { - data := []struct { - Desc string - Value string - Field Field - Expected interface{} - }{ - {"Integer", 
"42", Field{Type: IntegerType}, int64(42)}, - {"String_URI", "http:/frictionlessdata.io", Field{Type: StringType, Format: "uri"}, "http:/frictionlessdata.io"}, - {"Boolean_TrueValues", "1", Field{Type: BooleanType, TrueValues: []string{"1"}}, true}, - {"Boolean_FalseValues", "0", Field{Type: BooleanType, FalseValues: []string{"0"}}, false}, - {"Number", "42.5", Field{Type: NumberType}, 42.5}, - {"Date_NoFormat", "2015-10-15", Field{Type: DateType}, time.Date(2015, time.October, 15, 0, 0, 0, 0, time.UTC)}, - {"Date_DefaultFormat", "2015-10-15", Field{Type: DateType, Format: defaultFieldFormat}, time.Date(2015, time.October, 15, 0, 0, 0, 0, time.UTC)}, - {"Date_CustomFormat", "15/10/2015", Field{Type: DateType, Format: "%d/%m/%Y"}, time.Date(2015, time.October, 15, 0, 0, 0, 0, time.UTC)}, - {"Time_NoFormat", "10:10:10", Field{Type: TimeType}, time.Date(0000, time.January, 01, 10, 10, 10, 00, time.UTC)}, - {"Time_DefaultFormat", "10:10:10", Field{Type: TimeType, Format: defaultFieldFormat}, time.Date(0000, time.January, 01, 10, 10, 10, 00, time.UTC)}, - {"Time_CustomFormat", "10-10-10", Field{Type: TimeType, Format: "%H-%M-%S"}, time.Date(0000, time.January, 01, 10, 10, 10, 00, time.UTC)}, - {"YearMonth", "2017-08", Field{Type: YearMonthType}, time.Date(2017, time.August, 01, 00, 00, 00, 00, time.UTC)}, - {"Year", "2017", Field{Type: YearType}, time.Date(2017, time.January, 01, 00, 00, 00, 00, time.UTC)}, - {"DateTime_NoFormat", "2008-09-15T10:53:00Z", Field{Type: DateTimeType}, time.Date(2008, time.September, 15, 10, 53, 00, 00, time.UTC)}, - {"DateTime_DefaultFormat", "2008-09-15T10:53:00Z", Field{Type: DateTimeType, Format: defaultFieldFormat}, time.Date(2008, time.September, 15, 10, 53, 00, 00, time.UTC)}, - {"Duration", "P2H", Field{Type: DurationType}, 2 * time.Hour}, - {"GeoPoint", "90,45", Field{Type: GeoPointType}, GeoPoint{90, 45}}, - {"Any", "10", Field{Type: AnyType}, "10"}, - } - for _, d := range data { - t.Run(d.Desc, func(t *testing.T) { - is := 
is.New(t) - c, err := d.Field.Cast(d.Value) - is.NoErr(err) - is.Equal(c, d.Expected) - }) - } - t.Run("Object_Success", func(t *testing.T) { - is := is.New(t) - f := Field{Type: ObjectType} - obj, err := f.Cast(`{"name":"foo"}`) - is.NoErr(err) - - objMap, ok := obj.(map[string]interface{}) - is.True(ok) - is.Equal(len(objMap), 1) - is.Equal(objMap["name"], "foo") - }) - t.Run("Object_Failure", func(t *testing.T) { - is := is.New(t) - f := Field{Type: ObjectType} - _, err := f.Cast(`{"name"}`) - is.True(err != nil) - }) - t.Run("Array_Success", func(t *testing.T) { - is := is.New(t) - f := Field{Type: ArrayType} - obj, err := f.Cast(`["foo"]`) - is.NoErr(err) - - arr, ok := obj.([]interface{}) - is.True(ok) - is.Equal(len(arr), 1) - is.Equal(arr[0], "foo") - }) - t.Run("Array_Failure", func(t *testing.T) { - is := is.New(t) - f := Field{Type: ArrayType} - _, err := f.Cast(`{"name":"foo"}`) - is.True(err != nil) - }) - t.Run("InvalidDate", func(t *testing.T) { - data := []struct { - desc string - field Field - value string - }{ - {"InvalidFormat_Any", Field{Type: DateType, Format: "any"}, "2015-10-15"}, - {"InvalidFormat_Strftime", Field{Type: DateType, Format: "Fooo"}, "2015-10-15"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := d.field.Cast(d.value) - is.True(err != nil) - }) - } - }) - t.Run("InvalidFieldType", func(t *testing.T) { - is := is.New(t) - f := Field{Type: "invalidType"} - _, err := f.Cast("42") - is.True(err != nil) - }) - t.Run("Constraints", func(t *testing.T) { - t.Run("Required", func(t *testing.T) { - is := is.New(t) - f := Field{Type: StringType, Constraints: Constraints{Required: true}, MissingValues: map[string]struct{}{"NA": struct{}{}}} - _, err := f.Cast("NA") - is.True(err != nil) - }) - t.Run("Enum", func(t *testing.T) { - data := []struct { - desc string - field Field - value string - }{ - { - "SimpleCase", - Field{Type: IntegerType, Constraints: Constraints{rawEnum: 
map[string]struct{}{"1": struct{}{}}}}, - "1", - }, - { - "NilEnumList", - Field{Type: IntegerType}, - "10", - }, - { - "EmptyEnumList", - Field{Type: IntegerType, Constraints: Constraints{rawEnum: map[string]struct{}{}}}, - "10", - }, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := d.field.Cast(d.value) - is.NoErr(err) - }) - } - }) - t.Run("EnumError", func(t *testing.T) { - data := []struct { - desc string - field Field - value string - }{ - {"NonEmptyEnumList", Field{Type: IntegerType, Constraints: Constraints{rawEnum: map[string]struct{}{"8": struct{}{}, "9": struct{}{}}}}, "10"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := d.field.Cast(d.value) - is.True(err != nil) - }) - } - }) - }) -} - -func TestUnmarshalJSON_InvalidField(t *testing.T) { - is := is.New(t) - var f Field - is.True(json.Unmarshal([]byte("{Foo:1}"), &f) != nil) -} - -func TestTestString(t *testing.T) { - is := is.New(t) - f := Field{Type: "integer"} - is.True(f.TestString("42")) - is.True(!f.TestString("boo")) -} - -func TestField_Uncast(t *testing.T) { - t.Run("Success", func(t *testing.T) { - data := []struct { - desc string - field Field - value interface{} - want string - }{ - {"Int", Field{Type: IntegerType}, 1, "1"}, - {"Number", Field{Type: NumberType}, 1.0, "1"}, - {"IntNumberImplicitCast", Field{Type: NumberType}, 100, "100"}, - {"NumberToIntImplicitCast", Field{Type: IntegerType}, 100.5, "100"}, - {"Boolean", Field{Type: BooleanType}, true, "true"}, - {"Duration", Field{Type: DurationType}, 1 * time.Second, "P0Y0M0DT1S"}, - {"GeoPoint", Field{Type: GeoPointType}, "10,10", "10,10"}, - {"String", Field{Type: StringType}, "foo", "foo"}, - {"Array", Field{Type: ArrayType}, []string{"foo"}, "[foo]"}, - {"Date", Field{Type: DateType}, time.Unix(1, 0), "1970-01-01T00:00:01Z"}, - {"Year", Field{Type: YearType}, time.Unix(1, 0), "1970-01-01T00:00:01Z"}, - {"YearMonth", Field{Type: 
YearMonthType}, time.Unix(1, 0), "1970-01-01T00:00:01Z"}, - {"DateTime", Field{Type: DateTimeType}, time.Unix(1, 0), "1970-01-01T00:00:01Z"}, - {"Date", Field{Type: DateType}, time.Unix(1, 0), "1970-01-01T00:00:01Z"}, - {"Object", Field{Type: ObjectType}, eoStruct{Name: "Foo"}, `{"name":"Foo"}`}, - {"Any", Field{Type: AnyType}, "10", "10"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := d.field.Uncast(d.value) - is.NoErr(err) - is.Equal(d.want, got) - }) - } - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - field Field - value interface{} - }{ - {"StringToIntCast", Field{Type: IntegerType}, "1.5"}, - {"StringToNumberCast", Field{Type: NumberType}, "1.5"}, - {"InvalidType", Field{Type: "Boo"}, "1"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := d.field.Uncast(d.value) - is.True(err != nil) - }) - } - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/geopoint.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/geopoint.go deleted file mode 100644 index 6fc4a75..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/geopoint.go +++ /dev/null @@ -1,103 +0,0 @@ -package schema - -import ( - "encoding/json" - "fmt" - "reflect" - "regexp" - "strconv" -) - -// Formats specific to GeoPoint field type. -const ( - GeoPointArrayFormat = "array" - GeoPointObjectFormat = "object" -) - -// GeoPoint represents a "geopoint" cell. -// More at: https://specs.frictionlessdata.io/table-schema/#geopoint -type GeoPoint struct { - Lon float64 `json:"lon,omitempty"` - Lat float64 `json:"lat,omitempty"` -} - -// UnmarshalJSON sets *f to a copy of data. 
It will respect the default values -func (p *GeoPoint) UnmarshalJSON(data []byte) error { - type geoPointAlias struct { - Lon *float64 `json:"lon,omitempty"` - Lat *float64 `json:"lat,omitempty"` - } - var a geoPointAlias - if err := json.Unmarshal(data, &a); err != nil { - return err - } - if a.Lon == nil || a.Lat == nil { - return fmt.Errorf("Invalid geopoint:\"%s\"", string(data)) - } - p.Lon = *a.Lon - p.Lat = *a.Lat - return nil -} - -var ( - geoPointDefaultRegexp = regexp.MustCompile(`^([-+]?[0-9]*\.?[0-9]*), ?([-+]?[0-9]*\.?[0-9]*)$`) - geoPointArrayRegexp = regexp.MustCompile(`^\[([-+]?[0-9]*\.?[0-9]+), ?([-+]?[0-9]*\.?[0-9]+)\]$`) -) - -func castGeoPoint(format, value string) (GeoPoint, error) { - switch format { - case "", defaultFieldFormat: - return applyGeoPointRegexp(geoPointDefaultRegexp, value) - case GeoPointArrayFormat: - return applyGeoPointRegexp(geoPointArrayRegexp, value) - case GeoPointObjectFormat: - var p GeoPoint - if err := json.Unmarshal([]byte(value), &p); err != nil { - return GeoPoint{}, err - } - return p, nil - } - return GeoPoint{}, fmt.Errorf("invalid geopoint format:%s", format) -} - -func applyGeoPointRegexp(r *regexp.Regexp, value string) (GeoPoint, error) { - matches := r.FindStringSubmatch(value) - if len(matches) == 0 || len(matches[1]) == 0 || len(matches[2]) == 0 { - return GeoPoint{}, fmt.Errorf("Invalid geopoint:\"%s\"", value) - } - lon, _ := strconv.ParseFloat(matches[1], 64) - lat, _ := strconv.ParseFloat(matches[2], 64) - return GeoPoint{lon, lat}, nil -} - -func uncastGeoPoint(format string, gp interface{}) (string, error) { - switch format { - case "", defaultFieldFormat: - value, ok := gp.(string) - if ok { - _, err := applyGeoPointRegexp(geoPointDefaultRegexp, value) - if err != nil { - return "", err - } - return value, nil - } - return "", fmt.Errorf("invalid object type to uncast to geopoint dfault format. 
want:string got:%v", reflect.TypeOf(gp).String()) - case GeoPointArrayFormat: - value, ok := gp.(string) - if ok { - _, err := applyGeoPointRegexp(geoPointArrayRegexp, value) - if err != nil { - return "", err - } - return value, nil - } - return "", fmt.Errorf("invalid object type to uncast to geopoint %s format. want:string got:%v", GeoPointArrayFormat, reflect.TypeOf(gp).String()) - case GeoPointObjectFormat: - value, ok := gp.(GeoPoint) - if ok { - return fmt.Sprintf("%+v", value), nil - } - return "", fmt.Errorf("invalid object type to uncast to geopoint %s format. want:schema.Geopoint got:%v", GeoPointObjectFormat, reflect.TypeOf(gp).String()) - } - return "", fmt.Errorf("invalid geopoint - type:%v value:\"%v\" format:%s", gp, reflect.ValueOf(gp).Type(), format) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/geopoint_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/geopoint_test.go deleted file mode 100644 index 7f25a1d..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/geopoint_test.go +++ /dev/null @@ -1,102 +0,0 @@ -package schema - -import ( - "testing" - - "github.com/matryer/is" -) - -func TestCastGeoPoint(t *testing.T) { - data := []struct { - desc string - format string - value string - want GeoPoint - }{ - {"DefaultNoParentheses", defaultFieldFormat, "90,40", GeoPoint{90, 40}}, - {"DefaultNoParenthesesFloats", defaultFieldFormat, "90.5,40.44", GeoPoint{90.5, 40.44}}, - {"DefaultNoParenthesesNegative", defaultFieldFormat, "-90.10,-40", GeoPoint{-90.10, -40}}, - {"DefaultNoParenthesesEmptyFormat", "", "90,40", GeoPoint{90, 40}}, - {"DefaultWithSpace", "", "90, 40", GeoPoint{90, 40}}, - {"DefaultWithSpaceNegative", "", "-90, -40", GeoPoint{-90, -40}}, - {"Array", GeoPointArrayFormat, "[90,40]", GeoPoint{90, 40}}, - {"ArrayFloat", GeoPointArrayFormat, "[90.5,40.44]", GeoPoint{90.5, 40.44}}, - {"ArrayNegative", GeoPointArrayFormat, "[-90.5,-40]", GeoPoint{-90.5, -40}}, - 
{"ArrayWithSpace", GeoPointArrayFormat, "[90, 40]", GeoPoint{90, 40}}, - {"ArrayWithSpaceNegative", GeoPointArrayFormat, "[-90, -40]", GeoPoint{-90, -40}}, - {"Object", GeoPointObjectFormat, `{"lon": 90, "lat": 45}`, GeoPoint{90, 45}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := castGeoPoint(d.format, d.value) - is.NoErr(err) - is.Equal(got, d.want) - }) - } - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - format string - value string - }{ - {"BadJSON", GeoPointObjectFormat, ""}, - {"BadGeoPointJSON", GeoPointObjectFormat, `{"longi": 90, "lat": 45}`}, - {"BadFormat", "badformat", `{"longi": 90, "lat": 45}`}, - {"InvalidDefault", defaultFieldFormat, "/10,10/"}, - {"InvalidArray", defaultFieldFormat, "/[10,10]/"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castGeoPoint(d.format, d.value) - is.True(err != nil) - }) - } - }) -} - -func TestUncastGeoPoint(t *testing.T) { - t.Run("Success", func(t *testing.T) { - data := []struct { - desc string - format string - value interface{} - want string - }{ - {"GeoPointObject", GeoPointObjectFormat, GeoPoint{10, 10}, "{Lon:10 Lat:10}"}, - {"GeoPointArray", GeoPointArrayFormat, "[10,10]", "[10,10]"}, - {"GeoPointDefault", defaultFieldFormat, "10,10", "10,10"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := uncastGeoPoint(d.format, d.value) - is.NoErr(err) - is.Equal(d.want, got) - }) - } - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - format string - value interface{} - }{ - {"InvalidObjectType_Object", GeoPointObjectFormat, int(10)}, - {"InvalidObjectType_Array", GeoPointArrayFormat, int(10)}, - {"InvalidArray", GeoPointArrayFormat, "10,10"}, - {"InvalidObjectType_Empty", "", int(10)}, - {"InvalidObjectType_Default", defaultFieldFormat, int(10)}, - {"InvalidDefault", defaultFieldFormat, "/10,10/"}, - 
{"InvalidFormat", "badFormat", int(10)}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := uncastGeoPoint(d.format, d.value) - is.True(err != nil) - }) - } - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/infer.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/infer.go deleted file mode 100644 index 74093c4..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/infer.go +++ /dev/null @@ -1,211 +0,0 @@ -package schema - -import ( - "fmt" - - "github.com/frictionlessdata/tableschema-go/table" -) - -var ( - // https://specs.frictionlessdata.io/table-schema/#boolean - booleanValues = map[string]struct{}{ - "true": struct{}{}, - "True": struct{}{}, - "TRUE": struct{}{}, - "1": struct{}{}, - "false": struct{}{}, - "False": struct{}{}, - "FALSE": struct{}{}, - "0": struct{}{}, - } - // This structure is optmized for querying. - // It should point a type to what is allowed to be implicitly cast. - // The inner set must be sorted by the narrower first. - implicitCast = map[string][]string{ - IntegerType: []string{IntegerType, NumberType, StringType}, - NumberType: []string{NumberType, StringType}, - BooleanType: []string{BooleanType, IntegerType, NumberType, StringType}, - YearMonthType: []string{YearMonthType, DateType, StringType}, - YearType: []string{YearType, IntegerType, NumberType, StringType}, - DateType: []string{DateType, DateTimeType, StringType}, - DateTimeType: []string{DateTimeType, StringType}, - TimeType: []string{TimeType, StringType}, - DurationType: []string{DurationType, StringType}, - ObjectType: []string{ObjectType, StringType}, - ArrayType: []string{ArrayType, StringType}, - GeoPointType: []string{GeoPointType, ArrayType, StringType}, - StringType: []string{}, - } - - // Types ordered from narrower to wider. 
- orderedTypes = []string{BooleanType, YearType, IntegerType, GeoPointType, NumberType, YearMonthType, DateType, DateTimeType, TimeType, DurationType, ArrayType, ObjectType} - - noConstraints = Constraints{} -) - -// Maximum number of rows used to infer schema. -const maxNumRowsInfer = 100 - -// Infer infers a schema from a slice of the tabular data. For columns that contain -// cells that can inferred as different types, the most popular type is set as the field -// type. For instance, a column with values 10.1, 10, 10 will inferred as being of type -// "integer". -func Infer(tab table.Table) (*Schema, error) { - s, err := sample(tab) - if err != nil { - return nil, err - } - return infer(tab.Headers(), s) -} - -func sample(tab table.Table) ([][]string, error) { - iter, err := tab.Iter() - if err != nil { - return nil, err - } - var t [][]string - for count := 0; count < maxNumRowsInfer && iter.Next(); count++ { - t = append(t, iter.Row()) - } - if iter.Err() != nil { - return nil, iter.Err() - } - return t, nil -} - -func infer(headers []string, table [][]string) (*Schema, error) { - inferredTypes := make([]map[string]int, len(headers)) - for rowID := range table { - row := table[rowID] - // TODO(danielfireman): the python version does some normalization on - // the number of columns and headers. Need to look closer at this. - if len(headers) != len(row) { - return nil, fmt.Errorf("data is not tabular. headers:%v row[%d]:%v", headers, rowID, row) - } - for cellIndex, cell := range row { - if inferredTypes[cellIndex] == nil { - inferredTypes[cellIndex] = make(map[string]int) - } - // The list bellow must be ordered by the narrower field type. 
- t := findType(cell, orderedTypes) - inferredTypes[cellIndex][t]++ - } - } - schema := Schema{} - for index := range headers { - schema.Fields = append(schema.Fields, - Field{ - Name: headers[index], - Type: defaultFieldType, - Format: defaultFieldFormat, - }) - count := 0 - for t, c := range inferredTypes[index] { - if c > count { - f := &schema.Fields[index] - f.Type = t - count = c - } - } - } - return &schema, nil -} - -// InferImplicitCasting uses a implicit casting for infering the type of columns -// that have cells of diference types. For instance, a column with values 10.1, 10, 10 -// will inferred as being of type "number" ("integer" can be implicitly cast to "number"). -// -// For medium to big tables, this method is faster than the Infer. -func InferImplicitCasting(tab table.Table) (*Schema, error) { - s, err := sample(tab) - if err != nil { - return nil, err - } - return inferImplicitCasting(tab.Headers(), s) -} - -func inferImplicitCasting(headers []string, table [][]string) (*Schema, error) { - inferredTypes := make([]string, len(headers)) - for rowID := range table { - row := table[rowID] - // TODO(danielfireman): the python version does some normalization on - // the number of columns and headers. Need to look closer at this. - if len(headers) != len(row) { - return nil, fmt.Errorf("data is not tabular. 
headers:%v row[%d]:%v", headers, rowID, row) - } - for cellIndex, cell := range row { - if inferredTypes[cellIndex] == "" { - t := findType(cell, orderedTypes) - inferredTypes[cellIndex] = t - } else { - inferredTypes[cellIndex] = findType(cell, implicitCast[inferredTypes[cellIndex]]) - } - } - } - schema := Schema{} - for index := range headers { - schema.Fields = append(schema.Fields, - Field{ - Name: headers[index], - Type: inferredTypes[index], - Format: defaultFieldFormat, - }) - } - return &schema, nil -} - -func findType(value string, checkOrder []string) string { - for _, t := range checkOrder { - switch t { - case BooleanType: - if _, ok := booleanValues[value]; ok { - return BooleanType - } - case IntegerType: - if _, err := castInt(defaultBareNumber, value, noConstraints); err == nil { - return IntegerType - } - case NumberType: - if _, err := castNumber(defaultDecimalChar, defaultGroupChar, defaultBareNumber, value, noConstraints); err == nil { - return NumberType - } - case DateType: - if _, err := castDate(defaultFieldFormat, value, noConstraints); err == nil { - return DateType - } - case ArrayType: - if _, err := castArray(value); err == nil { - return ArrayType - } - case ObjectType: - if _, err := castObject(value); err == nil { - return ObjectType - } - case TimeType: - if _, err := castTime(defaultFieldFormat, value, noConstraints); err == nil { - return TimeType - } - case YearMonthType: - if _, err := castYearMonth(value, noConstraints); err == nil { - return YearMonthType - } - case YearType: - if _, err := castYear(value, noConstraints); err == nil { - return YearType - } - case DateTimeType: - if _, err := castDateTime(value, noConstraints); err == nil { - return DateTimeType - } - case DurationType: - if _, err := castDuration(value); err == nil { - return DurationType - } - case GeoPointType: - if _, err := castGeoPoint(defaultFieldFormat, value); err == nil { - return GeoPointType - } - } - } - return StringType -} diff --git 
a/vendor/github.com/frictionlessdata/tableschema-go/schema/infer_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/infer_test.go deleted file mode 100644 index 446bb8b..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/infer_test.go +++ /dev/null @@ -1,215 +0,0 @@ -package schema - -import ( - "fmt" - "sort" - "testing" - - "github.com/matryer/is" - - "github.com/frictionlessdata/tableschema-go/table" -) - -func Exampleinfer() { - tab := table.FromSlices( - []string{"Person", "Height"}, - [][]string{ - []string{"Foo", "5"}, - []string{"Bar", "4"}, - []string{"Bez", "5.5"}, - }) - s, _ := Infer(tab) - fmt.Println("Fields:") - for _, f := range s.Fields { - fmt.Printf("{Name:%s Type:%s Format:%s}\n", f.Name, f.Type, f.Format) - } - // Output: Fields: - // {Name:Person Type:string Format:default} - // {Name:Height Type:integer Format:default} -} - -func ExampleInferImplicitCasting() { - tab := table.FromSlices( - []string{"Person", "Height"}, - [][]string{ - []string{"Foo", "5"}, - []string{"Bar", "4"}, - []string{"Bez", "5.5"}, - }) - s, _ := InferImplicitCasting(tab) - fmt.Println("Fields:") - for _, f := range s.Fields { - fmt.Printf("{Name:%s Type:%s Format:%s}\n", f.Name, f.Type, f.Format) - } - // Output: Fields: - // {Name:Person Type:string Format:default} - // {Name:Height Type:number Format:default} -} - -func TestInfer(t *testing.T) { - data := []struct { - desc string - headers []string - table [][]string - want Schema - }{ - {"1Cell_Date", []string{"Birthday"}, [][]string{[]string{"1983-10-15"}}, Schema{Fields: []Field{{Name: "Birthday", Type: DateType, Format: defaultFieldFormat}}}}, - {"1Cell_Integer", []string{"Age"}, [][]string{[]string{"10"}}, Schema{Fields: []Field{{Name: "Age", Type: IntegerType, Format: defaultFieldFormat}}}}, - {"1Cell_Number", []string{"Weight"}, [][]string{[]string{"20.2"}}, Schema{Fields: []Field{{Name: "Weight", Type: NumberType, Format: defaultFieldFormat}}}}, - 
{"1Cell_Boolean", []string{"Foo"}, [][]string{[]string{"0"}}, Schema{Fields: []Field{{Name: "Foo", Type: BooleanType, Format: defaultFieldFormat}}}}, - {"1Cell_Object", []string{"Foo"}, [][]string{[]string{`{"name":"foo"}`}}, Schema{Fields: []Field{{Name: "Foo", Type: ObjectType, Format: defaultFieldFormat}}}}, - {"1Cell_Array", []string{"Foo"}, [][]string{[]string{`["name"]`}}, Schema{Fields: []Field{{Name: "Foo", Type: ArrayType, Format: defaultFieldFormat}}}}, - {"1Cell_String", []string{"Foo"}, [][]string{[]string{"name"}}, Schema{Fields: []Field{{Name: "Foo", Type: StringType, Format: defaultFieldFormat}}}}, - {"1Cell_Time", []string{"Foo"}, [][]string{[]string{"10:15:50"}}, Schema{Fields: []Field{{Name: "Foo", Type: TimeType, Format: defaultFieldFormat}}}}, - {"1Cell_YearMonth", []string{"YearMonth"}, [][]string{[]string{"2017-08"}}, Schema{Fields: []Field{{Name: "YearMonth", Type: YearMonthType, Format: defaultFieldFormat}}}}, - {"1Cell_Year", []string{"Year"}, [][]string{[]string{"2017"}}, Schema{Fields: []Field{{Name: "Year", Type: YearType, Format: defaultFieldFormat}}}}, - {"1Cell_DateTime", []string{"DateTime"}, [][]string{[]string{"2008-09-15T15:53:00+05:00"}}, Schema{Fields: []Field{{Name: "DateTime", Type: DateTimeType, Format: defaultFieldFormat}}}}, - {"1Cell_Duration", []string{"Duration"}, [][]string{[]string{"P3Y6M4DT12H30M5S"}}, Schema{Fields: []Field{{Name: "Duration", Type: DurationType, Format: defaultFieldFormat}}}}, - {"1Cell_GeoPoint", []string{"GeoPoint"}, [][]string{[]string{"90,45"}}, Schema{Fields: []Field{{Name: "GeoPoint", Type: GeoPointType, Format: defaultFieldFormat}}}}, - {"ManyCells", - []string{"Name", "Age", "Weight", "Bogus", "Boolean", "Boolean1"}, - [][]string{ - []string{"Foo", "10", "20.2", "1", "1", "1"}, - []string{"Foo", "10", "30", "1", "1", "1"}, - []string{"Foo", "10", "30", "Daniel", "1", "2"}, - }, - Schema{Fields: []Field{ - {Name: "Name", Type: StringType, Format: defaultFieldFormat}, - {Name: "Age", Type: 
IntegerType, Format: defaultFieldFormat}, - {Name: "Weight", Type: IntegerType, Format: defaultFieldFormat}, - {Name: "Bogus", Type: BooleanType, Format: defaultFieldFormat}, - {Name: "Boolean", Type: BooleanType, Format: defaultFieldFormat}, - {Name: "Boolean1", Type: BooleanType, Format: defaultFieldFormat}, - }}, - }, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - s, err := infer(d.headers, d.table) - is.NoErr(err) - - sort.Sort(s.Fields) - sort.Sort(d.want.Fields) - is.Equal(s, &d.want) - }) - } - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - headers []string - table [][]string - }{ - {"NotATable", []string{}, [][]string{[]string{"1"}}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := infer(d.headers, d.table) - is.True(err != nil) - }) - } - }) -} - -func TestInferImplicitCasting(t *testing.T) { - data := []struct { - desc string - headers []string - table [][]string - want Schema - }{ - {"1Cell_Date", []string{"Birthday"}, [][]string{[]string{"1983-10-15"}}, Schema{Fields: []Field{{Name: "Birthday", Type: DateType, Format: defaultFieldFormat}}}}, - {"1Cell_Integer", []string{"Age"}, [][]string{[]string{"10"}}, Schema{Fields: []Field{{Name: "Age", Type: IntegerType, Format: defaultFieldFormat}}}}, - {"1Cell_Number", []string{"Weight"}, [][]string{[]string{"20.2"}}, Schema{Fields: []Field{{Name: "Weight", Type: NumberType, Format: defaultFieldFormat}}}}, - {"1Cell_Boolean", []string{"Foo"}, [][]string{[]string{"0"}}, Schema{Fields: []Field{{Name: "Foo", Type: BooleanType, Format: defaultFieldFormat}}}}, - {"1Cell_Object", []string{"Foo"}, [][]string{[]string{`{"name":"foo"}`}}, Schema{Fields: []Field{{Name: "Foo", Type: ObjectType, Format: defaultFieldFormat}}}}, - {"1Cell_Array", []string{"Foo"}, [][]string{[]string{`["name"]`}}, Schema{Fields: []Field{{Name: "Foo", Type: ArrayType, Format: defaultFieldFormat}}}}, - {"1Cell_String", 
[]string{"Foo"}, [][]string{[]string{"name"}}, Schema{Fields: []Field{{Name: "Foo", Type: StringType, Format: defaultFieldFormat}}}}, - {"1Cell_Time", []string{"Foo"}, [][]string{[]string{"10:15:50"}}, Schema{Fields: []Field{{Name: "Foo", Type: TimeType, Format: defaultFieldFormat}}}}, - {"1Cell_YearMonth", []string{"YearMonth"}, [][]string{[]string{"2017-08"}}, Schema{Fields: []Field{{Name: "YearMonth", Type: YearMonthType, Format: defaultFieldFormat}}}}, - {"1Cell_Year", []string{"Year"}, [][]string{[]string{"2017"}}, Schema{Fields: []Field{{Name: "Year", Type: YearType, Format: defaultFieldFormat}}}}, - {"1Cell_DateTime", []string{"DateTime"}, [][]string{[]string{"2008-09-15T15:53:00+05:00"}}, Schema{Fields: []Field{{Name: "DateTime", Type: DateTimeType, Format: defaultFieldFormat}}}}, - {"1Cell_Duration", []string{"Duration"}, [][]string{[]string{"P3Y6M4DT12H30M5S"}}, Schema{Fields: []Field{{Name: "Duration", Type: DurationType, Format: defaultFieldFormat}}}}, - {"1Cell_GeoPoint", []string{"GeoPoint"}, [][]string{[]string{"90,45"}}, Schema{Fields: []Field{{Name: "GeoPoint", Type: GeoPointType, Format: defaultFieldFormat}}}}, - {"ManyCells", - []string{"Name", "Age", "Weight", "Bogus", "Boolean", "Int"}, - [][]string{ - []string{"Foo", "10", "20.2", "1", "1", "1"}, - []string{"Foo", "10", "30", "1", "1", "1"}, - []string{"Foo", "10", "30", "Daniel", "1", "2"}, - }, - Schema{Fields: []Field{ - {Name: "Name", Type: StringType, Format: defaultFieldFormat}, - {Name: "Age", Type: IntegerType, Format: defaultFieldFormat}, - {Name: "Weight", Type: NumberType, Format: defaultFieldFormat}, - {Name: "Bogus", Type: StringType, Format: defaultFieldFormat}, - {Name: "Boolean", Type: BooleanType, Format: defaultFieldFormat}, - {Name: "Int", Type: IntegerType, Format: defaultFieldFormat}, - }}, - }, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - s, err := inferImplicitCasting(d.headers, d.table) - is.NoErr(err) - - sort.Sort(s.Fields) - 
sort.Sort(d.want.Fields) - is.Equal(s, &d.want) - }) - } - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - headers []string - table [][]string - }{ - {"NotATable", []string{}, [][]string{[]string{"1"}}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := inferImplicitCasting(d.headers, d.table) - is.True(err != nil) - }) - } - }) -} - -var ( - benchmarkHeaders = []string{"Name", "Birthday", "Weight", "Address", "Siblings"} - benchmarkTable = [][]string{ - []string{"Foo", "2015-10-12", "20.2", `{"Street":"Foo", "Number":10, "City":"New York", "State":"NY"}`, `["Foo"]`}, - []string{"Bar", "2015-10-12", "30", `{"Street":"Foo", "Number":10, "City":"New York", "State":"NY"}`, `["Foo"]`}, - []string{"Bez", "2015-10-12", "30", `{"Street":"Foo", "Number":10, "City":"New York", "State":"NY"}`, `["Foo"]`}, - } -) - -func benchmarkinfer(growthMultiplier int, b *testing.B) { - for n := 0; n < b.N; n++ { - infer(benchmarkHeaders, generateBenchmarkTable(growthMultiplier)) - } -} - -func benchmarkInferImplicitCasting(growthMultiplier int, b *testing.B) { - for n := 0; n < b.N; n++ { - inferImplicitCasting(benchmarkHeaders, generateBenchmarkTable(growthMultiplier)) - } -} - -func generateBenchmarkTable(growthMultiplier int) [][]string { - var t [][]string - for i := 0; i < growthMultiplier; i++ { - t = append(t, benchmarkTable...) 
- } - return t -} - -func BenchmarkInferSmall(b *testing.B) { benchmarkinfer(1, b) } -func BenchmarkInferMedium(b *testing.B) { benchmarkinfer(100, b) } -func BenchmarkInferBig(b *testing.B) { benchmarkinfer(1000, b) } -func BenchmarkInferImplicitCastingSmall(b *testing.B) { benchmarkInferImplicitCasting(1, b) } -func BenchmarkInferImplicitCastingMedium(b *testing.B) { benchmarkInferImplicitCasting(100, b) } -func BenchmarkInferImplicitCastingBig(b *testing.B) { benchmarkInferImplicitCasting(1000, b) } diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/integer.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/integer.go deleted file mode 100644 index 5356f3e..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/integer.go +++ /dev/null @@ -1,53 +0,0 @@ -package schema - -import ( - "fmt" - "regexp" - "strconv" -) - -// CastInt casts an integer value (passed-in as unicode string) against a field. Returns an -// error if the value can not be converted to integer. 
-func castInt(bareNumber bool, value string, c Constraints) (int64, error) { - v := value - if !bareNumber { - var err error - v, err = stripIntegerFromString(v) - if err != nil { - return 0, err - } - } - returned, err := strconv.ParseInt(v, 10, 64) - if err != nil { - return 0, err - } - if c.Maximum != "" { - max, err := strconv.ParseInt(c.Maximum, 10, 64) - if err != nil { - return 0, fmt.Errorf("invalid maximum integer: %v", c.Maximum) - } - if returned > max { - return 0, fmt.Errorf("constraint check error: integer:%d > maximum:%d", returned, max) - } - } - if c.Minimum != "" { - min, err := strconv.ParseInt(c.Minimum, 10, 64) - if err != nil { - return 0, fmt.Errorf("invalid minimum integer: %v", c.Minimum) - } - if returned < min { - return 0, fmt.Errorf("constraint check error: integer:%d < minimum:%d", returned, min) - } - } - return returned, nil -} - -var bareIntegerRegexp = regexp.MustCompile(`((^[0-9]+)|([0-9]+$))`) - -func stripIntegerFromString(v string) (string, error) { - matches := bareIntegerRegexp.FindStringSubmatch(v) - if matches == nil { - return "", fmt.Errorf("invalid integer to strip:%s", v) - } - return matches[1], nil -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/integer_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/integer_test.go deleted file mode 100644 index 0d49c9d..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/integer_test.go +++ /dev/null @@ -1,67 +0,0 @@ -package schema - -import ( - "testing" - - "github.com/matryer/is" -) - -const notBareInt = false - -func TestCastInt(t *testing.T) { - t.Run("Success", func(t *testing.T) { - data := []struct { - desc string - number string - want int64 - bn bool - }{ - {"Positive_WithSignal", "+10", 10, defaultBareNumber}, - {"Positive_WithoutSignal", "10", 10, defaultBareNumber}, - {"Negative", "-10", -10, defaultBareNumber}, - {"BareNumber", "€95", 95, notBareInt}, - 
{"BareNumber_TrailingAtBeginning", "€95", 95, notBareInt}, - {"BareNumber_TrailingAtBeginningSpace", "EUR 95", 95, notBareInt}, - {"BareNumber_TrailingAtEnd", "95%", 95, notBareInt}, - {"BareNumber_TrailingAtEndSpace", "95 %", 95, notBareInt}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := castInt(d.bn, d.number, Constraints{}) - is.NoErr(err) - is.Equal(d.want, got) - }) - } - }) - t.Run("ValidMaximum", func(t *testing.T) { - is := is.New(t) - _, err := castInt(defaultBareNumber, "2", Constraints{Maximum: "2"}) - is.NoErr(err) - }) - t.Run("ValidMinimum", func(t *testing.T) { - is := is.New(t) - _, err := castInt(defaultBareNumber, "2", Constraints{Minimum: "1"}) - is.NoErr(err) - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - number string - constraints Constraints - }{ - {"InvalidIntToStrip_TooManyNumbers", "+10++10", Constraints{}}, - {"NumBiggerThanMaximum", "3", Constraints{Maximum: "2"}}, - {"InvalidMaximum", "1", Constraints{Maximum: "boo"}}, - {"NumSmallerThanMinimum", "1", Constraints{Minimum: "2"}}, - {"InvalidMinimum", "1", Constraints{Minimum: "boo"}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castInt(defaultBareNumber, d.number, d.constraints) - is.True(err != nil) - }) - } - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/number.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/number.go deleted file mode 100644 index def1c93..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/number.go +++ /dev/null @@ -1,61 +0,0 @@ -package schema - -import ( - "fmt" - "regexp" - "strconv" - "strings" -) - -func castNumber(decimalChar, groupChar string, bareNumber bool, value string, c Constraints) (float64, error) { - dc := decimalChar - if groupChar != "" { - dc = decimalChar - } - v := strings.Replace(value, dc, ".", 1) - gc := defaultGroupChar - if 
groupChar != "" { - gc = groupChar - } - v = strings.Replace(v, gc, "", -1) - if !bareNumber { - var err error - v, err = stripNumberFromString(v) - if err != nil { - return 0, err - } - } - returned, err := strconv.ParseFloat(v, 64) - if err != nil { - return 0, err - } - if c.Maximum != "" { - max, err := strconv.ParseFloat(c.Maximum, 64) - if err != nil { - return 0, fmt.Errorf("invalid maximum number: %v", c.Maximum) - } - if returned > max { - return 0, fmt.Errorf("constraint check error: integer:%f > maximum:%f", returned, max) - } - } - if c.Minimum != "" { - min, err := strconv.ParseFloat(c.Minimum, 64) - if err != nil { - return 0, fmt.Errorf("invalid minimum integer: %v", c.Minimum) - } - if returned < min { - return 0, fmt.Errorf("constraint check error: integer:%f < minimum:%f", returned, min) - } - } - return returned, nil -} - -var bareNumberRegexp = regexp.MustCompile(`((^[0-9]+\.?[0-9]*)|([0-9]+\.?[0-9]*$))`) - -func stripNumberFromString(v string) (string, error) { - matches := bareNumberRegexp.FindStringSubmatch(v) - if matches == nil { - return "", fmt.Errorf("invalid number to strip:%s", v) - } - return matches[1], nil -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/number_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/number_test.go deleted file mode 100644 index e33b8dd..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/number_test.go +++ /dev/null @@ -1,94 +0,0 @@ -package schema - -import ( - "math" - "testing" - - "github.com/matryer/is" -) - -const notBareNumber = false - -func TestCastNumber(t *testing.T) { - t.Run("Success", func(t *testing.T) { - data := []struct { - desc string - number string - want float64 - dc string - gc string - bn bool - }{ - {"Positive_WithSignal", "+10.10", 10.10, defaultDecimalChar, defaultGroupChar, defaultBareNumber}, - {"Positive_WithoutSignal", "10.10", 10.10, defaultDecimalChar, defaultGroupChar, defaultBareNumber}, - 
{"Negative", "-10.10", -10.10, defaultDecimalChar, defaultGroupChar, defaultBareNumber}, - {"BareNumber", "€95", 95, defaultDecimalChar, defaultGroupChar, notBareNumber}, - {"BareNumber_TrailingAtBeginning", "€95", 95, defaultDecimalChar, defaultGroupChar, notBareNumber}, - {"BareNumber_TrailingAtBeginningSpace", "EUR 95", 95, defaultDecimalChar, defaultGroupChar, notBareNumber}, - {"BareNumber_TrailingAtEnd", "95%", 95, defaultDecimalChar, defaultGroupChar, notBareNumber}, - {"BareNumber_TrailingAtEndSpace", "95 %", 95, defaultDecimalChar, defaultGroupChar, notBareNumber}, - {"GroupChar", "100,000", 100000, defaultDecimalChar, defaultGroupChar, defaultBareNumber}, - {"DecimalChar", "95;10", 95.10, ";", defaultGroupChar, defaultBareNumber}, - {"Mix", "EUR 95;10", 95.10, ";", ";", notBareNumber}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := castNumber(d.dc, d.gc, d.bn, d.number, Constraints{}) - is.NoErr(err) - is.Equal(d.want, got) - }) - } - }) - t.Run("NaN", func(t *testing.T) { - is := is.New(t) - got, err := castNumber(defaultDecimalChar, defaultGroupChar, defaultBareNumber, "NaN", Constraints{}) - is.NoErr(err) - is.True(math.IsNaN(got)) - }) - t.Run("INF", func(t *testing.T) { - is := is.New(t) - got, err := castNumber(defaultDecimalChar, defaultGroupChar, defaultBareNumber, "INF", Constraints{}) - is.NoErr(err) - is.True(math.IsInf(got, 1)) - }) - t.Run("NegativeINF", func(t *testing.T) { - is := is.New(t) - got, err := castNumber(defaultDecimalChar, defaultGroupChar, defaultBareNumber, "-INF", Constraints{}) - is.NoErr(err) - is.True(math.IsInf(got, -1)) - }) - t.Run("ValidMaximum", func(t *testing.T) { - is := is.New(t) - _, err := castNumber(defaultDecimalChar, defaultGroupChar, defaultBareNumber, "2", Constraints{Maximum: "2"}) - is.NoErr(err) - }) - t.Run("ValidMinimum", func(t *testing.T) { - is := is.New(t) - _, err := castNumber(defaultDecimalChar, defaultGroupChar, defaultBareNumber, "2", 
Constraints{Minimum: "2"}) - is.NoErr(err) - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - number string - dc string - gc string - bn bool - constraints Constraints - }{ - {"InvalidNumberToStrip_TooManyNumbers", "+10.10++10", defaultDecimalChar, defaultGroupChar, notBareNumber, Constraints{}}, - {"NumBiggerThanMaximum", "3", defaultDecimalChar, defaultGroupChar, notBareNumber, Constraints{Maximum: "2"}}, - {"InvalidMaximum", "1", defaultDecimalChar, defaultGroupChar, notBareNumber, Constraints{Maximum: "boo"}}, - {"NumSmallerThanMinimum", "1", defaultDecimalChar, defaultGroupChar, notBareNumber, Constraints{Minimum: "2"}}, - {"InvalidMinimum", "1", defaultDecimalChar, defaultGroupChar, notBareNumber, Constraints{Minimum: "boo"}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castNumber(defaultDecimalChar, defaultGroupChar, defaultBareNumber, d.number, d.constraints) - is.True(err != nil) - }) - } - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/object.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/object.go deleted file mode 100644 index aeb2db7..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/object.go +++ /dev/null @@ -1,16 +0,0 @@ -package schema - -import "encoding/json" - -func castObject(value string) (interface{}, error) { - var obj interface{} - if err := json.Unmarshal([]byte(value), &obj); err != nil { - return nil, err - } - return obj, nil -} - -func uncastObject(value interface{}) (string, error) { - b, err := json.Marshal(value) - return string(b), err -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/object_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/object_test.go deleted file mode 100644 index e7ece48..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/object_test.go +++ /dev/null @@ -1,26 +0,0 @@ -package 
schema - -import "testing" -import "github.com/matryer/is" - -type eoStruct struct { - Name string `json:"name"` -} - -func TestUncastObject(t *testing.T) { - data := []struct { - desc string - value interface{} - want string - }{ - {"Simple", eoStruct{Name: "Foo"}, `{"name":"Foo"}`}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := uncastObject(d.value) - is.NoErr(err) - is.Equal(d.want, got) - }) - } -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/schema.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/schema.go deleted file mode 100644 index c683fd8..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/schema.go +++ /dev/null @@ -1,431 +0,0 @@ -package schema - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "net/http" - "os" - "reflect" - "sort" - "strings" - "sync" - "time" - - "github.com/frictionlessdata/tableschema-go/table" -) - -// InvalidPosition is returned by GetField call when -// it refers to a field that does not exist in the schema. -const InvalidPosition = -1 - -// Unexported tagname for the tableheader -const tableheaderTag = "tableheader" - -// Read reads and parses a descriptor to create a schema. -// -// Example - Reading a schema from a file: -// -// f, err := os.Open("foo/bar/schema.json") -// if err != nil { -// panic(err) -// } -// s, err := Read(f) -// if err != nil { -// panic(err) -// } -// fmt.Println(s) -func Read(r io.Reader) (*Schema, error) { - var s Schema - dec := json.NewDecoder(r) - if err := dec.Decode(&s); err != nil { - return nil, err - } - if len(s.MissingValues) == 0 { - return &s, nil - } - // Transforming the list in a set. - valueSet := make(map[string]struct{}, len(s.MissingValues)) - for _, v := range s.MissingValues { - valueSet[v] = struct{}{} - } - // Updating fields. 
- for i := range s.Fields { - s.Fields[i].MissingValues = make(map[string]struct{}, len(valueSet)) - for k, v := range valueSet { - s.Fields[i].MissingValues[k] = v - } - } - return &s, nil -} - -// LoadFromFile loads and parses a schema descriptor from a local file. -func LoadFromFile(path string) (*Schema, error) { - f, err := os.Open(path) - if err != nil { - return nil, err - } - return Read(f) -} - -var ( - httpClient *http.Client - once sync.Once -) - -const remoteFetchTimeoutSecs = 15 - -// LoadRemote downloads and parses a schema descriptor from the specified URL. -func LoadRemote(url string) (*Schema, error) { - once.Do(func() { - httpClient = &http.Client{ - Timeout: remoteFetchTimeoutSecs * time.Second, - } - }) - resp, err := httpClient.Get(url) - if err != nil { - return nil, err - } - defer resp.Body.Close() - return Read(resp.Body) -} - -// Fields represents a list of schema fields. -type Fields []Field - -func (f Fields) Len() int { return len(f) } -func (f Fields) Swap(i, j int) { f[i], f[j] = f[j], f[i] } -func (f Fields) Less(i, j int) bool { return strings.Compare(f[i].Name, f[j].Name) == -1 } - -// ForeignKeyReference represents the field reference by a foreign key. -type ForeignKeyReference struct { - Resource string `json:"resource,omitempty"` - Fields []string `json:"-"` - FieldsPlaceholder interface{} `json:"fields,omitempty"` -} - -// ForeignKeys defines a schema foreign key -type ForeignKeys struct { - Fields []string `json:"-"` - FieldsPlaceholder interface{} `json:"fields,omitempty"` - Reference ForeignKeyReference `json:"reference,omitempty"` -} - -// Schema describes tabular data. -type Schema struct { - Fields Fields `json:"fields,omitempty"` - PrimaryKeyPlaceholder interface{} `json:"primaryKey,omitempty"` - PrimaryKeys []string `json:"-"` - ForeignKeys ForeignKeys `json:"foreignKeys,omitempty"` - MissingValues []string `json:"missingValues,omitempty"` -} - -// GetField fetches the index and field referenced by the name argument. 
-func (s *Schema) GetField(name string) (*Field, int) { - for i := range s.Fields { - if name == s.Fields[i].Name { - return &s.Fields[i], i - } - } - return nil, InvalidPosition -} - -// HasField returns checks whether the schema has a field with the passed-in. -func (s *Schema) HasField(name string) bool { - _, pos := s.GetField(name) - return pos != InvalidPosition -} - -// Validate checks whether the schema is valid. If it is not, returns an error -// describing the problem. -// More at: https://specs.frictionlessdata.io/table-schema/ -func (s *Schema) Validate() error { - // Checking if all fields have a name. - for _, f := range s.Fields { - if f.Name == "" { - return fmt.Errorf("invalid field: attribute name is mandatory") - } - } - // Checking primary keys. - for _, pk := range s.PrimaryKeys { - if !s.HasField(pk) { - return fmt.Errorf("invalid primary key: there is no field %s", pk) - } - } - // Checking foreign keys. - for _, fk := range s.ForeignKeys.Fields { - if !s.HasField(fk) { - return fmt.Errorf("invalid foreign keys: there is no field %s", fk) - } - } - if len(s.ForeignKeys.Reference.Fields) != len(s.ForeignKeys.Fields) { - return fmt.Errorf("invalid foreign key: foreignKey.fields must contain the same number entries as foreignKey.reference.fields") - } - return nil -} - -// Write writes the schema descriptor. -func (s *Schema) Write(w io.Writer) error { - pp, err := json.MarshalIndent(s, "", " ") - if err != nil { - return err - } - w.Write(pp) - return nil -} - -// SaveToFile writes the schema descriptor in local file. -func (s *Schema) SaveToFile(path string) error { - f, err := os.Create(path) - if err != nil { - return err - } - return s.Write(f) -} - -// CastRow casts the passed-in row to schema types and stores it in the value pointed -// by out. The out value must be pointer to a struct. Only exported fields will be unmarshalled. -// The lowercased field name is used as the key for each exported field. 
-// -// If a value in the row cannot be marshalled to its respective schema field (Field.Unmarshal), -// this call will return an error. Furthermore, this call is also going to return an error if -// the schema field value can not be unmarshalled to the struct field type. -func (s *Schema) CastRow(row []string, out interface{}) error { - if reflect.ValueOf(out).Kind() != reflect.Ptr || reflect.Indirect(reflect.ValueOf(out)).Kind() != reflect.Struct { - return fmt.Errorf("can only cast pointer to structs") - } - outv := reflect.Indirect(reflect.ValueOf(out)) - outt := outv.Type() - for i := 0; i < outt.NumField(); i++ { - fieldValue := outv.Field(i) - if fieldValue.CanSet() { // Only consider exported fields. - field := outt.Field(i) - fieldName, ok := field.Tag.Lookup(tableheaderTag) - if !ok { // if no tag is set use own name - fieldName = field.Name - } - f, fieldIndex := s.GetField(fieldName) - if fieldIndex != InvalidPosition { - cell := row[fieldIndex] - if s.isMissingValue(cell) { - continue - } - v, err := f.Cast(cell) - if err != nil { - return err - } - toSetValue := reflect.ValueOf(v) - toSetType := toSetValue.Type() - if !toSetType.ConvertibleTo(field.Type) { - return fmt.Errorf("value:%s field:%s - can not convert from %v to %v", field.Name, cell, toSetType, field.Type) - } - fieldValue.Set(toSetValue.Convert(field.Type)) - } - } - } - return nil -} - -type rawCell struct { - pos int - val string -} - -type rawRow []rawCell - -func (r rawRow) Len() int { return len(r) } -func (r rawRow) Swap(i, j int) { r[i], r[j] = r[j], r[i] } -func (r rawRow) Less(i, j int) bool { return r[i].pos < r[j].pos } - -// UncastRow uncasts struct into a row. This method can only uncast structs (or pointer to structs) and -// will error out if nil is passed. -// The order of the cells in the returned row is the schema declaration order. 
-func (s *Schema) UncastRow(in interface{}) ([]string, error) { - inValue := reflect.Indirect(reflect.ValueOf(in)) - if inValue.Kind() != reflect.Struct { - return nil, fmt.Errorf("can only uncast structs and does not support nil pointers") - } - inType := inValue.Type() - var row rawRow - for i := 0; i < inType.NumField(); i++ { - structFieldValue := inValue.Field(i) - fieldName, ok := inType.Field(i).Tag.Lookup(tableheaderTag) - if !ok { - fieldName = inType.Field(i).Name - } - f, fieldIndex := s.GetField(fieldName) - if fieldIndex != InvalidPosition { - cell, err := f.Uncast(structFieldValue.Interface()) - if err != nil { - return nil, err - } - row = append(row, rawCell{fieldIndex, cell}) - } - } - sort.Sort(row) - ret := make([]string, len(row)) - for i := range row { - ret[i] = row[i].val - } - return ret, nil -} - -func (s *Schema) isMissingValue(value string) bool { - for _, mv := range s.MissingValues { - if mv == value { - return true - } - } - return false -} - -// UnmarshalJSON sets *f to a copy of data. It will respect the default values -// described at: https://specs.frictionlessdata.io/table-schema/ -func (s *Schema) UnmarshalJSON(data []byte) error { - // This is neded so it does not call UnmarshalJSON from recursively. 
- type schemaAlias Schema - var a schemaAlias - if err := json.Unmarshal(data, &a); err != nil { - return err - } - if err := processPlaceholder(a.PrimaryKeyPlaceholder, &a.PrimaryKeys); err != nil { - return fmt.Errorf("primaryKey must be either a string or list") - } - a.PrimaryKeyPlaceholder = nil - if err := processPlaceholder(a.ForeignKeys.FieldsPlaceholder, &a.ForeignKeys.Fields); err != nil { - return fmt.Errorf("foreignKeys.fields must be either a string or list") - } - a.ForeignKeys.FieldsPlaceholder = nil - if err := processPlaceholder(a.ForeignKeys.Reference.FieldsPlaceholder, &a.ForeignKeys.Reference.Fields); err != nil { - return fmt.Errorf("foreignKeys.reference.fields must be either a string or list") - } - a.ForeignKeys.Reference.FieldsPlaceholder = nil - *s = Schema(a) - return nil -} - -// MarshalJSON returns the JSON encoding of s. -func (s *Schema) MarshalJSON() ([]byte, error) { - type schemaAlias Schema - a := schemaAlias(*s) - a.PrimaryKeyPlaceholder = a.PrimaryKeys - a.ForeignKeys.Reference.FieldsPlaceholder = a.ForeignKeys.Reference.Fields - return json.Marshal(a) -} - -func processPlaceholder(ph interface{}, v *[]string) error { - if ph == nil { - return nil - } - if vStr, ok := ph.(string); ok { - *v = append(*v, vStr) - return nil - } - if vSlice, ok := ph.([]interface{}); ok { - for i := range vSlice { - *v = append(*v, vSlice[i].(string)) - } - return nil - } - // Only for signalling that an error happened. The caller knows the best - // error message. - return fmt.Errorf("") -} - -// uniqueKey represents field ID and field value which then can be used for equality tests (e.g. in a map key) -type uniqueKey struct { - KeyIndex int - KeyValue interface{} -} - -// CastTable loads and casts all table rows. -// -// The result argument must necessarily be the address for a slice. The slice -// may be nil or previously allocated. 
-func (s *Schema) CastTable(tab table.Table, out interface{}) error { - outv := reflect.ValueOf(out) - if outv.Kind() != reflect.Ptr || outv.Elem().Kind() != reflect.Slice { - return fmt.Errorf("out argument must be a slice address") - } - iter, err := tab.Iter() - if err != nil { - return err - } - defer iter.Close() - - uniqueFieldIndexes := extractUniqueFieldIndexes(s) - uniqueCache := make(map[uniqueKey]struct{}) - - slicev := outv.Elem() - slicev = slicev.Slice(0, 0) // Trucantes the passed-in slice. - elemt := slicev.Type().Elem() - i := 0 - for iter.Next() { - i++ - elemp := reflect.New(elemt) - if err := s.CastRow(iter.Row(), elemp.Interface()); err != nil { - return err - } - for _, k := range uniqueFieldIndexes { - field := elemp.Elem().Field(k) - if _, ok := uniqueCache[uniqueKey{k, field.Interface()}]; ok { - return fmt.Errorf("field(s) '%s' duplicates in row %v", elemp.Elem().Type().Field(k).Name, i) - } - uniqueCache[uniqueKey{k, field.Interface()}] = struct{}{} - } - slicev = reflect.Append(slicev, elemp.Elem()) - slicev = slicev.Slice(0, slicev.Len()) - } - if iter.Err() != nil { - return iter.Err() - } - outv.Elem().Set(slicev.Slice(0, i)) - return nil -} - -func extractUniqueFieldIndexes(s *Schema) []int { - uniqueIndexes := make(map[int]struct{}) - for _, pk := range s.PrimaryKeys { - _, index := s.GetField(pk) - uniqueIndexes[index] = struct{}{} - } - for i := range s.Fields { - if _, ok := uniqueIndexes[i]; !ok && s.Fields[i].Constraints.Unique { - uniqueIndexes[i] = struct{}{} - } - } - keys := make([]int, 0, len(uniqueIndexes)) - for k := range uniqueIndexes { - keys = append(keys, k) - } - return keys -} - -// UncastTable uncasts each element (struct) of the passed-in slice and -func (s *Schema) UncastTable(in interface{}) ([][]string, error) { - inVal := reflect.Indirect(reflect.ValueOf(in)) - if inVal.Kind() != reflect.Slice { - return nil, fmt.Errorf("tables must be slice of structs") - } - var t [][]string - for i := 0; i < inVal.Len(); 
i++ { - r, err := s.UncastRow(inVal.Index(i).Interface()) - if err != nil { - return nil, err - } - t = append(t, r) - } - return t, nil -} - -// String returns an human readable version of the schema. -func (s *Schema) String() string { - var buf bytes.Buffer - pp, err := json.Marshal(s) - if err != nil { - return "" - } - buf.Write(pp) - return buf.String() -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/schema_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/schema_test.go deleted file mode 100644 index 64f532e..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/schema_test.go +++ /dev/null @@ -1,629 +0,0 @@ -package schema - -import ( - "bytes" - "fmt" - "net/http" - "net/http/httptest" - "strings" - "testing" - - "github.com/frictionlessdata/tableschema-go/table" - "github.com/matryer/is" -) - -func ExampleSchema_CastRow() { - // Lets assume we have a schema ... - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Age", Type: IntegerType}}} - - // And a Table. - t := table.FromSlices([]string{"Name", "Age"}, [][]string{ - {"Foo", "42"}, - {"Bar", "43"}}) - - // And we would like to process them using Go types. First we need to create a struct to - // hold the content of each row. - // The tag tableheader maps the field to the schema. If no tag is set the name of the field - // has to be the same like inside the schema. - type person struct { - MyName string `tableheader:"Name"` - Age int - } - - // Now it is a matter of iterate over the table and Cast each row. - iter, _ := t.Iter() - for iter.Next() { - var p person - s.CastRow(iter.Row(), &p) - fmt.Printf("%+v\n", p) - } - // Output: {MyName:Foo Age:42} - // {MyName:Bar Age:43} -} - -func ExampleSchema_CastTable() { - // Lets assume we have a schema ... 
- s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Age", Type: IntegerType, Constraints: Constraints{Unique: true}}}} - - // And a Table. - t := table.FromSlices([]string{"Name", "Age"}, [][]string{ - {"Foo", "42"}, - {"Bar", "43"}}) - - // And we would like to process them using Go types. First we need to create a struct to - // hold the content of each row. - // The tag tableheader maps the field to the schema. If no tag is set the name of the field - // has to be the same like inside the schema. - type person struct { - MyName string `tableheader:"Name"` - Age int - } - var people []person - s.CastTable(t, &people) - fmt.Print(people) - // Output: [{Foo 42} {Bar 43}] -} - -func ExampleSchema_UncastRow() { - // Lets assume we have a schema. - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Age", Type: IntegerType}}} - - // And would like to create a CSV out of this list. The tag tableheader maps - // the field to the schema name. If no tag is set the name of the field - // has to be the same like inside the schema. - people := []struct { - MyName string `tableheader:"Name"` - Age int - }{{"Foo", 42}, {"Bar", 43}} - - // First create the writer and write the header. - w := table.NewStringWriter() - w.Write([]string{"Name", "Age"}) - - // Then write the list - for _, person := range people { - row, _ := s.UncastRow(person) - w.Write(row) - } - w.Flush() - fmt.Print(w.String()) - // Output: Name,Age - // Foo,42 - // Bar,43 -} - -func ExampleSchema_UncastTable() { - // Lets assume we have a schema. - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Age", Type: IntegerType}}} - - // And would like to create a CSV out of this list. The tag tableheader maps - // the field to the schema name. If no tag is set the name of the field - // has to be the same like inside the schema. 
- people := []struct { - MyName string `tableheader:"Name"` - Age int - }{{"Foo", 42}, {"Bar", 43}} - - // Then uncast the people slice into a slice of rows. - rows, _ := s.UncastTable(people) - - // Now, simply write it down. - w := table.NewStringWriter() - w.Write([]string{"Name", "Age"}) - w.WriteAll(rows) - w.Flush() - fmt.Print(w.String()) - // Output: Name,Age - // Foo,42 - // Bar,43 -} - -func TestLoadRemote(t *testing.T) { - is := is.New(t) - h := func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintf(w, `{"fields": [{"name": "ID", "type": "integer"}]}`) - } - ts := httptest.NewServer(http.HandlerFunc(h)) - defer ts.Close() - got, err := LoadRemote(ts.URL) - is.NoErr(err) - - want := &Schema{Fields: []Field{asJSONField(Field{Name: "ID", Type: "integer"})}} - is.Equal(got, want) - - t.Run("Error", func(t *testing.T) { - is := is.New(t) - _, err := LoadRemote("invalidURL") - is.True(err != nil) - }) -} - -func TestRead_Sucess(t *testing.T) { - data := []struct { - Desc string - JSON string - Schema Schema - }{ - { - "OneField", - `{ - "fields":[{"name":"n","title":"ti","type":"integer","description":"desc","format":"f","trueValues":["ntrue"],"falseValues":["nfalse"]}] - }`, - Schema{ - Fields: []Field{{Name: "n", Title: "ti", Type: "integer", Description: "desc", Format: "f", TrueValues: []string{"ntrue"}, FalseValues: []string{"nfalse"}, - DecimalChar: defaultDecimalChar, GroupChar: defaultGroupChar, BareNumber: defaultBareNumber}}, - }, - }, - { - "MultipleFields", - `{ - "fields":[{"name":"n1","type":"t1","format":"f1","falseValues":[]}, {"name":"n2","type":"t2","format":"f2","trueValues":[]}] - }`, - Schema{ - Fields: []Field{ - {Name: "n1", Type: "t1", Format: "f1", TrueValues: defaultTrueValues, FalseValues: []string{}, DecimalChar: defaultDecimalChar, GroupChar: defaultGroupChar, BareNumber: defaultBareNumber}, - {Name: "n2", Type: "t2", Format: "f2", TrueValues: []string{}, FalseValues: defaultFalseValues, DecimalChar: defaultDecimalChar, 
GroupChar: defaultGroupChar, BareNumber: defaultBareNumber}, - }, - }, - }, - { - "PKString", - `{"fields":[{"name":"n1"}], "primaryKey":"n1"}`, - Schema{Fields: []Field{asJSONField(Field{Name: "n1"})}, PrimaryKeys: []string{"n1"}}, - }, - { - "PKSlice", - `{"fields":[{"name":"n1"}], "primaryKey":["n1"]}`, - Schema{Fields: []Field{asJSONField(Field{Name: "n1"})}, PrimaryKeys: []string{"n1"}}, - }, - { - "FKFieldsString", - `{"fields":[{"name":"n1"}], "foreignKeys":{"fields":"n1"}}`, - Schema{Fields: []Field{asJSONField(Field{Name: "n1"})}, ForeignKeys: ForeignKeys{Fields: []string{"n1"}}}, - }, - { - "FKFieldsSlice", - `{"fields":[{"name":"n1"}], "foreignKeys":{"fields":["n1"]}}`, - Schema{Fields: []Field{asJSONField(Field{Name: "n1"})}, ForeignKeys: ForeignKeys{Fields: []string{"n1"}}}, - }, - { - "FKReferenceFieldsString", - `{"fields":[{"name":"n1"}], "foreignKeys":{"reference":{"fields":"n1"}}}`, - Schema{Fields: []Field{asJSONField(Field{Name: "n1"})}, ForeignKeys: ForeignKeys{Reference: ForeignKeyReference{Fields: []string{"n1"}}}}, - }, - { - "FKReferenceFieldsSlice", - `{"fields":[{"name":"n1"}], "foreignKeys":{"reference":{"fields":["n1"]}}}`, - Schema{Fields: []Field{asJSONField(Field{Name: "n1"})}, ForeignKeys: ForeignKeys{Reference: ForeignKeyReference{Fields: []string{"n1"}}}}, - }, - } - for _, d := range data { - t.Run(d.Desc, func(t *testing.T) { - is := is.New(t) - s, err := Read(strings.NewReader(d.JSON)) - is.NoErr(err) - is.Equal(s, &d.Schema) - }) - } - t.Run("MissingValues", func(t *testing.T) { - is := is.New(t) - reader := strings.NewReader(`{"fields":[{"name":"n","type":"integer"}],"missingValues":["na"]}`) - s, err := Read(reader) - is.NoErr(err) - - f := s.Fields[0] - _, ok := f.MissingValues["na"] - is.True(ok) - }) -} - -func TestRead_Error(t *testing.T) { - data := []struct { - Desc string - JSON string - }{ - {"InvalidSchema", `{"fields":"f1"}`}, - {"EmptyDescriptor", ""}, - {"InvalidPKType", `{"fields":[{"name":"n1"}], 
"primaryKey":1}`}, - {"InvalidFKFieldsType", `{"fields":[{"name":"n1"}], "foreignKeys":{"fields":1}}`}, - {"InvalidFKReferenceFieldsType", `{"fields":[{"name":"n1"}], "foreignKeys":{"reference":{"fields":1}}}`}, - } - for _, d := range data { - t.Run(d.Desc, func(t *testing.T) { - is := is.New(t) - _, err := Read(strings.NewReader(d.JSON)) - is.True(err != nil) - }) - } -} - -func TestSchema_Cast(t *testing.T) { - t.Run("NoImplicitCast", func(t *testing.T) { - is := is.New(t) - t1 := struct { - Name string - Age int64 - }{} - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Age", Type: IntegerType}}} - is.NoErr(s.CastRow([]string{"Foo", "42"}, &t1)) - is.Equal(t1.Name, "Foo") - is.Equal(t1.Age, int64(42)) - }) - t.Run("StructWithTags", func(t *testing.T) { - is := is.New(t) - t1 := struct { - MyName string `tableheader:"Name"` - MyAge int64 `tableheader:"Age"` - }{} - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Age", Type: IntegerType}}} - is.NoErr(s.CastRow([]string{"Foo", "42"}, &t1)) - is.Equal(t1.MyName, "Foo") - is.Equal(t1.MyAge, int64(42)) - }) - t.Run("ImplicitCastToInt", func(t *testing.T) { - is := is.New(t) - t1 := struct{ Age int }{} - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Age", Type: IntegerType}}} - is.NoErr(s.CastRow([]string{"Foo", "42"}, &t1)) - is.Equal(t1.Age, 42) - }) - t.Run("Error_SchemaFieldAndStructFieldDifferentTypes", func(t *testing.T) { - is := is.New(t) - // Field is string and struct is int. 
- t1 := struct{ Age int }{} - s := Schema{Fields: []Field{{Name: "Age", Type: StringType}}} - is.True(s.CastRow([]string{"42"}, &t1) != nil) - }) - t.Run("Error_NotAPointerToStruct", func(t *testing.T) { - is := is.New(t) - t1 := struct{ Age int }{} - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}}} - is.True(s.CastRow([]string{"Foo", "42"}, t1) != nil) - }) - t.Run("Error_CellCanNotBeCast", func(t *testing.T) { - is := is.New(t) - // Field is string and struct is int. - t1 := struct{ Age int }{} - s := Schema{Fields: []Field{{Name: "Age", Type: IntegerType}}} - is.True(s.CastRow([]string{"foo"}, &t1) != nil) - }) - t.Run("Error_CastToNil", func(t *testing.T) { - is := is.New(t) - t1 := &struct{ Age int }{} - t1 = nil - s := Schema{Fields: []Field{{Name: "Age", Type: IntegerType}}} - is.True(s.CastRow([]string{"foo"}, &t1) != nil) - }) -} - -func TestValidate_SimpleValid(t *testing.T) { - data := []struct { - Desc string - Schema Schema - }{ - {"PrimaryKey", Schema{Fields: []Field{{Name: "p"}, {Name: "i"}}, - PrimaryKeys: []string{"p"}, - ForeignKeys: ForeignKeys{ - Fields: []string{"p"}, - Reference: ForeignKeyReference{Resource: "", Fields: []string{"i"}}, - }}, - }, - } - for _, d := range data { - t.Run(d.Desc, func(t *testing.T) { - is := is.New(t) - is.NoErr(d.Schema.Validate()) - }) - } -} - -func TestValidate_Invalid(t *testing.T) { - data := []struct { - Desc string - Schema Schema - }{ - {"MissingName", Schema{Fields: []Field{{Type: IntegerType}}}}, - {"PKNonexistingField", Schema{Fields: []Field{{Name: "n1"}}, PrimaryKeys: []string{"n2"}}}, - {"FKNonexistingField", Schema{Fields: []Field{{Name: "n1"}}, - ForeignKeys: ForeignKeys{Fields: []string{"n2"}}, - }}, - {"InvalidReferences", Schema{Fields: []Field{{Name: "n1"}}, - ForeignKeys: ForeignKeys{ - Fields: []string{"n1"}, - Reference: ForeignKeyReference{Resource: "", Fields: []string{"n1", "n2"}}, - }}, - }, - } - for _, d := range data { - t.Run(d.Desc, func(t *testing.T) { - is := 
is.New(t) - is.True(d.Schema.Validate() != nil) - }) - } -} - -func TestWrite(t *testing.T) { - is := is.New(t) - s := Schema{ - Fields: []Field{{Name: "Foo"}, {Name: "Bar"}}, - PrimaryKeys: []string{"Foo"}, - ForeignKeys: ForeignKeys{Reference: ForeignKeyReference{Fields: []string{"Foo"}}}, - } - buf := bytes.NewBufferString("") - is.NoErr(s.Write(buf)) - - want := `{ - "fields": [ - { - "name": "Foo", - "Constraints": {} - }, - { - "name": "Bar", - "Constraints": {} - } - ], - "primaryKey": [ - "Foo" - ], - "foreignKeys": { - "reference": { - "fields": [ - "Foo" - ] - } - } -}` - - is.Equal(buf.String(), want) -} - -func TestGetField(t *testing.T) { - t.Run("HasField", func(t *testing.T) { - is := is.New(t) - s := Schema{Fields: []Field{{Name: "Foo"}, {Name: "Bar"}}} - field, pos := s.GetField("Foo") - is.Equal(pos, 0) - is.True(field != nil) - - field, pos = s.GetField("Bar") - is.Equal(pos, 1) - is.True(field != nil) - }) - t.Run("DoesNotHaveField", func(t *testing.T) { - is := is.New(t) - s := Schema{Fields: []Field{{Name: "Bez"}}} - field, pos := s.GetField("Foo") - is.Equal(pos, InvalidPosition) - is.True(field == nil) - }) -} - -func TestHasField(t *testing.T) { - t.Run("HasField", func(t *testing.T) { - is := is.New(t) - s := Schema{Fields: []Field{{Name: "Foo"}, {Name: "Bar"}}} - is.True(s.HasField("Foo")) - is.True(s.HasField("Bar")) - }) - t.Run("DoesNotHaveField", func(t *testing.T) { - is := is.New(t) - s := Schema{Fields: []Field{{Name: "Bez"}}} - is.True(!s.HasField("Bar")) - }) -} - -func TestMissingValues(t *testing.T) { - is := is.New(t) - s := Schema{ - Fields: []Field{{Name: "Foo"}}, - MissingValues: []string{"f"}, - } - row := struct { - Foo string - }{} - s.CastRow([]string{"f"}, &row) - is.Equal(row.Foo, "") -} - -type csvRow struct { - Name string -} - -func TestCastTable(t *testing.T) { - data := []struct { - desc string - got []csvRow - }{ - {"OutEmpty", []csvRow{}}, - {"OutNil", nil}, - {"OutInitialized", []csvRow{{"fooooo"}}}, - } - for 
_, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - tab := table.FromSlices( - []string{"Name"}, - [][]string{{"foo"}, {"bar"}}) - s := &Schema{Fields: []Field{{Name: "Name", Type: StringType}}} - is.NoErr(s.CastTable(tab, &d.got)) - - want := []csvRow{{"foo"}, {"bar"}} - is.Equal(want, d.got) - }) - } - t.Run("MoarData", func(t *testing.T) { - is := is.New(t) - tab := table.FromSlices( - []string{"ID", "Age", "Name"}, - [][]string{{"1", "39", "Paul"}, {"2", "23", "Jimmy"}, {"3", "36", "Jane"}, {"4", "28", "Judy"}, {"5", "37", "Iñtërnâtiônàlizætiøn"}}) - - type data struct { - ID int - Age int - Name string - } - s := &Schema{Fields: []Field{{Name: "ID", Type: IntegerType}, {Name: "Age", Type: IntegerType}, {Name: "Name", Type: StringType}}} - got := []data{} - is.NoErr(s.CastTable(tab, &got)) - - want := []data{{1, 39, "Paul"}, {2, 23, "Jimmy"}, {3, 36, "Jane"}, {4, 28, "Judy"}, {5, 37, "Iñtërnâtiônàlizætiøn"}} - is.Equal(want, got) - }) - t.Run("EmptyTable", func(t *testing.T) { - is := is.New(t) - tab := table.FromSlices([]string{}, [][]string{}) - s := &Schema{Fields: []Field{{Name: "name", Type: StringType}}} - var got []csvRow - is.NoErr(s.CastTable(tab, &got)) - is.Equal(len(got), 0) - }) - t.Run("Error_OutNotAPointerToSlice", func(t *testing.T) { - is := is.New(t) - tab := table.FromSlices([]string{"name"}, [][]string{{""}}) - s := &Schema{Fields: []Field{{Name: "name", Type: StringType}}} - is.True(s.CastTable(tab, []csvRow{}) != nil) - }) - t.Run("Error_UniqueConstrain", func(t *testing.T) { - tab := table.FromSlices( - []string{"ID", "Point"}, - [][]string{{"1", "10,11"}, {"2", "11,10"}, {"3", "10,10"}, {"4", "10,11"}}) - s := &Schema{Fields: []Field{{Name: "ID", Type: IntegerType}, {Name: "Point", Type: GeoPointType, Constraints: Constraints{Unique: true}}}} - - type data struct { - ID int - Point GeoPoint - } - got := []data{} - if err := s.CastTable(tab, &got); err == nil { - t.Fatalf("err want:err got:nil") - } - if len(got) 
!= 0 { - t.Fatalf("len(got) want:0 got:%v", len(got)) - } - }) - t.Run("Error_PrimaryKeyAndUniqueConstrain", func(t *testing.T) { - tab := table.FromSlices( - []string{"ID", "Age", "Name"}, - [][]string{{"1", "39", "Paul"}, {"2", "23", "Jimmy"}, {"3", "36", "Jane"}, {"4", "28", "Judy"}, {"4", "37", "John"}}) - - type data struct { - ID int - Age int - Name string - } - s := &Schema{Fields: []Field{{Name: "ID", Type: IntegerType}, {Name: "Age", Type: IntegerType}, {Name: "Name", Type: StringType, Constraints: Constraints{Unique: true}}}, PrimaryKeys: []string{"ID"}} - got := []data{} - if err := s.CastTable(tab, &got); err == nil { - t.Fatalf("err want:nil got:%q", err) - } - if len(got) != 0 { - t.Fatalf("len(got) want:0 got:%v", len(got)) - } - }) -} - -func TestSchema_Uncast(t *testing.T) { - t.Run("Success", func(t *testing.T) { - is := is.New(t) - type rowType struct { - Name string - Age int - } - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Age", Type: IntegerType}}} - got, err := s.UncastRow(rowType{Name: "Foo", Age: 42}) - is.NoErr(err) - - want := []string{"Foo", "42"} - is.Equal(want, got) - }) - t.Run("SuccessWithTags", func(t *testing.T) { - is := is.New(t) - type rowType struct { - MyName string `tableheader:"Name"` - MyAge int `tableheader:"Age"` - } - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Age", Type: IntegerType}}} - got, err := s.UncastRow(rowType{MyName: "Foo", MyAge: 42}) - is.NoErr(err) - is.Equal([]string{"Foo", "42"}, got) - }) - t.Run("SuccessSchemaMoreFieldsThanStruct", func(t *testing.T) { - is := is.New(t) - s := Schema{Fields: Fields{{Name: "Age", Type: IntegerType}, {Name: "Name", Type: StringType}}} - in := csvRow{Name: "Foo"} - got, err := s.UncastRow(&in) - if err != nil { - t.Fatalf("err want:nil got:%q", err) - } - is.Equal([]string{"Foo"}, got) - }) - t.Run("SuccessStructHasMoreFieldsThanSchema", func(t *testing.T) { - is := is.New(t) - // Note: deliberately changed the order 
to make the test more interesting. - type rowType struct { - Age int - Name string - Bar float64 - Bez string - } - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}, {Name: "Bez", Type: StringType}}} - got, err := s.UncastRow(rowType{Age: 42, Bez: "Bez", Name: "Foo"}) - is.NoErr(err) - is.Equal([]string{"Foo", "Bez"}, got) - }) - t.Run("Error_Encoding", func(t *testing.T) { - is := is.New(t) - type rowType struct { - Age string - } - s := Schema{Fields: []Field{{Name: "Age", Type: IntegerType}}} - _, err := s.UncastRow(rowType{Age: "10"}) - is.True(err != nil) - }) - t.Run("Error_NotStruct", func(t *testing.T) { - is := is.New(t) - s := Schema{Fields: []Field{{Name: "name", Type: StringType}}} - in := "string" - _, err := s.UncastRow(in) - is.True(err != nil) - }) - t.Run("Error_StructIsNil", func(t *testing.T) { - is := is.New(t) - s := Schema{Fields: []Field{{Name: "name", Type: StringType}}} - var in *csvRow - _, err := s.UncastRow(in) - is.True(err != nil) - }) -} - -func TestUncastTable(t *testing.T) { - t.Run("Simple", func(t *testing.T) { - is := is.New(t) - people := []struct { - Name string - }{{"Foo"}, {"Bar"}, {"Bez"}} - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}}} - got, err := s.UncastTable(people) - is.NoErr(err) - - want := [][]string{{"Foo"}, {"Bar"}, {"Bez"}} - is.Equal(want, got) - }) - - t.Run("Error_InputIsNotASlice", func(t *testing.T) { - is := is.New(t) - s := Schema{Fields: []Field{{Name: "Name", Type: StringType}}} - _, err := s.UncastTable(10) - is.True(err != nil) - }) - t.Run("Error_ErrorEncoding", func(t *testing.T) { - is := is.New(t) - people := []struct { - Name string - }{{"Foo"}} - s := Schema{Fields: []Field{{Name: "Name", Type: IntegerType}}} - _, err := s.UncastTable(people) - is.True(err != nil) - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/string.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/string.go deleted file mode 100644 index 
ce6cbbd..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/string.go +++ /dev/null @@ -1,59 +0,0 @@ -package schema - -import ( - "fmt" - "github.com/satori/go.uuid" - "net/mail" - "net/url" -) - -// Valid string formats and configuration. -const ( - stringURI = "uri" - stringEmail = "email" - stringUUID = "uuid" - stringBinary = "binary" - stringUUIDVersion = 4 -) - -func checkStringConstraints(v string, c Constraints) error { - minLength := c.MinLength - maxLength := c.MaxLength - re := c.compiledPattern - - if minLength != 0 && len(v) < minLength { - return fmt.Errorf("constraint check error: %v %v < minimum:%v", v, len(v), minLength) - } - if maxLength != 0 && len(v) > maxLength { - return fmt.Errorf("constraint check error: %v %v > maximum:%v", v, len(v), maxLength) - } - - if re != nil && !re.MatchString(v) { - return fmt.Errorf("constraint check error: %v don't fit pattern : %v ", v, c.Pattern) - } - return nil -} - -func castString(format, value string, c Constraints) (string, error) { - err := checkStringConstraints(value, c) - if err != nil { - return value, err - } - - switch format { - case stringURI: - _, err := url.ParseRequestURI(value) - return value, err - case stringEmail: - _, err := mail.ParseAddress(value) - return value, err - case stringUUID: - v, err := uuid.FromString(value) - if v.Version() != stringUUIDVersion { - return value, fmt.Errorf("invalid UUID version - got:%d want:%d", v.Version(), stringUUIDVersion) - } - return value, err - } - // NOTE: Returning the value for unknown format is in par with the python library. 
- return value, nil -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/string_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/string_test.go deleted file mode 100644 index 96e3722..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/string_test.go +++ /dev/null @@ -1,69 +0,0 @@ -package schema - -import ( - "regexp" - "testing" - - "github.com/matryer/is" -) - -// To be in par with the python library. -func TestCastString_URIMustRequireScheme(t *testing.T) { - is := is.New(t) - _, err := castString(stringURI, "google.com", Constraints{}) - is.True(err != nil) -} - -func TestCastString_InvalidUUIDVersion(t *testing.T) { - is := is.New(t) - // This is a uuid3: namespace DNS and python.org. - _, err := castString(stringUUID, "6fa459ea-ee8a-3ca4-894e-db77e160355e", Constraints{}) - is.True(err != nil) -} - -func TestCastString_ErrorCheckingConstraints(t *testing.T) { - data := []struct { - desc string - value string - format string - constraints Constraints - }{ - {"InvalidMinLength_UUID", "6fa459ea-ee8a-3ca4-894e-db77e160355e", stringUUID, Constraints{MinLength: 100}}, - {"InvalidMinLength_Email", "foo@bar.com", stringEmail, Constraints{MinLength: 100}}, - {"InvalidMinLength_URI", "http://google.com", stringURI, Constraints{MinLength: 100}}, - {"InvalidMaxLength_UUID", "6fa459ea-ee8a-3ca4-894e-db77e160355e", stringUUID, Constraints{MaxLength: 1}}, - {"InvalidMaxLength_Email", "foo@bar.com", stringEmail, Constraints{MaxLength: 1}}, - {"InvalidMaxLength_URI", "http://google.com", stringURI, Constraints{MaxLength: 1}}, - {"InvalidPattern_UUID", "6fa459ea-ee8a-3ca4-894e-db77e160355e", stringUUID, Constraints{compiledPattern: regexp.MustCompile("^[0-9a-f]{1}-.*"), Pattern: "^[0-9a-f]{1}-.*"}}, - {"InvalidPattern_Email", "foo@bar.com", stringEmail, Constraints{compiledPattern: regexp.MustCompile("[0-9].*"), Pattern: "[0-9].*"}}, - {"InvalidPattern_URI", "http://google.com", stringURI, 
Constraints{compiledPattern: regexp.MustCompile("^//.*"), Pattern: "^//.*"}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castString(d.format, d.value, d.constraints) - is.True(err != nil) - }) - } -} - -func TestCastString_Success(t *testing.T) { - var data = []struct { - desc string - value string - format string - constraints Constraints - }{ - {"URI", "http://google.com", stringURI, Constraints{MinLength: 1, compiledPattern: regexp.MustCompile("^http://.*"), Pattern: "^http://.*"}}, - {"Email", "foo@bar.com", stringEmail, Constraints{MinLength: 1, compiledPattern: regexp.MustCompile(".*@.*"), Pattern: ".*@.*"}}, - {"UUID", "C56A4180-65AA-42EC-A945-5FD21DEC0538", stringUUID, Constraints{MinLength: 36, MaxLength: 36, compiledPattern: regexp.MustCompile("[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}"), Pattern: "[0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12}"}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - v, err := castString(d.format, d.value, d.constraints) - is.NoErr(err) - is.Equal(v, d.value) - }) - } -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/time.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/time.go deleted file mode 100644 index 9d72815..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/time.go +++ /dev/null @@ -1,41 +0,0 @@ -package schema - -import ( - "fmt" - "reflect" - "time" -) - -func castTime(format, value string, c Constraints) (time.Time, error) { - y, err := castTimeWithoutCheckConstraints(format, value) - if err != nil { - return y, err - } - var max, min time.Time - if c.Maximum != "" { - max, err = castTimeWithoutCheckConstraints(format, c.Maximum) - if err != nil { - return y, err - } - } - if c.Minimum != "" { - min, err = castTimeWithoutCheckConstraints(format, c.Minimum) - if err != nil { - return y, err - } - } - return 
checkConstraints(y, max, min, TimeType) -} - -func castTimeWithoutCheckConstraints(format, value string) (time.Time, error) { - return castDefaultOrCustomTime("03:04:05", format, value) -} - -func uncastTime(v interface{}) (string, error) { - value, ok := v.(time.Time) - if !ok { - return "", fmt.Errorf("invalid date - value:%v type:%v", v, reflect.ValueOf(v).Type()) - } - utc := value.In(time.UTC) - return utc.Format(time.RFC3339), nil -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/schema/time_test.go b/vendor/github.com/frictionlessdata/tableschema-go/schema/time_test.go deleted file mode 100644 index fc3c980..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/schema/time_test.go +++ /dev/null @@ -1,76 +0,0 @@ -package schema - -import ( - "testing" - "time" - - "github.com/matryer/is" -) - -func TestCastTime(t *testing.T) { - t.Run("ValidMaximum", func(t *testing.T) { - is := is.New(t) - _, err := castTime(defaultFieldFormat, "11:45:00", Constraints{Maximum: "11:45:01"}) - is.NoErr(err) - }) - t.Run("ValidMinimum", func(t *testing.T) { - is := is.New(t) - _, err := castTime(defaultFieldFormat, "11:45:00", Constraints{Minimum: "11:44:59"}) - is.NoErr(err) - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - time string - constraints Constraints - }{ - {"InvalidYear", "foo", Constraints{}}, - {"BiggerThanMaximum", "11:45:00", Constraints{Maximum: "11:44:59"}}, - {"InvalidMaximum", "11:45:00", Constraints{Maximum: "boo"}}, - {"SmallerThanMinimum", "11:45:00", Constraints{Minimum: "11:45:01"}}, - {"InvalidMinimum", "11:45:00", Constraints{Minimum: "boo"}}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := castTime(defaultFieldFormat, d.time, d.constraints) - is.True(err != nil) - }) - } - }) -} - -func TestUncastTime(t *testing.T) { - t.Run("Success", func(t *testing.T) { - data := []struct { - desc string - value time.Time - want string - 
}{ - {"SimpleDate", time.Unix(1, 0), "1970-01-01T00:00:01Z"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - got, err := uncastTime(d.value) - is.NoErr(err) - is.Equal(d.want, got) - }) - } - }) - t.Run("Error", func(t *testing.T) { - data := []struct { - desc string - value interface{} - }{ - {"InvalidType", "Boo"}, - } - for _, d := range data { - t.Run(d.desc, func(t *testing.T) { - is := is.New(t) - _, err := uncastTime(d.value) - is.True(err != nil) - }) - } - }) -} diff --git a/vendor/github.com/frictionlessdata/tableschema-go/table/table.go b/vendor/github.com/frictionlessdata/tableschema-go/table/table.go deleted file mode 100644 index 0983369..0000000 --- a/vendor/github.com/frictionlessdata/tableschema-go/table/table.go +++ /dev/null @@ -1,137 +0,0 @@ -// Package table provides the main interfaces used to manipulate tabular data. -// To understand why we need interfaces to process tabular data, it is useful to introduce -// the concepts of the physical and the logical representation of data. -// -// The physical representation of data refers to the representation of data as text on disk, -// for example, in a CSV, JSON or XML file. This representation may have some type information (JSON, -// where the primitive types that JSON supports can be used) or not (CSV, where all data is -// represented in string form). In this project, those are going to be presented as packages that -// provide structs which implement those interfaces. For instance, csv.NewTable creates a Table -// which is backed up by a CSV. -// -// The logical representation of data refers to the "ideal" representation of the data in terms of -// primitive types, data structures, and relations, all as defined by the specification. We could say -// that the specification is about the logical representation of data. That said, functions -// exported for data processing should deal with logic representations. 
That functionality -// is represented by interfaces in this package. -package table - -import ( - "bytes" - "encoding/csv" -) - -// Table provides functionality to iterate and write tabular data. This is the logical -// representation and is meant to be encoding/format agnostic. -type Table interface { - // Headers returns the headers of the tabular data. - Headers() []string - - // Iter provides a convenient way to iterate over table's data. - // The iteration process always start at the beginning of the table and - // is backed by a new reading. - Iter() (Iterator, error) - - // ReadAll reads all rows from the table and return it as strings. - ReadAll() ([][]string, error) -} - -// A Writer writes rows to a table file. The idea behind the writer is to -// abstract out the physical representation of the table. Which can have -// many formats, for instance, CSV, XML and JSON -type Writer interface { - // Write writes a single row to w along with any necessary quoting. - // A record is a slice of strings with each string being one field. - Write(record []string) error - // Flush writes any buffered data to the underlying io.Writer. - // To check if an error occurred during the Flush, call Error. - Flush() - // Error reports any error that has occurred during a previous Write or Flush. - Error() error - // WriteAll writes multiple CSV records to w using Write and then calls Flush. - WriteAll(records [][]string) error -} - -// StringWriter is a simple Writer implementation which is backed up by -// an in memory bytes.Buffer. -type StringWriter struct { - csv.Writer - - content *bytes.Buffer -} - -// String returns the content that has been written so far as raw CSV. -func (s *StringWriter) String() string { - return s.content.String() -} - -// NewStringWriter returns a Writer that writes CSV to a string. -// It exports a String() method, which returns its contents. 
-func NewStringWriter() *StringWriter { - buf := &bytes.Buffer{} - return &StringWriter{*csv.NewWriter(buf), buf} -} - -// FromSlices creates a new SliceTable using passed-in arguments. -func FromSlices(headers []string, content [][]string) *SliceTable { - return &SliceTable{headers, content} -} - -// SliceTable offers a simple table implementation backed by slices. -type SliceTable struct { - headers []string - content [][]string -} - -// Headers returns the headers of the tabular data. -func (t *SliceTable) Headers() []string { - return t.headers -} - -// ReadAll reads all rows from the table and return it as strings. -func (t *SliceTable) ReadAll() ([][]string, error) { - return t.content, nil -} - -// Iter provides a convenient way to iterate over table's data. -// The iteration process always start at the beginning of the table and -// is backed by a new reading process. -func (t *SliceTable) Iter() (Iterator, error) { - return &sliceIterator{content: t.content}, nil -} - -type sliceIterator struct { - content [][]string - pos int -} - -func (i *sliceIterator) Next() bool { - i.pos++ - return i.pos <= len(i.content) -} -func (i *sliceIterator) Row() []string { return i.content[i.pos-1] } -func (i *sliceIterator) Err() error { return nil } -func (i *sliceIterator) Close() error { return nil } - -// Iterator is an interface which provides method to interating over tabular -// data. It is heavly inspired by bufio.Scanner. -// Iterating stops unrecoverably at EOF, the first I/O error, or a token too large to fit in the buffer. -type Iterator interface { - // Next advances the table interator to the next row, which will be available through the Cast or Row methods. - // It returns false when the iterator stops, either by reaching the end of the table or an error. - // After Next returns false, the Err method will return any error that ocurred during the iteration, except if it was io.EOF, Err - // will return nil. 
- // Next could automatically buffer some data, improving reading performance. It could also block, if necessary. - Next() bool - - // Row returns the most recent row fetched by a call to Next as a newly allocated string slice - // holding its fields. - Row() []string - - // Err returns nil if no errors happened during iteration, or the actual error - // otherwise. - Err() error - - // Close frees up any resources used during the iteration process. - Close() error -} diff --git a/vendor/github.com/matryer/is/.gitignore b/vendor/github.com/matryer/is/.gitignore deleted file mode 100644 index daf913b..0000000 --- a/vendor/github.com/matryer/is/.gitignore +++ /dev/null @@ -1,24 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe -*.test -*.prof diff --git a/vendor/github.com/matryer/is/.travis.yml b/vendor/github.com/matryer/is/.travis.yml deleted file mode 100644 index a7efa7b..0000000 --- a/vendor/github.com/matryer/is/.travis.yml +++ /dev/null @@ -1,19 +0,0 @@ -language: go - -sudo: required - -go: - - 1.6.x - - 1.7.x - - tip - -env: - - GIMME_OS=linux GIMME_ARCH=amd64 - - GIMME_OS=darwin GIMME_ARCH=amd64 - - GIMME_OS=windows GIMME_ARCH=amd64 - -install: - - go get -d -v ./... - -script: - - go build -v ./... diff --git a/vendor/github.com/matryer/is/LICENSE b/vendor/github.com/matryer/is/LICENSE deleted file mode 100644 index 9cecc1d..0000000 --- a/vendor/github.com/matryer/is/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. 
- - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. - - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. 
For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. 
The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. 
- - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. 
The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. 
- - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. 
- - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. 
This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. 
For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. 
Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. 
- - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. 
Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. 
- - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. 
- - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. 
You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. 
The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. 
- - {one line to give the program's name and a brief idea of what it does.} - Copyright (C) {year} {name of author} - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - {project} Copyright (C) {year} {fullname} - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. 
diff --git a/vendor/github.com/matryer/is/README.md b/vendor/github.com/matryer/is/README.md deleted file mode 100644 index 472cd14..0000000 --- a/vendor/github.com/matryer/is/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# is [![GoDoc](https://godoc.org/github.com/matryer/is?status.png)](http://godoc.org/github.com/matryer/is) [![Go Report Card](https://goreportcard.com/badge/github.com/matryer/is)](https://goreportcard.com/report/github.com/matryer/is) [![Build Status](https://travis-ci.org/matryer/is.svg?branch=master)](https://travis-ci.org/matryer/is) -Professional lightweight testing mini-framework for Go. - -* Easy to write and read -* [Beautifully simple API](https://godoc.org/github.com/matryer/is) with everything you need: `is.Equal`, `is.True`, `is.NoErr`, and `is.Fail` -* Use comments to add descriptions (which show up when tests fail) - -Failures are very easy to read: - -![Examples of failures](https://github.com/matryer/is/raw/master/misc/delicious-failures.png) - -### Usage - -The following code shows a range of useful ways you can use -the helper methods: - -```go -func Test(t *testing.T) { - - is := is.New(t) - - signedin, err := isSignedIn(ctx) - is.NoErr(err) // isSignedIn error - is.Equal(signedin, true) // must be signed in - - body := readBody(r) - is.True(strings.Contains(body, "Hi there")) - -} -``` diff --git a/vendor/github.com/matryer/is/is.go b/vendor/github.com/matryer/is/is.go deleted file mode 100644 index c58cf2d..0000000 --- a/vendor/github.com/matryer/is/is.go +++ /dev/null @@ -1,397 +0,0 @@ -// Package is provides a lightweight extension to the -// standard library's testing capabilities. -// -// Comments on the assertion lines are used to add -// a description. 
-// -// The following failing test: -// -// func Test(t *testing.T) { -// is := is.New(t) -// a, b := 1, 2 -// is.Equal(a, b) // expect to be the same -// } -// -// Will output: -// -// your_test.go:123: 1 != 2 // expect to be the same -// -// Usage -// -// The following code shows a range of useful ways you can use -// the helper methods: -// -// func Test(t *testing.T) { -// -// // always start tests with this -// is := is.New(t) -// -// signedin, err := isSignedIn(ctx) -// is.NoErr(err) // isSignedIn error -// is.Equal(signedin, true) // must be signed in -// -// body := readBody(r) -// is.OK(strings.Contains(body, "Hi there")) -// -// } -package is - -import ( - "bufio" - "bytes" - "fmt" - "io" - "os" - "path/filepath" - "reflect" - "runtime" - "strings" - "testing" -) - -// T reports when failures occur. -// testing.T implements this interface. -type T interface { - // Fail indicates that the test has failed but - // allowed execution to continue. - // Fail is called in relaxed mode (via NewRelaxed). - Fail() - // FailNow indicates that the test has failed and - // aborts the test. - // FailNow is called in strict mode (via New). - FailNow() -} - -// I is the test helper harness. -type I struct { - t T - fail func() - out io.Writer - colorful bool -} - -// New makes a new testing helper using the specified -// T through which failures will be reported. -// In strict mode, failures call T.FailNow causing the test -// to be aborted. See NewRelaxed for alternative behavior. -func New(t T) *I { - return &I{t, t.FailNow, os.Stdout, true} -} - -// NewRelaxed makes a new testing helper using the specified -// T through which failures will be reported. -// In relaxed mode, failures call T.Fail allowing -// multiple failures per test. 
-func NewRelaxed(t T) *I { - return &I{t, t.Fail, os.Stdout, true} -} - -func (is *I) log(args ...interface{}) { - s := is.decorate(fmt.Sprint(args...)) - fmt.Fprintf(is.out, s) - is.fail() -} - -func (is *I) logf(format string, args ...interface{}) { - is.log(fmt.Sprintf(format, args...)) -} - -// Fail immediately fails the test. -// -// func Test(t *testing.T) { -// is := is.New(t) -// is.Fail() // TODO: write this test -// } -// -// In relaxed mode, execution will continue after a call to -// Fail, but that test will still fail. -func (is *I) Fail() { - is.log("failed") -} - -// True asserts that the expression is true. The expression -// code itself will be reported if the assertion fails. -// -// func Test(t *testing.T) { -// is := is.New(t) -// val := method() -// is.True(val != nil) // val should never be nil -// } -// -// Will output: -// -// your_test.go:123: not true: val != nil -func (is *I) True(expression bool) { - if !expression { - is.log("not true: $ARGS") - } -} - -// Equal asserts that a and b are equal. -// -// func Test(t *testing.T) { -// is := is.New(t) -// a := greet("Mat") -// is.Equal(a, "Hi Mat") // greeting -// } -// -// Will output: -// -// your_test.go:123: Hey Mat != Hi Mat // greeting -func (is *I) Equal(a, b interface{}) { - if !areEqual(a, b) { - if isNil(a) || isNil(b) { - aLabel := is.valWithType(a) - bLabel := is.valWithType(b) - if isNil(a) { - aLabel = "" - } - if isNil(b) { - bLabel = "" - } - is.logf("%s != %s", aLabel, bLabel) - return - } - if reflect.ValueOf(a).Type() == reflect.ValueOf(b).Type() { - is.logf("%v != %v", a, b) - return - } - is.logf("%s != %s", is.valWithType(a), is.valWithType(b)) - } -} - -// New is a method wrapper around the New function. 
-// It allows you to write subtests using a fimilar -// pattern: -// -// func Test(t *testing.T) { -// is := is.New(t) -// t.Run("sub", func(t *testing.T) { -// is := is.New(t) -// // TODO: test -// }) -// } -func (is *I) New(t *testing.T) *I { - return New(t) -} - -// NewRelaxed is a method wrapper aorund the NewRelaxed -// method. It allows you to write subtests using a fimilar -// pattern: -// -// func Test(t *testing.T) { -// is := is.New(t) -// t.Run("sub", func(t *testing.T) { -// is := is.New(t) -// // TODO: test -// }) -// } -func (is *I) NewRelaxed(t *testing.T) *I { - return NewRelaxed(t) -} - -func (is *I) valWithType(v interface{}) string { - if is.colorful { - return fmt.Sprintf("%[1]s%[3]T(%[2]s%[3]v%[1]s)%[2]s", colorType, colorNormal, v) - } - return fmt.Sprintf("%[1]T(%[1]v)", v) -} - -// NoErr asserts that err is nil. -// -// func Test(t *testing.T) { -// is := is.New(t) -// val, err := getVal() -// is.NoErr(err) // getVal error -// is.OK(len(val) > 10) // val cannot be short -// } -// -// Will output: -// -// your_test.go:123: err: not found // getVal error -func (is *I) NoErr(err error) { - if err != nil { - is.logf("err: %s", err.Error()) - } -} - -// isNil gets whether the object is nil or not. -func isNil(object interface{}) bool { - if object == nil { - return true - } - value := reflect.ValueOf(object) - kind := value.Kind() - if kind >= reflect.Chan && kind <= reflect.Slice && value.IsNil() { - return true - } - return false -} - -// areEqual gets whether a equals b or not. 
-func areEqual(a, b interface{}) bool { - if isNil(a) || isNil(b) { - if isNil(a) && !isNil(b) { - return false - } - if !isNil(a) && isNil(b) { - return false - } - return a == b - } - if reflect.DeepEqual(a, b) { - return true - } - aValue := reflect.ValueOf(a) - bValue := reflect.ValueOf(b) - if aValue == bValue { - return true - } - return false -} - -func callerinfo() (path string, line int, ok bool) { - for i := 0; ; i++ { - _, path, line, ok = runtime.Caller(i) - if !ok { - return - } - if strings.HasSuffix(path, "is.go") { - continue - } - return path, line, true - } -} - -// loadComment gets the Go comment from the specified line -// in the specified file. -func loadComment(path string, line int) (string, bool) { - f, err := os.Open(path) - if err != nil { - return "", false - } - defer f.Close() - s := bufio.NewScanner(f) - i := 1 - for s.Scan() { - if i == line { - text := s.Text() - commentI := strings.Index(text, "//") - if commentI == -1 { - return "", false // no comment - } - text = text[commentI+2:] - text = strings.TrimSpace(text) - return text, true - } - i++ - } - return "", false -} - -// loadArguments gets the arguments from the function call -// on the specified line of the file. 
-func loadArguments(path string, line int) (string, bool) { - f, err := os.Open(path) - if err != nil { - return "", false - } - defer f.Close() - s := bufio.NewScanner(f) - i := 1 - for s.Scan() { - if i == line { - text := s.Text() - braceI := strings.Index(text, "(") - if braceI == -1 { - return "", false - } - text = text[braceI+1:] - cs := bufio.NewScanner(strings.NewReader(text)) - cs.Split(bufio.ScanBytes) - i := 0 - c := 1 - for cs.Scan() { - switch cs.Text() { - case ")": - c-- - case "(": - c++ - } - if c == 0 { - break - } - i++ - } - text = text[:i] - return text, true - } - i++ - } - return "", false -} - -// decorate prefixes the string with the file and line of the call site -// and inserts the final newline if needed and indentation tabs for formatting. -// this function was copied from the testing framework and modified. -func (is *I) decorate(s string) string { - path, lineNumber, ok := callerinfo() // decorate + log + public function. - file := filepath.Base(path) - if ok { - // Truncate file name at last file name separator. - if index := strings.LastIndex(file, "/"); index >= 0 { - file = file[index+1:] - } else if index = strings.LastIndex(file, "\\"); index >= 0 { - file = file[index+1:] - } - } else { - file = "???" - lineNumber = 1 - } - buf := new(bytes.Buffer) - // Every line is indented at least one tab. - buf.WriteByte('\t') - if is.colorful { - buf.WriteString(colorFile) - } - fmt.Fprintf(buf, "%s:%d: ", file, lineNumber) - if is.colorful { - buf.WriteString(colorNormal) - } - lines := strings.Split(s, "\n") - if l := len(lines); l > 1 && lines[l-1] == "" { - lines = lines[:l-1] - } - for i, line := range lines { - if i > 0 { - // Second and subsequent lines are indented an extra tab. 
- buf.WriteString("\n\t\t") - } - // expand arguments (if $ARGS is present) - if strings.Contains(line, "$ARGS") { - args, _ := loadArguments(path, lineNumber) - line = strings.Replace(line, "$ARGS", args, -1) - } - buf.WriteString(line) - } - comment, ok := loadComment(path, lineNumber) - if ok { - if is.colorful { - buf.WriteString(colorComment) - } - buf.WriteString(" // ") - buf.WriteString(comment) - if is.colorful { - buf.WriteString(colorNormal) - } - } - buf.WriteString("\n") - return buf.String() -} - -const ( - colorNormal = "\u001b[39m" - colorComment = "\u001b[32m" - colorFile = "\u001b[90m" - colorType = "\u001b[90m" -) diff --git a/vendor/github.com/matryer/is/is_test.go b/vendor/github.com/matryer/is/is_test.go deleted file mode 100644 index c6e5b25..0000000 --- a/vendor/github.com/matryer/is/is_test.go +++ /dev/null @@ -1,252 +0,0 @@ -package is - -import ( - "bytes" - "errors" - "fmt" - "strings" - "testing" -) - -type mockT struct { - failed bool -} - -func (m *mockT) FailNow() { - m.failed = true -} -func (m *mockT) Fail() { - m.failed = true -} - -var tests = []struct { - N string - F func(is *I) - Fail string -}{ - // Equal - { - N: "Equal(1, 1)", - F: func(is *I) { - is.Equal(1, 1) // 1 doesn't equal 2 - }, - Fail: ``, - }, - - { - N: "Equal(1, 2)", - F: func(is *I) { - is.Equal(1, 2) // 1 doesn't equal 2 - }, - Fail: `1 != 2 // 1 doesn't equal 2`, - }, - { - N: "Equal(1, nil)", - F: func(is *I) { - is.Equal(1, nil) // 1 doesn't equal nil - }, - Fail: `int(1) != // 1 doesn't equal nil`, - }, - { - N: "Equal(nil, 2)", - F: func(is *I) { - is.Equal(nil, 2) // nil doesn't equal 2 - }, - Fail: ` != int(2) // nil doesn't equal 2`, - }, - { - N: "Equal(false, false)", - F: func(is *I) { - is.Equal(false, false) // nil doesn't equal 2 - }, - Fail: ``, - }, - { - N: "Equal(int32(1), int64(1))", - F: func(is *I) { - is.Equal(int32(1), int64(1)) // nope - }, - Fail: `int32(1) != int64(1) // nope`, - }, - { - N: "Equal(map1, map2)", - F: 
func(is *I) { - m1 := map[string]interface{}{"value": 1} - m2 := map[string]interface{}{"value": 2} - is.Equal(m1, m2) // maps - }, - Fail: `map[value:1] != map[value:2] // maps`, - }, - { - N: "Equal(true, map2)", - F: func(is *I) { - m1 := map[string]interface{}{"value": 1} - m2 := map[string]interface{}{"value": 2} - is.Equal(m1, m2) // maps - }, - Fail: `map[value:1] != map[value:2] // maps`, - }, - { - N: "Equal(slice1, slice2)", - F: func(is *I) { - s1 := []string{"one", "two", "three"} - s2 := []string{"one", "two", "three", "four"} - is.Equal(s1, s2) // slices - }, - Fail: `[one two three] != [one two three four] // slices`, - }, - { - N: "Equal(nil, chan)", - F: func(is *I) { - var a chan string - b := make(chan string) - is.Equal(a, b) // channels - }, - Fail: ` // channels`, - }, - { - N: "Equal(nil, slice)", - F: func(is *I) { - var s1 []string - s2 := []string{"one", "two", "three", "four"} - is.Equal(s1, s2) // nil slice - }, - Fail: ` // nil slice`, - }, - - // Fail - { - N: "Fail()", - F: func(is *I) { - is.Fail() // something went wrong - }, - Fail: "failed // something went wrong", - }, - - // NoErr - { - N: "NoErr(nil)", - F: func(is *I) { - var err error - is.NoErr(err) // method shouldn't return error - }, - Fail: "", - }, - { - N: "NoErr(error)", - F: func(is *I) { - err := errors.New("nope") - is.NoErr(err) // method shouldn't return error - }, - Fail: "err: nope // method shouldn't return error", - }, - - // OK - { - N: "True(1 == 2)", - F: func(is *I) { - is.True(1 == 2) - }, - Fail: "not true: 1 == 2", - }, -} - -func TestFailures(t *testing.T) { - colorful, notColorful := true, false - testFailures(t, colorful) - testFailures(t, notColorful) -} - -func testFailures(t *testing.T, colorful bool) { - for _, test := range tests { - tt := &mockT{} - is := New(tt) - var buf bytes.Buffer - is.out = &buf - is.colorful = colorful - test.F(is) - if len(test.Fail) == 0 && tt.failed { - t.Errorf("shouldn't fail: %s", test.N) - continue - } - if 
len(test.Fail) > 0 && !tt.failed { - t.Errorf("didn't fail: %s", test.N) - } - if colorful { - // if colorful, we won't check the messages - // this test is run twice, one without colorful - // statements. - // see TestFailures - fmt.Print(buf.String()) - continue - } - output := buf.String() - output = strings.TrimSpace(output) - if !strings.HasSuffix(output, test.Fail) { - t.Errorf("expected `%s` to end with `%s`", output, test.Fail) - } - } -} - -func TestRelaxed(t *testing.T) { - tt := &mockT{} - is := NewRelaxed(tt) - var buf bytes.Buffer - is.out = &buf - is.colorful = false - is.NoErr(errors.New("oops")) - is.True(1 == 2) - actual := buf.String() - if !strings.Contains(actual, `oops`) { - t.Errorf("missing: oops") - } - if !strings.Contains(actual, `1 == 2`) { - t.Errorf("missing: 1 == 2") - } - if !tt.failed { - t.Errorf("didn't fail") - } -} - -func TestLoadComment(t *testing.T) { - comment, ok := loadComment("./testdata/example_test.go", 12) - if !ok { - t.Errorf("loadComment: not ok") - } - if comment != `this comment will be extracted` { - t.Errorf("loadComment: bad comment %s", comment) - } -} - -func TestLoadArguments(t *testing.T) { - arguments, ok := loadArguments("./testdata/example_test.go", 20) - if !ok { - t.Errorf("loadArguments: not ok") - } - if arguments != `a == getB()` { - t.Errorf("loadArguments: bad arguments %s", arguments) - } - - arguments, ok = loadArguments("./testdata/example_test.go", 28) - if !ok { - t.Errorf("loadArguments: not ok") - } - if arguments != `a == getB()` { - t.Errorf("loadArguments: bad arguments %s", arguments) - } - - arguments, _ = loadArguments("./testdata/example_test.go", 26) - if len(arguments) > 0 { - t.Errorf("should be no arguments: %s", arguments) - } -} - -// TestSubtests ensures subtests work as expected. 
-// https://github.com/matryer/is/issues/1 -func TestSubtests(t *testing.T) { - is := New(t) - t.Run("sub1", func(t *testing.T) { - is := is.New(t) - is.Equal(1+1, 2) - }) -} diff --git a/vendor/github.com/matryer/is/misc/delicious-failures.png b/vendor/github.com/matryer/is/misc/delicious-failures.png deleted file mode 100644 index b1e0d01..0000000 Binary files a/vendor/github.com/matryer/is/misc/delicious-failures.png and /dev/null differ diff --git a/vendor/github.com/matryer/is/testdata/example_test.go b/vendor/github.com/matryer/is/testdata/example_test.go deleted file mode 100644 index 4fab917..0000000 --- a/vendor/github.com/matryer/is/testdata/example_test.go +++ /dev/null @@ -1,29 +0,0 @@ -package example - -// CAUTION: DO NOT EDIT -// Tests in this project rely on specific lines numbers -// throughout this file. - -import ( - "testing" -) - -func TestSomething(t *testing.T) { - // this comment will be extracted -} - -func TestSomethingElse(t *testing.T) { - a, b := 1, 2 - getB = func() int { - return b - } - is.True(a == getB()) // should be the same -} - -func TestSomethingElseTpp(t *testing.T) { - a, b := 1, 2 - getB = func() int { - return b - } - is.True(a == getB()) -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/.travis.yml b/vendor/github.com/santhosh-tekuri/jsonschema/.travis.yml deleted file mode 100644 index 1ab35ab..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/.travis.yml +++ /dev/null @@ -1,10 +0,0 @@ -language: go - -go: - - 1.8.1 - -script: - - ./go.test.sh - -after_success: - - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/LICENSE b/vendor/github.com/santhosh-tekuri/jsonschema/LICENSE deleted file mode 100644 index 65cd403..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2017 Santhosh Kumar Tekuri. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/README.md b/vendor/github.com/santhosh-tekuri/jsonschema/README.md deleted file mode 100644 index a2cf899..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/README.md +++ /dev/null @@ -1,141 +0,0 @@ -# jsonschema - -[![License](https://img.shields.io/badge/License-BSD%203--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause) -[![GoDoc](https://godoc.org/github.com/santhosh-tekuri/jsonschema?status.svg)](https://godoc.org/github.com/santhosh-tekuri/jsonschema) -[![Go Report Card](https://goreportcard.com/badge/github.com/santhosh-tekuri/jsonschema)](https://goreportcard.com/report/github.com/santhosh-tekuri/jsonschema) -[![Build Status](https://travis-ci.org/santhosh-tekuri/jsonschema.svg?branch=master)](https://travis-ci.org/santhosh-tekuri/jsonschema) -[![codecov.io](https://codecov.io/github/santhosh-tekuri/jsonschema/coverage.svg?branch=master)](https://codecov.io/github/santhosh-tekuri/jsonschema?branch=master) - -Package jsonschema provides json-schema compilation and validation. - -This implementation of JSON Schema, supports draft4 and draft6. - -Passes all tests(including optional) in https://github.com/json-schema/JSON-Schema-Test-Suite - -An example of using this package: - -```go -schema, err := jsonschema.Compile("schemas/purchaseOrder.json") -if err != nil { - return err -} -f, err := os.Open("purchaseOrder.json") -if err != nil { - return err -} -defer f.Close() -if err = schema.Validate(f); err != nil { - return err -} -``` - -The schema is compiled against the version specified in `$schema` property. -If `$schema` property is missing, it uses latest draft which currently is draft6. 
-You can force to use draft4 when `$schema` is missing, as follows: - -```go -compiler := jsonschema.NewCompiler() -compler.Draft = jsonschema.Draft4 -``` - -you can also validate go value using `schema.ValidateInterface(interface{})` method. -but the argument should not be user-defined struct. - - -This package supports loading json-schema from filePath and fileURL. - -To load json-schema from HTTPURL, add following import: - -```go -import _ "github.com/santhosh-tekuri/jsonschema/httploader" -``` - -Loading from urls for other schemes (such as ftp), can be plugged in. see package jsonschema/httploader -for an example - -To load json-schema from in-memory: - -```go -data := `{"type": "string"}` -url := "sch.json" -compiler := jsonschema.NewCompiler() -if err := compiler.AddResource(url, strings.NewReader(data)); err != nil { - return err -} -schema, err := compiler.Compile(url) -if err != nil { - return err -} -f, err := os.Open("doc.json") -if err != nil { - return err -} -defer f.Close() -if err = schema.Validate(f); err != nil { - return err -} -``` - -This package supports json string formats: -- date-time -- hostname -- email -- ip-address -- ipv4 -- ipv6 -- uri -- uriref/uri-reference -- regex -- format -- json-pointer -- uri-template (limited validation) - -Developers can define their own formats using package jsonschema/formats. - -## ValidationError - -The ValidationError returned by Validate method contains detailed context to understand why and where the error is. 
- -schema.json: -```json -{ - "$ref": "t.json#/definitions/employee" -} -``` - -t.json: -```json -{ - "definitions": { - "employee": { - "type": "string" - } - } -} -``` - -doc.json: -```json -1 -``` - -Validating `doc.json` with `schema.json`, gives following ValidationError: -``` -I[#] S[#] doesn't validate with "schema.json#" - I[#] S[#/$ref] doesn't valide with "t.json#/definitions/employee" - I[#] S[#/definitions/employee/type] expected string, but got number -``` - -Here `I` stands for instance document and `S` stands for schema document. -The json-fragments that caused error in instance and schema documents are represented using json-pointer notation. -Nested causes are printed with indent. - -## CLI - -```bash -jv []... -``` - -if no `` arguments are passed, it simply validates the ``. - -exit-code is 1, if there are any validation errors diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/cmd/jv/main.go b/vendor/github.com/santhosh-tekuri/jsonschema/cmd/jv/main.go deleted file mode 100644 index aca78f5..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/cmd/jv/main.go +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package main - -import ( - "fmt" - "os" - - "github.com/santhosh-tekuri/jsonschema" - _ "github.com/santhosh-tekuri/jsonschema/httploader" - "github.com/santhosh-tekuri/jsonschema/loader" -) - -func main() { - if len(os.Args) == 1 { - fmt.Fprintln(os.Stderr, "jv []...") - os.Exit(1) - } - - schema, err := jsonschema.Compile(os.Args[1]) - if err != nil { - fmt.Fprintln(os.Stderr, err) - os.Exit(1) - } - - for _, f := range os.Args[2:] { - r, err := loader.Load(f) - if err != nil { - fmt.Fprintf(os.Stderr, "error in reading %q. 
reason: \n%v\n", f, err) - os.Exit(1) - } - - err = schema.Validate(r) - _ = r.Close() - if err != nil { - fmt.Fprintf(os.Stderr, "%q does not conform to the schema specified. reason:\n%v\n", f, err) - os.Exit(1) - } - } -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/compiler.go b/vendor/github.com/santhosh-tekuri/jsonschema/compiler.go deleted file mode 100644 index 05eba4c..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/compiler.go +++ /dev/null @@ -1,492 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package jsonschema - -import ( - "encoding/json" - "fmt" - "io" - "math/big" - "regexp" - "strings" - - "github.com/santhosh-tekuri/jsonschema/formats" - "github.com/santhosh-tekuri/jsonschema/loader" -) - -// A Draft represents json-schema draft -type Draft struct { - meta *Schema - id string -} - -var latest = Draft6 - -func (draft *Draft) validateSchema(url, ptr string, v interface{}) error { - if meta := draft.meta; meta != nil { - if err := meta.validate(v); err != nil { - addContext(ptr, "", err) - finishSchemaContext(err, meta) - finishInstanceContext(err) - var instancePtr string - if ptr == "" { - instancePtr = "#" - } else { - instancePtr = "#/" + ptr - } - return &SchemaError{ - url, - &ValidationError{ - Message: fmt.Sprintf("doesn't validate with %q", meta.url+meta.ptr), - InstancePtr: instancePtr, - SchemaURL: meta.url, - SchemaPtr: "#", - Causes: []*ValidationError{err.(*ValidationError)}, - }, - } - } - } - return nil -} - -// A Compiler represents a json-schema compiler. -// -// Currently draft4 and draft6 are supported -type Compiler struct { - // Draft represents the draft used when '$schema' attribute is missing. - // - // This defaults to latest draft (currently draft6). 
- Draft *Draft - resources map[string]*resource -} - -// NewCompiler returns a draft4 json-schema Compiler object. -func NewCompiler() *Compiler { - return &Compiler{resources: make(map[string]*resource)} -} - -// AddResource adds in-memory resource to the compiler. -// -// Note that url must not have fragment -func (c *Compiler) AddResource(url string, r io.Reader) error { - res, err := newResource(url, r) - if err != nil { - return err - } - c.resources[res.url] = res - return nil -} - -func (c *Compiler) draft(v interface{}) (*Draft, error) { - if m, ok := v.(map[string]interface{}); ok { - if url, ok := m["$schema"]; ok { - switch url { - case "http://json-schema.org/schema#": - return latest, nil - case "http://json-schema.org/draft-06/schema#": - return Draft6, nil - case "http://json-schema.org/draft-04/schema#": - return Draft4, nil - default: - return nil, fmt.Errorf("unknown $schema %q", url) - } - } - } - if c.Draft == nil { - return latest, nil - } - return c.Draft, nil -} - -// MustCompile is like Compile but panics if the url cannot be compiled to *Schema. -// It simplifies safe initialization of global variables holding compiled Schemas. -func (c *Compiler) MustCompile(url string) *Schema { - s, err := c.Compile(url) - if err != nil { - panic(fmt.Sprintf("jsonschema: Compile(%q): %s", url, err)) - } - return s -} - -// Compile parses json-schema at given url returns, if successful, -// a Schema object that can be used to match against json. 
-func (c *Compiler) Compile(url string) (*Schema, error) { - base, fragment := split(url) - if _, ok := c.resources[base]; !ok { - r, err := loader.Load(base) - if err != nil { - return nil, err - } - defer r.Close() - if err := c.AddResource(base, r); err != nil { - return nil, err - } - } - r := c.resources[base] - return c.compileRef(nil, r, nil, r.url, fragment) -} - -func (c Compiler) compileRef(draft *Draft, r *resource, root map[string]interface{}, base, ref string) (*Schema, error) { - var err error - if rootFragment(ref) { - if _, ok := r.schemas["#"]; !ok { - if draft == nil { - draft, err = c.draft(r.doc) - if err != nil { - return nil, err - } - } - if err := draft.validateSchema(r.url, "", r.doc); err != nil { - return nil, err - } - s := &Schema{url: r.url, ptr: "#"} - r.schemas["#"] = s - if m, ok := r.doc.(map[string]interface{}); ok { - if _, err := c.compile(draft, r, s, base, m, m); err != nil { - return nil, err - } - } else { - if _, err := c.compile(draft, r, s, base, nil, r.doc); err != nil { - return nil, err - } - } - } - return r.schemas["#"], nil - } - - if strings.HasPrefix(ref, "#/") { - if _, ok := r.schemas[ref]; !ok { - docDraft := draft - if docDraft == nil { - docDraft = c.Draft - } - if docDraft == nil { - docDraft = latest - } - ptrBase, doc, err := r.resolvePtr(docDraft, ref) - if err != nil { - return nil, err - } - if draft == nil { - draft, err = c.draft(doc) - if err != nil { - return nil, err - } - } - if err := draft.validateSchema(r.url, strings.TrimPrefix(ref, "#/"), doc); err != nil { - return nil, err - } - r.schemas[ref] = &Schema{url: base, ptr: ref} - if _, err := c.compile(draft, r, r.schemas[ref], ptrBase, root, doc); err != nil { - return nil, err - } - } - return r.schemas[ref], nil - } - - refURL, err := resolveURL(base, ref) - if err != nil { - return nil, err - } - if rs, ok := r.schemas[refURL]; ok { - return rs, nil - } - - ids := make(map[string]map[string]interface{}) - if err := resolveIDs(draft, r.url, 
root, ids); err != nil { - return nil, err - } - if v, ok := ids[refURL]; ok { - if err := draft.validateSchema(r.url, "", v); err != nil { - return nil, err - } - u, f := split(refURL) - s := &Schema{url: u, ptr: f} - r.schemas[refURL] = s - if err := c.compileMap(draft, r, s, refURL, root, v); err != nil { - return nil, err - } - return s, nil - } - - base, _ = split(refURL) - if base == r.url { - return nil, fmt.Errorf("invalid ref: %q", refURL) - } - return c.Compile(refURL) -} - -func (c Compiler) compile(draft *Draft, r *resource, s *Schema, base string, root map[string]interface{}, m interface{}) (*Schema, error) { - if s == nil { - s = new(Schema) - s.url, _ = split(base) - } - switch m := m.(type) { - case bool: - s.always = &m - return s, nil - default: - return s, c.compileMap(draft, r, s, base, root, m.(map[string]interface{})) - } -} - -func (c Compiler) compileMap(draft *Draft, r *resource, s *Schema, base string, root, m map[string]interface{}) error { - var err error - - if id, ok := m[draft.id]; ok { - if base, err = resolveURL(base, id.(string)); err != nil { - return err - } - } - - if ref, ok := m["$ref"]; ok { - b, _ := split(base) - s.ref, err = c.compileRef(draft, r, root, b, ref.(string)) - if err != nil { - return err - } - // All other properties in a "$ref" object MUST be ignored - return nil - } - - if t, ok := m["type"]; ok { - switch t := t.(type) { - case string: - s.types = []string{t} - case []interface{}: - s.types = toStrings(t) - } - } - - if e, ok := m["enum"]; ok { - s.enum = e.([]interface{}) - allPrimitives := true - for _, item := range s.enum { - switch jsonType(item) { - case "object", "array": - allPrimitives = false - break - } - } - s.enumError = "enum failed" - if allPrimitives { - if len(s.enum) == 1 { - s.enumError = fmt.Sprintf("value must be %#v", s.enum[0]) - } else { - strEnum := make([]string, len(s.enum)) - for i, item := range s.enum { - strEnum[i] = fmt.Sprintf("%#v", item) - } - s.enumError = 
fmt.Sprintf("value must be one of %s", strings.Join(strEnum, ", ")) - } - } - } - - if not, ok := m["not"]; ok { - s.not, err = c.compile(draft, r, nil, base, root, not) - if err != nil { - return err - } - } - - loadSchemas := func(pname string) ([]*Schema, error) { - if pvalue, ok := m[pname]; ok { - pvalue := pvalue.([]interface{}) - schemas := make([]*Schema, len(pvalue)) - for i, v := range pvalue { - sch, err := c.compile(draft, r, nil, base, root, v) - if err != nil { - return nil, err - } - schemas[i] = sch - } - return schemas, nil - } - return nil, nil - } - if s.allOf, err = loadSchemas("allOf"); err != nil { - return err - } - if s.anyOf, err = loadSchemas("anyOf"); err != nil { - return err - } - if s.oneOf, err = loadSchemas("oneOf"); err != nil { - return err - } - - loadInt := func(pname string) int { - if num, ok := m[pname]; ok { - i, _ := num.(json.Number).Int64() - return int(i) - } - return -1 - } - s.minProperties, s.maxProperties = loadInt("minProperties"), loadInt("maxProperties") - - if req, ok := m["required"]; ok { - s.required = toStrings(req.([]interface{})) - } - - if props, ok := m["properties"]; ok { - props := props.(map[string]interface{}) - s.properties = make(map[string]*Schema, len(props)) - for pname, pmap := range props { - s.properties[pname], err = c.compile(draft, r, nil, base, root, pmap) - if err != nil { - return err - } - } - } - - if regexProps, ok := m["regexProperties"]; ok { - s.regexProperties = regexProps.(bool) - } - - if patternProps, ok := m["patternProperties"]; ok { - patternProps := patternProps.(map[string]interface{}) - s.patternProperties = make(map[*regexp.Regexp]*Schema, len(patternProps)) - for pattern, pmap := range patternProps { - s.patternProperties[regexp.MustCompile(pattern)], err = c.compile(draft, r, nil, base, root, pmap) - if err != nil { - return err - } - } - } - - if additionalProps, ok := m["additionalProperties"]; ok { - switch additionalProps := additionalProps.(type) { - case bool: - 
if !additionalProps { - s.additionalProperties = false - } - case map[string]interface{}: - s.additionalProperties, err = c.compile(draft, r, nil, base, root, additionalProps) - if err != nil { - return err - } - } - } - - if deps, ok := m["dependencies"]; ok { - deps := deps.(map[string]interface{}) - s.dependencies = make(map[string]interface{}, len(deps)) - for pname, pvalue := range deps { - switch pvalue := pvalue.(type) { - case []interface{}: - s.dependencies[pname] = toStrings(pvalue) - default: - s.dependencies[pname], err = c.compile(draft, r, nil, base, root, pvalue) - if err != nil { - return err - } - } - } - } - - s.minItems, s.maxItems = loadInt("minItems"), loadInt("maxItems") - - if unique, ok := m["uniqueItems"]; ok { - s.uniqueItems = unique.(bool) - } - - if items, ok := m["items"]; ok { - switch items := items.(type) { - case []interface{}: - s.items, err = loadSchemas("items") - if err != nil { - return err - } - if additionalItems, ok := m["additionalItems"]; ok { - switch additionalItems := additionalItems.(type) { - case bool: - s.additionalItems = additionalItems - case map[string]interface{}: - s.additionalItems, err = c.compile(draft, r, nil, base, root, additionalItems) - if err != nil { - return err - } - } - } else { - s.additionalItems = true - } - default: - s.items, err = c.compile(draft, r, nil, base, root, items) - if err != nil { - return err - } - } - } - - s.minLength, s.maxLength = loadInt("minLength"), loadInt("maxLength") - - if pattern, ok := m["pattern"]; ok { - s.pattern = regexp.MustCompile(pattern.(string)) - } - - if format, ok := m["format"]; ok { - s.formatName = format.(string) - s.format, _ = formats.Get(s.formatName) - } - - loadFloat := func(pname string) *big.Float { - if num, ok := m[pname]; ok { - r, _ := new(big.Float).SetString(string(num.(json.Number))) - return r - } - return nil - } - - s.minimum = loadFloat("minimum") - if exclusive, ok := m["exclusiveMinimum"]; ok { - if exclusive, ok := 
exclusive.(bool); ok { - if exclusive { - s.minimum, s.exclusiveMinimum = nil, s.minimum - } - } else { - s.exclusiveMinimum = loadFloat("exclusiveMinimum") - } - } - - s.maximum = loadFloat("maximum") - if exclusive, ok := m["exclusiveMaximum"]; ok { - if exclusive, ok := exclusive.(bool); ok { - if exclusive { - s.maximum, s.exclusiveMaximum = nil, s.maximum - } - } else { - s.exclusiveMaximum = loadFloat("exclusiveMaximum") - } - } - - s.multipleOf = loadFloat("multipleOf") - - if draft == Draft6 { - if c, ok := m["const"]; ok { - s.constant = []interface{}{c} - } - if propertyNames, ok := m["propertyNames"]; ok { - s.propertyNames, err = c.compile(draft, r, nil, base, root, propertyNames) - if err != nil { - return err - } - } - if contains, ok := m["contains"]; ok { - s.contains, err = c.compile(draft, r, nil, base, root, contains) - if err != nil { - return err - } - } - } - - return nil -} - -func toStrings(arr []interface{}) []string { - s := make([]string, len(arr)) - for i, v := range arr { - s[i] = v.(string) - } - return s -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/doc.go b/vendor/github.com/santhosh-tekuri/jsonschema/doc.go deleted file mode 100644 index 89ea048..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/doc.go +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package jsonschema provides json-schema compilation and validation. - -This implementation of JSON Schema, supports draft4 and draft6. 
-Passes all tests(including optional) in https://github.com/json-schema/JSON-Schema-Test-Suite - -An example of using this package: - - schema, err := jsonschema.Compile("schemas/purchaseOrder.json") - if err != nil { - return err - } - f, err := os.Open("purchaseOrder.json") - if err != nil { - return err - } - defer f.Close() - if err = schema.Validate(f); err != nil { - return err - } - -The schema is compiled against the version specified in `$schema` property. -If `$schema` property is missing, it uses latest draft which currently is draft6. -You can force to use draft4 when `$schema` is missing, as follows: - - compiler := jsonschema.NewCompiler() - compler.Draft = jsonschema.Draft4 - -you can also validate go value using schema.ValidateInterface(interface{}) method. -but the argument should not be user-defined struct. - -This package supports loading json-schema from filePath and fileURL. - -To load json-schema from HTTPURL, add following import: - - import _ "github.com/santhosh-tekuri/jsonschema/httploader" - -Loading from urls for other schemes (such as ftp), can be plugged in. see package jsonschema/httploader -for an example - -To load json-schema from in-memory: - - data := `{"type": "string"}` - url := "sch.json" - compiler := jsonschema.NewCompiler() - if err := compiler.AddResource(url, strings.NewReader(data)); err != nil { - return err - } - schema, err := compiler.Compile(url) - if err != nil { - return err - } - f, err := os.Open("doc.json") - if err != nil { - return err - } - defer f.Close() - if err = schema.Validate(f); err != nil { - return err - } - -This package supports json string formats: date-time, hostname, email, ip-address, ipv4, ipv6, uri, uriref, regex, -format, json-pointer, uri-template (limited validation). - -Developers can define their own formats using package jsonschema/formats. - -The ValidationError returned by Validate method contains detailed context to understand why and where the error is. 
- -*/ -package jsonschema diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/draft4.go b/vendor/github.com/santhosh-tekuri/jsonschema/draft4.go deleted file mode 100644 index 1c98447..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/draft4.go +++ /dev/null @@ -1,172 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package jsonschema - -import "strings" - -// Draft4 respresents http://json-schema.org/specification-links.html#draft-4 -var Draft4 = &Draft{id: "id"} - -func init() { - c := NewCompiler() - url := "http://json-schema.org/draft-04/schema" - err := c.AddResource(url, strings.NewReader(`{ - "$schema": "http://json-schema.org/draft-04/schema#", - "description": "Core schema meta-schema", - "definitions": { - "schemaArray": { - "type": "array", - "minItems": 1, - "items": { "$ref": "#" } - }, - "positiveInteger": { - "type": "integer", - "minimum": 0 - }, - "positiveIntegerDefault0": { - "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] - }, - "simpleTypes": { - "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] - }, - "stringArray": { - "type": "array", - "items": { "type": "string" }, - "minItems": 1, - "uniqueItems": true - } - }, - "type": "object", - "properties": { - "id": { - "type": "string", - "format": "uriref" - }, - "$schema": { - "type": "string", - "format": "uri" - }, - "title": { - "type": "string" - }, - "description": { - "type": "string" - }, - "default": {}, - "multipleOf": { - "type": "number", - "minimum": 0, - "exclusiveMinimum": true - }, - "maximum": { - "type": "number" - }, - "exclusiveMaximum": { - "type": "boolean", - "default": false - }, - "minimum": { - "type": "number" - }, - "exclusiveMinimum": { - "type": "boolean", - "default": false - }, - "maxLength": { "$ref": "#/definitions/positiveInteger" }, - 
"minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, - "pattern": { - "type": "string", - "format": "regex" - }, - "additionalItems": { - "anyOf": [ - { "type": "boolean" }, - { "$ref": "#" } - ], - "default": {} - }, - "items": { - "anyOf": [ - { "$ref": "#" }, - { "$ref": "#/definitions/schemaArray" } - ], - "default": {} - }, - "maxItems": { "$ref": "#/definitions/positiveInteger" }, - "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, - "uniqueItems": { - "type": "boolean", - "default": false - }, - "maxProperties": { "$ref": "#/definitions/positiveInteger" }, - "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, - "required": { "$ref": "#/definitions/stringArray" }, - "additionalProperties": { - "anyOf": [ - { "type": "boolean" }, - { "$ref": "#" } - ], - "default": {} - }, - "definitions": { - "type": "object", - "additionalProperties": { "$ref": "#" }, - "default": {} - }, - "properties": { - "type": "object", - "additionalProperties": { "$ref": "#" }, - "default": {} - }, - "patternProperties": { - "type": "object", - "regexProperties": true, - "additionalProperties": { "$ref": "#" }, - "default": {} - }, - "regexProperties": { "type": "boolean" }, - "dependencies": { - "type": "object", - "additionalProperties": { - "anyOf": [ - { "$ref": "#" }, - { "$ref": "#/definitions/stringArray" } - ] - } - }, - "enum": { - "type": "array", - "minItems": 1, - "uniqueItems": true - }, - "type": { - "anyOf": [ - { "$ref": "#/definitions/simpleTypes" }, - { - "type": "array", - "items": { "$ref": "#/definitions/simpleTypes" }, - "minItems": 1, - "uniqueItems": true - } - ] - }, - "allOf": { "$ref": "#/definitions/schemaArray" }, - "anyOf": { "$ref": "#/definitions/schemaArray" }, - "oneOf": { "$ref": "#/definitions/schemaArray" }, - "not": { "$ref": "#" }, - "format": { "type": "string", "format": "format" }, - "$ref": { "type": "string" } - }, - "dependencies": { - "exclusiveMaximum": [ "maximum" ], - "exclusiveMinimum": [ 
"minimum" ] - }, - "default": {} - }`)) - if err != nil { - panic(err) - } - Draft4.meta = c.MustCompile(url) -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/draft6.go b/vendor/github.com/santhosh-tekuri/jsonschema/draft6.go deleted file mode 100644 index 4d896e9..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/draft6.go +++ /dev/null @@ -1,170 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package jsonschema - -import "strings" - -// Draft6 respresents http://json-schema.org/specification-links.html#draft-6 -var Draft6 = &Draft{id: "$id"} - -func init() { - c := NewCompiler() - url := "http://json-schema.org/draft-06/schema" - err := c.AddResource(url, strings.NewReader(`{ - "$schema": "http://json-schema.org/draft-06/schema#", - "$id": "http://json-schema.org/draft-06/schema#", - "title": "Core schema meta-schema", - "definitions": { - "schemaArray": { - "type": "array", - "minItems": 1, - "items": { "$ref": "#" } - }, - "nonNegativeInteger": { - "type": "integer", - "minimum": 0 - }, - "nonNegativeIntegerDefault0": { - "allOf": [ - { "$ref": "#/definitions/nonNegativeInteger" }, - { "default": 0 } - ] - }, - "simpleTypes": { - "enum": [ - "array", - "boolean", - "integer", - "null", - "number", - "object", - "string" - ] - }, - "stringArray": { - "type": "array", - "items": { "type": "string" }, - "uniqueItems": true, - "default": [] - } - }, - "type": ["object", "boolean"], - "properties": { - "$id": { - "type": "string", - "format": "uri-reference" - }, - "$schema": { - "type": "string", - "format": "uri" - }, - "$ref": { - "type": "string", - "format": "uri-reference" - }, - "title": { - "type": "string" - }, - "description": { - "type": "string" - }, - "default": {}, - "multipleOf": { - "type": "number", - "exclusiveMinimum": 0 - }, - "maximum": { - "type": "number" - }, - 
"exclusiveMaximum": { - "type": "number" - }, - "minimum": { - "type": "number" - }, - "exclusiveMinimum": { - "type": "number" - }, - "maxLength": { "$ref": "#/definitions/nonNegativeInteger" }, - "minLength": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, - "pattern": { - "type": "string", - "format": "regex" - }, - "additionalItems": { "$ref": "#" }, - "items": { - "anyOf": [ - { "$ref": "#" }, - { "$ref": "#/definitions/schemaArray" } - ], - "default": {} - }, - "maxItems": { "$ref": "#/definitions/nonNegativeInteger" }, - "minItems": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, - "uniqueItems": { - "type": "boolean", - "default": false - }, - "contains": { "$ref": "#" }, - "maxProperties": { "$ref": "#/definitions/nonNegativeInteger" }, - "minProperties": { "$ref": "#/definitions/nonNegativeIntegerDefault0" }, - "required": { "$ref": "#/definitions/stringArray" }, - "additionalProperties": { "$ref": "#" }, - "definitions": { - "type": "object", - "additionalProperties": { "$ref": "#" }, - "default": {} - }, - "properties": { - "type": "object", - "additionalProperties": { "$ref": "#" }, - "default": {} - }, - "patternProperties": { - "type": "object", - "regexProperties": true, - "additionalProperties": { "$ref": "#" }, - "default": {} - }, - "dependencies": { - "type": "object", - "additionalProperties": { - "anyOf": [ - { "$ref": "#" }, - { "$ref": "#/definitions/stringArray" } - ] - } - }, - "propertyNames": { "$ref": "#" }, - "const": {}, - "enum": { - "type": "array", - "minItems": 1, - "uniqueItems": true - }, - "type": { - "anyOf": [ - { "$ref": "#/definitions/simpleTypes" }, - { - "type": "array", - "items": { "$ref": "#/definitions/simpleTypes" }, - "minItems": 1, - "uniqueItems": true - } - ] - }, - "format": { "type": "string", "format": "format" }, - "allOf": { "$ref": "#/definitions/schemaArray" }, - "anyOf": { "$ref": "#/definitions/schemaArray" }, - "oneOf": { "$ref": "#/definitions/schemaArray" }, - "not": { "$ref": "#" } - 
}, - "default": {} - }`)) - if err != nil { - panic(err) - } - Draft6.meta = c.MustCompile(url) -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/errors.go b/vendor/github.com/santhosh-tekuri/jsonschema/errors.go deleted file mode 100644 index af1a1c5..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/errors.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package jsonschema - -import ( - "fmt" - "strings" -) - -// InvalidJSONTypeError is the error type returned by ValidateInteface. -// this tells that specified go object is not valid jsonType. -type InvalidJSONTypeError string - -func (e InvalidJSONTypeError) Error() string { - return fmt.Sprintf("invalid jsonType: %s", string(e)) -} - -// SchemaError is the error type returned by Compile. -type SchemaError struct { - // SchemaURL is the url to json-schema that filed to compile. - // This is helpful, if your schema refers to external schemas - SchemaURL string - - // Err is the error that occurred during compilation. - // It could be ValidationError, because compilation validates - // given schema against the json meta-schema - Err error -} - -func (se *SchemaError) Error() string { - return fmt.Sprintf("json-schema %q compilation failed. Reason:\n%s", se.SchemaURL, se.Err) -} - -// ValidationError is the error type returned by Validate. -type ValidationError struct { - // Message describes error - Message string - - // InstancePtr is json-pointer which refers to json-fragment in json instance - // that is not valid - InstancePtr string - - // SchemaURL is the url to json-schema against which validation failed. 
- // This is helpful, if your schema refers to external schemas - SchemaURL string - - // SchemaPtr is json-pointer which refers to json-fragment in json schema - // that failed to satisfy - SchemaPtr string - - // Causes details the nested validation errors - Causes []*ValidationError -} - -func (ve *ValidationError) add(causes ...error) error { - for _, cause := range causes { - addContext(ve.InstancePtr, ve.SchemaPtr, cause) - ve.Causes = append(ve.Causes, cause.(*ValidationError)) - } - return ve -} - -func (ve *ValidationError) Error() string { - msg := fmt.Sprintf("I[%s] S[%s] %s", ve.InstancePtr, ve.SchemaPtr, ve.Message) - for _, c := range ve.Causes { - for _, line := range strings.Split(c.Error(), "\n") { - msg += "\n " + line - } - } - return msg -} - -func validationError(schemaPtr string, format string, a ...interface{}) *ValidationError { - return &ValidationError{fmt.Sprintf(format, a...), "", "", schemaPtr, nil} -} - -func addContext(instancePtr, schemaPtr string, err error) error { - ve := err.(*ValidationError) - ve.InstancePtr = joinPtr(instancePtr, ve.InstancePtr) - if len(ve.SchemaURL) == 0 { - ve.SchemaPtr = joinPtr(schemaPtr, ve.SchemaPtr) - } - for _, cause := range ve.Causes { - addContext(instancePtr, schemaPtr, cause) - } - return ve -} - -func finishSchemaContext(err error, s *Schema) { - ve := err.(*ValidationError) - if len(ve.SchemaURL) == 0 { - ve.SchemaURL = s.url - ve.SchemaPtr = s.ptr + "/" + ve.SchemaPtr - for _, cause := range ve.Causes { - finishSchemaContext(cause, s) - } - } -} - -func finishInstanceContext(err error) { - ve := err.(*ValidationError) - if len(ve.InstancePtr) == 0 { - ve.InstancePtr = "#" - } else { - ve.InstancePtr = "#/" + ve.InstancePtr - } - for _, cause := range ve.Causes { - finishInstanceContext(cause) - } -} - -func joinPtr(ptr1, ptr2 string) string { - if len(ptr1) == 0 { - return ptr2 - } - if len(ptr2) == 0 { - return ptr1 - } - return ptr1 + "/" + ptr2 -} diff --git 
a/vendor/github.com/santhosh-tekuri/jsonschema/formats/formats.go b/vendor/github.com/santhosh-tekuri/jsonschema/formats/formats.go deleted file mode 100644 index 4f47459..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/formats/formats.go +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package formats provides functions to check string against format. -// -// It allows developers to register custom formats, that can be used -// in json-schema for validation. -package formats - -import ( - "net" - "net/mail" - "net/url" - "regexp" - "strconv" - "strings" - "time" -) - -// The Format type is a function, to check -// whether given string is in valid format. -type Format func(string) bool - -var formats = map[string]Format{ - "date-time": IsDateTime, - "hostname": IsHostname, - "email": IsEmail, - "ip-address": IsIPV4, - "ipv4": IsIPV4, - "ipv6": IsIPV6, - "uri": IsURI, - "uri-reference": IsURIReference, - "uriref": IsURIReference, - "uri-template": IsURIReference, - "regex": IsRegex, - "json-pointer": IsJSONPointer, -} - -func init() { - formats["format"] = IsFormat -} - -// Register registers Format object for given format name. -func Register(name string, f Format) { - formats[name] = f -} - -// Get returns Format object for given format name, if found -func Get(name string) (Format, bool) { - f, ok := formats[name] - return f, ok -} - -// IsFormat tells whether given string is a valid format that is registered -func IsFormat(s string) bool { - _, ok := formats[s] - return ok -} - -// IsDateTime tells whether given string is a valid date representation -// as defined by RFC 3339, section 5.6. -// -// Note: this is unable to parse UTC leap seconds. See https://github.com/golang/go/issues/8728. 
-func IsDateTime(s string) bool { - if _, err := time.Parse(time.RFC3339, s); err == nil { - return true - } - if _, err := time.Parse(time.RFC3339Nano, s); err == nil { - return true - } - return false -} - -// IsHostname tells whether given string is a valid representation -// for an Internet host name, as defined by RFC 1034, section 3.1. -// -// See https://en.wikipedia.org/wiki/Hostname#Restrictions_on_valid_host_names, for details. -func IsHostname(s string) bool { - // entire hostname (including the delimiting dots but not a trailing dot) has a maximum of 253 ASCII characters - s = strings.TrimSuffix(s, ".") - if len(s) > 253 { - return false - } - - // Hostnames are composed of series of labels concatenated with dots, as are all domain names - for _, label := range strings.Split(s, ".") { - // Each label must be from 1 to 63 characters long - if labelLen := len(label); labelLen < 1 || labelLen > 63 { - return false - } - - // labels could not start with a digit or with a hyphen - if first := s[0]; (first >= '0' && first <= '9') || (first == '-') { - return false - } - - // must not end with a hyphen - if label[len(label)-1] == '-' { - return false - } - - // labels may contain only the ASCII letters 'a' through 'z' (in a case-insensitive manner), - // the digits '0' through '9', and the hyphen ('-') - for _, c := range label { - if valid := (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || (c == '-'); !valid { - return false - } - } - } - - return true -} - -// IsEmail tells whether given string is a valid Internet email address -// as defined by RFC 5322, section 3.4.1. -// -// See https://en.wikipedia.org/wiki/Email_address, for details. 
-func IsEmail(s string) bool { - // entire email address to be no more than 254 characters long - if len(s) > 254 { - return false - } - - // email address is generally recognized as having two parts joined with an at-sign - at := strings.LastIndexByte(s, '@') - if at == -1 { - return false - } - local := s[0:at] - domain := s[at+1:] - - // local part may be up to 64 characters long - if len(local) > 64 { - return false - } - - // domain must match the requirements for a hostname - if !IsHostname(domain) { - return false - } - - _, err := mail.ParseAddress(s) - return err == nil -} - -// IsIPV4 tells whether given string is a valid representation of an IPv4 address -// according to the "dotted-quad" ABNF syntax as defined in RFC 2673, section 3.2. -func IsIPV4(s string) bool { - groups := strings.Split(s, ".") - if len(groups) != 4 { - return false - } - for _, group := range groups { - n, err := strconv.Atoi(group) - if err != nil { - return false - } - if n < 0 || n > 255 { - return false - } - } - return true -} - -// IsIPV6 tells whether given string is a valid representation of an IPv6 address -// as defined in RFC 2373, section 2.2. -func IsIPV6(s string) bool { - if !strings.Contains(s, ":") { - return false - } - return net.ParseIP(s) != nil -} - -// IsURI tells whether given string is valid URI, according to RFC 3986. -func IsURI(s string) bool { - u, err := url.Parse(s) - return err == nil && u.IsAbs() -} - -// IsURIReference tells whether given string is a valid URI Reference -// (either a URI or a relative-reference), according to RFC 3986. -func IsURIReference(s string) bool { - _, err := url.Parse(s) - return err == nil -} - -// IsRegex tells whether given string is a valid regular expression, -// according to the ECMA 262 regular expression dialect. -// -// The implementation uses go-lang regexp package. 
-func IsRegex(s string) bool { - _, err := regexp.Compile(s) - return err == nil -} - -// IsJSONPointer tells whether given string is a valid JSON Pointer. -// -// Note: It returns false for JSON Pointer URI fragments. -func IsJSONPointer(s string) bool { - for _, item := range strings.Split(s, "/") { - for i := 0; i < len(item); i++ { - if item[i] == '~' { - if i == len(item)-1 { - return false - } - switch item[i+1] { - case '~', '0', '1': - // valid - default: - return false - } - } - } - } - return true -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/formats/formats_test.go b/vendor/github.com/santhosh-tekuri/jsonschema/formats/formats_test.go deleted file mode 100644 index 19f295f..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/formats/formats_test.go +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package formats_test - -import ( - "strings" - "testing" - - "github.com/santhosh-tekuri/jsonschema/formats" -) - -type test struct { - str string - valid bool -} - -func TestIsFormat(t *testing.T) { - tests := []test{ - {"date-time", true}, - {"palindrome", false}, - } - for i, test := range tests { - if test.valid != formats.IsFormat(test.str) { - t.Errorf("#%d: %q, valid %t, got valid %t", i, test.str, test.valid, !test.valid) - } - } -} - -func TestIsDateTime(t *testing.T) { - tests := []test{ - {"1985-04-12T23:20:50.52Z", true}, - {"1996-12-19T16:39:57-08:00", true}, - {"1990-12-31T23:59:59Z", true}, - {"1990-12-31T15:59:59-08:00", true}, - {"1937-01-01T12:00:27.87+00:20", true}, - {"06/19/1963 08:30:06 PST", false}, - {"2013-350T01:01:01", false}, - } - for i, test := range tests { - if test.valid != formats.IsDateTime(test.str) { - t.Errorf("#%d: %q, valid %t, got valid %t", i, test.str, test.valid, !test.valid) - } - } -} - -func TestIsHostname(t *testing.T) { - tests := []test{ - {"www.example.com", true}, - {strings.Repeat("a", 63) + "." + strings.Repeat("a", 63) + "." + strings.Repeat("a", 63) + "." + strings.Repeat("a", 61), true}, - {strings.Repeat("a", 63) + "." + strings.Repeat("a", 63) + "." + strings.Repeat("a", 63) + "." + strings.Repeat("a", 61) + ".", true}, - {strings.Repeat("a", 63) + "." + strings.Repeat("a", 63) + "." + strings.Repeat("a", 63) + "." 
+ strings.Repeat("a", 62) + ".", false}, // length more than 253 characters long - {"www..com", false}, // empty label - {"-a-host-name-that-starts-with--", false}, - {"not_a_valid_host_name", false}, - {"a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", false}, - {"www.example-.com", false}, // label ends with a hyphen - } - for i, test := range tests { - if test.valid != formats.IsHostname(test.str) { - t.Errorf("#%d: %q, valid %t, got valid %t", i, test.str, test.valid, !test.valid) - } - } -} - -func TestIsEmail(t *testing.T) { - tests := []test{ - {"joe.bloggs@example.com", true}, - {"2962", false}, // no "@" character - {strings.Repeat("a", 244) + "@google.com", false}, // more than 254 characters long - {strings.Repeat("a", 65) + "@google.com", false}, // local part more than 64 characters long - {"santhosh@-google.com", false}, // invalid domain name - } - for i, test := range tests { - if test.valid != formats.IsEmail(test.str) { - t.Errorf("#%d: %q, valid %t, got valid %t", i, test.str, test.valid, !test.valid) - } - } -} - -func TestIsIPV4(t *testing.T) { - tests := []test{ - {"192.168.0.1", true}, - {"192.168.0.test", false}, // non-integer component - {"127.0.0.0.1", false}, // too many components - {"256.256.256.256", false}, // out-of-range values - {"127.0", false}, // without 4 components - {"0x7f000001", false}, // an integer - } - for i, test := range tests { - if test.valid != formats.IsIPV4(test.str) { - t.Errorf("#%d: %q, valid %t, got valid %t", i, test.str, test.valid, !test.valid) - } - } -} - -func TestIsIPV6(t *testing.T) { - tests := []test{ - {"::1", true}, - {"192.168.0.1", false}, // is IPV4 - {"12345::", false}, // out-of-range values - {"1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", false}, // too many components - {"::laptop", false}, // containing illegal characters - } - for i, test := range tests { - if test.valid != formats.IsIPV6(test.str) { - t.Errorf("#%d: %q, valid %t, got valid %t", i, 
test.str, test.valid, !test.valid) - } - } -} - -func TestIsURI(t *testing.T) { - tests := []test{ - {"http://foo.bar/?baz=qux#quux", true}, - {"//foo.bar/?baz=qux#quux", false}, // an invalid protocol-relative URI Reference - {"\\\\WINDOWS\\fileshare", false}, // an invalid URI - {"abc", false}, // an invalid URI though valid URI reference - } - for i, test := range tests { - if test.valid != formats.IsURI(test.str) { - t.Errorf("#%d: %q, valid %t, got valid %t", i, test.str, test.valid, !test.valid) - } - } -} - -func TestIsJSONPointer(t *testing.T) { - tests := []test{ - {"/foo/baz", true}, - {"/foo/baz~", false}, // ~ not escaped - } - for i, test := range tests { - if test.valid != formats.IsJSONPointer(test.str) { - t.Errorf("#%d: %q, valid %t, got valid %t", i, test.str, test.valid, !test.valid) - } - } -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/go.test.sh b/vendor/github.com/santhosh-tekuri/jsonschema/go.test.sh deleted file mode 100755 index 88c4e8b..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/go.test.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env bash - -set -e -echo "" > coverage.txt - -for d in $(go list ./... | grep -v vendor); do - go test -v -race -coverprofile=profile.out -covermode=atomic $d - if [ -f profile.out ]; then - cat profile.out >> coverage.txt - rm profile.out - fi -done diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/httploader/httploader.go b/vendor/github.com/santhosh-tekuri/jsonschema/httploader/httploader.go deleted file mode 100644 index 5f86cf9..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/httploader/httploader.go +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package httploader implements loader.Loader for http/https url. 
-// -// The package is typically only imported for the side effect of -// registering its Loaders. -// -// To use httploader, link this package into your program: -// import _ "github.com/santhosh-tekuri/jsonschema/httploader" -// -package httploader - -import ( - "fmt" - "io" - "net/http" - - "github.com/santhosh-tekuri/jsonschema/loader" -) - -// Client is the default HTTP Client used to Get the resource. -var Client = http.DefaultClient - -type httpLoader struct{} - -func (httpLoader) Load(url string) (io.ReadCloser, error) { - resp, err := Client.Get(url) - if err != nil { - return nil, err - } - if resp.StatusCode != http.StatusOK { - _ = resp.Body.Close() - return nil, fmt.Errorf("%s returned status code %d", url, resp.StatusCode) - } - return resp.Body, nil -} - -func init() { - loader.Register("http", httpLoader{}) - loader.Register("https", httpLoader{}) -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/loader/loader.go b/vendor/github.com/santhosh-tekuri/jsonschema/loader/loader.go deleted file mode 100644 index f8ee551..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/loader/loader.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package loader abstracts the reading document at given url. -// -// It allows developers to register loaders for different uri -// schemes. -package loader - -import ( - "fmt" - "io" - "net/url" - "os" - "path/filepath" - "runtime" - "strings" - "sync" -) - -// Loader is the interface that wraps the basic Load method. -// -// Load loads the document at given url and returns []byte, -// if successful. 
-type Loader interface { - Load(url string) (io.ReadCloser, error) -} - -type filePathLoader struct{} - -func (filePathLoader) Load(path string) (io.ReadCloser, error) { - return os.Open(path) -} - -type fileURLLoader struct{} - -func (fileURLLoader) Load(url string) (io.ReadCloser, error) { - f := strings.TrimPrefix(url, "file://") - if runtime.GOOS == "windows" { - f = strings.TrimPrefix(f, "/") - f = filepath.FromSlash(f) - } - return os.Open(f) -} - -var registry = make(map[string]Loader) -var mutex = sync.RWMutex{} - -// SchemeNotRegisteredError is the error type returned by Load function. -// It tells that no Loader is registered for that URL Scheme. -type SchemeNotRegisteredError string - -func (s SchemeNotRegisteredError) Error() string { - return fmt.Sprintf("no Loader registered for scheme %s", string(s)) -} - -// Register registers given Loader for given URI Scheme. -func Register(scheme string, loader Loader) { - mutex.Lock() - defer mutex.Unlock() - registry[scheme] = loader -} - -// UnRegister unregisters the registered loader(if any) for given URI Scheme. -func UnRegister(scheme string) { - mutex.Lock() - defer mutex.Unlock() - delete(registry, scheme) -} - -func get(s string) (Loader, error) { - mutex.RLock() - defer mutex.RUnlock() - u, err := url.Parse(s) - if err != nil { - return nil, err - } - if loader, ok := registry[u.Scheme]; ok { - return loader, nil - } - return nil, SchemeNotRegisteredError(u.Scheme) -} - -// Load loads the document at given url and returns []byte, -// if successful. 
-// -// If no Loader is registered against the URI Scheme, then it -// returns *SchemeNotRegisteredError -func Load(url string) (io.ReadCloser, error) { - loader, err := get(url) - if err != nil { - return nil, err - } - return loader.Load(url) -} - -func init() { - Register("", filePathLoader{}) - Register("file", fileURLLoader{}) -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/resource.go b/vendor/github.com/santhosh-tekuri/jsonschema/resource.go deleted file mode 100644 index 0421453..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/resource.go +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package jsonschema - -import ( - "encoding/json" - "errors" - "fmt" - "io" - "net/url" - "path/filepath" - "strconv" - "strings" -) - -type resource struct { - url string - doc interface{} - schemas map[string]*Schema -} - -// DecodeJSON decodes json document from r. -// -// Note that number is decoded into json.Number instead of as a float64 -func DecodeJSON(r io.Reader) (interface{}, error) { - decoder := json.NewDecoder(r) - decoder.UseNumber() - var doc interface{} - if err := decoder.Decode(&doc); err != nil { - return nil, err - } - if t, _ := decoder.Token(); t != nil { - return nil, fmt.Errorf("invalid character %v after top-level value", t) - } - return doc, nil -} - -func newResource(base string, r io.Reader) (*resource, error) { - if strings.IndexByte(base, '#') != -1 { - panic(fmt.Sprintf("BUG: newResource(%q)", base)) - } - doc, err := DecodeJSON(r) - if err != nil { - return nil, fmt.Errorf("parsing %q failed. 
Reason: %v", base, err) - } - return &resource{base, doc, make(map[string]*Schema)}, nil -} - -func resolveURL(base, ref string) (string, error) { - if ref == "" { - return base, nil - } - - refURL, err := url.Parse(ref) - if err != nil { - return "", err - } - if refURL.IsAbs() { - return normalize(ref), nil - } - - baseURL, err := url.Parse(base) - if err != nil { - return "", err - } - if baseURL.IsAbs() { - return normalize(baseURL.ResolveReference(refURL).String()), nil - } - - // filepath resolving - base, _ = split(base) - ref, fragment := split(ref) - if ref == "" { - return base + fragment, nil - } - dir, _ := filepath.Split(base) - return filepath.Join(dir, ref) + fragment, nil -} - -func (r *resource) resolvePtr(draft *Draft, ptr string) (string, interface{}, error) { - if !strings.HasPrefix(ptr, "#/") { - panic(fmt.Sprintf("BUG: resolvePtr(%q)", ptr)) - } - base := r.url - p := strings.TrimPrefix(ptr, "#/") - doc := r.doc - for _, item := range strings.Split(p, "/") { - item = strings.Replace(item, "~1", "/", -1) - item = strings.Replace(item, "~0", "~", -1) - item, err := url.PathUnescape(item) - if err != nil { - return "", nil, errors.New("unable to url unscape: " + item) - } - switch d := doc.(type) { - case map[string]interface{}: - if id, ok := d[draft.id]; ok { - if id, ok := id.(string); ok { - if base, err = resolveURL(base, id); err != nil { - return "", nil, err - } - } - } - doc = d[item] - case []interface{}: - index, err := strconv.Atoi(item) - if err != nil { - return "", nil, fmt.Errorf("invalid $ref %q, reason: %s", ptr, err) - } - if index < 0 || index >= len(d) { - return "", nil, fmt.Errorf("invalid $ref %q, reason: array index outofrange", ptr) - } - doc = d[index] - default: - return "", nil, errors.New("invalid $ref " + ptr) - } - } - return base, doc, nil -} - -func split(uri string) (string, string) { - hash := strings.IndexByte(uri, '#') - if hash == -1 { - return uri, "#" - } - return uri[0:hash], uri[hash:] -} - -func 
normalize(url string) string { - base, fragment := split(url) - if rootFragment(fragment) { - fragment = "#" - } - return base + fragment -} - -func rootFragment(fragment string) bool { - return fragment == "" || fragment == "#" || fragment == "#/" -} - -func resolveIDs(draft *Draft, base string, v interface{}, ids map[string]map[string]interface{}) error { - m, ok := v.(map[string]interface{}) - if !ok { - return nil - } - if id, ok := m[draft.id]; ok { - b, err := resolveURL(base, id.(string)) - if err != nil { - return err - } - base = b - ids[base] = m - } - if m, ok := m["not"]; ok { - if err := resolveIDs(draft, base, m, ids); err != nil { - return err - } - } - - for _, pname := range []string{"allOf", "anyOf", "oneOf"} { - if arr, ok := m[pname]; ok { - for _, m := range arr.([]interface{}) { - if err := resolveIDs(draft, base, m, ids); err != nil { - return err - } - } - } - } - - for _, pname := range []string{"definitions", "properties", "patternProperties"} { - if props, ok := m[pname]; ok { - for _, m := range props.(map[string]interface{}) { - if err := resolveIDs(draft, base, m, ids); err != nil { - return err - } - } - } - } - - if additionalProps, ok := m["additionalProperties"]; ok { - if additionalProps, ok := additionalProps.(map[string]interface{}); ok { - if err := resolveIDs(draft, base, additionalProps, ids); err != nil { - return err - } - } - } - - if deps, ok := m["dependencies"]; ok { - for _, pvalue := range deps.(map[string]interface{}) { - if m, ok := pvalue.(map[string]interface{}); ok { - if err := resolveIDs(draft, base, m, ids); err != nil { - return err - } - } - } - } - - if items, ok := m["items"]; ok { - switch items := items.(type) { - case map[string]interface{}: - if err := resolveIDs(draft, base, items, ids); err != nil { - return err - } - case []interface{}: - for _, item := range items { - if err := resolveIDs(draft, base, item, ids); err != nil { - return err - } - } - } - if additionalItems, ok := 
m["additionalItems"]; ok { - if additionalItems, ok := additionalItems.(map[string]interface{}); ok { - if err := resolveIDs(draft, base, additionalItems, ids); err != nil { - return err - } - } - } - } - - if draft == Draft6 { - if propertyNames, ok := m["propertyNames"]; ok { - if err := resolveIDs(draft, base, propertyNames, ids); err != nil { - return err - } - } - if contains, ok := m["contains"]; ok { - if err := resolveIDs(draft, base, contains, ids); err != nil { - return err - } - } - } - - return nil -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/schema.go b/vendor/github.com/santhosh-tekuri/jsonschema/schema.go deleted file mode 100644 index ecb21a9..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/schema.go +++ /dev/null @@ -1,508 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package jsonschema - -import ( - "encoding/json" - "fmt" - "io" - "math/big" - "net/url" - "regexp" - "strconv" - "strings" - "unicode/utf8" - - "github.com/santhosh-tekuri/jsonschema/formats" -) - -// A Schema represents compiled version of json-schema. -type Schema struct { - url string // absolute url of the resource - ptr string // json-pointer to schema. 
always starts with `#` - - // type agnostic validations - always *bool // always pass/fail - ref *Schema - types []string - constant []interface{} - enum []interface{} - enumError string // error message for enum fail - not *Schema - allOf []*Schema - anyOf []*Schema - oneOf []*Schema - - // object validations - minProperties int // -1 if not specified - maxProperties int // -1 if not specified - required []string - properties map[string]*Schema - propertyNames *Schema - regexProperties bool // property names must be valid regex - patternProperties map[*regexp.Regexp]*Schema - additionalProperties interface{} // nil or false or *Schema - dependencies map[string]interface{} // value is *Schema or []string - - // array validations - minItems int // -1 if not specified - maxItems int // -1 if not specified - uniqueItems bool - items interface{} // nil or *Schema or []*Schema - additionalItems interface{} // nil or bool or *Schema - contains *Schema - - // string validations - minLength int // -1 if not specified - maxLength int // -1 if not specified - pattern *regexp.Regexp - format formats.Format - formatName string - - // number validators - minimum *big.Float - exclusiveMinimum *big.Float - maximum *big.Float - exclusiveMaximum *big.Float - multipleOf *big.Float -} - -// Compile parses json-schema at given url returns, if successful, -// a Schema object that can be used to match against json. -// -// The json-schema is validated with draft4 specification. -// Returned error can be *SchemaError -func Compile(url string) (*Schema, error) { - return NewCompiler().Compile(url) -} - -// MustCompile is like Compile but panics if the url cannot be compiled to *Schema. -// It simplifies safe initialization of global variables holding compiled Schemas. -func MustCompile(url string) *Schema { - return NewCompiler().MustCompile(url) -} - -// Validate validates the given json data, against the json-schema. -// -// Returned error can be *ValidationError. 
-func (s *Schema) Validate(r io.Reader) error { - doc, err := DecodeJSON(r) - if err != nil { - return err - } - return s.ValidateInterface(doc) -} - -// ValidateInterface validates given doc, against the json-schema. -// -// the doc must be the value decoded by json package using interface{} type. -// we recommend to use jsonschema.DecodeJSON(io.Reader) to decode JSON. -func (s *Schema) ValidateInterface(doc interface{}) (err error) { - defer func() { - if r := recover(); r != nil { - if _, ok := r.(InvalidJSONTypeError); ok { - err = r.(InvalidJSONTypeError) - } else { - panic(r) - } - } - }() - if err := s.validate(doc); err != nil { - finishSchemaContext(err, s) - finishInstanceContext(err) - return &ValidationError{ - Message: fmt.Sprintf("doesn't validate with %q", s.url+s.ptr), - InstancePtr: "#", - SchemaURL: s.url, - SchemaPtr: s.ptr, - Causes: []*ValidationError{err.(*ValidationError)}, - } - } - return nil -} - -// validate validates given value v with this schema. -func (s *Schema) validate(v interface{}) error { - if s.always != nil { - if !*s.always { - return validationError("", "always fail") - } - return nil - } - - if s.ref != nil { - if err := s.ref.validate(v); err != nil { - finishSchemaContext(err, s.ref) - var refURL string - if s.url == s.ref.url { - refURL = s.ref.ptr - } else { - refURL = s.ref.url + s.ref.ptr - } - return validationError("$ref", "doesn't validate with %q", refURL).add(err) - } - - // All other properties in a "$ref" object MUST be ignored - return nil - } - - if len(s.types) > 0 { - vType := jsonType(v) - matched := false - for _, t := range s.types { - if vType == t { - matched = true - break - } else if t == "integer" && vType == "number" { - if _, ok := new(big.Int).SetString(fmt.Sprint(v), 10); ok { - matched = true - break - } - } - } - if !matched { - return validationError("type", "expected %s, but got %s", strings.Join(s.types, " or "), vType) - } - } - - if len(s.constant) > 0 { - if !equals(v, s.constant[0]) { - 
switch jsonType(s.constant[0]) { - case "object", "array": - return validationError("const", "const failed") - default: - return validationError("const", "value must be %#v", s.constant[0]) - } - } - } - - if len(s.enum) > 0 { - matched := false - for _, item := range s.enum { - if equals(v, item) { - matched = true - break - } - } - if !matched { - return validationError("enum", s.enumError) - } - } - - if s.not != nil && s.not.validate(v) == nil { - return validationError("not", "not failed") - } - - for i, sch := range s.allOf { - if err := sch.validate(v); err != nil { - return validationError("allOf/"+strconv.Itoa(i), "allOf failed").add(err) - } - } - - if len(s.anyOf) > 0 { - matched := false - var causes []error - for i, sch := range s.anyOf { - if err := sch.validate(v); err == nil { - matched = true - break - } else { - causes = append(causes, addContext("", strconv.Itoa(i), err)) - } - } - if !matched { - return validationError("anyOf", "anyOf failed").add(causes...) - } - } - - if len(s.oneOf) > 0 { - matched := -1 - var causes []error - for i, sch := range s.oneOf { - if err := sch.validate(v); err == nil { - if matched == -1 { - matched = i - } else { - return validationError("oneOf", "valid against schemas at indexes %d and %d", matched, i) - } - } else { - causes = append(causes, addContext("", strconv.Itoa(i), err)) - } - } - if matched == -1 { - return validationError("oneOf", "oneOf failed").add(causes...) 
- } - } - - switch v := v.(type) { - case map[string]interface{}: - if s.minProperties != -1 && len(v) < s.minProperties { - return validationError("minProperties", "minimum %d properties allowed, but found %d properties", s.minProperties, len(v)) - } - if s.maxProperties != -1 && len(v) > s.maxProperties { - return validationError("maxProperties", "maximum %d properties allowed, but found %d properties", s.maxProperties, len(v)) - } - if len(s.required) > 0 { - var missing []string - for _, pname := range s.required { - if _, ok := v[pname]; !ok { - missing = append(missing, strconv.Quote(pname)) - } - } - if len(missing) > 0 { - return validationError("required", "missing properties: %s", strings.Join(missing, ", ")) - } - } - - var additionalProps map[string]struct{} - if s.additionalProperties != nil { - additionalProps = make(map[string]struct{}, len(v)) - for pname := range v { - additionalProps[pname] = struct{}{} - } - } - - if len(s.properties) > 0 { - for pname, pschema := range s.properties { - if pvalue, ok := v[pname]; ok { - delete(additionalProps, pname) - if err := pschema.validate(pvalue); err != nil { - return addContext(escape(pname), "properties/"+escape(pname), err) - } - } - } - } - - if s.propertyNames != nil { - for pname := range v { - if err := s.propertyNames.validate(pname); err != nil { - return addContext(escape(pname), "propertyNames", err) - } - } - } - - if s.regexProperties { - for pname := range v { - if !formats.IsRegex(pname) { - return validationError("", "patternProperty %q is not valid regex", pname) - } - } - } - for pattern, pschema := range s.patternProperties { - for pname, pvalue := range v { - if pattern.MatchString(pname) { - delete(additionalProps, pname) - if err := pschema.validate(pvalue); err != nil { - return addContext(escape(pname), "patternProperties/"+escape(pattern.String()), err) - } - } - } - } - if s.additionalProperties != nil { - if _, ok := s.additionalProperties.(bool); ok { - if len(additionalProps) 
!= 0 { - pnames := make([]string, 0, len(additionalProps)) - for pname := range additionalProps { - pnames = append(pnames, strconv.Quote(pname)) - } - return validationError("additionalProperties", "additionalProperties %s not allowed", strings.Join(pnames, ", ")) - } - } else { - schema := s.additionalProperties.(*Schema) - for pname := range additionalProps { - if pvalue, ok := v[pname]; ok { - if err := schema.validate(pvalue); err != nil { - return addContext(escape(pname), "additionalProperties", err) - } - } - } - } - } - for dname, dvalue := range s.dependencies { - if _, ok := v[dname]; ok { - switch dvalue := dvalue.(type) { - case *Schema: - if err := dvalue.validate(v); err != nil { - return addContext("", "dependencies/"+escape(dname), err) - } - case []string: - for i, pname := range dvalue { - if _, ok := v[pname]; !ok { - return validationError("dependencies/"+escape(dname)+"/"+strconv.Itoa(i), "property %q is required, if %q property exists", pname, dname) - } - } - } - } - } - - case []interface{}: - if s.minItems != -1 && len(v) < s.minItems { - return validationError("minItems", "minimum %d items allowed, but found %d items", s.minItems, len(v)) - } - if s.maxItems != -1 && len(v) > s.maxItems { - return validationError("maxItems", "maximum %d items allowed, but found %d items", s.maxItems, len(v)) - } - if s.uniqueItems { - for i := 1; i < len(v); i++ { - for j := 0; j < i; j++ { - if equals(v[i], v[j]) { - return validationError("uniqueItems", "items at index %d and %d are equal", j, i) - } - } - } - } - switch items := s.items.(type) { - case *Schema: - for i, item := range v { - if err := items.validate(item); err != nil { - return addContext(strconv.Itoa(i), "items", err) - } - } - case []*Schema: - if additionalItems, ok := s.additionalItems.(bool); ok { - if !additionalItems && len(v) > len(items) { - return validationError("additionalItems", "only %d items are allowed, but found %d items", len(items), len(v)) - } - } - for i, item := 
range v { - if i < len(items) { - if err := items[i].validate(item); err != nil { - return addContext(strconv.Itoa(i), "items/"+strconv.Itoa(i), err) - } - } else if sch, ok := s.additionalItems.(*Schema); ok { - if err := sch.validate(item); err != nil { - return addContext(strconv.Itoa(i), "additionalItems", err) - } - } else { - break - } - } - } - if s.contains != nil { - matched := false - var causes []error - for i, item := range v { - if err := s.contains.validate(item); err != nil { - causes = append(causes, addContext(strconv.Itoa(i), "", err)) - } else { - matched = true - break - } - } - if !matched { - return validationError("contains", "contains failed").add(causes...) - } - } - - case string: - if s.minLength != -1 || s.maxLength != -1 { - length := utf8.RuneCount([]byte(v)) - if s.minLength != -1 && length < s.minLength { - return validationError("minLength", "length must be >= %d, but got %d", s.minLength, length) - } - if s.maxLength != -1 && length > s.maxLength { - return validationError("maxLength", "length must be <= %d, but got %d", s.maxLength, length) - } - } - if s.pattern != nil && !s.pattern.MatchString(v) { - return validationError("pattern", "does not match pattern %q", s.pattern) - } - if s.format != nil && !s.format(v) { - return validationError("format", "%q is not valid %q", v, s.formatName) - } - - case json.Number, float64, int, int32, int64: - num, _ := new(big.Float).SetString(fmt.Sprint(v)) - if s.minimum != nil && num.Cmp(s.minimum) < 0 { - return validationError("minimum", "must be >= %v but found %v", s.minimum, v) - } - if s.exclusiveMinimum != nil && num.Cmp(s.exclusiveMinimum) <= 0 { - return validationError("exclusiveMinimum", "must be > %v but found %v", s.exclusiveMinimum, v) - } - if s.maximum != nil && num.Cmp(s.maximum) > 0 { - return validationError("maximum", "must be <= %v but found %v", s.maximum, v) - } - if s.exclusiveMaximum != nil && num.Cmp(s.exclusiveMaximum) >= 0 { - return 
validationError("exclusiveMaximum", "must be < %v but found %v", s.exclusiveMaximum, v) - } - if s.multipleOf != nil { - if q := new(big.Float).Quo(num, s.multipleOf); !q.IsInt() { - return validationError("multipleOf", "%v not multipleOf %v", v, s.multipleOf) - } - } - } - - return nil -} - -// jsonType returns the json type of given value v. -// -// It panics if the given value is not valid json value -func jsonType(v interface{}) string { - switch v.(type) { - case nil: - return "null" - case bool: - return "boolean" - case json.Number, float64, int, int32, int64: - return "number" - case string: - return "string" - case []interface{}: - return "array" - case map[string]interface{}: - return "object" - } - panic(InvalidJSONTypeError(fmt.Sprintf("%T", v))) -} - -// equals tells if given two json values are equal or not. -func equals(v1, v2 interface{}) bool { - v1Type := jsonType(v1) - if v1Type != jsonType(v2) { - return false - } - switch v1Type { - case "array": - arr1, arr2 := v1.([]interface{}), v2.([]interface{}) - if len(arr1) != len(arr2) { - return false - } - for i := range arr1 { - if !equals(arr1[i], arr2[i]) { - return false - } - } - return true - case "object": - obj1, obj2 := v1.(map[string]interface{}), v2.(map[string]interface{}) - if len(obj1) != len(obj2) { - return false - } - for k, v1 := range obj1 { - if v2, ok := obj2[k]; ok { - if !equals(v1, v2) { - return false - } - } else { - return false - } - } - return true - case "number": - num1, _ := new(big.Float).SetString(string(v1.(json.Number))) - num2, _ := new(big.Float).SetString(string(v2.(json.Number))) - return num1.Cmp(num2) == 0 - default: - return v1 == v2 - } -} - -// escape converts given token to valid json-pointer token -func escape(token string) string { - token = strings.Replace(token, "~", "~0", -1) - token = strings.Replace(token, "/", "~1", -1) - return url.PathEscape(token) -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/schema_test.go 
b/vendor/github.com/santhosh-tekuri/jsonschema/schema_test.go deleted file mode 100644 index 45a99de..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/schema_test.go +++ /dev/null @@ -1,336 +0,0 @@ -// Copyright 2017 Santhosh Kumar Tekuri. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package jsonschema_test - -import ( - "bytes" - "crypto/tls" - "encoding/json" - "io/ioutil" - "net/http" - "net/http/httptest" - "os" - "path/filepath" - "strings" - "testing" - - "github.com/santhosh-tekuri/jsonschema" - _ "github.com/santhosh-tekuri/jsonschema/httploader" -) - -var draft4, draft6 []byte - -func init() { - var err error - draft4, err = ioutil.ReadFile("testdata/draft4.json") - if err != nil { - panic(err) - } - draft6, err = ioutil.ReadFile("testdata/draft6.json") - if err != nil { - panic(err) - } -} -func TestDraft4(t *testing.T) { - testFolder(t, "testdata/draft4", jsonschema.Draft4) -} - -func TestDraft6(t *testing.T) { - testFolder(t, "testdata/draft6", jsonschema.Draft6) -} - -type testGroup struct { - Description string - Schema json.RawMessage - Tests []struct { - Description string - Data json.RawMessage - Valid bool - } -} - -func testFolder(t *testing.T, folder string, draft *jsonschema.Draft) { - server := &http.Server{Addr: ":1234", Handler: http.FileServer(http.Dir("testdata/remotes"))} - go func() { - if err := server.ListenAndServe(); err != http.ErrServerClosed { - t.Fatal(err) - } - }() - defer server.Close() - - err := filepath.Walk(folder, func(path string, info os.FileInfo, err error) error { - if err != nil { - t.Error(err) - return nil - } - if info.IsDir() { - return nil - } - if filepath.Ext(info.Name()) != ".json" { - return nil - } - - t.Log(info.Name()) - data, err := ioutil.ReadFile(path) - if err != nil { - t.Errorf(" FAIL: %v\n", err) - return nil - } - var tg []testGroup - if err = json.Unmarshal(data, &tg); err != nil { - 
t.Errorf(" FAIL: %v\n", err) - return nil - } - for _, group := range tg { - t.Logf(" %s\n", group.Description) - c := jsonschema.NewCompiler() - if err := c.AddResource("http://json-schema.org/draft-04/schema", bytes.NewReader(draft4)); err != nil { - t.Errorf(" FAIL: add resource failed, reason: %v\n", err) - continue - } - if err := c.AddResource("http://json-schema.org/draft-06/schema", bytes.NewReader(draft6)); err != nil { - t.Errorf(" FAIL: add resource failed, reason: %v\n", err) - continue - } - c.Draft = draft - if err := c.AddResource("test.json", bytes.NewReader(group.Schema)); err != nil { - t.Errorf(" FAIL: add resource failed, reason: %v\n", err) - continue - } - schema, err := c.Compile("test.json") - if err != nil { - t.Errorf(" FAIL: schema compilation failed, reason: %v\n", err) - continue - } - for _, test := range group.Tests { - t.Logf(" %s\n", test.Description) - err = schema.Validate(bytes.NewReader(test.Data)) - valid := err == nil - if !valid { - for _, line := range strings.Split(err.Error(), "\n") { - t.Logf(" %s\n", line) - } - } - if test.Valid != valid { - t.Errorf(" FAIL: expected valid=%t got valid=%t\n", test.Valid, valid) - } - } - } - return nil - }) - if err != nil { - t.Fatal(err) - } - - invalidDocTests := []struct { - description string - doc string - }{ - {"non json instance", "{"}, - {"multiple json instance", "{}{}"}, - } - for _, test := range invalidDocTests { - t.Run(test.description, func(t *testing.T) { - c := jsonschema.NewCompiler() - if err := c.AddResource("test.json", strings.NewReader("{}")); err != nil { - t.Fatal(err) - } - s, err := c.Compile("test.json") - if err != nil { - t.Fatal(err) - } - if err := s.Validate(strings.NewReader(test.doc)); err != nil { - t.Log(err) - } else { - t.Error("error expected") - } - }) - } -} - -func TestInvalidSchema(t *testing.T) { - t.Run("MustCompile with panic", func(t *testing.T) { - defer func() { - if r := recover(); r == nil { - t.Error("panic expected") - } - }() - 
jsonschema.MustCompile("testdata/invalid_schema.json") - }) - - t.Run("MustCompile without panic", func(t *testing.T) { - defer func() { - if r := recover(); r != nil { - t.Error("panic not expected") - } - }() - jsonschema.MustCompile("testdata/customer_schema.json#/0") - }) - - t.Run("invalid json", func(t *testing.T) { - if err := jsonschema.NewCompiler().AddResource("test.json", strings.NewReader("{")); err == nil { - t.Error("error expected") - } else { - t.Log(err) - } - }) - - t.Run("multiple json", func(t *testing.T) { - if err := jsonschema.NewCompiler().AddResource("test.json", strings.NewReader("{}{}")); err == nil { - t.Error("error expected") - } else { - t.Log(err) - } - }) - - type test struct { - Description string - Schema json.RawMessage - Fragment string - } - data, err := ioutil.ReadFile("testdata/invalid_schemas.json") - if err != nil { - t.Fatal(err) - } - var tests []test - if err = json.Unmarshal(data, &tests); err != nil { - t.Fatal(err) - } - for _, test := range tests { - t.Run(test.Description, func(t *testing.T) { - c := jsonschema.NewCompiler() - url := "test.json" - if err := c.AddResource(url, bytes.NewReader(test.Schema)); err != nil { - t.Fatal(err) - } - if len(test.Fragment) > 0 { - url += test.Fragment - } - if _, err = c.Compile(url); err == nil { - t.Error("error expected") - } else { - t.Log(err) - } - }) - } -} - -func TestCompileURL(t *testing.T) { - tr := http.DefaultTransport.(*http.Transport) - if tr.TLSClientConfig == nil { - tr.TLSClientConfig = &tls.Config{} - } - tr.TLSClientConfig.InsecureSkipVerify = true - - handler := http.FileServer(http.Dir("testdata")) - httpServer := httptest.NewServer(handler) - defer httpServer.Close() - httpsServer := httptest.NewTLSServer(handler) - defer httpsServer.Close() - - abs, err := filepath.Abs("testdata") - if err != nil { - t.Error(err) - return - } - validTests := []struct { - schema, doc string - }{ - {"testdata/customer_schema.json#/0", "testdata/customer.json"}, - 
{"file://" + abs + "/customer_schema.json#/0", "testdata/customer.json"}, - {httpServer.URL + "/customer_schema.json#/0", "testdata/customer.json"}, - {httpsServer.URL + "/customer_schema.json#/0", "testdata/customer.json"}, - } - for i, test := range validTests { - t.Logf("valid #%d: %+v", i, test) - s, err := jsonschema.Compile(test.schema) - if err != nil { - t.Errorf("valid #%d: %v", i, err) - return - } - f, err := os.Open(test.doc) - if err != nil { - t.Errorf("valid #%d: %v", i, err) - return - } - err = s.Validate(f) - _ = f.Close() - if err != nil { - t.Errorf("valid #%d: %v", i, err) - } - } - - invalidTests := []string{ - "testdata/syntax_error.json", - "testdata/missing.json", - "file://" + abs + "/missing.json", - httpServer.URL + "/missing.json", - httpsServer.URL + "/missing.json", - } - for i, test := range invalidTests { - t.Logf("invalid #%d: %v", i, test) - if _, err := jsonschema.Compile(test); err == nil { - t.Errorf("invalid #%d: expected error", i) - } else { - t.Logf("invalid #%d: %v", i, err) - } - } -} - -func TestValidateInterface(t *testing.T) { - files := []string{ - "testdata/draft4/type.json", - "testdata/draft4/minimum.json", - "testdata/draft4/maximum.json", - } - for _, file := range files { - t.Log(filepath.Base(file)) - data, err := ioutil.ReadFile(file) - if err != nil { - t.Errorf(" FAIL: %v\n", err) - return - } - var tg []testGroup - if err = json.Unmarshal(data, &tg); err != nil { - t.Errorf(" FAIL: %v\n", err) - return - } - for _, group := range tg { - t.Logf(" %s\n", group.Description) - c := jsonschema.NewCompiler() - if err := c.AddResource("test.json", bytes.NewReader(group.Schema)); err != nil { - t.Errorf(" FAIL: add resource failed, reason: %v\n", err) - continue - } - c.Draft = jsonschema.Draft4 - schema, err := c.Compile("test.json") - if err != nil { - t.Errorf(" FAIL: schema compilation failed, reason: %v\n", err) - continue - } - for _, test := range group.Tests { - t.Logf(" %s\n", test.Description) - - decoder 
:= json.NewDecoder(bytes.NewReader(test.Data)) - var doc interface{} - if err := decoder.Decode(&doc); err != nil { - t.Errorf(" FAIL: decode json failed, reason: %v\n", err) - continue - } - - err = schema.ValidateInterface(doc) - valid := err == nil - if !valid { - for _, line := range strings.Split(err.Error(), "\n") { - t.Logf(" %s\n", line) - } - } - if test.Valid != valid { - t.Errorf(" FAIL: expected valid=%t got valid=%t\n", test.Valid, valid) - } - } - } - } -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/customer.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/customer.json deleted file mode 100644 index 7bcaf11..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/customer.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "shipping_address": { - "street_address": "1600 Pennsylvania Avenue NW", - "city": "Washington", - "state": "DC" - }, - "billing_address": { - "street_address": "1st Street SE", - "city": "Washington", - "state": "DC" - } -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/customer_schema.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/customer_schema.json deleted file mode 100644 index db00584..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/customer_schema.json +++ /dev/null @@ -1,10 +0,0 @@ -[ - { - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "properties": { - "billing_address": { "$ref": "definitions.json#/address" }, - "shipping_address": { "$ref": "definitions.json#/address" } - } - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/definitions.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/definitions.json deleted file mode 100644 index abd17ca..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/definitions.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "address": { - "type": "object", - "properties": { - 
"street_address": { "type": "string" }, - "city": { "type": "string" }, - "state": { "type": "string" } - }, - "required": ["street_address", "city", "state"] - } -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4.json deleted file mode 100644 index 0381502..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4.json +++ /dev/null @@ -1,231 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "description": "Core schema meta-schema", - "definitions": { - "schemaArray": { - "type": "array", - "minItems": 1, - "items": { - "$ref": "#" - } - }, - "positiveInteger": { - "type": "integer", - "minimum": 0 - }, - "positiveIntegerDefault0": { - "allOf": [ - { - "$ref": "#/definitions/positiveInteger" - }, - { - "default": 0 - } - ] - }, - "simpleTypes": { - "enum": [ - "array", - "boolean", - "integer", - "null", - "number", - "object", - "string" - ] - }, - "stringArray": { - "type": "array", - "items": { - "type": "string" - }, - "minItems": 1, - "uniqueItems": true - } - }, - "type": "object", - "properties": { - "id": { - "type": "string", - "format": "uriref" - }, - "$schema": { - "type": "string", - "format": "uri" - }, - "title": { - "type": "string" - }, - "description": { - "type": "string" - }, - "default": {}, - "multipleOf": { - "type": "number", - "minimum": 0, - "exclusiveMinimum": true - }, - "maximum": { - "type": "number" - }, - "exclusiveMaximum": { - "type": "boolean", - "default": false - }, - "minimum": { - "type": "number" - }, - "exclusiveMinimum": { - "type": "boolean", - "default": false - }, - "maxLength": { - "$ref": "#/definitions/positiveInteger" - }, - "minLength": { - "$ref": "#/definitions/positiveIntegerDefault0" - }, - "pattern": { - "type": "string", - "format": "regex" - }, - "additionalItems": { - "anyOf": [ - { - "type": "boolean" - }, - { - "$ref": "#" - } - ], - "default": {} - }, - "items": 
{ - "anyOf": [ - { - "$ref": "#" - }, - { - "$ref": "#/definitions/schemaArray" - } - ], - "default": {} - }, - "maxItems": { - "$ref": "#/definitions/positiveInteger" - }, - "minItems": { - "$ref": "#/definitions/positiveIntegerDefault0" - }, - "uniqueItems": { - "type": "boolean", - "default": false - }, - "maxProperties": { - "$ref": "#/definitions/positiveInteger" - }, - "minProperties": { - "$ref": "#/definitions/positiveIntegerDefault0" - }, - "required": { - "$ref": "#/definitions/stringArray" - }, - "additionalProperties": { - "anyOf": [ - { - "type": "boolean" - }, - { - "$ref": "#" - } - ], - "default": {} - }, - "definitions": { - "type": "object", - "additionalProperties": { - "$ref": "#" - }, - "default": {} - }, - "properties": { - "type": "object", - "additionalProperties": { - "$ref": "#" - }, - "default": {} - }, - "patternProperties": { - "type": "object", - "regexProperties": true, - "additionalProperties": { - "$ref": "#" - }, - "default": {} - }, - "regexProperties": { - "type": "boolean" - }, - "dependencies": { - "type": "object", - "additionalProperties": { - "anyOf": [ - { - "$ref": "#" - }, - { - "$ref": "#/definitions/stringArray" - } - ] - } - }, - "enum": { - "type": "array", - "minItems": 1, - "uniqueItems": true - }, - "type": { - "anyOf": [ - { - "$ref": "#/definitions/simpleTypes" - }, - { - "type": "array", - "items": { - "$ref": "#/definitions/simpleTypes" - }, - "minItems": 1, - "uniqueItems": true - } - ] - }, - "allOf": { - "$ref": "#/definitions/schemaArray" - }, - "anyOf": { - "$ref": "#/definitions/schemaArray" - }, - "oneOf": { - "$ref": "#/definitions/schemaArray" - }, - "not": { - "$ref": "#" - }, - "format": { - "type": "string", - "format": "format" - }, - "$ref": { - "type": "string" - } - }, - "dependencies": { - "exclusiveMaximum": [ - "maximum" - ], - "exclusiveMinimum": [ - "minimum" - ] - }, - "default": {} -} \ No newline at end of file diff --git 
a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/additionalItems.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/additionalItems.json deleted file mode 100644 index abecc57..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/additionalItems.json +++ /dev/null @@ -1,87 +0,0 @@ -[ - { - "description": "additionalItems as schema", - "schema": { - "items": [{}], - "additionalItems": {"type": "integer"} - }, - "tests": [ - { - "description": "additional items match schema", - "data": [ null, 2, 3, 4 ], - "valid": true - }, - { - "description": "additional items do not match schema", - "data": [ null, 2, 3, "foo" ], - "valid": false - } - ] - }, - { - "description": "items is schema, no additionalItems", - "schema": { - "items": {}, - "additionalItems": false - }, - "tests": [ - { - "description": "all items match schema", - "data": [ 1, 2, 3, 4, 5 ], - "valid": true - } - ] - }, - { - "description": "array of items with no additionalItems", - "schema": { - "items": [{}, {}, {}], - "additionalItems": false - }, - "tests": [ - { - "description": "fewer number of items present", - "data": [ 1, 2 ], - "valid": true - }, - { - "description": "equal number of items present", - "data": [ 1, 2, 3 ], - "valid": true - }, - { - "description": "additional items are not permitted", - "data": [ 1, 2, 3, 4 ], - "valid": false - } - ] - }, - { - "description": "additionalItems as false without items", - "schema": {"additionalItems": false}, - "tests": [ - { - "description": - "items defaults to empty schema so everything is valid", - "data": [ 1, 2, 3, 4, 5 ], - "valid": true - }, - { - "description": "ignores non-arrays", - "data": {"foo" : "bar"}, - "valid": true - } - ] - }, - { - "description": "additionalItems are allowed by default", - "schema": {"items": [{"type": "integer"}]}, - "tests": [ - { - "description": "only the first item is validated", - "data": [1, "foo", false], - "valid": true - } - ] - } -] 
diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/additionalProperties.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/additionalProperties.json deleted file mode 100644 index 40831f9..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/additionalProperties.json +++ /dev/null @@ -1,88 +0,0 @@ -[ - { - "description": - "additionalProperties being false does not allow other properties", - "schema": { - "properties": {"foo": {}, "bar": {}}, - "patternProperties": { "^v": {} }, - "additionalProperties": false - }, - "tests": [ - { - "description": "no additional properties is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "an additional property is invalid", - "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": [1, 2, 3], - "valid": true - }, - { - "description": "patternProperties are not additional properties", - "data": {"foo":1, "vroom": 2}, - "valid": true - } - ] - }, - { - "description": - "additionalProperties allows a schema which should validate", - "schema": { - "properties": {"foo": {}, "bar": {}}, - "additionalProperties": {"type": "boolean"} - }, - "tests": [ - { - "description": "no additional properties is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "an additional valid property is valid", - "data": {"foo" : 1, "bar" : 2, "quux" : true}, - "valid": true - }, - { - "description": "an additional invalid property is invalid", - "data": {"foo" : 1, "bar" : 2, "quux" : 12}, - "valid": false - } - ] - }, - { - "description": - "additionalProperties can exist by itself", - "schema": { - "additionalProperties": {"type": "boolean"} - }, - "tests": [ - { - "description": "an additional valid property is valid", - "data": {"foo" : true}, - "valid": true - }, - { - "description": "an additional invalid property is invalid", - "data": {"foo" : 1}, - "valid": 
false - } - ] - }, - { - "description": "additionalProperties are allowed by default", - "schema": {"properties": {"foo": {}, "bar": {}}}, - "tests": [ - { - "description": "additional properties are allowed", - "data": {"foo": 1, "bar": 2, "quux": true}, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/allOf.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/allOf.json deleted file mode 100644 index bbb5f89..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/allOf.json +++ /dev/null @@ -1,112 +0,0 @@ -[ - { - "description": "allOf", - "schema": { - "allOf": [ - { - "properties": { - "bar": {"type": "integer"} - }, - "required": ["bar"] - }, - { - "properties": { - "foo": {"type": "string"} - }, - "required": ["foo"] - } - ] - }, - "tests": [ - { - "description": "allOf", - "data": {"foo": "baz", "bar": 2}, - "valid": true - }, - { - "description": "mismatch second", - "data": {"foo": "baz"}, - "valid": false - }, - { - "description": "mismatch first", - "data": {"bar": 2}, - "valid": false - }, - { - "description": "wrong type", - "data": {"foo": "baz", "bar": "quux"}, - "valid": false - } - ] - }, - { - "description": "allOf with base schema", - "schema": { - "properties": {"bar": {"type": "integer"}}, - "required": ["bar"], - "allOf" : [ - { - "properties": { - "foo": {"type": "string"} - }, - "required": ["foo"] - }, - { - "properties": { - "baz": {"type": "null"} - }, - "required": ["baz"] - } - ] - }, - "tests": [ - { - "description": "valid", - "data": {"foo": "quux", "bar": 2, "baz": null}, - "valid": true - }, - { - "description": "mismatch base schema", - "data": {"foo": "quux", "baz": null}, - "valid": false - }, - { - "description": "mismatch first allOf", - "data": {"bar": 2, "baz": null}, - "valid": false - }, - { - "description": "mismatch second allOf", - "data": {"foo": "quux", "bar": 2}, - "valid": false - }, - { - "description": 
"mismatch both", - "data": {"bar": 2}, - "valid": false - } - ] - }, - { - "description": "allOf simple types", - "schema": { - "allOf": [ - {"maximum": 30}, - {"minimum": 20} - ] - }, - "tests": [ - { - "description": "valid", - "data": 25, - "valid": true - }, - { - "description": "mismatch one", - "data": 35, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/anyOf.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/anyOf.json deleted file mode 100644 index a58714a..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/anyOf.json +++ /dev/null @@ -1,68 +0,0 @@ -[ - { - "description": "anyOf", - "schema": { - "anyOf": [ - { - "type": "integer" - }, - { - "minimum": 2 - } - ] - }, - "tests": [ - { - "description": "first anyOf valid", - "data": 1, - "valid": true - }, - { - "description": "second anyOf valid", - "data": 2.5, - "valid": true - }, - { - "description": "both anyOf valid", - "data": 3, - "valid": true - }, - { - "description": "neither anyOf valid", - "data": 1.5, - "valid": false - } - ] - }, - { - "description": "anyOf with base schema", - "schema": { - "type": "string", - "anyOf" : [ - { - "maxLength": 2 - }, - { - "minLength": 4 - } - ] - }, - "tests": [ - { - "description": "mismatch base schema", - "data": 3, - "valid": false - }, - { - "description": "one anyOf valid", - "data": "foobar", - "valid": true - }, - { - "description": "both anyOf invalid", - "data": "foo", - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/default.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/default.json deleted file mode 100644 index 1762977..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/default.json +++ /dev/null @@ -1,49 +0,0 @@ -[ - { - "description": "invalid type for default", - "schema": { - "properties": { - "foo": { - "type": 
"integer", - "default": [] - } - } - }, - "tests": [ - { - "description": "valid when property is specified", - "data": {"foo": 13}, - "valid": true - }, - { - "description": "still valid when the invalid default is used", - "data": {}, - "valid": true - } - ] - }, - { - "description": "invalid string value for default", - "schema": { - "properties": { - "bar": { - "type": "string", - "minLength": 4, - "default": "bad" - } - } - }, - "tests": [ - { - "description": "valid when property is specified", - "data": {"bar": "good"}, - "valid": true - }, - { - "description": "still valid when the invalid default is used", - "data": {}, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/definitions.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/definitions.json deleted file mode 100644 index cf935a3..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/definitions.json +++ /dev/null @@ -1,32 +0,0 @@ -[ - { - "description": "valid definition", - "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, - "tests": [ - { - "description": "valid definition schema", - "data": { - "definitions": { - "foo": {"type": "integer"} - } - }, - "valid": true - } - ] - }, - { - "description": "invalid definition", - "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, - "tests": [ - { - "description": "invalid definition schema", - "data": { - "definitions": { - "foo": {"type": 1} - } - }, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/dependencies.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/dependencies.json deleted file mode 100644 index 7b9b16a..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/dependencies.json +++ /dev/null @@ -1,113 +0,0 @@ -[ - { - "description": "dependencies", - "schema": { - "dependencies": {"bar": ["foo"]} - }, - 
"tests": [ - { - "description": "neither", - "data": {}, - "valid": true - }, - { - "description": "nondependant", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "with dependency", - "data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "missing dependency", - "data": {"bar": 2}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": "foo", - "valid": true - } - ] - }, - { - "description": "multiple dependencies", - "schema": { - "dependencies": {"quux": ["foo", "bar"]} - }, - "tests": [ - { - "description": "neither", - "data": {}, - "valid": true - }, - { - "description": "nondependants", - "data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "with dependencies", - "data": {"foo": 1, "bar": 2, "quux": 3}, - "valid": true - }, - { - "description": "missing dependency", - "data": {"foo": 1, "quux": 2}, - "valid": false - }, - { - "description": "missing other dependency", - "data": {"bar": 1, "quux": 2}, - "valid": false - }, - { - "description": "missing both dependencies", - "data": {"quux": 1}, - "valid": false - } - ] - }, - { - "description": "multiple dependencies subschema", - "schema": { - "dependencies": { - "bar": { - "properties": { - "foo": {"type": "integer"}, - "bar": {"type": "integer"} - } - } - } - }, - "tests": [ - { - "description": "valid", - "data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "no dependency", - "data": {"foo": "quux"}, - "valid": true - }, - { - "description": "wrong type", - "data": {"foo": "quux", "bar": 2}, - "valid": false - }, - { - "description": "wrong type other", - "data": {"foo": 2, "bar": "quux"}, - "valid": false - }, - { - "description": "wrong type both", - "data": {"foo": "quux", "bar": "quux"}, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/enum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/enum.json deleted file mode 100644 
index f124436..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/enum.json +++ /dev/null @@ -1,72 +0,0 @@ -[ - { - "description": "simple enum validation", - "schema": {"enum": [1, 2, 3]}, - "tests": [ - { - "description": "one of the enum is valid", - "data": 1, - "valid": true - }, - { - "description": "something else is invalid", - "data": 4, - "valid": false - } - ] - }, - { - "description": "heterogeneous enum validation", - "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, - "tests": [ - { - "description": "one of the enum is valid", - "data": [], - "valid": true - }, - { - "description": "something else is invalid", - "data": null, - "valid": false - }, - { - "description": "objects are deep compared", - "data": {"foo": false}, - "valid": false - } - ] - }, - { - "description": "enums in properties", - "schema": { - "type":"object", - "properties": { - "foo": {"enum":["foo"]}, - "bar": {"enum":["bar"]} - }, - "required": ["bar"] - }, - "tests": [ - { - "description": "both properties are valid", - "data": {"foo":"foo", "bar":"bar"}, - "valid": true - }, - { - "description": "missing optional property is valid", - "data": {"bar":"bar"}, - "valid": true - }, - { - "description": "missing required property is invalid", - "data": {"foo":"foo"}, - "valid": false - }, - { - "description": "missing all properties is invalid", - "data": {}, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/items.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/items.json deleted file mode 100644 index 6a4e648..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/items.json +++ /dev/null @@ -1,78 +0,0 @@ -[ - { - "description": "a schema given for items", - "schema": { - "items": {"type": "integer"} - }, - "tests": [ - { - "description": "valid items", - "data": [ 1, 2, 3 ], - "valid": true - }, - { - "description": "wrong type of items", 
- "data": [1, "x"], - "valid": false - }, - { - "description": "ignores non-arrays", - "data": {"foo" : "bar"}, - "valid": true - }, - { - "description": "JavaScript pseudo-array is valid", - "data": { - "0": "invalid", - "length": 1 - }, - "valid": true - } - ] - }, - { - "description": "an array of schemas for items", - "schema": { - "items": [ - {"type": "integer"}, - {"type": "string"} - ] - }, - "tests": [ - { - "description": "correct types", - "data": [ 1, "foo" ], - "valid": true - }, - { - "description": "wrong types", - "data": [ "foo", 1 ], - "valid": false - }, - { - "description": "incomplete array of items", - "data": [ 1 ], - "valid": true - }, - { - "description": "array with additional items", - "data": [ 1, "foo", true ], - "valid": true - }, - { - "description": "empty array", - "data": [ ], - "valid": true - }, - { - "description": "JavaScript pseudo-array is valid", - "data": { - "0": "invalid", - "1": "valid", - "length": 2 - }, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maxItems.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maxItems.json deleted file mode 100644 index 3b53a6b..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maxItems.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "maxItems validation", - "schema": {"maxItems": 2}, - "tests": [ - { - "description": "shorter is valid", - "data": [1], - "valid": true - }, - { - "description": "exact length is valid", - "data": [1, 2], - "valid": true - }, - { - "description": "too long is invalid", - "data": [1, 2, 3], - "valid": false - }, - { - "description": "ignores non-arrays", - "data": "foobar", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maxLength.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maxLength.json deleted file mode 100644 index 811d35b..0000000 --- 
a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maxLength.json +++ /dev/null @@ -1,33 +0,0 @@ -[ - { - "description": "maxLength validation", - "schema": {"maxLength": 2}, - "tests": [ - { - "description": "shorter is valid", - "data": "f", - "valid": true - }, - { - "description": "exact length is valid", - "data": "fo", - "valid": true - }, - { - "description": "too long is invalid", - "data": "foo", - "valid": false - }, - { - "description": "ignores non-strings", - "data": 100, - "valid": true - }, - { - "description": "two supplementary Unicode code points is long enough", - "data": "\uD83D\uDCA9\uD83D\uDCA9", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maxProperties.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maxProperties.json deleted file mode 100644 index d282446..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maxProperties.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "maxProperties validation", - "schema": {"maxProperties": 2}, - "tests": [ - { - "description": "shorter is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "exact length is valid", - "data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "too long is invalid", - "data": {"foo": 1, "bar": 2, "baz": 3}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": "foobar", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maximum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maximum.json deleted file mode 100644 index 82718fb..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/maximum.json +++ /dev/null @@ -1,47 +0,0 @@ -[ - { - "description": "maximum validation", - "schema": {"maximum": 3.0}, - "tests": [ - { - "description": "below the maximum is valid", - 
"data": 2.6, - "valid": true - }, - { - "description": "boundary point is valid", - "data": 3.0, - "valid": true - }, - { - "description": "above the maximum is invalid", - "data": 3.5, - "valid": false - }, - { - "description": "ignores non-numbers", - "data": "x", - "valid": true - } - ] - }, - { - "description": "exclusiveMaximum validation", - "schema": { - "maximum": 3.0, - "exclusiveMaximum": true - }, - "tests": [ - { - "description": "below the maximum is still valid", - "data": 2.2, - "valid": true - }, - { - "description": "boundary point is invalid", - "data": 3.0, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minItems.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minItems.json deleted file mode 100644 index ed51188..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minItems.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "minItems validation", - "schema": {"minItems": 1}, - "tests": [ - { - "description": "longer is valid", - "data": [1, 2], - "valid": true - }, - { - "description": "exact length is valid", - "data": [1], - "valid": true - }, - { - "description": "too short is invalid", - "data": [], - "valid": false - }, - { - "description": "ignores non-arrays", - "data": "", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minLength.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minLength.json deleted file mode 100644 index 3f09158..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minLength.json +++ /dev/null @@ -1,33 +0,0 @@ -[ - { - "description": "minLength validation", - "schema": {"minLength": 2}, - "tests": [ - { - "description": "longer is valid", - "data": "foo", - "valid": true - }, - { - "description": "exact length is valid", - "data": "fo", - "valid": true - }, - { - "description": "too 
short is invalid", - "data": "f", - "valid": false - }, - { - "description": "ignores non-strings", - "data": 1, - "valid": true - }, - { - "description": "one supplementary Unicode code point is not long enough", - "data": "\uD83D\uDCA9", - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minProperties.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minProperties.json deleted file mode 100644 index a72c7d2..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minProperties.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "minProperties validation", - "schema": {"minProperties": 1}, - "tests": [ - { - "description": "longer is valid", - "data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "exact length is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "too short is invalid", - "data": {}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": "", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minimum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minimum.json deleted file mode 100644 index 9af8ed4..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/minimum.json +++ /dev/null @@ -1,47 +0,0 @@ -[ - { - "description": "minimum validation", - "schema": {"minimum": 1.1}, - "tests": [ - { - "description": "above the minimum is valid", - "data": 2.6, - "valid": true - }, - { - "description": "boundary point is valid", - "data": 1.1, - "valid": true - }, - { - "description": "below the minimum is invalid", - "data": 0.6, - "valid": false - }, - { - "description": "ignores non-numbers", - "data": "x", - "valid": true - } - ] - }, - { - "description": "exclusiveMinimum validation", - "schema": { - "minimum": 1.1, - "exclusiveMinimum": true - }, - "tests": [ - { - 
"description": "above the minimum is still valid", - "data": 1.2, - "valid": true - }, - { - "description": "boundary point is invalid", - "data": 1.1, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/multipleOf.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/multipleOf.json deleted file mode 100644 index ca3b761..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/multipleOf.json +++ /dev/null @@ -1,60 +0,0 @@ -[ - { - "description": "by int", - "schema": {"multipleOf": 2}, - "tests": [ - { - "description": "int by int", - "data": 10, - "valid": true - }, - { - "description": "int by int fail", - "data": 7, - "valid": false - }, - { - "description": "ignores non-numbers", - "data": "foo", - "valid": true - } - ] - }, - { - "description": "by number", - "schema": {"multipleOf": 1.5}, - "tests": [ - { - "description": "zero is multiple of anything", - "data": 0, - "valid": true - }, - { - "description": "4.5 is multiple of 1.5", - "data": 4.5, - "valid": true - }, - { - "description": "35 is not multiple of 1.5", - "data": 35, - "valid": false - } - ] - }, - { - "description": "by small number", - "schema": {"multipleOf": 0.0001}, - "tests": [ - { - "description": "0.0075 is multiple of 0.0001", - "data": 0.0075, - "valid": true - }, - { - "description": "0.00751 is not multiple of 0.0001", - "data": 0.00751, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/not.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/not.json deleted file mode 100644 index cbb7f46..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/not.json +++ /dev/null @@ -1,96 +0,0 @@ -[ - { - "description": "not", - "schema": { - "not": {"type": "integer"} - }, - "tests": [ - { - "description": "allowed", - "data": "foo", - "valid": true - }, - { - "description": "disallowed", 
- "data": 1, - "valid": false - } - ] - }, - { - "description": "not multiple types", - "schema": { - "not": {"type": ["integer", "boolean"]} - }, - "tests": [ - { - "description": "valid", - "data": "foo", - "valid": true - }, - { - "description": "mismatch", - "data": 1, - "valid": false - }, - { - "description": "other mismatch", - "data": true, - "valid": false - } - ] - }, - { - "description": "not more complex schema", - "schema": { - "not": { - "type": "object", - "properties": { - "foo": { - "type": "string" - } - } - } - }, - "tests": [ - { - "description": "match", - "data": 1, - "valid": true - }, - { - "description": "other match", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "mismatch", - "data": {"foo": "bar"}, - "valid": false - } - ] - }, - { - "description": "forbidden property", - "schema": { - "properties": { - "foo": { - "not": {} - } - } - }, - "tests": [ - { - "description": "property present", - "data": {"foo": 1, "bar": 2}, - "valid": false - }, - { - "description": "property absent", - "data": {"bar": 1, "baz": 2}, - "valid": true - } - ] - } - -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/oneOf.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/oneOf.json deleted file mode 100644 index 1eaa4e4..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/oneOf.json +++ /dev/null @@ -1,68 +0,0 @@ -[ - { - "description": "oneOf", - "schema": { - "oneOf": [ - { - "type": "integer" - }, - { - "minimum": 2 - } - ] - }, - "tests": [ - { - "description": "first oneOf valid", - "data": 1, - "valid": true - }, - { - "description": "second oneOf valid", - "data": 2.5, - "valid": true - }, - { - "description": "both oneOf valid", - "data": 3, - "valid": false - }, - { - "description": "neither oneOf valid", - "data": 1.5, - "valid": false - } - ] - }, - { - "description": "oneOf with base schema", - "schema": { - "type": "string", - "oneOf" : [ - { - 
"minLength": 2 - }, - { - "maxLength": 4 - } - ] - }, - "tests": [ - { - "description": "mismatch base schema", - "data": 3, - "valid": false - }, - { - "description": "one oneOf valid", - "data": "foobar", - "valid": true - }, - { - "description": "both oneOf valid", - "data": "foo", - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/bignum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/bignum.json deleted file mode 100644 index ccc7c17..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/bignum.json +++ /dev/null @@ -1,107 +0,0 @@ -[ - { - "description": "integer", - "schema": {"type": "integer"}, - "tests": [ - { - "description": "a bignum is an integer", - "data": 12345678910111213141516171819202122232425262728293031, - "valid": true - } - ] - }, - { - "description": "number", - "schema": {"type": "number"}, - "tests": [ - { - "description": "a bignum is a number", - "data": 98249283749234923498293171823948729348710298301928331, - "valid": true - } - ] - }, - { - "description": "integer", - "schema": {"type": "integer"}, - "tests": [ - { - "description": "a negative bignum is an integer", - "data": -12345678910111213141516171819202122232425262728293031, - "valid": true - } - ] - }, - { - "description": "number", - "schema": {"type": "number"}, - "tests": [ - { - "description": "a negative bignum is a number", - "data": -98249283749234923498293171823948729348710298301928331, - "valid": true - } - ] - }, - { - "description": "string", - "schema": {"type": "string"}, - "tests": [ - { - "description": "a bignum is not a string", - "data": 98249283749234923498293171823948729348710298301928331, - "valid": false - } - ] - }, - { - "description": "integer comparison", - "schema": {"maximum": 18446744073709551615}, - "tests": [ - { - "description": "comparison works for high numbers", - "data": 18446744073709551600, - "valid": true 
- } - ] - }, - { - "description": "float comparison with high precision", - "schema": { - "maximum": 972783798187987123879878123.18878137, - "exclusiveMaximum": true - }, - "tests": [ - { - "description": "comparison works for high numbers", - "data": 972783798187987123879878123.188781371, - "valid": false - } - ] - }, - { - "description": "integer comparison", - "schema": {"minimum": -18446744073709551615}, - "tests": [ - { - "description": "comparison works for very negative numbers", - "data": -18446744073709551600, - "valid": true - } - ] - }, - { - "description": "float comparison with high precision on negative numbers", - "schema": { - "minimum": -972783798187987123879878123.18878137, - "exclusiveMinimum": true - }, - "tests": [ - { - "description": "comparison works for very negative numbers", - "data": -972783798187987123879878123.188781371, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/ecmascript-regex.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/ecmascript-regex.json deleted file mode 100644 index 08dc936..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/ecmascript-regex.json +++ /dev/null @@ -1,13 +0,0 @@ -[ - { - "description": "ECMA 262 regex non-compliance", - "schema": { "format": "regex" }, - "tests": [ - { - "description": "ECMA 262 has no support for \\Z anchor from .NET", - "data": "^\\S(|(.|\\n)*\\S)\\Z", - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/format.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/format.json deleted file mode 100644 index 80373bd..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/format.json +++ /dev/null @@ -1,148 +0,0 @@ -[ - { - "description": "validation of date-time strings", - "schema": {"format": "date-time"}, - "tests": 
[ - { - "description": "a valid date-time string", - "data": "1963-06-19T08:30:06.283185Z", - "valid": true - }, - { - "description": "an invalid date-time string", - "data": "06/19/1963 08:30:06 PST", - "valid": false - }, - { - "description": "only RFC3339 not all of ISO 8601 are valid", - "data": "2013-350T01:01:01", - "valid": false - } - ] - }, - { - "description": "validation of URIs", - "schema": {"format": "uri"}, - "tests": [ - { - "description": "a valid URI", - "data": "http://foo.bar/?baz=qux#quux", - "valid": true - }, - { - "description": "an invalid protocol-relative URI Reference", - "data": "//foo.bar/?baz=qux#quux", - "valid": false - }, - { - "description": "an invalid URI", - "data": "\\\\WINDOWS\\fileshare", - "valid": false - }, - { - "description": "an invalid URI though valid URI reference", - "data": "abc", - "valid": false - } - ] - }, - { - "description": "validation of e-mail addresses", - "schema": {"format": "email"}, - "tests": [ - { - "description": "a valid e-mail address", - "data": "joe.bloggs@example.com", - "valid": true - }, - { - "description": "an invalid e-mail address", - "data": "2962", - "valid": false - } - ] - }, - { - "description": "validation of IP addresses", - "schema": {"format": "ipv4"}, - "tests": [ - { - "description": "a valid IP address", - "data": "192.168.0.1", - "valid": true - }, - { - "description": "an IP address with too many components", - "data": "127.0.0.0.1", - "valid": false - }, - { - "description": "an IP address with out-of-range values", - "data": "256.256.256.256", - "valid": false - }, - { - "description": "an IP address without 4 components", - "data": "127.0", - "valid": false - }, - { - "description": "an IP address as an integer", - "data": "0x7f000001", - "valid": false - } - ] - }, - { - "description": "validation of IPv6 addresses", - "schema": {"format": "ipv6"}, - "tests": [ - { - "description": "a valid IPv6 address", - "data": "::1", - "valid": true - }, - { - "description": "an 
IPv6 address with out-of-range values", - "data": "12345::", - "valid": false - }, - { - "description": "an IPv6 address with too many components", - "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", - "valid": false - }, - { - "description": "an IPv6 address containing illegal characters", - "data": "::laptop", - "valid": false - } - ] - }, - { - "description": "validation of host names", - "schema": {"format": "hostname"}, - "tests": [ - { - "description": "a valid host name", - "data": "www.example.com", - "valid": true - }, - { - "description": "a host name starting with an illegal character", - "data": "-a-host-name-that-starts-with--", - "valid": false - }, - { - "description": "a host name containing illegal characters", - "data": "not_a_valid_host_name", - "valid": false - }, - { - "description": "a host name with a component too long", - "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/zeroTerminatedFloats.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/zeroTerminatedFloats.json deleted file mode 100644 index 9b50ea2..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/optional/zeroTerminatedFloats.json +++ /dev/null @@ -1,15 +0,0 @@ -[ - { - "description": "some languages do not distinguish between different types of numeric value", - "schema": { - "type": "integer" - }, - "tests": [ - { - "description": "a float is not an integer even without fractional part", - "data": 1.0, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/pattern.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/pattern.json deleted file mode 100644 index 25e7299..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/pattern.json +++ /dev/null @@ -1,34 
+0,0 @@ -[ - { - "description": "pattern validation", - "schema": {"pattern": "^a*$"}, - "tests": [ - { - "description": "a matching pattern is valid", - "data": "aaa", - "valid": true - }, - { - "description": "a non-matching pattern is invalid", - "data": "abc", - "valid": false - }, - { - "description": "ignores non-strings", - "data": true, - "valid": true - } - ] - }, - { - "description": "pattern is not anchored", - "schema": {"pattern": "a+"}, - "tests": [ - { - "description": "matches a substring", - "data": "xxaayy", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/patternProperties.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/patternProperties.json deleted file mode 100644 index 18586e5..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/patternProperties.json +++ /dev/null @@ -1,110 +0,0 @@ -[ - { - "description": - "patternProperties validates properties matching a regex", - "schema": { - "patternProperties": { - "f.*o": {"type": "integer"} - } - }, - "tests": [ - { - "description": "a single valid match is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "multiple valid matches is valid", - "data": {"foo": 1, "foooooo" : 2}, - "valid": true - }, - { - "description": "a single invalid match is invalid", - "data": {"foo": "bar", "fooooo": 2}, - "valid": false - }, - { - "description": "multiple invalid matches is invalid", - "data": {"foo": "bar", "foooooo" : "baz"}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": 12, - "valid": true - } - ] - }, - { - "description": "multiple simultaneous patternProperties are validated", - "schema": { - "patternProperties": { - "a*": {"type": "integer"}, - "aaa*": {"maximum": 20} - } - }, - "tests": [ - { - "description": "a single valid match is valid", - "data": {"a": 21}, - "valid": true - }, - { - "description": "a simultaneous match is valid", - 
"data": {"aaaa": 18}, - "valid": true - }, - { - "description": "multiple matches is valid", - "data": {"a": 21, "aaaa": 18}, - "valid": true - }, - { - "description": "an invalid due to one is invalid", - "data": {"a": "bar"}, - "valid": false - }, - { - "description": "an invalid due to the other is invalid", - "data": {"aaaa": 31}, - "valid": false - }, - { - "description": "an invalid due to both is invalid", - "data": {"aaa": "foo", "aaaa": 31}, - "valid": false - } - ] - }, - { - "description": "regexes are not anchored by default and are case sensitive", - "schema": { - "patternProperties": { - "[0-9]{2,}": { "type": "boolean" }, - "X_": { "type": "string" } - } - }, - "tests": [ - { - "description": "non recognized members are ignored", - "data": { "answer 1": "42" }, - "valid": true - }, - { - "description": "recognized members are accounted for", - "data": { "a31b": null }, - "valid": false - }, - { - "description": "regexes are case sensitive", - "data": { "a_x_3": 3 }, - "valid": true - }, - { - "description": "regexes are case sensitive, 2", - "data": { "a_X_3": 3 }, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/properties.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/properties.json deleted file mode 100644 index cd1644d..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/properties.json +++ /dev/null @@ -1,92 +0,0 @@ -[ - { - "description": "object properties validation", - "schema": { - "properties": { - "foo": {"type": "integer"}, - "bar": {"type": "string"} - } - }, - "tests": [ - { - "description": "both properties present and valid is valid", - "data": {"foo": 1, "bar": "baz"}, - "valid": true - }, - { - "description": "one property invalid is invalid", - "data": {"foo": 1, "bar": {}}, - "valid": false - }, - { - "description": "both properties invalid is invalid", - "data": {"foo": [], "bar": {}}, - "valid": false - }, - { - 
"description": "doesn't invalidate other properties", - "data": {"quux": []}, - "valid": true - }, - { - "description": "ignores non-objects", - "data": [], - "valid": true - } - ] - }, - { - "description": - "properties, patternProperties, additionalProperties interaction", - "schema": { - "properties": { - "foo": {"type": "array", "maxItems": 3}, - "bar": {"type": "array"} - }, - "patternProperties": {"f.o": {"minItems": 2}}, - "additionalProperties": {"type": "integer"} - }, - "tests": [ - { - "description": "property validates property", - "data": {"foo": [1, 2]}, - "valid": true - }, - { - "description": "property invalidates property", - "data": {"foo": [1, 2, 3, 4]}, - "valid": false - }, - { - "description": "patternProperty invalidates property", - "data": {"foo": []}, - "valid": false - }, - { - "description": "patternProperty validates nonproperty", - "data": {"fxo": [1, 2]}, - "valid": true - }, - { - "description": "patternProperty invalidates nonproperty", - "data": {"fxo": []}, - "valid": false - }, - { - "description": "additionalProperty ignores property", - "data": {"bar": []}, - "valid": true - }, - { - "description": "additionalProperty validates others", - "data": {"quux": 3}, - "valid": true - }, - { - "description": "additionalProperty invalidates others", - "data": {"quux": "foo"}, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/ref.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/ref.json deleted file mode 100644 index 52cf50a..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/ref.json +++ /dev/null @@ -1,300 +0,0 @@ -[ - { - "description": "root pointer ref", - "schema": { - "properties": { - "foo": {"$ref": "#"} - }, - "additionalProperties": false - }, - "tests": [ - { - "description": "match", - "data": {"foo": false}, - "valid": true - }, - { - "description": "recursive match", - "data": {"foo": {"foo": false}}, - 
"valid": true - }, - { - "description": "mismatch", - "data": {"bar": false}, - "valid": false - }, - { - "description": "recursive mismatch", - "data": {"foo": {"bar": false}}, - "valid": false - } - ] - }, - { - "description": "relative pointer ref to object", - "schema": { - "properties": { - "foo": {"type": "integer"}, - "bar": {"$ref": "#/properties/foo"} - } - }, - "tests": [ - { - "description": "match", - "data": {"bar": 3}, - "valid": true - }, - { - "description": "mismatch", - "data": {"bar": true}, - "valid": false - } - ] - }, - { - "description": "relative pointer ref to array", - "schema": { - "items": [ - {"type": "integer"}, - {"$ref": "#/items/0"} - ] - }, - "tests": [ - { - "description": "match array", - "data": [1, 2], - "valid": true - }, - { - "description": "mismatch array", - "data": [1, "foo"], - "valid": false - } - ] - }, - { - "description": "escaped pointer ref", - "schema": { - "tilda~field": {"type": "integer"}, - "slash/field": {"type": "integer"}, - "percent%field": {"type": "integer"}, - "properties": { - "tilda": {"$ref": "#/tilda~0field"}, - "slash": {"$ref": "#/slash~1field"}, - "percent": {"$ref": "#/percent%25field"} - } - }, - "tests": [ - { - "description": "slash invalid", - "data": {"slash": "aoeu"}, - "valid": false - }, - { - "description": "tilda invalid", - "data": {"tilda": "aoeu"}, - "valid": false - }, - { - "description": "percent invalid", - "data": {"percent": "aoeu"}, - "valid": false - }, - { - "description": "slash valid", - "data": {"slash": 123}, - "valid": true - }, - { - "description": "tilda valid", - "data": {"tilda": 123}, - "valid": true - }, - { - "description": "percent valid", - "data": {"percent": 123}, - "valid": true - } - ] - }, - { - "description": "nested refs", - "schema": { - "definitions": { - "a": {"type": "integer"}, - "b": {"$ref": "#/definitions/a"}, - "c": {"$ref": "#/definitions/b"} - }, - "$ref": "#/definitions/c" - }, - "tests": [ - { - "description": "nested ref valid", - "data": 
5, - "valid": true - }, - { - "description": "nested ref invalid", - "data": "a", - "valid": false - } - ] - }, - { - "description": "ref overrides any sibling keywords", - "schema": { - "definitions": { - "reffed": { - "type": "array" - } - }, - "properties": { - "foo": { - "$ref": "#/definitions/reffed", - "maxItems": 2 - } - } - }, - "tests": [ - { - "description": "ref valid", - "data": { "foo": [] }, - "valid": true - }, - { - "description": "ref valid, maxItems ignored", - "data": { "foo": [ 1, 2, 3] }, - "valid": true - }, - { - "description": "ref invalid", - "data": { "foo": "string" }, - "valid": false - } - ] - }, - { - "description": "remote ref, containing refs itself", - "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, - "tests": [ - { - "description": "remote ref valid", - "data": {"minLength": 1}, - "valid": true - }, - { - "description": "remote ref invalid", - "data": {"minLength": -1}, - "valid": false - } - ] - }, - { - "description": "property named $ref that is not a reference", - "schema": { - "properties": { - "$ref": {"type": "string"} - } - }, - "tests": [ - { - "description": "property named $ref valid", - "data": {"$ref": "a"}, - "valid": true - }, - { - "description": "property named $ref invalid", - "data": {"$ref": 2}, - "valid": false - } - ] - }, - { - "description": "Recursive references between schemas", - "schema": { - "id": "http://localhost:1234/tree", - "description": "tree of nodes", - "type": "object", - "properties": { - "meta": {"type": "string"}, - "nodes": { - "type": "array", - "items": {"$ref": "node"} - } - }, - "required": ["meta", "nodes"], - "definitions": { - "node": { - "id": "http://localhost:1234/node", - "description": "node", - "type": "object", - "properties": { - "value": {"type": "number"}, - "subtree": {"$ref": "tree"} - }, - "required": ["value"] - } - } - }, - "tests": [ - { - "description": "valid tree", - "data": { - "meta": "root", - "nodes": [ - { - "value": 1, - "subtree": { - "meta": 
"child", - "nodes": [ - {"value": 1.1}, - {"value": 1.2} - ] - } - }, - { - "value": 2, - "subtree": { - "meta": "child", - "nodes": [ - {"value": 2.1}, - {"value": 2.2} - ] - } - } - ] - }, - "valid": true - }, - { - "description": "invalid tree", - "data": { - "meta": "root", - "nodes": [ - { - "value": 1, - "subtree": { - "meta": "child", - "nodes": [ - {"value": "string is invalid"}, - {"value": 1.2} - ] - } - }, - { - "value": 2, - "subtree": { - "meta": "child", - "nodes": [ - {"value": 2.1}, - {"value": 2.2} - ] - } - } - ] - }, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/refRemote.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/refRemote.json deleted file mode 100644 index 8611fad..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/refRemote.json +++ /dev/null @@ -1,171 +0,0 @@ -[ - { - "description": "remote ref", - "schema": {"$ref": "http://localhost:1234/integer.json"}, - "tests": [ - { - "description": "remote ref valid", - "data": 1, - "valid": true - }, - { - "description": "remote ref invalid", - "data": "a", - "valid": false - } - ] - }, - { - "description": "fragment within remote ref", - "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"}, - "tests": [ - { - "description": "remote fragment valid", - "data": 1, - "valid": true - }, - { - "description": "remote fragment invalid", - "data": "a", - "valid": false - } - ] - }, - { - "description": "ref within remote ref", - "schema": { - "$ref": "http://localhost:1234/subSchemas.json#/refToInteger" - }, - "tests": [ - { - "description": "ref within ref valid", - "data": 1, - "valid": true - }, - { - "description": "ref within ref invalid", - "data": "a", - "valid": false - } - ] - }, - { - "description": "base URI change", - "schema": { - "id": "http://localhost:1234/", - "items": { - "id": "folder/", - "items": {"$ref": "folderInteger.json"} - } - }, - "tests": [ - 
{ - "description": "base URI change ref valid", - "data": [[1]], - "valid": true - }, - { - "description": "base URI change ref invalid", - "data": [["a"]], - "valid": false - } - ] - }, - { - "description": "base URI change - change folder", - "schema": { - "id": "http://localhost:1234/scope_change_defs1.json", - "type" : "object", - "properties": { - "list": {"$ref": "#/definitions/baz"} - }, - "definitions": { - "baz": { - "id": "folder/", - "type": "array", - "items": {"$ref": "folderInteger.json"} - } - } - }, - "tests": [ - { - "description": "number is valid", - "data": {"list": [1]}, - "valid": true - }, - { - "description": "string is invalid", - "data": {"list": ["a"]}, - "valid": false - } - ] - }, - { - "description": "base URI change - change folder in subschema", - "schema": { - "id": "http://localhost:1234/scope_change_defs2.json", - "type" : "object", - "properties": { - "list": {"$ref": "#/definitions/baz/definitions/bar"} - }, - "definitions": { - "baz": { - "id": "folder/", - "definitions": { - "bar": { - "type": "array", - "items": {"$ref": "folderInteger.json"} - } - } - } - } - }, - "tests": [ - { - "description": "number is valid", - "data": {"list": [1]}, - "valid": true - }, - { - "description": "string is invalid", - "data": {"list": ["a"]}, - "valid": false - } - ] - }, - { - "description": "root ref in remote ref", - "schema": { - "id": "http://localhost:1234/object", - "type": "object", - "properties": { - "name": {"$ref": "name.json#/definitions/orNull"} - } - }, - "tests": [ - { - "description": "string is valid", - "data": { - "name": "foo" - }, - "valid": true - }, - { - "description": "null is valid", - "data": { - "name": null - }, - "valid": true - }, - { - "description": "object is invalid", - "data": { - "name": { - "name": null - } - }, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/required.json 
b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/required.json deleted file mode 100644 index 576ef45..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/required.json +++ /dev/null @@ -1,44 +0,0 @@ -[ - { - "description": "required validation", - "schema": { - "properties": { - "foo": {}, - "bar": {} - }, - "required": ["foo"] - }, - "tests": [ - { - "description": "present required property is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "non-present required property is invalid", - "data": {"bar": 1}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": 12, - "valid": true - } - ] - }, - { - "description": "required default validation", - "schema": { - "properties": { - "foo": {} - } - }, - "tests": [ - { - "description": "not required by default", - "data": {}, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/santhosh.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/santhosh.json deleted file mode 100644 index f44a979..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/santhosh.json +++ /dev/null @@ -1,130 +0,0 @@ -[ - { - "description": "multipleOf small value", - "schema": {"multipleOf": 0.01}, - "tests": [ - { - "description": "value is valid", - "data": 19.99, - "valid": true - } - ] - }, - { - "description": "enum with single value", - "schema": {"enum": [1]}, - "tests": [ - { - "description": "value is invalid", - "data": 5, - "valid": false - } - ] - }, - { - "description": "enum with array value", - "schema": {"enum": [[ "one", "two", "three"]]}, - "tests": [ - { - "description": "array size does not match", - "data": [ "one", "two" ], - "valid": false - } - ] - }, - { - "description": "enum with object value", - "schema": {"enum": [{ "one": 1, "two": 2, "three": 3}]}, - "tests": [ - { - "description": "object size does not match", - "data": { 
"one": 1 }, - "valid": false - } - ] - }, - { - "description": "enum with object having null property", - "schema": {"enum": [{ "foo": null }]}, - "tests": [ - { - "description": "valid", - "data": { "foo": null }, - "valid": true - }, - { - "description": "no props is invalid", - "data": { }, - "valid": false - }, - { - "description": "missing null prop is invalid", - "data": { "bar": null }, - "valid": false - } - ] - }, - { - "description": "enum with empty object", - "schema": {"enum": [{}]}, - "tests": [ - { - "description": "valid", - "data": {}, - "valid": true - }, - { - "description": "null prop is invalid", - "data": { "foo": null }, - "valid": false - } - ] - }, - { - "description": "$ref to $id", - "schema": { - "$schema": "http://json-schema.org/draft-04/schema#", - "type": "object", - "definitions": { - "name": { - "id": "#nm", - "type": "string" - }, - "email" : { - "id": "email.json", - "type": "string" - } - }, - "properties": { - "name": { "$ref": "#nm" }, - "email": { "$ref": "email.json"} - } - }, - "tests": [ - { - "description": "valid", - "data": { - "name": "Santhosh Kumar Tekuri", - "email": "santhosh.tekuri@gmail.com" - }, - "valid": true - }, - { - "description": "invalid1", - "data": { - "name": 0, - "email": "santhosh.tekuri@gmail.com" - }, - "valid": false - }, - { - "description": "valid", - "data": { - "name": "Santhosh Kumar Tekuri", - "email": 0 - }, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/type.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/type.json deleted file mode 100644 index 6129374..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/type.json +++ /dev/null @@ -1,345 +0,0 @@ -[ - { - "description": "integer type matches integers", - "schema": {"type": "integer"}, - "tests": [ - { - "description": "an integer is an integer", - "data": 1, - "valid": true - }, - { - "description": "a float is not an 
integer", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not an integer", - "data": "foo", - "valid": false - }, - { - "description": "a string is still not an integer, even if it looks like one", - "data": "1", - "valid": false - }, - { - "description": "an object is not an integer", - "data": {}, - "valid": false - }, - { - "description": "an array is not an integer", - "data": [], - "valid": false - }, - { - "description": "a boolean is not an integer", - "data": true, - "valid": false - }, - { - "description": "null is not an integer", - "data": null, - "valid": false - } - ] - }, - { - "description": "number type matches numbers", - "schema": {"type": "number"}, - "tests": [ - { - "description": "an integer is a number", - "data": 1, - "valid": true - }, - { - "description": "a float is a number", - "data": 1.1, - "valid": true - }, - { - "description": "a string is not a number", - "data": "foo", - "valid": false - }, - { - "description": "a string is still not a number, even if it looks like one", - "data": "1", - "valid": false - }, - { - "description": "an object is not a number", - "data": {}, - "valid": false - }, - { - "description": "an array is not a number", - "data": [], - "valid": false - }, - { - "description": "a boolean is not a number", - "data": true, - "valid": false - }, - { - "description": "null is not a number", - "data": null, - "valid": false - } - ] - }, - { - "description": "string type matches strings", - "schema": {"type": "string"}, - "tests": [ - { - "description": "1 is not a string", - "data": 1, - "valid": false - }, - { - "description": "a float is not a string", - "data": 1.1, - "valid": false - }, - { - "description": "a string is a string", - "data": "foo", - "valid": true - }, - { - "description": "a string is still a string, even if it looks like a number", - "data": "1", - "valid": true - }, - { - "description": "an object is not a string", - "data": {}, - "valid": false - }, - { - "description": 
"an array is not a string", - "data": [], - "valid": false - }, - { - "description": "a boolean is not a string", - "data": true, - "valid": false - }, - { - "description": "null is not a string", - "data": null, - "valid": false - } - ] - }, - { - "description": "object type matches objects", - "schema": {"type": "object"}, - "tests": [ - { - "description": "an integer is not an object", - "data": 1, - "valid": false - }, - { - "description": "a float is not an object", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not an object", - "data": "foo", - "valid": false - }, - { - "description": "an object is an object", - "data": {}, - "valid": true - }, - { - "description": "an array is not an object", - "data": [], - "valid": false - }, - { - "description": "a boolean is not an object", - "data": true, - "valid": false - }, - { - "description": "null is not an object", - "data": null, - "valid": false - } - ] - }, - { - "description": "array type matches arrays", - "schema": {"type": "array"}, - "tests": [ - { - "description": "an integer is not an array", - "data": 1, - "valid": false - }, - { - "description": "a float is not an array", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not an array", - "data": "foo", - "valid": false - }, - { - "description": "an object is not an array", - "data": {}, - "valid": false - }, - { - "description": "an array is an array", - "data": [], - "valid": true - }, - { - "description": "a boolean is not an array", - "data": true, - "valid": false - }, - { - "description": "null is not an array", - "data": null, - "valid": false - } - ] - }, - { - "description": "boolean type matches booleans", - "schema": {"type": "boolean"}, - "tests": [ - { - "description": "an integer is not a boolean", - "data": 1, - "valid": false - }, - { - "description": "a float is not a boolean", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not a boolean", - "data": "foo", - "valid": 
false - }, - { - "description": "an object is not a boolean", - "data": {}, - "valid": false - }, - { - "description": "an array is not a boolean", - "data": [], - "valid": false - }, - { - "description": "a boolean is a boolean", - "data": true, - "valid": true - }, - { - "description": "null is not a boolean", - "data": null, - "valid": false - } - ] - }, - { - "description": "null type matches only the null object", - "schema": {"type": "null"}, - "tests": [ - { - "description": "an integer is not null", - "data": 1, - "valid": false - }, - { - "description": "a float is not null", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not null", - "data": "foo", - "valid": false - }, - { - "description": "an object is not null", - "data": {}, - "valid": false - }, - { - "description": "an array is not null", - "data": [], - "valid": false - }, - { - "description": "a boolean is not null", - "data": true, - "valid": false - }, - { - "description": "null is null", - "data": null, - "valid": true - } - ] - }, - { - "description": "multiple types can be specified in an array", - "schema": {"type": ["integer", "string"]}, - "tests": [ - { - "description": "an integer is valid", - "data": 1, - "valid": true - }, - { - "description": "a string is valid", - "data": "foo", - "valid": true - }, - { - "description": "a float is invalid", - "data": 1.1, - "valid": false - }, - { - "description": "an object is invalid", - "data": {}, - "valid": false - }, - { - "description": "an array is invalid", - "data": [], - "valid": false - }, - { - "description": "a boolean is invalid", - "data": true, - "valid": false - }, - { - "description": "null is invalid", - "data": null, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/uniqueItems.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/uniqueItems.json deleted file mode 100644 index c1f4ab9..0000000 --- 
a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft4/uniqueItems.json +++ /dev/null @@ -1,79 +0,0 @@ -[ - { - "description": "uniqueItems validation", - "schema": {"uniqueItems": true}, - "tests": [ - { - "description": "unique array of integers is valid", - "data": [1, 2], - "valid": true - }, - { - "description": "non-unique array of integers is invalid", - "data": [1, 1], - "valid": false - }, - { - "description": "numbers are unique if mathematically unequal", - "data": [1.0, 1.00, 1], - "valid": false - }, - { - "description": "unique array of objects is valid", - "data": [{"foo": "bar"}, {"foo": "baz"}], - "valid": true - }, - { - "description": "non-unique array of objects is invalid", - "data": [{"foo": "bar"}, {"foo": "bar"}], - "valid": false - }, - { - "description": "unique array of nested objects is valid", - "data": [ - {"foo": {"bar" : {"baz" : true}}}, - {"foo": {"bar" : {"baz" : false}}} - ], - "valid": true - }, - { - "description": "non-unique array of nested objects is invalid", - "data": [ - {"foo": {"bar" : {"baz" : true}}}, - {"foo": {"bar" : {"baz" : true}}} - ], - "valid": false - }, - { - "description": "unique array of arrays is valid", - "data": [["foo"], ["bar"]], - "valid": true - }, - { - "description": "non-unique array of arrays is invalid", - "data": [["foo"], ["foo"]], - "valid": false - }, - { - "description": "1 and true are unique", - "data": [1, true], - "valid": true - }, - { - "description": "0 and false are unique", - "data": [0, false], - "valid": true - }, - { - "description": "unique heterogeneous types are valid", - "data": [{}, [1], true, null, 1], - "valid": true - }, - { - "description": "non-unique heterogeneous types are invalid", - "data": [{}, [1], true, null, {}, 1], - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6.json deleted file mode 100644 index 
5e22683..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6.json +++ /dev/null @@ -1,213 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-06/schema#", - "$id": "http://json-schema.org/draft-06/schema#", - "title": "Core schema meta-schema", - "definitions": { - "schemaArray": { - "type": "array", - "minItems": 1, - "items": { - "$ref": "#" - } - }, - "nonNegativeInteger": { - "type": "integer", - "minimum": 0 - }, - "nonNegativeIntegerDefault0": { - "allOf": [ - { - "$ref": "#/definitions/nonNegativeInteger" - }, - { - "default": 0 - } - ] - }, - "simpleTypes": { - "enum": [ - "array", - "boolean", - "integer", - "null", - "number", - "object", - "string" - ] - }, - "stringArray": { - "type": "array", - "items": { - "type": "string" - }, - "uniqueItems": true, - "default": [] - } - }, - "type": [ - "object", - "boolean" - ], - "properties": { - "$id": { - "type": "string", - "format": "uri-reference" - }, - "$schema": { - "type": "string", - "format": "uri" - }, - "$ref": { - "type": "string", - "format": "uri-reference" - }, - "title": { - "type": "string" - }, - "description": { - "type": "string" - }, - "default": {}, - "multipleOf": { - "type": "number", - "exclusiveMinimum": 0 - }, - "maximum": { - "type": "number" - }, - "exclusiveMaximum": { - "type": "number" - }, - "minimum": { - "type": "number" - }, - "exclusiveMinimum": { - "type": "number" - }, - "maxLength": { - "$ref": "#/definitions/nonNegativeInteger" - }, - "minLength": { - "$ref": "#/definitions/nonNegativeIntegerDefault0" - }, - "pattern": { - "type": "string", - "format": "regex" - }, - "additionalItems": { - "$ref": "#" - }, - "items": { - "anyOf": [ - { - "$ref": "#" - }, - { - "$ref": "#/definitions/schemaArray" - } - ], - "default": {} - }, - "maxItems": { - "$ref": "#/definitions/nonNegativeInteger" - }, - "minItems": { - "$ref": "#/definitions/nonNegativeIntegerDefault0" - }, - "uniqueItems": { - "type": "boolean", - "default": false - }, - "contains": { 
- "$ref": "#" - }, - "maxProperties": { - "$ref": "#/definitions/nonNegativeInteger" - }, - "minProperties": { - "$ref": "#/definitions/nonNegativeIntegerDefault0" - }, - "required": { - "$ref": "#/definitions/stringArray" - }, - "additionalProperties": { - "$ref": "#" - }, - "definitions": { - "type": "object", - "additionalProperties": { - "$ref": "#" - }, - "default": {} - }, - "properties": { - "type": "object", - "additionalProperties": { - "$ref": "#" - }, - "default": {} - }, - "patternProperties": { - "type": "object", - "regexProperties": true, - "additionalProperties": { - "$ref": "#" - }, - "default": {} - }, - "dependencies": { - "type": "object", - "additionalProperties": { - "anyOf": [ - { - "$ref": "#" - }, - { - "$ref": "#/definitions/stringArray" - } - ] - } - }, - "propertyNames": { - "$ref": "#" - }, - "const": {}, - "enum": { - "type": "array", - "minItems": 1, - "uniqueItems": true - }, - "type": { - "anyOf": [ - { - "$ref": "#/definitions/simpleTypes" - }, - { - "type": "array", - "items": { - "$ref": "#/definitions/simpleTypes" - }, - "minItems": 1, - "uniqueItems": true - } - ] - }, - "format": { - "type": "string", - "format": "format" - }, - "allOf": { - "$ref": "#/definitions/schemaArray" - }, - "anyOf": { - "$ref": "#/definitions/schemaArray" - }, - "oneOf": { - "$ref": "#/definitions/schemaArray" - }, - "not": { - "$ref": "#" - } - }, - "default": {} -} \ No newline at end of file diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/additionalItems.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/additionalItems.json deleted file mode 100644 index abecc57..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/additionalItems.json +++ /dev/null @@ -1,87 +0,0 @@ -[ - { - "description": "additionalItems as schema", - "schema": { - "items": [{}], - "additionalItems": {"type": "integer"} - }, - "tests": [ - { - "description": "additional items match schema", - 
"data": [ null, 2, 3, 4 ], - "valid": true - }, - { - "description": "additional items do not match schema", - "data": [ null, 2, 3, "foo" ], - "valid": false - } - ] - }, - { - "description": "items is schema, no additionalItems", - "schema": { - "items": {}, - "additionalItems": false - }, - "tests": [ - { - "description": "all items match schema", - "data": [ 1, 2, 3, 4, 5 ], - "valid": true - } - ] - }, - { - "description": "array of items with no additionalItems", - "schema": { - "items": [{}, {}, {}], - "additionalItems": false - }, - "tests": [ - { - "description": "fewer number of items present", - "data": [ 1, 2 ], - "valid": true - }, - { - "description": "equal number of items present", - "data": [ 1, 2, 3 ], - "valid": true - }, - { - "description": "additional items are not permitted", - "data": [ 1, 2, 3, 4 ], - "valid": false - } - ] - }, - { - "description": "additionalItems as false without items", - "schema": {"additionalItems": false}, - "tests": [ - { - "description": - "items defaults to empty schema so everything is valid", - "data": [ 1, 2, 3, 4, 5 ], - "valid": true - }, - { - "description": "ignores non-arrays", - "data": {"foo" : "bar"}, - "valid": true - } - ] - }, - { - "description": "additionalItems are allowed by default", - "schema": {"items": [{"type": "integer"}]}, - "tests": [ - { - "description": "only the first item is validated", - "data": [1, "foo", false], - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/additionalProperties.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/additionalProperties.json deleted file mode 100644 index 40831f9..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/additionalProperties.json +++ /dev/null @@ -1,88 +0,0 @@ -[ - { - "description": - "additionalProperties being false does not allow other properties", - "schema": { - "properties": {"foo": {}, "bar": {}}, - "patternProperties": { 
"^v": {} }, - "additionalProperties": false - }, - "tests": [ - { - "description": "no additional properties is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "an additional property is invalid", - "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": [1, 2, 3], - "valid": true - }, - { - "description": "patternProperties are not additional properties", - "data": {"foo":1, "vroom": 2}, - "valid": true - } - ] - }, - { - "description": - "additionalProperties allows a schema which should validate", - "schema": { - "properties": {"foo": {}, "bar": {}}, - "additionalProperties": {"type": "boolean"} - }, - "tests": [ - { - "description": "no additional properties is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "an additional valid property is valid", - "data": {"foo" : 1, "bar" : 2, "quux" : true}, - "valid": true - }, - { - "description": "an additional invalid property is invalid", - "data": {"foo" : 1, "bar" : 2, "quux" : 12}, - "valid": false - } - ] - }, - { - "description": - "additionalProperties can exist by itself", - "schema": { - "additionalProperties": {"type": "boolean"} - }, - "tests": [ - { - "description": "an additional valid property is valid", - "data": {"foo" : true}, - "valid": true - }, - { - "description": "an additional invalid property is invalid", - "data": {"foo" : 1}, - "valid": false - } - ] - }, - { - "description": "additionalProperties are allowed by default", - "schema": {"properties": {"foo": {}, "bar": {}}}, - "tests": [ - { - "description": "additional properties are allowed", - "data": {"foo": 1, "bar": 2, "quux": true}, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/allOf.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/allOf.json deleted file mode 100644 index 00c016c..0000000 --- 
a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/allOf.json +++ /dev/null @@ -1,145 +0,0 @@ -[ - { - "description": "allOf", - "schema": { - "allOf": [ - { - "properties": { - "bar": {"type": "integer"} - }, - "required": ["bar"] - }, - { - "properties": { - "foo": {"type": "string"} - }, - "required": ["foo"] - } - ] - }, - "tests": [ - { - "description": "allOf", - "data": {"foo": "baz", "bar": 2}, - "valid": true - }, - { - "description": "mismatch second", - "data": {"foo": "baz"}, - "valid": false - }, - { - "description": "mismatch first", - "data": {"bar": 2}, - "valid": false - }, - { - "description": "wrong type", - "data": {"foo": "baz", "bar": "quux"}, - "valid": false - } - ] - }, - { - "description": "allOf with base schema", - "schema": { - "properties": {"bar": {"type": "integer"}}, - "required": ["bar"], - "allOf" : [ - { - "properties": { - "foo": {"type": "string"} - }, - "required": ["foo"] - }, - { - "properties": { - "baz": {"type": "null"} - }, - "required": ["baz"] - } - ] - }, - "tests": [ - { - "description": "valid", - "data": {"foo": "quux", "bar": 2, "baz": null}, - "valid": true - }, - { - "description": "mismatch base schema", - "data": {"foo": "quux", "baz": null}, - "valid": false - }, - { - "description": "mismatch first allOf", - "data": {"bar": 2, "baz": null}, - "valid": false - }, - { - "description": "mismatch second allOf", - "data": {"foo": "quux", "bar": 2}, - "valid": false - }, - { - "description": "mismatch both", - "data": {"bar": 2}, - "valid": false - } - ] - }, - { - "description": "allOf simple types", - "schema": { - "allOf": [ - {"maximum": 30}, - {"minimum": 20} - ] - }, - "tests": [ - { - "description": "valid", - "data": 25, - "valid": true - }, - { - "description": "mismatch one", - "data": 35, - "valid": false - } - ] - }, - { - "description": "allOf with boolean schemas, all true", - "schema": {"allOf": [true, true]}, - "tests": [ - { - "description": "any value is valid", - "data": "foo", 
- "valid": true - } - ] - }, - { - "description": "allOf with boolean schemas, some false", - "schema": {"allOf": [true, false]}, - "tests": [ - { - "description": "any value is invalid", - "data": "foo", - "valid": false - } - ] - }, - { - "description": "allOf with boolean schemas, all false", - "schema": {"allOf": [false, false]}, - "tests": [ - { - "description": "any value is invalid", - "data": "foo", - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/anyOf.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/anyOf.json deleted file mode 100644 index 1ea31ed..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/anyOf.json +++ /dev/null @@ -1,101 +0,0 @@ -[ - { - "description": "anyOf", - "schema": { - "anyOf": [ - { - "type": "integer" - }, - { - "minimum": 2 - } - ] - }, - "tests": [ - { - "description": "first anyOf valid", - "data": 1, - "valid": true - }, - { - "description": "second anyOf valid", - "data": 2.5, - "valid": true - }, - { - "description": "both anyOf valid", - "data": 3, - "valid": true - }, - { - "description": "neither anyOf valid", - "data": 1.5, - "valid": false - } - ] - }, - { - "description": "anyOf with base schema", - "schema": { - "type": "string", - "anyOf" : [ - { - "maxLength": 2 - }, - { - "minLength": 4 - } - ] - }, - "tests": [ - { - "description": "mismatch base schema", - "data": 3, - "valid": false - }, - { - "description": "one anyOf valid", - "data": "foobar", - "valid": true - }, - { - "description": "both anyOf invalid", - "data": "foo", - "valid": false - } - ] - }, - { - "description": "anyOf with boolean schemas, all true", - "schema": {"anyOf": [true, true]}, - "tests": [ - { - "description": "any value is valid", - "data": "foo", - "valid": true - } - ] - }, - { - "description": "anyOf with boolean schemas, some true", - "schema": {"anyOf": [true, false]}, - "tests": [ - { - "description": "any value is 
valid", - "data": "foo", - "valid": true - } - ] - }, - { - "description": "anyOf with boolean schemas, all false", - "schema": {"anyOf": [false, false]}, - "tests": [ - { - "description": "any value is invalid", - "data": "foo", - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/boolean_schema.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/boolean_schema.json deleted file mode 100644 index 6d40f23..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/boolean_schema.json +++ /dev/null @@ -1,104 +0,0 @@ -[ - { - "description": "boolean schema 'true'", - "schema": true, - "tests": [ - { - "description": "number is valid", - "data": 1, - "valid": true - }, - { - "description": "string is valid", - "data": "foo", - "valid": true - }, - { - "description": "boolean true is valid", - "data": true, - "valid": true - }, - { - "description": "boolean false is valid", - "data": false, - "valid": true - }, - { - "description": "null is valid", - "data": null, - "valid": true - }, - { - "description": "object is valid", - "data": {"foo": "bar"}, - "valid": true - }, - { - "description": "empty object is valid", - "data": {}, - "valid": true - }, - { - "description": "array is valid", - "data": ["foo"], - "valid": true - }, - { - "description": "empty array is valid", - "data": [], - "valid": true - } - ] - }, - { - "description": "boolean schema 'false'", - "schema": false, - "tests": [ - { - "description": "number is invalid", - "data": 1, - "valid": false - }, - { - "description": "string is invalid", - "data": "foo", - "valid": false - }, - { - "description": "boolean true is invalid", - "data": true, - "valid": false - }, - { - "description": "boolean false is invalid", - "data": false, - "valid": false - }, - { - "description": "null is invalid", - "data": null, - "valid": false - }, - { - "description": "object is invalid", - "data": {"foo": "bar"}, - "valid": 
false - }, - { - "description": "empty object is invalid", - "data": {}, - "valid": false - }, - { - "description": "array is invalid", - "data": ["foo"], - "valid": false - }, - { - "description": "empty array is invalid", - "data": [], - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/const.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/const.json deleted file mode 100644 index 0e533e0..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/const.json +++ /dev/null @@ -1,65 +0,0 @@ -[ - { - "description": "const validation", - "schema": {"const": 2}, - "tests": [ - { - "description": "same value is valid", - "data": 2, - "valid": true - }, - { - "description": "another value is invalid", - "data": 5, - "valid": false - }, - { - "description": "another type is invalid", - "data": "a", - "valid": false - } - ] - }, - { - "description": "const with object", - "schema": {"const": {"foo": "bar", "baz": "bax"}}, - "tests": [ - { - "description": "same object is valid", - "data": {"foo": "bar", "baz": "bax"}, - "valid": true - }, - { - "description": "same object with different property order is valid", - "data": {"baz": "bax", "foo": "bar"}, - "valid": true - }, - { - "description": "another object is invalid", - "data": {"foo": "bar"}, - "valid": false - }, - { - "description": "another type is invalid", - "data": [1, 2], - "valid": false - } - ] - }, - { - "description": "const with null", - "schema": {"const": null}, - "tests": [ - { - "description": "null is valid", - "data": null, - "valid": true - }, - { - "description": "not null is invalid", - "data": 0, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/contains.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/contains.json deleted file mode 100644 index b7ae5a2..0000000 --- 
a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/contains.json +++ /dev/null @@ -1,95 +0,0 @@ -[ - { - "description": "contains keyword validation", - "schema": { - "contains": {"minimum": 5} - }, - "tests": [ - { - "description": "array with item matching schema (5) is valid", - "data": [3, 4, 5], - "valid": true - }, - { - "description": "array with item matching schema (6) is valid", - "data": [3, 4, 6], - "valid": true - }, - { - "description": "array with two items matching schema (5, 6) is valid", - "data": [3, 4, 5, 6], - "valid": true - }, - { - "description": "array without items matching schema is invalid", - "data": [2, 3, 4], - "valid": false - }, - { - "description": "empty array is invalid", - "data": [], - "valid": false - }, - { - "description": "not array is valid", - "data": {}, - "valid": true - } - ] - }, - { - "description": "contains keyword with const keyword", - "schema": { - "contains": { "const": 5 } - }, - "tests": [ - { - "description": "array with item 5 is valid", - "data": [3, 4, 5], - "valid": true - }, - { - "description": "array with two items 5 is valid", - "data": [3, 4, 5, 5], - "valid": true - }, - { - "description": "array without item 5 is invalid", - "data": [1, 2, 3, 4], - "valid": false - } - ] - }, - { - "description": "contains keyword with boolean schema true", - "schema": {"contains": true}, - "tests": [ - { - "description": "any non-empty array is valid", - "data": ["foo"], - "valid": true - }, - { - "description": "empty array is invalid", - "data": [], - "valid": false - } - ] - }, - { - "description": "contains keyword with boolean schema false", - "schema": {"contains": false}, - "tests": [ - { - "description": "any non-empty array is invalid", - "data": ["foo"], - "valid": false - }, - { - "description": "empty array is invalid", - "data": [], - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/default.json 
b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/default.json deleted file mode 100644 index 1762977..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/default.json +++ /dev/null @@ -1,49 +0,0 @@ -[ - { - "description": "invalid type for default", - "schema": { - "properties": { - "foo": { - "type": "integer", - "default": [] - } - } - }, - "tests": [ - { - "description": "valid when property is specified", - "data": {"foo": 13}, - "valid": true - }, - { - "description": "still valid when the invalid default is used", - "data": {}, - "valid": true - } - ] - }, - { - "description": "invalid string value for default", - "schema": { - "properties": { - "bar": { - "type": "string", - "minLength": 4, - "default": "bad" - } - } - }, - "tests": [ - { - "description": "valid when property is specified", - "data": {"bar": "good"}, - "valid": true - }, - { - "description": "still valid when the invalid default is used", - "data": {}, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/definitions.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/definitions.json deleted file mode 100644 index cf935a3..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/definitions.json +++ /dev/null @@ -1,32 +0,0 @@ -[ - { - "description": "valid definition", - "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, - "tests": [ - { - "description": "valid definition schema", - "data": { - "definitions": { - "foo": {"type": "integer"} - } - }, - "valid": true - } - ] - }, - { - "description": "invalid definition", - "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, - "tests": [ - { - "description": "invalid definition schema", - "data": { - "definitions": { - "foo": {"type": 1} - } - }, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/dependencies.json 
b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/dependencies.json deleted file mode 100644 index a9b3334..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/dependencies.json +++ /dev/null @@ -1,162 +0,0 @@ -[ - { - "description": "dependencies", - "schema": { - "dependencies": {"bar": ["foo"]} - }, - "tests": [ - { - "description": "neither", - "data": {}, - "valid": true - }, - { - "description": "nondependant", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "with dependency", - "data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "missing dependency", - "data": {"bar": 2}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": "foo", - "valid": true - } - ] - }, - { - "description": "dependencies with empty array", - "schema": { - "dependencies": {"bar": []} - }, - "tests": [ - { - "description": "empty object", - "data": {}, - "valid": true - }, - { - "description": "object with one property", - "data": {"bar": 2}, - "valid": true - } - ] - }, - { - "description": "multiple dependencies", - "schema": { - "dependencies": {"quux": ["foo", "bar"]} - }, - "tests": [ - { - "description": "neither", - "data": {}, - "valid": true - }, - { - "description": "nondependants", - "data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "with dependencies", - "data": {"foo": 1, "bar": 2, "quux": 3}, - "valid": true - }, - { - "description": "missing dependency", - "data": {"foo": 1, "quux": 2}, - "valid": false - }, - { - "description": "missing other dependency", - "data": {"bar": 1, "quux": 2}, - "valid": false - }, - { - "description": "missing both dependencies", - "data": {"quux": 1}, - "valid": false - } - ] - }, - { - "description": "multiple dependencies subschema", - "schema": { - "dependencies": { - "bar": { - "properties": { - "foo": {"type": "integer"}, - "bar": {"type": "integer"} - } - } - } - }, - "tests": [ - { - "description": "valid", - 
"data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "no dependency", - "data": {"foo": "quux"}, - "valid": true - }, - { - "description": "wrong type", - "data": {"foo": "quux", "bar": 2}, - "valid": false - }, - { - "description": "wrong type other", - "data": {"foo": 2, "bar": "quux"}, - "valid": false - }, - { - "description": "wrong type both", - "data": {"foo": "quux", "bar": "quux"}, - "valid": false - } - ] - }, - { - "description": "dependencies with boolean subschemas", - "schema": { - "dependencies": { - "foo": true, - "bar": false - } - }, - "tests": [ - { - "description": "object with property having schema true is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "object with property having schema false is invalid", - "data": {"bar": 2}, - "valid": false - }, - { - "description": "object with both properties is invalid", - "data": {"foo": 1, "bar": 2}, - "valid": false - }, - { - "description": "empty object is valid", - "data": {}, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/enum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/enum.json deleted file mode 100644 index f124436..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/enum.json +++ /dev/null @@ -1,72 +0,0 @@ -[ - { - "description": "simple enum validation", - "schema": {"enum": [1, 2, 3]}, - "tests": [ - { - "description": "one of the enum is valid", - "data": 1, - "valid": true - }, - { - "description": "something else is invalid", - "data": 4, - "valid": false - } - ] - }, - { - "description": "heterogeneous enum validation", - "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, - "tests": [ - { - "description": "one of the enum is valid", - "data": [], - "valid": true - }, - { - "description": "something else is invalid", - "data": null, - "valid": false - }, - { - "description": "objects are deep compared", - "data": {"foo": 
false}, - "valid": false - } - ] - }, - { - "description": "enums in properties", - "schema": { - "type":"object", - "properties": { - "foo": {"enum":["foo"]}, - "bar": {"enum":["bar"]} - }, - "required": ["bar"] - }, - "tests": [ - { - "description": "both properties are valid", - "data": {"foo":"foo", "bar":"bar"}, - "valid": true - }, - { - "description": "missing optional property is valid", - "data": {"bar":"bar"}, - "valid": true - }, - { - "description": "missing required property is invalid", - "data": {"foo":"foo"}, - "valid": false - }, - { - "description": "missing all properties is invalid", - "data": {}, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/exclusiveMaximum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/exclusiveMaximum.json deleted file mode 100644 index dc3cd70..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/exclusiveMaximum.json +++ /dev/null @@ -1,30 +0,0 @@ -[ - { - "description": "exclusiveMaximum validation", - "schema": { - "exclusiveMaximum": 3.0 - }, - "tests": [ - { - "description": "below the exclusiveMaximum is valid", - "data": 2.2, - "valid": true - }, - { - "description": "boundary point is invalid", - "data": 3.0, - "valid": false - }, - { - "description": "above the exclusiveMaximum is invalid", - "data": 3.5, - "valid": false - }, - { - "description": "ignores non-numbers", - "data": "x", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/exclusiveMinimum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/exclusiveMinimum.json deleted file mode 100644 index b38d7ec..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/exclusiveMinimum.json +++ /dev/null @@ -1,30 +0,0 @@ -[ - { - "description": "exclusiveMinimum validation", - "schema": { - "exclusiveMinimum": 1.1 - }, - "tests": [ - { - 
"description": "above the exclusiveMinimum is valid", - "data": 1.2, - "valid": true - }, - { - "description": "boundary point is invalid", - "data": 1.1, - "valid": false - }, - { - "description": "below the exclusiveMinimum is invalid", - "data": 0.6, - "valid": false - }, - { - "description": "ignores non-numbers", - "data": "x", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/items.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/items.json deleted file mode 100644 index 13a6a11..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/items.json +++ /dev/null @@ -1,133 +0,0 @@ -[ - { - "description": "a schema given for items", - "schema": { - "items": {"type": "integer"} - }, - "tests": [ - { - "description": "valid items", - "data": [ 1, 2, 3 ], - "valid": true - }, - { - "description": "wrong type of items", - "data": [1, "x"], - "valid": false - }, - { - "description": "ignores non-arrays", - "data": {"foo" : "bar"}, - "valid": true - }, - { - "description": "JavaScript pseudo-array is valid", - "data": { - "0": "invalid", - "length": 1 - }, - "valid": true - } - ] - }, - { - "description": "an array of schemas for items", - "schema": { - "items": [ - {"type": "integer"}, - {"type": "string"} - ] - }, - "tests": [ - { - "description": "correct types", - "data": [ 1, "foo" ], - "valid": true - }, - { - "description": "wrong types", - "data": [ "foo", 1 ], - "valid": false - }, - { - "description": "incomplete array of items", - "data": [ 1 ], - "valid": true - }, - { - "description": "array with additional items", - "data": [ 1, "foo", true ], - "valid": true - }, - { - "description": "empty array", - "data": [ ], - "valid": true - }, - { - "description": "JavaScript pseudo-array is valid", - "data": { - "0": "invalid", - "1": "valid", - "length": 2 - }, - "valid": true - } - ] - }, - { - "description": "items with boolean schema (true)", - "schema": 
{"items": true}, - "tests": [ - { - "description": "any array is valid", - "data": [ 1, "foo", true ], - "valid": true - }, - { - "description": "empty array is valid", - "data": [], - "valid": true - } - ] - }, - { - "description": "items with boolean schema (false)", - "schema": {"items": false}, - "tests": [ - { - "description": "any non-empty array is invalid", - "data": [ 1, "foo", true ], - "valid": false - }, - { - "description": "empty array is valid", - "data": [], - "valid": true - } - ] - }, - { - "description": "items with boolean schemas", - "schema": { - "items": [true, false] - }, - "tests": [ - { - "description": "array with one item is valid", - "data": [ 1 ], - "valid": true - }, - { - "description": "array with two items is invalid", - "data": [ 1, "foo" ], - "valid": false - }, - { - "description": "empty array is valid", - "data": [], - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maxItems.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maxItems.json deleted file mode 100644 index 3b53a6b..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maxItems.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "maxItems validation", - "schema": {"maxItems": 2}, - "tests": [ - { - "description": "shorter is valid", - "data": [1], - "valid": true - }, - { - "description": "exact length is valid", - "data": [1, 2], - "valid": true - }, - { - "description": "too long is invalid", - "data": [1, 2, 3], - "valid": false - }, - { - "description": "ignores non-arrays", - "data": "foobar", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maxLength.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maxLength.json deleted file mode 100644 index 811d35b..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maxLength.json +++ /dev/null @@ 
-1,33 +0,0 @@ -[ - { - "description": "maxLength validation", - "schema": {"maxLength": 2}, - "tests": [ - { - "description": "shorter is valid", - "data": "f", - "valid": true - }, - { - "description": "exact length is valid", - "data": "fo", - "valid": true - }, - { - "description": "too long is invalid", - "data": "foo", - "valid": false - }, - { - "description": "ignores non-strings", - "data": 100, - "valid": true - }, - { - "description": "two supplementary Unicode code points is long enough", - "data": "\uD83D\uDCA9\uD83D\uDCA9", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maxProperties.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maxProperties.json deleted file mode 100644 index d282446..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maxProperties.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "maxProperties validation", - "schema": {"maxProperties": 2}, - "tests": [ - { - "description": "shorter is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "exact length is valid", - "data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "too long is invalid", - "data": {"foo": 1, "bar": 2, "baz": 3}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": "foobar", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maximum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maximum.json deleted file mode 100644 index 8150984..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/maximum.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "maximum validation", - "schema": {"maximum": 3.0}, - "tests": [ - { - "description": "below the maximum is valid", - "data": 2.6, - "valid": true - }, - { - "description": "boundary point is valid", - "data": 3.0, - 
"valid": true - }, - { - "description": "above the maximum is invalid", - "data": 3.5, - "valid": false - }, - { - "description": "ignores non-numbers", - "data": "x", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minItems.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minItems.json deleted file mode 100644 index ed51188..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minItems.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "minItems validation", - "schema": {"minItems": 1}, - "tests": [ - { - "description": "longer is valid", - "data": [1, 2], - "valid": true - }, - { - "description": "exact length is valid", - "data": [1], - "valid": true - }, - { - "description": "too short is invalid", - "data": [], - "valid": false - }, - { - "description": "ignores non-arrays", - "data": "", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minLength.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minLength.json deleted file mode 100644 index 3f09158..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minLength.json +++ /dev/null @@ -1,33 +0,0 @@ -[ - { - "description": "minLength validation", - "schema": {"minLength": 2}, - "tests": [ - { - "description": "longer is valid", - "data": "foo", - "valid": true - }, - { - "description": "exact length is valid", - "data": "fo", - "valid": true - }, - { - "description": "too short is invalid", - "data": "f", - "valid": false - }, - { - "description": "ignores non-strings", - "data": 1, - "valid": true - }, - { - "description": "one supplementary Unicode code point is not long enough", - "data": "\uD83D\uDCA9", - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minProperties.json 
b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minProperties.json deleted file mode 100644 index a72c7d2..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minProperties.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "minProperties validation", - "schema": {"minProperties": 1}, - "tests": [ - { - "description": "longer is valid", - "data": {"foo": 1, "bar": 2}, - "valid": true - }, - { - "description": "exact length is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "too short is invalid", - "data": {}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": "", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minimum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minimum.json deleted file mode 100644 index bd1e95b..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/minimum.json +++ /dev/null @@ -1,28 +0,0 @@ -[ - { - "description": "minimum validation", - "schema": {"minimum": 1.1}, - "tests": [ - { - "description": "above the minimum is valid", - "data": 2.6, - "valid": true - }, - { - "description": "boundary point is valid", - "data": 1.1, - "valid": true - }, - { - "description": "below the minimum is invalid", - "data": 0.6, - "valid": false - }, - { - "description": "ignores non-numbers", - "data": "x", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/multipleOf.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/multipleOf.json deleted file mode 100644 index ca3b761..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/multipleOf.json +++ /dev/null @@ -1,60 +0,0 @@ -[ - { - "description": "by int", - "schema": {"multipleOf": 2}, - "tests": [ - { - "description": "int by int", - "data": 10, - "valid": true - }, - { - "description": 
"int by int fail", - "data": 7, - "valid": false - }, - { - "description": "ignores non-numbers", - "data": "foo", - "valid": true - } - ] - }, - { - "description": "by number", - "schema": {"multipleOf": 1.5}, - "tests": [ - { - "description": "zero is multiple of anything", - "data": 0, - "valid": true - }, - { - "description": "4.5 is multiple of 1.5", - "data": 4.5, - "valid": true - }, - { - "description": "35 is not multiple of 1.5", - "data": 35, - "valid": false - } - ] - }, - { - "description": "by small number", - "schema": {"multipleOf": 0.0001}, - "tests": [ - { - "description": "0.0075 is multiple of 0.0001", - "data": 0.0075, - "valid": true - }, - { - "description": "0.00751 is not multiple of 0.0001", - "data": 0.00751, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/not.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/not.json deleted file mode 100644 index 98de0ed..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/not.json +++ /dev/null @@ -1,117 +0,0 @@ -[ - { - "description": "not", - "schema": { - "not": {"type": "integer"} - }, - "tests": [ - { - "description": "allowed", - "data": "foo", - "valid": true - }, - { - "description": "disallowed", - "data": 1, - "valid": false - } - ] - }, - { - "description": "not multiple types", - "schema": { - "not": {"type": ["integer", "boolean"]} - }, - "tests": [ - { - "description": "valid", - "data": "foo", - "valid": true - }, - { - "description": "mismatch", - "data": 1, - "valid": false - }, - { - "description": "other mismatch", - "data": true, - "valid": false - } - ] - }, - { - "description": "not more complex schema", - "schema": { - "not": { - "type": "object", - "properties": { - "foo": { - "type": "string" - } - } - } - }, - "tests": [ - { - "description": "match", - "data": 1, - "valid": true - }, - { - "description": "other match", - "data": {"foo": 1}, - "valid": true - }, - { 
- "description": "mismatch", - "data": {"foo": "bar"}, - "valid": false - } - ] - }, - { - "description": "forbidden property", - "schema": { - "properties": { - "foo": { - "not": {} - } - } - }, - "tests": [ - { - "description": "property present", - "data": {"foo": 1, "bar": 2}, - "valid": false - }, - { - "description": "property absent", - "data": {"bar": 1, "baz": 2}, - "valid": true - } - ] - }, - { - "description": "not with boolean schema true", - "schema": {"not": true}, - "tests": [ - { - "description": "any value is invalid", - "data": "foo", - "valid": false - } - ] - }, - { - "description": "not with boolean schema false", - "schema": {"not": false}, - "tests": [ - { - "description": "any value is valid", - "data": "foo", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/oneOf.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/oneOf.json deleted file mode 100644 index df07e0b..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/oneOf.json +++ /dev/null @@ -1,112 +0,0 @@ -[ - { - "description": "oneOf", - "schema": { - "oneOf": [ - { - "type": "integer" - }, - { - "minimum": 2 - } - ] - }, - "tests": [ - { - "description": "first oneOf valid", - "data": 1, - "valid": true - }, - { - "description": "second oneOf valid", - "data": 2.5, - "valid": true - }, - { - "description": "both oneOf valid", - "data": 3, - "valid": false - }, - { - "description": "neither oneOf valid", - "data": 1.5, - "valid": false - } - ] - }, - { - "description": "oneOf with base schema", - "schema": { - "type": "string", - "oneOf" : [ - { - "minLength": 2 - }, - { - "maxLength": 4 - } - ] - }, - "tests": [ - { - "description": "mismatch base schema", - "data": 3, - "valid": false - }, - { - "description": "one oneOf valid", - "data": "foobar", - "valid": true - }, - { - "description": "both oneOf valid", - "data": "foo", - "valid": false - } - ] - }, - { - "description": 
"oneOf with boolean schemas, all true", - "schema": {"oneOf": [true, true, true]}, - "tests": [ - { - "description": "any value is invalid", - "data": "foo", - "valid": false - } - ] - }, - { - "description": "oneOf with boolean schemas, one true", - "schema": {"oneOf": [true, false, false]}, - "tests": [ - { - "description": "any value is valid", - "data": "foo", - "valid": true - } - ] - }, - { - "description": "oneOf with boolean schemas, more than one true", - "schema": {"oneOf": [true, true, false]}, - "tests": [ - { - "description": "any value is invalid", - "data": "foo", - "valid": false - } - ] - }, - { - "description": "oneOf with boolean schemas, all false", - "schema": {"oneOf": [false, false, false]}, - "tests": [ - { - "description": "any value is invalid", - "data": "foo", - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/bignum.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/bignum.json deleted file mode 100644 index fac275e..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/bignum.json +++ /dev/null @@ -1,105 +0,0 @@ -[ - { - "description": "integer", - "schema": {"type": "integer"}, - "tests": [ - { - "description": "a bignum is an integer", - "data": 12345678910111213141516171819202122232425262728293031, - "valid": true - } - ] - }, - { - "description": "number", - "schema": {"type": "number"}, - "tests": [ - { - "description": "a bignum is a number", - "data": 98249283749234923498293171823948729348710298301928331, - "valid": true - } - ] - }, - { - "description": "integer", - "schema": {"type": "integer"}, - "tests": [ - { - "description": "a negative bignum is an integer", - "data": -12345678910111213141516171819202122232425262728293031, - "valid": true - } - ] - }, - { - "description": "number", - "schema": {"type": "number"}, - "tests": [ - { - "description": "a negative bignum is a number", - "data": 
-98249283749234923498293171823948729348710298301928331, - "valid": true - } - ] - }, - { - "description": "string", - "schema": {"type": "string"}, - "tests": [ - { - "description": "a bignum is not a string", - "data": 98249283749234923498293171823948729348710298301928331, - "valid": false - } - ] - }, - { - "description": "integer comparison", - "schema": {"maximum": 18446744073709551615}, - "tests": [ - { - "description": "comparison works for high numbers", - "data": 18446744073709551600, - "valid": true - } - ] - }, - { - "description": "float comparison with high precision", - "schema": { - "exclusiveMaximum": 972783798187987123879878123.18878137 - }, - "tests": [ - { - "description": "comparison works for high numbers", - "data": 972783798187987123879878123.188781371, - "valid": false - } - ] - }, - { - "description": "integer comparison", - "schema": {"minimum": -18446744073709551615}, - "tests": [ - { - "description": "comparison works for very negative numbers", - "data": -18446744073709551600, - "valid": true - } - ] - }, - { - "description": "float comparison with high precision on negative numbers", - "schema": { - "exclusiveMinimum": -972783798187987123879878123.18878137 - }, - "tests": [ - { - "description": "comparison works for very negative numbers", - "data": -972783798187987123879878123.188781371, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/ecmascript-regex.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/ecmascript-regex.json deleted file mode 100644 index 08dc936..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/ecmascript-regex.json +++ /dev/null @@ -1,13 +0,0 @@ -[ - { - "description": "ECMA 262 regex non-compliance", - "schema": { "format": "regex" }, - "tests": [ - { - "description": "ECMA 262 has no support for \\Z anchor from .NET", - "data": "^\\S(|(.|\\n)*\\S)\\Z", - "valid": false - } - ] 
- } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/format.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/format.json deleted file mode 100644 index e04fb88..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/format.json +++ /dev/null @@ -1,261 +0,0 @@ -[ - { - "description": "validation of date-time strings", - "schema": {"format": "date-time"}, - "tests": [ - { - "description": "a valid date-time string", - "data": "1963-06-19T08:30:06.283185Z", - "valid": true - }, - { - "description": "an invalid date-time string", - "data": "06/19/1963 08:30:06 PST", - "valid": false - }, - { - "description": "only RFC3339 not all of ISO 8601 are valid", - "data": "2013-350T01:01:01", - "valid": false - } - ] - }, - { - "description": "validation of URIs", - "schema": {"format": "uri"}, - "tests": [ - { - "description": "a valid URI", - "data": "http://foo.bar/?baz=qux#quux", - "valid": true - }, - { - "description": "an invalid protocol-relative URI Reference", - "data": "//foo.bar/?baz=qux#quux", - "valid": false - }, - { - "description": "an invalid relative URI Reference", - "data": "/abc", - "valid": false - }, - { - "description": "an invalid URI", - "data": "\\\\WINDOWS\\fileshare", - "valid": false - }, - { - "description": "an invalid URI though valid URI reference", - "data": "abc", - "valid": false - } - ] - }, - { - "description": "validation of URI References", - "schema": {"format": "uri-reference"}, - "tests": [ - { - "description": "a valid URI", - "data": "http://foo.bar/?baz=qux#quux", - "valid": true - }, - { - "description": "a valid protocol-relative URI Reference", - "data": "//foo.bar/?baz=qux#quux", - "valid": true - }, - { - "description": "a valid relative URI Reference", - "data": "/abc", - "valid": true - }, - { - "description": "an invalid URI Reference", - "data": "\\\\WINDOWS\\fileshare", - "TODO:valid": false, - "valid": true - }, 
- { - "description": "a valid URI Reference", - "data": "abc", - "valid": true - }, - { - "description": "a valid URI fragment", - "data": "#fragment", - "valid": true - }, - { - "description": "an invalid URI fragment", - "data": "#frag\\ment", - "TODO:valid": false, - "valid": true - } - ] - }, - { - "description": "format: uri-template", - "schema": { - "format": "uri-template" - }, - "tests": [ - { - "description": "a valid uri-template", - "data": "http://example.com/dictionary/{term:1}/{term}", - "valid": true - }, - { - "description": "an invalid uri-template", - "data": "http://example.com/dictionary/{term:1}/{term", - "TODO:valid": false, - "valid": true - }, - { - "description": "a valid uri-template without variables", - "data": "http://example.com/dictionary", - "valid": true - }, - { - "description": "a valid relative uri-template", - "data": "dictionary/{term:1}/{term}", - "valid": true - } - ] - }, - { - "description": "validation of e-mail addresses", - "schema": {"format": "email"}, - "tests": [ - { - "description": "a valid e-mail address", - "data": "joe.bloggs@example.com", - "valid": true - }, - { - "description": "an invalid e-mail address", - "data": "2962", - "valid": false - } - ] - }, - { - "description": "validation of IP addresses", - "schema": {"format": "ipv4"}, - "tests": [ - { - "description": "a valid IP address", - "data": "192.168.0.1", - "valid": true - }, - { - "description": "an IP address with too many components", - "data": "127.0.0.0.1", - "valid": false - }, - { - "description": "an IP address with out-of-range values", - "data": "256.256.256.256", - "valid": false - }, - { - "description": "an IP address without 4 components", - "data": "127.0", - "valid": false - }, - { - "description": "an IP address as an integer", - "data": "0x7f000001", - "valid": false - } - ] - }, - { - "description": "validation of IPv6 addresses", - "schema": {"format": "ipv6"}, - "tests": [ - { - "description": "a valid IPv6 address", - "data": 
"::1", - "valid": true - }, - { - "description": "an IPv6 address with out-of-range values", - "data": "12345::", - "valid": false - }, - { - "description": "an IPv6 address with too many components", - "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", - "valid": false - }, - { - "description": "an IPv6 address containing illegal characters", - "data": "::laptop", - "valid": false - } - ] - }, - { - "description": "validation of host names", - "schema": {"format": "hostname"}, - "tests": [ - { - "description": "a valid host name", - "data": "www.example.com", - "valid": true - }, - { - "description": "a host name starting with an illegal character", - "data": "-a-host-name-that-starts-with--", - "valid": false - }, - { - "description": "a host name containing illegal characters", - "data": "not_a_valid_host_name", - "valid": false - }, - { - "description": "a host name with a component too long", - "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", - "valid": false - } - ] - }, - { - "description": "validation of JSON-pointers", - "schema": {"format": "json-pointer"}, - "tests": [ - { - "description": "a valid JSON-pointer", - "data": "/foo/bar~0/baz~1/%a", - "valid": true - }, - { - "description": "empty string is valid", - "data": "", - "valid": true - }, - { - "description": "/ is valid", - "data": "/", - "valid": true - }, - { - "description": "not a valid JSON-pointer (~ not escaped)", - "data": "/foo/bar~", - "valid": false - }, - { - "description": "valid JSON-pointer with empty segment", - "data": "/foo//bar", - "valid": true - }, - { - "description": "valid JSON-pointer with the last empty segment", - "data": "/foo/bar/", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/zeroTerminatedFloats.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/zeroTerminatedFloats.json deleted file mode 100644 index aa3355c..0000000 
--- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/optional/zeroTerminatedFloats.json +++ /dev/null @@ -1,15 +0,0 @@ -[ - { - "description": "some languages do not distinguish between different types of numeric value", - "schema": { - "type": "integer" - }, - "tests": [ - { - "description": "a float without fractional part is not an integer", - "data": 1.0, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/pattern.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/pattern.json deleted file mode 100644 index 25e7299..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/pattern.json +++ /dev/null @@ -1,34 +0,0 @@ -[ - { - "description": "pattern validation", - "schema": {"pattern": "^a*$"}, - "tests": [ - { - "description": "a matching pattern is valid", - "data": "aaa", - "valid": true - }, - { - "description": "a non-matching pattern is invalid", - "data": "abc", - "valid": false - }, - { - "description": "ignores non-strings", - "data": true, - "valid": true - } - ] - }, - { - "description": "pattern is not anchored", - "schema": {"pattern": "a+"}, - "tests": [ - { - "description": "matches a substring", - "data": "xxaayy", - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/patternProperties.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/patternProperties.json deleted file mode 100644 index b1f2d35..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/patternProperties.json +++ /dev/null @@ -1,141 +0,0 @@ -[ - { - "description": - "patternProperties validates properties matching a regex", - "schema": { - "patternProperties": { - "f.*o": {"type": "integer"} - } - }, - "tests": [ - { - "description": "a single valid match is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "multiple valid matches is 
valid", - "data": {"foo": 1, "foooooo" : 2}, - "valid": true - }, - { - "description": "a single invalid match is invalid", - "data": {"foo": "bar", "fooooo": 2}, - "valid": false - }, - { - "description": "multiple invalid matches is invalid", - "data": {"foo": "bar", "foooooo" : "baz"}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": 12, - "valid": true - } - ] - }, - { - "description": "multiple simultaneous patternProperties are validated", - "schema": { - "patternProperties": { - "a*": {"type": "integer"}, - "aaa*": {"maximum": 20} - } - }, - "tests": [ - { - "description": "a single valid match is valid", - "data": {"a": 21}, - "valid": true - }, - { - "description": "a simultaneous match is valid", - "data": {"aaaa": 18}, - "valid": true - }, - { - "description": "multiple matches is valid", - "data": {"a": 21, "aaaa": 18}, - "valid": true - }, - { - "description": "an invalid due to one is invalid", - "data": {"a": "bar"}, - "valid": false - }, - { - "description": "an invalid due to the other is invalid", - "data": {"aaaa": 31}, - "valid": false - }, - { - "description": "an invalid due to both is invalid", - "data": {"aaa": "foo", "aaaa": 31}, - "valid": false - } - ] - }, - { - "description": "regexes are not anchored by default and are case sensitive", - "schema": { - "patternProperties": { - "[0-9]{2,}": { "type": "boolean" }, - "X_": { "type": "string" } - } - }, - "tests": [ - { - "description": "non recognized members are ignored", - "data": { "answer 1": "42" }, - "valid": true - }, - { - "description": "recognized members are accounted for", - "data": { "a31b": null }, - "valid": false - }, - { - "description": "regexes are case sensitive", - "data": { "a_x_3": 3 }, - "valid": true - }, - { - "description": "regexes are case sensitive, 2", - "data": { "a_X_3": 3 }, - "valid": false - } - ] - }, - { - "description": "patternProperties with boolean schemas", - "schema": { - "patternProperties": { - "f.*": true, - "b.*": 
false - } - }, - "tests": [ - { - "description": "object with property matching schema true is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "object with property matching schema false is invalid", - "data": {"bar": 2}, - "valid": false - }, - { - "description": "object with both properties is invalid", - "data": {"foo": 1, "bar": 2}, - "valid": false - }, - { - "description": "empty object is valid", - "data": {}, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/properties.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/properties.json deleted file mode 100644 index d56a960..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/properties.json +++ /dev/null @@ -1,123 +0,0 @@ -[ - { - "description": "object properties validation", - "schema": { - "properties": { - "foo": {"type": "integer"}, - "bar": {"type": "string"} - } - }, - "tests": [ - { - "description": "both properties present and valid is valid", - "data": {"foo": 1, "bar": "baz"}, - "valid": true - }, - { - "description": "one property invalid is invalid", - "data": {"foo": 1, "bar": {}}, - "valid": false - }, - { - "description": "both properties invalid is invalid", - "data": {"foo": [], "bar": {}}, - "valid": false - }, - { - "description": "doesn't invalidate other properties", - "data": {"quux": []}, - "valid": true - }, - { - "description": "ignores non-objects", - "data": [], - "valid": true - } - ] - }, - { - "description": - "properties, patternProperties, additionalProperties interaction", - "schema": { - "properties": { - "foo": {"type": "array", "maxItems": 3}, - "bar": {"type": "array"} - }, - "patternProperties": {"f.o": {"minItems": 2}}, - "additionalProperties": {"type": "integer"} - }, - "tests": [ - { - "description": "property validates property", - "data": {"foo": [1, 2]}, - "valid": true - }, - { - "description": "property invalidates property", - 
"data": {"foo": [1, 2, 3, 4]}, - "valid": false - }, - { - "description": "patternProperty invalidates property", - "data": {"foo": []}, - "valid": false - }, - { - "description": "patternProperty validates nonproperty", - "data": {"fxo": [1, 2]}, - "valid": true - }, - { - "description": "patternProperty invalidates nonproperty", - "data": {"fxo": []}, - "valid": false - }, - { - "description": "additionalProperty ignores property", - "data": {"bar": []}, - "valid": true - }, - { - "description": "additionalProperty validates others", - "data": {"quux": 3}, - "valid": true - }, - { - "description": "additionalProperty invalidates others", - "data": {"quux": "foo"}, - "valid": false - } - ] - }, - { - "description": "properties with boolean schema", - "schema": { - "properties": { - "foo": true, - "bar": false - } - }, - "tests": [ - { - "description": "no property present is valid", - "data": {}, - "valid": true - }, - { - "description": "only 'true' property present is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "only 'false' property present is invalid", - "data": {"bar": 2}, - "valid": false - }, - { - "description": "both properties present is invalid", - "data": {"foo": 1, "bar": 2}, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/propertyNames.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/propertyNames.json deleted file mode 100644 index 2804b41..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/propertyNames.json +++ /dev/null @@ -1,68 +0,0 @@ -[ - { - "description": "propertyNames validation", - "schema": { - "propertyNames": {"maxLength": 3} - }, - "tests": [ - { - "description": "all property names valid", - "data": { - "f": {}, - "foo": {} - }, - "valid": true - }, - { - "description": "some property names invalid", - "data": { - "foo": {}, - "foobar": {} - }, - "valid": false - }, - { - "description": 
"object without properties is valid", - "data": {}, - "valid": true - }, - { - "description": "non-object is valid", - "data": [], - "valid": true - } - ] - }, - { - "description": "propertyNames with boolean schema true", - "schema": {"propertyNames": true}, - "tests": [ - { - "description": "object with any properties is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "empty object is valid", - "data": {}, - "valid": true - } - ] - }, - { - "description": "propertyNames with boolean schema false", - "schema": {"propertyNames": false}, - "tests": [ - { - "description": "object with any properties is invalid", - "data": {"foo": 1}, - "valid": false - }, - { - "description": "empty object is valid", - "data": {}, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/ref.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/ref.json deleted file mode 100644 index 6dcf6cd..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/ref.json +++ /dev/null @@ -1,332 +0,0 @@ -[ - { - "description": "root pointer ref", - "schema": { - "properties": { - "foo": {"$ref": "#"} - }, - "additionalProperties": false - }, - "tests": [ - { - "description": "match", - "data": {"foo": false}, - "valid": true - }, - { - "description": "recursive match", - "data": {"foo": {"foo": false}}, - "valid": true - }, - { - "description": "mismatch", - "data": {"bar": false}, - "valid": false - }, - { - "description": "recursive mismatch", - "data": {"foo": {"bar": false}}, - "valid": false - } - ] - }, - { - "description": "relative pointer ref to object", - "schema": { - "properties": { - "foo": {"type": "integer"}, - "bar": {"$ref": "#/properties/foo"} - } - }, - "tests": [ - { - "description": "match", - "data": {"bar": 3}, - "valid": true - }, - { - "description": "mismatch", - "data": {"bar": true}, - "valid": false - } - ] - }, - { - "description": "relative pointer ref 
to array", - "schema": { - "items": [ - {"type": "integer"}, - {"$ref": "#/items/0"} - ] - }, - "tests": [ - { - "description": "match array", - "data": [1, 2], - "valid": true - }, - { - "description": "mismatch array", - "data": [1, "foo"], - "valid": false - } - ] - }, - { - "description": "escaped pointer ref", - "schema": { - "tilda~field": {"type": "integer"}, - "slash/field": {"type": "integer"}, - "percent%field": {"type": "integer"}, - "properties": { - "tilda": {"$ref": "#/tilda~0field"}, - "slash": {"$ref": "#/slash~1field"}, - "percent": {"$ref": "#/percent%25field"} - } - }, - "tests": [ - { - "description": "slash invalid", - "data": {"slash": "aoeu"}, - "valid": false - }, - { - "description": "tilda invalid", - "data": {"tilda": "aoeu"}, - "valid": false - }, - { - "description": "percent invalid", - "data": {"percent": "aoeu"}, - "valid": false - }, - { - "description": "slash valid", - "data": {"slash": 123}, - "valid": true - }, - { - "description": "tilda valid", - "data": {"tilda": 123}, - "valid": true - }, - { - "description": "percent valid", - "data": {"percent": 123}, - "valid": true - } - ] - }, - { - "description": "nested refs", - "schema": { - "definitions": { - "a": {"type": "integer"}, - "b": {"$ref": "#/definitions/a"}, - "c": {"$ref": "#/definitions/b"} - }, - "$ref": "#/definitions/c" - }, - "tests": [ - { - "description": "nested ref valid", - "data": 5, - "valid": true - }, - { - "description": "nested ref invalid", - "data": "a", - "valid": false - } - ] - }, - { - "description": "ref overrides any sibling keywords", - "schema": { - "definitions": { - "reffed": { - "type": "array" - } - }, - "properties": { - "foo": { - "$ref": "#/definitions/reffed", - "maxItems": 2 - } - } - }, - "tests": [ - { - "description": "ref valid", - "data": { "foo": [] }, - "valid": true - }, - { - "description": "ref valid, maxItems ignored", - "data": { "foo": [ 1, 2, 3] }, - "valid": true - }, - { - "description": "ref invalid", - "data": { 
"foo": "string" }, - "valid": false - } - ] - }, - { - "description": "remote ref, containing refs itself", - "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, - "tests": [ - { - "description": "remote ref valid", - "data": {"minLength": 1}, - "valid": true - }, - { - "description": "remote ref invalid", - "data": {"minLength": -1}, - "valid": false - } - ] - }, - { - "description": "property named $ref that is not a reference", - "schema": { - "properties": { - "$ref": {"type": "string"} - } - }, - "tests": [ - { - "description": "property named $ref valid", - "data": {"$ref": "a"}, - "valid": true - }, - { - "description": "property named $ref invalid", - "data": {"$ref": 2}, - "valid": false - } - ] - }, - { - "description": "$ref to boolean schema true", - "schema": { - "$ref": "#/definitions/bool", - "definitions": { - "bool": true - } - }, - "tests": [ - { - "description": "any value is valid", - "data": "foo", - "valid": true - } - ] - }, - { - "description": "$ref to boolean schema false", - "schema": { - "$ref": "#/definitions/bool", - "definitions": { - "bool": false - } - }, - "tests": [ - { - "description": "any value is invalid", - "data": "foo", - "valid": false - } - ] - }, - { - "description": "Recursive references between schemas", - "schema": { - "$id": "http://localhost:1234/tree", - "description": "tree of nodes", - "type": "object", - "properties": { - "meta": {"type": "string"}, - "nodes": { - "type": "array", - "items": {"$ref": "node"} - } - }, - "required": ["meta", "nodes"], - "definitions": { - "node": { - "$id": "http://localhost:1234/node", - "description": "node", - "type": "object", - "properties": { - "value": {"type": "number"}, - "subtree": {"$ref": "tree"} - }, - "required": ["value"] - } - } - }, - "tests": [ - { - "description": "valid tree", - "data": { - "meta": "root", - "nodes": [ - { - "value": 1, - "subtree": { - "meta": "child", - "nodes": [ - {"value": 1.1}, - {"value": 1.2} - ] - } - }, - { - "value": 2, - 
"subtree": { - "meta": "child", - "nodes": [ - {"value": 2.1}, - {"value": 2.2} - ] - } - } - ] - }, - "valid": true - }, - { - "description": "invalid tree", - "data": { - "meta": "root", - "nodes": [ - { - "value": 1, - "subtree": { - "meta": "child", - "nodes": [ - {"value": "string is invalid"}, - {"value": 1.2} - ] - } - }, - { - "value": 2, - "subtree": { - "meta": "child", - "nodes": [ - {"value": 2.1}, - {"value": 2.2} - ] - } - } - ] - }, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/refRemote.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/refRemote.json deleted file mode 100644 index 819d326..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/refRemote.json +++ /dev/null @@ -1,171 +0,0 @@ -[ - { - "description": "remote ref", - "schema": {"$ref": "http://localhost:1234/integer.json"}, - "tests": [ - { - "description": "remote ref valid", - "data": 1, - "valid": true - }, - { - "description": "remote ref invalid", - "data": "a", - "valid": false - } - ] - }, - { - "description": "fragment within remote ref", - "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"}, - "tests": [ - { - "description": "remote fragment valid", - "data": 1, - "valid": true - }, - { - "description": "remote fragment invalid", - "data": "a", - "valid": false - } - ] - }, - { - "description": "ref within remote ref", - "schema": { - "$ref": "http://localhost:1234/subSchemas.json#/refToInteger" - }, - "tests": [ - { - "description": "ref within ref valid", - "data": 1, - "valid": true - }, - { - "description": "ref within ref invalid", - "data": "a", - "valid": false - } - ] - }, - { - "description": "base URI change", - "schema": { - "$id": "http://localhost:1234/", - "items": { - "$id": "folder/", - "items": {"$ref": "folderInteger.json"} - } - }, - "tests": [ - { - "description": "base URI change ref valid", - "data": [[1]], - "valid": true - }, - 
{ - "description": "base URI change ref invalid", - "data": [["a"]], - "valid": false - } - ] - }, - { - "description": "base URI change - change folder", - "schema": { - "$id": "http://localhost:1234/scope_change_defs1.json", - "type" : "object", - "properties": { - "list": {"$ref": "#/definitions/baz"} - }, - "definitions": { - "baz": { - "$id": "folder/", - "type": "array", - "items": {"$ref": "folderInteger.json"} - } - } - }, - "tests": [ - { - "description": "number is valid", - "data": {"list": [1]}, - "valid": true - }, - { - "description": "string is invalid", - "data": {"list": ["a"]}, - "valid": false - } - ] - }, - { - "description": "base URI change - change folder in subschema", - "schema": { - "$id": "http://localhost:1234/scope_change_defs2.json", - "type" : "object", - "properties": { - "list": {"$ref": "#/definitions/baz/definitions/bar"} - }, - "definitions": { - "baz": { - "$id": "folder/", - "definitions": { - "bar": { - "type": "array", - "items": {"$ref": "folderInteger.json"} - } - } - } - } - }, - "tests": [ - { - "description": "number is valid", - "data": {"list": [1]}, - "valid": true - }, - { - "description": "string is invalid", - "data": {"list": ["a"]}, - "valid": false - } - ] - }, - { - "description": "root ref in remote ref", - "schema": { - "$id": "http://localhost:1234/object", - "type": "object", - "properties": { - "name": {"$ref": "name.json#/definitions/orNull"} - } - }, - "tests": [ - { - "description": "string is valid", - "data": { - "name": "foo" - }, - "valid": true - }, - { - "description": "null is valid", - "data": { - "name": null - }, - "valid": true - }, - { - "description": "object is invalid", - "data": { - "name": { - "name": null - } - }, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/required.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/required.json deleted file mode 100644 index 3082157..0000000 --- 
a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/required.json +++ /dev/null @@ -1,60 +0,0 @@ -[ - { - "description": "required validation", - "schema": { - "properties": { - "foo": {}, - "bar": {} - }, - "required": ["foo"] - }, - "tests": [ - { - "description": "present required property is valid", - "data": {"foo": 1}, - "valid": true - }, - { - "description": "non-present required property is invalid", - "data": {"bar": 1}, - "valid": false - }, - { - "description": "ignores non-objects", - "data": 12, - "valid": true - } - ] - }, - { - "description": "required default validation", - "schema": { - "properties": { - "foo": {} - } - }, - "tests": [ - { - "description": "not required by default", - "data": {}, - "valid": true - } - ] - }, - { - "description": "required with empty array", - "schema": { - "properties": { - "foo": {} - }, - "required": [] - }, - "tests": [ - { - "description": "property not required", - "data": {}, - "valid": true - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/type.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/type.json deleted file mode 100644 index 6129374..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/type.json +++ /dev/null @@ -1,345 +0,0 @@ -[ - { - "description": "integer type matches integers", - "schema": {"type": "integer"}, - "tests": [ - { - "description": "an integer is an integer", - "data": 1, - "valid": true - }, - { - "description": "a float is not an integer", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not an integer", - "data": "foo", - "valid": false - }, - { - "description": "a string is still not an integer, even if it looks like one", - "data": "1", - "valid": false - }, - { - "description": "an object is not an integer", - "data": {}, - "valid": false - }, - { - "description": "an array is not an integer", - "data": [], - "valid": false - }, - { - 
"description": "a boolean is not an integer", - "data": true, - "valid": false - }, - { - "description": "null is not an integer", - "data": null, - "valid": false - } - ] - }, - { - "description": "number type matches numbers", - "schema": {"type": "number"}, - "tests": [ - { - "description": "an integer is a number", - "data": 1, - "valid": true - }, - { - "description": "a float is a number", - "data": 1.1, - "valid": true - }, - { - "description": "a string is not a number", - "data": "foo", - "valid": false - }, - { - "description": "a string is still not a number, even if it looks like one", - "data": "1", - "valid": false - }, - { - "description": "an object is not a number", - "data": {}, - "valid": false - }, - { - "description": "an array is not a number", - "data": [], - "valid": false - }, - { - "description": "a boolean is not a number", - "data": true, - "valid": false - }, - { - "description": "null is not a number", - "data": null, - "valid": false - } - ] - }, - { - "description": "string type matches strings", - "schema": {"type": "string"}, - "tests": [ - { - "description": "1 is not a string", - "data": 1, - "valid": false - }, - { - "description": "a float is not a string", - "data": 1.1, - "valid": false - }, - { - "description": "a string is a string", - "data": "foo", - "valid": true - }, - { - "description": "a string is still a string, even if it looks like a number", - "data": "1", - "valid": true - }, - { - "description": "an object is not a string", - "data": {}, - "valid": false - }, - { - "description": "an array is not a string", - "data": [], - "valid": false - }, - { - "description": "a boolean is not a string", - "data": true, - "valid": false - }, - { - "description": "null is not a string", - "data": null, - "valid": false - } - ] - }, - { - "description": "object type matches objects", - "schema": {"type": "object"}, - "tests": [ - { - "description": "an integer is not an object", - "data": 1, - "valid": false - }, - { - 
"description": "a float is not an object", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not an object", - "data": "foo", - "valid": false - }, - { - "description": "an object is an object", - "data": {}, - "valid": true - }, - { - "description": "an array is not an object", - "data": [], - "valid": false - }, - { - "description": "a boolean is not an object", - "data": true, - "valid": false - }, - { - "description": "null is not an object", - "data": null, - "valid": false - } - ] - }, - { - "description": "array type matches arrays", - "schema": {"type": "array"}, - "tests": [ - { - "description": "an integer is not an array", - "data": 1, - "valid": false - }, - { - "description": "a float is not an array", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not an array", - "data": "foo", - "valid": false - }, - { - "description": "an object is not an array", - "data": {}, - "valid": false - }, - { - "description": "an array is an array", - "data": [], - "valid": true - }, - { - "description": "a boolean is not an array", - "data": true, - "valid": false - }, - { - "description": "null is not an array", - "data": null, - "valid": false - } - ] - }, - { - "description": "boolean type matches booleans", - "schema": {"type": "boolean"}, - "tests": [ - { - "description": "an integer is not a boolean", - "data": 1, - "valid": false - }, - { - "description": "a float is not a boolean", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not a boolean", - "data": "foo", - "valid": false - }, - { - "description": "an object is not a boolean", - "data": {}, - "valid": false - }, - { - "description": "an array is not a boolean", - "data": [], - "valid": false - }, - { - "description": "a boolean is a boolean", - "data": true, - "valid": true - }, - { - "description": "null is not a boolean", - "data": null, - "valid": false - } - ] - }, - { - "description": "null type matches only the null object", - 
"schema": {"type": "null"}, - "tests": [ - { - "description": "an integer is not null", - "data": 1, - "valid": false - }, - { - "description": "a float is not null", - "data": 1.1, - "valid": false - }, - { - "description": "a string is not null", - "data": "foo", - "valid": false - }, - { - "description": "an object is not null", - "data": {}, - "valid": false - }, - { - "description": "an array is not null", - "data": [], - "valid": false - }, - { - "description": "a boolean is not null", - "data": true, - "valid": false - }, - { - "description": "null is null", - "data": null, - "valid": true - } - ] - }, - { - "description": "multiple types can be specified in an array", - "schema": {"type": ["integer", "string"]}, - "tests": [ - { - "description": "an integer is valid", - "data": 1, - "valid": true - }, - { - "description": "a string is valid", - "data": "foo", - "valid": true - }, - { - "description": "a float is invalid", - "data": 1.1, - "valid": false - }, - { - "description": "an object is invalid", - "data": {}, - "valid": false - }, - { - "description": "an array is invalid", - "data": [], - "valid": false - }, - { - "description": "a boolean is invalid", - "data": true, - "valid": false - }, - { - "description": "null is invalid", - "data": null, - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/uniqueItems.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/uniqueItems.json deleted file mode 100644 index c1f4ab9..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/draft6/uniqueItems.json +++ /dev/null @@ -1,79 +0,0 @@ -[ - { - "description": "uniqueItems validation", - "schema": {"uniqueItems": true}, - "tests": [ - { - "description": "unique array of integers is valid", - "data": [1, 2], - "valid": true - }, - { - "description": "non-unique array of integers is invalid", - "data": [1, 1], - "valid": false - }, - { - "description": "numbers are 
unique if mathematically unequal", - "data": [1.0, 1.00, 1], - "valid": false - }, - { - "description": "unique array of objects is valid", - "data": [{"foo": "bar"}, {"foo": "baz"}], - "valid": true - }, - { - "description": "non-unique array of objects is invalid", - "data": [{"foo": "bar"}, {"foo": "bar"}], - "valid": false - }, - { - "description": "unique array of nested objects is valid", - "data": [ - {"foo": {"bar" : {"baz" : true}}}, - {"foo": {"bar" : {"baz" : false}}} - ], - "valid": true - }, - { - "description": "non-unique array of nested objects is invalid", - "data": [ - {"foo": {"bar" : {"baz" : true}}}, - {"foo": {"bar" : {"baz" : true}}} - ], - "valid": false - }, - { - "description": "unique array of arrays is valid", - "data": [["foo"], ["bar"]], - "valid": true - }, - { - "description": "non-unique array of arrays is invalid", - "data": [["foo"], ["foo"]], - "valid": false - }, - { - "description": "1 and true are unique", - "data": [1, true], - "valid": true - }, - { - "description": "0 and false are unique", - "data": [0, false], - "valid": true - }, - { - "description": "unique heterogeneous types are valid", - "data": [{}, [1], true, null, 1], - "valid": true - }, - { - "description": "non-unique heterogeneous types are invalid", - "data": [{}, [1], true, null, {}, 1], - "valid": false - } - ] - } -] diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/invalid_schema.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/invalid_schema.json deleted file mode 100644 index b377661..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/invalid_schema.json +++ /dev/null @@ -1 +0,0 @@ -{ "type": 1 } \ No newline at end of file diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/invalid_schemas.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/invalid_schemas.json deleted file mode 100644 index c48a6e1..0000000 --- 
a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/invalid_schemas.json +++ /dev/null @@ -1,268 +0,0 @@ -[ - { - "description": "must be object", - "schema": 1 - }, - { - "description": "format must be string", - "schema": { - "format": 1 - } - }, - { - "description": "unknown format", - "schema": { - "format": "palindrome" - } - }, - { - "description": "pattern must be string", - "schema": { - "pattern": 1 - } - }, - { - "description": "pattern must be regex", - "schema": { - "pattern": "(" - } - }, - { - "description": "$ref must be string", - "schema": { - "$ref": 1 - } - }, - { - "description": "patternProperties keys must be regex", - "schema": { - "patternProperties": { - "(": {} - } - } - }, - { - "description": "invalid $ref json-pointer", - "schema": { - "$ref": "#/definition/employee%1" - } - }, - { - "description": "invalid $ref url", - "schema": { - "$ref": "http:/localhost/test.json" - } - }, - { - "description": "invalid $ref ptr", - "schema": { - "$ref": "#/definition/employee" - } - }, - { - "description": "$ref to invalid schema", - "schema": { - "$ref": "#/definition/employee", - "definitions": { - "employee": { - "type": 1 - } - } - } - }, - { - "description": "draft3 is not supported", - "schema": { - "$schema": "http://json-schema.org/draft-03/schema#" - } - }, - { - "description": "does not validate with latest draft", - "schema": { - "$schema": "http://json-schema.org/schema#", - "contains": 1 - } - }, - { - "description": "$ref schema with syntax error", - "schema": { - "$ref": "testdata/syntax_error.json#" - } - }, - { - "description": "multipleOf must be greater than zero", - "schema": { - "multipleOf": 0 - } - }, - { - "description": "not compile fail", - "schema": { - "not": { - "$ref": "#/unknown" - } - } - }, - { - "description": "allOf compile fail", - "schema": { - "allOf": [ - { - "$ref": "#/unknown" - } - ] - } - }, - { - "description": "anyOf compile fail", - "schema": { - "anyOf": [ - { - "$ref": "#/unknown" - } - ] - } 
- }, - { - "description": "oneOf compile fail", - "schema": { - "oneOf": [ - { - "$ref": "#/unknown" - } - ] - } - }, - { - "description": "properties compile fail", - "schema": { - "properties": { - "p1": { - "$ref": "#/unknown" - } - } - } - }, - { - "description": "patternProperties compile fail", - "schema": { - "patternProperties": { - "p1": { - "$ref": "#/unknown" - } - } - } - }, - { - "description": "additionalProperties compile fail", - "schema": { - "additionalProperties": { - "$ref": "#/unknown" - } - } - }, - { - "description": "items compile fail", - "schema": { - "items": { - "$ref": "#/unknown" - } - } - }, - { - "description": "item compile fail", - "schema": { - "items": [ - { - "$ref": "#/unknown" - } - ] - } - }, - { - "description": "additionalItems compile fail", - "schema": { - "items": [{}], - "additionalItems": { - "$ref": "#/unknown" - } - } - }, - { - "description": "dependencies compile fail", - "schema": { - "dependencies": { - "p1": { - "$ref": "#/unknown" - } - } - } - }, - { - "description": "contains compile fail", - "schema": { - "contains": { - "$ref": "#/unknown" - } - } - }, - { - "description": "propertyNames compile fail", - "schema": { - "propertyNames": { - "$ref": "#/unknown" - } - } - }, - { - "description": "a:jsonpointer urlescape", - "schema": { - "propertyNames": { - "$ref": "#/unknown%" - } - } - }, - { - "description": "b:jsonpointer urlescape", - "schema": {}, - "fragment": "#/unknown%" - }, - { - "description": "jsonpointer array index", - "schema": { - "refs": [{}], - "propertyNames": { - "$ref": "#/refs/unknown" - } - } - }, - { - "description": "jsonpointer array index outofrange", - "schema": { - "refs": [{}], - "propertyNames": { - "$ref": "#/refs/5" - } - } - }, - { - "description": "schemaRef with invalid $schema", - "schema": [{"$schema":"http://json-schema.org/draft-03/schema"}], - "fragment": "#/0" - }, - { - "description": "schemaRef with wrong jsonpointer", - "schema": [{}], - "fragment": "#/1" - }, - { 
- "description": "schemaRef", - "schema": [1], - "fragment": "#/0" - }, - { - "description": "schemaRef wrong jsonpointer", - "schema": [{"$ref": "#/5"}], - "fragment": "#/0" - } -] \ No newline at end of file diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/folder/folderInteger.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/folder/folderInteger.json deleted file mode 100644 index dbe5c75..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/folder/folderInteger.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "integer" -} \ No newline at end of file diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/integer.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/integer.json deleted file mode 100644 index dbe5c75..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/integer.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "integer" -} \ No newline at end of file diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/name.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/name.json deleted file mode 100644 index 19ba093..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/name.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "definitions": { - "orNull": { - "anyOf": [ - {"type": "null"}, - {"$ref": "#"} - ] - } - }, - "type": "string" -} diff --git a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/subSchemas.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/subSchemas.json deleted file mode 100644 index 8b6d8f8..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/remotes/subSchemas.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "integer": { - "type": "integer" - }, - "refToInteger": { - "$ref": "#/integer" - } -} \ No newline at end of file diff --git 
a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/syntax_error.json b/vendor/github.com/santhosh-tekuri/jsonschema/testdata/syntax_error.json deleted file mode 100644 index 81750b9..0000000 --- a/vendor/github.com/santhosh-tekuri/jsonschema/testdata/syntax_error.json +++ /dev/null @@ -1 +0,0 @@ -{ \ No newline at end of file diff --git a/vendor/github.com/satori/go.uuid/.travis.yml b/vendor/github.com/satori/go.uuid/.travis.yml deleted file mode 100644 index 38517e2..0000000 --- a/vendor/github.com/satori/go.uuid/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -language: go -sudo: false -go: - - 1.2 - - 1.3 - - 1.4 - - 1.5 - - 1.6 -before_install: - - go get github.com/mattn/goveralls - - go get golang.org/x/tools/cmd/cover -script: - - $HOME/gopath/bin/goveralls -service=travis-ci -notifications: - email: false diff --git a/vendor/github.com/satori/go.uuid/LICENSE b/vendor/github.com/satori/go.uuid/LICENSE deleted file mode 100644 index 488357b..0000000 --- a/vendor/github.com/satori/go.uuid/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (C) 2013-2016 by Maxim Bublis - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/satori/go.uuid/README.md b/vendor/github.com/satori/go.uuid/README.md deleted file mode 100644 index b6aad1c..0000000 --- a/vendor/github.com/satori/go.uuid/README.md +++ /dev/null @@ -1,65 +0,0 @@ -# UUID package for Go language - -[![Build Status](https://travis-ci.org/satori/go.uuid.png?branch=master)](https://travis-ci.org/satori/go.uuid) -[![Coverage Status](https://coveralls.io/repos/github/satori/go.uuid/badge.svg?branch=master)](https://coveralls.io/github/satori/go.uuid) -[![GoDoc](http://godoc.org/github.com/satori/go.uuid?status.png)](http://godoc.org/github.com/satori/go.uuid) - -This package provides pure Go implementation of Universally Unique Identifier (UUID). Supported both creation and parsing of UUIDs. - -With 100% test coverage and benchmarks out of box. - -Supported versions: -* Version 1, based on timestamp and MAC address (RFC 4122) -* Version 2, based on timestamp, MAC address and POSIX UID/GID (DCE 1.1) -* Version 3, based on MD5 hashing (RFC 4122) -* Version 4, based on random numbers (RFC 4122) -* Version 5, based on SHA-1 hashing (RFC 4122) - -## Installation - -Use the `go` command: - - $ go get github.com/satori/go.uuid - -## Requirements - -UUID package requires Go >= 1.2. 
- -## Example - -```go -package main - -import ( - "fmt" - "github.com/satori/go.uuid" -) - -func main() { - // Creating UUID Version 4 - u1 := uuid.NewV4() - fmt.Printf("UUIDv4: %s\n", u1) - - // Parsing UUID from string input - u2, err := uuid.FromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - if err != nil { - fmt.Printf("Something gone wrong: %s", err) - } - fmt.Printf("Successfully parsed: %s", u2) -} -``` - -## Documentation - -[Documentation](http://godoc.org/github.com/satori/go.uuid) is hosted at GoDoc project. - -## Links -* [RFC 4122](http://tools.ietf.org/html/rfc4122) -* [DCE 1.1: Authentication and Security Services](http://pubs.opengroup.org/onlinepubs/9696989899/chap5.htm#tagcjh_08_02_01_01) - -## Copyright - -Copyright (C) 2013-2016 by Maxim Bublis . - -UUID package released under MIT License. -See [LICENSE](https://github.com/satori/go.uuid/blob/master/LICENSE) for details. diff --git a/vendor/github.com/satori/go.uuid/benchmarks_test.go b/vendor/github.com/satori/go.uuid/benchmarks_test.go deleted file mode 100644 index b4e567f..0000000 --- a/vendor/github.com/satori/go.uuid/benchmarks_test.go +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright (C) 2013-2015 by Maxim Bublis -// -// Permission is hereby granted, free of charge, to any person obtaining -// a copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to -// permit persons to whom the Software is furnished to do so, subject to -// the following conditions: -// -// The above copyright notice and this permission notice shall be -// included in all copies or substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -package uuid - -import ( - "testing" -) - -func BenchmarkFromBytes(b *testing.B) { - bytes := []byte{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - for i := 0; i < b.N; i++ { - FromBytes(bytes) - } -} - -func BenchmarkFromString(b *testing.B) { - s := "6ba7b810-9dad-11d1-80b4-00c04fd430c8" - for i := 0; i < b.N; i++ { - FromString(s) - } -} - -func BenchmarkFromStringUrn(b *testing.B) { - s := "urn:uuid:6ba7b810-9dad-11d1-80b4-00c04fd430c8" - for i := 0; i < b.N; i++ { - FromString(s) - } -} - -func BenchmarkFromStringWithBrackets(b *testing.B) { - s := "{6ba7b810-9dad-11d1-80b4-00c04fd430c8}" - for i := 0; i < b.N; i++ { - FromString(s) - } -} - -func BenchmarkNewV1(b *testing.B) { - for i := 0; i < b.N; i++ { - NewV1() - } -} - -func BenchmarkNewV2(b *testing.B) { - for i := 0; i < b.N; i++ { - NewV2(DomainPerson) - } -} - -func BenchmarkNewV3(b *testing.B) { - for i := 0; i < b.N; i++ { - NewV3(NamespaceDNS, "www.example.com") - } -} - -func BenchmarkNewV4(b *testing.B) { - for i := 0; i < b.N; i++ { - NewV4() - } -} - -func BenchmarkNewV5(b *testing.B) { - for i := 0; i < b.N; i++ { - NewV5(NamespaceDNS, "www.example.com") - } -} - -func BenchmarkMarshalBinary(b *testing.B) { - u := NewV4() - for i := 0; i < b.N; i++ { - u.MarshalBinary() - } -} - -func BenchmarkMarshalText(b *testing.B) { - u := NewV4() - for i := 0; i < b.N; i++ { - u.MarshalText() - } -} - -func BenchmarkUnmarshalBinary(b *testing.B) { - bytes := 
[]byte{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - u := UUID{} - for i := 0; i < b.N; i++ { - u.UnmarshalBinary(bytes) - } -} - -func BenchmarkUnmarshalText(b *testing.B) { - bytes := []byte("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - u := UUID{} - for i := 0; i < b.N; i++ { - u.UnmarshalText(bytes) - } -} - -func BenchmarkMarshalToString(b *testing.B) { - u := NewV4() - for i := 0; i < b.N; i++ { - u.String() - } -} diff --git a/vendor/github.com/satori/go.uuid/uuid.go b/vendor/github.com/satori/go.uuid/uuid.go deleted file mode 100644 index 9c7fbaa..0000000 --- a/vendor/github.com/satori/go.uuid/uuid.go +++ /dev/null @@ -1,488 +0,0 @@ -// Copyright (C) 2013-2015 by Maxim Bublis -// -// Permission is hereby granted, free of charge, to any person obtaining -// a copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to -// permit persons to whom the Software is furnished to do so, subject to -// the following conditions: -// -// The above copyright notice and this permission notice shall be -// included in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -// Package uuid provides implementation of Universally Unique Identifier (UUID). 
-// Supported versions are 1, 3, 4 and 5 (as specified in RFC 4122) and -// version 2 (as specified in DCE 1.1). -package uuid - -import ( - "bytes" - "crypto/md5" - "crypto/rand" - "crypto/sha1" - "database/sql/driver" - "encoding/binary" - "encoding/hex" - "fmt" - "hash" - "net" - "os" - "sync" - "time" -) - -// UUID layout variants. -const ( - VariantNCS = iota - VariantRFC4122 - VariantMicrosoft - VariantFuture -) - -// UUID DCE domains. -const ( - DomainPerson = iota - DomainGroup - DomainOrg -) - -// Difference in 100-nanosecond intervals between -// UUID epoch (October 15, 1582) and Unix epoch (January 1, 1970). -const epochStart = 122192928000000000 - -// Used in string method conversion -const dash byte = '-' - -// UUID v1/v2 storage. -var ( - storageMutex sync.Mutex - storageOnce sync.Once - epochFunc = unixTimeFunc - clockSequence uint16 - lastTime uint64 - hardwareAddr [6]byte - posixUID = uint32(os.Getuid()) - posixGID = uint32(os.Getgid()) -) - -// String parse helpers. -var ( - urnPrefix = []byte("urn:uuid:") - byteGroups = []int{8, 4, 4, 4, 12} -) - -func initClockSequence() { - buf := make([]byte, 2) - safeRandom(buf) - clockSequence = binary.BigEndian.Uint16(buf) -} - -func initHardwareAddr() { - interfaces, err := net.Interfaces() - if err == nil { - for _, iface := range interfaces { - if len(iface.HardwareAddr) >= 6 { - copy(hardwareAddr[:], iface.HardwareAddr) - return - } - } - } - - // Initialize hardwareAddr randomly in case - // of real network interfaces absence - safeRandom(hardwareAddr[:]) - - // Set multicast bit as recommended in RFC 4122 - hardwareAddr[0] |= 0x01 -} - -func initStorage() { - initClockSequence() - initHardwareAddr() -} - -func safeRandom(dest []byte) { - if _, err := rand.Read(dest); err != nil { - panic(err) - } -} - -// Returns difference in 100-nanosecond intervals between -// UUID epoch (October 15, 1582) and current time. -// This is default epoch calculation function. 
-func unixTimeFunc() uint64 { - return epochStart + uint64(time.Now().UnixNano()/100) -} - -// UUID representation compliant with specification -// described in RFC 4122. -type UUID [16]byte - -// NullUUID can be used with the standard sql package to represent a -// UUID value that can be NULL in the database -type NullUUID struct { - UUID UUID - Valid bool -} - -// The nil UUID is special form of UUID that is specified to have all -// 128 bits set to zero. -var Nil = UUID{} - -// Predefined namespace UUIDs. -var ( - NamespaceDNS, _ = FromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - NamespaceURL, _ = FromString("6ba7b811-9dad-11d1-80b4-00c04fd430c8") - NamespaceOID, _ = FromString("6ba7b812-9dad-11d1-80b4-00c04fd430c8") - NamespaceX500, _ = FromString("6ba7b814-9dad-11d1-80b4-00c04fd430c8") -) - -// And returns result of binary AND of two UUIDs. -func And(u1 UUID, u2 UUID) UUID { - u := UUID{} - for i := 0; i < 16; i++ { - u[i] = u1[i] & u2[i] - } - return u -} - -// Or returns result of binary OR of two UUIDs. -func Or(u1 UUID, u2 UUID) UUID { - u := UUID{} - for i := 0; i < 16; i++ { - u[i] = u1[i] | u2[i] - } - return u -} - -// Equal returns true if u1 and u2 equals, otherwise returns false. -func Equal(u1 UUID, u2 UUID) bool { - return bytes.Equal(u1[:], u2[:]) -} - -// Version returns algorithm version used to generate UUID. -func (u UUID) Version() uint { - return uint(u[6] >> 4) -} - -// Variant returns UUID layout variant. -func (u UUID) Variant() uint { - switch { - case (u[8] & 0x80) == 0x00: - return VariantNCS - case (u[8]&0xc0)|0x80 == 0x80: - return VariantRFC4122 - case (u[8]&0xe0)|0xc0 == 0xc0: - return VariantMicrosoft - } - return VariantFuture -} - -// Bytes returns bytes slice representation of UUID. -func (u UUID) Bytes() []byte { - return u[:] -} - -// Returns canonical string representation of UUID: -// xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx. 
-func (u UUID) String() string { - buf := make([]byte, 36) - - hex.Encode(buf[0:8], u[0:4]) - buf[8] = dash - hex.Encode(buf[9:13], u[4:6]) - buf[13] = dash - hex.Encode(buf[14:18], u[6:8]) - buf[18] = dash - hex.Encode(buf[19:23], u[8:10]) - buf[23] = dash - hex.Encode(buf[24:], u[10:]) - - return string(buf) -} - -// SetVersion sets version bits. -func (u *UUID) SetVersion(v byte) { - u[6] = (u[6] & 0x0f) | (v << 4) -} - -// SetVariant sets variant bits as described in RFC 4122. -func (u *UUID) SetVariant() { - u[8] = (u[8] & 0xbf) | 0x80 -} - -// MarshalText implements the encoding.TextMarshaler interface. -// The encoding is the same as returned by String. -func (u UUID) MarshalText() (text []byte, err error) { - text = []byte(u.String()) - return -} - -// UnmarshalText implements the encoding.TextUnmarshaler interface. -// Following formats are supported: -// "6ba7b810-9dad-11d1-80b4-00c04fd430c8", -// "{6ba7b810-9dad-11d1-80b4-00c04fd430c8}", -// "urn:uuid:6ba7b810-9dad-11d1-80b4-00c04fd430c8" -func (u *UUID) UnmarshalText(text []byte) (err error) { - if len(text) < 32 { - err = fmt.Errorf("uuid: UUID string too short: %s", text) - return - } - - t := text[:] - braced := false - - if bytes.Equal(t[:9], urnPrefix) { - t = t[9:] - } else if t[0] == '{' { - braced = true - t = t[1:] - } - - b := u[:] - - for i, byteGroup := range byteGroups { - if i > 0 && t[0] == '-' { - t = t[1:] - } else if i > 0 && t[0] != '-' { - err = fmt.Errorf("uuid: invalid string format") - return - } - - if i == 2 { - if !bytes.Contains([]byte("012345"), []byte{t[0]}) { - err = fmt.Errorf("uuid: invalid version number: %s", t[0]) - return - } - } - - if len(t) < byteGroup { - err = fmt.Errorf("uuid: UUID string too short: %s", text) - return - } - - if i == 4 && len(t) > byteGroup && - ((braced && t[byteGroup] != '}') || len(t[byteGroup:]) > 1 || !braced) { - err = fmt.Errorf("uuid: UUID string too long: %s", t) - return - } - - _, err = hex.Decode(b[:byteGroup/2], t[:byteGroup]) - - 
if err != nil { - return - } - - t = t[byteGroup:] - b = b[byteGroup/2:] - } - - return -} - -// MarshalBinary implements the encoding.BinaryMarshaler interface. -func (u UUID) MarshalBinary() (data []byte, err error) { - data = u.Bytes() - return -} - -// UnmarshalBinary implements the encoding.BinaryUnmarshaler interface. -// It will return error if the slice isn't 16 bytes long. -func (u *UUID) UnmarshalBinary(data []byte) (err error) { - if len(data) != 16 { - err = fmt.Errorf("uuid: UUID must be exactly 16 bytes long, got %d bytes", len(data)) - return - } - copy(u[:], data) - - return -} - -// Value implements the driver.Valuer interface. -func (u UUID) Value() (driver.Value, error) { - return u.String(), nil -} - -// Scan implements the sql.Scanner interface. -// A 16-byte slice is handled by UnmarshalBinary, while -// a longer byte slice or a string is handled by UnmarshalText. -func (u *UUID) Scan(src interface{}) error { - switch src := src.(type) { - case []byte: - if len(src) == 16 { - return u.UnmarshalBinary(src) - } - return u.UnmarshalText(src) - - case string: - return u.UnmarshalText([]byte(src)) - } - - return fmt.Errorf("uuid: cannot convert %T to UUID", src) -} - -// Value implements the driver.Valuer interface. -func (u NullUUID) Value() (driver.Value, error) { - if !u.Valid { - return nil, nil - } - // Delegate to UUID Value function - return u.UUID.Value() -} - -// Scan implements the sql.Scanner interface. -func (u *NullUUID) Scan(src interface{}) error { - if src == nil { - u.UUID, u.Valid = Nil, false - return nil - } - - // Delegate to UUID Scan function - u.Valid = true - return u.UUID.Scan(src) -} - -// FromBytes returns UUID converted from raw byte slice input. -// It will return error if the slice isn't 16 bytes long. -func FromBytes(input []byte) (u UUID, err error) { - err = u.UnmarshalBinary(input) - return -} - -// FromBytesOrNil returns UUID converted from raw byte slice input. 
-// Same behavior as FromBytes, but returns a Nil UUID on error. -func FromBytesOrNil(input []byte) UUID { - uuid, err := FromBytes(input) - if err != nil { - return Nil - } - return uuid -} - -// FromString returns UUID parsed from string input. -// Input is expected in a form accepted by UnmarshalText. -func FromString(input string) (u UUID, err error) { - err = u.UnmarshalText([]byte(input)) - return -} - -// FromStringOrNil returns UUID parsed from string input. -// Same behavior as FromString, but returns a Nil UUID on error. -func FromStringOrNil(input string) UUID { - uuid, err := FromString(input) - if err != nil { - return Nil - } - return uuid -} - -// Returns UUID v1/v2 storage state. -// Returns epoch timestamp, clock sequence, and hardware address. -func getStorage() (uint64, uint16, []byte) { - storageOnce.Do(initStorage) - - storageMutex.Lock() - defer storageMutex.Unlock() - - timeNow := epochFunc() - // Clock changed backwards since last UUID generation. - // Should increase clock sequence. - if timeNow <= lastTime { - clockSequence++ - } - lastTime = timeNow - - return timeNow, clockSequence, hardwareAddr[:] -} - -// NewV1 returns UUID based on current timestamp and MAC address. -func NewV1() UUID { - u := UUID{} - - timeNow, clockSeq, hardwareAddr := getStorage() - - binary.BigEndian.PutUint32(u[0:], uint32(timeNow)) - binary.BigEndian.PutUint16(u[4:], uint16(timeNow>>32)) - binary.BigEndian.PutUint16(u[6:], uint16(timeNow>>48)) - binary.BigEndian.PutUint16(u[8:], clockSeq) - - copy(u[10:], hardwareAddr) - - u.SetVersion(1) - u.SetVariant() - - return u -} - -// NewV2 returns DCE Security UUID based on POSIX UID/GID. 
-func NewV2(domain byte) UUID { - u := UUID{} - - timeNow, clockSeq, hardwareAddr := getStorage() - - switch domain { - case DomainPerson: - binary.BigEndian.PutUint32(u[0:], posixUID) - case DomainGroup: - binary.BigEndian.PutUint32(u[0:], posixGID) - } - - binary.BigEndian.PutUint16(u[4:], uint16(timeNow>>32)) - binary.BigEndian.PutUint16(u[6:], uint16(timeNow>>48)) - binary.BigEndian.PutUint16(u[8:], clockSeq) - u[9] = domain - - copy(u[10:], hardwareAddr) - - u.SetVersion(2) - u.SetVariant() - - return u -} - -// NewV3 returns UUID based on MD5 hash of namespace UUID and name. -func NewV3(ns UUID, name string) UUID { - u := newFromHash(md5.New(), ns, name) - u.SetVersion(3) - u.SetVariant() - - return u -} - -// NewV4 returns random generated UUID. -func NewV4() UUID { - u := UUID{} - safeRandom(u[:]) - u.SetVersion(4) - u.SetVariant() - - return u -} - -// NewV5 returns UUID based on SHA-1 hash of namespace UUID and name. -func NewV5(ns UUID, name string) UUID { - u := newFromHash(sha1.New(), ns, name) - u.SetVersion(5) - u.SetVariant() - - return u -} - -// Returns UUID based on hashing of namespace UUID and name. 
-func newFromHash(h hash.Hash, ns UUID, name string) UUID { - u := UUID{} - h.Write(ns[:]) - h.Write([]byte(name)) - copy(u[:], h.Sum(nil)) - - return u -} diff --git a/vendor/github.com/satori/go.uuid/uuid_test.go b/vendor/github.com/satori/go.uuid/uuid_test.go deleted file mode 100644 index aa68ac9..0000000 --- a/vendor/github.com/satori/go.uuid/uuid_test.go +++ /dev/null @@ -1,633 +0,0 @@ -// Copyright (C) 2013, 2015 by Maxim Bublis -// -// Permission is hereby granted, free of charge, to any person obtaining -// a copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to -// permit persons to whom the Software is furnished to do so, subject to -// the following conditions: -// -// The above copyright notice and this permission notice shall be -// included in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- -package uuid - -import ( - "bytes" - "testing" -) - -func TestBytes(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - - bytes1 := []byte{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - - if !bytes.Equal(u.Bytes(), bytes1) { - t.Errorf("Incorrect bytes representation for UUID: %s", u) - } -} - -func TestString(t *testing.T) { - if NamespaceDNS.String() != "6ba7b810-9dad-11d1-80b4-00c04fd430c8" { - t.Errorf("Incorrect string representation for UUID: %s", NamespaceDNS.String()) - } -} - -func TestEqual(t *testing.T) { - if !Equal(NamespaceDNS, NamespaceDNS) { - t.Errorf("Incorrect comparison of %s and %s", NamespaceDNS, NamespaceDNS) - } - - if Equal(NamespaceDNS, NamespaceURL) { - t.Errorf("Incorrect comparison of %s and %s", NamespaceDNS, NamespaceURL) - } -} - -func TestOr(t *testing.T) { - u1 := UUID{0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff} - u2 := UUID{0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00} - - u := UUID{0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff} - - if !Equal(u, Or(u1, u2)) { - t.Errorf("Incorrect bitwise OR result %s", Or(u1, u2)) - } -} - -func TestAnd(t *testing.T) { - u1 := UUID{0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff} - u2 := UUID{0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00} - - u := UUID{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00} - - if !Equal(u, And(u1, u2)) { - t.Errorf("Incorrect bitwise AND result %s", And(u1, u2)) - } -} - -func TestVersion(t *testing.T) { - u := UUID{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00} - - if u.Version() != 1 { - 
t.Errorf("Incorrect version for UUID: %d", u.Version()) - } -} - -func TestSetVersion(t *testing.T) { - u := UUID{} - u.SetVersion(4) - - if u.Version() != 4 { - t.Errorf("Incorrect version for UUID after u.setVersion(4): %d", u.Version()) - } -} - -func TestVariant(t *testing.T) { - u1 := UUID{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00} - - if u1.Variant() != VariantNCS { - t.Errorf("Incorrect variant for UUID variant %d: %d", VariantNCS, u1.Variant()) - } - - u2 := UUID{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00} - - if u2.Variant() != VariantRFC4122 { - t.Errorf("Incorrect variant for UUID variant %d: %d", VariantRFC4122, u2.Variant()) - } - - u3 := UUID{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00} - - if u3.Variant() != VariantMicrosoft { - t.Errorf("Incorrect variant for UUID variant %d: %d", VariantMicrosoft, u3.Variant()) - } - - u4 := UUID{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xe0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00} - - if u4.Variant() != VariantFuture { - t.Errorf("Incorrect variant for UUID variant %d: %d", VariantFuture, u4.Variant()) - } -} - -func TestSetVariant(t *testing.T) { - u := new(UUID) - u.SetVariant() - - if u.Variant() != VariantRFC4122 { - t.Errorf("Incorrect variant for UUID after u.setVariant(): %d", u.Variant()) - } -} - -func TestFromBytes(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - b1 := []byte{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - - u1, err := FromBytes(b1) - if err != nil { - t.Errorf("Error parsing UUID from bytes: %s", err) - } - - if !Equal(u, u1) { - t.Errorf("UUIDs should be equal: %s and %s", u, u1) - } - - b2 := []byte{} - - _, err = FromBytes(b2) - if err == nil { - t.Errorf("Should return error parsing from empty 
byte slice, got %s", err) - } -} - -func TestMarshalBinary(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - b1 := []byte{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - - b2, err := u.MarshalBinary() - if err != nil { - t.Errorf("Error marshaling UUID: %s", err) - } - - if !bytes.Equal(b1, b2) { - t.Errorf("Marshaled UUID should be %s, got %s", b1, b2) - } -} - -func TestUnmarshalBinary(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - b1 := []byte{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - - u1 := UUID{} - err := u1.UnmarshalBinary(b1) - if err != nil { - t.Errorf("Error unmarshaling UUID: %s", err) - } - - if !Equal(u, u1) { - t.Errorf("UUIDs should be equal: %s and %s", u, u1) - } - - b2 := []byte{} - u2 := UUID{} - - err = u2.UnmarshalBinary(b2) - if err == nil { - t.Errorf("Should return error unmarshalling from empty byte slice, got %s", err) - } -} - -func TestFromString(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - - s1 := "6ba7b810-9dad-11d1-80b4-00c04fd430c8" - s2 := "{6ba7b810-9dad-11d1-80b4-00c04fd430c8}" - s3 := "urn:uuid:6ba7b810-9dad-11d1-80b4-00c04fd430c8" - - _, err := FromString("") - if err == nil { - t.Errorf("Should return error trying to parse empty string, got %s", err) - } - - u1, err := FromString(s1) - if err != nil { - t.Errorf("Error parsing UUID from string: %s", err) - } - - if !Equal(u, u1) { - t.Errorf("UUIDs should be equal: %s and %s", u, u1) - } - - u2, err := FromString(s2) - if err != nil { - t.Errorf("Error parsing UUID from string: %s", err) - } - - if !Equal(u, u2) { - t.Errorf("UUIDs should be equal: %s and %s", u, u2) - } - - u3, err := FromString(s3) - if err != nil { - t.Errorf("Error 
parsing UUID from string: %s", err) - } - - if !Equal(u, u3) { - t.Errorf("UUIDs should be equal: %s and %s", u, u3) - } -} - -func TestFromStringShort(t *testing.T) { - // Invalid 35-character UUID string - s1 := "6ba7b810-9dad-11d1-80b4-00c04fd430c" - - for i := len(s1); i >= 0; i-- { - _, err := FromString(s1[:i]) - if err == nil { - t.Errorf("Should return error trying to parse too short string, got %s", err) - } - } -} - -func TestFromStringLong(t *testing.T) { - // Invalid 37+ character UUID string - s := []string{ - "6ba7b810-9dad-11d1-80b4-00c04fd430c8=", - "6ba7b810-9dad-11d1-80b4-00c04fd430c8}", - "{6ba7b810-9dad-11d1-80b4-00c04fd430c8}f", - "6ba7b810-9dad-11d1-80b4-00c04fd430c800c04fd430c8", - } - - for _, str := range s { - _, err := FromString(str) - if err == nil { - t.Errorf("Should return error trying to parse too long string, passed %s", str) - } - } -} - -func TestFromStringInvalid(t *testing.T) { - // Invalid UUID string formats - s := []string{ - "6ba7b8109dad11d180b400c04fd430c8", - "6ba7b8109dad11d180b400c04fd430c86ba7b8109dad11d180b400c04fd430c8", - "urn:uuid:{6ba7b810-9dad-11d1-80b4-00c04fd430c8}", - "6ba7b8109-dad-11d1-80b4-00c04fd430c8", - "6ba7b810-9dad1-1d1-80b4-00c04fd430c8", - "6ba7b810-9dad-11d18-0b4-00c04fd430c8", - "6ba7b810-9dad-11d1-80b40-0c04fd430c8", - "6ba7b810+9dad+11d1+80b4+00c04fd430c8", - "6ba7b810-9dad11d180b400c04fd430c8", - "6ba7b8109dad-11d180b400c04fd430c8", - "6ba7b8109dad11d1-80b400c04fd430c8", - "6ba7b8109dad11d180b4-00c04fd430c8", - } - - for _, str := range s { - _, err := FromString(str) - if err == nil { - t.Errorf("Should return error trying to parse invalid string, passed %s", str) - } - } -} - -func TestFromStringOrNil(t *testing.T) { - u := FromStringOrNil("") - if u != Nil { - t.Errorf("Should return Nil UUID on parse failure, got %s", u) - } -} - -func TestFromBytesOrNil(t *testing.T) { - b := []byte{} - u := FromBytesOrNil(b) - if u != Nil { - t.Errorf("Should return Nil UUID on parse failure, got %s", u) 
- } -} - -func TestMarshalText(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - b1 := []byte("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - - b2, err := u.MarshalText() - if err != nil { - t.Errorf("Error marshaling UUID: %s", err) - } - - if !bytes.Equal(b1, b2) { - t.Errorf("Marshaled UUID should be %s, got %s", b1, b2) - } -} - -func TestUnmarshalText(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - b1 := []byte("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - - u1 := UUID{} - err := u1.UnmarshalText(b1) - if err != nil { - t.Errorf("Error unmarshaling UUID: %s", err) - } - - if !Equal(u, u1) { - t.Errorf("UUIDs should be equal: %s and %s", u, u1) - } - - b2 := []byte("") - u2 := UUID{} - - err = u2.UnmarshalText(b2) - if err == nil { - t.Errorf("Should return error trying to unmarshal from empty string") - } -} - -func TestValue(t *testing.T) { - u, err := FromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - if err != nil { - t.Errorf("Error parsing UUID from string: %s", err) - } - - val, err := u.Value() - if err != nil { - t.Errorf("Error getting UUID value: %s", err) - } - - if val != u.String() { - t.Errorf("Wrong value returned, should be equal: %s and %s", val, u) - } -} - -func TestValueNil(t *testing.T) { - u := UUID{} - - val, err := u.Value() - if err != nil { - t.Errorf("Error getting UUID value: %s", err) - } - - if val != Nil.String() { - t.Errorf("Wrong value returned, should be equal to UUID.Nil: %s", val) - } -} - -func TestNullUUIDValueNil(t *testing.T) { - u := NullUUID{} - - val, err := u.Value() - if err != nil { - t.Errorf("Error getting UUID value: %s", err) - } - - if val != nil { - t.Errorf("Wrong value returned, should be nil: %s", val) - } -} - -func TestScanBinary(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - b1 := 
[]byte{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - - u1 := UUID{} - err := u1.Scan(b1) - if err != nil { - t.Errorf("Error unmarshaling UUID: %s", err) - } - - if !Equal(u, u1) { - t.Errorf("UUIDs should be equal: %s and %s", u, u1) - } - - b2 := []byte{} - u2 := UUID{} - - err = u2.Scan(b2) - if err == nil { - t.Errorf("Should return error unmarshalling from empty byte slice, got %s", err) - } -} - -func TestScanString(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - s1 := "6ba7b810-9dad-11d1-80b4-00c04fd430c8" - - u1 := UUID{} - err := u1.Scan(s1) - if err != nil { - t.Errorf("Error unmarshaling UUID: %s", err) - } - - if !Equal(u, u1) { - t.Errorf("UUIDs should be equal: %s and %s", u, u1) - } - - s2 := "" - u2 := UUID{} - - err = u2.Scan(s2) - if err == nil { - t.Errorf("Should return error trying to unmarshal from empty string") - } -} - -func TestScanText(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - b1 := []byte("6ba7b810-9dad-11d1-80b4-00c04fd430c8") - - u1 := UUID{} - err := u1.Scan(b1) - if err != nil { - t.Errorf("Error unmarshaling UUID: %s", err) - } - - if !Equal(u, u1) { - t.Errorf("UUIDs should be equal: %s and %s", u, u1) - } - - b2 := []byte("") - u2 := UUID{} - - err = u2.Scan(b2) - if err == nil { - t.Errorf("Should return error trying to unmarshal from empty string") - } -} - -func TestScanUnsupported(t *testing.T) { - u := UUID{} - - err := u.Scan(true) - if err == nil { - t.Errorf("Should return error trying to unmarshal from bool") - } -} - -func TestScanNil(t *testing.T) { - u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - - err := u.Scan(nil) - if err == nil { - t.Errorf("Error UUID shouldn't allow unmarshalling from nil") - } -} - -func TestNullUUIDScanValid(t *testing.T) { 
- u := UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8} - s1 := "6ba7b810-9dad-11d1-80b4-00c04fd430c8" - - u1 := NullUUID{} - err := u1.Scan(s1) - if err != nil { - t.Errorf("Error unmarshaling NullUUID: %s", err) - } - - if !u1.Valid { - t.Errorf("NullUUID should be valid") - } - - if !Equal(u, u1.UUID) { - t.Errorf("UUIDs should be equal: %s and %s", u, u1.UUID) - } -} - -func TestNullUUIDScanNil(t *testing.T) { - u := NullUUID{UUID{0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8}, true} - - err := u.Scan(nil) - if err != nil { - t.Errorf("Error unmarshaling NullUUID: %s", err) - } - - if u.Valid { - t.Errorf("NullUUID should not be valid") - } - - if !Equal(u.UUID, Nil) { - t.Errorf("NullUUID value should be equal to Nil: %s", u) - } -} - -func TestNewV1(t *testing.T) { - u := NewV1() - - if u.Version() != 1 { - t.Errorf("UUIDv1 generated with incorrect version: %d", u.Version()) - } - - if u.Variant() != VariantRFC4122 { - t.Errorf("UUIDv1 generated with incorrect variant: %d", u.Variant()) - } - - u1 := NewV1() - u2 := NewV1() - - if Equal(u1, u2) { - t.Errorf("UUIDv1 generated two equal UUIDs: %s and %s", u1, u2) - } - - oldFunc := epochFunc - epochFunc = func() uint64 { return 0 } - - u3 := NewV1() - u4 := NewV1() - - if Equal(u3, u4) { - t.Errorf("UUIDv1 generated two equal UUIDs: %s and %s", u3, u4) - } - - epochFunc = oldFunc -} - -func TestNewV2(t *testing.T) { - u1 := NewV2(DomainPerson) - - if u1.Version() != 2 { - t.Errorf("UUIDv2 generated with incorrect version: %d", u1.Version()) - } - - if u1.Variant() != VariantRFC4122 { - t.Errorf("UUIDv2 generated with incorrect variant: %d", u1.Variant()) - } - - u2 := NewV2(DomainGroup) - - if u2.Version() != 2 { - t.Errorf("UUIDv2 generated with incorrect version: %d", u2.Version()) - } - - if u2.Variant() != VariantRFC4122 { - t.Errorf("UUIDv2 generated with incorrect variant: %d", u2.Variant()) - } -} - -func 
TestNewV3(t *testing.T) { - u := NewV3(NamespaceDNS, "www.example.com") - - if u.Version() != 3 { - t.Errorf("UUIDv3 generated with incorrect version: %d", u.Version()) - } - - if u.Variant() != VariantRFC4122 { - t.Errorf("UUIDv3 generated with incorrect variant: %d", u.Variant()) - } - - if u.String() != "5df41881-3aed-3515-88a7-2f4a814cf09e" { - t.Errorf("UUIDv3 generated incorrectly: %s", u.String()) - } - - u = NewV3(NamespaceDNS, "python.org") - - if u.String() != "6fa459ea-ee8a-3ca4-894e-db77e160355e" { - t.Errorf("UUIDv3 generated incorrectly: %s", u.String()) - } - - u1 := NewV3(NamespaceDNS, "golang.org") - u2 := NewV3(NamespaceDNS, "golang.org") - if !Equal(u1, u2) { - t.Errorf("UUIDv3 generated different UUIDs for same namespace and name: %s and %s", u1, u2) - } - - u3 := NewV3(NamespaceDNS, "example.com") - if Equal(u1, u3) { - t.Errorf("UUIDv3 generated same UUIDs for different names in same namespace: %s and %s", u1, u2) - } - - u4 := NewV3(NamespaceURL, "golang.org") - if Equal(u1, u4) { - t.Errorf("UUIDv3 generated same UUIDs for sane names in different namespaces: %s and %s", u1, u4) - } -} - -func TestNewV4(t *testing.T) { - u := NewV4() - - if u.Version() != 4 { - t.Errorf("UUIDv4 generated with incorrect version: %d", u.Version()) - } - - if u.Variant() != VariantRFC4122 { - t.Errorf("UUIDv4 generated with incorrect variant: %d", u.Variant()) - } -} - -func TestNewV5(t *testing.T) { - u := NewV5(NamespaceDNS, "www.example.com") - - if u.Version() != 5 { - t.Errorf("UUIDv5 generated with incorrect version: %d", u.Version()) - } - - if u.Variant() != VariantRFC4122 { - t.Errorf("UUIDv5 generated with incorrect variant: %d", u.Variant()) - } - - u = NewV5(NamespaceDNS, "python.org") - - if u.String() != "886313e1-3b8a-5372-9b90-0c9aee199e5d" { - t.Errorf("UUIDv5 generated incorrectly: %s", u.String()) - } - - u1 := NewV5(NamespaceDNS, "golang.org") - u2 := NewV5(NamespaceDNS, "golang.org") - if !Equal(u1, u2) { - t.Errorf("UUIDv5 generated 
different UUIDs for same namespace and name: %s and %s", u1, u2) - } - - u3 := NewV5(NamespaceDNS, "example.com") - if Equal(u1, u3) { - t.Errorf("UUIDv5 generated same UUIDs for different names in same namespace: %s and %s", u1, u2) - } - - u4 := NewV5(NamespaceURL, "golang.org") - if Equal(u1, u4) { - t.Errorf("UUIDv3 generated same UUIDs for sane names in different namespaces: %s and %s", u1, u4) - } -}