TUN-3467: Serialize cf-cloudflared-response-meta during package initialization using jsoniter

cthuang 2020-10-16 11:13:48 +01:00
parent 9ac40dcf04
commit 6886e5f90a
148 changed files with 11896 additions and 2555 deletions


@ -204,13 +204,15 @@ type h2muxRespWriter struct {
}
func (rp *h2muxRespWriter) WriteRespHeaders(resp *http.Response) error {
-return rp.WriteHeaders(h2mux.H1ResponseToH2ResponseHeaders(resp))
+headers := h2mux.H1ResponseToH2ResponseHeaders(resp)
+headers = append(headers, h2mux.Header{Name: responseMetaHeaderField, Value: responseMetaHeaderOrigin})
+return rp.WriteHeaders(headers)
}
func (rp *h2muxRespWriter) WriteErrorResponse(err error) {
rp.WriteHeaders([]h2mux.Header{
{Name: ":status", Value: "502"},
-h2mux.CreateResponseMetaHeader(h2mux.ResponseMetaHeaderField, h2mux.ResponseSourceCloudflared),
+{Name: responseMetaHeaderField, Value: responseMetaHeaderCfd},
})
rp.Write([]byte("502 Bad Gateway"))
}

connection/header.go (new file, +33 lines)

@ -0,0 +1,33 @@
package connection
import (
"fmt"
"net/http"
"github.com/cloudflare/cloudflared/h2mux"
)
const (
responseMetaHeaderField = "cf-cloudflared-response-meta"
responseSourceCloudflared = "cloudflared"
responseSourceOrigin = "origin"
)
var (
canonicalResponseUserHeadersField = http.CanonicalHeaderKey(h2mux.ResponseUserHeadersField)
canonicalResponseMetaHeaderField = http.CanonicalHeaderKey(responseMetaHeaderField)
responseMetaHeaderCfd = mustInitRespMetaHeader(responseSourceCloudflared)
responseMetaHeaderOrigin = mustInitRespMetaHeader(responseSourceOrigin)
)
type responseMetaHeader struct {
Source string `json:"src"`
}
func mustInitRespMetaHeader(src string) string {
header, err := json.Marshal(responseMetaHeader{Source: src})
if err != nil {
panic(fmt.Sprintf("Failed to serialize response meta header = %s, err: %v", src, err))
}
return string(header)
}
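
Since the meta header can only ever carry one of two values, the commit marshals both once at package initialization instead of on every proxied response. Below is a minimal standalone sketch of the same idea; it uses the standard library's encoding/json rather than the jsoniter alias that connection/json.go binds to the name json, and the main function exists only for illustration.

```go
package main

import (
    "encoding/json"
    "fmt"
)

type responseMetaHeader struct {
    Source string `json:"src"`
}

// The only two possible header values are serialized once, when the package
// is initialized, rather than on every response.
var (
    responseMetaHeaderCfd    = mustInitRespMetaHeader("cloudflared")
    responseMetaHeaderOrigin = mustInitRespMetaHeader("origin")
)

func mustInitRespMetaHeader(src string) string {
    header, err := json.Marshal(responseMetaHeader{Source: src})
    if err != nil {
        panic(fmt.Sprintf("failed to serialize response meta header = %s, err: %v", src, err))
    }
    return string(header)
}

func main() {
    fmt.Println(responseMetaHeaderCfd)    // {"src":"cloudflared"}
    fmt.Println(responseMetaHeaderOrigin) // {"src":"origin"}
}
```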


@ -2,13 +2,14 @@ package connection
import (
"context"
"encoding/json"
"fmt"
"io"
"math"
"net"
"net/http"
"net/url"
"strings"
"sync"
"github.com/cloudflare/cloudflared/h2mux"
tunnelpogs "github.com/cloudflare/cloudflared/tunnelrpc/pogs"
@ -32,14 +33,16 @@ type HTTP2Connection struct {
observer *Observer
connIndexStr string
connIndex uint8
-shutdownChan chan struct{}
+wg *sync.WaitGroup
connectedFuse ConnectedFuse
}
func NewHTTP2Connection(conn net.Conn, config *Config, originURL *url.URL, namedTunnelConfig *NamedTunnelConfig, connOptions *tunnelpogs.ConnectionOptions, observer *Observer, connIndex uint8, connectedFuse ConnectedFuse) (*HTTP2Connection, error) {
return &HTTP2Connection{
-conn: conn,
-server: &http2.Server{},
+conn: conn,
+server: &http2.Server{
+MaxConcurrentStreams: math.MaxUint32,
+},
config: config,
originURL: originURL,
namedTunnel: namedTunnelConfig,
@ -47,7 +50,7 @@ func NewHTTP2Connection(conn net.Conn, config *Config, originURL *url.URL, named
observer: observer,
connIndexStr: uint8ToString(connIndex),
connIndex: connIndex,
-shutdownChan: make(chan struct{}),
+wg: &sync.WaitGroup{},
connectedFuse: connectedFuse,
}, nil
}
@ -64,6 +67,9 @@ func (c *HTTP2Connection) Serve(ctx context.Context) {
}
func (c *HTTP2Connection) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+c.wg.Add(1)
+defer c.wg.Done()
r.URL.Scheme = c.originURL.Scheme
r.URL.Host = c.originURL.Host
@ -103,9 +109,8 @@ func (c *HTTP2Connection) serveControlStream(ctx context.Context, h2RespWriter *
}
c.connectedFuse.Connected()
-<-c.shutdownChan
+<-ctx.Done()
c.gracefulShutdown(ctx, rpcClient)
-close(c.shutdownChan)
return nil
}
@ -135,10 +140,8 @@ func (c *HTTP2Connection) gracefulShutdown(ctx context.Context, rpcClient *regis
}
func (c *HTTP2Connection) close() {
-// Send signal to control loop to start graceful shutdown
-c.shutdownChan <- struct{}{}
-// Wait for control loop to close channel
-<-c.shutdownChan
+// Wait for all serve HTTP handlers to return
+c.wg.Wait()
c.conn.Close()
}
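
The shutdown refactor above replaces the two-way shutdownChan handshake with two simpler primitives: the control stream blocks on ctx.Done(), and a sync.WaitGroup tracks in-flight ServeHTTP handlers so close() can wait for them before closing the connection. A rough, self-contained sketch of that pattern follows; the type and function names are illustrative, not cloudflared's.

```go
package main

import (
    "context"
    "fmt"
    "sync"
    "time"
)

type connection struct {
    wg sync.WaitGroup
}

// handleRequest stands in for ServeHTTP: each in-flight handler is tracked.
func (c *connection) handleRequest(id int) {
    defer c.wg.Done()
    time.Sleep(20 * time.Millisecond) // pretend to proxy a request
    fmt.Println("request", id, "finished")
}

// serveControlStream blocks until the context is cancelled, which replaces
// waiting on a dedicated shutdown channel.
func (c *connection) serveControlStream(ctx context.Context) {
    <-ctx.Done()
    fmt.Println("control stream: running graceful shutdown")
}

func main() {
    ctx, cancel := context.WithCancel(context.Background())
    c := &connection{}
    go c.serveControlStream(ctx)

    for i := 0; i < 3; i++ {
        c.wg.Add(1) // in the real code ServeHTTP does the Add/Done itself
        go c.handleRequest(i)
    }

    cancel()    // close() no longer needs a channel handshake
    c.wg.Wait() // wait for every in-flight handler to return
    fmt.Println("connection closed")
}
```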
@ -168,7 +171,8 @@ func (rp *http2RespWriter) WriteRespHeaders(resp *http.Response) error {
}
// Perform user header serialization and set them in the single header
-dest.Set(h2mux.ResponseUserHeadersField, h2mux.SerializeHeaders(userHeaders))
+dest.Set(canonicalResponseUserHeadersField, h2mux.SerializeHeaders(userHeaders))
+rp.setResponseMetaHeader(responseMetaHeaderOrigin)
status := resp.StatusCode
// HTTP2 removes support for 101 Switching Protocols https://tools.ietf.org/html/rfc7540#section-8.1.1
if status == http.StatusSwitchingProtocols {
@ -179,13 +183,14 @@ func (rp *http2RespWriter) WriteRespHeaders(resp *http.Response) error {
}
func (rp *http2RespWriter) WriteErrorResponse(err error) {
-jsonResponseMetaHeader, err := json.Marshal(h2mux.ResponseMetaHeader{Source: h2mux.ResponseSourceCloudflared})
-if err == nil {
-rp.w.Header().Set(h2mux.ResponseMetaHeaderField, string(jsonResponseMetaHeader))
-}
+rp.setResponseMetaHeader(responseMetaHeaderCfd)
rp.w.WriteHeader(http.StatusBadGateway)
}
+func (rp *http2RespWriter) setResponseMetaHeader(value string) {
+rp.w.Header().Set(canonicalResponseMetaHeaderField, value)
+}
func (rp *http2RespWriter) Read(p []byte) (n int, err error) {
return rp.r.Read(p)
}
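
The canonical* variables from connection/header.go come into play here: Go's http.Header stores keys in canonical Title-Case form, so the custom header names are canonicalized once up front (presumably to avoid re-deriving the canonical form on every response). A quick illustration using only the standard library:

```go
package main

import (
    "fmt"
    "net/http"
)

func main() {
    // http.Header keys are stored canonically, e.g. "cf-cloudflared-response-meta"
    // becomes "Cf-Cloudflared-Response-Meta".
    fmt.Println(http.CanonicalHeaderKey("cf-cloudflared-response-meta"))

    h := http.Header{}
    h.Set("cf-cloudflared-response-meta", `{"src":"origin"}`)
    fmt.Println(h.Get("CF-Cloudflared-Response-Meta")) // lookups are canonicalized too
}
```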
@ -195,7 +200,7 @@ func (wr *http2RespWriter) Write(p []byte) (n int, err error) {
}
type wsRespWriter struct {
-h2 *http2RespWriter
+*http2RespWriter
flusher http.Flusher
}
@ -205,34 +210,24 @@ func newWSRespWriter(h2 *http2RespWriter) (*wsRespWriter, error) {
return nil, fmt.Errorf("ResponseWriter doesn't implement http.Flusher")
}
return &wsRespWriter{
-h2: h2,
-flusher: flusher,
+h2,
+flusher,
}, nil
}
-func (rw *wsRespWriter) WriteRespHeaders(resp *http.Response) error {
-err := rw.h2.WriteRespHeaders(resp)
-if err != nil {
-return err
+func (rw *wsRespWriter) WriteRespHeaders(resp *http.Response) (err error) {
+err = rw.http2RespWriter.WriteRespHeaders(resp)
+if err == nil {
+rw.flusher.Flush()
}
-rw.flusher.Flush()
-return nil
-}
-func (rw *wsRespWriter) WriteErrorResponse(err error) {
-rw.h2.WriteErrorResponse(err)
-}
-func (rw *wsRespWriter) Read(p []byte) (n int, err error) {
-return rw.h2.Read(p)
+return
}
func (rw *wsRespWriter) Write(p []byte) (n int, err error) {
-n, err = rw.h2.Write(p)
-if err != nil {
-return
+n, err = rw.http2RespWriter.Write(p)
+if err == nil {
+rw.flusher.Flush()
}
-rw.flusher.Flush()
return
}
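
The wsRespWriter change swaps the named h2 field for an embedded *http2RespWriter, so methods such as WriteErrorResponse and Read are promoted onto wsRespWriter automatically and the hand-written forwarders can be deleted; only the methods that add flushing behaviour remain. A small sketch of that embedding/promotion mechanic, with illustrative types rather than cloudflared's:

```go
package main

import "fmt"

type inner struct{}

func (i *inner) Read(p []byte) (int, error) { return 0, nil }
func (i *inner) Name() string               { return "inner" }

// outer embeds *inner: Read and Name are promoted, so no forwarding
// methods are needed; only behaviour that differs is overridden.
type outer struct {
    *inner
}

func (o *outer) Name() string { return "outer wrapping " + o.inner.Name() }

func main() {
    o := &outer{&inner{}}
    n, _ := o.Read(nil) // promoted straight from *inner
    fmt.Println(n, o.Name())
}
```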

connection/json.go (new file, +7 lines)

@ -0,0 +1,7 @@
package connection
import (
jsoniter "github.com/json-iterator/go"
)
var json = jsoniter.ConfigFastest
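
Because Go import names are file-scoped while this json variable is package-scoped, the rest of the connection package resolves json.Marshal and json.Unmarshal to jsoniter's ConfigFastest without touching any call sites (presumably also why http2.go drops its encoding/json import). A small drop-in sketch of the same trick:

```go
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

// Binding the name json to a jsoniter config makes existing
// json.Marshal / json.Unmarshal call sites use jsoniter transparently.
var json = jsoniter.ConfigFastest

type responseMetaHeader struct {
    Source string `json:"src"`
}

func main() {
    b, err := json.Marshal(responseMetaHeader{Source: "origin"})
    if err != nil {
        panic(err)
    }
    fmt.Println(string(b)) // {"src":"origin"}

    var decoded responseMetaHeader
    if err := json.Unmarshal(b, &decoded); err != nil {
        panic(err)
    }
    fmt.Println(decoded.Source) // origin
}
```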

go.mod (3 lines changed)

@ -15,7 +15,7 @@ require (
github.com/coreos/go-oidc v0.0.0-20171002155002-a93f71fdfe73
github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf
github.com/denisenkom/go-mssqldb v0.0.0-20191001013358-cfbb681360f0
-github.com/equinox-io/equinox v1.2.0
+github.com/equinox-io/equinox v1.2.0 // indirect
github.com/facebookgo/ensure v0.0.0-20160127193407-b4ab57deab51 // indirect
github.com/facebookgo/freeport v0.0.0-20150612182905-d4adf43b75b9 // indirect
github.com/facebookgo/grace v0.0.0-20180706040059-75cf19382434
@ -33,6 +33,7 @@ require (
github.com/gorilla/mux v1.7.3
github.com/gorilla/websocket v1.4.2
github.com/jmoiron/sqlx v1.2.0
+github.com/json-iterator/go v1.1.10
github.com/kr/text v0.2.0 // indirect
github.com/kshvakov/clickhouse v1.3.11
github.com/kylelemons/godebug v1.1.0 // indirect

go.sum (3 lines changed)

@ -369,6 +369,7 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
@ -441,8 +442,10 @@ github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0Qu
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=


@ -2,7 +2,6 @@ package h2mux
import (
"encoding/base64"
"encoding/json"
"fmt"
"net/http"
"net/url"
@ -22,10 +21,6 @@ const (
RequestUserHeadersField = "cf-cloudflared-request-headers"
ResponseUserHeadersField = "cf-cloudflared-response-headers"
-ResponseMetaHeaderField = "cf-cloudflared-response-meta"
-ResponseSourceCloudflared = "cloudflared"
-ResponseSourceOrigin = "origin"
CFAccessTokenHeader = "cf-access-token"
CFJumpDestinationHeader = "CF-Access-Jump-Destination"
CFAccessClientIDHeader = "CF-Access-Client-Id"
@ -152,7 +147,6 @@ func H1ResponseToH2ResponseHeaders(h1 *http.Response) (h2 []Header) {
// Perform user header serialization and set them in the single header
h2 = append(h2, Header{ResponseUserHeadersField, SerializeHeaders(userHeaders)})
return h2
}
@ -238,19 +232,3 @@ func DeserializeHeaders(serializedHeaders string) ([]Header, error) {
return deserialized, nil
}
-type ResponseMetaHeader struct {
-Source string `json:"src"`
-}
-func CreateResponseMetaHeader(headerName, source string) Header {
-jsonResponseMetaHeader, err := json.Marshal(ResponseMetaHeader{Source: source})
-if err != nil {
-panic(err)
-}
-return Header{
-Name: headerName,
-Value: string(jsonResponseMetaHeader),
-}
-}


@ -1,13 +0,0 @@
language: go
go:
- "1.4"
- "1.5"
- "1.7"
- "1.9"
- "1.10"
- "1.11"
- "tip"
matrix:
allow_failures:
- go: tip
script: go test -v github.com/equinox-io/equinox github.com/equinox-io/equinox/proto


@ -1,99 +0,0 @@
# equinox client SDK [![godoc reference](https://godoc.org/github.com/equinox-io/equinox?status.png)](https://godoc.org/github.com/equinox-io/equinox)
Package equinox allows applications to remotely update themselves with the [equinox.io](https://equinox.io) service.
## Minimal Working Example
```go
import "github.com/equinox-io/equinox"
const appID = "<YOUR EQUINOX APP ID>"
var publicKey = []byte(`
-----BEGIN PUBLIC KEY-----
MFYwEAYHKoZIzj0CAQYFK4EEAAoDQgAEtrVmBxQvheRArXjg2vG1xIprWGuCyESx
MMY8pjmjepSy2kuz+nl9aFLqmr+rDNdYvEBqQaZrYMc6k29gjvoQnQ==
-----END PUBLIC KEY-----
`)
func update(channel string) error {
opts := equinox.Options{Channel: channel}
if err := opts.SetPublicKeyPEM(publicKey); err != nil {
return err
}
// check for the update
resp, err := equinox.Check(appID, opts)
switch {
case err == equinox.NotAvailableErr:
fmt.Println("No update available, already at the latest version!")
return nil
case err != nil:
return err
}
// fetch the update and apply it
err = resp.Apply()
if err != nil {
return err
}
fmt.Printf("Updated to new version: %s!\n", resp.ReleaseVersion)
return nil
}
```
## Update To Specific Version
When you specify a channel in the update options, equinox will try to update the application
to the latest release of your application published to that channel. Instead, you may wish to
update the application to a specific (possibly older) version. You can do this by explicitly setting
Version in the Options struct:
```go
opts := equinox.Options{Version: "0.1.2"}
```
## Prompt For Update
You may wish to ask the user for approval before updating to a new version. This is as simple
as calling the Check function and only calling Apply on the returned result if the user approves.
Example:
```go
// check for the update
resp, err := equinox.Check(appID, opts)
switch {
case err == equinox.NotAvailableErr:
fmt.Println("No update available, already at the latest version!")
return nil
case err != nil:
return err
}
fmt.Println("New version available!")
fmt.Println("Version:", resp.ReleaseVersion)
fmt.Println("Name:", resp.ReleaseTitle)
fmt.Println("Details:", resp.ReleaseDescription)
ok := prompt("Would you like to update?")
if !ok {
return
}
err = resp.Apply()
// ...
```
## Generating Keys
All equinox releases must be signed with a private ECDSA key, and all updates verified with the
public key portion. To do that, you'll need to generate a key pair. The equinox release tool can
generate an ecdsa key pair for you easily:
```shell
equinox genkey
```


@ -1,92 +0,0 @@
/*
Package equinox allows applications to remotely update themselves with the equinox.io service.
Minimal Working Example
import "github.com/equinox-io/equinox"
const appID = "<YOUR EQUINOX APP ID>"
var publicKey = []byte(`
-----BEGIN PUBLIC KEY-----
MFYwEAYHKoZIzj0CAQYFK4EEAAoDQgAEtrVmBxQvheRArXjg2vG1xIprWGuCyESx
MMY8pjmjepSy2kuz+nl9aFLqmr+rDNdYvEBqQaZrYMc6k29gjvoQnQ==
-----END PUBLIC KEY-----
`)
func update(channel string) error {
opts := equinox.Options{Channel: channel}
if err := opts.SetPublicKeyPEM(publicKey); err != nil {
return err
}
// check for the update
resp, err := equinox.Check(appID, opts)
switch {
case err == equinox.NotAvailableErr:
fmt.Println("No update available, already at the latest version!")
return nil
case err != nil:
return err
}
// fetch the update and apply it
err = resp.Apply()
if err != nil {
return err
}
fmt.Printf("Updated to new version: %s!\n", resp.ReleaseVersion)
return nil
}
Update To Specific Version
When you specify a channel in the update options, equinox will try to update the application
to the latest release of your application published to that channel. Instead, you may wish to
update the application to a specific (possibly older) version. You can do this by explicitly setting
Version in the Options struct:
opts := equinox.Options{Version: "0.1.2"}
Prompt For Update
You may wish to ask the user for approval before updating to a new version. This is as simple
as calling the Check function and only calling Apply on the returned result if the user approves.
Example:
// check for the update
resp, err := equinox.Check(appID, opts)
switch {
case err == equinox.NotAvailableErr:
fmt.Println("No update available, already at the latest version!")
return nil
case err != nil:
return err
}
fmt.Println("New version available!")
fmt.Println("Version:", resp.ReleaseVersion)
fmt.Println("Name:", resp.ReleaseTitle)
fmt.Println("Details:", resp.ReleaseDescription)
ok := prompt("Would you like to update?")
if !ok {
return
}
err = resp.Apply()
// ...
Generating Keys
All equinox releases must be signed with a private ECDSA key, and all updates verified with the
public key portion. To do that, you'll need to generate a key pair. The equinox release tool can
generate an ecdsa key pair for you easily:
equinox genkey
*/
package equinox


@ -1 +0,0 @@
module github.com/equinox-io/equinox


@ -1,13 +0,0 @@
Copyright 2015 Alan Shreve
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@ -1,65 +0,0 @@
# go-update: Build self-updating Go programs [![godoc reference](https://godoc.org/github.com/inconshreveable/go-update?status.png)](https://godoc.org/github.com/inconshreveable/go-update)
Package update provides functionality to implement secure, self-updating Go programs (or other single-file targets)
A program can update itself by replacing its executable file with a new version.
It provides the flexibility to implement different updating user experiences
like auto-updating, or manual user-initiated updates. It also boasts
advanced features like binary patching and code signing verification.
Example of updating from a URL:
```go
import (
"fmt"
"net/http"
"github.com/inconshreveable/go-update"
)
func doUpdate(url string) error {
resp, err := http.Get(url)
if err != nil {
return err
}
defer resp.Body.Close()
err := update.Apply(resp.Body, update.Options{})
if err != nil {
// error handling
}
return err
}
```
## Features
- Cross platform support (Windows too!)
- Binary patch application
- Checksum verification
- Code signing verification
- Support for updating arbitrary files
## [equinox.io](https://equinox.io)
[equinox.io](https://equinox.io) is a complete ready-to-go updating solution built on top of go-update that provides:
- Hosted updates
- Update channels (stable, beta, nightly, ...)
- Dynamically computed binary diffs
- Automatic key generation and code
- Release tooling with proper code signing
- Update/download metrics
## API Compatibility Promises
The master branch of `go-update` is *not* guaranteed to have a stable API over time. For any production application, you should vendor
your dependency on `go-update` with a tool like git submodules, [gb](http://getgb.io/) or [govendor](https://github.com/kardianos/govendor).
The `go-update` package makes the following promises about API compatibility:
1. A list of all API-breaking changes will be documented in this README.
1. `go-update` will strive for as few API-breaking changes as possible.
## API Breaking Changes
- **Sept 3, 2015**: The `Options` struct passed to `Apply` was changed to be passed by value instead of passed by pointer. Old API at `28de026`.
- **Aug 9, 2015**: 2.0 API. Old API at `221d034` or `gopkg.in/inconshreveable/go-update.v0`.
## License
Apache


@ -1,322 +0,0 @@
package update
import (
"bytes"
"crypto"
"crypto/x509"
"encoding/pem"
"errors"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"github.com/equinox-io/equinox/internal/go-update/internal/osext"
)
var (
openFile = os.OpenFile
)
// Apply performs an update of the current executable (or opts.TargetFile, if set) with the contents of the given io.Reader.
//
// Apply performs the following actions to ensure a safe cross-platform update:
//
// 1. If configured, applies the contents of the update io.Reader as a binary patch.
//
// 2. If configured, computes the checksum of the new executable and verifies it matches.
//
// 3. If configured, verifies the signature with a public key.
//
// 4. Creates a new file, /path/to/.target.new with the TargetMode with the contents of the updated file
//
// 5. Renames /path/to/target to /path/to/.target.old
//
// 6. Renames /path/to/.target.new to /path/to/target
//
// 7. If the final rename is successful, deletes /path/to/.target.old, returns no error. On Windows,
// the removal of /path/to/target.old always fails, so instead Apply hides the old file instead.
//
// 8. If the final rename fails, attempts to roll back by renaming /path/to/.target.old
// back to /path/to/target.
//
// If the roll back operation fails, the file system is left in an inconsistent state (betweet steps 5 and 6) where
// there is no new executable file and the old executable file could not be be moved to its original location. In this
// case you should notify the user of the bad news and ask them to recover manually. Applications can determine whether
// the rollback failed by calling RollbackError, see the documentation on that function for additional detail.
func Apply(update io.Reader, opts Options) error {
// validate
verify := false
switch {
case opts.Signature != nil && opts.PublicKey != nil:
// okay
verify = true
case opts.Signature != nil:
return errors.New("no public key to verify signature with")
case opts.PublicKey != nil:
return errors.New("No signature to verify with")
}
// set defaults
if opts.Hash == 0 {
opts.Hash = crypto.SHA256
}
if opts.Verifier == nil {
opts.Verifier = NewECDSAVerifier()
}
if opts.TargetMode == 0 {
opts.TargetMode = 0755
}
// get target path
var err error
opts.TargetPath, err = opts.getPath()
if err != nil {
return err
}
var newBytes []byte
if opts.Patcher != nil {
if newBytes, err = opts.applyPatch(update); err != nil {
return err
}
} else {
// no patch to apply, go on through
if newBytes, err = ioutil.ReadAll(update); err != nil {
return err
}
}
// verify checksum if requested
if opts.Checksum != nil {
if err = opts.verifyChecksum(newBytes); err != nil {
return err
}
}
if verify {
if err = opts.verifySignature(newBytes); err != nil {
return err
}
}
// get the directory the executable exists in
updateDir := filepath.Dir(opts.TargetPath)
filename := filepath.Base(opts.TargetPath)
// Copy the contents of newbinary to a new executable file
newPath := filepath.Join(updateDir, fmt.Sprintf(".%s.new", filename))
fp, err := openFile(newPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, opts.TargetMode)
if err != nil {
return err
}
defer fp.Close()
_, err = io.Copy(fp, bytes.NewReader(newBytes))
if err != nil {
return err
}
// if we don't call fp.Close(), windows won't let us move the new executable
// because the file will still be "in use"
fp.Close()
// this is where we'll move the executable to so that we can swap in the updated replacement
oldPath := opts.OldSavePath
removeOld := opts.OldSavePath == ""
if removeOld {
oldPath = filepath.Join(updateDir, fmt.Sprintf(".%s.old", filename))
}
// delete any existing old exec file - this is necessary on Windows for two reasons:
// 1. after a successful update, Windows can't remove the .old file because the process is still running
// 2. windows rename operations fail if the destination file already exists
_ = os.Remove(oldPath)
// move the existing executable to a new file in the same directory
err = os.Rename(opts.TargetPath, oldPath)
if err != nil {
return err
}
// move the new exectuable in to become the new program
err = os.Rename(newPath, opts.TargetPath)
if err != nil {
// move unsuccessful
//
// The filesystem is now in a bad state. We have successfully
// moved the existing binary to a new location, but we couldn't move the new
// binary to take its place. That means there is no file where the current executable binary
// used to be!
// Try to rollback by restoring the old binary to its original path.
rerr := os.Rename(oldPath, opts.TargetPath)
if rerr != nil {
return &rollbackErr{err, rerr}
}
return err
}
// move successful, remove the old binary if needed
if removeOld {
errRemove := os.Remove(oldPath)
// windows has trouble with removing old binaries, so hide it instead
if errRemove != nil {
_ = hideFile(oldPath)
}
}
return nil
}
// RollbackError takes an error value returned by Apply and returns the error, if any,
// that occurred when attempting to roll back from a failed update. Applications should
// always call this function on any non-nil errors returned by Apply.
//
// If no rollback was needed or if the rollback was successful, RollbackError returns nil,
// otherwise it returns the error encountered when trying to roll back.
func RollbackError(err error) error {
if err == nil {
return nil
}
if rerr, ok := err.(*rollbackErr); ok {
return rerr.rollbackErr
}
return nil
}
type rollbackErr struct {
error // original error
rollbackErr error // error encountered while rolling back
}
type Options struct {
// TargetPath defines the path to the file to update.
// The emptry string means 'the executable file of the running program'.
TargetPath string
// Create TargetPath replacement with this file mode. If zero, defaults to 0755.
TargetMode os.FileMode
// Checksum of the new binary to verify against. If nil, no checksum or signature verification is done.
Checksum []byte
// Public key to use for signature verification. If nil, no signature verification is done.
PublicKey crypto.PublicKey
// Signature to verify the updated file. If nil, no signature verification is done.
Signature []byte
// Pluggable signature verification algorithm. If nil, ECDSA is used.
Verifier Verifier
// Use this hash function to generate the checksum. If not set, SHA256 is used.
Hash crypto.Hash
// If nil, treat the update as a complete replacement for the contents of the file at TargetPath.
// If non-nil, treat the update contents as a patch and use this object to apply the patch.
Patcher Patcher
// Store the old executable file at this path after a successful update.
// The empty string means the old executable file will be removed after the update.
OldSavePath string
}
// CheckPermissions determines whether the process has the correct permissions to
// perform the requested update. If the update can proceed, it returns nil, otherwise
// it returns the error that would occur if an update were attempted.
func (o *Options) CheckPermissions() error {
// get the directory the file exists in
path, err := o.getPath()
if err != nil {
return err
}
fileDir := filepath.Dir(path)
fileName := filepath.Base(path)
// attempt to open a file in the file's directory
newPath := filepath.Join(fileDir, fmt.Sprintf(".%s.new", fileName))
fp, err := openFile(newPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, o.TargetMode)
if err != nil {
return err
}
fp.Close()
_ = os.Remove(newPath)
return nil
}
// SetPublicKeyPEM is a convenience method to set the PublicKey property
// used for checking a completed update's signature by parsing a
// Public Key formatted as PEM data.
func (o *Options) SetPublicKeyPEM(pembytes []byte) error {
block, _ := pem.Decode(pembytes)
if block == nil {
return errors.New("couldn't parse PEM data")
}
pub, err := x509.ParsePKIXPublicKey(block.Bytes)
if err != nil {
return err
}
o.PublicKey = pub
return nil
}
func (o *Options) getPath() (string, error) {
if o.TargetPath == "" {
return osext.Executable()
} else {
return o.TargetPath, nil
}
}
func (o *Options) applyPatch(patch io.Reader) ([]byte, error) {
// open the file to patch
old, err := os.Open(o.TargetPath)
if err != nil {
return nil, err
}
defer old.Close()
// apply the patch
var applied bytes.Buffer
if err = o.Patcher.Patch(old, &applied, patch); err != nil {
return nil, err
}
return applied.Bytes(), nil
}
func (o *Options) verifyChecksum(updated []byte) error {
checksum, err := checksumFor(o.Hash, updated)
if err != nil {
return err
}
if !bytes.Equal(o.Checksum, checksum) {
return fmt.Errorf("Updated file has wrong checksum. Expected: %x, got: %x", o.Checksum, checksum)
}
return nil
}
func (o *Options) verifySignature(updated []byte) error {
checksum, err := checksumFor(o.Hash, updated)
if err != nil {
return err
}
return o.Verifier.VerifySignature(checksum, o.Signature, o.Hash, o.PublicKey)
}
func checksumFor(h crypto.Hash, payload []byte) ([]byte, error) {
if !h.Available() {
return nil, errors.New("requested hash function not available")
}
hash := h.New()
hash.Write(payload) // guaranteed not to error
return hash.Sum([]byte{}), nil
}


@ -1,172 +0,0 @@
/*
Package update provides functionality to implement secure, self-updating Go programs (or other single-file targets).
For complete updating solutions please see Equinox (https://equinox.io) and go-tuf (https://github.com/flynn/go-tuf).
Basic Example
This example shows how to update a program remotely from a URL.
import (
"fmt"
"net/http"
"github.com/inconshreveable/go-update"
)
func doUpdate(url string) error {
// request the new file
resp, err := http.Get(url)
if err != nil {
return err
}
defer resp.Body.Close()
err := update.Apply(resp.Body, update.Options{})
if err != nil {
if rerr := update.RollbackError(err); rerr != nil {
fmt.Println("Failed to rollback from bad update: %v", rerr)
}
}
return err
}
Binary Patching
Go binaries can often be large. It can be advantageous to only ship a binary patch to a client
instead of the complete program text of a new version.
This example shows how to update a program with a bsdiff binary patch. Other patch formats
may be applied by implementing the Patcher interface.
import (
"encoding/hex"
"io"
"github.com/inconshreveable/go-update"
)
func updateWithPatch(patch io.Reader) error {
err := update.Apply(patch, update.Options{
Patcher: update.NewBSDiffPatcher()
})
if err != nil {
// error handling
}
return err
}
Checksum Verification
Updating executable code on a computer can be a dangerous operation unless you
take the appropriate steps to guarantee the authenticity of the new code. While
checksum verification is important, it should always be combined with signature
verification (next section) to guarantee that the code came from a trusted party.
go-update validates SHA256 checksums by default, but this is pluggable via the Hash
property on the Options struct.
This example shows how to guarantee that the newly-updated binary is verified to
have an appropriate checksum (that was otherwise retrived via a secure channel)
specified as a hex string.
import (
"crypto"
_ "crypto/sha256"
"encoding/hex"
"io"
"github.com/inconshreveable/go-update"
)
func updateWithChecksum(binary io.Reader, hexChecksum string) error {
checksum, err := hex.DecodeString(hexChecksum)
if err != nil {
return err
}
err = update.Apply(binary, update.Options{
Hash: crypto.SHA256, // this is the default, you don't need to specify it
Checksum: checksum,
})
if err != nil {
// error handling
}
return err
}
Cryptographic Signature Verification
Cryptographic verification of new code from an update is an extremely important way to guarantee the
security and integrity of your updates.
Verification is performed by validating the signature of a hash of the new file. This
means nothing changes if you apply your update with a patch.
This example shows how to add signature verification to your updates. To make all of this work
an application distributor must first create a public/private key pair and embed the public key
into their application. When they issue a new release, the issuer must sign the new executable file
with the private key and distribute the signature along with the update.
import (
"crypto"
_ "crypto/sha256"
"encoding/hex"
"io"
"github.com/inconshreveable/go-update"
)
var publicKey = []byte(`
-----BEGIN PUBLIC KEY-----
MFYwEAYHKoZIzj0CAQYFK4EEAAoDQgAEtrVmBxQvheRArXjg2vG1xIprWGuCyESx
MMY8pjmjepSy2kuz+nl9aFLqmr+rDNdYvEBqQaZrYMc6k29gjvoQnQ==
-----END PUBLIC KEY-----
`)
func verifiedUpdate(binary io.Reader, hexChecksum, hexSignature string) {
checksum, err := hex.DecodeString(hexChecksum)
if err != nil {
return err
}
signature, err := hex.DecodeString(hexSignature)
if err != nil {
return err
}
opts := update.Options{
Checksum: checksum,
Signature: signature,
Hash: crypto.SHA256, // this is the default, you don't need to specify it
Verifier: update.NewECDSAVerifier(), // this is the default, you don't need to specify it
}
err = opts.SetPublicKeyPEM(publicKey)
if err != nil {
return err
}
err = update.Apply(binary, opts)
if err != nil {
// error handling
}
return err
}
Building Single-File Go Binaries
In order to update a Go application with go-update, you must distributed it as a single executable.
This is often easy, but some applications require static assets (like HTML and CSS asset files or TLS certificates).
In order to update applications like these, you'll want to make sure to embed those asset files into
the distributed binary with a tool like go-bindata (my favorite): https://github.com/jteeuwen/go-bindata
Non-Goals
Mechanisms and protocols for determining whether an update should be applied and, if so, which one are
out of scope for this package. Please consult go-tuf (https://github.com/flynn/go-tuf) or Equinox (https://equinox.io)
for more complete solutions.
go-update only works for self-updating applications that are distributed as a single binary, i.e.
applications that do not have additional assets or dependency files.
Updating application that are distributed as mutliple on-disk files is out of scope, although this
may change in future versions of this library.
*/
package update


@ -1,7 +0,0 @@
// +build !windows
package update
func hideFile(path string) error {
return nil
}


@ -1,19 +0,0 @@
package update
import (
"syscall"
"unsafe"
)
func hideFile(path string) error {
kernel32 := syscall.NewLazyDLL("kernel32.dll")
setFileAttributes := kernel32.NewProc("SetFileAttributesW")
r1, _, err := setFileAttributes.Call(uintptr(unsafe.Pointer(syscall.StringToUTF16Ptr(path))), 2)
if r1 == 0 {
return err
} else {
return nil
}
}


@ -1,22 +0,0 @@
Copyright 2012 Keith Rarick
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.


@ -1,7 +0,0 @@
# binarydist
Package binarydist implements binary diff and patch as described on
<http://www.daemonology.net/bsdiff/>. It reads and writes files
compatible with the tools there.
Documentation at <http://go.pkgdoc.org/github.com/kr/binarydist>.


@ -1,40 +0,0 @@
package binarydist
import (
"io"
"os/exec"
)
type bzip2Writer struct {
c *exec.Cmd
w io.WriteCloser
}
func (w bzip2Writer) Write(b []byte) (int, error) {
return w.w.Write(b)
}
func (w bzip2Writer) Close() error {
if err := w.w.Close(); err != nil {
return err
}
return w.c.Wait()
}
// Package compress/bzip2 implements only decompression,
// so we'll fake it by running bzip2 in another process.
func newBzip2Writer(w io.Writer) (wc io.WriteCloser, err error) {
var bw bzip2Writer
bw.c = exec.Command("bzip2", "-c")
bw.c.Stdout = w
if bw.w, err = bw.c.StdinPipe(); err != nil {
return nil, err
}
if err = bw.c.Start(); err != nil {
return nil, err
}
return bw, nil
}


@ -1,408 +0,0 @@
package binarydist
import (
"bytes"
"encoding/binary"
"io"
"io/ioutil"
)
func swap(a []int, i, j int) { a[i], a[j] = a[j], a[i] }
func split(I, V []int, start, length, h int) {
var i, j, k, x, jj, kk int
if length < 16 {
for k = start; k < start+length; k += j {
j = 1
x = V[I[k]+h]
for i = 1; k+i < start+length; i++ {
if V[I[k+i]+h] < x {
x = V[I[k+i]+h]
j = 0
}
if V[I[k+i]+h] == x {
swap(I, k+i, k+j)
j++
}
}
for i = 0; i < j; i++ {
V[I[k+i]] = k + j - 1
}
if j == 1 {
I[k] = -1
}
}
return
}
x = V[I[start+length/2]+h]
jj = 0
kk = 0
for i = start; i < start+length; i++ {
if V[I[i]+h] < x {
jj++
}
if V[I[i]+h] == x {
kk++
}
}
jj += start
kk += jj
i = start
j = 0
k = 0
for i < jj {
if V[I[i]+h] < x {
i++
} else if V[I[i]+h] == x {
swap(I, i, jj+j)
j++
} else {
swap(I, i, kk+k)
k++
}
}
for jj+j < kk {
if V[I[jj+j]+h] == x {
j++
} else {
swap(I, jj+j, kk+k)
k++
}
}
if jj > start {
split(I, V, start, jj-start, h)
}
for i = 0; i < kk-jj; i++ {
V[I[jj+i]] = kk - 1
}
if jj == kk-1 {
I[jj] = -1
}
if start+length > kk {
split(I, V, kk, start+length-kk, h)
}
}
func qsufsort(obuf []byte) []int {
var buckets [256]int
var i, h int
I := make([]int, len(obuf)+1)
V := make([]int, len(obuf)+1)
for _, c := range obuf {
buckets[c]++
}
for i = 1; i < 256; i++ {
buckets[i] += buckets[i-1]
}
copy(buckets[1:], buckets[:])
buckets[0] = 0
for i, c := range obuf {
buckets[c]++
I[buckets[c]] = i
}
I[0] = len(obuf)
for i, c := range obuf {
V[i] = buckets[c]
}
V[len(obuf)] = 0
for i = 1; i < 256; i++ {
if buckets[i] == buckets[i-1]+1 {
I[buckets[i]] = -1
}
}
I[0] = -1
for h = 1; I[0] != -(len(obuf) + 1); h += h {
var n int
for i = 0; i < len(obuf)+1; {
if I[i] < 0 {
n -= I[i]
i -= I[i]
} else {
if n != 0 {
I[i-n] = -n
}
n = V[I[i]] + 1 - i
split(I, V, i, n, h)
i += n
n = 0
}
}
if n != 0 {
I[i-n] = -n
}
}
for i = 0; i < len(obuf)+1; i++ {
I[V[i]] = i
}
return I
}
func matchlen(a, b []byte) (i int) {
for i < len(a) && i < len(b) && a[i] == b[i] {
i++
}
return i
}
func search(I []int, obuf, nbuf []byte, st, en int) (pos, n int) {
if en-st < 2 {
x := matchlen(obuf[I[st]:], nbuf)
y := matchlen(obuf[I[en]:], nbuf)
if x > y {
return I[st], x
} else {
return I[en], y
}
}
x := st + (en-st)/2
if bytes.Compare(obuf[I[x]:], nbuf) < 0 {
return search(I, obuf, nbuf, x, en)
} else {
return search(I, obuf, nbuf, st, x)
}
panic("unreached")
}
// Diff computes the difference between old and new, according to the bsdiff
// algorithm, and writes the result to patch.
func Diff(old, new io.Reader, patch io.Writer) error {
obuf, err := ioutil.ReadAll(old)
if err != nil {
return err
}
nbuf, err := ioutil.ReadAll(new)
if err != nil {
return err
}
pbuf, err := diffBytes(obuf, nbuf)
if err != nil {
return err
}
_, err = patch.Write(pbuf)
return err
}
func diffBytes(obuf, nbuf []byte) ([]byte, error) {
var patch seekBuffer
err := diff(obuf, nbuf, &patch)
if err != nil {
return nil, err
}
return patch.buf, nil
}
func diff(obuf, nbuf []byte, patch io.WriteSeeker) error {
var lenf int
I := qsufsort(obuf)
db := make([]byte, len(nbuf))
eb := make([]byte, len(nbuf))
var dblen, eblen int
var hdr header
hdr.Magic = magic
hdr.NewSize = int64(len(nbuf))
err := binary.Write(patch, signMagLittleEndian{}, &hdr)
if err != nil {
return err
}
// Compute the differences, writing ctrl as we go
pfbz2, err := newBzip2Writer(patch)
if err != nil {
return err
}
var scan, pos, length int
var lastscan, lastpos, lastoffset int
for scan < len(nbuf) {
var oldscore int
scan += length
for scsc := scan; scan < len(nbuf); scan++ {
pos, length = search(I, obuf, nbuf[scan:], 0, len(obuf))
for ; scsc < scan+length; scsc++ {
if scsc+lastoffset < len(obuf) &&
obuf[scsc+lastoffset] == nbuf[scsc] {
oldscore++
}
}
if (length == oldscore && length != 0) || length > oldscore+8 {
break
}
if scan+lastoffset < len(obuf) && obuf[scan+lastoffset] == nbuf[scan] {
oldscore--
}
}
if length != oldscore || scan == len(nbuf) {
var s, Sf int
lenf = 0
for i := 0; lastscan+i < scan && lastpos+i < len(obuf); {
if obuf[lastpos+i] == nbuf[lastscan+i] {
s++
}
i++
if s*2-i > Sf*2-lenf {
Sf = s
lenf = i
}
}
lenb := 0
if scan < len(nbuf) {
var s, Sb int
for i := 1; (scan >= lastscan+i) && (pos >= i); i++ {
if obuf[pos-i] == nbuf[scan-i] {
s++
}
if s*2-i > Sb*2-lenb {
Sb = s
lenb = i
}
}
}
if lastscan+lenf > scan-lenb {
overlap := (lastscan + lenf) - (scan - lenb)
s := 0
Ss := 0
lens := 0
for i := 0; i < overlap; i++ {
if nbuf[lastscan+lenf-overlap+i] == obuf[lastpos+lenf-overlap+i] {
s++
}
if nbuf[scan-lenb+i] == obuf[pos-lenb+i] {
s--
}
if s > Ss {
Ss = s
lens = i + 1
}
}
lenf += lens - overlap
lenb -= lens
}
for i := 0; i < lenf; i++ {
db[dblen+i] = nbuf[lastscan+i] - obuf[lastpos+i]
}
for i := 0; i < (scan-lenb)-(lastscan+lenf); i++ {
eb[eblen+i] = nbuf[lastscan+lenf+i]
}
dblen += lenf
eblen += (scan - lenb) - (lastscan + lenf)
err = binary.Write(pfbz2, signMagLittleEndian{}, int64(lenf))
if err != nil {
pfbz2.Close()
return err
}
val := (scan - lenb) - (lastscan + lenf)
err = binary.Write(pfbz2, signMagLittleEndian{}, int64(val))
if err != nil {
pfbz2.Close()
return err
}
val = (pos - lenb) - (lastpos + lenf)
err = binary.Write(pfbz2, signMagLittleEndian{}, int64(val))
if err != nil {
pfbz2.Close()
return err
}
lastscan = scan - lenb
lastpos = pos - lenb
lastoffset = pos - scan
}
}
err = pfbz2.Close()
if err != nil {
return err
}
// Compute size of compressed ctrl data
l64, err := patch.Seek(0, 1)
if err != nil {
return err
}
hdr.CtrlLen = int64(l64 - 32)
// Write compressed diff data
pfbz2, err = newBzip2Writer(patch)
if err != nil {
return err
}
n, err := pfbz2.Write(db[:dblen])
if err != nil {
pfbz2.Close()
return err
}
if n != dblen {
pfbz2.Close()
return io.ErrShortWrite
}
err = pfbz2.Close()
if err != nil {
return err
}
// Compute size of compressed diff data
n64, err := patch.Seek(0, 1)
if err != nil {
return err
}
hdr.DiffLen = n64 - l64
// Write compressed extra data
pfbz2, err = newBzip2Writer(patch)
if err != nil {
return err
}
n, err = pfbz2.Write(eb[:eblen])
if err != nil {
pfbz2.Close()
return err
}
if n != eblen {
pfbz2.Close()
return io.ErrShortWrite
}
err = pfbz2.Close()
if err != nil {
return err
}
// Seek to the beginning, write the header, and close the file
_, err = patch.Seek(0, 0)
if err != nil {
return err
}
err = binary.Write(patch, signMagLittleEndian{}, &hdr)
if err != nil {
return err
}
return nil
}


@ -1,24 +0,0 @@
// Package binarydist implements binary diff and patch as described on
// http://www.daemonology.net/bsdiff/. It reads and writes files
// compatible with the tools there.
package binarydist
var magic = [8]byte{'B', 'S', 'D', 'I', 'F', 'F', '4', '0'}
// File format:
// 0 8 "BSDIFF40"
// 8 8 X
// 16 8 Y
// 24 8 sizeof(newfile)
// 32 X bzip2(control block)
// 32+X Y bzip2(diff block)
// 32+X+Y ??? bzip2(extra block)
// with control block a set of triples (x,y,z) meaning "add x bytes
// from oldfile to x bytes from the diff block; copy y bytes from the
// extra block; seek forwards in oldfile by z bytes".
type header struct {
Magic [8]byte
CtrlLen int64
DiffLen int64
NewSize int64
}


@ -1,53 +0,0 @@
package binarydist
// SignMagLittleEndian is the numeric encoding used by the bsdiff tools.
// It implements binary.ByteOrder using a sign-magnitude format
// and little-endian byte order. Only methods Uint64 and String
// have been written; the rest panic.
type signMagLittleEndian struct{}
func (signMagLittleEndian) Uint16(b []byte) uint16 { panic("unimplemented") }
func (signMagLittleEndian) PutUint16(b []byte, v uint16) { panic("unimplemented") }
func (signMagLittleEndian) Uint32(b []byte) uint32 { panic("unimplemented") }
func (signMagLittleEndian) PutUint32(b []byte, v uint32) { panic("unimplemented") }
func (signMagLittleEndian) Uint64(b []byte) uint64 {
y := int64(b[0]) |
int64(b[1])<<8 |
int64(b[2])<<16 |
int64(b[3])<<24 |
int64(b[4])<<32 |
int64(b[5])<<40 |
int64(b[6])<<48 |
int64(b[7]&0x7f)<<56
if b[7]&0x80 != 0 {
y = -y
}
return uint64(y)
}
func (signMagLittleEndian) PutUint64(b []byte, v uint64) {
x := int64(v)
neg := x < 0
if neg {
x = -x
}
b[0] = byte(x)
b[1] = byte(x >> 8)
b[2] = byte(x >> 16)
b[3] = byte(x >> 24)
b[4] = byte(x >> 32)
b[5] = byte(x >> 40)
b[6] = byte(x >> 48)
b[7] = byte(x >> 56)
if neg {
b[7] |= 0x80
}
}
func (signMagLittleEndian) String() string { return "signMagLittleEndian" }


@ -1,109 +0,0 @@
package binarydist
import (
"bytes"
"compress/bzip2"
"encoding/binary"
"errors"
"io"
"io/ioutil"
)
var ErrCorrupt = errors.New("corrupt patch")
// Patch applies patch to old, according to the bspatch algorithm,
// and writes the result to new.
func Patch(old io.Reader, new io.Writer, patch io.Reader) error {
var hdr header
err := binary.Read(patch, signMagLittleEndian{}, &hdr)
if err != nil {
return err
}
if hdr.Magic != magic {
return ErrCorrupt
}
if hdr.CtrlLen < 0 || hdr.DiffLen < 0 || hdr.NewSize < 0 {
return ErrCorrupt
}
ctrlbuf := make([]byte, hdr.CtrlLen)
_, err = io.ReadFull(patch, ctrlbuf)
if err != nil {
return err
}
cpfbz2 := bzip2.NewReader(bytes.NewReader(ctrlbuf))
diffbuf := make([]byte, hdr.DiffLen)
_, err = io.ReadFull(patch, diffbuf)
if err != nil {
return err
}
dpfbz2 := bzip2.NewReader(bytes.NewReader(diffbuf))
// The entire rest of the file is the extra block.
epfbz2 := bzip2.NewReader(patch)
obuf, err := ioutil.ReadAll(old)
if err != nil {
return err
}
nbuf := make([]byte, hdr.NewSize)
var oldpos, newpos int64
for newpos < hdr.NewSize {
var ctrl struct{ Add, Copy, Seek int64 }
err = binary.Read(cpfbz2, signMagLittleEndian{}, &ctrl)
if err != nil {
return err
}
// Sanity-check
if newpos+ctrl.Add > hdr.NewSize {
return ErrCorrupt
}
// Read diff string
_, err = io.ReadFull(dpfbz2, nbuf[newpos:newpos+ctrl.Add])
if err != nil {
return ErrCorrupt
}
// Add old data to diff string
for i := int64(0); i < ctrl.Add; i++ {
if oldpos+i >= 0 && oldpos+i < int64(len(obuf)) {
nbuf[newpos+i] += obuf[oldpos+i]
}
}
// Adjust pointers
newpos += ctrl.Add
oldpos += ctrl.Add
// Sanity-check
if newpos+ctrl.Copy > hdr.NewSize {
return ErrCorrupt
}
// Read extra string
_, err = io.ReadFull(epfbz2, nbuf[newpos:newpos+ctrl.Copy])
if err != nil {
return ErrCorrupt
}
// Adjust pointers
newpos += ctrl.Copy
oldpos += ctrl.Seek
}
// Write the new file
for len(nbuf) > 0 {
n, err := new.Write(nbuf)
if err != nil {
return err
}
nbuf = nbuf[n:]
}
return nil
}


@ -1,43 +0,0 @@
package binarydist
import (
"errors"
)
type seekBuffer struct {
buf []byte
pos int
}
func (b *seekBuffer) Write(p []byte) (n int, err error) {
n = copy(b.buf[b.pos:], p)
if n == len(p) {
b.pos += n
return n, nil
}
b.buf = append(b.buf, p[n:]...)
b.pos += len(p)
return len(p), nil
}
func (b *seekBuffer) Seek(offset int64, whence int) (ret int64, err error) {
var abs int64
switch whence {
case 0:
abs = offset
case 1:
abs = int64(b.pos) + offset
case 2:
abs = int64(len(b.buf)) + offset
default:
return 0, errors.New("binarydist: invalid whence")
}
if abs < 0 {
return 0, errors.New("binarydist: negative position")
}
if abs >= 1<<31 {
return 0, errors.New("binarydist: position out of range")
}
b.pos = int(abs)
return abs, nil
}


@ -1,27 +0,0 @@
Copyright (c) 2012 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


@ -1,16 +0,0 @@
### Extensions to the "os" package.
## Find the current Executable and ExecutableFolder.
There is sometimes utility in finding the current executable file
that is running. This can be used for upgrading the current executable
or finding resources located relative to the executable file. Both
working directory and the os.Args[0] value are arbitrary and cannot
be relied on; os.Args[0] can be "faked".
Multi-platform and supports:
* Linux
* OS X
* Windows
* Plan 9
* BSDs.


@ -1,27 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Extensions to the standard "os" package.
package osext
import "path/filepath"
// Executable returns an absolute path that can be used to
// re-invoke the current program.
// It may not be valid after the current program exits.
func Executable() (string, error) {
p, err := executable()
return filepath.Clean(p), err
}
// Returns same path as Executable, returns just the folder
// path. Excludes the executable name and any trailing slash.
func ExecutableFolder() (string, error) {
p, err := Executable()
if err != nil {
return "", err
}
return filepath.Dir(p), nil
}


@ -1,20 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package osext
import (
"os"
"strconv"
"syscall"
)
func executable() (string, error) {
f, err := os.Open("/proc/" + strconv.Itoa(os.Getpid()) + "/text")
if err != nil {
return "", err
}
defer f.Close()
return syscall.Fd2path(int(f.Fd()))
}


@ -1,36 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux netbsd openbsd solaris dragonfly
package osext
import (
"errors"
"fmt"
"os"
"runtime"
"strings"
)
func executable() (string, error) {
switch runtime.GOOS {
case "linux":
const deletedTag = " (deleted)"
execpath, err := os.Readlink("/proc/self/exe")
if err != nil {
return execpath, err
}
execpath = strings.TrimSuffix(execpath, deletedTag)
execpath = strings.TrimPrefix(execpath, deletedTag)
return execpath, nil
case "netbsd":
return os.Readlink("/proc/curproc/exe")
case "openbsd", "dragonfly":
return os.Readlink("/proc/curproc/file")
case "solaris":
return os.Readlink(fmt.Sprintf("/proc/%d/path/a.out", os.Getpid()))
}
return "", errors.New("ExecPath not implemented for " + runtime.GOOS)
}


@ -1,79 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build darwin freebsd
package osext
import (
"os"
"path/filepath"
"runtime"
"syscall"
"unsafe"
)
var initCwd, initCwdErr = os.Getwd()
func executable() (string, error) {
var mib [4]int32
switch runtime.GOOS {
case "freebsd":
mib = [4]int32{1 /* CTL_KERN */, 14 /* KERN_PROC */, 12 /* KERN_PROC_PATHNAME */, -1}
case "darwin":
mib = [4]int32{1 /* CTL_KERN */, 38 /* KERN_PROCARGS */, int32(os.Getpid()), -1}
}
n := uintptr(0)
// Get length.
_, _, errNum := syscall.Syscall6(syscall.SYS___SYSCTL, uintptr(unsafe.Pointer(&mib[0])), 4, 0, uintptr(unsafe.Pointer(&n)), 0, 0)
if errNum != 0 {
return "", errNum
}
if n == 0 { // This shouldn't happen.
return "", nil
}
buf := make([]byte, n)
_, _, errNum = syscall.Syscall6(syscall.SYS___SYSCTL, uintptr(unsafe.Pointer(&mib[0])), 4, uintptr(unsafe.Pointer(&buf[0])), uintptr(unsafe.Pointer(&n)), 0, 0)
if errNum != 0 {
return "", errNum
}
if n == 0 { // This shouldn't happen.
return "", nil
}
for i, v := range buf {
if v == 0 {
buf = buf[:i]
break
}
}
var err error
execPath := string(buf)
// execPath will not be empty due to above checks.
// Try to get the absolute path if the execPath is not rooted.
if execPath[0] != '/' {
execPath, err = getAbs(execPath)
if err != nil {
return execPath, err
}
}
// For darwin KERN_PROCARGS may return the path to a symlink rather than the
// actual executable.
if runtime.GOOS == "darwin" {
if execPath, err = filepath.EvalSymlinks(execPath); err != nil {
return execPath, err
}
}
return execPath, nil
}
func getAbs(execPath string) (string, error) {
if initCwdErr != nil {
return execPath, initCwdErr
}
// The execPath may begin with a "../" or a "./" so clean it first.
// Join the two paths, trailing and starting slashes undetermined, so use
// the generic Join function.
return filepath.Join(initCwd, filepath.Clean(execPath)), nil
}


@ -1,34 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package osext
import (
"syscall"
"unicode/utf16"
"unsafe"
)
var (
kernel = syscall.MustLoadDLL("kernel32.dll")
getModuleFileNameProc = kernel.MustFindProc("GetModuleFileNameW")
)
// GetModuleFileName() with hModule = NULL
func executable() (exePath string, err error) {
return getModuleFileName()
}
func getModuleFileName() (string, error) {
var n uint32
b := make([]uint16, syscall.MAX_PATH)
size := uint32(len(b))
r0, _, e1 := getModuleFileNameProc.Call(0, uintptr(unsafe.Pointer(&b[0])), uintptr(size))
n = uint32(r0)
if n == 0 {
return "", e1
}
return string(utf16.Decode(b[0:n])), nil
}


@ -1,24 +0,0 @@
package update
import (
"io"
"github.com/equinox-io/equinox/internal/go-update/internal/binarydist"
)
// Patcher defines an interface for applying binary patches to an old item to get an updated item.
type Patcher interface {
Patch(old io.Reader, new io.Writer, patch io.Reader) error
}
type patchFn func(io.Reader, io.Writer, io.Reader) error
func (fn patchFn) Patch(old io.Reader, new io.Writer, patch io.Reader) error {
return fn(old, new, patch)
}
// NewBSDifferPatcher returns a new Patcher that applies binary patches using
// the bsdiff algorithm. See http://www.daemonology.net/bsdiff/
func NewBSDiffPatcher() Patcher {
return patchFn(binarydist.Patch)
}


@ -1,74 +0,0 @@
package update
import (
"crypto"
"crypto/dsa"
"crypto/ecdsa"
"crypto/rsa"
"encoding/asn1"
"errors"
"math/big"
)
// Verifier defines an interface for verfiying an update's signature with a public key.
type Verifier interface {
VerifySignature(checksum, signature []byte, h crypto.Hash, publicKey crypto.PublicKey) error
}
type verifyFn func([]byte, []byte, crypto.Hash, crypto.PublicKey) error
func (fn verifyFn) VerifySignature(checksum []byte, signature []byte, hash crypto.Hash, publicKey crypto.PublicKey) error {
return fn(checksum, signature, hash, publicKey)
}
// NewRSAVerifier returns a Verifier that uses the RSA algorithm to verify updates.
func NewRSAVerifier() Verifier {
return verifyFn(func(checksum, signature []byte, hash crypto.Hash, publicKey crypto.PublicKey) error {
key, ok := publicKey.(*rsa.PublicKey)
if !ok {
return errors.New("not a valid RSA public key")
}
return rsa.VerifyPKCS1v15(key, hash, checksum, signature)
})
}
type rsDER struct {
R *big.Int
S *big.Int
}
// NewECDSAVerifier returns a Verifier that uses the ECDSA algorithm to verify updates.
func NewECDSAVerifier() Verifier {
return verifyFn(func(checksum, signature []byte, hash crypto.Hash, publicKey crypto.PublicKey) error {
key, ok := publicKey.(*ecdsa.PublicKey)
if !ok {
return errors.New("not a valid ECDSA public key")
}
var rs rsDER
if _, err := asn1.Unmarshal(signature, &rs); err != nil {
return err
}
if !ecdsa.Verify(key, checksum, rs.R, rs.S) {
return errors.New("failed to verify ecsda signature")
}
return nil
})
}
// NewDSAVerifier returns a Verifier that uses the DSA algorithm to verify updates.
func NewDSAVerifier() Verifier {
return verifyFn(func(checksum, signature []byte, hash crypto.Hash, publicKey crypto.PublicKey) error {
key, ok := publicKey.(*dsa.PublicKey)
if !ok {
return errors.New("not a valid DSA public key")
}
var rs rsDER
if _, err := asn1.Unmarshal(signature, &rs); err != nil {
return err
}
if !dsa.Verify(key, checksum, rs.R, rs.S) {
return errors.New("failed to verify ecsda signature")
}
return nil
})
}


@ -1,27 +0,0 @@
Copyright (c) 2012 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -1,16 +0,0 @@
### Extensions to the "os" package.
## Find the current Executable and ExecutableFolder.
There is sometimes utility in finding the current executable file
that is running. This can be used for upgrading the current executable
or finding resources located relative to the executable file. Both
working directory and the os.Args[0] value are arbitrary and cannot
be relied on; os.Args[0] can be "faked".
Multi-platform and supports:
* Linux
* OS X
* Windows
* Plan 9
* BSDs.

View File

@ -1,27 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Extensions to the standard "os" package.
package osext
import "path/filepath"
// Executable returns an absolute path that can be used to
// re-invoke the current program.
// It may not be valid after the current program exits.
func Executable() (string, error) {
p, err := executable()
return filepath.Clean(p), err
}
// ExecutableFolder returns the folder containing the path returned by
// Executable. It excludes the executable name and any trailing slash.
func ExecutableFolder() (string, error) {
p, err := Executable()
if err != nil {
return "", err
}
return filepath.Dir(p), nil
}
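A hedged usage sketch for these two helpers, written as an example test inside the package (the package is internal, so it cannot be imported directly from outside the module); the paths mentioned in the comment are illustrative only.

```go
package osext

import "fmt"

// ExampleExecutable shows the typical lookup: the executable path first,
// then the folder that contains it.
func ExampleExecutable() {
	exe, err := Executable()
	if err != nil {
		fmt.Println("cannot determine executable:", err)
		return
	}
	dir, _ := ExecutableFolder()
	// e.g. exe = /usr/local/bin/mytool, dir = /usr/local/bin
	fmt.Println(len(exe) > 0, len(dir) > 0)
}
```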

View File

@ -1,20 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package osext
import (
"os"
"strconv"
"syscall"
)
func executable() (string, error) {
f, err := os.Open("/proc/" + strconv.Itoa(os.Getpid()) + "/text")
if err != nil {
return "", err
}
defer f.Close()
return syscall.Fd2path(int(f.Fd()))
}

View File

@ -1,36 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build linux netbsd openbsd solaris dragonfly
package osext
import (
"errors"
"fmt"
"os"
"runtime"
"strings"
)
func executable() (string, error) {
switch runtime.GOOS {
case "linux":
const deletedTag = " (deleted)"
execpath, err := os.Readlink("/proc/self/exe")
if err != nil {
return execpath, err
}
execpath = strings.TrimSuffix(execpath, deletedTag)
execpath = strings.TrimPrefix(execpath, deletedTag)
return execpath, nil
case "netbsd":
return os.Readlink("/proc/curproc/exe")
case "openbsd", "dragonfly":
return os.Readlink("/proc/curproc/file")
case "solaris":
return os.Readlink(fmt.Sprintf("/proc/%d/path/a.out", os.Getpid()))
}
return "", errors.New("ExecPath not implemented for " + runtime.GOOS)
}

View File

@ -1,79 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build darwin freebsd
package osext
import (
"os"
"path/filepath"
"runtime"
"syscall"
"unsafe"
)
var initCwd, initCwdErr = os.Getwd()
func executable() (string, error) {
var mib [4]int32
switch runtime.GOOS {
case "freebsd":
mib = [4]int32{1 /* CTL_KERN */, 14 /* KERN_PROC */, 12 /* KERN_PROC_PATHNAME */, -1}
case "darwin":
mib = [4]int32{1 /* CTL_KERN */, 38 /* KERN_PROCARGS */, int32(os.Getpid()), -1}
}
n := uintptr(0)
// Get length.
_, _, errNum := syscall.Syscall6(syscall.SYS___SYSCTL, uintptr(unsafe.Pointer(&mib[0])), 4, 0, uintptr(unsafe.Pointer(&n)), 0, 0)
if errNum != 0 {
return "", errNum
}
if n == 0 { // This shouldn't happen.
return "", nil
}
buf := make([]byte, n)
_, _, errNum = syscall.Syscall6(syscall.SYS___SYSCTL, uintptr(unsafe.Pointer(&mib[0])), 4, uintptr(unsafe.Pointer(&buf[0])), uintptr(unsafe.Pointer(&n)), 0, 0)
if errNum != 0 {
return "", errNum
}
if n == 0 { // This shouldn't happen.
return "", nil
}
for i, v := range buf {
if v == 0 {
buf = buf[:i]
break
}
}
var err error
execPath := string(buf)
// execPath will not be empty due to above checks.
// Try to get the absolute path if the execPath is not rooted.
if execPath[0] != '/' {
execPath, err = getAbs(execPath)
if err != nil {
return execPath, err
}
}
// For darwin KERN_PROCARGS may return the path to a symlink rather than the
// actual executable.
if runtime.GOOS == "darwin" {
if execPath, err = filepath.EvalSymlinks(execPath); err != nil {
return execPath, err
}
}
return execPath, nil
}
func getAbs(execPath string) (string, error) {
if initCwdErr != nil {
return execPath, initCwdErr
}
// The execPath may begin with a "../" or a "./" so clean it first.
// Join the two paths, trailing and starting slashes undetermined, so use
// the generic Join function.
return filepath.Join(initCwd, filepath.Clean(execPath)), nil
}

View File

@ -1,34 +0,0 @@
// Copyright 2012 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package osext
import (
"syscall"
"unicode/utf16"
"unsafe"
)
var (
kernel = syscall.MustLoadDLL("kernel32.dll")
getModuleFileNameProc = kernel.MustFindProc("GetModuleFileNameW")
)
// GetModuleFileName() with hModule = NULL
func executable() (exePath string, err error) {
return getModuleFileName()
}
func getModuleFileName() (string, error) {
var n uint32
b := make([]uint16, syscall.MAX_PATH)
size := uint32(len(b))
r0, _, e1 := getModuleFileNameProc.Call(0, uintptr(unsafe.Pointer(&b[0])), uintptr(size))
n = uint32(r0)
if n == 0 {
return "", e1
}
return string(utf16.Decode(b[0:n])), nil
}

View File

@ -1,42 +0,0 @@
/*
package proto defines a set of structures used to negotiate an update between
an application (the client) and an Equinox update service.
*/
package proto
import "time"
type PatchKind string
const (
PatchNone PatchKind = "none"
PatchBSDiff PatchKind = "bsdiff"
)
type Request struct {
AppID string `json:"app_id"`
Channel string `json:"channel"`
OS string `json:"os"`
Arch string `json:"arch"`
GoARM string `json:"goarm"`
TargetVersion string `json:"target_version"`
CurrentVersion string `json:"current_version"`
CurrentSHA256 string `json:"current_sha256"`
}
type Response struct {
Available bool `json:"available"`
DownloadURL string `json:"download_url"`
Checksum string `json:"checksum"`
Signature string `json:"signature"`
Patch PatchKind `json:"patch_type"`
Release Release `json:"release"`
}
type Release struct {
Title string `json:"title"`
Version string `json:"version"`
Description string `json:"description"`
CreateDate time.Time `json:"create_date"`
}
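To make the wire format concrete, here is a hedged sketch that marshals a Request using the json tags above; the field values (and the app ID) are made up.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/equinox-io/equinox/proto"
)

func main() {
	req := proto.Request{
		AppID:          "app_example", // hypothetical app ID
		Channel:        "stable",
		OS:             "linux",
		Arch:           "amd64",
		CurrentVersion: "2020.10.0",
	}
	// The output uses the snake_case keys declared in the struct tags:
	// app_id, channel, os, arch, goarm, target_version, ...
	b, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(b))
}
```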

View File

@ -1,330 +0,0 @@
package equinox
import (
"bytes"
"crypto"
"crypto/sha256"
"crypto/x509"
"encoding/hex"
"encoding/json"
"encoding/pem"
"errors"
"fmt"
"io"
"io/ioutil"
"net/http"
"os"
"runtime"
"time"
"github.com/equinox-io/equinox/internal/go-update"
"github.com/equinox-io/equinox/internal/osext"
"github.com/equinox-io/equinox/proto"
)
const protocolVersion = "1"
const defaultCheckURL = "https://update.equinox.io/check"
const userAgent = "EquinoxSDK/1.0"
var NotAvailableErr = errors.New("No update available")
type Options struct {
// Channel specifies the name of an Equinox release channel to check for
// a newer version of the application.
//
// If empty, defaults to 'stable'.
Channel string
// Version requests an update to a specific version of the application.
// If specified, `Channel` is ignored.
Version string
// TargetPath defines the path to the file to update.
// The empty string means 'the executable file of the running program'.
TargetPath string
// Create TargetPath replacement with this file mode. If zero, defaults to 0755.
TargetMode os.FileMode
// Public key to use for signature verification. If nil, no signature
// verification is done. Use `SetPublicKeyPEM` to set this field with PEM data.
PublicKey crypto.PublicKey
// Target operating system of the update. Uses the same standard OS names used
// by Go build tags (windows, darwin, linux, etc).
// If empty, it will be populated by consulting runtime.GOOS
OS string
// Target architecture of the update. Uses the same standard Arch names used
// by Go build tags (amd64, 386, arm, etc).
// If empty, it will be populated by consulting runtime.GOARCH
Arch string
// Target ARM architecture, if a specific one is required. Uses the same names
// as the GOARM environment variable (5, 6, 7).
//
// GoARM is ignored if Arch != 'arm'.
// GoARM is ignored if it is the empty string. Omit it if you do not need
// to distinguish between ARM versions.
GoARM string
// The current application version. It is optional and used only for
// statistics and reporting.
CurrentVersion string
// CheckURL is the URL to request an update check from. You should only set
// this if you are running an on-prem Equinox server.
// If empty the default Equinox update service endpoint is used.
CheckURL string
// HTTPClient is used to make all HTTP requests necessary for the update check protocol.
// You may configure it to use custom timeouts, proxy servers or other behaviors.
HTTPClient *http.Client
}
// Response is returned by Check when an update is available. It may be
// passed to Apply to perform the update.
type Response struct {
// Version of the release that will be updated to if applied.
ReleaseVersion string
// Title of the release
ReleaseTitle string
// Additional details about the release
ReleaseDescription string
// Creation date of the release
ReleaseDate time.Time
downloadURL string
checksum []byte
signature []byte
patch proto.PatchKind
opts Options
}
// SetPublicKeyPEM is a convenience method to set the PublicKey property
// used for checking a completed update's signature by parsing a
// Public Key formatted as PEM data.
func (o *Options) SetPublicKeyPEM(pembytes []byte) error {
block, _ := pem.Decode(pembytes)
if block == nil {
return errors.New("couldn't parse PEM data")
}
pub, err := x509.ParsePKIXPublicKey(block.Bytes)
if err != nil {
return err
}
o.PublicKey = pub
return nil
}
// Check communicates with an Equinox update service to determine if
// an update for the given application matching the specified options is
// available. The returned error is nil only if an update is available.
//
// The appID is issued to you when creating an application at https://equinox.io
//
// You can compare the returned error to NotAvailableErr to differentiate between
// a successful check that found no update from other errors like a failed
// network connection.
func Check(appID string, opts Options) (Response, error) {
var req, err = checkRequest(appID, &opts)
if err != nil {
return Response{}, err
}
return doCheckRequest(opts, req)
}
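A minimal sketch of the check-then-apply flow Check enables; the app ID is purely hypothetical and signature verification is omitted for brevity.

```go
package main

import (
	"fmt"
	"log"

	"github.com/equinox-io/equinox"
)

func upgrade() {
	var opts equinox.Options
	// In a real setup the release signing key would be loaded here:
	// _ = opts.SetPublicKeyPEM(publicKeyPEM)

	resp, err := equinox.Check("app_example", opts) // hypothetical app ID
	switch {
	case err == equinox.NotAvailableErr:
		fmt.Println("already up to date")
		return
	case err != nil:
		log.Fatalf("update check failed: %v", err)
	}

	if err := resp.Apply(); err != nil {
		log.Fatalf("applying update failed: %v", err)
	}
	fmt.Println("updated to", resp.ReleaseVersion)
}
```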
func checkRequest(appID string, opts *Options) (*http.Request, error) {
if opts.Channel == "" {
opts.Channel = "stable"
}
if opts.TargetPath == "" {
var err error
opts.TargetPath, err = osext.Executable()
if err != nil {
return nil, err
}
}
if opts.OS == "" {
opts.OS = runtime.GOOS
}
if opts.Arch == "" {
opts.Arch = runtime.GOARCH
}
if opts.CheckURL == "" {
opts.CheckURL = defaultCheckURL
}
if opts.HTTPClient == nil {
opts.HTTPClient = new(http.Client)
}
opts.HTTPClient.Transport = newUserAgentTransport(userAgent, opts.HTTPClient.Transport)
checksum := computeChecksum(opts.TargetPath)
payload, err := json.Marshal(proto.Request{
AppID: appID,
Channel: opts.Channel,
OS: opts.OS,
Arch: opts.Arch,
GoARM: opts.GoARM,
TargetVersion: opts.Version,
CurrentVersion: opts.CurrentVersion,
CurrentSHA256: checksum,
})
req, err := http.NewRequest("POST", opts.CheckURL, bytes.NewReader(payload))
if err != nil {
return nil, err
}
req.Header.Set("Accept", fmt.Sprintf("application/json; q=1; version=%s; charset=utf-8", protocolVersion))
req.Header.Set("Content-Type", "application/json; charset=utf-8")
req.Close = true
return req, err
}
func doCheckRequest(opts Options, req *http.Request) (r Response, err error) {
resp, err := opts.HTTPClient.Do(req)
if err != nil {
return r, err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
body, _ := ioutil.ReadAll(resp.Body)
return r, fmt.Errorf("Server responded with %s: %s", resp.Status, body)
}
var protoResp proto.Response
err = json.NewDecoder(resp.Body).Decode(&protoResp)
if err != nil {
return r, err
}
if !protoResp.Available {
return r, NotAvailableErr
}
r.ReleaseVersion = protoResp.Release.Version
r.ReleaseTitle = protoResp.Release.Title
r.ReleaseDescription = protoResp.Release.Description
r.ReleaseDate = protoResp.Release.CreateDate
r.downloadURL = protoResp.DownloadURL
r.patch = protoResp.Patch
r.opts = opts
r.checksum, err = hex.DecodeString(protoResp.Checksum)
if err != nil {
return r, err
}
r.signature, err = hex.DecodeString(protoResp.Signature)
if err != nil {
return r, err
}
return r, nil
}
func computeChecksum(path string) string {
f, err := os.Open(path)
if err != nil {
return ""
}
defer f.Close()
h := sha256.New()
_, err = io.Copy(h, f)
if err != nil {
return ""
}
return hex.EncodeToString(h.Sum(nil))
}
// Apply performs an update of the current executable (or TargetPath, if it was
// set on the Options) with the update specified by Response.
//
// Error is nil if and only if the entire update completes successfully.
func (r Response) Apply() error {
var req, opts, err = r.applyRequest()
if err != nil {
return err
}
return r.applyUpdate(req, opts)
}
func (r Response) applyRequest() (*http.Request, update.Options, error) {
opts := update.Options{
TargetPath: r.opts.TargetPath,
TargetMode: r.opts.TargetMode,
Checksum: r.checksum,
Signature: r.signature,
Verifier: update.NewECDSAVerifier(),
PublicKey: r.opts.PublicKey,
}
switch r.patch {
case proto.PatchBSDiff:
opts.Patcher = update.NewBSDiffPatcher()
}
if err := opts.CheckPermissions(); err != nil {
return nil, opts, err
}
req, err := http.NewRequest("GET", r.downloadURL, nil)
return req, opts, err
}
func (r Response) applyUpdate(req *http.Request, opts update.Options) error {
// fetch the update
req.Close = true
resp, err := r.opts.HTTPClient.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
// check that we got a patch
if resp.StatusCode >= 400 {
msg := "error downloading patch"
id := resp.Header.Get("Request-Id")
if id != "" {
msg += ", request " + id
}
blob, err := ioutil.ReadAll(resp.Body)
if err == nil {
msg += ": " + string(bytes.TrimSpace(blob))
}
return errors.New(msg)
}
return update.Apply(resp.Body, opts)
}
type userAgentTransport struct {
userAgent string
http.RoundTripper
}
func newUserAgentTransport(userAgent string, rt http.RoundTripper) *userAgentTransport {
if rt == nil {
rt = http.DefaultTransport
}
return &userAgentTransport{userAgent, rt}
}
func (t *userAgentTransport) RoundTrip(r *http.Request) (*http.Response, error) {
if r.Header.Get("User-Agent") == "" {
r.Header.Set("User-Agent", t.userAgent)
}
return t.RoundTripper.RoundTrip(r)
}
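newUserAgentTransport is unexported, but the RoundTripper decoration it performs is a general pattern; below is a hedged, stand-alone sketch with hypothetical names that mirrors it from the caller's side.

```go
package main

import (
	"fmt"
	"net/http"
)

// headerTransport is a hypothetical RoundTripper decorator, mirroring the
// userAgentTransport above: it fills in a default User-Agent and delegates.
type headerTransport struct {
	userAgent string
	next      http.RoundTripper
}

func (t *headerTransport) RoundTrip(r *http.Request) (*http.Response, error) {
	if r.Header.Get("User-Agent") == "" {
		r.Header.Set("User-Agent", t.userAgent)
	}
	return t.next.RoundTrip(r)
}

func main() {
	client := &http.Client{
		Transport: &headerTransport{userAgent: "ExampleSDK/1.0", next: http.DefaultTransport},
	}
	resp, err := client.Get("https://example.com")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```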

View File

@ -1,29 +0,0 @@
// +build go1.7
package equinox
import (
"context"
)
// CheckContext is like Check but includes a context.
func CheckContext(ctx context.Context, appID string, opts Options) (Response, error) {
var req, err = checkRequest(appID, &opts)
if err != nil {
return Response{}, err
}
return doCheckRequest(opts, req.WithContext(ctx))
}
// ApplyContext is like Apply but includes a context.
func (r Response) ApplyContext(ctx context.Context) error {
var req, opts, err = r.applyRequest()
if err != nil {
return err
}
return r.applyUpdate(req.WithContext(ctx), opts)
}
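A hedged sketch of the context-aware variants with a deadline; the app ID is again a placeholder.

```go
package main

import (
	"context"
	"log"
	"time"

	"github.com/equinox-io/equinox"
)

func upgradeWithTimeout() error {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	resp, err := equinox.CheckContext(ctx, "app_example", equinox.Options{})
	if err == equinox.NotAvailableErr {
		return nil // nothing to do
	}
	if err != nil {
		return err
	}
	if err := resp.ApplyContext(ctx); err != nil {
		log.Printf("apply failed: %v", err)
		return err
	}
	return nil
}
```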

3
vendor/github.com/json-iterator/go/.codecov.yml generated vendored Normal file
View File

@ -0,0 +1,3 @@
ignore:
- "output_tests/.*"

4
vendor/github.com/json-iterator/go/.gitignore generated vendored Normal file
View File

@ -0,0 +1,4 @@
/vendor
/bug_test.go
/coverage.txt
/.idea

14
vendor/github.com/json-iterator/go/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,14 @@
language: go
go:
- 1.8.x
- 1.x
before_install:
- go get -t -v ./...
script:
- ./test.sh
after_success:
- bash <(curl -s https://codecov.io/bash)

21
vendor/github.com/json-iterator/go/Gopkg.lock generated vendored Normal file
View File

@ -0,0 +1,21 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
[[projects]]
name = "github.com/modern-go/concurrent"
packages = ["."]
revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
version = "1.0.0"
[[projects]]
name = "github.com/modern-go/reflect2"
packages = ["."]
revision = "4b7aa43c6742a2c18fdef89dd197aaae7dac7ccd"
version = "1.0.1"
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "ea54a775e5a354cb015502d2e7aa4b74230fc77e894f34a838b268c25ec8eeb8"
solver-name = "gps-cdcl"
solver-version = 1

26
vendor/github.com/json-iterator/go/Gopkg.toml generated vendored Normal file
View File

@ -0,0 +1,26 @@
# Gopkg.toml example
#
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
# for detailed Gopkg.toml documentation.
#
# required = ["github.com/user/thing/cmd/thing"]
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
#
# [[constraint]]
# name = "github.com/user/project"
# version = "1.0.0"
#
# [[constraint]]
# name = "github.com/user/project2"
# branch = "dev"
# source = "github.com/myfork/project2"
#
# [[override]]
# name = "github.com/x/y"
# version = "2.4.0"
ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]
[[constraint]]
name = "github.com/modern-go/reflect2"
version = "1.0.1"

View File

@ -1,6 +1,6 @@
MIT License
Copyright (c) 2017 Equinox
Copyright (c) 2016 json-iterator
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

87
vendor/github.com/json-iterator/go/README.md generated vendored Normal file
View File

@ -0,0 +1,87 @@
[![Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go/-/badge.svg)](https://sourcegraph.com/github.com/json-iterator/go?badge)
[![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](https://pkg.go.dev/github.com/json-iterator/go)
[![Build Status](https://travis-ci.org/json-iterator/go.svg?branch=master)](https://travis-ci.org/json-iterator/go)
[![codecov](https://codecov.io/gh/json-iterator/go/branch/master/graph/badge.svg)](https://codecov.io/gh/json-iterator/go)
[![rcard](https://goreportcard.com/badge/github.com/json-iterator/go)](https://goreportcard.com/report/github.com/json-iterator/go)
[![License](http://img.shields.io/badge/license-mit-blue.svg?style=flat-square)](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE)
[![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)
A high-performance 100% compatible drop-in replacement of "encoding/json"
You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)
# Benchmark
![benchmark](http://jsoniter.com/benchmarks/go-benchmark.png)
Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
Raw Result (easyjson requires static code generation)
| | ns/op | allocation bytes | allocation times |
| --------------- | ----------- | ---------------- | ---------------- |
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
Always benchmark with your own workload.
The result depends heavily on the data input.
# Usage
100% compatibility with standard lib
Replace
```go
import "encoding/json"
json.Marshal(&data)
```
with
```go
import jsoniter "github.com/json-iterator/go"
var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Marshal(&data)
```
Replace
```go
import "encoding/json"
json.Unmarshal(input, &data)
```
with
```go
import jsoniter "github.com/json-iterator/go"
var json = jsoniter.ConfigCompatibleWithStandardLibrary
json.Unmarshal(input, &data)
```
[More documentation](http://jsoniter.com/migrate-from-go-std.html)
# How to get
```
go get github.com/json-iterator/go
```
# Contribution Welcomed !
Contributors
- [thockin](https://github.com/thockin)
- [mattn](https://github.com/mattn)
- [cch123](https://github.com/cch123)
- [Oleg Shaldybin](https://github.com/olegshaldybin)
- [Jason Toffaletti](https://github.com/toffaletti)
Report issue or pull request, or email taowen@gmail.com, or [![Gitter chat](https://badges.gitter.im/gitterHQ/gitter.png)](https://gitter.im/json-iterator/Lobby)

150
vendor/github.com/json-iterator/go/adapter.go generated vendored Normal file
View File

@ -0,0 +1,150 @@
package jsoniter
import (
"bytes"
"io"
)
// RawMessage mirrors json.RawMessage so jsoniter can be used as a drop-in replacement for encoding/json
type RawMessage []byte
// Unmarshal adapts to the encoding/json Unmarshal API
//
// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
// Refer to https://godoc.org/encoding/json#Unmarshal for more information
func Unmarshal(data []byte, v interface{}) error {
return ConfigDefault.Unmarshal(data, v)
}
// UnmarshalFromString is a convenient method to read from string instead of []byte
func UnmarshalFromString(str string, v interface{}) error {
return ConfigDefault.UnmarshalFromString(str, v)
}
// Get quick method to get value from deeply nested JSON structure
func Get(data []byte, path ...interface{}) Any {
return ConfigDefault.Get(data, path...)
}
// Marshal adapts to the encoding/json Marshal API
//
// Marshal returns the JSON encoding of v, adapting to the encoding/json Marshal API.
// Refer to https://godoc.org/encoding/json#Marshal for more information
func Marshal(v interface{}) ([]byte, error) {
return ConfigDefault.Marshal(v)
}
// MarshalIndent same as json.MarshalIndent. Prefix is not supported.
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
return ConfigDefault.MarshalIndent(v, prefix, indent)
}
// MarshalToString is a convenience method that returns the encoding as a string instead of []byte
func MarshalToString(v interface{}) (string, error) {
return ConfigDefault.MarshalToString(v)
}
// NewDecoder adapts to json/stream NewDecoder API.
//
// NewDecoder returns a new decoder that reads from r.
//
// Instead of an encoding/json Decoder, a jsoniter Decoder is returned
// Refer to https://godoc.org/encoding/json#NewDecoder for more information
func NewDecoder(reader io.Reader) *Decoder {
return ConfigDefault.NewDecoder(reader)
}
// Decoder reads and decodes JSON values from an input stream.
// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress)
type Decoder struct {
iter *Iterator
}
// Decode decodes the next JSON-encoded value from the input and stores it in obj
func (adapter *Decoder) Decode(obj interface{}) error {
if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
if !adapter.iter.loadMore() {
return io.EOF
}
}
adapter.iter.ReadVal(obj)
err := adapter.iter.Error
if err == io.EOF {
return nil
}
return adapter.iter.Error
}
// More reports whether there is another element in the current array or object being parsed
func (adapter *Decoder) More() bool {
iter := adapter.iter
if iter.Error != nil {
return false
}
c := iter.nextToken()
if c == 0 {
return false
}
iter.unreadByte()
return c != ']' && c != '}'
}
// Buffered returns a reader over the data remaining in the Decoder's internal buffer
func (adapter *Decoder) Buffered() io.Reader {
remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
return bytes.NewReader(remaining)
}
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
// Number instead of as a float64.
func (adapter *Decoder) UseNumber() {
cfg := adapter.iter.cfg.configBeforeFrozen
cfg.UseNumber = true
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}
// DisallowUnknownFields causes the Decoder to return an error when the destination
// is a struct and the input contains object keys which do not match any
// non-ignored, exported fields in the destination.
func (adapter *Decoder) DisallowUnknownFields() {
cfg := adapter.iter.cfg.configBeforeFrozen
cfg.DisallowUnknownFields = true
adapter.iter.cfg = cfg.frozeWithCacheReuse(adapter.iter.cfg.extraExtensions)
}
// NewEncoder same as json.NewEncoder
func NewEncoder(writer io.Writer) *Encoder {
return ConfigDefault.NewEncoder(writer)
}
// Encoder same as json.Encoder
type Encoder struct {
stream *Stream
}
// Encode writes the JSON encoding of val to the underlying io.Writer, followed by a newline
func (adapter *Encoder) Encode(val interface{}) error {
adapter.stream.WriteVal(val)
adapter.stream.WriteRaw("\n")
adapter.stream.Flush()
return adapter.stream.Error
}
// SetIndent sets the indentation step. The prefix argument is not supported
func (adapter *Encoder) SetIndent(prefix, indent string) {
config := adapter.stream.cfg.configBeforeFrozen
config.IndentionStep = len(indent)
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}
// SetEscapeHTML controls HTML escaping (enabled by default); pass false to disable it
func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
config := adapter.stream.cfg.configBeforeFrozen
config.EscapeHTML = escapeHTML
adapter.stream.cfg = config.frozeWithCacheReuse(adapter.stream.cfg.extraExtensions)
}
// Valid reports whether data is a valid JSON encoding.
func Valid(data []byte) bool {
return ConfigDefault.Valid(data)
}
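A hedged sketch of this adapter API from the caller's side; apart from the import, it is used exactly like encoding/json's stream Decoder and Encoder.

```go
package main

import (
	"fmt"
	"os"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Decoding with the stream adapter, keeping large numbers as json.Number
	// rather than lossy float64.
	dec := jsoniter.NewDecoder(strings.NewReader(`{"id": 12345678901234567890}`))
	dec.UseNumber()
	var v map[string]interface{}
	if err := dec.Decode(&v); err != nil {
		panic(err)
	}
	fmt.Printf("%T %v\n", v["id"], v["id"])

	// Encoding with the stream adapter, HTML escaping disabled.
	enc := jsoniter.NewEncoder(os.Stdout)
	enc.SetEscapeHTML(false)
	_ = enc.Encode(map[string]string{"url": "https://example.com/?a=1&b=2"})
}
```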

325
vendor/github.com/json-iterator/go/any.go generated vendored Normal file
View File

@ -0,0 +1,325 @@
package jsoniter
import (
"errors"
"fmt"
"github.com/modern-go/reflect2"
"io"
"reflect"
"strconv"
"unsafe"
)
// Any generic object representation.
// The lazy JSON implementation holds []byte and parses lazily.
type Any interface {
LastError() error
ValueType() ValueType
MustBeValid() Any
ToBool() bool
ToInt() int
ToInt32() int32
ToInt64() int64
ToUint() uint
ToUint32() uint32
ToUint64() uint64
ToFloat32() float32
ToFloat64() float64
ToString() string
ToVal(val interface{})
Get(path ...interface{}) Any
Size() int
Keys() []string
GetInterface() interface{}
WriteTo(stream *Stream)
}
type baseAny struct{}
func (any *baseAny) Get(path ...interface{}) Any {
return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
}
func (any *baseAny) Size() int {
return 0
}
func (any *baseAny) Keys() []string {
return []string{}
}
func (any *baseAny) ToVal(obj interface{}) {
panic("not implemented")
}
// WrapInt32 turn int32 into Any interface
func WrapInt32(val int32) Any {
return &int32Any{baseAny{}, val}
}
// WrapInt64 turn int64 into Any interface
func WrapInt64(val int64) Any {
return &int64Any{baseAny{}, val}
}
// WrapUint32 turn uint32 into Any interface
func WrapUint32(val uint32) Any {
return &uint32Any{baseAny{}, val}
}
// WrapUint64 turn uint64 into Any interface
func WrapUint64(val uint64) Any {
return &uint64Any{baseAny{}, val}
}
// WrapFloat64 turn float64 into Any interface
func WrapFloat64(val float64) Any {
return &floatAny{baseAny{}, val}
}
// WrapString turn string into Any interface
func WrapString(val string) Any {
return &stringAny{baseAny{}, val}
}
// Wrap turn a go object into Any interface
func Wrap(val interface{}) Any {
if val == nil {
return &nilAny{}
}
asAny, isAny := val.(Any)
if isAny {
return asAny
}
typ := reflect2.TypeOf(val)
switch typ.Kind() {
case reflect.Slice:
return wrapArray(val)
case reflect.Struct:
return wrapStruct(val)
case reflect.Map:
return wrapMap(val)
case reflect.String:
return WrapString(val.(string))
case reflect.Int:
if strconv.IntSize == 32 {
return WrapInt32(int32(val.(int)))
}
return WrapInt64(int64(val.(int)))
case reflect.Int8:
return WrapInt32(int32(val.(int8)))
case reflect.Int16:
return WrapInt32(int32(val.(int16)))
case reflect.Int32:
return WrapInt32(val.(int32))
case reflect.Int64:
return WrapInt64(val.(int64))
case reflect.Uint:
if strconv.IntSize == 32 {
return WrapUint32(uint32(val.(uint)))
}
return WrapUint64(uint64(val.(uint)))
case reflect.Uintptr:
if ptrSize == 32 {
return WrapUint32(uint32(val.(uintptr)))
}
return WrapUint64(uint64(val.(uintptr)))
case reflect.Uint8:
return WrapUint32(uint32(val.(uint8)))
case reflect.Uint16:
return WrapUint32(uint32(val.(uint16)))
case reflect.Uint32:
return WrapUint32(uint32(val.(uint32)))
case reflect.Uint64:
return WrapUint64(val.(uint64))
case reflect.Float32:
return WrapFloat64(float64(val.(float32)))
case reflect.Float64:
return WrapFloat64(val.(float64))
case reflect.Bool:
if val.(bool) == true {
return &trueAny{}
}
return &falseAny{}
}
return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)}
}
// ReadAny reads the next JSON element as an Any object. It is a better json.RawMessage.
func (iter *Iterator) ReadAny() Any {
return iter.readAny()
}
func (iter *Iterator) readAny() Any {
c := iter.nextToken()
switch c {
case '"':
iter.unreadByte()
return &stringAny{baseAny{}, iter.ReadString()}
case 'n':
iter.skipThreeBytes('u', 'l', 'l') // null
return &nilAny{}
case 't':
iter.skipThreeBytes('r', 'u', 'e') // true
return &trueAny{}
case 'f':
iter.skipFourBytes('a', 'l', 's', 'e') // false
return &falseAny{}
case '{':
return iter.readObjectAny()
case '[':
return iter.readArrayAny()
case '-':
return iter.readNumberAny(false)
case 0:
return &invalidAny{baseAny{}, errors.New("input is empty")}
default:
return iter.readNumberAny(true)
}
}
func (iter *Iterator) readNumberAny(positive bool) Any {
iter.startCapture(iter.head - 1)
iter.skipNumber()
lazyBuf := iter.stopCapture()
return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
}
func (iter *Iterator) readObjectAny() Any {
iter.startCapture(iter.head - 1)
iter.skipObject()
lazyBuf := iter.stopCapture()
return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
}
func (iter *Iterator) readArrayAny() Any {
iter.startCapture(iter.head - 1)
iter.skipArray()
lazyBuf := iter.stopCapture()
return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
}
func locateObjectField(iter *Iterator, target string) []byte {
var found []byte
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
if field == target {
found = iter.SkipAndReturnBytes()
return false
}
iter.Skip()
return true
})
return found
}
func locateArrayElement(iter *Iterator, target int) []byte {
var found []byte
n := 0
iter.ReadArrayCB(func(iter *Iterator) bool {
if n == target {
found = iter.SkipAndReturnBytes()
return false
}
iter.Skip()
n++
return true
})
return found
}
func locatePath(iter *Iterator, path []interface{}) Any {
for i, pathKeyObj := range path {
switch pathKey := pathKeyObj.(type) {
case string:
valueBytes := locateObjectField(iter, pathKey)
if valueBytes == nil {
return newInvalidAny(path[i:])
}
iter.ResetBytes(valueBytes)
case int:
valueBytes := locateArrayElement(iter, pathKey)
if valueBytes == nil {
return newInvalidAny(path[i:])
}
iter.ResetBytes(valueBytes)
case int32:
if '*' == pathKey {
return iter.readAny().Get(path[i:]...)
}
return newInvalidAny(path[i:])
default:
return newInvalidAny(path[i:])
}
}
if iter.Error != nil && iter.Error != io.EOF {
return &invalidAny{baseAny{}, iter.Error}
}
return iter.readAny()
}
var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem()
func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder {
if typ == anyType {
return &directAnyCodec{}
}
if typ.Implements(anyType) {
return &anyCodec{
valType: typ,
}
}
return nil
}
func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder {
if typ == anyType {
return &directAnyCodec{}
}
if typ.Implements(anyType) {
return &anyCodec{
valType: typ,
}
}
return nil
}
type anyCodec struct {
valType reflect2.Type
}
func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
panic("not implemented")
}
func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
obj := codec.valType.UnsafeIndirect(ptr)
any := obj.(Any)
any.WriteTo(stream)
}
func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
obj := codec.valType.UnsafeIndirect(ptr)
any := obj.(Any)
return any.Size() == 0
}
type directAnyCodec struct {
}
func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*(*Any)(ptr) = iter.readAny()
}
func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
any := *(*Any)(ptr)
if any == nil {
stream.WriteNil()
return
}
any.WriteTo(stream)
}
func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool {
any := *(*Any)(ptr)
return any.Size() == 0
}
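A hedged sketch of the lazy Any API defined above: Get walks a path into nested JSON without decoding the whole document, '*' fans out over every element, and missing paths come back as an invalid Any rather than a panic.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"users": [{"name": "ada"}, {"name": "grace"}]}`)

	// Path lookup: object field, array index, object field.
	name := jsoniter.Get(data, "users", 0, "name").ToString()
	fmt.Println(name) // should print: ada

	// '*' wildcard maps over every array element.
	names := jsoniter.Get(data, "users", '*', "name")
	fmt.Println(names.ToString()) // should print: ["ada","grace"]

	// Missing paths yield an invalid Any instead of panicking.
	missing := jsoniter.Get(data, "users", 5, "name")
	fmt.Println(missing.ValueType() == jsoniter.InvalidValue, missing.LastError() != nil)
}
```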

278
vendor/github.com/json-iterator/go/any_array.go generated vendored Normal file
View File

@ -0,0 +1,278 @@
package jsoniter
import (
"reflect"
"unsafe"
)
type arrayLazyAny struct {
baseAny
cfg *frozenConfig
buf []byte
err error
}
func (any *arrayLazyAny) ValueType() ValueType {
return ArrayValue
}
func (any *arrayLazyAny) MustBeValid() Any {
return any
}
func (any *arrayLazyAny) LastError() error {
return any.err
}
func (any *arrayLazyAny) ToBool() bool {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.ReadArray()
}
func (any *arrayLazyAny) ToInt() int {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToInt32() int32 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToInt64() int64 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToUint() uint {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToUint32() uint32 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToUint64() uint64 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToFloat32() float32 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToFloat64() float64 {
if any.ToBool() {
return 1
}
return 0
}
func (any *arrayLazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *arrayLazyAny) ToVal(val interface{}) {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadVal(val)
}
func (any *arrayLazyAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case int:
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
valueBytes := locateArrayElement(iter, firstPath)
if valueBytes == nil {
return newInvalidAny(path)
}
iter.ResetBytes(valueBytes)
return locatePath(iter, path[1:])
case int32:
if '*' == firstPath {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
arr := make([]Any, 0)
iter.ReadArrayCB(func(iter *Iterator) bool {
found := iter.readAny().Get(path[1:]...)
if found.ValueType() != InvalidValue {
arr = append(arr, found)
}
return true
})
return wrapArray(arr)
}
return newInvalidAny(path)
default:
return newInvalidAny(path)
}
}
func (any *arrayLazyAny) Size() int {
size := 0
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadArrayCB(func(iter *Iterator) bool {
size++
iter.Skip()
return true
})
return size
}
func (any *arrayLazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *arrayLazyAny) GetInterface() interface{} {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.Read()
}
type arrayAny struct {
baseAny
val reflect.Value
}
func wrapArray(val interface{}) *arrayAny {
return &arrayAny{baseAny{}, reflect.ValueOf(val)}
}
func (any *arrayAny) ValueType() ValueType {
return ArrayValue
}
func (any *arrayAny) MustBeValid() Any {
return any
}
func (any *arrayAny) LastError() error {
return nil
}
func (any *arrayAny) ToBool() bool {
return any.val.Len() != 0
}
func (any *arrayAny) ToInt() int {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToInt32() int32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToInt64() int64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToUint() uint {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToUint32() uint32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToUint64() uint64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToFloat32() float32 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToFloat64() float64 {
if any.val.Len() == 0 {
return 0
}
return 1
}
func (any *arrayAny) ToString() string {
str, _ := MarshalToString(any.val.Interface())
return str
}
func (any *arrayAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case int:
if firstPath < 0 || firstPath >= any.val.Len() {
return newInvalidAny(path)
}
return Wrap(any.val.Index(firstPath).Interface())
case int32:
if '*' == firstPath {
mappedAll := make([]Any, 0)
for i := 0; i < any.val.Len(); i++ {
mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
if mapped.ValueType() != InvalidValue {
mappedAll = append(mappedAll, mapped)
}
}
return wrapArray(mappedAll)
}
return newInvalidAny(path)
default:
return newInvalidAny(path)
}
}
func (any *arrayAny) Size() int {
return any.val.Len()
}
func (any *arrayAny) WriteTo(stream *Stream) {
stream.WriteVal(any.val)
}
func (any *arrayAny) GetInterface() interface{} {
return any.val.Interface()
}

137
vendor/github.com/json-iterator/go/any_bool.go generated vendored Normal file
View File

@ -0,0 +1,137 @@
package jsoniter
type trueAny struct {
baseAny
}
func (any *trueAny) LastError() error {
return nil
}
func (any *trueAny) ToBool() bool {
return true
}
func (any *trueAny) ToInt() int {
return 1
}
func (any *trueAny) ToInt32() int32 {
return 1
}
func (any *trueAny) ToInt64() int64 {
return 1
}
func (any *trueAny) ToUint() uint {
return 1
}
func (any *trueAny) ToUint32() uint32 {
return 1
}
func (any *trueAny) ToUint64() uint64 {
return 1
}
func (any *trueAny) ToFloat32() float32 {
return 1
}
func (any *trueAny) ToFloat64() float64 {
return 1
}
func (any *trueAny) ToString() string {
return "true"
}
func (any *trueAny) WriteTo(stream *Stream) {
stream.WriteTrue()
}
func (any *trueAny) Parse() *Iterator {
return nil
}
func (any *trueAny) GetInterface() interface{} {
return true
}
func (any *trueAny) ValueType() ValueType {
return BoolValue
}
func (any *trueAny) MustBeValid() Any {
return any
}
type falseAny struct {
baseAny
}
func (any *falseAny) LastError() error {
return nil
}
func (any *falseAny) ToBool() bool {
return false
}
func (any *falseAny) ToInt() int {
return 0
}
func (any *falseAny) ToInt32() int32 {
return 0
}
func (any *falseAny) ToInt64() int64 {
return 0
}
func (any *falseAny) ToUint() uint {
return 0
}
func (any *falseAny) ToUint32() uint32 {
return 0
}
func (any *falseAny) ToUint64() uint64 {
return 0
}
func (any *falseAny) ToFloat32() float32 {
return 0
}
func (any *falseAny) ToFloat64() float64 {
return 0
}
func (any *falseAny) ToString() string {
return "false"
}
func (any *falseAny) WriteTo(stream *Stream) {
stream.WriteFalse()
}
func (any *falseAny) Parse() *Iterator {
return nil
}
func (any *falseAny) GetInterface() interface{} {
return false
}
func (any *falseAny) ValueType() ValueType {
return BoolValue
}
func (any *falseAny) MustBeValid() Any {
return any
}

83
vendor/github.com/json-iterator/go/any_float.go generated vendored Normal file
View File

@ -0,0 +1,83 @@
package jsoniter
import (
"strconv"
)
type floatAny struct {
baseAny
val float64
}
func (any *floatAny) Parse() *Iterator {
return nil
}
func (any *floatAny) ValueType() ValueType {
return NumberValue
}
func (any *floatAny) MustBeValid() Any {
return any
}
func (any *floatAny) LastError() error {
return nil
}
func (any *floatAny) ToBool() bool {
return any.ToFloat64() != 0
}
func (any *floatAny) ToInt() int {
return int(any.val)
}
func (any *floatAny) ToInt32() int32 {
return int32(any.val)
}
func (any *floatAny) ToInt64() int64 {
return int64(any.val)
}
func (any *floatAny) ToUint() uint {
if any.val > 0 {
return uint(any.val)
}
return 0
}
func (any *floatAny) ToUint32() uint32 {
if any.val > 0 {
return uint32(any.val)
}
return 0
}
func (any *floatAny) ToUint64() uint64 {
if any.val > 0 {
return uint64(any.val)
}
return 0
}
func (any *floatAny) ToFloat32() float32 {
return float32(any.val)
}
func (any *floatAny) ToFloat64() float64 {
return any.val
}
func (any *floatAny) ToString() string {
return strconv.FormatFloat(any.val, 'E', -1, 64)
}
func (any *floatAny) WriteTo(stream *Stream) {
stream.WriteFloat64(any.val)
}
func (any *floatAny) GetInterface() interface{} {
return any.val
}

74
vendor/github.com/json-iterator/go/any_int32.go generated vendored Normal file
View File

@ -0,0 +1,74 @@
package jsoniter
import (
"strconv"
)
type int32Any struct {
baseAny
val int32
}
func (any *int32Any) LastError() error {
return nil
}
func (any *int32Any) ValueType() ValueType {
return NumberValue
}
func (any *int32Any) MustBeValid() Any {
return any
}
func (any *int32Any) ToBool() bool {
return any.val != 0
}
func (any *int32Any) ToInt() int {
return int(any.val)
}
func (any *int32Any) ToInt32() int32 {
return any.val
}
func (any *int32Any) ToInt64() int64 {
return int64(any.val)
}
func (any *int32Any) ToUint() uint {
return uint(any.val)
}
func (any *int32Any) ToUint32() uint32 {
return uint32(any.val)
}
func (any *int32Any) ToUint64() uint64 {
return uint64(any.val)
}
func (any *int32Any) ToFloat32() float32 {
return float32(any.val)
}
func (any *int32Any) ToFloat64() float64 {
return float64(any.val)
}
func (any *int32Any) ToString() string {
return strconv.FormatInt(int64(any.val), 10)
}
func (any *int32Any) WriteTo(stream *Stream) {
stream.WriteInt32(any.val)
}
func (any *int32Any) Parse() *Iterator {
return nil
}
func (any *int32Any) GetInterface() interface{} {
return any.val
}

74
vendor/github.com/json-iterator/go/any_int64.go generated vendored Normal file
View File

@ -0,0 +1,74 @@
package jsoniter
import (
"strconv"
)
type int64Any struct {
baseAny
val int64
}
func (any *int64Any) LastError() error {
return nil
}
func (any *int64Any) ValueType() ValueType {
return NumberValue
}
func (any *int64Any) MustBeValid() Any {
return any
}
func (any *int64Any) ToBool() bool {
return any.val != 0
}
func (any *int64Any) ToInt() int {
return int(any.val)
}
func (any *int64Any) ToInt32() int32 {
return int32(any.val)
}
func (any *int64Any) ToInt64() int64 {
return any.val
}
func (any *int64Any) ToUint() uint {
return uint(any.val)
}
func (any *int64Any) ToUint32() uint32 {
return uint32(any.val)
}
func (any *int64Any) ToUint64() uint64 {
return uint64(any.val)
}
func (any *int64Any) ToFloat32() float32 {
return float32(any.val)
}
func (any *int64Any) ToFloat64() float64 {
return float64(any.val)
}
func (any *int64Any) ToString() string {
return strconv.FormatInt(any.val, 10)
}
func (any *int64Any) WriteTo(stream *Stream) {
stream.WriteInt64(any.val)
}
func (any *int64Any) Parse() *Iterator {
return nil
}
func (any *int64Any) GetInterface() interface{} {
return any.val
}

82
vendor/github.com/json-iterator/go/any_invalid.go generated vendored Normal file
View File

@ -0,0 +1,82 @@
package jsoniter
import "fmt"
type invalidAny struct {
baseAny
err error
}
func newInvalidAny(path []interface{}) *invalidAny {
return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
}
func (any *invalidAny) LastError() error {
return any.err
}
func (any *invalidAny) ValueType() ValueType {
return InvalidValue
}
func (any *invalidAny) MustBeValid() Any {
panic(any.err)
}
func (any *invalidAny) ToBool() bool {
return false
}
func (any *invalidAny) ToInt() int {
return 0
}
func (any *invalidAny) ToInt32() int32 {
return 0
}
func (any *invalidAny) ToInt64() int64 {
return 0
}
func (any *invalidAny) ToUint() uint {
return 0
}
func (any *invalidAny) ToUint32() uint32 {
return 0
}
func (any *invalidAny) ToUint64() uint64 {
return 0
}
func (any *invalidAny) ToFloat32() float32 {
return 0
}
func (any *invalidAny) ToFloat64() float64 {
return 0
}
func (any *invalidAny) ToString() string {
return ""
}
func (any *invalidAny) WriteTo(stream *Stream) {
}
func (any *invalidAny) Get(path ...interface{}) Any {
if any.err == nil {
return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)}
}
return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
}
func (any *invalidAny) Parse() *Iterator {
return nil
}
func (any *invalidAny) GetInterface() interface{} {
return nil
}

69
vendor/github.com/json-iterator/go/any_nil.go generated vendored Normal file
View File

@ -0,0 +1,69 @@
package jsoniter
type nilAny struct {
baseAny
}
func (any *nilAny) LastError() error {
return nil
}
func (any *nilAny) ValueType() ValueType {
return NilValue
}
func (any *nilAny) MustBeValid() Any {
return any
}
func (any *nilAny) ToBool() bool {
return false
}
func (any *nilAny) ToInt() int {
return 0
}
func (any *nilAny) ToInt32() int32 {
return 0
}
func (any *nilAny) ToInt64() int64 {
return 0
}
func (any *nilAny) ToUint() uint {
return 0
}
func (any *nilAny) ToUint32() uint32 {
return 0
}
func (any *nilAny) ToUint64() uint64 {
return 0
}
func (any *nilAny) ToFloat32() float32 {
return 0
}
func (any *nilAny) ToFloat64() float64 {
return 0
}
func (any *nilAny) ToString() string {
return ""
}
func (any *nilAny) WriteTo(stream *Stream) {
stream.WriteNil()
}
func (any *nilAny) Parse() *Iterator {
return nil
}
func (any *nilAny) GetInterface() interface{} {
return nil
}

123
vendor/github.com/json-iterator/go/any_number.go generated vendored Normal file
View File

@ -0,0 +1,123 @@
package jsoniter
import (
"io"
"unsafe"
)
type numberLazyAny struct {
baseAny
cfg *frozenConfig
buf []byte
err error
}
func (any *numberLazyAny) ValueType() ValueType {
return NumberValue
}
func (any *numberLazyAny) MustBeValid() Any {
return any
}
func (any *numberLazyAny) LastError() error {
return any.err
}
func (any *numberLazyAny) ToBool() bool {
return any.ToFloat64() != 0
}
func (any *numberLazyAny) ToInt() int {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToInt32() int32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt32()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToInt64() int64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadInt64()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToUint() uint {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToUint32() uint32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint32()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToUint64() uint64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadUint64()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToFloat32() float32 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat32()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToFloat64() float64 {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
val := iter.ReadFloat64()
if iter.Error != nil && iter.Error != io.EOF {
any.err = iter.Error
}
return val
}
func (any *numberLazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *numberLazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *numberLazyAny) GetInterface() interface{} {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.Read()
}

374
vendor/github.com/json-iterator/go/any_object.go generated vendored Normal file
View File

@ -0,0 +1,374 @@
package jsoniter
import (
"reflect"
"unsafe"
)
type objectLazyAny struct {
baseAny
cfg *frozenConfig
buf []byte
err error
}
func (any *objectLazyAny) ValueType() ValueType {
return ObjectValue
}
func (any *objectLazyAny) MustBeValid() Any {
return any
}
func (any *objectLazyAny) LastError() error {
return any.err
}
func (any *objectLazyAny) ToBool() bool {
return true
}
func (any *objectLazyAny) ToInt() int {
return 0
}
func (any *objectLazyAny) ToInt32() int32 {
return 0
}
func (any *objectLazyAny) ToInt64() int64 {
return 0
}
func (any *objectLazyAny) ToUint() uint {
return 0
}
func (any *objectLazyAny) ToUint32() uint32 {
return 0
}
func (any *objectLazyAny) ToUint64() uint64 {
return 0
}
func (any *objectLazyAny) ToFloat32() float32 {
return 0
}
func (any *objectLazyAny) ToFloat64() float64 {
return 0
}
func (any *objectLazyAny) ToString() string {
return *(*string)(unsafe.Pointer(&any.buf))
}
func (any *objectLazyAny) ToVal(obj interface{}) {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadVal(obj)
}
func (any *objectLazyAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case string:
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
valueBytes := locateObjectField(iter, firstPath)
if valueBytes == nil {
return newInvalidAny(path)
}
iter.ResetBytes(valueBytes)
return locatePath(iter, path[1:])
case int32:
if '*' == firstPath {
mappedAll := map[string]Any{}
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadMapCB(func(iter *Iterator, field string) bool {
mapped := locatePath(iter, path[1:])
if mapped.ValueType() != InvalidValue {
mappedAll[field] = mapped
}
return true
})
return wrapMap(mappedAll)
}
return newInvalidAny(path)
default:
return newInvalidAny(path)
}
}
func (any *objectLazyAny) Keys() []string {
keys := []string{}
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadMapCB(func(iter *Iterator, field string) bool {
iter.Skip()
keys = append(keys, field)
return true
})
return keys
}
func (any *objectLazyAny) Size() int {
size := 0
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
iter.Skip()
size++
return true
})
return size
}
func (any *objectLazyAny) WriteTo(stream *Stream) {
stream.Write(any.buf)
}
func (any *objectLazyAny) GetInterface() interface{} {
iter := any.cfg.BorrowIterator(any.buf)
defer any.cfg.ReturnIterator(iter)
return iter.Read()
}
type objectAny struct {
baseAny
err error
val reflect.Value
}
func wrapStruct(val interface{}) *objectAny {
return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
}
func (any *objectAny) ValueType() ValueType {
return ObjectValue
}
func (any *objectAny) MustBeValid() Any {
return any
}
func (any *objectAny) Parse() *Iterator {
return nil
}
func (any *objectAny) LastError() error {
return any.err
}
func (any *objectAny) ToBool() bool {
return any.val.NumField() != 0
}
func (any *objectAny) ToInt() int {
return 0
}
func (any *objectAny) ToInt32() int32 {
return 0
}
func (any *objectAny) ToInt64() int64 {
return 0
}
func (any *objectAny) ToUint() uint {
return 0
}
func (any *objectAny) ToUint32() uint32 {
return 0
}
func (any *objectAny) ToUint64() uint64 {
return 0
}
func (any *objectAny) ToFloat32() float32 {
return 0
}
func (any *objectAny) ToFloat64() float64 {
return 0
}
func (any *objectAny) ToString() string {
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
}
func (any *objectAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case string:
field := any.val.FieldByName(firstPath)
if !field.IsValid() {
return newInvalidAny(path)
}
return Wrap(field.Interface())
case int32:
if '*' == firstPath {
mappedAll := map[string]Any{}
for i := 0; i < any.val.NumField(); i++ {
field := any.val.Field(i)
if field.CanInterface() {
mapped := Wrap(field.Interface()).Get(path[1:]...)
if mapped.ValueType() != InvalidValue {
mappedAll[any.val.Type().Field(i).Name] = mapped
}
}
}
return wrapMap(mappedAll)
}
return newInvalidAny(path)
default:
return newInvalidAny(path)
}
}
func (any *objectAny) Keys() []string {
keys := make([]string, 0, any.val.NumField())
for i := 0; i < any.val.NumField(); i++ {
keys = append(keys, any.val.Type().Field(i).Name)
}
return keys
}
func (any *objectAny) Size() int {
return any.val.NumField()
}
func (any *objectAny) WriteTo(stream *Stream) {
stream.WriteVal(any.val)
}
func (any *objectAny) GetInterface() interface{} {
return any.val.Interface()
}
type mapAny struct {
baseAny
err error
val reflect.Value
}
func wrapMap(val interface{}) *mapAny {
return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
}
func (any *mapAny) ValueType() ValueType {
return ObjectValue
}
func (any *mapAny) MustBeValid() Any {
return any
}
func (any *mapAny) Parse() *Iterator {
return nil
}
func (any *mapAny) LastError() error {
return any.err
}
func (any *mapAny) ToBool() bool {
return true
}
func (any *mapAny) ToInt() int {
return 0
}
func (any *mapAny) ToInt32() int32 {
return 0
}
func (any *mapAny) ToInt64() int64 {
return 0
}
func (any *mapAny) ToUint() uint {
return 0
}
func (any *mapAny) ToUint32() uint32 {
return 0
}
func (any *mapAny) ToUint64() uint64 {
return 0
}
func (any *mapAny) ToFloat32() float32 {
return 0
}
func (any *mapAny) ToFloat64() float64 {
return 0
}
func (any *mapAny) ToString() string {
str, err := MarshalToString(any.val.Interface())
any.err = err
return str
}
func (any *mapAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
switch firstPath := path[0].(type) {
case int32:
if '*' == firstPath {
mappedAll := map[string]Any{}
for _, key := range any.val.MapKeys() {
keyAsStr := key.String()
element := Wrap(any.val.MapIndex(key).Interface())
mapped := element.Get(path[1:]...)
if mapped.ValueType() != InvalidValue {
mappedAll[keyAsStr] = mapped
}
}
return wrapMap(mappedAll)
}
return newInvalidAny(path)
default:
value := any.val.MapIndex(reflect.ValueOf(firstPath))
if !value.IsValid() {
return newInvalidAny(path)
}
return Wrap(value.Interface())
}
}
func (any *mapAny) Keys() []string {
keys := make([]string, 0, any.val.Len())
for _, key := range any.val.MapKeys() {
keys = append(keys, key.String())
}
return keys
}
func (any *mapAny) Size() int {
return any.val.Len()
}
func (any *mapAny) WriteTo(stream *Stream) {
stream.WriteVal(any.val)
}
func (any *mapAny) GetInterface() interface{} {
return any.val.Interface()
}

166
vendor/github.com/json-iterator/go/any_str.go generated vendored Normal file
View File

@ -0,0 +1,166 @@
package jsoniter
import (
"fmt"
"strconv"
)
type stringAny struct {
baseAny
val string
}
func (any *stringAny) Get(path ...interface{}) Any {
if len(path) == 0 {
return any
}
return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
}
func (any *stringAny) Parse() *Iterator {
return nil
}
func (any *stringAny) ValueType() ValueType {
return StringValue
}
func (any *stringAny) MustBeValid() Any {
return any
}
func (any *stringAny) LastError() error {
return nil
}
func (any *stringAny) ToBool() bool {
str := any.ToString()
if str == "0" {
return false
}
for _, c := range str {
switch c {
case ' ', '\n', '\r', '\t':
default:
return true
}
}
return false
}
func (any *stringAny) ToInt() int {
return int(any.ToInt64())
}
func (any *stringAny) ToInt32() int32 {
return int32(any.ToInt64())
}
func (any *stringAny) ToInt64() int64 {
if any.val == "" {
return 0
}
flag := 1
startPos := 0
if any.val[0] == '+' || any.val[0] == '-' {
startPos = 1
}
if any.val[0] == '-' {
flag = -1
}
endPos := startPos
for i := startPos; i < len(any.val); i++ {
if any.val[i] >= '0' && any.val[i] <= '9' {
endPos = i + 1
} else {
break
}
}
parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64)
return int64(flag) * parsed
}
func (any *stringAny) ToUint() uint {
return uint(any.ToUint64())
}
func (any *stringAny) ToUint32() uint32 {
return uint32(any.ToUint64())
}
func (any *stringAny) ToUint64() uint64 {
if any.val == "" {
return 0
}
startPos := 0
if any.val[0] == '-' {
return 0
}
if any.val[0] == '+' {
startPos = 1
}
endPos := startPos
for i := startPos; i < len(any.val); i++ {
if any.val[i] >= '0' && any.val[i] <= '9' {
endPos = i + 1
} else {
break
}
}
parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64)
return parsed
}
func (any *stringAny) ToFloat32() float32 {
return float32(any.ToFloat64())
}
func (any *stringAny) ToFloat64() float64 {
if len(any.val) == 0 {
return 0
}
// first char invalid
if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') {
return 0
}
// extract valid num expression from string
// eg 123true => 123, -12.12xxa => -12.12
endPos := 1
for i := 1; i < len(any.val); i++ {
if any.val[i] == '.' || any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' {
endPos = i + 1
continue
}
// end position is the first char which is not digit
if any.val[i] >= '0' && any.val[i] <= '9' {
endPos = i + 1
} else {
endPos = i
break
}
}
parsed, _ := strconv.ParseFloat(any.val[:endPos], 64)
return parsed
}
func (any *stringAny) ToString() string {
return any.val
}
func (any *stringAny) WriteTo(stream *Stream) {
stream.WriteString(any.val)
}
func (any *stringAny) GetInterface() interface{} {
return any.val
}

74
vendor/github.com/json-iterator/go/any_uint32.go generated vendored Normal file
View File

@ -0,0 +1,74 @@
package jsoniter
import (
"strconv"
)
type uint32Any struct {
baseAny
val uint32
}
func (any *uint32Any) LastError() error {
return nil
}
func (any *uint32Any) ValueType() ValueType {
return NumberValue
}
func (any *uint32Any) MustBeValid() Any {
return any
}
func (any *uint32Any) ToBool() bool {
return any.val != 0
}
func (any *uint32Any) ToInt() int {
return int(any.val)
}
func (any *uint32Any) ToInt32() int32 {
return int32(any.val)
}
func (any *uint32Any) ToInt64() int64 {
return int64(any.val)
}
func (any *uint32Any) ToUint() uint {
return uint(any.val)
}
func (any *uint32Any) ToUint32() uint32 {
return any.val
}
func (any *uint32Any) ToUint64() uint64 {
return uint64(any.val)
}
func (any *uint32Any) ToFloat32() float32 {
return float32(any.val)
}
func (any *uint32Any) ToFloat64() float64 {
return float64(any.val)
}
func (any *uint32Any) ToString() string {
return strconv.FormatInt(int64(any.val), 10)
}
func (any *uint32Any) WriteTo(stream *Stream) {
stream.WriteUint32(any.val)
}
func (any *uint32Any) Parse() *Iterator {
return nil
}
func (any *uint32Any) GetInterface() interface{} {
return any.val
}

74
vendor/github.com/json-iterator/go/any_uint64.go generated vendored Normal file
View File

@ -0,0 +1,74 @@
package jsoniter
import (
"strconv"
)
type uint64Any struct {
baseAny
val uint64
}
func (any *uint64Any) LastError() error {
return nil
}
func (any *uint64Any) ValueType() ValueType {
return NumberValue
}
func (any *uint64Any) MustBeValid() Any {
return any
}
func (any *uint64Any) ToBool() bool {
return any.val != 0
}
func (any *uint64Any) ToInt() int {
return int(any.val)
}
func (any *uint64Any) ToInt32() int32 {
return int32(any.val)
}
func (any *uint64Any) ToInt64() int64 {
return int64(any.val)
}
func (any *uint64Any) ToUint() uint {
return uint(any.val)
}
func (any *uint64Any) ToUint32() uint32 {
return uint32(any.val)
}
func (any *uint64Any) ToUint64() uint64 {
return any.val
}
func (any *uint64Any) ToFloat32() float32 {
return float32(any.val)
}
func (any *uint64Any) ToFloat64() float64 {
return float64(any.val)
}
func (any *uint64Any) ToString() string {
return strconv.FormatUint(any.val, 10)
}
func (any *uint64Any) WriteTo(stream *Stream) {
stream.WriteUint64(any.val)
}
func (any *uint64Any) Parse() *Iterator {
return nil
}
func (any *uint64Any) GetInterface() interface{} {
return any.val
}

12
vendor/github.com/json-iterator/go/build.sh generated vendored Normal file
View File

@ -0,0 +1,12 @@
#!/bin/bash
set -e
set -x
if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
mkdir -p /tmp/build-golang/src/github.com/json-iterator
ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
fi
export GOPATH=/tmp/build-golang
go get -u github.com/golang/dep/cmd/dep
cd /tmp/build-golang/src/github.com/json-iterator/go
exec $GOPATH/bin/dep ensure -update

375
vendor/github.com/json-iterator/go/config.go generated vendored Normal file
View File

@ -0,0 +1,375 @@
package jsoniter
import (
"encoding/json"
"io"
"reflect"
"sync"
"unsafe"
"github.com/modern-go/concurrent"
"github.com/modern-go/reflect2"
)
// Config customizes how the API behaves.
// The API is created from a Config by calling Froze.
type Config struct {
IndentionStep int
MarshalFloatWith6Digits bool
EscapeHTML bool
SortMapKeys bool
UseNumber bool
DisallowUnknownFields bool
TagKey string
OnlyTaggedField bool
ValidateJsonRawMessage bool
ObjectFieldMustBeSimpleString bool
CaseSensitive bool
}
// API is the public interface of this package.
// Its primary entry points are Marshal and Unmarshal.
type API interface {
IteratorPool
StreamPool
MarshalToString(v interface{}) (string, error)
Marshal(v interface{}) ([]byte, error)
MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
UnmarshalFromString(str string, v interface{}) error
Unmarshal(data []byte, v interface{}) error
Get(data []byte, path ...interface{}) Any
NewEncoder(writer io.Writer) *Encoder
NewDecoder(reader io.Reader) *Decoder
Valid(data []byte) bool
RegisterExtension(extension Extension)
DecoderOf(typ reflect2.Type) ValDecoder
EncoderOf(typ reflect2.Type) ValEncoder
}
// ConfigDefault the default API
var ConfigDefault = Config{
EscapeHTML: true,
}.Froze()
// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior
var ConfigCompatibleWithStandardLibrary = Config{
EscapeHTML: true,
SortMapKeys: true,
ValidateJsonRawMessage: true,
}.Froze()
// ConfigFastest marshals floats with only 6 digits of precision
var ConfigFastest = Config{
EscapeHTML: false,
MarshalFloatWith6Digits: true, // will lose precision
ObjectFieldMustBeSimpleString: true, // do not unescape object field
}.Froze()
type frozenConfig struct {
configBeforeFrozen Config
sortMapKeys bool
indentionStep int
objectFieldMustBeSimpleString bool
onlyTaggedField bool
disallowUnknownFields bool
decoderCache *concurrent.Map
encoderCache *concurrent.Map
encoderExtension Extension
decoderExtension Extension
extraExtensions []Extension
streamPool *sync.Pool
iteratorPool *sync.Pool
caseSensitive bool
}
func (cfg *frozenConfig) initCache() {
cfg.decoderCache = concurrent.NewMap()
cfg.encoderCache = concurrent.NewMap()
}
func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
cfg.decoderCache.Store(cacheKey, decoder)
}
func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
cfg.encoderCache.Store(cacheKey, encoder)
}
func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
decoder, found := cfg.decoderCache.Load(cacheKey)
if found {
return decoder.(ValDecoder)
}
return nil
}
func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
encoder, found := cfg.encoderCache.Load(cacheKey)
if found {
return encoder.(ValEncoder)
}
return nil
}
var cfgCache = concurrent.NewMap()
func getFrozenConfigFromCache(cfg Config) *frozenConfig {
obj, found := cfgCache.Load(cfg)
if found {
return obj.(*frozenConfig)
}
return nil
}
func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
cfgCache.Store(cfg, frozenConfig)
}
// Froze forges an API from the Config
func (cfg Config) Froze() API {
api := &frozenConfig{
sortMapKeys: cfg.SortMapKeys,
indentionStep: cfg.IndentionStep,
objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
onlyTaggedField: cfg.OnlyTaggedField,
disallowUnknownFields: cfg.DisallowUnknownFields,
caseSensitive: cfg.CaseSensitive,
}
api.streamPool = &sync.Pool{
New: func() interface{} {
return NewStream(api, nil, 512)
},
}
api.iteratorPool = &sync.Pool{
New: func() interface{} {
return NewIterator(api)
},
}
api.initCache()
encoderExtension := EncoderExtension{}
decoderExtension := DecoderExtension{}
if cfg.MarshalFloatWith6Digits {
api.marshalFloatWith6Digits(encoderExtension)
}
if cfg.EscapeHTML {
api.escapeHTML(encoderExtension)
}
if cfg.UseNumber {
api.useNumber(decoderExtension)
}
if cfg.ValidateJsonRawMessage {
api.validateJsonRawMessage(encoderExtension)
}
api.encoderExtension = encoderExtension
api.decoderExtension = decoderExtension
api.configBeforeFrozen = cfg
return api
}
func (cfg Config) frozeWithCacheReuse(extraExtensions []Extension) *frozenConfig {
api := getFrozenConfigFromCache(cfg)
if api != nil {
return api
}
api = cfg.Froze().(*frozenConfig)
for _, extension := range extraExtensions {
api.RegisterExtension(extension)
}
addFrozenConfigToCache(cfg, api)
return api
}
func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
rawMessage := *(*json.RawMessage)(ptr)
iter := cfg.BorrowIterator([]byte(rawMessage))
defer cfg.ReturnIterator(iter)
iter.Read()
if iter.Error != nil && iter.Error != io.EOF {
stream.WriteRaw("null")
} else {
stream.WriteRaw(string(rawMessage))
}
}, func(ptr unsafe.Pointer) bool {
return len(*((*json.RawMessage)(ptr))) == 0
}}
extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
}
func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
exitingValue := *((*interface{})(ptr))
if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
iter.ReadVal(exitingValue)
return
}
if iter.WhatIsNext() == NumberValue {
*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
} else {
*((*interface{})(ptr)) = iter.Read()
}
}}
}
func (cfg *frozenConfig) getTagKey() string {
tagKey := cfg.configBeforeFrozen.TagKey
if tagKey == "" {
return "json"
}
return tagKey
}
func (cfg *frozenConfig) RegisterExtension(extension Extension) {
cfg.extraExtensions = append(cfg.extraExtensions, extension)
copied := cfg.configBeforeFrozen
cfg.configBeforeFrozen = copied
}
type lossyFloat32Encoder struct {
}
func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat32Lossy(*((*float32)(ptr)))
}
func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float32)(ptr)) == 0
}
type lossyFloat64Encoder struct {
}
func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat64Lossy(*((*float64)(ptr)))
}
func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float64)(ptr)) == 0
}
// marshalFloatWith6Digits keeps only 10**(-6) precision
// for float variables, trading accuracy for performance.
func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) {
// for better performance
extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{}
extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{}
}
type htmlEscapedStringEncoder struct {
}
func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
str := *((*string)(ptr))
stream.WriteStringWithHTMLEscaped(str)
}
func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*string)(ptr)) == ""
}
func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) {
encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{}
}
func (cfg *frozenConfig) cleanDecoders() {
typeDecoders = map[string]ValDecoder{}
fieldDecoders = map[string]ValDecoder{}
*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
}
func (cfg *frozenConfig) cleanEncoders() {
typeEncoders = map[string]ValEncoder{}
fieldEncoders = map[string]ValEncoder{}
*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
}
func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
stream := cfg.BorrowStream(nil)
defer cfg.ReturnStream(stream)
stream.WriteVal(v)
if stream.Error != nil {
return "", stream.Error
}
return string(stream.Buffer()), nil
}
func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
stream := cfg.BorrowStream(nil)
defer cfg.ReturnStream(stream)
stream.WriteVal(v)
if stream.Error != nil {
return nil, stream.Error
}
result := stream.Buffer()
copied := make([]byte, len(result))
copy(copied, result)
return copied, nil
}
func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
if prefix != "" {
panic("prefix is not supported")
}
for _, r := range indent {
if r != ' ' {
panic("indent can only be space")
}
}
newCfg := cfg.configBeforeFrozen
newCfg.IndentionStep = len(indent)
return newCfg.frozeWithCacheReuse(cfg.extraExtensions).Marshal(v)
}
func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
data := []byte(str)
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
iter.ReadVal(v)
c := iter.nextToken()
if c == 0 {
if iter.Error == io.EOF {
return nil
}
return iter.Error
}
iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
return iter.Error
}
func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
return locatePath(iter, path)
}
func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
iter.ReadVal(v)
c := iter.nextToken()
if c == 0 {
if iter.Error == io.EOF {
return nil
}
return iter.Error
}
iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
return iter.Error
}
func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
stream := NewStream(cfg, writer, 512)
return &Encoder{stream}
}
func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
iter := Parse(cfg, reader, 512)
return &Decoder{iter}
}
func (cfg *frozenConfig) Valid(data []byte) bool {
iter := cfg.BorrowIterator(data)
defer cfg.ReturnIterator(iter)
iter.Skip()
return iter.Error == nil
}
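
A minimal usage sketch of the Config/Froze API defined above (illustrative only, not part of the vendored file; the payload type and its field names are hypothetical):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// payload is a made-up type used only for this illustration.
type payload struct {
	Name  string  `json:"name"`
	Score float64 `json:"score"`
}

func main() {
	// Froze turns the Config into an immutable API whose encoders and
	// decoders are cached per type after first use.
	api := jsoniter.Config{
		EscapeHTML:             true,
		SortMapKeys:            true,
		ValidateJsonRawMessage: true,
	}.Froze()

	out, err := api.MarshalToString(payload{Name: "example", Score: 1.5})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // {"name":"example","score":1.5}

	var back payload
	if err := api.UnmarshalFromString(out, &back); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", back)
}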

7
vendor/github.com/json-iterator/go/fuzzy_mode_convert_table.md generated vendored Normal file
View File

@ -0,0 +1,7 @@
| json type \ dest type | bool | int | uint | float | string |
| --- | --- | --- | --- | --- | --- |
| number | positive => true <br/> negative => true <br/> zero => false | 23.2 => 23 <br/> -32.1 => -32 | 12.1 => 12 <br/> -12.1 => 0 | as normal | same as origin |
| string | empty string => false <br/> string "0" => false <br/> other strings => true | "123.32" => 123 <br/> "-123.4" => -123 <br/> "123.23xxxw" => 123 <br/> "abcde12" => 0 <br/> "-32.1" => -32 | 13.2 => 13 <br/> -1.1 => 0 | 12.1 => 12.1 <br/> -12.3 => -12.3 <br/> 12.4xxa => 12.4 <br/> +1.1e2 => 110 | same as origin |
| bool | true => true <br/> false => false | true => 1 <br/> false => 0 | true => 1 <br/> false => 0 | true => 1 <br/> false => 0 | true => "true" <br/> false => "false" |
| object | true | 0 | 0 | 0 | original json |
| array | empty array => false <br/> nonempty array => true | [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 | [] => 0 <br/> [1,2] => 1 | original json |
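
A short sketch of the fuzzy conversions in the table above, exercised through the Any accessors (illustrative only, not part of the vendored file):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"count":"123.32xx","enabled":"0","ratio":"+1.1e2"}`)

	// string "123.32xx" => 123: ToInt uses the leading numeric prefix.
	fmt.Println(jsoniter.ConfigDefault.Get(data, "count").ToInt())

	// string "0" => false: only "0", the empty string, and whitespace-only strings are falsy.
	fmt.Println(jsoniter.ConfigDefault.Get(data, "enabled").ToBool())

	// string "+1.1e2" => 110: sign and exponent notation are handled by ToFloat64.
	fmt.Println(jsoniter.ConfigDefault.Get(data, "ratio").ToFloat64())
}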

11
vendor/github.com/json-iterator/go/go.mod generated vendored Normal file
View File

@ -0,0 +1,11 @@
module github.com/json-iterator/go
go 1.12
require (
github.com/davecgh/go-spew v1.1.1
github.com/google/gofuzz v1.0.0
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742
github.com/stretchr/testify v1.3.0
)

14
vendor/github.com/json-iterator/go/go.sum generated vendored Normal file
View File

@ -0,0 +1,14 @@
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742 h1:Esafd1046DLDQ0W1YjYsBW+p8U2u7vzgW2SQVmlNazg=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=

349
vendor/github.com/json-iterator/go/iter.go generated vendored Normal file
View File

@ -0,0 +1,349 @@
package jsoniter
import (
"encoding/json"
"fmt"
"io"
)
// ValueType the type for JSON element
type ValueType int
const (
// InvalidValue invalid JSON element
InvalidValue ValueType = iota
// StringValue JSON element "string"
StringValue
// NumberValue JSON element 100 or 0.10
NumberValue
// NilValue JSON element null
NilValue
// BoolValue JSON element true or false
BoolValue
// ArrayValue JSON element []
ArrayValue
// ObjectValue JSON element {}
ObjectValue
)
var hexDigits []byte
var valueTypes []ValueType
func init() {
hexDigits = make([]byte, 256)
for i := 0; i < len(hexDigits); i++ {
hexDigits[i] = 255
}
for i := '0'; i <= '9'; i++ {
hexDigits[i] = byte(i - '0')
}
for i := 'a'; i <= 'f'; i++ {
hexDigits[i] = byte((i - 'a') + 10)
}
for i := 'A'; i <= 'F'; i++ {
hexDigits[i] = byte((i - 'A') + 10)
}
valueTypes = make([]ValueType, 256)
for i := 0; i < len(valueTypes); i++ {
valueTypes[i] = InvalidValue
}
valueTypes['"'] = StringValue
valueTypes['-'] = NumberValue
valueTypes['0'] = NumberValue
valueTypes['1'] = NumberValue
valueTypes['2'] = NumberValue
valueTypes['3'] = NumberValue
valueTypes['4'] = NumberValue
valueTypes['5'] = NumberValue
valueTypes['6'] = NumberValue
valueTypes['7'] = NumberValue
valueTypes['8'] = NumberValue
valueTypes['9'] = NumberValue
valueTypes['t'] = BoolValue
valueTypes['f'] = BoolValue
valueTypes['n'] = NilValue
valueTypes['['] = ArrayValue
valueTypes['{'] = ObjectValue
}
// Iterator is an io.Reader-like object with JSON-specific read functions.
// Errors are not returned as return values; they are stored in the Error member of the iterator instance.
type Iterator struct {
cfg *frozenConfig
reader io.Reader
buf []byte
head int
tail int
depth int
captureStartedAt int
captured []byte
Error error
Attachment interface{} // open for customized decoder
}
// NewIterator creates an empty Iterator instance
func NewIterator(cfg API) *Iterator {
return &Iterator{
cfg: cfg.(*frozenConfig),
reader: nil,
buf: nil,
head: 0,
tail: 0,
depth: 0,
}
}
// Parse creates an Iterator instance from io.Reader
func Parse(cfg API, reader io.Reader, bufSize int) *Iterator {
return &Iterator{
cfg: cfg.(*frozenConfig),
reader: reader,
buf: make([]byte, bufSize),
head: 0,
tail: 0,
depth: 0,
}
}
// ParseBytes creates an Iterator instance from byte array
func ParseBytes(cfg API, input []byte) *Iterator {
return &Iterator{
cfg: cfg.(*frozenConfig),
reader: nil,
buf: input,
head: 0,
tail: len(input),
depth: 0,
}
}
// ParseString creates an Iterator instance from string
func ParseString(cfg API, input string) *Iterator {
return ParseBytes(cfg, []byte(input))
}
// Pool returns a pool that can provide more iterators with the same configuration
func (iter *Iterator) Pool() IteratorPool {
return iter.cfg
}
// Reset reuses the iterator instance by specifying another reader
func (iter *Iterator) Reset(reader io.Reader) *Iterator {
iter.reader = reader
iter.head = 0
iter.tail = 0
iter.depth = 0
return iter
}
// ResetBytes reuses the iterator instance by specifying another byte array as input
func (iter *Iterator) ResetBytes(input []byte) *Iterator {
iter.reader = nil
iter.buf = input
iter.head = 0
iter.tail = len(input)
iter.depth = 0
return iter
}
// WhatIsNext reports the ValueType of the next JSON element without consuming it
func (iter *Iterator) WhatIsNext() ValueType {
valueType := valueTypes[iter.nextToken()]
iter.unreadByte()
return valueType
}
func (iter *Iterator) skipWhitespacesWithoutLoadMore() bool {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case ' ', '\n', '\t', '\r':
continue
}
iter.head = i
return false
}
return true
}
func (iter *Iterator) isObjectEnd() bool {
c := iter.nextToken()
if c == ',' {
return false
}
if c == '}' {
return true
}
iter.ReportError("isObjectEnd", "object ended prematurely, unexpected char "+string([]byte{c}))
return true
}
func (iter *Iterator) nextToken() byte {
// a variation of skip whitespaces, returning the next non-whitespace token
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case ' ', '\n', '\t', '\r':
continue
}
iter.head = i + 1
return c
}
if !iter.loadMore() {
return 0
}
}
}
// ReportError records an error on the iterator instance, including the current position.
func (iter *Iterator) ReportError(operation string, msg string) {
if iter.Error != nil {
if iter.Error != io.EOF {
return
}
}
peekStart := iter.head - 10
if peekStart < 0 {
peekStart = 0
}
peekEnd := iter.head + 10
if peekEnd > iter.tail {
peekEnd = iter.tail
}
parsing := string(iter.buf[peekStart:peekEnd])
contextStart := iter.head - 50
if contextStart < 0 {
contextStart = 0
}
contextEnd := iter.head + 50
if contextEnd > iter.tail {
contextEnd = iter.tail
}
context := string(iter.buf[contextStart:contextEnd])
iter.Error = fmt.Errorf("%s: %s, error found in #%v byte of ...|%s|..., bigger context ...|%s|...",
operation, msg, iter.head-peekStart, parsing, context)
}
// CurrentBuffer gets the current buffer as a string for debugging purposes
func (iter *Iterator) CurrentBuffer() string {
peekStart := iter.head - 10
if peekStart < 0 {
peekStart = 0
}
return fmt.Sprintf("parsing #%v byte, around ...|%s|..., whole buffer ...|%s|...", iter.head,
string(iter.buf[peekStart:iter.head]), string(iter.buf[0:iter.tail]))
}
func (iter *Iterator) readByte() (ret byte) {
if iter.head == iter.tail {
if iter.loadMore() {
ret = iter.buf[iter.head]
iter.head++
return ret
}
return 0
}
ret = iter.buf[iter.head]
iter.head++
return ret
}
func (iter *Iterator) loadMore() bool {
if iter.reader == nil {
if iter.Error == nil {
iter.head = iter.tail
iter.Error = io.EOF
}
return false
}
if iter.captured != nil {
iter.captured = append(iter.captured,
iter.buf[iter.captureStartedAt:iter.tail]...)
iter.captureStartedAt = 0
}
for {
n, err := iter.reader.Read(iter.buf)
if n == 0 {
if err != nil {
if iter.Error == nil {
iter.Error = err
}
return false
}
} else {
iter.head = 0
iter.tail = n
return true
}
}
}
func (iter *Iterator) unreadByte() {
if iter.Error != nil {
return
}
iter.head--
return
}
// Read reads the next JSON element as a generic interface{}.
func (iter *Iterator) Read() interface{} {
valueType := iter.WhatIsNext()
switch valueType {
case StringValue:
return iter.ReadString()
case NumberValue:
if iter.cfg.configBeforeFrozen.UseNumber {
return json.Number(iter.readNumberAsString())
}
return iter.ReadFloat64()
case NilValue:
iter.skipFourBytes('n', 'u', 'l', 'l')
return nil
case BoolValue:
return iter.ReadBool()
case ArrayValue:
arr := []interface{}{}
iter.ReadArrayCB(func(iter *Iterator) bool {
var elem interface{}
iter.ReadVal(&elem)
arr = append(arr, elem)
return true
})
return arr
case ObjectValue:
obj := map[string]interface{}{}
iter.ReadMapCB(func(Iter *Iterator, field string) bool {
var elem interface{}
iter.ReadVal(&elem)
obj[field] = elem
return true
})
return obj
default:
iter.ReportError("Read", fmt.Sprintf("unexpected value type: %v", valueType))
return nil
}
}
// limit maximum depth of nesting, as allowed by https://tools.ietf.org/html/rfc7159#section-9
const maxDepth = 10000
func (iter *Iterator) incrementDepth() (success bool) {
iter.depth++
if iter.depth <= maxDepth {
return true
}
iter.ReportError("incrementDepth", "exceeded max depth")
return false
}
func (iter *Iterator) decrementDepth() (success bool) {
iter.depth--
if iter.depth >= 0 {
return true
}
iter.ReportError("decrementDepth", "unexpected negative nesting")
return false
}
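
A sketch of driving the Iterator directly, reading one element at a time instead of unmarshalling into a struct (illustrative only, not part of the vendored file):

package main

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"name":"tunnel","retries":3}`)

	// ReadObject returns the next field name, or "" when the object ends.
	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
		switch iter.WhatIsNext() {
		case jsoniter.StringValue:
			fmt.Println(field, "=", iter.ReadString())
		case jsoniter.NumberValue:
			fmt.Println(field, "=", iter.ReadInt())
		default:
			iter.Skip() // ignore values we do not care about
		}
	}
	if iter.Error != nil && iter.Error != io.EOF {
		fmt.Println("parse error:", iter.Error)
	}
}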

64
vendor/github.com/json-iterator/go/iter_array.go generated vendored Normal file
View File

@ -0,0 +1,64 @@
package jsoniter
// ReadArray reads an array element and reports whether the array has more elements to read.
func (iter *Iterator) ReadArray() (ret bool) {
c := iter.nextToken()
switch c {
case 'n':
iter.skipThreeBytes('u', 'l', 'l')
return false // null
case '[':
c = iter.nextToken()
if c != ']' {
iter.unreadByte()
return true
}
return false
case ']':
return false
case ',':
return true
default:
iter.ReportError("ReadArray", "expect [ or , or ] or n, but found "+string([]byte{c}))
return
}
}
// ReadArrayCB reads an array, invoking the callback for each element
func (iter *Iterator) ReadArrayCB(callback func(*Iterator) bool) (ret bool) {
c := iter.nextToken()
if c == '[' {
if !iter.incrementDepth() {
return false
}
c = iter.nextToken()
if c != ']' {
iter.unreadByte()
if !callback(iter) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
for c == ',' {
if !callback(iter) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
}
if c != ']' {
iter.ReportError("ReadArrayCB", "expect ] in the end, but found "+string([]byte{c}))
iter.decrementDepth()
return false
}
return iter.decrementDepth()
}
return iter.decrementDepth()
}
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return true // null
}
iter.ReportError("ReadArrayCB", "expect [ or n, but found "+string([]byte{c}))
return false
}

339
vendor/github.com/json-iterator/go/iter_float.go generated vendored Normal file
View File

@ -0,0 +1,339 @@
package jsoniter
import (
"encoding/json"
"io"
"math/big"
"strconv"
"strings"
"unsafe"
)
var floatDigits []int8
const invalidCharForNumber = int8(-1)
const endOfNumber = int8(-2)
const dotInNumber = int8(-3)
func init() {
floatDigits = make([]int8, 256)
for i := 0; i < len(floatDigits); i++ {
floatDigits[i] = invalidCharForNumber
}
for i := int8('0'); i <= int8('9'); i++ {
floatDigits[i] = i - int8('0')
}
floatDigits[','] = endOfNumber
floatDigits[']'] = endOfNumber
floatDigits['}'] = endOfNumber
floatDigits[' '] = endOfNumber
floatDigits['\t'] = endOfNumber
floatDigits['\n'] = endOfNumber
floatDigits['.'] = dotInNumber
}
// ReadBigFloat read big.Float
func (iter *Iterator) ReadBigFloat() (ret *big.Float) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return nil
}
prec := 64
if len(str) > prec {
prec = len(str)
}
val, _, err := big.ParseFloat(str, 10, uint(prec), big.ToZero)
if err != nil {
iter.Error = err
return nil
}
return val
}
// ReadBigInt read big.Int
func (iter *Iterator) ReadBigInt() (ret *big.Int) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return nil
}
ret = big.NewInt(0)
var success bool
ret, success = ret.SetString(str, 10)
if !success {
iter.ReportError("ReadBigInt", "invalid big int")
return nil
}
return ret
}
// ReadFloat32 reads a float32
func (iter *Iterator) ReadFloat32() (ret float32) {
c := iter.nextToken()
if c == '-' {
return -iter.readPositiveFloat32()
}
iter.unreadByte()
return iter.readPositiveFloat32()
}
func (iter *Iterator) readPositiveFloat32() (ret float32) {
i := iter.head
// first char
if i == iter.tail {
return iter.readFloat32SlowPath()
}
c := iter.buf[i]
i++
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat32SlowPath()
case endOfNumber:
iter.ReportError("readFloat32", "empty number")
return
case dotInNumber:
iter.ReportError("readFloat32", "leading dot is invalid")
return
case 0:
if i == iter.tail {
return iter.readFloat32SlowPath()
}
c = iter.buf[i]
switch c {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
iter.ReportError("readFloat32", "leading zero is invalid")
return
}
}
value := uint64(ind)
// chars before dot
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat32SlowPath()
case endOfNumber:
iter.head = i
return float32(value)
case dotInNumber:
break non_decimal_loop
}
if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
}
// chars after dot
if c == '.' {
i++
decimalPlaces := 0
if i == iter.tail {
return iter.readFloat32SlowPath()
}
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(pow10) {
iter.head = i
return float32(float64(value) / float64(pow10[decimalPlaces]))
}
// too many decimal places
return iter.readFloat32SlowPath()
case invalidCharForNumber, dotInNumber:
return iter.readFloat32SlowPath()
}
decimalPlaces++
if value > uint64SafeToMultiple10 {
return iter.readFloat32SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind)
}
}
return iter.readFloat32SlowPath()
}
func (iter *Iterator) readNumberAsString() (ret string) {
strBuf := [16]byte{}
str := strBuf[0:0]
load_loop:
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case '+', '-', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
str = append(str, c)
continue
default:
iter.head = i
break load_loop
}
}
if !iter.loadMore() {
break
}
}
if iter.Error != nil && iter.Error != io.EOF {
return
}
if len(str) == 0 {
iter.ReportError("readNumberAsString", "invalid number")
}
return *(*string)(unsafe.Pointer(&str))
}
func (iter *Iterator) readFloat32SlowPath() (ret float32) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return
}
errMsg := validateFloat(str)
if errMsg != "" {
iter.ReportError("readFloat32SlowPath", errMsg)
return
}
val, err := strconv.ParseFloat(str, 32)
if err != nil {
iter.Error = err
return
}
return float32(val)
}
// ReadFloat64 read float64
func (iter *Iterator) ReadFloat64() (ret float64) {
c := iter.nextToken()
if c == '-' {
return -iter.readPositiveFloat64()
}
iter.unreadByte()
return iter.readPositiveFloat64()
}
func (iter *Iterator) readPositiveFloat64() (ret float64) {
i := iter.head
// first char
if i == iter.tail {
return iter.readFloat64SlowPath()
}
c := iter.buf[i]
i++
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat64SlowPath()
case endOfNumber:
iter.ReportError("readFloat64", "empty number")
return
case dotInNumber:
iter.ReportError("readFloat64", "leading dot is invalid")
return
case 0:
if i == iter.tail {
return iter.readFloat64SlowPath()
}
c = iter.buf[i]
switch c {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
iter.ReportError("readFloat64", "leading zero is invalid")
return
}
}
value := uint64(ind)
// chars before dot
non_decimal_loop:
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case invalidCharForNumber:
return iter.readFloat64SlowPath()
case endOfNumber:
iter.head = i
return float64(value)
case dotInNumber:
break non_decimal_loop
}
if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind) // value = value * 10 + ind;
}
// chars after dot
if c == '.' {
i++
decimalPlaces := 0
if i == iter.tail {
return iter.readFloat64SlowPath()
}
for ; i < iter.tail; i++ {
c = iter.buf[i]
ind := floatDigits[c]
switch ind {
case endOfNumber:
if decimalPlaces > 0 && decimalPlaces < len(pow10) {
iter.head = i
return float64(value) / float64(pow10[decimalPlaces])
}
// too many decimal places
return iter.readFloat64SlowPath()
case invalidCharForNumber, dotInNumber:
return iter.readFloat64SlowPath()
}
decimalPlaces++
if value > uint64SafeToMultiple10 {
return iter.readFloat64SlowPath()
}
value = (value << 3) + (value << 1) + uint64(ind)
}
}
return iter.readFloat64SlowPath()
}
func (iter *Iterator) readFloat64SlowPath() (ret float64) {
str := iter.readNumberAsString()
if iter.Error != nil && iter.Error != io.EOF {
return
}
errMsg := validateFloat(str)
if errMsg != "" {
iter.ReportError("readFloat64SlowPath", errMsg)
return
}
val, err := strconv.ParseFloat(str, 64)
if err != nil {
iter.Error = err
return
}
return val
}
func validateFloat(str string) string {
// strconv.ParseFloat is not validating `1.` or `1.e1`
if len(str) == 0 {
return "empty number"
}
if str[0] == '-' {
return "-- is not valid"
}
dotPos := strings.IndexByte(str, '.')
if dotPos != -1 {
if dotPos == len(str)-1 {
return "dot can not be last character"
}
switch str[dotPos+1] {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
default:
return "missing digit after dot"
}
}
return ""
}
// ReadNumber read json.Number
func (iter *Iterator) ReadNumber() (ret json.Number) {
return json.Number(iter.readNumberAsString())
}
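
A sketch of the number readers above: ReadFloat64 stops at the first non-number byte, ReadNumber preserves the raw text, and ReadBigFloat parses with at least 64 bits of mantissa (illustrative only, not part of the vendored file):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[3.14159,10.5,0.12345678901234567890123456789]`)

	iter.ReadArray()
	fmt.Println(iter.ReadFloat64()) // 3.14159

	iter.ReadArray()
	fmt.Println(iter.ReadNumber()) // json.Number keeps the raw text: 10.5

	iter.ReadArray()
	f := iter.ReadBigFloat() // parsed into a *big.Float without truncating the literal
	fmt.Println(f.Text('g', 30))
}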

345
vendor/github.com/json-iterator/go/iter_int.go generated vendored Normal file
View File

@ -0,0 +1,345 @@
package jsoniter
import (
"math"
"strconv"
)
var intDigits []int8
const uint32SafeToMultiply10 = uint32(0xffffffff)/10 - 1
const uint64SafeToMultiple10 = uint64(0xffffffffffffffff)/10 - 1
func init() {
intDigits = make([]int8, 256)
for i := 0; i < len(intDigits); i++ {
intDigits[i] = invalidCharForNumber
}
for i := int8('0'); i <= int8('9'); i++ {
intDigits[i] = i - int8('0')
}
}
// ReadUint read uint
func (iter *Iterator) ReadUint() uint {
if strconv.IntSize == 32 {
return uint(iter.ReadUint32())
}
return uint(iter.ReadUint64())
}
// ReadInt read int
func (iter *Iterator) ReadInt() int {
if strconv.IntSize == 32 {
return int(iter.ReadInt32())
}
return int(iter.ReadInt64())
}
// ReadInt8 read int8
func (iter *Iterator) ReadInt8() (ret int8) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > math.MaxInt8+1 {
iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int8(val)
}
val := iter.readUint32(c)
if val > math.MaxInt8 {
iter.ReportError("ReadInt8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int8(val)
}
// ReadUint8 read uint8
func (iter *Iterator) ReadUint8() (ret uint8) {
val := iter.readUint32(iter.nextToken())
if val > math.MaxUint8 {
iter.ReportError("ReadUint8", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return uint8(val)
}
// ReadInt16 read int16
func (iter *Iterator) ReadInt16() (ret int16) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > math.MaxInt16+1 {
iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int16(val)
}
val := iter.readUint32(c)
if val > math.MaxInt16 {
iter.ReportError("ReadInt16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int16(val)
}
// ReadUint16 read uint16
func (iter *Iterator) ReadUint16() (ret uint16) {
val := iter.readUint32(iter.nextToken())
if val > math.MaxUint16 {
iter.ReportError("ReadUint16", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return uint16(val)
}
// ReadInt32 read int32
func (iter *Iterator) ReadInt32() (ret int32) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint32(iter.readByte())
if val > math.MaxInt32+1 {
iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return -int32(val)
}
val := iter.readUint32(c)
if val > math.MaxInt32 {
iter.ReportError("ReadInt32", "overflow: "+strconv.FormatInt(int64(val), 10))
return
}
return int32(val)
}
// ReadUint32 read uint32
func (iter *Iterator) ReadUint32() (ret uint32) {
return iter.readUint32(iter.nextToken())
}
func (iter *Iterator) readUint32(c byte) (ret uint32) {
ind := intDigits[c]
if ind == 0 {
iter.assertInteger()
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.ReportError("readUint32", "unexpected character: "+string([]byte{byte(ind)}))
return
}
value := uint32(ind)
if iter.tail-iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10 + uint32(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
i++
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100 + uint32(ind2)*10 + uint32(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000 + uint32(ind2)*100 + uint32(ind3)*10 + uint32(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10000 + uint32(ind2)*1000 + uint32(ind3)*100 + uint32(ind4)*10 + uint32(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100000 + uint32(ind2)*10000 + uint32(ind3)*1000 + uint32(ind4)*100 + uint32(ind5)*10 + uint32(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000000 + uint32(ind2)*100000 + uint32(ind3)*10000 + uint32(ind4)*1000 + uint32(ind5)*100 + uint32(ind6)*10 + uint32(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value*10000000 + uint32(ind2)*1000000 + uint32(ind3)*100000 + uint32(ind4)*10000 + uint32(ind5)*1000 + uint32(ind6)*100 + uint32(ind7)*10 + uint32(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
iter.assertInteger()
return value
}
}
for {
for i := iter.head; i < iter.tail; i++ {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
if value > uint32SafeToMultiply10 {
value2 := (value << 3) + (value << 1) + uint32(ind)
if value2 < value {
iter.ReportError("readUint32", "overflow")
return
}
value = value2
continue
}
value = (value << 3) + (value << 1) + uint32(ind)
}
if !iter.loadMore() {
iter.assertInteger()
return value
}
}
}
// ReadInt64 read int64
func (iter *Iterator) ReadInt64() (ret int64) {
c := iter.nextToken()
if c == '-' {
val := iter.readUint64(iter.readByte())
if val > math.MaxInt64+1 {
iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return
}
return -int64(val)
}
val := iter.readUint64(c)
if val > math.MaxInt64 {
iter.ReportError("ReadInt64", "overflow: "+strconv.FormatUint(uint64(val), 10))
return
}
return int64(val)
}
// ReadUint64 read uint64
func (iter *Iterator) ReadUint64() uint64 {
return iter.readUint64(iter.nextToken())
}
func (iter *Iterator) readUint64(c byte) (ret uint64) {
ind := intDigits[c]
if ind == 0 {
iter.assertInteger()
return 0 // single zero
}
if ind == invalidCharForNumber {
iter.ReportError("readUint64", "unexpected character: "+string([]byte{byte(ind)}))
return
}
value := uint64(ind)
if iter.tail-iter.head > 10 {
i := iter.head
ind2 := intDigits[iter.buf[i]]
if ind2 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
i++
ind3 := intDigits[iter.buf[i]]
if ind3 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10 + uint64(ind2)
}
//iter.head = i + 1
//value = value * 100 + uint32(ind2) * 10 + uint32(ind3)
i++
ind4 := intDigits[iter.buf[i]]
if ind4 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100 + uint64(ind2)*10 + uint64(ind3)
}
i++
ind5 := intDigits[iter.buf[i]]
if ind5 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000 + uint64(ind2)*100 + uint64(ind3)*10 + uint64(ind4)
}
i++
ind6 := intDigits[iter.buf[i]]
if ind6 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*10000 + uint64(ind2)*1000 + uint64(ind3)*100 + uint64(ind4)*10 + uint64(ind5)
}
i++
ind7 := intDigits[iter.buf[i]]
if ind7 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*100000 + uint64(ind2)*10000 + uint64(ind3)*1000 + uint64(ind4)*100 + uint64(ind5)*10 + uint64(ind6)
}
i++
ind8 := intDigits[iter.buf[i]]
if ind8 == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value*1000000 + uint64(ind2)*100000 + uint64(ind3)*10000 + uint64(ind4)*1000 + uint64(ind5)*100 + uint64(ind6)*10 + uint64(ind7)
}
i++
ind9 := intDigits[iter.buf[i]]
value = value*10000000 + uint64(ind2)*1000000 + uint64(ind3)*100000 + uint64(ind4)*10000 + uint64(ind5)*1000 + uint64(ind6)*100 + uint64(ind7)*10 + uint64(ind8)
iter.head = i
if ind9 == invalidCharForNumber {
iter.assertInteger()
return value
}
}
for {
for i := iter.head; i < iter.tail; i++ {
ind = intDigits[iter.buf[i]]
if ind == invalidCharForNumber {
iter.head = i
iter.assertInteger()
return value
}
if value > uint64SafeToMultiple10 {
value2 := (value << 3) + (value << 1) + uint64(ind)
if value2 < value {
iter.ReportError("readUint64", "overflow")
return
}
value = value2
continue
}
value = (value << 3) + (value << 1) + uint64(ind)
}
if !iter.loadMore() {
iter.assertInteger()
return value
}
}
}
func (iter *Iterator) assertInteger() {
if iter.head < len(iter.buf) && iter.buf[iter.head] == '.' {
iter.ReportError("assertInteger", "can not decode float as int")
}
}

267
vendor/github.com/json-iterator/go/iter_object.go generated vendored Normal file
View File

@ -0,0 +1,267 @@
package jsoniter
import (
"fmt"
"strings"
)
// ReadObject reads one field from an object.
// If the object has ended, it returns an empty string.
// Otherwise, it returns the field name.
func (iter *Iterator) ReadObject() (ret string) {
c := iter.nextToken()
switch c {
case 'n':
iter.skipThreeBytes('u', 'l', 'l')
return "" // null
case '{':
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
}
if c == '}' {
return "" // end of object
}
iter.ReportError("ReadObject", `expect " after {, but found `+string([]byte{c}))
return
case ',':
field := iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
return field
case '}':
return "" // end of object
default:
iter.ReportError("ReadObject", fmt.Sprintf(`expect { or , or } or n, but found %s`, string([]byte{c})))
return
}
}
// readFieldHash hashes the next object field name; ASCII letters are lowercased
// unless the config is case sensitive.
func (iter *Iterator) readFieldHash() int64 {
hash := int64(0x811c9dc5)
c := iter.nextToken()
if c != '"' {
iter.ReportError("readFieldHash", `expect ", but found `+string([]byte{c}))
return 0
}
for {
for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape
b := iter.buf[i]
if b == '\\' {
iter.head = i
for _, b := range iter.readStringSlowPath() {
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
b += 'a' - 'A'
}
hash ^= int64(b)
hash *= 0x1000193
}
c = iter.nextToken()
if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
}
return hash
}
if b == '"' {
iter.head = i + 1
c = iter.nextToken()
if c != ':' {
iter.ReportError("readFieldHash", `expect :, but found `+string([]byte{c}))
return 0
}
return hash
}
if 'A' <= b && b <= 'Z' && !iter.cfg.caseSensitive {
b += 'a' - 'A'
}
hash ^= int64(b)
hash *= 0x1000193
}
if !iter.loadMore() {
iter.ReportError("readFieldHash", `incomplete field name`)
return 0
}
}
}
func calcHash(str string, caseSensitive bool) int64 {
if !caseSensitive {
str = strings.ToLower(str)
}
hash := int64(0x811c9dc5)
for _, b := range []byte(str) {
hash ^= int64(b)
hash *= 0x1000193
}
return int64(hash)
}
// ReadObjectCB reads an object with a callback; the key must be ASCII-only and the field name is not copied
func (iter *Iterator) ReadObjectCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
var field string
if c == '{' {
if !iter.incrementDepth() {
return false
}
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
for c == ',' {
field = iter.ReadString()
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadObject", "expect : after object field, but found "+string([]byte{c}))
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
}
if c != '}' {
iter.ReportError("ReadObjectCB", `object not ended with }`)
iter.decrementDepth()
return false
}
return iter.decrementDepth()
}
if c == '}' {
return iter.decrementDepth()
}
iter.ReportError("ReadObjectCB", `expect " after {, but found `+string([]byte{c}))
iter.decrementDepth()
return false
}
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return true // null
}
iter.ReportError("ReadObjectCB", `expect { or n, but found `+string([]byte{c}))
return false
}
// ReadMapCB reads a map with a callback; the key can be any string
func (iter *Iterator) ReadMapCB(callback func(*Iterator, string) bool) bool {
c := iter.nextToken()
if c == '{' {
if !iter.incrementDepth() {
return false
}
c = iter.nextToken()
if c == '"' {
iter.unreadByte()
field := iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
iter.decrementDepth()
return false
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
for c == ',' {
field = iter.ReadString()
if iter.nextToken() != ':' {
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
iter.decrementDepth()
return false
}
if !callback(iter, field) {
iter.decrementDepth()
return false
}
c = iter.nextToken()
}
if c != '}' {
iter.ReportError("ReadMapCB", `object not ended with }`)
iter.decrementDepth()
return false
}
return iter.decrementDepth()
}
if c == '}' {
return iter.decrementDepth()
}
iter.ReportError("ReadMapCB", `expect " after {, but found `+string([]byte{c}))
iter.decrementDepth()
return false
}
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return true // null
}
iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
return false
}
func (iter *Iterator) readObjectStart() bool {
c := iter.nextToken()
if c == '{' {
c = iter.nextToken()
if c == '}' {
return false
}
iter.unreadByte()
return true
} else if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return false
}
iter.ReportError("readObjectStart", "expect { or n, but found "+string([]byte{c}))
return false
}
func (iter *Iterator) readObjectFieldAsBytes() (ret []byte) {
str := iter.ReadStringAsSlice()
if iter.skipWhitespacesWithoutLoadMore() {
if ret == nil {
ret = make([]byte, len(str))
copy(ret, str)
}
if !iter.loadMore() {
return
}
}
if iter.buf[iter.head] != ':' {
iter.ReportError("readObjectFieldAsBytes", "expect : after object field, but found "+string([]byte{iter.buf[iter.head]}))
return
}
iter.head++
if iter.skipWhitespacesWithoutLoadMore() {
if ret == nil {
ret = make([]byte, len(str))
copy(ret, str)
}
if !iter.loadMore() {
return
}
}
if ret == nil {
return str
}
return ret
}
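
A sketch of ReadMapCB, which streams object fields through a callback without building an intermediate map (illustrative only, not part of the vendored file):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, `{"a":1,"b":2,"c":3}`)

	sum := 0
	ok := iter.ReadMapCB(func(it *jsoniter.Iterator, field string) bool {
		sum += it.ReadInt() // the callback must consume the field's value
		return true         // returning false stops the iteration early
	})
	fmt.Println(ok, sum) // true 6
}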

130
vendor/github.com/json-iterator/go/iter_skip.go generated vendored Normal file
View File

@ -0,0 +1,130 @@
package jsoniter
import "fmt"
// ReadNil reads a JSON null if present and
// reports whether a null was read
func (iter *Iterator) ReadNil() (ret bool) {
c := iter.nextToken()
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l') // null
return true
}
iter.unreadByte()
return false
}
// ReadBool reads a JSON boolean value
func (iter *Iterator) ReadBool() (ret bool) {
c := iter.nextToken()
if c == 't' {
iter.skipThreeBytes('r', 'u', 'e')
return true
}
if c == 'f' {
iter.skipFourBytes('a', 'l', 's', 'e')
return false
}
iter.ReportError("ReadBool", "expect t or f, but found "+string([]byte{c}))
return
}
// SkipAndReturnBytes skips the next JSON element and returns its content as []byte.
// The []byte can be kept; it is a copy of the data.
func (iter *Iterator) SkipAndReturnBytes() []byte {
iter.startCapture(iter.head)
iter.Skip()
return iter.stopCapture()
}
// SkipAndAppendBytes skips next JSON element and appends its content to
// buffer, returning the result.
func (iter *Iterator) SkipAndAppendBytes(buf []byte) []byte {
iter.startCaptureTo(buf, iter.head)
iter.Skip()
return iter.stopCapture()
}
func (iter *Iterator) startCaptureTo(buf []byte, captureStartedAt int) {
if iter.captured != nil {
panic("already in capture mode")
}
iter.captureStartedAt = captureStartedAt
iter.captured = buf
}
func (iter *Iterator) startCapture(captureStartedAt int) {
iter.startCaptureTo(make([]byte, 0, 32), captureStartedAt)
}
func (iter *Iterator) stopCapture() []byte {
if iter.captured == nil {
panic("not in capture mode")
}
captured := iter.captured
remaining := iter.buf[iter.captureStartedAt:iter.head]
iter.captureStartedAt = -1
iter.captured = nil
return append(captured, remaining...)
}
// Skip skips a JSON value and positions the iterator at the next JSON value
func (iter *Iterator) Skip() {
c := iter.nextToken()
switch c {
case '"':
iter.skipString()
case 'n':
iter.skipThreeBytes('u', 'l', 'l') // null
case 't':
iter.skipThreeBytes('r', 'u', 'e') // true
case 'f':
iter.skipFourBytes('a', 'l', 's', 'e') // false
case '0':
iter.unreadByte()
iter.ReadFloat32()
case '-', '1', '2', '3', '4', '5', '6', '7', '8', '9':
iter.skipNumber()
case '[':
iter.skipArray()
case '{':
iter.skipObject()
default:
iter.ReportError("Skip", fmt.Sprintf("do not know how to skip: %v", c))
return
}
}
func (iter *Iterator) skipFourBytes(b1, b2, b3, b4 byte) {
if iter.readByte() != b1 {
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
return
}
if iter.readByte() != b2 {
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
return
}
if iter.readByte() != b3 {
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
return
}
if iter.readByte() != b4 {
iter.ReportError("skipFourBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3, b4})))
return
}
}
func (iter *Iterator) skipThreeBytes(b1, b2, b3 byte) {
if iter.readByte() != b1 {
iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
return
}
if iter.readByte() != b2 {
iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
return
}
if iter.readByte() != b3 {
iter.ReportError("skipThreeBytes", fmt.Sprintf("expect %s", string([]byte{b1, b2, b3})))
return
}
}
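
A sketch of Skip and SkipAndReturnBytes, capturing one field's raw JSON while skipping everything else (illustrative only, not part of the vendored file; the field names are made up):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	doc := `{"header":{"kind":"demo"},"items":[1,2,3]}`
	iter := jsoniter.ParseString(jsoniter.ConfigDefault, doc)

	var rawHeader []byte
	for field := iter.ReadObject(); field != ""; field = iter.ReadObject() {
		if field == "header" {
			rawHeader = iter.SkipAndReturnBytes() // copy of the skipped element
		} else {
			iter.Skip()
		}
	}
	fmt.Println(string(rawHeader)) // {"kind":"demo"}
}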

163
vendor/github.com/json-iterator/go/iter_skip_sloppy.go generated vendored Normal file
View File

@ -0,0 +1,163 @@
//+build jsoniter_sloppy

package jsoniter

// sloppy but faster implementation, does not validate the input json
func (iter *Iterator) skipNumber() {
for {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case ' ', '\n', '\r', '\t', ',', '}', ']':
iter.head = i
return
}
}
if !iter.loadMore() {
return
}
}
}
func (iter *Iterator) skipArray() {
level := 1
if !iter.incrementDepth() {
return
}
for {
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '[': // If open symbol, increase level
level++
if !iter.incrementDepth() {
return
}
case ']': // If close symbol, decrease level
level--
if !iter.decrementDepth() {
return
}
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
return
}
}
}
if !iter.loadMore() {
iter.ReportError("skipArray", "incomplete array")
return
}
}
}
func (iter *Iterator) skipObject() {
level := 1
if !iter.incrementDepth() {
return
}
for {
for i := iter.head; i < iter.tail; i++ {
switch iter.buf[i] {
case '"': // If inside string, skip it
iter.head = i + 1
iter.skipString()
i = iter.head - 1 // it will be i++ soon
case '{': // If open symbol, increase level
level++
if !iter.incrementDepth() {
return
}
case '}': // If close symbol, decrease level
level--
if !iter.decrementDepth() {
return
}
// If we have returned to the original level, we're done
if level == 0 {
iter.head = i + 1
return
}
}
}
if !iter.loadMore() {
iter.ReportError("skipObject", "incomplete object")
return
}
}
}
func (iter *Iterator) skipString() {
for {
end, escaped := iter.findStringEnd()
if end == -1 {
if !iter.loadMore() {
iter.ReportError("skipString", "incomplete string")
return
}
if escaped {
iter.head = 1 // skip the first char as last char read is \
}
} else {
iter.head = end
return
}
}
}
// adapted from: https://github.com/buger/jsonparser/blob/master/parser.go
// Tries to find the end of the string.
// Supports strings containing escaped quote symbols.
func (iter *Iterator) findStringEnd() (int, bool) {
escaped := false
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
if c == '"' {
if !escaped {
return i + 1, false
}
j := i - 1
for {
if j < iter.head || iter.buf[j] != '\\' {
// even number of backslashes
// either end of buffer, or " found
return i + 1, true
}
j--
if j < iter.head || iter.buf[j] != '\\' {
// odd number of backslashes
// it is \" or \\\"
break
}
j--
}
} else if c == '\\' {
escaped = true
}
}
j := iter.tail - 1
for {
if j < iter.head || iter.buf[j] != '\\' {
// even number of backslashes
// either end of buffer, or " found
return -1, false // do not end with \
}
j--
if j < iter.head || iter.buf[j] != '\\' {
// odd number of backslashes
// it is \" or \\\"
break
}
j--
}
return -1, true // end with \
}

99
vendor/github.com/json-iterator/go/iter_skip_strict.go generated vendored Normal file
View File

@ -0,0 +1,99 @@
//+build !jsoniter_sloppy

package jsoniter
import (
"fmt"
"io"
)
func (iter *Iterator) skipNumber() {
if !iter.trySkipNumber() {
iter.unreadByte()
if iter.Error != nil && iter.Error != io.EOF {
return
}
iter.ReadFloat64()
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = nil
iter.ReadBigFloat()
}
}
}
func (iter *Iterator) trySkipNumber() bool {
dotFound := false
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
switch c {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
case '.':
if dotFound {
iter.ReportError("validateNumber", `more than one dot found in number`)
return true // already failed
}
if i+1 == iter.tail {
return false
}
c = iter.buf[i+1]
switch c {
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
default:
iter.ReportError("validateNumber", `missing digit after dot`)
return true // already failed
}
dotFound = true
default:
switch c {
case ',', ']', '}', ' ', '\t', '\n', '\r':
if iter.head == i {
return false // if - without following digits
}
iter.head = i
return true // must be valid
}
return false // may be invalid
}
}
return false
}
func (iter *Iterator) skipString() {
if !iter.trySkipString() {
iter.unreadByte()
iter.ReadString()
}
}
func (iter *Iterator) trySkipString() bool {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
if c == '"' {
iter.head = i + 1
return true // valid
} else if c == '\\' {
return false
} else if c < ' ' {
iter.ReportError("trySkipString",
fmt.Sprintf(`invalid control character found: %d`, c))
return true // already failed
}
}
return false
}
func (iter *Iterator) skipObject() {
iter.unreadByte()
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
iter.Skip()
return true
})
}
func (iter *Iterator) skipArray() {
iter.unreadByte()
iter.ReadArrayCB(func(iter *Iterator) bool {
iter.Skip()
return true
})
}

215
vendor/github.com/json-iterator/go/iter_str.go generated vendored Normal file
View File

@ -0,0 +1,215 @@
package jsoniter
import (
"fmt"
"unicode/utf16"
)
// ReadString read string from iterator
func (iter *Iterator) ReadString() (ret string) {
c := iter.nextToken()
if c == '"' {
for i := iter.head; i < iter.tail; i++ {
c := iter.buf[i]
if c == '"' {
ret = string(iter.buf[iter.head:i])
iter.head = i + 1
return ret
} else if c == '\\' {
break
} else if c < ' ' {
iter.ReportError("ReadString",
fmt.Sprintf(`invalid control character found: %d`, c))
return
}
}
return iter.readStringSlowPath()
} else if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return ""
}
iter.ReportError("ReadString", `expects " or n, but found `+string([]byte{c}))
return
}
func (iter *Iterator) readStringSlowPath() (ret string) {
var str []byte
var c byte
for iter.Error == nil {
c = iter.readByte()
if c == '"' {
return string(str)
}
if c == '\\' {
c = iter.readByte()
str = iter.readEscapedChar(c, str)
} else {
str = append(str, c)
}
}
iter.ReportError("readStringSlowPath", "unexpected end of input")
return
}
func (iter *Iterator) readEscapedChar(c byte, str []byte) []byte {
switch c {
case 'u':
r := iter.readU4()
if utf16.IsSurrogate(r) {
c = iter.readByte()
if iter.Error != nil {
return nil
}
if c != '\\' {
iter.unreadByte()
str = appendRune(str, r)
return str
}
c = iter.readByte()
if iter.Error != nil {
return nil
}
if c != 'u' {
str = appendRune(str, r)
return iter.readEscapedChar(c, str)
}
r2 := iter.readU4()
if iter.Error != nil {
return nil
}
combined := utf16.DecodeRune(r, r2)
if combined == '\uFFFD' {
str = appendRune(str, r)
str = appendRune(str, r2)
} else {
str = appendRune(str, combined)
}
} else {
str = appendRune(str, r)
}
case '"':
str = append(str, '"')
case '\\':
str = append(str, '\\')
case '/':
str = append(str, '/')
case 'b':
str = append(str, '\b')
case 'f':
str = append(str, '\f')
case 'n':
str = append(str, '\n')
case 'r':
str = append(str, '\r')
case 't':
str = append(str, '\t')
default:
iter.ReportError("readEscapedChar",
`invalid escape char after \`)
return nil
}
return str
}
// ReadStringAsSlice reads a string from the iterator without copying it into string form.
// The []byte cannot be kept, as it will change after the next iterator call.
func (iter *Iterator) ReadStringAsSlice() (ret []byte) {
c := iter.nextToken()
if c == '"' {
for i := iter.head; i < iter.tail; i++ {
// require ascii string and no escape
// for: field name, base64, number
if iter.buf[i] == '"' {
// fast path: reuse the underlying buffer
ret = iter.buf[iter.head:i]
iter.head = i + 1
return ret
}
}
readLen := iter.tail - iter.head
copied := make([]byte, readLen, readLen*2)
copy(copied, iter.buf[iter.head:iter.tail])
iter.head = iter.tail
for iter.Error == nil {
c := iter.readByte()
if c == '"' {
return copied
}
copied = append(copied, c)
}
return copied
}
iter.ReportError("ReadStringAsSlice", `expects " or n, but found `+string([]byte{c}))
return
}
func (iter *Iterator) readU4() (ret rune) {
for i := 0; i < 4; i++ {
c := iter.readByte()
if iter.Error != nil {
return
}
if c >= '0' && c <= '9' {
ret = ret*16 + rune(c-'0')
} else if c >= 'a' && c <= 'f' {
ret = ret*16 + rune(c-'a'+10)
} else if c >= 'A' && c <= 'F' {
ret = ret*16 + rune(c-'A'+10)
} else {
iter.ReportError("readU4", "expects 0~9 or a~f, but found "+string([]byte{c}))
return
}
}
return ret
}
const (
t1 = 0x00 // 0000 0000
tx = 0x80 // 1000 0000
t2 = 0xC0 // 1100 0000
t3 = 0xE0 // 1110 0000
t4 = 0xF0 // 1111 0000
t5 = 0xF8 // 1111 1000
maskx = 0x3F // 0011 1111
mask2 = 0x1F // 0001 1111
mask3 = 0x0F // 0000 1111
mask4 = 0x07 // 0000 0111
rune1Max = 1<<7 - 1
rune2Max = 1<<11 - 1
rune3Max = 1<<16 - 1
surrogateMin = 0xD800
surrogateMax = 0xDFFF
maxRune = '\U0010FFFF' // Maximum valid Unicode code point.
runeError = '\uFFFD' // the "error" Rune or "Unicode replacement character"
)
func appendRune(p []byte, r rune) []byte {
// Negative values are erroneous. Making it unsigned addresses the problem.
switch i := uint32(r); {
case i <= rune1Max:
p = append(p, byte(r))
return p
case i <= rune2Max:
p = append(p, t2|byte(r>>6))
p = append(p, tx|byte(r)&maskx)
return p
case i > maxRune, surrogateMin <= i && i <= surrogateMax:
r = runeError
fallthrough
case i <= rune3Max:
p = append(p, t3|byte(r>>12))
p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx|byte(r)&maskx)
return p
default:
p = append(p, t4|byte(r>>18))
p = append(p, tx|byte(r>>12)&maskx)
p = append(p, tx|byte(r>>6)&maskx)
p = append(p, tx|byte(r)&maskx)
return p
}
}
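
A small sketch (not part of the vendored file) of the ReadString vs. ReadStringAsSlice trade-off documented above: the slice aliases the iterator's internal buffer, so copy it before the next read if you need to keep it. The sample input is illustrative.

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `["abc","def"]`)

    iter.ReadArray()
    b := iter.ReadStringAsSlice() // no copy: b points into the iterator's buffer
    first := string(b)            // copy before the next iterator call

    iter.ReadArray()
    second := iter.ReadString() // ReadString returns an owned copy

    fmt.Println(first, second) // abc def
}
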

18
vendor/github.com/json-iterator/go/jsoniter.go generated vendored Normal file
View File

@ -0,0 +1,18 @@
// Package jsoniter implements encoding and decoding of JSON as defined in
// RFC 4627 and provides interfaces with identical syntax of standard lib encoding/json.
// Converting from encoding/json to jsoniter is no more than replacing the package with jsoniter
// and variable type declarations (if any).
// jsoniter interfaces give 100% compatibility with code using the standard lib.
//
// "JSON and Go"
// (https://golang.org/doc/articles/json_and_go.html)
// gives a description of how Marshal/Unmarshal operate
// between arbitrary or predefined json objects and bytes,
// and it applies to jsoniter.Marshal/Unmarshal as well.
//
// Besides, jsoniter.Iterator provides a different set of interfaces
// iterating given bytes/string/reader
// and yielding parsed elements one by one.
// This set of interfaces reads input as required and gives
// better performance.
package jsoniter
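
A brief, illustrative sketch of the two usage styles this package comment describes: the drop-in Marshal/Unmarshal API and the streaming Iterator API. The type and values are made up for the example.

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

type point struct {
    X int `json:"x"`
    Y int `json:"y"`
}

func main() {
    // Drop-in replacement for encoding/json.
    api := jsoniter.ConfigCompatibleWithStandardLibrary
    out, _ := api.Marshal(point{X: 1, Y: 2})
    fmt.Println(string(out)) // {"x":1,"y":2}

    var p point
    _ = api.Unmarshal([]byte(`{"x":3,"y":4}`), &p)
    fmt.Println(p.X, p.Y) // 3 4

    // Iterator interface: read elements one by one as they are parsed.
    iter := jsoniter.ParseString(jsoniter.ConfigDefault, `[1,2,3]`)
    for iter.ReadArray() {
        fmt.Println(iter.ReadInt())
    }
}
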

42
vendor/github.com/json-iterator/go/pool.go generated vendored Normal file
View File

@ -0,0 +1,42 @@
package jsoniter
import (
"io"
)
// IteratorPool a thread safe pool of iterators with same configuration
type IteratorPool interface {
BorrowIterator(data []byte) *Iterator
ReturnIterator(iter *Iterator)
}
// StreamPool a thread safe pool of streams with same configuration
type StreamPool interface {
BorrowStream(writer io.Writer) *Stream
ReturnStream(stream *Stream)
}
func (cfg *frozenConfig) BorrowStream(writer io.Writer) *Stream {
stream := cfg.streamPool.Get().(*Stream)
stream.Reset(writer)
return stream
}
func (cfg *frozenConfig) ReturnStream(stream *Stream) {
stream.out = nil
stream.Error = nil
stream.Attachment = nil
cfg.streamPool.Put(stream)
}
func (cfg *frozenConfig) BorrowIterator(data []byte) *Iterator {
iter := cfg.iteratorPool.Get().(*Iterator)
iter.ResetBytes(data)
return iter
}
func (cfg *frozenConfig) ReturnIterator(iter *Iterator) {
iter.Error = nil
iter.Attachment = nil
cfg.iteratorPool.Put(iter)
}
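
A minimal sketch of how these pools are typically used to avoid allocating a Stream or Iterator per call; the payloads are illustrative.

package main

import (
    "fmt"
    "os"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    cfg := jsoniter.ConfigDefault

    // Borrow a stream, write with it, flush, and return it to the pool.
    stream := cfg.BorrowStream(os.Stdout)
    stream.WriteVal(map[string]int{"a": 1})
    stream.WriteRaw("\n")
    stream.Flush()
    cfg.ReturnStream(stream)

    // Borrow an iterator bound to an existing byte slice.
    iter := cfg.BorrowIterator([]byte(`{"a":1}`))
    var m map[string]int
    iter.ReadVal(&m)
    cfg.ReturnIterator(iter)

    fmt.Println(m["a"]) // 1
}
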

337
vendor/github.com/json-iterator/go/reflect.go generated vendored Normal file
View File

@ -0,0 +1,337 @@
package jsoniter
import (
"fmt"
"reflect"
"unsafe"
"github.com/modern-go/reflect2"
)
// ValDecoder is an internal type registered to cache as needed.
// Don't confuse jsoniter.ValDecoder with json.Decoder.
// For json.Decoder's adapter, refer to jsoniter.AdapterDecoder(todo link).
//
// Reflection on type to create decoders, which is then cached
// Reflection on value is avoided as much as we can, since reflect.Value itself will allocate, with the following exceptions
// 1. create instance of new value, for example *int will need a int to be allocated
// 2. append to slice, if the existing cap is not enough, allocate will be done using Reflect.New
// 3. assignment to map, both key and value will be reflect.Value
// For a simple struct binding, it will be reflect.Value free and allocation free
type ValDecoder interface {
Decode(ptr unsafe.Pointer, iter *Iterator)
}
// ValEncoder is an internal type registered to cache as needed.
// Don't confuse jsoniter.ValEncoder with json.Encoder.
// For json.Encoder's adapter, refer to jsoniter.AdapterEncoder(todo godoc link).
type ValEncoder interface {
IsEmpty(ptr unsafe.Pointer) bool
Encode(ptr unsafe.Pointer, stream *Stream)
}
type checkIsEmpty interface {
IsEmpty(ptr unsafe.Pointer) bool
}
type ctx struct {
*frozenConfig
prefix string
encoders map[reflect2.Type]ValEncoder
decoders map[reflect2.Type]ValDecoder
}
func (b *ctx) caseSensitive() bool {
if b.frozenConfig == nil {
// default is case-insensitive
return false
}
return b.frozenConfig.caseSensitive
}
func (b *ctx) append(prefix string) *ctx {
return &ctx{
frozenConfig: b.frozenConfig,
prefix: b.prefix + " " + prefix,
encoders: b.encoders,
decoders: b.decoders,
}
}
// ReadVal copies the underlying JSON into a Go value, same as json.Unmarshal
func (iter *Iterator) ReadVal(obj interface{}) {
depth := iter.depth
cacheKey := reflect2.RTypeOf(obj)
decoder := iter.cfg.getDecoderFromCache(cacheKey)
if decoder == nil {
typ := reflect2.TypeOf(obj)
if typ.Kind() != reflect.Ptr {
iter.ReportError("ReadVal", "can only unmarshal into pointer")
return
}
decoder = iter.cfg.DecoderOf(typ)
}
ptr := reflect2.PtrOf(obj)
if ptr == nil {
iter.ReportError("ReadVal", "can not read into nil pointer")
return
}
decoder.Decode(ptr, iter)
if iter.depth != depth {
iter.ReportError("ReadVal", "unexpected mismatched nesting")
return
}
}
// WriteVal copies the Go value into the underlying JSON, same as json.Marshal
func (stream *Stream) WriteVal(val interface{}) {
if nil == val {
stream.WriteNil()
return
}
cacheKey := reflect2.RTypeOf(val)
encoder := stream.cfg.getEncoderFromCache(cacheKey)
if encoder == nil {
typ := reflect2.TypeOf(val)
encoder = stream.cfg.EncoderOf(typ)
}
encoder.Encode(reflect2.PtrOf(val), stream)
}
func (cfg *frozenConfig) DecoderOf(typ reflect2.Type) ValDecoder {
cacheKey := typ.RType()
decoder := cfg.getDecoderFromCache(cacheKey)
if decoder != nil {
return decoder
}
ctx := &ctx{
frozenConfig: cfg,
prefix: "",
decoders: map[reflect2.Type]ValDecoder{},
encoders: map[reflect2.Type]ValEncoder{},
}
ptrType := typ.(*reflect2.UnsafePtrType)
decoder = decoderOfType(ctx, ptrType.Elem())
cfg.addDecoderToCache(cacheKey, decoder)
return decoder
}
func decoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
decoder := getTypeDecoderFromExtension(ctx, typ)
if decoder != nil {
return decoder
}
decoder = createDecoderOfType(ctx, typ)
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
for _, extension := range ctx.extraExtensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
return decoder
}
func createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
decoder := ctx.decoders[typ]
if decoder != nil {
return decoder
}
placeholder := &placeholderDecoder{}
ctx.decoders[typ] = placeholder
decoder = _createDecoderOfType(ctx, typ)
placeholder.decoder = decoder
return decoder
}
func _createDecoderOfType(ctx *ctx, typ reflect2.Type) ValDecoder {
decoder := createDecoderOfJsonRawMessage(ctx, typ)
if decoder != nil {
return decoder
}
decoder = createDecoderOfJsonNumber(ctx, typ)
if decoder != nil {
return decoder
}
decoder = createDecoderOfMarshaler(ctx, typ)
if decoder != nil {
return decoder
}
decoder = createDecoderOfAny(ctx, typ)
if decoder != nil {
return decoder
}
decoder = createDecoderOfNative(ctx, typ)
if decoder != nil {
return decoder
}
switch typ.Kind() {
case reflect.Interface:
ifaceType, isIFace := typ.(*reflect2.UnsafeIFaceType)
if isIFace {
return &ifaceDecoder{valType: ifaceType}
}
return &efaceDecoder{}
case reflect.Struct:
return decoderOfStruct(ctx, typ)
case reflect.Array:
return decoderOfArray(ctx, typ)
case reflect.Slice:
return decoderOfSlice(ctx, typ)
case reflect.Map:
return decoderOfMap(ctx, typ)
case reflect.Ptr:
return decoderOfOptional(ctx, typ)
default:
return &lazyErrorDecoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
}
}
func (cfg *frozenConfig) EncoderOf(typ reflect2.Type) ValEncoder {
cacheKey := typ.RType()
encoder := cfg.getEncoderFromCache(cacheKey)
if encoder != nil {
return encoder
}
ctx := &ctx{
frozenConfig: cfg,
prefix: "",
decoders: map[reflect2.Type]ValDecoder{},
encoders: map[reflect2.Type]ValEncoder{},
}
encoder = encoderOfType(ctx, typ)
if typ.LikePtr() {
encoder = &onePtrEncoder{encoder}
}
cfg.addEncoderToCache(cacheKey, encoder)
return encoder
}
type onePtrEncoder struct {
encoder ValEncoder
}
func (encoder *onePtrEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
}
func (encoder *onePtrEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
}
func encoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
encoder := getTypeEncoderFromExtension(ctx, typ)
if encoder != nil {
return encoder
}
encoder = createEncoderOfType(ctx, typ)
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
for _, extension := range ctx.extraExtensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
return encoder
}
func createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
encoder := ctx.encoders[typ]
if encoder != nil {
return encoder
}
placeholder := &placeholderEncoder{}
ctx.encoders[typ] = placeholder
encoder = _createEncoderOfType(ctx, typ)
placeholder.encoder = encoder
return encoder
}
func _createEncoderOfType(ctx *ctx, typ reflect2.Type) ValEncoder {
encoder := createEncoderOfJsonRawMessage(ctx, typ)
if encoder != nil {
return encoder
}
encoder = createEncoderOfJsonNumber(ctx, typ)
if encoder != nil {
return encoder
}
encoder = createEncoderOfMarshaler(ctx, typ)
if encoder != nil {
return encoder
}
encoder = createEncoderOfAny(ctx, typ)
if encoder != nil {
return encoder
}
encoder = createEncoderOfNative(ctx, typ)
if encoder != nil {
return encoder
}
kind := typ.Kind()
switch kind {
case reflect.Interface:
return &dynamicEncoder{typ}
case reflect.Struct:
return encoderOfStruct(ctx, typ)
case reflect.Array:
return encoderOfArray(ctx, typ)
case reflect.Slice:
return encoderOfSlice(ctx, typ)
case reflect.Map:
return encoderOfMap(ctx, typ)
case reflect.Ptr:
return encoderOfOptional(ctx, typ)
default:
return &lazyErrorEncoder{err: fmt.Errorf("%s%s is unsupported type", ctx.prefix, typ.String())}
}
}
type lazyErrorDecoder struct {
err error
}
func (decoder *lazyErrorDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.WhatIsNext() != NilValue {
if iter.Error == nil {
iter.Error = decoder.err
}
} else {
iter.Skip()
}
}
type lazyErrorEncoder struct {
err error
}
func (encoder *lazyErrorEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if ptr == nil {
stream.WriteNil()
} else if stream.Error == nil {
stream.Error = encoder.err
}
}
func (encoder *lazyErrorEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return false
}
type placeholderDecoder struct {
decoder ValDecoder
}
func (decoder *placeholderDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.decoder.Decode(ptr, iter)
}
type placeholderEncoder struct {
encoder ValEncoder
}
func (encoder *placeholderEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
encoder.encoder.Encode(ptr, stream)
}
func (encoder *placeholderEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.encoder.IsEmpty(ptr)
}
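
An illustrative sketch of the ReadVal/WriteVal entry points defined above, which sit behind Unmarshal/Marshal; the user type and JSON are placeholders.

package main

import (
    "fmt"
    "os"

    jsoniter "github.com/json-iterator/go"
)

type user struct {
    Name string `json:"name"`
}

func main() {
    // ReadVal decodes the next JSON value into a Go value (like json.Unmarshal).
    iter := jsoniter.ParseBytes(jsoniter.ConfigDefault, []byte(`{"name":"alice"}`))
    var u user
    iter.ReadVal(&u)

    // WriteVal encodes a Go value onto a stream (like json.Marshal).
    stream := jsoniter.NewStream(jsoniter.ConfigDefault, os.Stdout, 512)
    stream.WriteVal(u)
    stream.WriteRaw("\n")
    stream.Flush()

    fmt.Println("decoded:", u.Name) // decoded: alice
}
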

104
vendor/github.com/json-iterator/go/reflect_array.go generated vendored Normal file
View File

@ -0,0 +1,104 @@
package jsoniter
import (
"fmt"
"github.com/modern-go/reflect2"
"io"
"unsafe"
)
func decoderOfArray(ctx *ctx, typ reflect2.Type) ValDecoder {
arrayType := typ.(*reflect2.UnsafeArrayType)
decoder := decoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
return &arrayDecoder{arrayType, decoder}
}
func encoderOfArray(ctx *ctx, typ reflect2.Type) ValEncoder {
arrayType := typ.(*reflect2.UnsafeArrayType)
if arrayType.Len() == 0 {
return emptyArrayEncoder{}
}
encoder := encoderOfType(ctx.append("[arrayElem]"), arrayType.Elem())
return &arrayEncoder{arrayType, encoder}
}
type emptyArrayEncoder struct{}
func (encoder emptyArrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteEmptyArray()
}
func (encoder emptyArrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return true
}
type arrayEncoder struct {
arrayType *reflect2.UnsafeArrayType
elemEncoder ValEncoder
}
func (encoder *arrayEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteArrayStart()
elemPtr := unsafe.Pointer(ptr)
encoder.elemEncoder.Encode(elemPtr, stream)
for i := 1; i < encoder.arrayType.Len(); i++ {
stream.WriteMore()
elemPtr = encoder.arrayType.UnsafeGetIndex(ptr, i)
encoder.elemEncoder.Encode(elemPtr, stream)
}
stream.WriteArrayEnd()
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%v: %s", encoder.arrayType, stream.Error.Error())
}
}
func (encoder *arrayEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return false
}
type arrayDecoder struct {
arrayType *reflect2.UnsafeArrayType
elemDecoder ValDecoder
}
func (decoder *arrayDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.doDecode(ptr, iter)
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.arrayType, iter.Error.Error())
}
}
func (decoder *arrayDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
c := iter.nextToken()
arrayType := decoder.arrayType
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
return
}
if c != '[' {
iter.ReportError("decode array", "expect [ or n, but found "+string([]byte{c}))
return
}
c = iter.nextToken()
if c == ']' {
return
}
iter.unreadByte()
elemPtr := arrayType.UnsafeGetIndex(ptr, 0)
decoder.elemDecoder.Decode(elemPtr, iter)
length := 1
for c = iter.nextToken(); c == ','; c = iter.nextToken() {
if length >= arrayType.Len() {
iter.Skip()
continue
}
idx := length
length += 1
elemPtr = arrayType.UnsafeGetIndex(ptr, idx)
decoder.elemDecoder.Decode(elemPtr, iter)
}
if c != ']' {
iter.ReportError("decode array", "expect ], but found "+string([]byte{c}))
return
}
}
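
A short, illustrative example of the fixed-length array behaviour implemented above: elements past the array length are skipped rather than reported as an error.

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    var a [2]int
    _ = jsoniter.Unmarshal([]byte(`[10, 20, 30]`), &a)
    fmt.Println(a) // [10 20] -- the third element is skipped
}
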

70
vendor/github.com/json-iterator/go/reflect_dynamic.go generated vendored Normal file
View File

@ -0,0 +1,70 @@
package jsoniter
import (
"github.com/modern-go/reflect2"
"reflect"
"unsafe"
)
type dynamicEncoder struct {
valType reflect2.Type
}
func (encoder *dynamicEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
obj := encoder.valType.UnsafeIndirect(ptr)
stream.WriteVal(obj)
}
func (encoder *dynamicEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.valType.UnsafeIndirect(ptr) == nil
}
type efaceDecoder struct {
}
func (decoder *efaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
pObj := (*interface{})(ptr)
obj := *pObj
if obj == nil {
*pObj = iter.Read()
return
}
typ := reflect2.TypeOf(obj)
if typ.Kind() != reflect.Ptr {
*pObj = iter.Read()
return
}
ptrType := typ.(*reflect2.UnsafePtrType)
ptrElemType := ptrType.Elem()
if iter.WhatIsNext() == NilValue {
if ptrElemType.Kind() != reflect.Ptr {
iter.skipFourBytes('n', 'u', 'l', 'l')
*pObj = nil
return
}
}
if reflect2.IsNil(obj) {
obj := ptrElemType.New()
iter.ReadVal(obj)
*pObj = obj
return
}
iter.ReadVal(obj)
}
type ifaceDecoder struct {
valType *reflect2.UnsafeIFaceType
}
func (decoder *ifaceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
decoder.valType.UnsafeSet(ptr, decoder.valType.UnsafeNew())
return
}
obj := decoder.valType.UnsafeIndirect(ptr)
if reflect2.IsNil(obj) {
iter.ReportError("decode non empty interface", "can not unmarshal into nil")
return
}
iter.ReadVal(obj)
}
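
An illustrative sketch of the two interface{} cases handled above: a nil interface receives a generic value, while an interface that already holds a pointer is decoded in place.

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

type point struct{ X int }

func main() {
    // Nil interface: decoded into generic types (map, []interface{}, float64, ...).
    var v interface{}
    _ = jsoniter.Unmarshal([]byte(`{"X":1}`), &v)
    fmt.Printf("%T\n", v) // map[string]interface {}

    // Interface already holding a *point: decoded into the existing value.
    var w interface{} = &point{}
    _ = jsoniter.Unmarshal([]byte(`{"X":7}`), &w)
    fmt.Println(w.(*point).X) // 7
}
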

483
vendor/github.com/json-iterator/go/reflect_extension.go generated vendored Normal file
View File

@ -0,0 +1,483 @@
package jsoniter
import (
"fmt"
"github.com/modern-go/reflect2"
"reflect"
"sort"
"strings"
"unicode"
"unsafe"
)
var typeDecoders = map[string]ValDecoder{}
var fieldDecoders = map[string]ValDecoder{}
var typeEncoders = map[string]ValEncoder{}
var fieldEncoders = map[string]ValEncoder{}
var extensions = []Extension{}
// StructDescriptor describes how we should encode/decode the struct
type StructDescriptor struct {
Type reflect2.Type
Fields []*Binding
}
// GetField get one field from the descriptor by its name.
// A map is not used here so that field order is preserved.
func (structDescriptor *StructDescriptor) GetField(fieldName string) *Binding {
for _, binding := range structDescriptor.Fields {
if binding.Field.Name() == fieldName {
return binding
}
}
return nil
}
// Binding describes how we should encode/decode the struct field
type Binding struct {
levels []int
Field reflect2.StructField
FromNames []string
ToNames []string
Encoder ValEncoder
Decoder ValDecoder
}
// Extension is the single SPI for all customization. Customize encoding/decoding by specifying an alternate encoder/decoder.
// Can also rename fields by UpdateStructDescriptor.
type Extension interface {
UpdateStructDescriptor(structDescriptor *StructDescriptor)
CreateMapKeyDecoder(typ reflect2.Type) ValDecoder
CreateMapKeyEncoder(typ reflect2.Type) ValEncoder
CreateDecoder(typ reflect2.Type) ValDecoder
CreateEncoder(typ reflect2.Type) ValEncoder
DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder
DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder
}
// DummyExtension: embed this type to get a dummy implementation for all methods of Extension
type DummyExtension struct {
}
// UpdateStructDescriptor No-op
func (extension *DummyExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
}
// CreateMapKeyDecoder No-op
func (extension *DummyExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
return nil
}
// CreateMapKeyEncoder No-op
func (extension *DummyExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
return nil
}
// CreateDecoder No-op
func (extension *DummyExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
return nil
}
// CreateEncoder No-op
func (extension *DummyExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
return nil
}
// DecorateDecoder No-op
func (extension *DummyExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
return decoder
}
// DecorateEncoder No-op
func (extension *DummyExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
return encoder
}
type EncoderExtension map[reflect2.Type]ValEncoder
// UpdateStructDescriptor No-op
func (extension EncoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
}
// CreateDecoder No-op
func (extension EncoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
return nil
}
// CreateEncoder get encoder from map
func (extension EncoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
return extension[typ]
}
// CreateMapKeyDecoder No-op
func (extension EncoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
return nil
}
// CreateMapKeyEncoder No-op
func (extension EncoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
return nil
}
// DecorateDecoder No-op
func (extension EncoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
return decoder
}
// DecorateEncoder No-op
func (extension EncoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
return encoder
}
type DecoderExtension map[reflect2.Type]ValDecoder
// UpdateStructDescriptor No-op
func (extension DecoderExtension) UpdateStructDescriptor(structDescriptor *StructDescriptor) {
}
// CreateMapKeyDecoder No-op
func (extension DecoderExtension) CreateMapKeyDecoder(typ reflect2.Type) ValDecoder {
return nil
}
// CreateMapKeyEncoder No-op
func (extension DecoderExtension) CreateMapKeyEncoder(typ reflect2.Type) ValEncoder {
return nil
}
// CreateDecoder get decoder from map
func (extension DecoderExtension) CreateDecoder(typ reflect2.Type) ValDecoder {
return extension[typ]
}
// CreateEncoder No-op
func (extension DecoderExtension) CreateEncoder(typ reflect2.Type) ValEncoder {
return nil
}
// DecorateDecoder No-op
func (extension DecoderExtension) DecorateDecoder(typ reflect2.Type, decoder ValDecoder) ValDecoder {
return decoder
}
// DecorateEncoder No-op
func (extension DecoderExtension) DecorateEncoder(typ reflect2.Type, encoder ValEncoder) ValEncoder {
return encoder
}
type funcDecoder struct {
fun DecoderFunc
}
func (decoder *funcDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.fun(ptr, iter)
}
type funcEncoder struct {
fun EncoderFunc
isEmptyFunc func(ptr unsafe.Pointer) bool
}
func (encoder *funcEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
encoder.fun(ptr, stream)
}
func (encoder *funcEncoder) IsEmpty(ptr unsafe.Pointer) bool {
if encoder.isEmptyFunc == nil {
return false
}
return encoder.isEmptyFunc(ptr)
}
// DecoderFunc the function form of TypeDecoder
type DecoderFunc func(ptr unsafe.Pointer, iter *Iterator)
// EncoderFunc the function form of TypeEncoder
type EncoderFunc func(ptr unsafe.Pointer, stream *Stream)
// RegisterTypeDecoderFunc register TypeDecoder for a type with function
func RegisterTypeDecoderFunc(typ string, fun DecoderFunc) {
typeDecoders[typ] = &funcDecoder{fun}
}
// RegisterTypeDecoder register TypeDecoder for a typ
func RegisterTypeDecoder(typ string, decoder ValDecoder) {
typeDecoders[typ] = decoder
}
// RegisterFieldDecoderFunc register TypeDecoder for a struct field with function
func RegisterFieldDecoderFunc(typ string, field string, fun DecoderFunc) {
RegisterFieldDecoder(typ, field, &funcDecoder{fun})
}
// RegisterFieldDecoder register TypeDecoder for a struct field
func RegisterFieldDecoder(typ string, field string, decoder ValDecoder) {
fieldDecoders[fmt.Sprintf("%s/%s", typ, field)] = decoder
}
// RegisterTypeEncoderFunc register TypeEncoder for a type with encode/isEmpty function
func RegisterTypeEncoderFunc(typ string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
typeEncoders[typ] = &funcEncoder{fun, isEmptyFunc}
}
// RegisterTypeEncoder register TypeEncoder for a type
func RegisterTypeEncoder(typ string, encoder ValEncoder) {
typeEncoders[typ] = encoder
}
// RegisterFieldEncoderFunc register TypeEncoder for a struct field with encode/isEmpty function
func RegisterFieldEncoderFunc(typ string, field string, fun EncoderFunc, isEmptyFunc func(unsafe.Pointer) bool) {
RegisterFieldEncoder(typ, field, &funcEncoder{fun, isEmptyFunc})
}
// RegisterFieldEncoder register TypeEncoder for a struct field
func RegisterFieldEncoder(typ string, field string, encoder ValEncoder) {
fieldEncoders[fmt.Sprintf("%s/%s", typ, field)] = encoder
}
// RegisterExtension register extension
func RegisterExtension(extension Extension) {
extensions = append(extensions, extension)
}
func getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
decoder := _getTypeDecoderFromExtension(ctx, typ)
if decoder != nil {
for _, extension := range extensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
decoder = ctx.decoderExtension.DecorateDecoder(typ, decoder)
for _, extension := range ctx.extraExtensions {
decoder = extension.DecorateDecoder(typ, decoder)
}
}
return decoder
}
func _getTypeDecoderFromExtension(ctx *ctx, typ reflect2.Type) ValDecoder {
for _, extension := range extensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
decoder := ctx.decoderExtension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
for _, extension := range ctx.extraExtensions {
decoder := extension.CreateDecoder(typ)
if decoder != nil {
return decoder
}
}
typeName := typ.String()
decoder = typeDecoders[typeName]
if decoder != nil {
return decoder
}
if typ.Kind() == reflect.Ptr {
ptrType := typ.(*reflect2.UnsafePtrType)
decoder := typeDecoders[ptrType.Elem().String()]
if decoder != nil {
return &OptionalDecoder{ptrType.Elem(), decoder}
}
}
return nil
}
func getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
encoder := _getTypeEncoderFromExtension(ctx, typ)
if encoder != nil {
for _, extension := range extensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
encoder = ctx.encoderExtension.DecorateEncoder(typ, encoder)
for _, extension := range ctx.extraExtensions {
encoder = extension.DecorateEncoder(typ, encoder)
}
}
return encoder
}
func _getTypeEncoderFromExtension(ctx *ctx, typ reflect2.Type) ValEncoder {
for _, extension := range extensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
encoder := ctx.encoderExtension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
for _, extension := range ctx.extraExtensions {
encoder := extension.CreateEncoder(typ)
if encoder != nil {
return encoder
}
}
typeName := typ.String()
encoder = typeEncoders[typeName]
if encoder != nil {
return encoder
}
if typ.Kind() == reflect.Ptr {
typePtr := typ.(*reflect2.UnsafePtrType)
encoder := typeEncoders[typePtr.Elem().String()]
if encoder != nil {
return &OptionalEncoder{encoder}
}
}
return nil
}
func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
structType := typ.(*reflect2.UnsafeStructType)
embeddedBindings := []*Binding{}
bindings := []*Binding{}
for i := 0; i < structType.NumField(); i++ {
field := structType.Field(i)
tag, hastag := field.Tag().Lookup(ctx.getTagKey())
if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
continue
}
if tag == "-" || field.Name() == "_" {
continue
}
tagParts := strings.Split(tag, ",")
if field.Anonymous() && (tag == "" || tagParts[0] == "") {
if field.Type().Kind() == reflect.Struct {
structDescriptor := describeStruct(ctx, field.Type())
for _, binding := range structDescriptor.Fields {
binding.levels = append([]int{i}, binding.levels...)
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
binding.Decoder = &structFieldDecoder{field, binding.Decoder}
embeddedBindings = append(embeddedBindings, binding)
}
continue
} else if field.Type().Kind() == reflect.Ptr {
ptrType := field.Type().(*reflect2.UnsafePtrType)
if ptrType.Elem().Kind() == reflect.Struct {
structDescriptor := describeStruct(ctx, ptrType.Elem())
for _, binding := range structDescriptor.Fields {
binding.levels = append([]int{i}, binding.levels...)
omitempty := binding.Encoder.(*structFieldEncoder).omitempty
binding.Encoder = &dereferenceEncoder{binding.Encoder}
binding.Encoder = &structFieldEncoder{field, binding.Encoder, omitempty}
binding.Decoder = &dereferenceDecoder{ptrType.Elem(), binding.Decoder}
binding.Decoder = &structFieldDecoder{field, binding.Decoder}
embeddedBindings = append(embeddedBindings, binding)
}
continue
}
}
}
fieldNames := calcFieldNames(field.Name(), tagParts[0], tag)
fieldCacheKey := fmt.Sprintf("%s/%s", typ.String(), field.Name())
decoder := fieldDecoders[fieldCacheKey]
if decoder == nil {
decoder = decoderOfType(ctx.append(field.Name()), field.Type())
}
encoder := fieldEncoders[fieldCacheKey]
if encoder == nil {
encoder = encoderOfType(ctx.append(field.Name()), field.Type())
}
binding := &Binding{
Field: field,
FromNames: fieldNames,
ToNames: fieldNames,
Decoder: decoder,
Encoder: encoder,
}
binding.levels = []int{i}
bindings = append(bindings, binding)
}
return createStructDescriptor(ctx, typ, bindings, embeddedBindings)
}
func createStructDescriptor(ctx *ctx, typ reflect2.Type, bindings []*Binding, embeddedBindings []*Binding) *StructDescriptor {
structDescriptor := &StructDescriptor{
Type: typ,
Fields: bindings,
}
for _, extension := range extensions {
extension.UpdateStructDescriptor(structDescriptor)
}
ctx.encoderExtension.UpdateStructDescriptor(structDescriptor)
ctx.decoderExtension.UpdateStructDescriptor(structDescriptor)
for _, extension := range ctx.extraExtensions {
extension.UpdateStructDescriptor(structDescriptor)
}
processTags(structDescriptor, ctx.frozenConfig)
// merge normal & embedded bindings & sort with original order
allBindings := sortableBindings(append(embeddedBindings, structDescriptor.Fields...))
sort.Sort(allBindings)
structDescriptor.Fields = allBindings
return structDescriptor
}
type sortableBindings []*Binding
func (bindings sortableBindings) Len() int {
return len(bindings)
}
func (bindings sortableBindings) Less(i, j int) bool {
left := bindings[i].levels
right := bindings[j].levels
k := 0
for {
if left[k] < right[k] {
return true
} else if left[k] > right[k] {
return false
}
k++
}
}
func (bindings sortableBindings) Swap(i, j int) {
bindings[i], bindings[j] = bindings[j], bindings[i]
}
func processTags(structDescriptor *StructDescriptor, cfg *frozenConfig) {
for _, binding := range structDescriptor.Fields {
shouldOmitEmpty := false
tagParts := strings.Split(binding.Field.Tag().Get(cfg.getTagKey()), ",")
for _, tagPart := range tagParts[1:] {
if tagPart == "omitempty" {
shouldOmitEmpty = true
} else if tagPart == "string" {
if binding.Field.Type().Kind() == reflect.String {
binding.Decoder = &stringModeStringDecoder{binding.Decoder, cfg}
binding.Encoder = &stringModeStringEncoder{binding.Encoder, cfg}
} else {
binding.Decoder = &stringModeNumberDecoder{binding.Decoder}
binding.Encoder = &stringModeNumberEncoder{binding.Encoder}
}
}
}
binding.Decoder = &structFieldDecoder{binding.Field, binding.Decoder}
binding.Encoder = &structFieldEncoder{binding.Field, binding.Encoder, shouldOmitEmpty}
}
}
func calcFieldNames(originalFieldName string, tagProvidedFieldName string, wholeTag string) []string {
// ignore?
if wholeTag == "-" {
return []string{}
}
// rename?
var fieldNames []string
if tagProvidedFieldName == "" {
fieldNames = []string{originalFieldName}
} else {
fieldNames = []string{tagProvidedFieldName}
}
// private?
isNotExported := unicode.IsLower(rune(originalFieldName[0])) || originalFieldName[0] == '_'
if isNotExported {
fieldNames = []string{}
}
return fieldNames
}
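
A hedged sketch of the Extension SPI and Register* helpers defined in this file: an extension that lower-cases field names by mutating the StructDescriptor. The extension and struct names are invented for the example.

package main

import (
    "fmt"
    "strings"

    jsoniter "github.com/json-iterator/go"
)

// lowercaseExtension renames every struct field to its lower-case form
// by rewriting the bindings in the StructDescriptor. Illustrative only.
type lowercaseExtension struct {
    jsoniter.DummyExtension
}

func (e *lowercaseExtension) UpdateStructDescriptor(sd *jsoniter.StructDescriptor) {
    for _, binding := range sd.Fields {
        name := strings.ToLower(binding.Field.Name())
        binding.FromNames = []string{name}
        binding.ToNames = []string{name}
    }
}

type account struct {
    UserName string
    Balance  int
}

func main() {
    api := jsoniter.Config{}.Froze()
    api.RegisterExtension(&lowercaseExtension{})

    out, _ := api.Marshal(account{UserName: "alice", Balance: 3})
    fmt.Println(string(out)) // {"username":"alice","balance":3}
}
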

112
vendor/github.com/json-iterator/go/reflect_json_number.go generated vendored Normal file
View File

@ -0,0 +1,112 @@
package jsoniter
import (
"encoding/json"
"github.com/modern-go/reflect2"
"strconv"
"unsafe"
)
type Number string
// String returns the literal text of the number.
func (n Number) String() string { return string(n) }
// Float64 returns the number as a float64.
func (n Number) Float64() (float64, error) {
return strconv.ParseFloat(string(n), 64)
}
// Int64 returns the number as an int64.
func (n Number) Int64() (int64, error) {
return strconv.ParseInt(string(n), 10, 64)
}
func CastJsonNumber(val interface{}) (string, bool) {
switch typedVal := val.(type) {
case json.Number:
return string(typedVal), true
case Number:
return string(typedVal), true
}
return "", false
}
var jsonNumberType = reflect2.TypeOfPtr((*json.Number)(nil)).Elem()
var jsoniterNumberType = reflect2.TypeOfPtr((*Number)(nil)).Elem()
func createDecoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValDecoder {
if typ.AssignableTo(jsonNumberType) {
return &jsonNumberCodec{}
}
if typ.AssignableTo(jsoniterNumberType) {
return &jsoniterNumberCodec{}
}
return nil
}
func createEncoderOfJsonNumber(ctx *ctx, typ reflect2.Type) ValEncoder {
if typ.AssignableTo(jsonNumberType) {
return &jsonNumberCodec{}
}
if typ.AssignableTo(jsoniterNumberType) {
return &jsoniterNumberCodec{}
}
return nil
}
type jsonNumberCodec struct {
}
func (codec *jsonNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
switch iter.WhatIsNext() {
case StringValue:
*((*json.Number)(ptr)) = json.Number(iter.ReadString())
case NilValue:
iter.skipFourBytes('n', 'u', 'l', 'l')
*((*json.Number)(ptr)) = ""
default:
*((*json.Number)(ptr)) = json.Number([]byte(iter.readNumberAsString()))
}
}
func (codec *jsonNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
number := *((*json.Number)(ptr))
if len(number) == 0 {
stream.writeByte('0')
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsonNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*json.Number)(ptr))) == 0
}
type jsoniterNumberCodec struct {
}
func (codec *jsoniterNumberCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
switch iter.WhatIsNext() {
case StringValue:
*((*Number)(ptr)) = Number(iter.ReadString())
case NilValue:
iter.skipFourBytes('n', 'u', 'l', 'l')
*((*Number)(ptr)) = ""
default:
*((*Number)(ptr)) = Number([]byte(iter.readNumberAsString()))
}
}
func (codec *jsoniterNumberCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
number := *((*Number)(ptr))
if len(number) == 0 {
stream.writeByte('0')
} else {
stream.WriteRaw(string(number))
}
}
func (codec *jsoniterNumberCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*Number)(ptr))) == 0
}
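
An illustrative example of the json.Number handling above: the numeric literal is kept as text and converted on demand.

package main

import (
    "encoding/json"
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

type order struct {
    Price json.Number `json:"price"`
}

func main() {
    var o order
    _ = jsoniter.Unmarshal([]byte(`{"price": 12.5}`), &o)

    // The literal text is preserved; convert only when needed.
    f, _ := o.Price.Float64()
    fmt.Println(o.Price.String(), f) // 12.5 12.5
}
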

60
vendor/github.com/json-iterator/go/reflect_json_raw_message.go generated vendored Normal file
View File

@ -0,0 +1,60 @@
package jsoniter
import (
"encoding/json"
"github.com/modern-go/reflect2"
"unsafe"
)
var jsonRawMessageType = reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()
var jsoniterRawMessageType = reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()
func createEncoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValEncoder {
if typ == jsonRawMessageType {
return &jsonRawMessageCodec{}
}
if typ == jsoniterRawMessageType {
return &jsoniterRawMessageCodec{}
}
return nil
}
func createDecoderOfJsonRawMessage(ctx *ctx, typ reflect2.Type) ValDecoder {
if typ == jsonRawMessageType {
return &jsonRawMessageCodec{}
}
if typ == jsoniterRawMessageType {
return &jsoniterRawMessageCodec{}
}
return nil
}
type jsonRawMessageCodec struct {
}
func (codec *jsonRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*json.RawMessage)(ptr)) = json.RawMessage(iter.SkipAndReturnBytes())
}
func (codec *jsonRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*json.RawMessage)(ptr))))
}
func (codec *jsonRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*json.RawMessage)(ptr))) == 0
}
type jsoniterRawMessageCodec struct {
}
func (codec *jsoniterRawMessageCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*RawMessage)(ptr)) = RawMessage(iter.SkipAndReturnBytes())
}
func (codec *jsoniterRawMessageCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteRaw(string(*((*RawMessage)(ptr))))
}
func (codec *jsoniterRawMessageCodec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*RawMessage)(ptr))) == 0
}
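
A small illustrative example of the RawMessage codec above, commonly used to defer decoding of a payload until its type is known.

package main

import (
    "encoding/json"
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

type envelope struct {
    Kind    string          `json:"kind"`
    Payload json.RawMessage `json:"payload"`
}

func main() {
    var e envelope
    _ = jsoniter.Unmarshal([]byte(`{"kind":"point","payload":{"x":1,"y":2}}`), &e)

    // Payload is kept verbatim and can be decoded later once Kind is known.
    fmt.Println(e.Kind, string(e.Payload)) // point {"x":1,"y":2}
}
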

346
vendor/github.com/json-iterator/go/reflect_map.go generated vendored Normal file
View File

@ -0,0 +1,346 @@
package jsoniter
import (
"fmt"
"github.com/modern-go/reflect2"
"io"
"reflect"
"sort"
"unsafe"
)
func decoderOfMap(ctx *ctx, typ reflect2.Type) ValDecoder {
mapType := typ.(*reflect2.UnsafeMapType)
keyDecoder := decoderOfMapKey(ctx.append("[mapKey]"), mapType.Key())
elemDecoder := decoderOfType(ctx.append("[mapElem]"), mapType.Elem())
return &mapDecoder{
mapType: mapType,
keyType: mapType.Key(),
elemType: mapType.Elem(),
keyDecoder: keyDecoder,
elemDecoder: elemDecoder,
}
}
func encoderOfMap(ctx *ctx, typ reflect2.Type) ValEncoder {
mapType := typ.(*reflect2.UnsafeMapType)
if ctx.sortMapKeys {
return &sortKeysMapEncoder{
mapType: mapType,
keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
}
}
return &mapEncoder{
mapType: mapType,
keyEncoder: encoderOfMapKey(ctx.append("[mapKey]"), mapType.Key()),
elemEncoder: encoderOfType(ctx.append("[mapElem]"), mapType.Elem()),
}
}
func decoderOfMapKey(ctx *ctx, typ reflect2.Type) ValDecoder {
decoder := ctx.decoderExtension.CreateMapKeyDecoder(typ)
if decoder != nil {
return decoder
}
for _, extension := range ctx.extraExtensions {
decoder := extension.CreateMapKeyDecoder(typ)
if decoder != nil {
return decoder
}
}
ptrType := reflect2.PtrTo(typ)
if ptrType.Implements(unmarshalerType) {
return &referenceDecoder{
&unmarshalerDecoder{
valType: ptrType,
},
}
}
if typ.Implements(unmarshalerType) {
return &unmarshalerDecoder{
valType: typ,
}
}
if ptrType.Implements(textUnmarshalerType) {
return &referenceDecoder{
&textUnmarshalerDecoder{
valType: ptrType,
},
}
}
if typ.Implements(textUnmarshalerType) {
return &textUnmarshalerDecoder{
valType: typ,
}
}
switch typ.Kind() {
case reflect.String:
return decoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
case reflect.Bool,
reflect.Uint8, reflect.Int8,
reflect.Uint16, reflect.Int16,
reflect.Uint32, reflect.Int32,
reflect.Uint64, reflect.Int64,
reflect.Uint, reflect.Int,
reflect.Float32, reflect.Float64,
reflect.Uintptr:
typ = reflect2.DefaultTypeOfKind(typ.Kind())
return &numericMapKeyDecoder{decoderOfType(ctx, typ)}
default:
return &lazyErrorDecoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
}
}
func encoderOfMapKey(ctx *ctx, typ reflect2.Type) ValEncoder {
encoder := ctx.encoderExtension.CreateMapKeyEncoder(typ)
if encoder != nil {
return encoder
}
for _, extension := range ctx.extraExtensions {
encoder := extension.CreateMapKeyEncoder(typ)
if encoder != nil {
return encoder
}
}
if typ == textMarshalerType {
return &directTextMarshalerEncoder{
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
}
}
if typ.Implements(textMarshalerType) {
return &textMarshalerEncoder{
valType: typ,
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
}
}
switch typ.Kind() {
case reflect.String:
return encoderOfType(ctx, reflect2.DefaultTypeOfKind(reflect.String))
case reflect.Bool,
reflect.Uint8, reflect.Int8,
reflect.Uint16, reflect.Int16,
reflect.Uint32, reflect.Int32,
reflect.Uint64, reflect.Int64,
reflect.Uint, reflect.Int,
reflect.Float32, reflect.Float64,
reflect.Uintptr:
typ = reflect2.DefaultTypeOfKind(typ.Kind())
return &numericMapKeyEncoder{encoderOfType(ctx, typ)}
default:
if typ.Kind() == reflect.Interface {
return &dynamicMapKeyEncoder{ctx, typ}
}
return &lazyErrorEncoder{err: fmt.Errorf("unsupported map key type: %v", typ)}
}
}
type mapDecoder struct {
mapType *reflect2.UnsafeMapType
keyType reflect2.Type
elemType reflect2.Type
keyDecoder ValDecoder
elemDecoder ValDecoder
}
func (decoder *mapDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
mapType := decoder.mapType
c := iter.nextToken()
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
*(*unsafe.Pointer)(ptr) = nil
mapType.UnsafeSet(ptr, mapType.UnsafeNew())
return
}
if mapType.UnsafeIsNil(ptr) {
mapType.UnsafeSet(ptr, mapType.UnsafeMakeMap(0))
}
if c != '{' {
iter.ReportError("ReadMapCB", `expect { or n, but found `+string([]byte{c}))
return
}
c = iter.nextToken()
if c == '}' {
return
}
iter.unreadByte()
key := decoder.keyType.UnsafeNew()
decoder.keyDecoder.Decode(key, iter)
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
return
}
elem := decoder.elemType.UnsafeNew()
decoder.elemDecoder.Decode(elem, iter)
decoder.mapType.UnsafeSetIndex(ptr, key, elem)
for c = iter.nextToken(); c == ','; c = iter.nextToken() {
key := decoder.keyType.UnsafeNew()
decoder.keyDecoder.Decode(key, iter)
c = iter.nextToken()
if c != ':' {
iter.ReportError("ReadMapCB", "expect : after object field, but found "+string([]byte{c}))
return
}
elem := decoder.elemType.UnsafeNew()
decoder.elemDecoder.Decode(elem, iter)
decoder.mapType.UnsafeSetIndex(ptr, key, elem)
}
if c != '}' {
iter.ReportError("ReadMapCB", `expect }, but found `+string([]byte{c}))
}
}
type numericMapKeyDecoder struct {
decoder ValDecoder
}
func (decoder *numericMapKeyDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
c := iter.nextToken()
if c != '"' {
iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
return
}
decoder.decoder.Decode(ptr, iter)
c = iter.nextToken()
if c != '"' {
iter.ReportError("ReadMapCB", `expect ", but found `+string([]byte{c}))
return
}
}
type numericMapKeyEncoder struct {
encoder ValEncoder
}
func (encoder *numericMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.writeByte('"')
encoder.encoder.Encode(ptr, stream)
stream.writeByte('"')
}
func (encoder *numericMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return false
}
type dynamicMapKeyEncoder struct {
ctx *ctx
valType reflect2.Type
}
func (encoder *dynamicMapKeyEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
obj := encoder.valType.UnsafeIndirect(ptr)
encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).Encode(reflect2.PtrOf(obj), stream)
}
func (encoder *dynamicMapKeyEncoder) IsEmpty(ptr unsafe.Pointer) bool {
obj := encoder.valType.UnsafeIndirect(ptr)
return encoderOfMapKey(encoder.ctx, reflect2.TypeOf(obj)).IsEmpty(reflect2.PtrOf(obj))
}
type mapEncoder struct {
mapType *reflect2.UnsafeMapType
keyEncoder ValEncoder
elemEncoder ValEncoder
}
func (encoder *mapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *(*unsafe.Pointer)(ptr) == nil {
stream.WriteNil()
return
}
stream.WriteObjectStart()
iter := encoder.mapType.UnsafeIterate(ptr)
for i := 0; iter.HasNext(); i++ {
if i != 0 {
stream.WriteMore()
}
key, elem := iter.UnsafeNext()
encoder.keyEncoder.Encode(key, stream)
if stream.indention > 0 {
stream.writeTwoBytes(byte(':'), byte(' '))
} else {
stream.writeByte(':')
}
encoder.elemEncoder.Encode(elem, stream)
}
stream.WriteObjectEnd()
}
func (encoder *mapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
iter := encoder.mapType.UnsafeIterate(ptr)
return !iter.HasNext()
}
type sortKeysMapEncoder struct {
mapType *reflect2.UnsafeMapType
keyEncoder ValEncoder
elemEncoder ValEncoder
}
func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *(*unsafe.Pointer)(ptr) == nil {
stream.WriteNil()
return
}
stream.WriteObjectStart()
mapIter := encoder.mapType.UnsafeIterate(ptr)
subStream := stream.cfg.BorrowStream(nil)
subStream.Attachment = stream.Attachment
subIter := stream.cfg.BorrowIterator(nil)
keyValues := encodedKeyValues{}
for mapIter.HasNext() {
key, elem := mapIter.UnsafeNext()
subStreamIndex := subStream.Buffered()
encoder.keyEncoder.Encode(key, subStream)
if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
stream.Error = subStream.Error
}
encodedKey := subStream.Buffer()[subStreamIndex:]
subIter.ResetBytes(encodedKey)
decodedKey := subIter.ReadString()
if stream.indention > 0 {
subStream.writeTwoBytes(byte(':'), byte(' '))
} else {
subStream.writeByte(':')
}
encoder.elemEncoder.Encode(elem, subStream)
keyValues = append(keyValues, encodedKV{
key: decodedKey,
keyValue: subStream.Buffer()[subStreamIndex:],
})
}
sort.Sort(keyValues)
for i, keyValue := range keyValues {
if i != 0 {
stream.WriteMore()
}
stream.Write(keyValue.keyValue)
}
if subStream.Error != nil && stream.Error == nil {
stream.Error = subStream.Error
}
stream.WriteObjectEnd()
stream.cfg.ReturnStream(subStream)
stream.cfg.ReturnIterator(subIter)
}
func (encoder *sortKeysMapEncoder) IsEmpty(ptr unsafe.Pointer) bool {
iter := encoder.mapType.UnsafeIterate(ptr)
return !iter.HasNext()
}
type encodedKeyValues []encodedKV
type encodedKV struct {
key string
keyValue []byte
}
func (sv encodedKeyValues) Len() int { return len(sv) }
func (sv encodedKeyValues) Swap(i, j int) { sv[i], sv[j] = sv[j], sv[i] }
func (sv encodedKeyValues) Less(i, j int) bool { return sv[i].key < sv[j].key }
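
An illustrative sketch of the map encoders above: SortMapKeys selects sortKeysMapEncoder, and non-string keys are quoted via numericMapKeyEncoder.

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    m := map[string]int{"b": 2, "a": 1, "c": 3}

    // The default config does not sort keys; this one does.
    sorted := jsoniter.Config{SortMapKeys: true}.Froze()
    out, _ := sorted.Marshal(m)
    fmt.Println(string(out)) // {"a":1,"b":2,"c":3}

    // Numeric keys are written as quoted strings.
    out, _ = sorted.Marshal(map[int]string{1: "one"})
    fmt.Println(string(out)) // {"1":"one"}
}
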

225
vendor/github.com/json-iterator/go/reflect_marshaler.go generated vendored Normal file
View File

@ -0,0 +1,225 @@
package jsoniter
import (
"encoding"
"encoding/json"
"unsafe"
"github.com/modern-go/reflect2"
)
var marshalerType = reflect2.TypeOfPtr((*json.Marshaler)(nil)).Elem()
var unmarshalerType = reflect2.TypeOfPtr((*json.Unmarshaler)(nil)).Elem()
var textMarshalerType = reflect2.TypeOfPtr((*encoding.TextMarshaler)(nil)).Elem()
var textUnmarshalerType = reflect2.TypeOfPtr((*encoding.TextUnmarshaler)(nil)).Elem()
func createDecoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValDecoder {
ptrType := reflect2.PtrTo(typ)
if ptrType.Implements(unmarshalerType) {
return &referenceDecoder{
&unmarshalerDecoder{ptrType},
}
}
if ptrType.Implements(textUnmarshalerType) {
return &referenceDecoder{
&textUnmarshalerDecoder{ptrType},
}
}
return nil
}
func createEncoderOfMarshaler(ctx *ctx, typ reflect2.Type) ValEncoder {
if typ == marshalerType {
checkIsEmpty := createCheckIsEmpty(ctx, typ)
var encoder ValEncoder = &directMarshalerEncoder{
checkIsEmpty: checkIsEmpty,
}
return encoder
}
if typ.Implements(marshalerType) {
checkIsEmpty := createCheckIsEmpty(ctx, typ)
var encoder ValEncoder = &marshalerEncoder{
valType: typ,
checkIsEmpty: checkIsEmpty,
}
return encoder
}
ptrType := reflect2.PtrTo(typ)
if ctx.prefix != "" && ptrType.Implements(marshalerType) {
checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
var encoder ValEncoder = &marshalerEncoder{
valType: ptrType,
checkIsEmpty: checkIsEmpty,
}
return &referenceEncoder{encoder}
}
if typ == textMarshalerType {
checkIsEmpty := createCheckIsEmpty(ctx, typ)
var encoder ValEncoder = &directTextMarshalerEncoder{
checkIsEmpty: checkIsEmpty,
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
}
return encoder
}
if typ.Implements(textMarshalerType) {
checkIsEmpty := createCheckIsEmpty(ctx, typ)
var encoder ValEncoder = &textMarshalerEncoder{
valType: typ,
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
checkIsEmpty: checkIsEmpty,
}
return encoder
}
// if prefix is empty, the type is the root type
if ctx.prefix != "" && ptrType.Implements(textMarshalerType) {
checkIsEmpty := createCheckIsEmpty(ctx, ptrType)
var encoder ValEncoder = &textMarshalerEncoder{
valType: ptrType,
stringEncoder: ctx.EncoderOf(reflect2.TypeOf("")),
checkIsEmpty: checkIsEmpty,
}
return &referenceEncoder{encoder}
}
return nil
}
type marshalerEncoder struct {
checkIsEmpty checkIsEmpty
valType reflect2.Type
}
func (encoder *marshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
obj := encoder.valType.UnsafeIndirect(ptr)
if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
stream.WriteNil()
return
}
marshaler := obj.(json.Marshaler)
bytes, err := marshaler.MarshalJSON()
if err != nil {
stream.Error = err
} else {
// html escape was already done by jsoniter
// but the extra '\n' should be trimmed
l := len(bytes)
if l > 0 && bytes[l-1] == '\n' {
bytes = bytes[:l-1]
}
stream.Write(bytes)
}
}
func (encoder *marshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.checkIsEmpty.IsEmpty(ptr)
}
type directMarshalerEncoder struct {
checkIsEmpty checkIsEmpty
}
func (encoder *directMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
marshaler := *(*json.Marshaler)(ptr)
if marshaler == nil {
stream.WriteNil()
return
}
bytes, err := marshaler.MarshalJSON()
if err != nil {
stream.Error = err
} else {
stream.Write(bytes)
}
}
func (encoder *directMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.checkIsEmpty.IsEmpty(ptr)
}
type textMarshalerEncoder struct {
valType reflect2.Type
stringEncoder ValEncoder
checkIsEmpty checkIsEmpty
}
func (encoder *textMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
obj := encoder.valType.UnsafeIndirect(ptr)
if encoder.valType.IsNullable() && reflect2.IsNil(obj) {
stream.WriteNil()
return
}
marshaler := (obj).(encoding.TextMarshaler)
bytes, err := marshaler.MarshalText()
if err != nil {
stream.Error = err
} else {
str := string(bytes)
encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
}
}
func (encoder *textMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.checkIsEmpty.IsEmpty(ptr)
}
type directTextMarshalerEncoder struct {
stringEncoder ValEncoder
checkIsEmpty checkIsEmpty
}
func (encoder *directTextMarshalerEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
marshaler := *(*encoding.TextMarshaler)(ptr)
if marshaler == nil {
stream.WriteNil()
return
}
bytes, err := marshaler.MarshalText()
if err != nil {
stream.Error = err
} else {
str := string(bytes)
encoder.stringEncoder.Encode(unsafe.Pointer(&str), stream)
}
}
func (encoder *directTextMarshalerEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.checkIsEmpty.IsEmpty(ptr)
}
type unmarshalerDecoder struct {
valType reflect2.Type
}
func (decoder *unmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
valType := decoder.valType
obj := valType.UnsafeIndirect(ptr)
unmarshaler := obj.(json.Unmarshaler)
iter.nextToken()
iter.unreadByte() // skip spaces
bytes := iter.SkipAndReturnBytes()
err := unmarshaler.UnmarshalJSON(bytes)
if err != nil {
iter.ReportError("unmarshalerDecoder", err.Error())
}
}
type textUnmarshalerDecoder struct {
valType reflect2.Type
}
func (decoder *textUnmarshalerDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
valType := decoder.valType
obj := valType.UnsafeIndirect(ptr)
if reflect2.IsNil(obj) {
ptrType := valType.(*reflect2.UnsafePtrType)
elemType := ptrType.Elem()
elem := elemType.UnsafeNew()
ptrType.UnsafeSet(ptr, unsafe.Pointer(&elem))
obj = valType.UnsafeIndirect(ptr)
}
unmarshaler := (obj).(encoding.TextUnmarshaler)
str := iter.ReadString()
err := unmarshaler.UnmarshalText([]byte(str))
if err != nil {
iter.ReportError("textUnmarshalerDecoder", err.Error())
}
}
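
A hedged sketch of the json.Marshaler / encoding.TextMarshaler plumbing above, using an invented color type that serializes itself as a JSON string.

package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

// color implements encoding.TextMarshaler and encoding.TextUnmarshaler,
// so jsoniter routes it through the codecs above. Illustrative only.
type color struct{ r, g, b int }

func (c color) MarshalText() ([]byte, error) {
    return []byte(fmt.Sprintf("%d,%d,%d", c.r, c.g, c.b)), nil
}

func (c *color) UnmarshalText(text []byte) error {
    _, err := fmt.Sscanf(string(text), "%d,%d,%d", &c.r, &c.g, &c.b)
    return err
}

func main() {
    out, _ := jsoniter.Marshal(color{1, 2, 3})
    fmt.Println(string(out)) // "1,2,3"

    var c color
    _ = jsoniter.Unmarshal([]byte(`"4,5,6"`), &c)
    fmt.Println(c) // {4 5 6}
}
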

453
vendor/github.com/json-iterator/go/reflect_native.go generated vendored Normal file
View File

@ -0,0 +1,453 @@
package jsoniter
import (
"encoding/base64"
"reflect"
"strconv"
"unsafe"
"github.com/modern-go/reflect2"
)
const ptrSize = 32 << uintptr(^uintptr(0)>>63)
func createEncoderOfNative(ctx *ctx, typ reflect2.Type) ValEncoder {
if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
sliceDecoder := decoderOfSlice(ctx, typ)
return &base64Codec{sliceDecoder: sliceDecoder}
}
typeName := typ.String()
kind := typ.Kind()
switch kind {
case reflect.String:
if typeName != "string" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
}
return &stringCodec{}
case reflect.Int:
if typeName != "int" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
}
if strconv.IntSize == 32 {
return &int32Codec{}
}
return &int64Codec{}
case reflect.Int8:
if typeName != "int8" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
}
return &int8Codec{}
case reflect.Int16:
if typeName != "int16" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
}
return &int16Codec{}
case reflect.Int32:
if typeName != "int32" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
}
return &int32Codec{}
case reflect.Int64:
if typeName != "int64" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
}
return &int64Codec{}
case reflect.Uint:
if typeName != "uint" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
}
if strconv.IntSize == 32 {
return &uint32Codec{}
}
return &uint64Codec{}
case reflect.Uint8:
if typeName != "uint8" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
}
return &uint8Codec{}
case reflect.Uint16:
if typeName != "uint16" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
}
return &uint16Codec{}
case reflect.Uint32:
if typeName != "uint32" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
}
return &uint32Codec{}
case reflect.Uintptr:
if typeName != "uintptr" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
}
if ptrSize == 32 {
return &uint32Codec{}
}
return &uint64Codec{}
case reflect.Uint64:
if typeName != "uint64" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
}
return &uint64Codec{}
case reflect.Float32:
if typeName != "float32" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
}
return &float32Codec{}
case reflect.Float64:
if typeName != "float64" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
}
return &float64Codec{}
case reflect.Bool:
if typeName != "bool" {
return encoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
}
return &boolCodec{}
}
return nil
}
func createDecoderOfNative(ctx *ctx, typ reflect2.Type) ValDecoder {
if typ.Kind() == reflect.Slice && typ.(reflect2.SliceType).Elem().Kind() == reflect.Uint8 {
sliceDecoder := decoderOfSlice(ctx, typ)
return &base64Codec{sliceDecoder: sliceDecoder}
}
typeName := typ.String()
switch typ.Kind() {
case reflect.String:
if typeName != "string" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*string)(nil)).Elem())
}
return &stringCodec{}
case reflect.Int:
if typeName != "int" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*int)(nil)).Elem())
}
if strconv.IntSize == 32 {
return &int32Codec{}
}
return &int64Codec{}
case reflect.Int8:
if typeName != "int8" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*int8)(nil)).Elem())
}
return &int8Codec{}
case reflect.Int16:
if typeName != "int16" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*int16)(nil)).Elem())
}
return &int16Codec{}
case reflect.Int32:
if typeName != "int32" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*int32)(nil)).Elem())
}
return &int32Codec{}
case reflect.Int64:
if typeName != "int64" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*int64)(nil)).Elem())
}
return &int64Codec{}
case reflect.Uint:
if typeName != "uint" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint)(nil)).Elem())
}
if strconv.IntSize == 32 {
return &uint32Codec{}
}
return &uint64Codec{}
case reflect.Uint8:
if typeName != "uint8" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint8)(nil)).Elem())
}
return &uint8Codec{}
case reflect.Uint16:
if typeName != "uint16" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint16)(nil)).Elem())
}
return &uint16Codec{}
case reflect.Uint32:
if typeName != "uint32" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint32)(nil)).Elem())
}
return &uint32Codec{}
case reflect.Uintptr:
if typeName != "uintptr" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uintptr)(nil)).Elem())
}
if ptrSize == 32 {
return &uint32Codec{}
}
return &uint64Codec{}
case reflect.Uint64:
if typeName != "uint64" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*uint64)(nil)).Elem())
}
return &uint64Codec{}
case reflect.Float32:
if typeName != "float32" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*float32)(nil)).Elem())
}
return &float32Codec{}
case reflect.Float64:
if typeName != "float64" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*float64)(nil)).Elem())
}
return &float64Codec{}
case reflect.Bool:
if typeName != "bool" {
return decoderOfType(ctx, reflect2.TypeOfPtr((*bool)(nil)).Elem())
}
return &boolCodec{}
}
return nil
}
type stringCodec struct {
}
func (codec *stringCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
*((*string)(ptr)) = iter.ReadString()
}
func (codec *stringCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
str := *((*string)(ptr))
stream.WriteString(str)
}
func (codec *stringCodec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*string)(ptr)) == ""
}
type int8Codec struct {
}
func (codec *int8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*int8)(ptr)) = iter.ReadInt8()
}
}
func (codec *int8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt8(*((*int8)(ptr)))
}
func (codec *int8Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int8)(ptr)) == 0
}
type int16Codec struct {
}
func (codec *int16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*int16)(ptr)) = iter.ReadInt16()
}
}
func (codec *int16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt16(*((*int16)(ptr)))
}
func (codec *int16Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int16)(ptr)) == 0
}
type int32Codec struct {
}
func (codec *int32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*int32)(ptr)) = iter.ReadInt32()
}
}
func (codec *int32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt32(*((*int32)(ptr)))
}
func (codec *int32Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int32)(ptr)) == 0
}
type int64Codec struct {
}
func (codec *int64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*int64)(ptr)) = iter.ReadInt64()
}
}
func (codec *int64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteInt64(*((*int64)(ptr)))
}
func (codec *int64Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*int64)(ptr)) == 0
}
type uint8Codec struct {
}
func (codec *uint8Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*uint8)(ptr)) = iter.ReadUint8()
}
}
func (codec *uint8Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint8(*((*uint8)(ptr)))
}
func (codec *uint8Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint8)(ptr)) == 0
}
type uint16Codec struct {
}
func (codec *uint16Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*uint16)(ptr)) = iter.ReadUint16()
}
}
func (codec *uint16Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint16(*((*uint16)(ptr)))
}
func (codec *uint16Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint16)(ptr)) == 0
}
type uint32Codec struct {
}
func (codec *uint32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*uint32)(ptr)) = iter.ReadUint32()
}
}
func (codec *uint32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint32(*((*uint32)(ptr)))
}
func (codec *uint32Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint32)(ptr)) == 0
}
type uint64Codec struct {
}
func (codec *uint64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*uint64)(ptr)) = iter.ReadUint64()
}
}
func (codec *uint64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteUint64(*((*uint64)(ptr)))
}
func (codec *uint64Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*uint64)(ptr)) == 0
}
type float32Codec struct {
}
func (codec *float32Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*float32)(ptr)) = iter.ReadFloat32()
}
}
func (codec *float32Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat32(*((*float32)(ptr)))
}
func (codec *float32Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float32)(ptr)) == 0
}
type float64Codec struct {
}
func (codec *float64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*float64)(ptr)) = iter.ReadFloat64()
}
}
func (codec *float64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteFloat64(*((*float64)(ptr)))
}
func (codec *float64Codec) IsEmpty(ptr unsafe.Pointer) bool {
return *((*float64)(ptr)) == 0
}
type boolCodec struct {
}
func (codec *boolCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if !iter.ReadNil() {
*((*bool)(ptr)) = iter.ReadBool()
}
}
func (codec *boolCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteBool(*((*bool)(ptr)))
}
func (codec *boolCodec) IsEmpty(ptr unsafe.Pointer) bool {
return !(*((*bool)(ptr)))
}
type base64Codec struct {
sliceType *reflect2.UnsafeSliceType
sliceDecoder ValDecoder
}
func (codec *base64Codec) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
codec.sliceType.UnsafeSetNil(ptr)
return
}
switch iter.WhatIsNext() {
case StringValue:
src := iter.ReadString()
dst, err := base64.StdEncoding.DecodeString(src)
if err != nil {
iter.ReportError("decode base64", err.Error())
} else {
codec.sliceType.UnsafeSet(ptr, unsafe.Pointer(&dst))
}
case ArrayValue:
codec.sliceDecoder.Decode(ptr, iter)
default:
iter.ReportError("base64Codec", "invalid input")
}
}
func (codec *base64Codec) Encode(ptr unsafe.Pointer, stream *Stream) {
if codec.sliceType.UnsafeIsNil(ptr) {
stream.WriteNil()
return
}
src := *((*[]byte)(ptr))
encoding := base64.StdEncoding
stream.writeByte('"')
if len(src) != 0 {
size := encoding.EncodedLen(len(src))
buf := make([]byte, size)
encoding.Encode(buf, src)
stream.buf = append(stream.buf, buf...)
}
stream.writeByte('"')
}
func (codec *base64Codec) IsEmpty(ptr unsafe.Pointer) bool {
return len(*((*[]byte)(ptr))) == 0
}
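The base64Codec mirrors encoding/json on output: a []byte encodes as a base64 string. On input it accepts either a base64 string or a plain JSON array of byte values (the ArrayValue branch above). A minimal sketch using jsoniter's standard-library-compatible config:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var json = jsoniter.ConfigCompatibleWithStandardLibrary

	// Encoding: []byte is written as a base64 string.
	out, _ := json.Marshal([]byte("hi"))
	fmt.Println(string(out)) // "aGk="

	// Decoding: both a base64 string and an array of byte values are accepted.
	var a, b []byte
	_ = json.Unmarshal([]byte(`"aGk="`), &a)
	_ = json.Unmarshal([]byte(`[104,105]`), &b)
	fmt.Println(string(a), string(b)) // hi hi
}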

129
vendor/github.com/json-iterator/go/reflect_optional.go generated vendored Normal file
View File

@ -0,0 +1,129 @@
package jsoniter
import (
"github.com/modern-go/reflect2"
"unsafe"
)
func decoderOfOptional(ctx *ctx, typ reflect2.Type) ValDecoder {
ptrType := typ.(*reflect2.UnsafePtrType)
elemType := ptrType.Elem()
decoder := decoderOfType(ctx, elemType)
return &OptionalDecoder{elemType, decoder}
}
func encoderOfOptional(ctx *ctx, typ reflect2.Type) ValEncoder {
ptrType := typ.(*reflect2.UnsafePtrType)
elemType := ptrType.Elem()
elemEncoder := encoderOfType(ctx, elemType)
encoder := &OptionalEncoder{elemEncoder}
return encoder
}
type OptionalDecoder struct {
ValueType reflect2.Type
ValueDecoder ValDecoder
}
func (decoder *OptionalDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if iter.ReadNil() {
*((*unsafe.Pointer)(ptr)) = nil
} else {
if *((*unsafe.Pointer)(ptr)) == nil {
// pointer is nil, so we have to allocate memory to hold the value

newPtr := decoder.ValueType.UnsafeNew()
decoder.ValueDecoder.Decode(newPtr, iter)
*((*unsafe.Pointer)(ptr)) = newPtr
} else {
//reuse existing instance
decoder.ValueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
}
type dereferenceDecoder struct {
// only to dereference a pointer
valueType reflect2.Type
valueDecoder ValDecoder
}
func (decoder *dereferenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
if *((*unsafe.Pointer)(ptr)) == nil {
// pointer is nil, so we have to allocate memory to hold the value
newPtr := decoder.valueType.UnsafeNew()
decoder.valueDecoder.Decode(newPtr, iter)
*((*unsafe.Pointer)(ptr)) = newPtr
} else {
//reuse existing instance
decoder.valueDecoder.Decode(*((*unsafe.Pointer)(ptr)), iter)
}
}
type OptionalEncoder struct {
ValueEncoder ValEncoder
}
func (encoder *OptionalEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *OptionalEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return *((*unsafe.Pointer)(ptr)) == nil
}
type dereferenceEncoder struct {
ValueEncoder ValEncoder
}
func (encoder *dereferenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if *((*unsafe.Pointer)(ptr)) == nil {
stream.WriteNil()
} else {
encoder.ValueEncoder.Encode(*((*unsafe.Pointer)(ptr)), stream)
}
}
func (encoder *dereferenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
dePtr := *((*unsafe.Pointer)(ptr))
if dePtr == nil {
return true
}
return encoder.ValueEncoder.IsEmpty(dePtr)
}
func (encoder *dereferenceEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
deReferenced := *((*unsafe.Pointer)(ptr))
if deReferenced == nil {
return true
}
isEmbeddedPtrNil, converted := encoder.ValueEncoder.(IsEmbeddedPtrNil)
if !converted {
return false
}
fieldPtr := unsafe.Pointer(deReferenced)
return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
}
type referenceEncoder struct {
encoder ValEncoder
}
func (encoder *referenceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
encoder.encoder.Encode(unsafe.Pointer(&ptr), stream)
}
func (encoder *referenceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.encoder.IsEmpty(unsafe.Pointer(&ptr))
}
type referenceDecoder struct {
decoder ValDecoder
}
func (decoder *referenceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.decoder.Decode(unsafe.Pointer(&ptr), iter)
}
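OptionalDecoder allocates storage the first time a nil pointer receives a value, reuses the existing allocation otherwise, and resets the pointer to nil on JSON null; OptionalEncoder writes null for nil pointers. A small sketch, assuming a hypothetical Config struct with a pointer field:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type Config struct {
	Port *int `json:"port"`
}

func main() {
	var json = jsoniter.ConfigCompatibleWithStandardLibrary

	var c Config
	_ = json.Unmarshal([]byte(`{"port":8080}`), &c) // nil pointer: storage is allocated (UnsafeNew branch)
	fmt.Println(*c.Port)                            // 8080

	_ = json.Unmarshal([]byte(`{"port":null}`), &c) // null resets the pointer to nil
	fmt.Println(c.Port == nil)                      // true

	out, _ := json.Marshal(Config{}) // nil pointer encodes as null
	fmt.Println(string(out))         // {"port":null}
}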

99
vendor/github.com/json-iterator/go/reflect_slice.go generated vendored Normal file
View File

@ -0,0 +1,99 @@
package jsoniter
import (
"fmt"
"github.com/modern-go/reflect2"
"io"
"unsafe"
)
func decoderOfSlice(ctx *ctx, typ reflect2.Type) ValDecoder {
sliceType := typ.(*reflect2.UnsafeSliceType)
decoder := decoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
return &sliceDecoder{sliceType, decoder}
}
func encoderOfSlice(ctx *ctx, typ reflect2.Type) ValEncoder {
sliceType := typ.(*reflect2.UnsafeSliceType)
encoder := encoderOfType(ctx.append("[sliceElem]"), sliceType.Elem())
return &sliceEncoder{sliceType, encoder}
}
type sliceEncoder struct {
sliceType *reflect2.UnsafeSliceType
elemEncoder ValEncoder
}
func (encoder *sliceEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
if encoder.sliceType.UnsafeIsNil(ptr) {
stream.WriteNil()
return
}
length := encoder.sliceType.UnsafeLengthOf(ptr)
if length == 0 {
stream.WriteEmptyArray()
return
}
stream.WriteArrayStart()
encoder.elemEncoder.Encode(encoder.sliceType.UnsafeGetIndex(ptr, 0), stream)
for i := 1; i < length; i++ {
stream.WriteMore()
elemPtr := encoder.sliceType.UnsafeGetIndex(ptr, i)
encoder.elemEncoder.Encode(elemPtr, stream)
}
stream.WriteArrayEnd()
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%v: %s", encoder.sliceType, stream.Error.Error())
}
}
func (encoder *sliceEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.sliceType.UnsafeLengthOf(ptr) == 0
}
type sliceDecoder struct {
sliceType *reflect2.UnsafeSliceType
elemDecoder ValDecoder
}
func (decoder *sliceDecoder) Decode(ptr unsafe.Pointer, iter *Iterator) {
decoder.doDecode(ptr, iter)
if iter.Error != nil && iter.Error != io.EOF {
iter.Error = fmt.Errorf("%v: %s", decoder.sliceType, iter.Error.Error())
}
}
func (decoder *sliceDecoder) doDecode(ptr unsafe.Pointer, iter *Iterator) {
c := iter.nextToken()
sliceType := decoder.sliceType
if c == 'n' {
iter.skipThreeBytes('u', 'l', 'l')
sliceType.UnsafeSetNil(ptr)
return
}
if c != '[' {
iter.ReportError("decode slice", "expect [ or n, but found "+string([]byte{c}))
return
}
c = iter.nextToken()
if c == ']' {
sliceType.UnsafeSet(ptr, sliceType.UnsafeMakeSlice(0, 0))
return
}
iter.unreadByte()
sliceType.UnsafeGrow(ptr, 1)
elemPtr := sliceType.UnsafeGetIndex(ptr, 0)
decoder.elemDecoder.Decode(elemPtr, iter)
length := 1
for c = iter.nextToken(); c == ','; c = iter.nextToken() {
idx := length
length += 1
sliceType.UnsafeGrow(ptr, length)
elemPtr = sliceType.UnsafeGetIndex(ptr, idx)
decoder.elemDecoder.Decode(elemPtr, iter)
}
if c != ']' {
iter.ReportError("decode slice", "expect ], but found "+string([]byte{c}))
return
}
}
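The slice decoder above distinguishes null (the slice is set to nil) from [] (an empty, non-nil slice) and grows the target one element at a time. A short sketch:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var json = jsoniter.ConfigCompatibleWithStandardLibrary

	var a, b, c []int
	_ = json.Unmarshal([]byte(`null`), &a)    // 'n' token: the slice is set to nil
	_ = json.Unmarshal([]byte(`[]`), &b)      // ']' right away: empty but non-nil slice
	_ = json.Unmarshal([]byte(`[1,2,3]`), &c) // grown one element per ',' token
	fmt.Println(a == nil, b == nil, len(c))   // true false 3
}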

File diff suppressed because it is too large

211
vendor/github.com/json-iterator/go/reflect_struct_encoder.go generated vendored Normal file
View File

@ -0,0 +1,211 @@
package jsoniter
import (
"fmt"
"github.com/modern-go/reflect2"
"io"
"reflect"
"unsafe"
)
func encoderOfStruct(ctx *ctx, typ reflect2.Type) ValEncoder {
type bindingTo struct {
binding *Binding
toName string
ignored bool
}
orderedBindings := []*bindingTo{}
structDescriptor := describeStruct(ctx, typ)
for _, binding := range structDescriptor.Fields {
for _, toName := range binding.ToNames {
new := &bindingTo{
binding: binding,
toName: toName,
}
for _, old := range orderedBindings {
if old.toName != toName {
continue
}
old.ignored, new.ignored = resolveConflictBinding(ctx.frozenConfig, old.binding, new.binding)
}
orderedBindings = append(orderedBindings, new)
}
}
if len(orderedBindings) == 0 {
return &emptyStructEncoder{}
}
finalOrderedFields := []structFieldTo{}
for _, bindingTo := range orderedBindings {
if !bindingTo.ignored {
finalOrderedFields = append(finalOrderedFields, structFieldTo{
encoder: bindingTo.binding.Encoder.(*structFieldEncoder),
toName: bindingTo.toName,
})
}
}
return &structEncoder{typ, finalOrderedFields}
}
func createCheckIsEmpty(ctx *ctx, typ reflect2.Type) checkIsEmpty {
encoder := createEncoderOfNative(ctx, typ)
if encoder != nil {
return encoder
}
kind := typ.Kind()
switch kind {
case reflect.Interface:
return &dynamicEncoder{typ}
case reflect.Struct:
return &structEncoder{typ: typ}
case reflect.Array:
return &arrayEncoder{}
case reflect.Slice:
return &sliceEncoder{}
case reflect.Map:
return encoderOfMap(ctx, typ)
case reflect.Ptr:
return &OptionalEncoder{}
default:
return &lazyErrorEncoder{err: fmt.Errorf("unsupported type: %v", typ)}
}
}
func resolveConflictBinding(cfg *frozenConfig, old, new *Binding) (ignoreOld, ignoreNew bool) {
newTagged := new.Field.Tag().Get(cfg.getTagKey()) != ""
oldTagged := old.Field.Tag().Get(cfg.getTagKey()) != ""
if newTagged {
if oldTagged {
if len(old.levels) > len(new.levels) {
return true, false
} else if len(new.levels) > len(old.levels) {
return false, true
} else {
return true, true
}
} else {
return true, false
}
} else {
if oldTagged {
return true, false
}
if len(old.levels) > len(new.levels) {
return true, false
} else if len(new.levels) > len(old.levels) {
return false, true
} else {
return true, true
}
}
}
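resolveConflictBinding applies the usual embedded-field precedence: a tagged field beats an untagged one, a shallower field beats a deeper one, and an exact tie hides both. A hedged sketch with hypothetical Inner/Outer types:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type Inner struct {
	Name string // promoted through embedding: deeper and untagged
}

type Outer struct {
	Inner
	Name string `json:"Name"` // shallower and tagged, so it wins the "Name" key
}

func main() {
	var json = jsoniter.ConfigCompatibleWithStandardLibrary
	out, _ := json.Marshal(Outer{Inner: Inner{Name: "inner"}, Name: "outer"})
	fmt.Println(string(out)) // {"Name":"outer"}
}
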
type structFieldEncoder struct {
field reflect2.StructField
fieldEncoder ValEncoder
omitempty bool
}
func (encoder *structFieldEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
fieldPtr := encoder.field.UnsafeGet(ptr)
encoder.fieldEncoder.Encode(fieldPtr, stream)
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%s: %s", encoder.field.Name(), stream.Error.Error())
}
}
func (encoder *structFieldEncoder) IsEmpty(ptr unsafe.Pointer) bool {
fieldPtr := encoder.field.UnsafeGet(ptr)
return encoder.fieldEncoder.IsEmpty(fieldPtr)
}
func (encoder *structFieldEncoder) IsEmbeddedPtrNil(ptr unsafe.Pointer) bool {
isEmbeddedPtrNil, converted := encoder.fieldEncoder.(IsEmbeddedPtrNil)
if !converted {
return false
}
fieldPtr := encoder.field.UnsafeGet(ptr)
return isEmbeddedPtrNil.IsEmbeddedPtrNil(fieldPtr)
}
type IsEmbeddedPtrNil interface {
IsEmbeddedPtrNil(ptr unsafe.Pointer) bool
}
type structEncoder struct {
typ reflect2.Type
fields []structFieldTo
}
type structFieldTo struct {
encoder *structFieldEncoder
toName string
}
func (encoder *structEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteObjectStart()
isNotFirst := false
for _, field := range encoder.fields {
if field.encoder.omitempty && field.encoder.IsEmpty(ptr) {
continue
}
if field.encoder.IsEmbeddedPtrNil(ptr) {
continue
}
if isNotFirst {
stream.WriteMore()
}
stream.WriteObjectField(field.toName)
field.encoder.Encode(ptr, stream)
isNotFirst = true
}
stream.WriteObjectEnd()
if stream.Error != nil && stream.Error != io.EOF {
stream.Error = fmt.Errorf("%v.%s", encoder.typ, stream.Error.Error())
}
}
func (encoder *structEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return false
}
type emptyStructEncoder struct {
}
func (encoder *emptyStructEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.WriteEmptyObject()
}
func (encoder *emptyStructEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return false
}
type stringModeNumberEncoder struct {
elemEncoder ValEncoder
}
func (encoder *stringModeNumberEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
stream.writeByte('"')
encoder.elemEncoder.Encode(ptr, stream)
stream.writeByte('"')
}
func (encoder *stringModeNumberEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.elemEncoder.IsEmpty(ptr)
}
type stringModeStringEncoder struct {
elemEncoder ValEncoder
cfg *frozenConfig
}
func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
tempStream := encoder.cfg.BorrowStream(nil)
tempStream.Attachment = stream.Attachment
defer encoder.cfg.ReturnStream(tempStream)
encoder.elemEncoder.Encode(ptr, tempStream)
stream.WriteString(string(tempStream.Buffer()))
}
func (encoder *stringModeStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
return encoder.elemEncoder.IsEmpty(ptr)
}
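stringModeNumberEncoder quotes the numeric output and stringModeStringEncoder re-encodes an already-encoded string; these back the `,string` tag option. The descriptor code that selects them is not shown in this diff excerpt, so treat the tag wiring in this sketch as an assumption (the Account type is illustrative only):

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type Account struct {
	ID int64 `json:"id,string"`
}

func main() {
	var json = jsoniter.ConfigCompatibleWithStandardLibrary

	out, _ := json.Marshal(Account{ID: 42})
	fmt.Println(string(out)) // {"id":"42"}

	var a Account
	_ = json.Unmarshal([]byte(`{"id":"7"}`), &a)
	fmt.Println(a.ID) // 7
}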

210
vendor/github.com/json-iterator/go/stream.go generated vendored Normal file
View File

@ -0,0 +1,210 @@
package jsoniter
import (
"io"
)
// Stream is an io.Writer-like object with JSON-specific write functions.
// Errors are not returned as return values; they are stored in the Error field of the stream instance.
type Stream struct {
cfg *frozenConfig
out io.Writer
buf []byte
Error error
indention int
Attachment interface{} // open for customized encoder
}
// NewStream creates a new stream instance.
// cfg can be jsoniter.ConfigDefault.
// out can be nil to write to the internal buffer only.
// bufSize is the initial size of the internal buffer in bytes.
func NewStream(cfg API, out io.Writer, bufSize int) *Stream {
return &Stream{
cfg: cfg.(*frozenConfig),
out: out,
buf: make([]byte, 0, bufSize),
Error: nil,
indention: 0,
}
}
// Pool returns a pool that can provide more streams with the same configuration.
func (stream *Stream) Pool() StreamPool {
return stream.cfg
}
// Reset reuses this stream instance by assigning a new writer.
func (stream *Stream) Reset(out io.Writer) {
stream.out = out
stream.buf = stream.buf[:0]
}
// Available returns how many bytes are unused in the buffer.
func (stream *Stream) Available() int {
return cap(stream.buf) - len(stream.buf)
}
// Buffered returns the number of bytes that have been written into the current buffer.
func (stream *Stream) Buffered() int {
return len(stream.buf)
}
// Buffer returns the internal buffer; if the writer is nil, use this method to take the result.
func (stream *Stream) Buffer() []byte {
return stream.buf
}
// SetBuffer allows appending to the internal buffer directly.
func (stream *Stream) SetBuffer(buf []byte) {
stream.buf = buf
}
// Write writes the contents of p into the buffer.
// It returns the number of bytes written.
// If nn < len(p), it also returns an error explaining
// why the write is short.
func (stream *Stream) Write(p []byte) (nn int, err error) {
stream.buf = append(stream.buf, p...)
if stream.out != nil {
nn, err = stream.out.Write(stream.buf)
stream.buf = stream.buf[nn:]
return
}
return len(p), nil
}
// writeByte writes a single byte.
func (stream *Stream) writeByte(c byte) {
stream.buf = append(stream.buf, c)
}
func (stream *Stream) writeTwoBytes(c1 byte, c2 byte) {
stream.buf = append(stream.buf, c1, c2)
}
func (stream *Stream) writeThreeBytes(c1 byte, c2 byte, c3 byte) {
stream.buf = append(stream.buf, c1, c2, c3)
}
func (stream *Stream) writeFourBytes(c1 byte, c2 byte, c3 byte, c4 byte) {
stream.buf = append(stream.buf, c1, c2, c3, c4)
}
func (stream *Stream) writeFiveBytes(c1 byte, c2 byte, c3 byte, c4 byte, c5 byte) {
stream.buf = append(stream.buf, c1, c2, c3, c4, c5)
}
// Flush writes any buffered data to the underlying io.Writer.
func (stream *Stream) Flush() error {
if stream.out == nil {
return nil
}
if stream.Error != nil {
return stream.Error
}
_, err := stream.out.Write(stream.buf)
if err != nil {
if stream.Error == nil {
stream.Error = err
}
return err
}
stream.buf = stream.buf[:0]
return nil
}
// WriteRaw writes the string out without quotes, as raw bytes.
func (stream *Stream) WriteRaw(s string) {
stream.buf = append(stream.buf, s...)
}
// WriteNil write null to stream
func (stream *Stream) WriteNil() {
stream.writeFourBytes('n', 'u', 'l', 'l')
}
// WriteTrue write true to stream
func (stream *Stream) WriteTrue() {
stream.writeFourBytes('t', 'r', 'u', 'e')
}
// WriteFalse write false to stream
func (stream *Stream) WriteFalse() {
stream.writeFiveBytes('f', 'a', 'l', 's', 'e')
}
// WriteBool write true or false into stream
func (stream *Stream) WriteBool(val bool) {
if val {
stream.WriteTrue()
} else {
stream.WriteFalse()
}
}
// WriteObjectStart write { with possible indention
func (stream *Stream) WriteObjectStart() {
stream.indention += stream.cfg.indentionStep
stream.writeByte('{')
stream.writeIndention(0)
}
// WriteObjectField write "field": with possible indention
func (stream *Stream) WriteObjectField(field string) {
stream.WriteString(field)
if stream.indention > 0 {
stream.writeTwoBytes(':', ' ')
} else {
stream.writeByte(':')
}
}
// WriteObjectEnd write } with possible indention
func (stream *Stream) WriteObjectEnd() {
stream.writeIndention(stream.cfg.indentionStep)
stream.indention -= stream.cfg.indentionStep
stream.writeByte('}')
}
// WriteEmptyObject write {}
func (stream *Stream) WriteEmptyObject() {
stream.writeByte('{')
stream.writeByte('}')
}
// WriteMore write , with possible indention
func (stream *Stream) WriteMore() {
stream.writeByte(',')
stream.writeIndention(0)
}
// WriteArrayStart write [ with possible indention
func (stream *Stream) WriteArrayStart() {
stream.indention += stream.cfg.indentionStep
stream.writeByte('[')
stream.writeIndention(0)
}
// WriteEmptyArray write []
func (stream *Stream) WriteEmptyArray() {
stream.writeTwoBytes('[', ']')
}
// WriteArrayEnd write ] with possible indention
func (stream *Stream) WriteArrayEnd() {
stream.writeIndention(stream.cfg.indentionStep)
stream.indention -= stream.cfg.indentionStep
stream.writeByte(']')
}
func (stream *Stream) writeIndention(delta int) {
if stream.indention == 0 {
return
}
stream.writeByte('\n')
toWrite := stream.indention - delta
for i := 0; i < toWrite; i++ {
stream.buf = append(stream.buf, ' ')
}
}
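Stream can also be used directly to build JSON by hand; nothing reaches the underlying io.Writer until Flush (or Write) is called. A minimal sketch using only the methods shown above:

package main

import (
	"bytes"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var buf bytes.Buffer
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, &buf, 64)

	stream.WriteObjectStart()
	stream.WriteObjectField("src")
	stream.WriteString("origin")
	stream.WriteObjectEnd()

	if err := stream.Flush(); err != nil { // data reaches buf only on Flush
		fmt.Println("flush error:", err)
		return
	}
	fmt.Println(buf.String()) // {"src":"origin"}
}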

111
vendor/github.com/json-iterator/go/stream_float.go generated vendored Normal file
View File

@ -0,0 +1,111 @@
package jsoniter
import (
"fmt"
"math"
"strconv"
)
var pow10 []uint64
func init() {
pow10 = []uint64{1, 10, 100, 1000, 10000, 100000, 1000000}
}
// WriteFloat32 write float32 to stream
func (stream *Stream) WriteFloat32(val float32) {
if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
stream.Error = fmt.Errorf("unsupported value: %f", val)
return
}
abs := math.Abs(float64(val))
fmt := byte('f')
// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
if abs != 0 {
if float32(abs) < 1e-6 || float32(abs) >= 1e21 {
fmt = 'e'
}
}
stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 32)
}
// WriteFloat32Lossy writes a float32 to the stream with only 6 digits of precision; it is much faster than WriteFloat32.
func (stream *Stream) WriteFloat32Lossy(val float32) {
if math.IsInf(float64(val), 0) || math.IsNaN(float64(val)) {
stream.Error = fmt.Errorf("unsupported value: %f", val)
return
}
if val < 0 {
stream.writeByte('-')
val = -val
}
if val > 0x4ffffff {
stream.WriteFloat32(val)
return
}
precision := 6
exp := uint64(1000000) // 6
lval := uint64(float64(val)*float64(exp) + 0.5)
stream.WriteUint64(lval / exp)
fval := lval % exp
if fval == 0 {
return
}
stream.writeByte('.')
for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
stream.writeByte('0')
}
stream.WriteUint64(fval)
for stream.buf[len(stream.buf)-1] == '0' {
stream.buf = stream.buf[:len(stream.buf)-1]
}
}
// WriteFloat64 write float64 to stream
func (stream *Stream) WriteFloat64(val float64) {
if math.IsInf(val, 0) || math.IsNaN(val) {
stream.Error = fmt.Errorf("unsupported value: %f", val)
return
}
abs := math.Abs(val)
fmt := byte('f')
// Note: use exponent format for very small or very large magnitudes, matching encoding/json's cutoffs.
if abs != 0 {
if abs < 1e-6 || abs >= 1e21 {
fmt = 'e'
}
}
stream.buf = strconv.AppendFloat(stream.buf, float64(val), fmt, -1, 64)
}
// WriteFloat64Lossy writes a float64 to the stream with only 6 digits of precision; it is much faster than WriteFloat64.
func (stream *Stream) WriteFloat64Lossy(val float64) {
if math.IsInf(val, 0) || math.IsNaN(val) {
stream.Error = fmt.Errorf("unsupported value: %f", val)
return
}
if val < 0 {
stream.writeByte('-')
val = -val
}
if val > 0x4ffffff {
stream.WriteFloat64(val)
return
}
precision := 6
exp := uint64(1000000) // 6
lval := uint64(val*float64(exp) + 0.5)
stream.WriteUint64(lval / exp)
fval := lval % exp
if fval == 0 {
return
}
stream.writeByte('.')
for p := precision - 1; p > 0 && fval < pow10[p]; p-- {
stream.writeByte('0')
}
stream.WriteUint64(fval)
for stream.buf[len(stream.buf)-1] == '0' {
stream.buf = stream.buf[:len(stream.buf)-1]
}
}
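The Lossy variants trade strconv's shortest-round-trip formatting for a fixed-point encoding with at most six fractional digits. A quick comparison:

package main

import (
	"bytes"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var buf bytes.Buffer
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, &buf, 64)

	stream.WriteFloat64(1.0 / 3.0) // shortest exact representation via strconv
	stream.WriteRaw(" ")
	stream.WriteFloat64Lossy(1.0 / 3.0) // fixed-point, at most 6 fractional digits
	_ = stream.Flush()

	fmt.Println(buf.String()) // 0.3333333333333333 0.333333
}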

190
vendor/github.com/json-iterator/go/stream_int.go generated vendored Normal file
View File

@ -0,0 +1,190 @@
package jsoniter
var digits []uint32
func init() {
digits = make([]uint32, 1000)
for i := uint32(0); i < 1000; i++ {
digits[i] = (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
if i < 10 {
digits[i] += 2 << 24
} else if i < 100 {
digits[i] += 1 << 24
}
}
}
func writeFirstBuf(space []byte, v uint32) []byte {
start := v >> 24
if start == 0 {
space = append(space, byte(v>>16), byte(v>>8))
} else if start == 1 {
space = append(space, byte(v>>8))
}
space = append(space, byte(v))
return space
}
func writeBuf(buf []byte, v uint32) []byte {
return append(buf, byte(v>>16), byte(v>>8), byte(v))
}
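Each digits entry packs the three ASCII digit bytes of a value 0-999 into its low 24 bits, plus a count of leading zeros to skip in the high byte; writeFirstBuf honours that skip count for the most significant group, while writeBuf always emits three digits. A standalone re-derivation of the packing, for illustration only (packDigits is a hypothetical helper, not part of the library):

package main

import "fmt"

// packDigits mirrors the init loop above for a single value.
func packDigits(i uint32) uint32 {
	v := (((i / 100) + '0') << 16) + ((((i / 10) % 10) + '0') << 8) + i%10 + '0'
	if i < 10 {
		v += 2 << 24 // two leading zero digits to skip
	} else if i < 100 {
		v += 1 << 24 // one leading zero digit to skip
	}
	return v
}

func main() {
	v := packDigits(42)
	fmt.Println(v >> 24)                                     // 1: one leading digit is skipped
	fmt.Printf("%c%c%c\n", byte(v>>16), byte(v>>8), byte(v)) // 042 (writeFirstBuf would emit just "42")
}
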
// WriteUint8 write uint8 to stream
func (stream *Stream) WriteUint8(val uint8) {
stream.buf = writeFirstBuf(stream.buf, digits[val])
}
// WriteInt8 write int8 to stream
func (stream *Stream) WriteInt8(nval int8) {
var val uint8
if nval < 0 {
val = uint8(-nval)
stream.buf = append(stream.buf, '-')
} else {
val = uint8(nval)
}
stream.buf = writeFirstBuf(stream.buf, digits[val])
}
// WriteUint16 write uint16 to stream
func (stream *Stream) WriteUint16(val uint16) {
q1 := val / 1000
if q1 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[val])
return
}
r1 := val - q1*1000
stream.buf = writeFirstBuf(stream.buf, digits[q1])
stream.buf = writeBuf(stream.buf, digits[r1])
return
}
// WriteInt16 write int16 to stream
func (stream *Stream) WriteInt16(nval int16) {
var val uint16
if nval < 0 {
val = uint16(-nval)
stream.buf = append(stream.buf, '-')
} else {
val = uint16(nval)
}
stream.WriteUint16(val)
}
// WriteUint32 write uint32 to stream
func (stream *Stream) WriteUint32(val uint32) {
q1 := val / 1000
if q1 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[val])
return
}
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[q1])
stream.buf = writeBuf(stream.buf, digits[r1])
return
}
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[q2])
} else {
r3 := q2 - q3*1000
stream.buf = append(stream.buf, byte(q3+'0'))
stream.buf = writeBuf(stream.buf, digits[r3])
}
stream.buf = writeBuf(stream.buf, digits[r2])
stream.buf = writeBuf(stream.buf, digits[r1])
}
// WriteInt32 write int32 to stream
func (stream *Stream) WriteInt32(nval int32) {
var val uint32
if nval < 0 {
val = uint32(-nval)
stream.buf = append(stream.buf, '-')
} else {
val = uint32(nval)
}
stream.WriteUint32(val)
}
// WriteUint64 write uint64 to stream
func (stream *Stream) WriteUint64(val uint64) {
q1 := val / 1000
if q1 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[val])
return
}
r1 := val - q1*1000
q2 := q1 / 1000
if q2 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[q1])
stream.buf = writeBuf(stream.buf, digits[r1])
return
}
r2 := q1 - q2*1000
q3 := q2 / 1000
if q3 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[q2])
stream.buf = writeBuf(stream.buf, digits[r2])
stream.buf = writeBuf(stream.buf, digits[r1])
return
}
r3 := q2 - q3*1000
q4 := q3 / 1000
if q4 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[q3])
stream.buf = writeBuf(stream.buf, digits[r3])
stream.buf = writeBuf(stream.buf, digits[r2])
stream.buf = writeBuf(stream.buf, digits[r1])
return
}
r4 := q3 - q4*1000
q5 := q4 / 1000
if q5 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[q4])
stream.buf = writeBuf(stream.buf, digits[r4])
stream.buf = writeBuf(stream.buf, digits[r3])
stream.buf = writeBuf(stream.buf, digits[r2])
stream.buf = writeBuf(stream.buf, digits[r1])
return
}
r5 := q4 - q5*1000
q6 := q5 / 1000
if q6 == 0 {
stream.buf = writeFirstBuf(stream.buf, digits[q5])
} else {
stream.buf = writeFirstBuf(stream.buf, digits[q6])
r6 := q5 - q6*1000
stream.buf = writeBuf(stream.buf, digits[r6])
}
stream.buf = writeBuf(stream.buf, digits[r5])
stream.buf = writeBuf(stream.buf, digits[r4])
stream.buf = writeBuf(stream.buf, digits[r3])
stream.buf = writeBuf(stream.buf, digits[r2])
stream.buf = writeBuf(stream.buf, digits[r1])
}
// WriteInt64 write int64 to stream
func (stream *Stream) WriteInt64(nval int64) {
var val uint64
if nval < 0 {
val = uint64(-nval)
stream.buf = append(stream.buf, '-')
} else {
val = uint64(nval)
}
stream.WriteUint64(val)
}
// WriteInt write int to stream
func (stream *Stream) WriteInt(val int) {
stream.WriteInt64(int64(val))
}
// WriteUint write uint to stream
func (stream *Stream) WriteUint(val uint) {
stream.WriteUint64(uint64(val))
}

372
vendor/github.com/json-iterator/go/stream_str.go generated vendored Normal file
View File

@ -0,0 +1,372 @@
package jsoniter
import (
"unicode/utf8"
)
// htmlSafeSet holds the value true if the ASCII character with the given
// array position can be safely represented inside a JSON string, embedded
// inside of HTML <script> tags, without any additional escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), the backslash character ("\"), HTML opening and closing
// tags ("<" and ">"), and the ampersand ("&").
var htmlSafeSet = [utf8.RuneSelf]bool{
' ': true,
'!': true,
'"': false,
'#': true,
'$': true,
'%': true,
'&': false,
'\'': true,
'(': true,
')': true,
'*': true,
'+': true,
',': true,
'-': true,
'.': true,
'/': true,
'0': true,
'1': true,
'2': true,
'3': true,
'4': true,
'5': true,
'6': true,
'7': true,
'8': true,
'9': true,
':': true,
';': true,
'<': false,
'=': true,
'>': false,
'?': true,
'@': true,
'A': true,
'B': true,
'C': true,
'D': true,
'E': true,
'F': true,
'G': true,
'H': true,
'I': true,
'J': true,
'K': true,
'L': true,
'M': true,
'N': true,
'O': true,
'P': true,
'Q': true,
'R': true,
'S': true,
'T': true,
'U': true,
'V': true,
'W': true,
'X': true,
'Y': true,
'Z': true,
'[': true,
'\\': false,
']': true,
'^': true,
'_': true,
'`': true,
'a': true,
'b': true,
'c': true,
'd': true,
'e': true,
'f': true,
'g': true,
'h': true,
'i': true,
'j': true,
'k': true,
'l': true,
'm': true,
'n': true,
'o': true,
'p': true,
'q': true,
'r': true,
's': true,
't': true,
'u': true,
'v': true,
'w': true,
'x': true,
'y': true,
'z': true,
'{': true,
'|': true,
'}': true,
'~': true,
'\u007f': true,
}
// safeSet holds the value true if the ASCII character with the given array
// position can be represented inside a JSON string without any further
// escaping.
//
// All values are true except for the ASCII control characters (0-31), the
// double quote ("), and the backslash character ("\").
var safeSet = [utf8.RuneSelf]bool{
' ': true,
'!': true,
'"': false,
'#': true,
'$': true,
'%': true,
'&': true,
'\'': true,
'(': true,
')': true,
'*': true,
'+': true,
',': true,
'-': true,
'.': true,
'/': true,
'0': true,
'1': true,
'2': true,
'3': true,
'4': true,
'5': true,
'6': true,
'7': true,
'8': true,
'9': true,
':': true,
';': true,
'<': true,
'=': true,
'>': true,
'?': true,
'@': true,
'A': true,
'B': true,
'C': true,
'D': true,
'E': true,
'F': true,
'G': true,
'H': true,
'I': true,
'J': true,
'K': true,
'L': true,
'M': true,
'N': true,
'O': true,
'P': true,
'Q': true,
'R': true,
'S': true,
'T': true,
'U': true,
'V': true,
'W': true,
'X': true,
'Y': true,
'Z': true,
'[': true,
'\\': false,
']': true,
'^': true,
'_': true,
'`': true,
'a': true,
'b': true,
'c': true,
'd': true,
'e': true,
'f': true,
'g': true,
'h': true,
'i': true,
'j': true,
'k': true,
'l': true,
'm': true,
'n': true,
'o': true,
'p': true,
'q': true,
'r': true,
's': true,
't': true,
'u': true,
'v': true,
'w': true,
'x': true,
'y': true,
'z': true,
'{': true,
'|': true,
'}': true,
'~': true,
'\u007f': true,
}
var hex = "0123456789abcdef"
// WriteStringWithHTMLEscaped writes a string to the stream with HTML special characters escaped
func (stream *Stream) WriteStringWithHTMLEscaped(s string) {
valLen := len(s)
stream.buf = append(stream.buf, '"')
// write string, the fast path, without utf8 and escape support
i := 0
for ; i < valLen; i++ {
c := s[i]
if c < utf8.RuneSelf && htmlSafeSet[c] {
stream.buf = append(stream.buf, c)
} else {
break
}
}
if i == valLen {
stream.buf = append(stream.buf, '"')
return
}
writeStringSlowPathWithHTMLEscaped(stream, i, s, valLen)
}
func writeStringSlowPathWithHTMLEscaped(stream *Stream, i int, s string, valLen int) {
start := i
// for the remaining parts, we process them char by char
for i < valLen {
if b := s[i]; b < utf8.RuneSelf {
if htmlSafeSet[b] {
i++
continue
}
if start < i {
stream.WriteRaw(s[start:i])
}
switch b {
case '\\', '"':
stream.writeTwoBytes('\\', b)
case '\n':
stream.writeTwoBytes('\\', 'n')
case '\r':
stream.writeTwoBytes('\\', 'r')
case '\t':
stream.writeTwoBytes('\\', 't')
default:
// This encodes bytes < 0x20 except for \t, \n and \r.
// If escapeHTML is set, it also escapes <, >, and &
// because they can lead to security holes when
// user-controlled strings are rendered into JSON
// and served to some browsers.
stream.WriteRaw(`\u00`)
stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
}
i++
start = i
continue
}
c, size := utf8.DecodeRuneInString(s[i:])
if c == utf8.RuneError && size == 1 {
if start < i {
stream.WriteRaw(s[start:i])
}
stream.WriteRaw(`\ufffd`)
i++
start = i
continue
}
// U+2028 is LINE SEPARATOR.
// U+2029 is PARAGRAPH SEPARATOR.
// They are both technically valid characters in JSON strings,
// but don't work in JSONP, which has to be evaluated as JavaScript,
// and can lead to security holes there. It is valid JSON to
// escape them, so we do so unconditionally.
// See http://timelessrepo.com/json-isnt-a-javascript-subset for discussion.
if c == '\u2028' || c == '\u2029' {
if start < i {
stream.WriteRaw(s[start:i])
}
stream.WriteRaw(`\u202`)
stream.writeByte(hex[c&0xF])
i += size
start = i
continue
}
i += size
}
if start < len(s) {
stream.WriteRaw(s[start:])
}
stream.writeByte('"')
}
// WriteString writes a string to the stream without HTML escaping
func (stream *Stream) WriteString(s string) {
valLen := len(s)
stream.buf = append(stream.buf, '"')
// write string, the fast path, without utf8 and escape support
i := 0
for ; i < valLen; i++ {
c := s[i]
if c > 31 && c != '"' && c != '\\' {
stream.buf = append(stream.buf, c)
} else {
break
}
}
if i == valLen {
stream.buf = append(stream.buf, '"')
return
}
writeStringSlowPath(stream, i, s, valLen)
}
func writeStringSlowPath(stream *Stream, i int, s string, valLen int) {
start := i
// for the remaining parts, we process them char by char
for i < valLen {
if b := s[i]; b < utf8.RuneSelf {
if safeSet[b] {
i++
continue
}
if start < i {
stream.WriteRaw(s[start:i])
}
switch b {
case '\\', '"':
stream.writeTwoBytes('\\', b)
case '\n':
stream.writeTwoBytes('\\', 'n')
case '\r':
stream.writeTwoBytes('\\', 'r')
case '\t':
stream.writeTwoBytes('\\', 't')
default:
// This encodes bytes < 0x20 except for \t, \n and \r.
// If escapeHTML is set, it also escapes <, >, and &
// because they can lead to security holes when
// user-controlled strings are rendered into JSON
// and served to some browsers.
stream.WriteRaw(`\u00`)
stream.writeTwoBytes(hex[b>>4], hex[b&0xF])
}
i++
start = i
continue
}
i++
continue
}
if start < len(s) {
stream.WriteRaw(s[start:])
}
stream.writeByte('"')
}
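WriteString escapes only quotes, backslashes, and control bytes, while WriteStringWithHTMLEscaped additionally escapes <, >, and & (plus invalid UTF-8 and U+2028/U+2029) so the output is safe to embed in HTML. A short comparison:

package main

import (
	"bytes"
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	var buf bytes.Buffer
	stream := jsoniter.NewStream(jsoniter.ConfigDefault, &buf, 64)

	stream.WriteString(`<b> & "q"`) // only " , \ and control bytes are escaped
	stream.WriteRaw(" ")
	stream.WriteStringWithHTMLEscaped(`<b> & "q"`) // <, > and & become \u003c, \u003e, \u0026
	_ = stream.Flush()

	fmt.Println(buf.String()) // "<b> & \"q\"" "\u003cb\u003e \u0026 \"q\""
}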

12
vendor/github.com/json-iterator/go/test.sh generated vendored Normal file
View File

@ -0,0 +1,12 @@
#!/usr/bin/env bash
set -e
echo "" > coverage.txt
for d in $(go list ./... | grep -v vendor); do
go test -coverprofile=profile.out -coverpkg=github.com/json-iterator/go $d
if [ -f profile.out ]; then
cat profile.out >> coverage.txt
rm profile.out
fi
done

1
vendor/github.com/modern-go/concurrent/.gitignore generated vendored Normal file
View File

@ -0,0 +1 @@
/coverage.txt

Some files were not shown because too many files have changed in this diff.