TUN-820: Fix caddyfile gitignore

Areg Harutyunyan 2018-08-07 14:34:09 -05:00
parent a7d9747fae
commit 542bd90491
14 changed files with 1271 additions and 40 deletions

179
Gopkg.lock generated
View File

@ -2,31 +2,40 @@
[[projects]]
branch = "master"
name = "code.cfops.it/go/brotli"
packages = ["."]
revision = "18c9f6c67e3dfc12e0ddaca748d2887f97a7ac28"
[[projects]]
digest = "1:b16fbfbcc20645cb419f78325bb2e85ec729b338e996a228124d68931a6f2a37"
name = "github.com/BurntSushi/toml"
packages = ["."]
pruneopts = "UT"
revision = "b26d9c308763d68093482582cea63d69be07a0f0"
version = "v0.3.0"
[[projects]]
branch = "master"
digest = "1:d6afaeed1502aa28e80a4ed0981d570ad91b2579193404256ce672ed0a609e0d"
name = "github.com/beorn7/perks"
packages = ["quantile"]
pruneopts = "UT"
revision = "3a771d992973f24aa725d07868b467d1ddfceafb"
[[projects]]
digest = "1:fed1f537c2f1269fe475a8556c393fe466641682d73ef8fd0491cd3aa1e47bad"
name = "github.com/certifi/gocertifi"
packages = ["."]
pruneopts = "UT"
revision = "deb3ae2ef2610fde3330947281941c562861188b"
version = "2018.01.18"
[[projects]]
branch = "master"
digest = "1:35a3540009bb774a9999458966abc4bbdfd830c0662d7d73a66f4a324dd94e77"
name = "github.com/cloudflare/brotli-go"
packages = ["."]
pruneopts = "UT"
revision = "18c9f6c67e3dfc12e0ddaca748d2887f97a7ac28"
[[projects]]
branch = "master"
digest = "1:bbacd47c5969077c96bedf231d500ede08aa8fe5bc8e596539026694f18bb799"
name = "github.com/coredns/coredns"
packages = [
"core/dnsserver",
@ -52,24 +61,30 @@
"plugin/pkg/uniq",
"plugin/pkg/watch",
"plugin/test",
"request"
"request",
]
revision = "992e7928c7c258628d2b13b769acc86781b9faea"
pruneopts = "UT"
revision = "18a77cd04557b810eba96a7239d39ee2d7a92157"
[[projects]]
digest = "1:1da3a221f0bc090792d3a2a080ff09008427c0e0f0533a4ed6abd8994421da73"
name = "github.com/coreos/go-systemd"
packages = ["daemon"]
pruneopts = "UT"
revision = "39ca1b05acc7ad1220e09f133283b8859a8b71ab"
version = "v17"
[[projects]]
digest = "1:a2c1d0e43bd3baaa071d1b9ed72c27d78169b2b269f71c105ac4ba34b1be4a39"
name = "github.com/davecgh/go-spew"
packages = ["spew"]
pruneopts = "UT"
revision = "346938d642f2ec3594ed81d874461961cd0faa76"
version = "v1.1.0"
[[projects]]
branch = "master"
digest = "1:15eadee5930d325154e5e1f3c4f4b28ab62f9c0aa949580e8d9edabaf0b64e85"
name = "github.com/equinox-io/equinox"
packages = [
".",
@ -77,192 +92,244 @@
"internal/go-update/internal/binarydist",
"internal/go-update/internal/osext",
"internal/osext",
"proto"
"proto",
]
pruneopts = "UT"
revision = "f24972fa72facf59d05c91c848b65eac38815915"
[[projects]]
branch = "master"
digest = "1:433763f10d88dba9b533a7ea2fe9f5ee11e57e00306eb97a1f6090fd978e8fa1"
name = "github.com/facebookgo/grace"
packages = ["gracenet"]
pruneopts = "UT"
revision = "75cf19382434e82df4dd84953f566b8ad23d6e9e"
[[projects]]
branch = "master"
digest = "1:50a46ab1d5edbbdd55125b4d37f1bf503d0807c26461f9ad7b358d6006641d09"
name = "github.com/flynn/go-shlex"
packages = ["."]
pruneopts = "UT"
revision = "3f9db97f856818214da2e1057f8ad84803971cff"
[[projects]]
branch = "master"
digest = "1:d4623fc7bf7e281d9107367cc4a9e76ed3e86b1eec1a4e30630c870bef1fedd0"
name = "github.com/getsentry/raven-go"
packages = ["."]
pruneopts = "UT"
revision = "ed7bcb39ff10f39ab08e317ce16df282845852fa"
[[projects]]
branch = "master"
digest = "1:3e6afc3ed8a72949aa735c00fddc23427dc9384ccfd51cf0d91a412e668da632"
name = "github.com/golang-collections/collections"
packages = ["queue"]
pruneopts = "UT"
revision = "604e922904d35e97f98a774db7881f049cd8d970"
[[projects]]
digest = "1:17fe264ee908afc795734e8c4e63db2accabaf57326dbf21763a7d6b86096260"
name = "github.com/golang/protobuf"
packages = [
"proto",
"ptypes",
"ptypes/any",
"ptypes/duration",
"ptypes/timestamp"
"ptypes/timestamp",
]
pruneopts = "UT"
revision = "b4deda0973fb4c70b50d226b1af49f3da59f5265"
version = "v1.1.0"
[[projects]]
digest = "1:8f8811f9be822914c3a25c6a071e93beb4c805d7b026cbf298bc577bc1cc945b"
name = "github.com/google/uuid"
packages = ["."]
pruneopts = "UT"
revision = "064e2069ce9c359c118179501254f67d7d37ba24"
version = "0.2"
[[projects]]
digest = "1:43dd08a10854b2056e615d1b1d22ac94559d822e1f8b6fcc92c1a1057e85188e"
name = "github.com/gorilla/websocket"
packages = ["."]
pruneopts = "UT"
revision = "ea4d1f681babbce9545c9c5f3d5194a789c89f5b"
version = "v1.2.0"
[[projects]]
branch = "master"
digest = "1:1a1206efd03a54d336dce7bb8719e74f2f8932f661cb9f57d5813a1d99c083d8"
name = "github.com/grpc-ecosystem/grpc-opentracing"
packages = ["go/otgrpc"]
pruneopts = "UT"
revision = "8e809c8a86450a29b90dcc9efbf062d0fe6d9746"
[[projects]]
digest = "1:c658e84ad3916da105a761660dcaeb01e63416c8ec7bc62256a9b411a05fcd67"
name = "github.com/mattn/go-colorable"
packages = ["."]
pruneopts = "UT"
revision = "167de6bfdfba052fa6b2d3664c8f5272e23c9072"
version = "v0.0.9"
[[projects]]
digest = "1:d4d17353dbd05cb52a2a52b7fe1771883b682806f68db442b436294926bbfafb"
name = "github.com/mattn/go-isatty"
packages = ["."]
pruneopts = "UT"
revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39"
version = "v0.0.3"
[[projects]]
digest = "1:ff5ebae34cfbf047d505ee150de27e60570e8c394b3b8fdbb720ff6ac71985fc"
name = "github.com/matttproud/golang_protobuf_extensions"
packages = ["pbutil"]
pruneopts = "UT"
revision = "c12348ce28de40eed0136aa2b644d0ee0650e56c"
version = "v1.0.1"
[[projects]]
branch = "master"
digest = "1:75fa16a231ef40da3e462d651c20b9df20bde0777bdc1ac0982242c79057ee71"
name = "github.com/mholt/caddy"
packages = [
".",
"caddyfile",
"telemetry"
"telemetry",
]
revision = "1f7b5abc80679fb71ee0e04ed98cbe284b1fc181"
version = "v0.11.0"
pruneopts = "UT"
revision = "3e0695ee31819685b2368f449ab65625256d2518"
[[projects]]
digest = "1:463e4140189f8194f9121ca1c7fe3b8e9e9a2ab3d949b43c835c21034927dc62"
name = "github.com/miekg/dns"
packages = ["."]
pruneopts = "UT"
revision = "5a2b9fab83ff0f8bfc99684bd5f43a37abe560f1"
version = "v1.0.8"
[[projects]]
branch = "master"
digest = "1:8eb17c2ec4df79193ae65b621cd1c0c4697db3bc317fe6afdc76d7f2746abd05"
name = "github.com/mitchellh/go-homedir"
packages = ["."]
pruneopts = "UT"
revision = "3864e76763d94a6df2f9960b16a20a33da9f9a66"
[[projects]]
digest = "1:450b7623b185031f3a456801155c8320209f75d0d4c4e633c6b1e59d44d6e392"
name = "github.com/opentracing/opentracing-go"
packages = [
".",
"ext",
"log"
"log",
]
pruneopts = "UT"
revision = "1949ddbfd147afd4d964a9f00b24eb291e0e7c38"
version = "v1.0.2"
[[projects]]
digest = "1:40e195917a951a8bf867cd05de2a46aaf1806c50cf92eebf4c16f78cd196f747"
name = "github.com/pkg/errors"
packages = ["."]
pruneopts = "UT"
revision = "645ef00459ed84a119197bfb8d8205042c6df63d"
version = "v0.8.0"
[[projects]]
digest = "1:0028cb19b2e4c3112225cd871870f2d9cf49b9b4276531f03438a88e94be86fe"
name = "github.com/pmezard/go-difflib"
packages = ["difflib"]
pruneopts = "UT"
revision = "792786c7400a136282c1664665ae0a8db921c6c2"
version = "v1.0.0"
[[projects]]
digest = "1:c968b29db5d68ec97de404b6d058d5937fa015a141b3b4f7a0d87d5f8226f04c"
name = "github.com/prometheus/client_golang"
packages = [
"prometheus",
"prometheus/promhttp"
"prometheus/promhttp",
]
pruneopts = "UT"
revision = "967789050ba94deca04a5e84cce8ad472ce313c1"
version = "v0.9.0-pre1"
[[projects]]
branch = "master"
digest = "1:32d10bdfa8f09ecf13598324dba86ab891f11db3c538b6a34d1c3b5b99d7c36b"
name = "github.com/prometheus/client_model"
packages = ["go"]
pruneopts = "UT"
revision = "99fa1f4be8e564e8a6b613da7fa6f46c9edafc6c"
[[projects]]
branch = "master"
digest = "1:e469cd65badf7694aeb44874518606d93c1d59e7735d3754ad442782437d3cc3"
name = "github.com/prometheus/common"
packages = [
"expfmt",
"internal/bitbucket.org/ww/goautoneg",
"model"
"model",
]
pruneopts = "UT"
revision = "7600349dcfe1abd18d72d3a1770870d9800a7801"
[[projects]]
branch = "master"
digest = "1:20d9bb50dbee172242f9bcd6ec24a917dd7a5bb17421bf16a79c33111dea7db1"
name = "github.com/prometheus/procfs"
packages = [
".",
"internal/util",
"nfs",
"xfs"
"xfs",
]
pruneopts = "UT"
revision = "ae68e2d4c00fed4943b5f6698d504a5fe083da8a"
[[projects]]
digest = "1:9a6f766efd8d5752adb7052aebb6e3d85255b31a8dff5e58ab4efa740ba9efa0"
name = "github.com/rifflock/lfshook"
packages = ["."]
pruneopts = "UT"
revision = "bf539943797a1f34c1f502d07de419b5238ae6c6"
version = "v2.3"
[[projects]]
digest = "1:9e9193aa51197513b3abcb108970d831fbcf40ef96aa845c4f03276e1fa316d2"
name = "github.com/sirupsen/logrus"
packages = ["."]
pruneopts = "UT"
revision = "c155da19408a8799da419ed3eeb0cb5db0ad5dbc"
version = "v1.0.5"
[[projects]]
digest = "1:18752d0b95816a1b777505a97f71c7467a8445b8ffb55631a7bf779f6ba4fa83"
name = "github.com/stretchr/testify"
packages = ["assert"]
pruneopts = "UT"
revision = "f35b8ab0b5a2cef36673838d662e249dd9c94686"
version = "v1.2.2"
[[projects]]
branch = "master"
digest = "1:583f2f436bab7b59a7bd3e759f1375b06f460760ed1f9235604d143eaab83009"
name = "golang.org/x/crypto"
packages = [
"ed25519",
"ed25519/internal/edwards25519",
"ssh/terminal"
"ssh/terminal",
]
pruneopts = "UT"
revision = "a49355c7e3f8fe157a85be2f77e6e269a0f89602"
[[projects]]
branch = "master"
digest = "1:011ee76ffb757c7c91a3e61cf5d6f367d1fd5dd08e16a4e6ae688e7759f8509a"
name = "golang.org/x/net"
packages = [
"bpf",
@ -277,18 +344,22 @@
"ipv4",
"ipv6",
"trace",
"websocket"
"websocket",
]
pruneopts = "UT"
revision = "32a936f46389aa10549d60bd7833e54b01685d09"
[[projects]]
branch = "master"
digest = "1:39ebcc2b11457b703ae9ee2e8cca0f68df21969c6102cb3b705f76cca0ea0239"
name = "golang.org/x/sync"
packages = ["errgroup"]
pruneopts = "UT"
revision = "1d60e4601c6fd243af51cc01ddf169918a5407ca"
[[projects]]
branch = "master"
digest = "1:2e8e74aa73847379a370c2704ff08baf4a7303ae682766e70c23ded3fd9b3f37"
name = "golang.org/x/sys"
packages = [
"unix",
@ -296,11 +367,13 @@
"windows/registry",
"windows/svc",
"windows/svc/eventlog",
"windows/svc/mgr"
"windows/svc/mgr",
]
pruneopts = "UT"
revision = "ce36f3865eeb42541ce3f87f32f8462c5687befa"
[[projects]]
digest = "1:a2ab62866c75542dd18d2b069fec854577a20211d7c0ea6ae746072a1dccdd18"
name = "golang.org/x/text"
packages = [
"collate",
@ -316,18 +389,22 @@
"unicode/bidi",
"unicode/cldr",
"unicode/norm",
"unicode/rangetable"
"unicode/rangetable",
]
pruneopts = "UT"
revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0"
version = "v0.3.0"
[[projects]]
branch = "master"
digest = "1:601e63e7d4577f907118bec825902505291918859d223bce015539e79f1160e3"
name = "google.golang.org/genproto"
packages = ["googleapis/rpc/status"]
pruneopts = "UT"
revision = "ff3583edef7de132f219f0efc00e097cabcc0ec0"
[[projects]]
digest = "1:2dab32a43451e320e49608ff4542fdfc653c95dcc35d0065ec9c6c3dd540ed74"
name = "google.golang.org/grpc"
packages = [
".",
@ -354,28 +431,34 @@
"stats",
"status",
"tap",
"transport"
"transport",
]
pruneopts = "UT"
revision = "168a6198bcb0ef175f7dacec0b8691fc141dc9b8"
version = "v1.13.0"
[[projects]]
branch = "altsrc-parse-durations"
digest = "1:0370b1bceda03dbfade3abbde639a43f1113bab711ec760452e5c0dcc0c14787"
name = "gopkg.in/urfave/cli.v2"
packages = [
".",
"altsrc"
"altsrc",
]
pruneopts = "UT"
revision = "d604b6ffeee878fbf084fd2761466b6649989cee"
source = "https://github.com/cbranch/cli"
[[projects]]
digest = "1:342378ac4dcb378a5448dd723f0784ae519383532f5e70ade24132c4c8693202"
name = "gopkg.in/yaml.v2"
packages = ["."]
pruneopts = "UT"
revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183"
version = "v2.2.1"
[[projects]]
digest = "1:8ffc3ddc31414c0a71220957bb723b16510d7fcb5b3880dc0da4cf6d39c31642"
name = "zombiezen.com/go/capnproto2"
packages = [
".",
@ -391,8 +474,9 @@
"rpc/internal/refcount",
"schemas",
"server",
"std/capnp/rpc"
"std/capnp/rpc",
]
pruneopts = "UT"
revision = "7cfd211c19c7f5783c695f3654efa46f0df259c3"
source = "https://github.com/zombiezen/go-capnproto2"
version = "v2.17.1"
@ -400,6 +484,51 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "a2d109989dfc76050eeb638546c012e192b5dc71d248c9d27cfa3f3282927966"
input-imports = [
"github.com/cloudflare/brotli-go",
"github.com/coredns/coredns/core/dnsserver",
"github.com/coredns/coredns/plugin",
"github.com/coredns/coredns/plugin/cache",
"github.com/coredns/coredns/plugin/metrics/vars",
"github.com/coredns/coredns/plugin/pkg/dnstest",
"github.com/coredns/coredns/plugin/pkg/rcode",
"github.com/coredns/coredns/request",
"github.com/coreos/go-systemd/daemon",
"github.com/equinox-io/equinox",
"github.com/facebookgo/grace/gracenet",
"github.com/getsentry/raven-go",
"github.com/golang-collections/collections/queue",
"github.com/gorilla/websocket",
"github.com/mattn/go-colorable",
"github.com/miekg/dns",
"github.com/mitchellh/go-homedir",
"github.com/pkg/errors",
"github.com/prometheus/client_golang/prometheus",
"github.com/prometheus/client_golang/prometheus/promhttp",
"github.com/rifflock/lfshook",
"github.com/sirupsen/logrus",
"github.com/stretchr/testify/assert",
"golang.org/x/crypto/ssh/terminal",
"golang.org/x/net/context",
"golang.org/x/net/http2",
"golang.org/x/net/http2/hpack",
"golang.org/x/net/idna",
"golang.org/x/net/trace",
"golang.org/x/net/websocket",
"golang.org/x/sync/errgroup",
"golang.org/x/sys/windows",
"golang.org/x/sys/windows/svc",
"golang.org/x/sys/windows/svc/eventlog",
"golang.org/x/sys/windows/svc/mgr",
"gopkg.in/urfave/cli.v2",
"gopkg.in/urfave/cli.v2/altsrc",
"zombiezen.com/go/capnproto2",
"zombiezen.com/go/capnproto2/encoding/text",
"zombiezen.com/go/capnproto2/pogs",
"zombiezen.com/go/capnproto2/rpc",
"zombiezen.com/go/capnproto2/schemas",
"zombiezen.com/go/capnproto2/server",
"zombiezen.com/go/capnproto2/std/capnp/rpc",
]
solver-name = "gps-cdcl"
solver-version = 1

View File

@ -50,3 +50,7 @@
[[constraint]]
name = "github.com/coredns/coredns"
branch = "master"
[[override]]
name = "github.com/mholt/caddy"
branch = "master"

View File

@ -125,7 +125,8 @@ func SplitHostPort(s string) (host, port string, ipnet *net.IPNet, err error) {
// Get the first lower octet boundary to see what encompassing zone we should be authoritative for.
mod := (bits - ones) % sizeDigit
nearest := (bits - ones) + mod
offset, end := 0, false
offset := 0
var end bool
for i := 0; i < nearest/sizeDigit; i++ {
offset, end = dns.NextLabel(rev, offset)
if end {

View File

@ -263,12 +263,16 @@ func (r *Request) Scrub(reply *dns.Msg) (*dns.Msg, Result) {
// Account for the OPT record that gets added in SizeAndDo(), subtract that length.
sub := 0
if r.Do() {
if r.Req.IsEdns0() != nil {
sub = optLen
}
origExtra := reply.Extra
// subtract to make space for the re-added EDNS0 OPT RR.
re := len(reply.Extra) - sub
size -= sub
l, m := 0, 0
origExtra := reply.Extra
for l < re {
m = (l + re) / 2
reply.Extra = origExtra[:m]
@ -297,9 +301,9 @@ func (r *Request) Scrub(reply *dns.Msg) (*dns.Msg, Result) {
return reply, ScrubExtra
}
origAnswer := reply.Answer
ra := len(reply.Answer)
l, m = 0, 0
origAnswer := reply.Answer
for l < ra {
m = (l + ra) / 2
reply.Answer = origAnswer[:m]
@ -324,14 +328,12 @@ func (r *Request) Scrub(reply *dns.Msg) (*dns.Msg, Result) {
// this extra m-1 step does make it fit in the client's buffer however.
}
// It now fits, but Truncated. We can't call sizeAndDo() because that adds a new record (OPT)
// in the additional section.
r.SizeAndDo(reply)
reply.Truncated = true
return reply, ScrubAnswer
}
// Type returns the type of the question as a string. If the request is malformed
// the empty string is returned.
// Type returns the type of the question as a string. If the request is malformed the empty string is returned.
func (r *Request) Type() string {
if r.Req == nil {
return ""

View File

@ -13,9 +13,10 @@ access.log
/*.conf
Caddyfile
!caddyfile/
og_static/
.vscode/
*.bat
*.bat

View File

@ -70,6 +70,8 @@ Then make sure the `caddy` binary is in your PATH.
To build for other platforms, use build.go with the `--goos` and `--goarch` flags.
When building from source, telemetry is enabled by default. You can disable it by changing `enableTelemetry` in run.go before compiling, or use the `-disabled-metrics` flag at runtime to disable only certain metrics.
## Quick Start

View File

@ -12,8 +12,8 @@ environment:
install:
- rmdir c:\go /s /q
- appveyor DownloadFile https://storage.googleapis.com/golang/go1.9.windows-amd64.zip
- 7z x go1.9.windows-amd64.zip -y -oC:\ > NUL
- appveyor DownloadFile https://storage.googleapis.com/golang/go1.10.windows-amd64.zip
- 7z x go1.10.windows-amd64.zip -y -oC:\ > NUL
- set PATH=%GOPATH%\bin;%PATH%
- go version
- go env

260
vendor/github.com/mholt/caddy/caddyfile/dispenser.go generated vendored Normal file
View File

@ -0,0 +1,260 @@
// Copyright 2015 Light Code Labs, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package caddyfile
import (
"errors"
"fmt"
"io"
"strings"
)
// Dispenser is a type that dispenses tokens, similarly to a lexer,
// except that it can do so with some notion of structure and has
// some really convenient methods.
type Dispenser struct {
filename string
tokens []Token
cursor int
nesting int
}
// NewDispenser returns a Dispenser, ready to use for parsing the given input.
func NewDispenser(filename string, input io.Reader) Dispenser {
tokens, _ := allTokens(input) // ignoring error because nothing to do with it
return Dispenser{
filename: filename,
tokens: tokens,
cursor: -1,
}
}
// NewDispenserTokens returns a Dispenser filled with the given tokens.
func NewDispenserTokens(filename string, tokens []Token) Dispenser {
return Dispenser{
filename: filename,
tokens: tokens,
cursor: -1,
}
}
// Next loads the next token. Returns true if a token
// was loaded; false otherwise. If false, all tokens
// have been consumed.
func (d *Dispenser) Next() bool {
if d.cursor < len(d.tokens)-1 {
d.cursor++
return true
}
return false
}
// NextArg loads the next token if it is on the same
// line. Returns true if a token was loaded; false
// otherwise. If false, all tokens on the line have
// been consumed. It handles imported tokens correctly.
func (d *Dispenser) NextArg() bool {
if d.cursor < 0 {
d.cursor++
return true
}
if d.cursor >= len(d.tokens) {
return false
}
if d.cursor < len(d.tokens)-1 &&
d.tokens[d.cursor].File == d.tokens[d.cursor+1].File &&
d.tokens[d.cursor].Line+d.numLineBreaks(d.cursor) == d.tokens[d.cursor+1].Line {
d.cursor++
return true
}
return false
}
// NextLine loads the next token only if it is not on the same
// line as the current token, and returns true if a token was
// loaded; false otherwise. If false, there is not another token
// or it is on the same line. It handles imported tokens correctly.
func (d *Dispenser) NextLine() bool {
if d.cursor < 0 {
d.cursor++
return true
}
if d.cursor >= len(d.tokens) {
return false
}
if d.cursor < len(d.tokens)-1 &&
(d.tokens[d.cursor].File != d.tokens[d.cursor+1].File ||
d.tokens[d.cursor].Line+d.numLineBreaks(d.cursor) < d.tokens[d.cursor+1].Line) {
d.cursor++
return true
}
return false
}
// NextBlock can be used as the condition of a for loop
// to load the next token as long as it opens a block or
// is already in a block. It returns true if a token was
// loaded, or false when the block's closing curly brace
// was loaded and thus the block ended. Nested blocks are
// not supported.
func (d *Dispenser) NextBlock() bool {
if d.nesting > 0 {
d.Next()
if d.Val() == "}" {
d.nesting--
return false
}
return true
}
if !d.NextArg() { // block must open on same line
return false
}
if d.Val() != "{" {
d.cursor-- // roll back if not opening brace
return false
}
d.Next()
if d.Val() == "}" {
// Open and then closed right away
return false
}
d.nesting++
return true
}
// Val gets the text of the current token. If there is no token
// loaded, it returns empty string.
func (d *Dispenser) Val() string {
if d.cursor < 0 || d.cursor >= len(d.tokens) {
return ""
}
return d.tokens[d.cursor].Text
}
// Line gets the line number of the current token. If there is no token
// loaded, it returns 0.
func (d *Dispenser) Line() int {
if d.cursor < 0 || d.cursor >= len(d.tokens) {
return 0
}
return d.tokens[d.cursor].Line
}
// File gets the filename of the current token. If there is no token loaded,
// it returns the filename originally given when parsing started.
func (d *Dispenser) File() string {
if d.cursor < 0 || d.cursor >= len(d.tokens) {
return d.filename
}
if tokenFilename := d.tokens[d.cursor].File; tokenFilename != "" {
return tokenFilename
}
return d.filename
}
// Args is a convenience function that loads the next arguments
// (tokens on the same line) into an arbitrary number of strings
// pointed to in targets. If there are fewer tokens available
// than string pointers, the remaining strings will not be changed
// and false will be returned. If there were enough tokens available
// to fill the arguments, then true will be returned.
func (d *Dispenser) Args(targets ...*string) bool {
enough := true
for i := 0; i < len(targets); i++ {
if !d.NextArg() {
enough = false
break
}
*targets[i] = d.Val()
}
return enough
}
// RemainingArgs loads any more arguments (tokens on the same line)
// into a slice and returns them. Open curly brace tokens also indicate
// the end of arguments, and the curly brace is not included in
// the return value nor is it loaded.
func (d *Dispenser) RemainingArgs() []string {
var args []string
for d.NextArg() {
if d.Val() == "{" {
d.cursor--
break
}
args = append(args, d.Val())
}
return args
}
// ArgErr returns an argument error, meaning that another
// argument was expected but not found. In other words,
// a line break or open curly brace was encountered instead of
// an argument.
func (d *Dispenser) ArgErr() error {
if d.Val() == "{" {
return d.Err("Unexpected token '{', expecting argument")
}
return d.Errf("Wrong argument count or unexpected line ending after '%s'", d.Val())
}
// SyntaxErr creates a generic syntax error which explains what was
// found and what was expected.
func (d *Dispenser) SyntaxErr(expected string) error {
msg := fmt.Sprintf("%s:%d - Syntax error: Unexpected token '%s', expecting '%s'", d.File(), d.Line(), d.Val(), expected)
return errors.New(msg)
}
// EOFErr returns an error indicating that the dispenser reached
// the end of the input when searching for the next token.
func (d *Dispenser) EOFErr() error {
return d.Errf("Unexpected EOF")
}
// Err generates a custom parse-time error with a message of msg.
func (d *Dispenser) Err(msg string) error {
msg = fmt.Sprintf("%s:%d - Error during parsing: %s", d.File(), d.Line(), msg)
return errors.New(msg)
}
// Errf is like Err, but for formatted error messages
func (d *Dispenser) Errf(format string, args ...interface{}) error {
return d.Err(fmt.Sprintf(format, args...))
}
// numLineBreaks counts how many line breaks are in the token
// value given by the token index tknIdx. It returns 0 if the
// token does not exist or there are no line breaks.
func (d *Dispenser) numLineBreaks(tknIdx int) int {
if tknIdx < 0 || tknIdx >= len(d.tokens) {
return 0
}
return strings.Count(d.tokens[tknIdx].Text, "\n")
}
// isNewLine determines whether the current token is on a different
// line (higher line number) than the previous token. It handles imported
// tokens correctly. If there isn't a previous token, it returns true.
func (d *Dispenser) isNewLine() bool {
if d.cursor < 1 {
return true
}
if d.cursor > len(d.tokens)-1 {
return false
}
return d.tokens[d.cursor-1].File != d.tokens[d.cursor].File ||
d.tokens[d.cursor-1].Line+d.numLineBreaks(d.cursor-1) < d.tokens[d.cursor].Line
}
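
A rough usage sketch, not part of the vendored file above: driving the Dispenser API from a small program. The input string and the directive names in it are hypothetical.

package main

import (
    "fmt"
    "strings"

    "github.com/mholt/caddy/caddyfile"
)

func main() {
    // Hypothetical Caddyfile fragment fed to the dispenser.
    input := strings.NewReader("root /var/www\ngzip\n")
    d := caddyfile.NewDispenser("Caddyfile", input)
    // Next advances token by token; RemainingArgs gathers the rest of the line.
    for d.Next() {
        fmt.Println(d.Val(), d.RemainingArgs())
    }
}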

198
vendor/github.com/mholt/caddy/caddyfile/json.go generated vendored Normal file
View File

@ -0,0 +1,198 @@
// Copyright 2015 Light Code Labs, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package caddyfile
import (
"bytes"
"encoding/json"
"fmt"
"sort"
"strconv"
"strings"
)
const filename = "Caddyfile"
// ToJSON converts caddyfile to its JSON representation.
func ToJSON(caddyfile []byte) ([]byte, error) {
var j EncodedCaddyfile
serverBlocks, err := Parse(filename, bytes.NewReader(caddyfile), nil)
if err != nil {
return nil, err
}
for _, sb := range serverBlocks {
block := EncodedServerBlock{
Keys: sb.Keys,
Body: [][]interface{}{},
}
// Extract directives deterministically by sorting them
var directives = make([]string, len(sb.Tokens))
for dir := range sb.Tokens {
directives = append(directives, dir)
}
sort.Strings(directives)
// Convert each directive's tokens into our JSON structure
for _, dir := range directives {
disp := NewDispenserTokens(filename, sb.Tokens[dir])
for disp.Next() {
block.Body = append(block.Body, constructLine(&disp))
}
}
// tack this block onto the end of the list
j = append(j, block)
}
result, err := json.Marshal(j)
if err != nil {
return nil, err
}
return result, nil
}
// constructLine transforms tokens into a JSON-encodable structure;
// but only one line at a time, to be used at the top-level of
// a server block only (where the first token on each line is a
// directive) - not to be used at any other nesting level.
func constructLine(d *Dispenser) []interface{} {
var args []interface{}
args = append(args, d.Val())
for d.NextArg() {
if d.Val() == "{" {
args = append(args, constructBlock(d))
continue
}
args = append(args, d.Val())
}
return args
}
// constructBlock recursively processes tokens into a
// JSON-encodable structure. To be used in a directive's
// block. Goes to end of block.
func constructBlock(d *Dispenser) [][]interface{} {
block := [][]interface{}{}
for d.Next() {
if d.Val() == "}" {
break
}
block = append(block, constructLine(d))
}
return block
}
// FromJSON converts JSON-encoded jsonBytes to Caddyfile text
func FromJSON(jsonBytes []byte) ([]byte, error) {
var j EncodedCaddyfile
var result string
err := json.Unmarshal(jsonBytes, &j)
if err != nil {
return nil, err
}
for sbPos, sb := range j {
if sbPos > 0 {
result += "\n\n"
}
for i, key := range sb.Keys {
if i > 0 {
result += ", "
}
//result += standardizeScheme(key)
result += key
}
result += jsonToText(sb.Body, 1)
}
return []byte(result), nil
}
// jsonToText recursively transforms a scope of JSON into plain
// Caddyfile text.
func jsonToText(scope interface{}, depth int) string {
var result string
switch val := scope.(type) {
case string:
if strings.ContainsAny(val, "\" \n\t\r") {
result += `"` + strings.Replace(val, "\"", "\\\"", -1) + `"`
} else {
result += val
}
case int:
result += strconv.Itoa(val)
case float64:
result += fmt.Sprintf("%v", val)
case bool:
result += fmt.Sprintf("%t", val)
case [][]interface{}:
result += " {\n"
for _, arg := range val {
result += strings.Repeat("\t", depth) + jsonToText(arg, depth+1) + "\n"
}
result += strings.Repeat("\t", depth-1) + "}"
case []interface{}:
for i, v := range val {
if block, ok := v.([]interface{}); ok {
result += "{\n"
for _, arg := range block {
result += strings.Repeat("\t", depth) + jsonToText(arg, depth+1) + "\n"
}
result += strings.Repeat("\t", depth-1) + "}"
continue
}
result += jsonToText(v, depth)
if i < len(val)-1 {
result += " "
}
}
}
return result
}
// TODO: Will this function come in handy somewhere else?
/*
// standardizeScheme turns an address like host:https into https://host,
// or "host:" into "host".
func standardizeScheme(addr string) string {
if hostname, port, err := net.SplitHostPort(addr); err == nil {
if port == "http" || port == "https" {
addr = port + "://" + hostname
}
}
return strings.TrimSuffix(addr, ":")
}
*/
// EncodedCaddyfile encapsulates a slice of EncodedServerBlocks.
type EncodedCaddyfile []EncodedServerBlock
// EncodedServerBlock represents a server block ripe for encoding.
type EncodedServerBlock struct {
Keys []string `json:"keys"`
Body [][]interface{} `json:"body"`
}
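
A rough round-trip sketch, not part of the vendored file: ToJSON parses Caddyfile text into the EncodedCaddyfile structure and FromJSON renders it back. The input below is hypothetical.

package main

import (
    "fmt"

    "github.com/mholt/caddy/caddyfile"
)

func main() {
    // Hypothetical single server block with one directive.
    in := []byte("localhost:2015\n    gzip\n")
    j, err := caddyfile.ToJSON(in)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(j)) // e.g. [{"keys":["localhost:2015"],"body":[["gzip"]]}]

    out, err := caddyfile.FromJSON(j)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out))
}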

150
vendor/github.com/mholt/caddy/caddyfile/lexer.go generated vendored Normal file
View File

@ -0,0 +1,150 @@
// Copyright 2015 Light Code Labs, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package caddyfile
import (
"bufio"
"io"
"unicode"
)
type (
// lexer is a utility which can get values, token by
// token, from a Reader. A token is a word, and tokens
// are separated by whitespace. A word can be enclosed
// in quotes if it contains whitespace.
lexer struct {
reader *bufio.Reader
token Token
line int
}
// Token represents a single parsable unit.
Token struct {
File string
Line int
Text string
}
)
// load prepares the lexer to scan an input for tokens.
// It discards any leading byte order mark.
func (l *lexer) load(input io.Reader) error {
l.reader = bufio.NewReader(input)
l.line = 1
// discard byte order mark, if present
firstCh, _, err := l.reader.ReadRune()
if err != nil {
return err
}
if firstCh != 0xFEFF {
err := l.reader.UnreadRune()
if err != nil {
return err
}
}
return nil
}
// next loads the next token into the lexer.
// A token is delimited by whitespace, unless
// the token starts with a quotes character (")
// in which case the token goes until the closing
// quotes (the enclosing quotes are not included).
// Inside quoted strings, quotes may be escaped
// with a preceding \ character. No other chars
// may be escaped. The rest of the line is skipped
// if a "#" character is read in. Returns true if
// a token was loaded; false otherwise.
func (l *lexer) next() bool {
var val []rune
var comment, quoted, escaped bool
makeToken := func() bool {
l.token.Text = string(val)
return true
}
for {
ch, _, err := l.reader.ReadRune()
if err != nil {
if len(val) > 0 {
return makeToken()
}
if err == io.EOF {
return false
}
panic(err)
}
if quoted {
if !escaped {
if ch == '\\' {
escaped = true
continue
} else if ch == '"' {
quoted = false
return makeToken()
}
}
if ch == '\n' {
l.line++
}
if escaped {
// only escape quotes
if ch != '"' {
val = append(val, '\\')
}
}
val = append(val, ch)
escaped = false
continue
}
if unicode.IsSpace(ch) {
if ch == '\r' {
continue
}
if ch == '\n' {
l.line++
comment = false
}
if len(val) > 0 {
return makeToken()
}
continue
}
if ch == '#' {
comment = true
}
if comment {
continue
}
if len(val) == 0 {
l.token = Token{Line: l.line}
if ch == '"' {
quoted = true
continue
}
}
val = append(val, ch)
}
}
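
Since the lexer type is unexported, it can only be exercised from inside the caddyfile package. A minimal, hypothetical test sketch of the quoting and comment rules described above:

package caddyfile

import (
    "strings"
    "testing"
)

// TestLexerQuoting checks that a quoted token keeps its spaces, the
// surrounding quotes are dropped, and a trailing # comment is skipped.
func TestLexerQuoting(t *testing.T) {
    var l lexer
    if err := l.load(strings.NewReader(`root "/var/www/my site" # comment`)); err != nil {
        t.Fatal(err)
    }
    var got []string
    for l.next() {
        got = append(got, l.token.Text)
    }
    want := []string{"root", "/var/www/my site"}
    if len(got) != len(want) || got[0] != want[0] || got[1] != want[1] {
        t.Fatalf("got %v, want %v", got, want)
    }
}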

482
vendor/github.com/mholt/caddy/caddyfile/parse.go generated vendored Normal file
View File

@ -0,0 +1,482 @@
// Copyright 2015 Light Code Labs, LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package caddyfile
import (
"io"
"log"
"os"
"path/filepath"
"strings"
"github.com/mholt/caddy/telemetry"
)
// Parse parses the input just enough to group tokens, in
// order, by server block. No further parsing is performed.
// Server blocks are returned in the order in which they appear.
// Directives that do not appear in validDirectives will cause
// an error. If you do not want to check for valid directives,
// pass in nil instead.
func Parse(filename string, input io.Reader, validDirectives []string) ([]ServerBlock, error) {
p := parser{Dispenser: NewDispenser(filename, input), validDirectives: validDirectives}
return p.parseAll()
}
// allTokens lexes the entire input, but does not parse it.
// It returns all the tokens from the input, unstructured
// and in order.
func allTokens(input io.Reader) ([]Token, error) {
l := new(lexer)
err := l.load(input)
if err != nil {
return nil, err
}
var tokens []Token
for l.next() {
tokens = append(tokens, l.token)
}
return tokens, nil
}
type parser struct {
Dispenser
block ServerBlock // current server block being parsed
validDirectives []string // a directive must be valid or it's an error
eof bool // if we encounter a valid EOF in a hard place
definedSnippets map[string][]Token
}
func (p *parser) parseAll() ([]ServerBlock, error) {
var blocks []ServerBlock
for p.Next() {
err := p.parseOne()
if err != nil {
return blocks, err
}
if len(p.block.Keys) > 0 {
blocks = append(blocks, p.block)
}
}
return blocks, nil
}
func (p *parser) parseOne() error {
p.block = ServerBlock{Tokens: make(map[string][]Token)}
return p.begin()
}
func (p *parser) begin() error {
if len(p.tokens) == 0 {
return nil
}
err := p.addresses()
if err != nil {
return err
}
if p.eof {
// this happens if the Caddyfile consists of only
// a line of addresses and nothing else
return nil
}
if ok, name := p.isSnippet(); ok {
if p.definedSnippets == nil {
p.definedSnippets = map[string][]Token{}
}
if _, found := p.definedSnippets[name]; found {
return p.Errf("redeclaration of previously declared snippet %s", name)
}
// consume all tokens til matched close brace
tokens, err := p.snippetTokens()
if err != nil {
return err
}
p.definedSnippets[name] = tokens
// empty block keys so we don't save this block as a real server.
p.block.Keys = nil
return nil
}
return p.blockContents()
}
func (p *parser) addresses() error {
var expectingAnother bool
for {
tkn := replaceEnvVars(p.Val())
// special case: import directive replaces tokens during parse-time
if tkn == "import" && p.isNewLine() {
err := p.doImport()
if err != nil {
return err
}
continue
}
// Open brace definitely indicates end of addresses
if tkn == "{" {
if expectingAnother {
return p.Errf("Expected another address but had '%s' - check for extra comma", tkn)
}
break
}
if tkn != "" { // empty token possible if user typed ""
// Trailing comma indicates another address will follow, which
// may possibly be on the next line
if tkn[len(tkn)-1] == ',' {
tkn = tkn[:len(tkn)-1]
expectingAnother = true
} else {
expectingAnother = false // but we may still see another one on this line
}
p.block.Keys = append(p.block.Keys, tkn)
}
// Advance token and possibly break out of loop or return error
hasNext := p.Next()
if expectingAnother && !hasNext {
return p.EOFErr()
}
if !hasNext {
p.eof = true
break // EOF
}
if !expectingAnother && p.isNewLine() {
break
}
}
return nil
}
func (p *parser) blockContents() error {
errOpenCurlyBrace := p.openCurlyBrace()
if errOpenCurlyBrace != nil {
// single-server configs don't need curly braces
p.cursor--
}
err := p.directives()
if err != nil {
return err
}
// Only look for close curly brace if there was an opening
if errOpenCurlyBrace == nil {
err = p.closeCurlyBrace()
if err != nil {
return err
}
}
return nil
}
// directives parses through all the lines for directives
// and it expects the next token to be the first
// directive. It goes until EOF or closing curly brace
// which ends the server block.
func (p *parser) directives() error {
for p.Next() {
// end of server block
if p.Val() == "}" {
break
}
// special case: import directive replaces tokens during parse-time
if p.Val() == "import" {
err := p.doImport()
if err != nil {
return err
}
p.cursor-- // cursor is advanced when we continue, so roll back one more
continue
}
// normal case: parse a directive on this line
if err := p.directive(); err != nil {
return err
}
}
return nil
}
// doImport swaps out the import directive and its argument
// (a total of 2 tokens) with the tokens in the specified file
// or globbing pattern. When the function returns, the cursor
// is on the token before where the import directive was. In
// other words, call Next() to access the first token that was
// imported.
func (p *parser) doImport() error {
// syntax checks
if !p.NextArg() {
return p.ArgErr()
}
importPattern := replaceEnvVars(p.Val())
if importPattern == "" {
return p.Err("Import requires a non-empty filepath")
}
if p.NextArg() {
return p.Err("Import takes only one argument (glob pattern or file)")
}
// splice out the import directive and its argument (2 tokens total)
tokensBefore := p.tokens[:p.cursor-1]
tokensAfter := p.tokens[p.cursor+1:]
var importedTokens []Token
// first check snippets. That is a simple, non-recursive replacement
if p.definedSnippets != nil && p.definedSnippets[importPattern] != nil {
importedTokens = p.definedSnippets[importPattern]
} else {
// make path relative to the file of the _token_ being processed rather
// than current working directory (issue #867) and then use glob to get
// list of matching filenames
absFile, err := filepath.Abs(p.Dispenser.File())
if err != nil {
return p.Errf("Failed to get absolute path of file: %s: %v", p.Dispenser.filename, err)
}
var matches []string
var globPattern string
if !filepath.IsAbs(importPattern) {
globPattern = filepath.Join(filepath.Dir(absFile), importPattern)
} else {
globPattern = importPattern
}
if strings.Count(globPattern, "*") > 1 || strings.Count(globPattern, "?") > 1 ||
(strings.Contains(globPattern, "[") && strings.Contains(globPattern, "]")) {
// See issue #2096 - a pattern with many glob expansions can hang for too long
return p.Errf("Glob pattern may only contain one wildcard (*), but has others: %s", globPattern)
}
matches, err = filepath.Glob(globPattern)
if err != nil {
return p.Errf("Failed to use import pattern %s: %v", importPattern, err)
}
if len(matches) == 0 {
if strings.ContainsAny(globPattern, "*?[]") {
log.Printf("[WARNING] No files matching import glob pattern: %s", importPattern)
} else {
return p.Errf("File to import not found: %s", importPattern)
}
}
// collect all the imported tokens
for _, importFile := range matches {
newTokens, err := p.doSingleImport(importFile)
if err != nil {
return err
}
importedTokens = append(importedTokens, newTokens...)
}
}
// splice the imported tokens in the place of the import statement
// and rewind cursor so Next() will land on first imported token
p.tokens = append(tokensBefore, append(importedTokens, tokensAfter...)...)
p.cursor--
return nil
}
// doSingleImport lexes the individual file at importFile and returns
// its tokens or an error, if any.
func (p *parser) doSingleImport(importFile string) ([]Token, error) {
file, err := os.Open(importFile)
if err != nil {
return nil, p.Errf("Could not import %s: %v", importFile, err)
}
defer file.Close()
if info, err := file.Stat(); err != nil {
return nil, p.Errf("Could not import %s: %v", importFile, err)
} else if info.IsDir() {
return nil, p.Errf("Could not import %s: is a directory", importFile)
}
importedTokens, err := allTokens(file)
if err != nil {
return nil, p.Errf("Could not read tokens while importing %s: %v", importFile, err)
}
// Tack the file path onto these tokens so errors show the imported file's name
// (we use full, absolute path to avoid bugs: issue #1892)
filename, err := filepath.Abs(importFile)
if err != nil {
return nil, p.Errf("Failed to get absolute path of file: %s: %v", p.Dispenser.filename, err)
}
for i := 0; i < len(importedTokens); i++ {
importedTokens[i].File = filename
}
return importedTokens, nil
}
// directive collects tokens until the directive's scope
// closes (either end of line or end of curly brace block).
// It expects the currently-loaded token to be a directive
// (or } that ends a server block). The collected tokens
// are loaded into the current server block for later use
// by directive setup functions.
func (p *parser) directive() error {
dir := p.Val()
nesting := 0
// TODO: More helpful error message ("did you mean..." or "maybe you need to install its server type")
if !p.validDirective(dir) {
return p.Errf("Unknown directive '%s'", dir)
}
// The directive itself is appended as a relevant token
p.block.Tokens[dir] = append(p.block.Tokens[dir], p.tokens[p.cursor])
telemetry.AppendUnique("directives", dir)
for p.Next() {
if p.Val() == "{" {
nesting++
} else if p.isNewLine() && nesting == 0 {
p.cursor-- // read too far
break
} else if p.Val() == "}" && nesting > 0 {
nesting--
} else if p.Val() == "}" && nesting == 0 {
return p.Err("Unexpected '}' because no matching opening brace")
}
p.tokens[p.cursor].Text = replaceEnvVars(p.tokens[p.cursor].Text)
p.block.Tokens[dir] = append(p.block.Tokens[dir], p.tokens[p.cursor])
}
if nesting > 0 {
return p.EOFErr()
}
return nil
}
// openCurlyBrace expects the current token to be an
// opening curly brace. This acts like an assertion
// because it returns an error if the token is not
// an opening curly brace. It does NOT advance the token.
func (p *parser) openCurlyBrace() error {
if p.Val() != "{" {
return p.SyntaxErr("{")
}
return nil
}
// closeCurlyBrace expects the current token to be
// a closing curly brace. This acts like an assertion
// because it returns an error if the token is not
// a closing curly brace. It does NOT advance the token.
func (p *parser) closeCurlyBrace() error {
if p.Val() != "}" {
return p.SyntaxErr("}")
}
return nil
}
// validDirective returns true if dir is in p.validDirectives.
func (p *parser) validDirective(dir string) bool {
if p.validDirectives == nil {
return true
}
for _, d := range p.validDirectives {
if d == dir {
return true
}
}
return false
}
// replaceEnvVars replaces environment variables that appear in the token
// and understands both the $UNIX and %WINDOWS% syntaxes.
func replaceEnvVars(s string) string {
s = replaceEnvReferences(s, "{%", "%}")
s = replaceEnvReferences(s, "{$", "}")
return s
}
// replaceEnvReferences performs the actual replacement of env variables
// in s, given the placeholder start and placeholder end strings.
func replaceEnvReferences(s, refStart, refEnd string) string {
index := strings.Index(s, refStart)
for index != -1 {
endIndex := strings.Index(s, refEnd)
if endIndex > index+len(refStart) {
ref := s[index : endIndex+len(refEnd)]
s = strings.Replace(s, ref, os.Getenv(ref[len(refStart):len(ref)-len(refEnd)]), -1)
} else {
return s
}
index = strings.Index(s, refStart)
}
return s
}
// ServerBlock associates any number of keys (usually addresses
// of some sort) with tokens (grouped by directive name).
type ServerBlock struct {
Keys []string
Tokens map[string][]Token
}
func (p *parser) isSnippet() (bool, string) {
keys := p.block.Keys
// A snippet block is a single key with parens. Nothing else qualifies.
if len(keys) == 1 && strings.HasPrefix(keys[0], "(") && strings.HasSuffix(keys[0], ")") {
return true, strings.TrimSuffix(keys[0][1:], ")")
}
return false, ""
}
// read and store everything in a block for later replay.
func (p *parser) snippetTokens() ([]Token, error) {
// TODO: disallow imports in snippets for simplicity at import time
// snippet must have curlies.
err := p.openCurlyBrace()
if err != nil {
return nil, err
}
count := 1
tokens := []Token{}
for p.Next() {
if p.Val() == "}" {
count--
if count == 0 {
break
}
}
if p.Val() == "{" {
count++
}
tokens = append(tokens, p.tokens[p.cursor])
}
// make sure we're matched up
if count != 0 {
return nil, p.SyntaxErr("}")
}
return tokens, nil
}
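
A rough end-to-end sketch, not part of the vendored file: Parse groups tokens by server block as described above. The addresses and directives in the input are hypothetical; passing nil for validDirectives skips directive validation.

package main

import (
    "fmt"
    "strings"

    "github.com/mholt/caddy/caddyfile"
)

func main() {
    // Hypothetical server block with two keys and two directives.
    input := "example.com, www.example.com {\n    root /srv/www\n    gzip\n}\n"
    blocks, err := caddyfile.Parse("Caddyfile", strings.NewReader(input), nil)
    if err != nil {
        panic(err)
    }
    for _, b := range blocks {
        fmt.Println("keys:", b.Keys)
        for dir, toks := range b.Tokens {
            fmt.Printf("  %s: %d token(s)\n", dir, len(toks))
        }
    }
}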

View File

@ -263,9 +263,10 @@ type EventName string
// Define names for the various events
const (
StartupEvent EventName = "startup"
ShutdownEvent EventName = "shutdown"
CertRenewEvent EventName = "certrenew"
InstanceStartupEvent EventName = "instancestartup"
ShutdownEvent = "shutdown"
CertRenewEvent = "certrenew"
InstanceStartupEvent = "instancestartup"
InstanceRestartEvent = "instancerestart"
)
// EventHook is a type which holds information about a startup hook plugin.

View File

@ -31,7 +31,7 @@ func checkFdlimit() {
err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, rlimit)
if err == nil && rlimit.Cur < min {
fmt.Printf("WARNING: File descriptor limit %d is too low for production servers. "+
"At least %d is recommended. Fix with \"ulimit -n %d\".\n", rlimit.Cur, min, min)
"At least %d is recommended. Fix with `ulimit -n %d`.\n", rlimit.Cur, min, min)
}
}

View File

@ -90,6 +90,7 @@ func trapSignalsPosix() {
purgeEventHooks()
// Kick off the restart; our work is done
EmitEvent(InstanceRestartEvent, nil)
_, err = inst.Restart(caddyfileToUse)
if err != nil {
restoreEventHooks(oldEventHooks)