2017-09-23 00:56:58 -05:00
|
|
|
// Copyright 2015 Light Code Labs, LLC
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
Rewrote Caddy from the ground up; initial commit of 0.9 branch
These changes span work from the last ~4 months in an effort to make
Caddy more extensible, reduce the coupling between its components, and
lay a more robust foundation of code going forward into 1.0. A bunch of
new features have been added, too, with even higher future potential.
The most significant design change is an overall inversion of
dependencies. Instead of the caddy package knowing about the server
and the notion of middleware and config, the caddy package exposes an
interface that other components plug into. This does introduce more
indirection when reading the code, but every piece is very modular and
pluggable. Even the HTTP server is pluggable.
The caddy package has been moved to the top level, and main has been
pushed into a subfolder called caddy. The actual logic of the main
file has been pushed even further into caddy/caddymain/run.go so that
custom builds of Caddy can be 'go get'able.
The HTTPS logic was surgically separated into two parts to divide the
TLS-specific code and the HTTPS-specific code. The caddytls package can
now be used by any type of server that needs TLS, not just HTTP. I also
added the ability to customize nearly every aspect of TLS at the site
level rather than all sites sharing the same TLS configuration. Not all
of this flexibility is exposed in the Caddyfile yet, but it may be in
the future. Caddy can also generate self-signed certificates in memory
for the convenience of a developer working on localhost who wants HTTPS.
And Caddy now supports the DNS challenge, assuming at least one DNS
provider is plugged in.
Dozens, if not hundreds, of other minor changes swept through the code
base as I literally started from an empty main function, copying over
functions or files as needed, then adjusting them to fit in the new
design. Most tests have been restored and adapted to the new API,
but more work is needed there.
A lot of what was "impossible" before is now possible, or can be made
possible with minimal disruption of the code. For example, it's fairly
easy to make plugins hook into another part of the code via callbacks.
Plugins can do more than just be directives; we now have plugins that
customize how the Caddyfile is loaded (useful when you need to get your
configuration from a remote store).
Site addresses no longer need be just a host and port. They can have a
path, allowing you to scope a configuration to a specific path. There is
no inheritance, however; each site configuration is distinct.
Thanks to amazing work by Lucas Clemente, this commit adds experimental
QUIC support. Turn it on using the -quic flag; your browser may have
to be configured to enable it.
Almost everything is here, but you will notice that most of the middle-
ware are missing. After those are transferred over, we'll be ready for
beta tests.
I'm very excited to get this out. Thanks for everyone's help and
patience these last few months. I hope you like it!!
2016-06-04 18:00:29 -05:00
|
|
|
package caddyfile
|
2015-05-04 12:04:17 -05:00
|
|
|
|
|
|
|
import (
|
|
|
|
"bufio"
|
|
|
|
"io"
|
|
|
|
"unicode"
|
|
|
|
)
|
|
|
|
|
|
|
|
type (
	// lexer is a utility which can get values, token by
	// token, from a Reader. A token is a word, and tokens
	// are separated by whitespace. A word can be enclosed
	// in quotes if it contains whitespace.
	lexer struct {
		reader *bufio.Reader // buffered source the tokens are scanned from
		token  Token         // the most recently loaded token (valid after next() returns true)
		line   int           // current line number in the input, 1-based
	}

	// Token represents a single parsable unit.
	Token struct {
		File string // name of the file the token came from, if known
		Line int    // line number on which the token begins
		Text string // the text of the token, with enclosing quotes and escapes removed
	}
)
|
|
|
|
|
|
|
|
// load prepares the lexer to scan an input for tokens.
|
2016-07-27 13:48:39 -05:00
|
|
|
// It discards any leading byte order mark.
|
2015-05-04 12:04:17 -05:00
|
|
|
func (l *lexer) load(input io.Reader) error {
|
|
|
|
l.reader = bufio.NewReader(input)
|
|
|
|
l.line = 1
|
2016-07-27 13:48:39 -05:00
|
|
|
|
|
|
|
// discard byte order mark, if present
|
|
|
|
firstCh, _, err := l.reader.ReadRune()
|
2016-08-17 18:17:26 -05:00
|
|
|
if err != nil {
|
2020-05-16 12:14:03 -05:00
|
|
|
if err == io.EOF {
|
|
|
|
return nil
|
|
|
|
}
|
2016-08-17 18:17:26 -05:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
if firstCh != 0xFEFF {
|
2016-07-27 13:48:39 -05:00
|
|
|
err := l.reader.UnreadRune()
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-05-04 12:04:17 -05:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// next loads the next token into the lexer.
|
|
|
|
// A token is delimited by whitespace, unless
|
|
|
|
// the token starts with a quotes character (")
|
|
|
|
// in which case the token goes until the closing
|
|
|
|
// quotes (the enclosing quotes are not included).
|
2015-05-08 11:32:57 -05:00
|
|
|
// Inside quoted strings, quotes may be escaped
|
|
|
|
// with a preceding \ character. No other chars
|
|
|
|
// may be escaped. The rest of the line is skipped
|
|
|
|
// if a "#" character is read in. Returns true if
|
|
|
|
// a token was loaded; false otherwise.
|
2015-05-04 12:04:17 -05:00
|
|
|
func (l *lexer) next() bool {
|
|
|
|
var val []rune
|
|
|
|
var comment, quoted, escaped bool
|
|
|
|
|
|
|
|
makeToken := func() bool {
|
2016-06-04 23:50:23 -05:00
|
|
|
l.token.Text = string(val)
|
2015-05-04 12:04:17 -05:00
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
|
|
|
for {
|
|
|
|
ch, _, err := l.reader.ReadRune()
|
|
|
|
if err != nil {
|
|
|
|
if len(val) > 0 {
|
|
|
|
return makeToken()
|
|
|
|
}
|
|
|
|
if err == io.EOF {
|
|
|
|
return false
|
|
|
|
}
|
2015-05-24 21:52:34 -05:00
|
|
|
panic(err)
|
2015-05-04 12:04:17 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
if quoted {
|
|
|
|
if !escaped {
|
|
|
|
if ch == '\\' {
|
|
|
|
escaped = true
|
|
|
|
continue
|
|
|
|
} else if ch == '"' {
|
|
|
|
quoted = false
|
|
|
|
return makeToken()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if ch == '\n' {
|
|
|
|
l.line++
|
|
|
|
}
|
2015-05-08 11:32:57 -05:00
|
|
|
if escaped {
|
|
|
|
// only escape quotes
|
|
|
|
if ch != '"' {
|
|
|
|
val = append(val, '\\')
|
|
|
|
}
|
|
|
|
}
|
2015-05-04 12:04:17 -05:00
|
|
|
val = append(val, ch)
|
|
|
|
escaped = false
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if unicode.IsSpace(ch) {
|
|
|
|
if ch == '\r' {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
if ch == '\n' {
|
|
|
|
l.line++
|
|
|
|
comment = false
|
|
|
|
}
|
|
|
|
if len(val) > 0 {
|
|
|
|
return makeToken()
|
|
|
|
}
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if ch == '#' {
|
|
|
|
comment = true
|
|
|
|
}
|
|
|
|
|
|
|
|
if comment {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(val) == 0 {
|
2016-06-04 23:50:23 -05:00
|
|
|
l.token = Token{Line: l.line}
|
2015-05-04 12:04:17 -05:00
|
|
|
if ch == '"' {
|
|
|
|
quoted = true
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
val = append(val, ch)
|
|
|
|
}
|
|
|
|
}
|