mirror of https://github.com/caddyserver/caddy.git synced 2024-12-16 21:56:40 -05:00

Merge branch 'caddytest-2' of github.com:elee1766/caddy into caddytest-2

commit 4d40619aa4
Author: a
Date: 2024-06-18 23:45:57 -05:00
GPG key ID: 374BC539FE795AF0 (no known key found for this signature in database)

8 changed files with 385 additions and 8 deletions

File: caddyconfig/caddyfile/parse.go

@@ -364,9 +364,45 @@ func (p *parser) doImport(nesting int) error {
 	// set up a replacer for non-variadic args replacement
 	repl := makeArgsReplacer(args)
 
+	// grab all the tokens (if it exists) from within a block that follows the import
+	var blockTokens []Token
+	for currentNesting := p.Nesting(); p.NextBlock(currentNesting); {
+		blockTokens = append(blockTokens, p.Token())
+	}
+	// initialize with size 1
+	blockMapping := make(map[string][]Token, 1)
+	if len(blockTokens) > 0 {
+		// use such tokens to create a new dispenser, and then use it to parse each block
+		bd := NewDispenser(blockTokens)
+		for bd.Next() {
+			// see if we can grab a key
+			var currentMappingKey string
+			if bd.Val() == "{" {
+				return p.Err("anonymous blocks are not supported")
+			}
+			currentMappingKey = bd.Val()
+			currentMappingTokens := []Token{}
+			// read all args until end of line / {
+			if bd.NextArg() {
+				currentMappingTokens = append(currentMappingTokens, bd.Token())
+				for bd.NextArg() {
+					currentMappingTokens = append(currentMappingTokens, bd.Token())
+				}
+				// TODO(elee1766): we don't enter another mapping here because it's annoying to extract the { and } properly.
+				// maybe someone can do that in the future
+			} else {
+				// attempt to enter a block and add tokens to the currentMappingTokens
+				for mappingNesting := bd.Nesting(); bd.NextBlock(mappingNesting); {
+					currentMappingTokens = append(currentMappingTokens, bd.Token())
+				}
+			}
+			blockMapping[currentMappingKey] = currentMappingTokens
+		}
+	}
+
 	// splice out the import directive and its arguments
 	// (2 tokens, plus the length of args)
-	tokensBefore := p.tokens[:p.cursor-1-len(args)]
+	tokensBefore := p.tokens[:p.cursor-1-len(args)-len(blockTokens)]
 	tokensAfter := p.tokens[p.cursor+1:]
 	var importedTokens []Token
 	var nodes []string
@@ -495,6 +531,33 @@ func (p *parser) doImport(nesting int) error {
 				maybeSnippet = false
 			}
 		}
 
+		// if it is {block}, we substitute with all tokens in the block
+		// if it is {blocks.*}, we substitute with the tokens in the mapping for the *
+		var skip bool
+		var tokensToAdd []Token
+		switch {
+		case token.Text == "{block}":
+			tokensToAdd = blockTokens
+		case strings.HasPrefix(token.Text, "{blocks.") && strings.HasSuffix(token.Text, "}"):
+			// {blocks.foo.bar} will be extracted to key `foo.bar`
+			blockKey := strings.TrimPrefix(strings.TrimSuffix(token.Text, "}"), "{blocks.")
+			val, ok := blockMapping[blockKey]
+			if ok {
+				tokensToAdd = val
+			}
+		default:
+			skip = true
+		}
+		if !skip {
+			if len(tokensToAdd) == 0 {
+				// if there is no content in the snippet block, don't do any replacement
+				// this allows snippets which contained {block}/{block.*} before this change to continue functioning as normal
+				tokensCopy = append(tokensCopy, token)
+			} else {
+				tokensCopy = append(tokensCopy, tokensToAdd...)
+			}
+			continue
+		}
 		if maybeSnippet {
 			tokensCopy = append(tokensCopy, token)
@@ -516,7 +579,7 @@ func (p *parser) doImport(nesting int) error {
 	// splice the imported tokens in the place of the import statement
 	// and rewind cursor so Next() will land on first imported token
 	p.tokens = append(tokensBefore, append(tokensCopy, tokensAfter...)...)
-	p.cursor -= len(args) + 1
+	p.cursor -= len(args) + len(blockTokens) + 1
 
 	return nil
 }
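Note on the dotted-key comment above: a token like {blocks.foo.bar} maps to the key `foo.bar`. A hypothetical sketch of that usage (not one of this commit's tests; the snippet name and header field are made up):

(dotted) {
	header {
		{blocks.foo.bar}
	}
}

example.com {
	import dotted {
		foo.bar {
			X-Example yes
		}
	}
}

Here the tokens inside the foo.bar block replace {blocks.foo.bar}, so the imported header directive sets X-Example: yes.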

New test file (caddytest/integration/caddyfile_adapt): {block} inside a header block

@@ -0,0 +1,58 @@
(snippet) {
	header {
		{block}
	}
}

example.com {
	import snippet {
		foo bar
	}
}
----------
{
	"apps": {
		"http": {
			"servers": {
				"srv0": {
					"listen": [
						":443"
					],
					"routes": [
						{
							"match": [
								{
									"host": [
										"example.com"
									]
								}
							],
							"handle": [
								{
									"handler": "subroute",
									"routes": [
										{
											"handle": [
												{
													"handler": "headers",
													"response": {
														"set": {
															"Foo": [
																"bar"
															]
														}
													}
												}
											]
										}
									]
								}
							],
							"terminal": true
						}
					]
				}
			}
		}
	}
}

New test file (caddytest/integration/caddyfile_adapt): {block} as the whole snippet body

@@ -0,0 +1,56 @@
(snippet) {
	{block}
}

example.com {
	import snippet {
		header foo bar
	}
}
----------
{
	"apps": {
		"http": {
			"servers": {
				"srv0": {
					"listen": [
						":443"
					],
					"routes": [
						{
							"match": [
								{
									"host": [
										"example.com"
									]
								}
							],
							"handle": [
								{
									"handler": "subroute",
									"routes": [
										{
											"handle": [
												{
													"handler": "headers",
													"response": {
														"set": {
															"Foo": [
																"bar"
															]
														}
													}
												}
											]
										}
									]
								}
							],
							"terminal": true
						}
					]
				}
			}
		}
	}
}

New test file (caddytest/integration/caddyfile_adapt): named {blocks.foo} and {blocks.bar}

@@ -0,0 +1,76 @@
(snippet) {
	header {
		{blocks.foo}
	}
	header {
		{blocks.bar}
	}
}

example.com {
	import snippet {
		foo {
			foo a
		}
		bar {
			bar b
		}
	}
}
----------
{
	"apps": {
		"http": {
			"servers": {
				"srv0": {
					"listen": [
						":443"
					],
					"routes": [
						{
							"match": [
								{
									"host": [
										"example.com"
									]
								}
							],
							"handle": [
								{
									"handler": "subroute",
									"routes": [
										{
											"handle": [
												{
													"handler": "headers",
													"response": {
														"set": {
															"Foo": [
																"a"
															]
														}
													}
												},
												{
													"handler": "headers",
													"response": {
														"set": {
															"Bar": [
																"b"
															]
														}
													}
												}
											]
										}
									]
								}
							],
							"terminal": true
						}
					]
				}
			}
		}
	}
}

New test file (caddytest/integration/caddyfile_adapt): {blocks.*} across nested snippet imports

@@ -0,0 +1,82 @@
(snippet) {
	header {
		{blocks.bar}
	}
	import sub_snippet {
		bar {
			{blocks.foo}
		}
	}
}
(sub_snippet) {
	header {
		{blocks.bar}
	}
}
example.com {
	import snippet {
		foo {
			foo a
		}
		bar {
			bar b
		}
	}
}
----------
{
	"apps": {
		"http": {
			"servers": {
				"srv0": {
					"listen": [
						":443"
					],
					"routes": [
						{
							"match": [
								{
									"host": [
										"example.com"
									]
								}
							],
							"handle": [
								{
									"handler": "subroute",
									"routes": [
										{
											"handle": [
												{
													"handler": "headers",
													"response": {
														"set": {
															"Bar": [
																"b"
															]
														}
													}
												},
												{
													"handler": "headers",
													"response": {
														"set": {
															"Foo": [
																"a"
															]
														}
													}
												}
											]
										}
									]
								}
							],
							"terminal": true
						}
					]
				}
			}
		}
	}
}

File: modules/caddyhttp/matchers.go

@@ -34,6 +34,7 @@ import (
 	"github.com/google/cel-go/cel"
 	"github.com/google/cel-go/common/types"
 	"github.com/google/cel-go/common/types/ref"
+	"golang.org/x/net/idna"
 
 	"github.com/caddyserver/caddy/v2"
 	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
@@ -239,13 +240,20 @@ func (m *MatchHost) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
 func (m MatchHost) Provision(_ caddy.Context) error {
 	// check for duplicates; they are nonsensical and reduce efficiency
 	// (we could just remove them, but the user should know their config is erroneous)
-	seen := make(map[string]int)
-	for i, h := range m {
-		h = strings.ToLower(h)
-		if firstI, ok := seen[h]; ok {
-			return fmt.Errorf("host at index %d is repeated at index %d: %s", firstI, i, h)
-		}
-		seen[h] = i
+	seen := make(map[string]int, len(m))
+	for i, host := range m {
+		asciiHost, err := idna.ToASCII(host)
+		if err != nil {
+			return fmt.Errorf("converting hostname '%s' to ASCII: %v", host, err)
+		}
+		if asciiHost != host {
+			m[i] = asciiHost
+		}
+		normalizedHost := strings.ToLower(asciiHost)
+		if firstI, ok := seen[normalizedHost]; ok {
+			return fmt.Errorf("host at index %d is repeated at index %d: %s", firstI, i, host)
+		}
+		seen[normalizedHost] = i
 	}
 
 	if m.large() {
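For reference, a minimal standalone Go sketch (not part of this commit) of the normalization Provision now performs; the hostname and its punycode form come from the new test case below:

package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/idna"
)

func main() {
	// Convert a Unicode hostname to its ASCII (punycode) form,
	// then lowercase it, as MatchHost.Provision now does before deduplication.
	asciiHost, err := idna.ToASCII("éxàmplê.com")
	if err != nil {
		panic(err)
	}
	fmt.Println(strings.ToLower(asciiHost)) // prints: xn--xmpl-0na6cm.com
}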

File: modules/caddyhttp/matchers_test.go

@@ -78,6 +78,11 @@ func TestHostMatcher(t *testing.T) {
 			input:  "bar.example.com",
 			expect: false,
 		},
+		{
+			match:  MatchHost{"éxàmplê.com"},
+			input:  "xn--xmpl-0na6cm.com",
+			expect: true,
+		},
 		{
 			match:  MatchHost{"*.example.com"},
 			input:  "example.com",
@@ -149,6 +154,10 @@ func TestHostMatcher(t *testing.T) {
 		ctx := context.WithValue(req.Context(), caddy.ReplacerCtxKey, repl)
 		req = req.WithContext(ctx)
 
+		if err := tc.match.Provision(caddy.Context{}); err != nil {
+			t.Errorf("Test %d %v: provisioning failed: %v", i, tc.match, err)
+		}
+
 		actual := tc.match.Match(req)
 		if actual != tc.expect {
 			t.Errorf("Test %d %v: Expected %t, got %t for '%s'", i, tc.match, tc.expect, actual, tc.input)

File: modules/caddyhttp/reverseproxy/selectionpolicies.go

@@ -26,6 +26,7 @@ import (
 	"strconv"
 	"strings"
 	"sync/atomic"
+	"time"
 
 	"github.com/cespare/xxhash/v2"
@@ -613,6 +614,8 @@ type CookieHashSelection struct {
 	Name string `json:"name,omitempty"`
 	// Secret to hash (Hmac256) chosen upstream in cookie
 	Secret string `json:"secret,omitempty"`
+	// The cookie's Max-Age before it expires. Default is no expiry.
+	MaxAge caddy.Duration `json:"max_age,omitempty"`
 
 	// The fallback policy to use if the cookie is not present. Defaults to `random`.
 	FallbackRaw json.RawMessage `json:"fallback,omitempty" caddy:"namespace=http.reverse_proxy.selection_policies inline_key=policy"`
@@ -671,6 +674,9 @@ func (s CookieHashSelection) Select(pool UpstreamPool, req *http.Request, w http
 		cookie.Secure = true
 		cookie.SameSite = http.SameSiteNoneMode
 	}
+	if s.MaxAge > 0 {
+		cookie.MaxAge = int(time.Duration(s.MaxAge).Seconds())
+	}
 	http.SetCookie(w, cookie)
 	return upstream
 }
@@ -699,6 +705,7 @@ func (s CookieHashSelection) Select(pool UpstreamPool, req *http.Request, w http
 //
 //	lb_policy cookie [<name> [<secret>]] {
 //		fallback <policy>
+//		max_age <duration>
 //	}
 //
 // By default name is `lb`
@@ -728,6 +735,24 @@ func (s *CookieHashSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
 				return err
 			}
 			s.FallbackRaw = mod
+		case "max_age":
+			if !d.NextArg() {
+				return d.ArgErr()
+			}
+			if s.MaxAge != 0 {
+				return d.Err("cookie max_age already specified")
+			}
+			maxAge, err := caddy.ParseDuration(d.Val())
+			if err != nil {
+				return d.Errf("invalid duration: %s", d.Val())
+			}
+			if maxAge <= 0 {
+				return d.Errf("invalid duration: %s, max_age should be non-zero and positive", d.Val())
+			}
+			if d.NextArg() {
+				return d.ArgErr()
+			}
+			s.MaxAge = caddy.Duration(maxAge)
 		default:
 			return d.Errf("unrecognized option '%s'", d.Val())
 		}