Mirror of https://github.com/gohugoio/hugo.git (synced 2024-11-07 20:30:36 -05:00)
Avoid importing deploy from config when nodeploy tag is set
Test:

```
go list -tags nodeploy ./... | grep deploy
```

Fixes #12009
parent a65622a13e
commit 0257eb50a4

8 changed files with 65 additions and 60 deletions
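The change moves the deployment configuration out of the deploy package into deploy/deployconfig, so the config packages (and the deploy command's flag defaults) depend only on deployconfig and no longer pull in deploy and its cloud SDKs. The nodeploy tag itself works through Go build constraints; below is a minimal, self-contained sketch of that pattern — file and identifier names are illustrative, not taken from the Hugo source.

```go
// main.go — always compiled.
package main

import "fmt"

func main() {
	fmt.Println(deployStatus())
}
```

```go
// deploy_on.go — compiled by default (no -tags nodeploy).
//go:build !nodeploy

package main

// In Hugo this is where the deploy package (and, transitively, the
// cloud SDKs) would be imported and wired up.
func deployStatus() string { return "deploy support compiled in" }
```

```go
// deploy_off.go — compiled only with `go build -tags nodeploy`.
//go:build nodeploy

package main

// The stub satisfies the same call site without importing deploy,
// so the heavy dependencies never enter the binary.
func deployStatus() string { return "deploy support disabled" }
```

A plain `go build` picks up deploy_on.go; `go build -tags nodeploy` swaps in the stub instead.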
@@ -32,8 +32,10 @@ package commands
 import (
 "context"

-"github.com/bep/simplecobra"
-"github.com/gohugoio/hugo/deploy"
+"github.com/gohugoio/hugo/deploy/deployconfig"
+
+"github.com/bep/simplecobra"
 "github.com/spf13/cobra"
 )

@@ -62,9 +64,9 @@ documentation.
 cmd.Flags().Bool("confirm", false, "ask for confirmation before making changes to the target")
 cmd.Flags().Bool("dryRun", false, "dry run")
 cmd.Flags().Bool("force", false, "force upload of all files")
-cmd.Flags().Bool("invalidateCDN", deploy.DefaultConfig.InvalidateCDN, "invalidate the CDN cache listed in the deployment target")
-cmd.Flags().Int("maxDeletes", deploy.DefaultConfig.MaxDeletes, "maximum # of files to delete, or -1 to disable")
-cmd.Flags().Int("workers", deploy.DefaultConfig.Workers, "number of workers to transfer files. defaults to 10")
+cmd.Flags().Bool("invalidateCDN", deployconfig.DefaultConfig.InvalidateCDN, "invalidate the CDN cache listed in the deployment target")
+cmd.Flags().Int("maxDeletes", deployconfig.DefaultConfig.MaxDeletes, "maximum # of files to delete, or -1 to disable")
+cmd.Flags().Int("workers", deployconfig.DefaultConfig.Workers, "number of workers to transfer files. defaults to 10")
 },
 }
 }
@@ -37,7 +37,7 @@ import (
 "github.com/gohugoio/hugo/config/privacy"
 "github.com/gohugoio/hugo/config/security"
 "github.com/gohugoio/hugo/config/services"
-"github.com/gohugoio/hugo/deploy"
+"github.com/gohugoio/hugo/deploy/deployconfig"
 "github.com/gohugoio/hugo/helpers"
 "github.com/gohugoio/hugo/langs"
 "github.com/gohugoio/hugo/markup/markup_config"

@@ -141,8 +141,8 @@ type Config struct {
 // <docsmeta>{"refs": ["config:languages:menus"] }</docsmeta>
 Menus *config.ConfigNamespace[map[string]navigation.MenuConfig, navigation.Menus] `mapstructure:"-"`

-// The deployment configuration section contains for hugo deploy.
-Deployment deploy.DeployConfig `mapstructure:"-"`
+// The deployment configuration section contains for hugo deployconfig.
+Deployment deployconfig.DeployConfig `mapstructure:"-"`

 // Module configuration.
 Module modules.Config `mapstructure:"-"`
@@ -25,7 +25,7 @@ import (
 "github.com/gohugoio/hugo/config/privacy"
 "github.com/gohugoio/hugo/config/security"
 "github.com/gohugoio/hugo/config/services"
-"github.com/gohugoio/hugo/deploy"
+"github.com/gohugoio/hugo/deploy/deployconfig"
 "github.com/gohugoio/hugo/langs"
 "github.com/gohugoio/hugo/markup/markup_config"
 "github.com/gohugoio/hugo/media"

@@ -333,7 +333,7 @@ var allDecoderSetups = map[string]decodeWeight{
 key: "deployment",
 decode: func(d decodeWeight, p decodeConfig) error {
 var err error
-p.c.Deployment, err = deploy.DecodeConfig(p.p)
+p.c.Deployment, err = deployconfig.DecodeConfig(p.p)
 return err
 },
 },
@@ -24,6 +24,7 @@ import (
 "github.com/aws/aws-sdk-go-v2/aws"
 "github.com/aws/aws-sdk-go-v2/service/cloudfront"
 "github.com/aws/aws-sdk-go-v2/service/cloudfront/types"
+"github.com/gohugoio/hugo/deploy/deployconfig"
 gcaws "gocloud.dev/aws"
 )

@@ -38,7 +39,7 @@ var v2ConfigValidParams = map[string]bool{

 // InvalidateCloudFront invalidates the CloudFront cache for distributionID.
 // Uses AWS credentials config from the bucket URL.
-func InvalidateCloudFront(ctx context.Context, target *Target) error {
+func InvalidateCloudFront(ctx context.Context, target *deployconfig.Target) error {
 u, err := url.Parse(target.URL)
 if err != nil {
 return err
@@ -38,6 +38,7 @@ import (
 "github.com/gobwas/glob"
 "github.com/gohugoio/hugo/common/loggers"
 "github.com/gohugoio/hugo/config"
+"github.com/gohugoio/hugo/deploy/deployconfig"
 "github.com/gohugoio/hugo/media"
 "github.com/spf13/afero"
 "golang.org/x/text/unicode/norm"

@@ -57,10 +58,10 @@ type Deployer struct {
 mediaTypes media.Types // Hugo's MediaType to guess ContentType
 quiet bool // true reduces STDOUT // TODO(bep) remove, this is a global feature.

-cfg DeployConfig
+cfg deployconfig.DeployConfig
 logger loggers.Logger

-target *Target // the target to deploy to
+target *deployconfig.Target // the target to deploy to

 // For tests...
 summary deploySummary // summary of latest Deploy results

@@ -74,7 +75,7 @@ const metaMD5Hash = "md5chksum" // the meta key to store md5hash in

 // New constructs a new *Deployer.
 func New(cfg config.AllProvider, logger loggers.Logger, localFs afero.Fs) (*Deployer, error) {
-dcfg := cfg.GetConfigSection(deploymentConfigKey).(DeployConfig)
+dcfg := cfg.GetConfigSection(deployconfig.DeploymentConfigKey).(deployconfig.DeployConfig)
 targetName := dcfg.Target

 if len(dcfg.Targets) == 0 {

@@ -83,7 +84,7 @@ func New(cfg config.AllProvider, logger loggers.Logger, localFs afero.Fs) (*Depl
 mediaTypes := cfg.GetConfigSection("mediaTypes").(media.Types)

 // Find the target to deploy to.
-var tgt *Target
+var tgt *deployconfig.Target
 if targetName == "" {
 // Default to the first target.
 tgt = dcfg.Targets[0]

@@ -133,7 +134,7 @@ func (d *Deployer) Deploy(ctx context.Context) error {
 // Load local files from the source directory.
 var include, exclude glob.Glob
 if d.target != nil {
-include, exclude = d.target.includeGlob, d.target.excludeGlob
+include, exclude = d.target.IncludeGlob, d.target.ExcludeGlob
 }
 local, err := d.walkLocal(d.localFs, d.cfg.Matchers, include, exclude, d.mediaTypes)
 if err != nil {

@@ -178,7 +179,7 @@ func (d *Deployer) Deploy(ctx context.Context) error {

 // Order the uploads. They are organized in groups; all uploads in a group
 // must be complete before moving on to the next group.
-uploadGroups := applyOrdering(d.cfg.ordering, uploads)
+uploadGroups := applyOrdering(d.cfg.Ordering, uploads)

 nParallel := d.cfg.Workers
 var errs []error

@@ -343,14 +344,14 @@ type localFile struct {
 UploadSize int64

 fs afero.Fs
-matcher *Matcher
+matcher *deployconfig.Matcher
 md5 []byte // cache
 gzipped bytes.Buffer // cached of gzipped contents if gzipping
 mediaTypes media.Types
 }

 // newLocalFile initializes a *localFile.
-func newLocalFile(fs afero.Fs, nativePath, slashpath string, m *Matcher, mt media.Types) (*localFile, error) {
+func newLocalFile(fs afero.Fs, nativePath, slashpath string, m *deployconfig.Matcher, mt media.Types) (*localFile, error) {
 f, err := fs.Open(nativePath)
 if err != nil {
 return nil, err

@@ -482,7 +483,7 @@ func knownHiddenDirectory(name string) bool {

 // walkLocal walks the source directory and returns a flat list of files,
 // using localFile.SlashPath as the map keys.
-func (d *Deployer) walkLocal(fs afero.Fs, matchers []*Matcher, include, exclude glob.Glob, mediaTypes media.Types) (map[string]*localFile, error) {
+func (d *Deployer) walkLocal(fs afero.Fs, matchers []*deployconfig.Matcher, include, exclude glob.Glob, mediaTypes media.Types) (map[string]*localFile, error) {
 retval := map[string]*localFile{}
 err := afero.Walk(fs, "", func(path string, info os.FileInfo, err error) error {
 if err != nil {

@@ -521,7 +522,7 @@ func (d *Deployer) walkLocal(fs afero.Fs, matchers []*Matcher, include, exclude
 }

 // Find the first matching matcher (if any).
-var m *Matcher
+var m *deployconfig.Matcher
 for _, cur := range matchers {
 if cur.Matches(slashpath) {
 m = cur
@@ -31,6 +31,7 @@ import (
 "testing"

 "github.com/gohugoio/hugo/common/loggers"
+"github.com/gohugoio/hugo/deploy/deployconfig"
 "github.com/gohugoio/hugo/hugofs"
 "github.com/gohugoio/hugo/media"
 "github.com/google/go-cmp/cmp"

@@ -110,7 +111,7 @@ func TestFindDiffs(t *testing.T) {
 {
 Description: "local == remote with route.Force true -> diffs",
 Local: []*localFile{
-{NativePath: "aaa", SlashPath: "aaa", UploadSize: 1, matcher: &Matcher{Force: true}, md5: hash1},
+{NativePath: "aaa", SlashPath: "aaa", UploadSize: 1, matcher: &deployconfig.Matcher{Force: true}, md5: hash1},
 makeLocal("bbb", 2, hash1),
 },
 Remote: []*blob.ListObject{

@@ -293,7 +294,7 @@ func TestLocalFile(t *testing.T) {
 tests := []struct {
 Description string
 Path string
-Matcher *Matcher
+Matcher *deployconfig.Matcher
 MediaTypesConfig map[string]any
 WantContent []byte
 WantSize int64

@@ -319,7 +320,7 @@ func TestLocalFile(t *testing.T) {
 {
 Description: "CacheControl from matcher",
 Path: "foo.txt",
-Matcher: &Matcher{CacheControl: "max-age=630720000"},
+Matcher: &deployconfig.Matcher{CacheControl: "max-age=630720000"},
 WantContent: contentBytes,
 WantSize: contentLen,
 WantMD5: contentMD5[:],

@@ -328,7 +329,7 @@ func TestLocalFile(t *testing.T) {
 {
 Description: "ContentEncoding from matcher",
 Path: "foo.txt",
-Matcher: &Matcher{ContentEncoding: "foobar"},
+Matcher: &deployconfig.Matcher{ContentEncoding: "foobar"},
 WantContent: contentBytes,
 WantSize: contentLen,
 WantMD5: contentMD5[:],

@@ -337,7 +338,7 @@ func TestLocalFile(t *testing.T) {
 {
 Description: "ContentType from matcher",
 Path: "foo.txt",
-Matcher: &Matcher{ContentType: "foo/bar"},
+Matcher: &deployconfig.Matcher{ContentType: "foo/bar"},
 WantContent: contentBytes,
 WantSize: contentLen,
 WantMD5: contentMD5[:],

@@ -346,7 +347,7 @@ func TestLocalFile(t *testing.T) {
 {
 Description: "gzipped content",
 Path: "foo.txt",
-Matcher: &Matcher{Gzip: true},
+Matcher: &deployconfig.Matcher{Gzip: true},
 WantContent: gzBytes,
 WantSize: gzLen,
 WantMD5: gzMD5[:],

@@ -560,7 +561,7 @@ func TestEndToEndSync(t *testing.T) {
 localFs: test.fs,
 bucket: test.bucket,
 mediaTypes: media.DefaultTypes,
-cfg: DeployConfig{MaxDeletes: -1},
+cfg: deployconfig.DeployConfig{MaxDeletes: -1},
 }

 // Initial deployment should sync remote with local.

@@ -643,7 +644,7 @@ func TestMaxDeletes(t *testing.T) {
 localFs: test.fs,
 bucket: test.bucket,
 mediaTypes: media.DefaultTypes,
-cfg: DeployConfig{MaxDeletes: -1},
+cfg: deployconfig.DeployConfig{MaxDeletes: -1},
 }

 // Sync remote with local.

@@ -764,16 +765,16 @@ func TestIncludeExclude(t *testing.T) {
 if err != nil {
 t.Fatal(err)
 }
-tgt := &Target{
+tgt := &deployconfig.Target{
 Include: test.Include,
 Exclude: test.Exclude,
 }
-if err := tgt.parseIncludeExclude(); err != nil {
+if err := tgt.ParseIncludeExclude(); err != nil {
 t.Error(err)
 }
 deployer := &Deployer{
 localFs: fsTest.fs,
-cfg: DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
+cfg: deployconfig.DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
 target: tgt,
 mediaTypes: media.DefaultTypes,
 }

@@ -830,7 +831,7 @@ func TestIncludeExcludeRemoteDelete(t *testing.T) {
 }
 deployer := &Deployer{
 localFs: fsTest.fs,
-cfg: DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
+cfg: deployconfig.DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
 mediaTypes: media.DefaultTypes,
 }

@@ -848,11 +849,11 @@ func TestIncludeExcludeRemoteDelete(t *testing.T) {
 }

 // Second sync
-tgt := &Target{
+tgt := &deployconfig.Target{
 Include: test.Include,
 Exclude: test.Exclude,
 }
-if err := tgt.parseIncludeExclude(); err != nil {
+if err := tgt.ParseIncludeExclude(); err != nil {
 t.Error(err)
 }
 deployer.target = tgt

@@ -882,7 +883,7 @@ func TestCompression(t *testing.T) {
 deployer := &Deployer{
 localFs: test.fs,
 bucket: test.bucket,
-cfg: DeployConfig{MaxDeletes: -1, Matchers: []*Matcher{{Pattern: ".*", Gzip: true, re: regexp.MustCompile(".*")}}},
+cfg: deployconfig.DeployConfig{MaxDeletes: -1, Matchers: []*deployconfig.Matcher{{Pattern: ".*", Gzip: true, Re: regexp.MustCompile(".*")}}},
 mediaTypes: media.DefaultTypes,
 }

@@ -937,7 +938,7 @@ func TestMatching(t *testing.T) {
 deployer := &Deployer{
 localFs: test.fs,
 bucket: test.bucket,
-cfg: DeployConfig{MaxDeletes: -1, Matchers: []*Matcher{{Pattern: "^subdir/aaa$", Force: true, re: regexp.MustCompile("^subdir/aaa$")}}},
+cfg: deployconfig.DeployConfig{MaxDeletes: -1, Matchers: []*deployconfig.Matcher{{Pattern: "^subdir/aaa$", Force: true, Re: regexp.MustCompile("^subdir/aaa$")}}},
 mediaTypes: media.DefaultTypes,
 }

@@ -962,7 +963,7 @@ func TestMatching(t *testing.T) {
 }

 // Repeat with a matcher that should now match 3 files.
-deployer.cfg.Matchers = []*Matcher{{Pattern: "aaa", Force: true, re: regexp.MustCompile("aaa")}}
+deployer.cfg.Matchers = []*deployconfig.Matcher{{Pattern: "aaa", Force: true, Re: regexp.MustCompile("aaa")}}
 if err := deployer.Deploy(ctx); err != nil {
 t.Errorf("no-op deploy with triple force matcher: %v", err)
 }
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package deploy
+package deployconfig

 import (
 "errors"

@@ -24,7 +24,7 @@ import (
 "github.com/mitchellh/mapstructure"
 )

-const deploymentConfigKey = "deployment"
+const DeploymentConfigKey = "deployment"

 // DeployConfig is the complete configuration for deployment.
 type DeployConfig struct {

@@ -48,7 +48,7 @@ type DeployConfig struct {
 // Number of concurrent workers to use when uploading files.
 Workers int

-ordering []*regexp.Regexp // compiled Order
+Ordering []*regexp.Regexp `json:"-"` // compiled Order
 }

 type Target struct {

@@ -67,20 +67,20 @@ type Target struct {
 Exclude string

 // Parsed versions of Include/Exclude.
-includeGlob glob.Glob
-excludeGlob glob.Glob
+IncludeGlob glob.Glob `json:"-"`
+ExcludeGlob glob.Glob `json:"-"`
 }

-func (tgt *Target) parseIncludeExclude() error {
+func (tgt *Target) ParseIncludeExclude() error {
 var err error
 if tgt.Include != "" {
-tgt.includeGlob, err = hglob.GetGlob(tgt.Include)
+tgt.IncludeGlob, err = hglob.GetGlob(tgt.Include)
 if err != nil {
 return fmt.Errorf("invalid deployment.target.include %q: %v", tgt.Include, err)
 }
 }
 if tgt.Exclude != "" {
-tgt.excludeGlob, err = hglob.GetGlob(tgt.Exclude)
+tgt.ExcludeGlob, err = hglob.GetGlob(tgt.Exclude)
 if err != nil {
 return fmt.Errorf("invalid deployment.target.exclude %q: %v", tgt.Exclude, err)
 }

@@ -115,12 +115,12 @@ type Matcher struct {
 // other route-determined metadata (e.g., ContentType) has changed.
 Force bool

-// re is Pattern compiled.
-re *regexp.Regexp
+// Re is Pattern compiled.
+Re *regexp.Regexp `json:"-"`
 }

 func (m *Matcher) Matches(path string) bool {
-return m.re.MatchString(path)
+return m.Re.MatchString(path)
 }

 var DefaultConfig = DeployConfig{

@@ -133,10 +133,10 @@ var DefaultConfig = DeployConfig{
 func DecodeConfig(cfg config.Provider) (DeployConfig, error) {
 dcfg := DefaultConfig

-if !cfg.IsSet(deploymentConfigKey) {
+if !cfg.IsSet(DeploymentConfigKey) {
 return dcfg, nil
 }
-if err := mapstructure.WeakDecode(cfg.GetStringMap(deploymentConfigKey), &dcfg); err != nil {
+if err := mapstructure.WeakDecode(cfg.GetStringMap(DeploymentConfigKey), &dcfg); err != nil {
 return dcfg, err
 }

@@ -148,7 +148,7 @@ func DecodeConfig(cfg config.Provider) (DeployConfig, error) {
 if *tgt == (Target{}) {
 return dcfg, errors.New("empty deployment target")
 }
-if err := tgt.parseIncludeExclude(); err != nil {
+if err := tgt.ParseIncludeExclude(); err != nil {
 return dcfg, err
 }
 }

@@ -157,7 +157,7 @@ func DecodeConfig(cfg config.Provider) (DeployConfig, error) {
 if *m == (Matcher{}) {
 return dcfg, errors.New("empty deployment matcher")
 }
-m.re, err = regexp.Compile(m.Pattern)
+m.Re, err = regexp.Compile(m.Pattern)
 if err != nil {
 return dcfg, fmt.Errorf("invalid deployment.matchers.pattern: %v", err)
 }

@@ -167,7 +167,7 @@ func DecodeConfig(cfg config.Provider) (DeployConfig, error) {
 if err != nil {
 return dcfg, fmt.Errorf("invalid deployment.orderings.pattern: %v", err)
 }
-dcfg.ordering = append(dcfg.ordering, re)
+dcfg.Ordering = append(dcfg.Ordering, re)
 }

 return dcfg, nil
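The hunks above export Target.IncludeGlob/ExcludeGlob, Target.ParseIncludeExclude, and Matcher.Re so that the deploy package and its tests, changed earlier in this diff, can reach them across the new package boundary. A small sketch of that exported surface as seen from another package, assuming the Hugo module is on the import path (illustrative, not part of this commit):

```go
package main

import (
	"fmt"
	"regexp"

	"github.com/gohugoio/hugo/deploy/deployconfig"
)

func main() {
	// Targets parse their include/exclude patterns into exported globs.
	tgt := &deployconfig.Target{Include: "**.html"}
	if err := tgt.ParseIncludeExclude(); err != nil {
		panic(err)
	}
	fmt.Println(tgt.IncludeGlob.Match("posts/index.html"))

	// Matchers carry the compiled pattern in the exported Re field,
	// mirroring the hand-built matchers in the deploy tests above.
	m := &deployconfig.Matcher{Pattern: "^static/", Re: regexp.MustCompile("^static/")}
	fmt.Println(m.Matches("static/logo.png"))
}
```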
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2024 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -14,7 +14,7 @@
 //go:build !nodeploy
 // +build !nodeploy

-package deploy
+package deployconfig

 import (
 "fmt"

@@ -91,7 +91,7 @@ force = true
 c.Assert(len(dcfg.Order), qt.Equals, 2)
 c.Assert(dcfg.Order[0], qt.Equals, "o1")
 c.Assert(dcfg.Order[1], qt.Equals, "o2")
-c.Assert(len(dcfg.ordering), qt.Equals, 2)
+c.Assert(len(dcfg.Ordering), qt.Equals, 2)

 // Targets.
 c.Assert(len(dcfg.Targets), qt.Equals, 3)

@@ -104,11 +104,11 @@ force = true
 c.Assert(tgt.CloudFrontDistributionID, qt.Equals, fmt.Sprintf("cdn%d", i))
 c.Assert(tgt.Include, qt.Equals, wantInclude[i])
 if wantInclude[i] != "" {
-c.Assert(tgt.includeGlob, qt.Not(qt.IsNil))
+c.Assert(tgt.IncludeGlob, qt.Not(qt.IsNil))
 }
 c.Assert(tgt.Exclude, qt.Equals, wantExclude[i])
 if wantExclude[i] != "" {
-c.Assert(tgt.excludeGlob, qt.Not(qt.IsNil))
+c.Assert(tgt.ExcludeGlob, qt.Not(qt.IsNil))
 }
 }

@@ -117,7 +117,7 @@ force = true
 for i := 0; i < 3; i++ {
 m := dcfg.Matchers[i]
 c.Assert(m.Pattern, qt.Equals, fmt.Sprintf("^pattern%d$", i))
-c.Assert(m.re, qt.Not(qt.IsNil))
+c.Assert(m.Re, qt.Not(qt.IsNil))
 c.Assert(m.CacheControl, qt.Equals, fmt.Sprintf("cachecontrol%d", i))
 c.Assert(m.ContentEncoding, qt.Equals, fmt.Sprintf("contentencoding%d", i))
 c.Assert(m.ContentType, qt.Equals, fmt.Sprintf("contenttype%d", i))