Add support for taking named tunnel credentials from an environment variable

parent 1cb22817db
commit 6968b714d0

@@ -261,8 +261,16 @@ func (sc *subcommandContext) delete(tunnelIDs []uuid.UUID) error {
 // and add the TunnelID into any old credentials (generated before TUN-3581 added the `TunnelID`
 // field to credentials files)
 func (sc *subcommandContext) findCredentials(tunnelID uuid.UUID) (connection.Credentials, error) {
-	credFinder := sc.credentialFinder(tunnelID)
-	credentials, err := sc.readTunnelCredentials(credFinder)
+	var credentials connection.Credentials
+	var err error
+	if credentialsContents := sc.c.String(CredContentsFlag); credentialsContents != "" {
+		if err = json.Unmarshal([]byte(credentialsContents), &credentials); err != nil {
+			err = errInvalidJSONCredential{path: "TUNNEL_CRED_CONTENTS", err: err}
+		}
+	} else {
+		credFinder := sc.credentialFinder(tunnelID)
+		credentials, err = sc.readTunnelCredentials(credFinder)
+	}
 	// This line ensures backwards compatibility with credentials files generated before
 	// TUN-3581. Those old credentials files don't have a TunnelID field, so we enrich the struct
 	// with the ID, which we have already resolved from the user input.
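
The hunk ends just before the enrichment statement that the trailing comment describes. Below is a minimal, self-contained sketch of the same pattern, using hypothetical Credentials and loadCredentials names rather than cloudflared's actual types: inline JSON (such as the value of TUNNEL_CRED_CONTENTS) takes priority, the credentials file is the fallback, and the resolved tunnel ID is written into the struct afterwards to cover old-format files.

package main

import (
	"encoding/json"
	"fmt"
	"os"

	"github.com/google/uuid"
)

// Credentials mirrors the rough shape of connection.Credentials for illustration only.
type Credentials struct {
	AccountTag   string
	TunnelSecret []byte
	TunnelID     uuid.UUID
	TunnelName   string
}

// loadCredentials prefers inline JSON contents (e.g. the value of
// TUNNEL_CRED_CONTENTS) and falls back to reading a credentials file from disk.
func loadCredentials(tunnelID uuid.UUID, contents, path string) (Credentials, error) {
	var credentials Credentials
	var err error
	if contents != "" {
		err = json.Unmarshal([]byte(contents), &credentials)
	} else {
		var raw []byte
		if raw, err = os.ReadFile(path); err == nil {
			err = json.Unmarshal(raw, &credentials)
		}
	}
	if err != nil {
		return Credentials{}, err
	}
	// Backwards compatibility: old credentials files have no TunnelID field,
	// so fill it in from the ID already resolved from user input.
	credentials.TunnelID = tunnelID
	return credentials, nil
}

func main() {
	id := uuid.New()
	creds, err := loadCredentials(id, os.Getenv("TUNNEL_CRED_CONTENTS"), "credentials.json")
	fmt.Println(creds, err)
}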

@@ -191,6 +191,51 @@ func Test_subcommandContext_findCredentials(t *testing.T) {
 				TunnelName:   name,
 			},
 		},
+		{
+			name: "TUNNEL_CRED_CONTENTS given contains old credentials contents",
+			fields: fields{
+				log: &log,
+				fs:  fs,
+				c: func() *cli.Context {
+					flagSet := flag.NewFlagSet("test0", flag.PanicOnError)
+					flagSet.String(CredContentsFlag, "", "")
+					c := cli.NewContext(cli.NewApp(), flagSet, nil)
+					_ = c.Set(CredContentsFlag, fmt.Sprintf(`{"AccountTag":"%s","TunnelSecret":"%s"}`, accountTag, secretB64))
+					return c
+				}(),
+			},
+			args: args{
+				tunnelID: tunnelID,
+			},
+			want: connection.Credentials{
+				AccountTag:   accountTag,
+				TunnelID:     tunnelID,
+				TunnelSecret: secret,
+			},
+		},
+		{
+			name: "TUNNEL_CRED_CONTENTS given contains new credentials contents",
+			fields: fields{
+				log: &log,
+				fs:  fs,
+				c: func() *cli.Context {
+					flagSet := flag.NewFlagSet("test0", flag.PanicOnError)
+					flagSet.String(CredContentsFlag, "", "")
+					c := cli.NewContext(cli.NewApp(), flagSet, nil)
+					_ = c.Set(CredContentsFlag, fmt.Sprintf(`{"AccountTag":"%s","TunnelSecret":"%s","TunnelID":"%s","TunnelName":"%s"}`, accountTag, secretB64, tunnelID, name))
+					return c
+				}(),
+			},
+			args: args{
+				tunnelID: tunnelID,
+			},
+			want: connection.Credentials{
+				AccountTag:   accountTag,
+				TunnelID:     tunnelID,
+				TunnelSecret: secret,
+				TunnelName:   name,
+			},
+		},
+	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
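
The test cases pass the secret into the JSON as secretB64 while the expected struct holds the raw secret bytes; this works because encoding/json marshals []byte fields as base64 strings and decodes them back on unmarshal. A short sketch of that round trip, with illustrative variable values rather than the ones the test file actually defines:

package main

import (
	"encoding/base64"
	"encoding/json"
	"fmt"
)

func main() {
	// Illustrative secret; the real tests derive secret and secretB64 elsewhere.
	secret := []byte("tunnel-secret-bytes")
	secretB64 := base64.StdEncoding.EncodeToString(secret)

	// []byte fields round-trip through JSON as base64 strings.
	var decoded struct{ TunnelSecret []byte }
	if err := json.Unmarshal([]byte(fmt.Sprintf(`{"TunnelSecret":%q}`, secretB64)), &decoded); err != nil {
		panic(err)
	}
	fmt.Println(string(decoded.TunnelSecret)) // tunnel-secret-bytes
}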

@@ -33,6 +33,7 @@ const (
 	connsSortByOptions   = "id, startedAt, numConnections, version"
 	CredFileFlagAlias    = "cred-file"
 	CredFileFlag         = "credentials-file"
+	CredContentsFlag     = "credentials-contents"
 	overwriteDNSFlagName = "overwrite-dns"

 	LogFieldTunnelID = "tunnelID"

@@ -112,6 +113,11 @@ var (
 		EnvVars: []string{"TUNNEL_CRED_FILE"},
 	}
 	credentialsFileFlag = altsrc.NewStringFlag(credentialsFileFlagCLIOnly)
+	credentialsContentsFlag = altsrc.NewStringFlag(&cli.StringFlag{
+		Name:    CredContentsFlag,
+		Usage:   "Contents of the tunnel credentials JSON file to use. When provided along with credentials-file, this will take precedence.",
+		EnvVars: []string{"TUNNEL_CRED_CONTENTS"},
+	})
 	forceDeleteFlag = &cli.BoolFlag{
 		Name:    "force",
 		Aliases: []string{"f"},
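
For context on how the new flag picks up the environment variable: the flag types here suggest urfave/cli, where the EnvVars field lets an environment variable populate a flag that was not passed on the command line. A small standalone sketch under that assumption (the app and flag below are illustrative, not cloudflared's own setup):

package main

import (
	"fmt"
	"log"
	"os"

	"github.com/urfave/cli/v2"
)

func main() {
	// Simulate the variable being set in the environment for the demo.
	_ = os.Setenv("TUNNEL_CRED_CONTENTS", `{"AccountTag":"example"}`)

	app := &cli.App{
		Flags: []cli.Flag{
			&cli.StringFlag{
				Name:    "credentials-contents",
				Usage:   "Contents of the tunnel credentials JSON file to use.",
				EnvVars: []string{"TUNNEL_CRED_CONTENTS"},
			},
		},
		Action: func(c *cli.Context) error {
			// Populated from the environment even though no --credentials-contents
			// argument was passed.
			fmt.Println(c.String("credentials-contents"))
			return nil
		},
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}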

@@ -579,6 +585,7 @@ func buildRunCommand() *cli.Command {
 	flags := []cli.Flag{
 		forceFlag,
 		credentialsFileFlag,
+		credentialsContentsFlag,
 		selectProtocolFlag,
 		featuresFlag,
 	}