Skip to content

Commit

Permalink
blob: create URLMux type for scheme registration
Browse files Browse the repository at this point in the history
Added URLOpener types to all of the provider packages.

Fixes google#931
  • Loading branch information
zombiezen committed Jan 18, 2019
1 parent f95e90f commit b850cab
Show file tree
Hide file tree
Showing 17 changed files with 617 additions and 374 deletions.
43 changes: 43 additions & 0 deletions blob/anyblob/anyblob.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
// Copyright 2019 The Go Cloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package anyblob provides an Open function that opens buckets backed
// by GCS, S3, Azure Storage, filesystem, and memory.
package anyblob

import (
"context"

"gocloud.dev/blob"
"gocloud.dev/blob/azureblob"
"gocloud.dev/blob/fileblob"
"gocloud.dev/blob/gcsblob"
"gocloud.dev/blob/memblob"
"gocloud.dev/blob/s3blob"
)

// Open opens a bucket URL, allowing URL overrides for all openers that
// support it. See the URLOpener types in various provider packages for
// more details.
// Open opens the bucket identified by the URL given, dispatching on the
// URL's scheme to the matching provider opener. Openers that support it
// allow settings to be overridden via URL query parameters; see the
// URLOpener types in the individual provider packages for details.
func Open(ctx context.Context, urlstr string) (*blob.Bucket, error) {
	bucket, err := mux.Open(ctx, urlstr)
	if err != nil {
		return nil, err
	}
	return bucket, nil
}

// mux routes bucket URLs to a provider-specific opener keyed by URL scheme.
// Openers constructed with AllowURLOverrides: true permit their settings to
// be overridden through URL query parameters (see each provider package's
// URLOpener documentation); the file and in-memory openers take no such
// overrides.
var mux = blob.NewURLMux(map[string]blob.BucketURLOpener{
	azureblob.Scheme: &azureblob.URLOpener{AllowURLOverrides: true},
	fileblob.Scheme:  &fileblob.URLOpener{},
	gcsblob.Scheme:   &gcsblob.URLOpener{AllowURLOverrides: true},
	memblob.Scheme:   &memblob.URLOpener{},
	s3blob.Scheme:    &s3blob.URLOpener{AllowURLOverrides: true},
})
185 changes: 113 additions & 72 deletions blob/azureblob/azureblob.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,36 +15,6 @@
// Package azureblob provides a blob implementation that uses Azure Storage’s
// BlockBlob. Use OpenBucket to construct a *blob.Bucket.
//
// Open URLs
//
// For blob.Open URLs, azureblob registers for the scheme "azblob"; URLs start
// with "azblob://".
//
// The URL's Host is used as the bucket name.
//
// By default, credentials are retrieved from the environment variables
// AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_KEY, and AZURE_STORAGE_SAS_TOKEN.
// AZURE_STORAGE_ACCOUNT is required, along with one of the other two. See
// https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1#what-is-a-shared-access-signature
// for more on SAS tokens. Alternatively, credentials can be loaded from a file;
// see the cred_path query parameter below.
//
// The following query options are supported:
// - cred_path: Sets path to a credentials file in JSON format. The
// AccountName field must be specified, and either AccountKey or SASToken.
// Example credentials file using AccountKey:
// {
// "AccountName": "STORAGE ACCOUNT NAME",
// "AccountKey": "PRIMARY OR SECONDARY ACCOUNT KEY"
// }
// Example credentials file using SASToken:
// {
// "AccountName": "STORAGE ACCOUNT NAME",
// "SASToken": "ENTER YOUR AZURE STORAGE SAS TOKEN"
// }
// Example URL:
// azblob://mybucket?cred_path=pathToCredentials
//
// As
//
// azureblob exposes the following types for As:
Expand Down Expand Up @@ -120,54 +90,123 @@ const (
defaultUploadBlockSize = 8 * 1024 * 1024 // configure the upload buffer size
)

func init() {
blob.Register("azblob", openURL)
// Scheme is the URL scheme conventionally used for Azure Storage in a
// URLMux; URLs handled by URLOpener look like "azblob://mybucket".
const Scheme = "azblob"

// URLOpener opens Azure URLs like "azblob://mybucket".
// URLOpener opens Azure URLs like "azblob://mybucket".
type URLOpener struct {
	// AccountName is the Azure storage account name, passed to OpenBucket.
	AccountName AccountName

	// Pipeline is the request pipeline, passed to OpenBucket.
	Pipeline pipeline.Pipeline

	// Options holds the bucket options, passed to OpenBucket.
	Options Options

	// AllowURLOverrides permits the fields above to be overridden in the
	// URL using the following query parameters:
	//
	//   cred_path: path to a credentials file in JSON format. The
	//   AccountName field must be specified, and either AccountKey or
	//   SASToken.
	//
	// Example credentials file using AccountKey:
	//
	//   {
	//     "AccountName": "STORAGE ACCOUNT NAME",
	//     "AccountKey": "PRIMARY OR SECONDARY ACCOUNT KEY"
	//   }
	//
	// Example credentials file using SASToken:
	//
	//   {
	//     "AccountName": "STORAGE ACCOUNT NAME",
	//     "SASToken": "ENTER YOUR AZURE STORAGE SAS TOKEN"
	//   }
	//
	// If AllowURLOverrides is true, cred_path is not given, and AccountName
	// is empty, the AZURE_STORAGE_ACCOUNT environment variable will be used.
	// Similarly, Pipeline, Options.Credential, and Options.SASToken will be
	// derived from the environment variables AZURE_STORAGE_KEY and
	// AZURE_STORAGE_SAS_TOKEN if AllowURLOverrides is true and cred_path is
	// not given.
	AllowURLOverrides bool
}

// OpenBucketURL opens the Azure Storage Account Container with the same name
// as the host in the URL. Query parameters are resolved against o (applying
// any permitted overrides) before the bucket is opened.
func (o *URLOpener) OpenBucketURL(ctx context.Context, u *url.URL) (*blob.Bucket, error) {
	resolved, err := o.forParams(ctx, u.Query())
	if err != nil {
		return nil, fmt.Errorf("open bucket %v: %v", u, err)
	}
	return OpenBucket(ctx, resolved.Pipeline, resolved.AccountName, u.Host, &resolved.Options)
}

func openURL(ctx context.Context, u *url.URL) (driver.Bucket, error) {
type AzureCreds struct {
AccountName AccountName
AccountKey AccountKey
SASToken SASToken
func (o *URLOpener) forParams(ctx context.Context, q url.Values) (*URLOpener, error) {
for k := range q {
if !o.AllowURLOverrides || k != "cred_path" {
return nil, fmt.Errorf("unknown Azure query parameter %s", k)
}
}
ac := AzureCreds{}
if credPath := u.Query()["cred_path"]; len(credPath) > 0 {
f, err := ioutil.ReadFile(credPath[0])
if err != nil {
return nil, err
if !o.AllowURLOverrides {
return o, nil
}

o2 := new(URLOpener)
*o2 = *o
credPath := q.Get("cred_path")
if credPath == "" {
// Use default credential info from the environment for missing fields.
// Ignore errors from environment, as we'll get errors from OpenBucket later.
if o2.AccountName == "" {
o2.AccountName, _ = DefaultAccountName()
}
err = json.Unmarshal(f, &ac)
if err != nil {
return nil, err
if o2.Options.SASToken == "" {
o2.Options.SASToken, _ = DefaultSASToken()
}
} else {
// Use default credential info from the environment.
// Ignore errors, as we'll get errors from OpenBucket later.
ac.AccountName, _ = DefaultAccountName()
ac.AccountKey, _ = DefaultAccountKey()
ac.SASToken, _ = DefaultSASToken()
}

// azblob.Credential is an interface; we will use either a SharedKeyCredential
// or anonymous credentials. If the former, we will also fill in
// Options.Credential so that SignedURL will work.
var credential azblob.Credential
var sharedKeyCred *azblob.SharedKeyCredential
if ac.AccountKey != "" {
var err error
sharedKeyCred, err = NewCredential(ac.AccountName, ac.AccountKey)
if err != nil {
return nil, err
if o2.Pipeline == nil || o2.Options.Credential == nil {
accountKey, _ := DefaultAccountKey()
pipeline, cred, err := credFileToOptions(o2.AccountName, accountKey, o2.Options.SASToken)
if err != nil {
return nil, err
}
if o2.Pipeline == nil {
o2.Pipeline = pipeline
}
if o2.Options.Credential == nil {
o2.Options.Credential = cred
}
}
credential = sharedKeyCred
} else {
credential = azblob.NewAnonymousCredential()
return o2, nil
}
var ac struct {
AccountName AccountName
AccountKey AccountKey
SASToken SASToken
}
f, err := ioutil.ReadFile(credPath)
if err != nil {
return nil, err
}
if err := json.Unmarshal(f, &ac); err != nil {
return nil, err
}
pipeline := NewPipeline(credential, azblob.PipelineOptions{})
return openBucket(ctx, pipeline, ac.AccountName, u.Host, &Options{
Credential: sharedKeyCred,
SASToken: ac.SASToken,
})
o2.AccountName = ac.AccountName
o2.Options.SASToken = ac.SASToken
o2.Pipeline, o2.Options.Credential, err = credFileToOptions(ac.AccountName, ac.AccountKey, ac.SASToken)
if err != nil {
return nil, err
}
return o2, nil
}

// credFileToOptions builds a request pipeline and, when an account key is
// present, a shared-key credential from the given credential fields.
//
// If key is empty, an anonymous credential is used and the returned
// *azblob.SharedKeyCredential is nil.
//
// NOTE(review): the token parameter is not used in this body; callers
// appear to apply SAS tokens separately via Options.SASToken — confirm
// whether the parameter is needed.
func credFileToOptions(account AccountName, key AccountKey, token SASToken) (pipeline.Pipeline, *azblob.SharedKeyCredential, error) {
	if key == "" {
		cred := azblob.NewAnonymousCredential()
		return NewPipeline(cred, azblob.PipelineOptions{}), nil, nil
	}
	cred, err := NewCredential(account, key)
	if err != nil {
		return nil, nil, err
	}
	return NewPipeline(cred, azblob.PipelineOptions{}), cred, nil
}

// DefaultIdentity is a Wire provider set that provides an Azure storage
Expand Down Expand Up @@ -196,7 +235,9 @@ type AccountName string
type AccountKey string

// SASToken is an Azure shared access signature.
// https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1
//
// See https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1
// for more details.
type SASToken string

// DefaultAccountName loads the Azure storage account name from the
Expand Down
51 changes: 18 additions & 33 deletions blob/azureblob/azureblob_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -297,66 +297,51 @@ func TestOpenURL(t *testing.T) {
defer os.Remove(sasFile.Name())

tests := []struct {
url string
wantName string
wantErr bool
name string
query url.Values
wantErr bool
// If we use a key, we should get a non-nil *Credentials in Options.
wantCreds bool
}{
{
url: "azblob://mybucket?cred_path=" + keyFile.Name(),
wantName: "mybucket",
name: "AccountKey",
query: url.Values{"cred_path": {keyFile.Name()}},
wantCreds: true,
},
{
url: "azblob://mybucket2?cred_path=" + keyFile.Name(),
wantName: "mybucket2",
wantCreds: true,
},
{
url: "azblob://mybucket?cred_path=" + sasFile.Name(),
wantName: "mybucket",
},
{
url: "azblob://mybucket2?cred_path=" + sasFile.Name(),
wantName: "mybucket2",
name: "SASToken",
query: url.Values{"cred_path": {sasFile.Name()}},
wantCreds: false,
},
{
url: "azblob://foo?cred_path=" + badJSONFile.Name(),
name: "BadJSON",
query: url.Values{"cred_path": {badJSONFile.Name()}},
wantErr: true,
},
{
url: "azblob://foo?cred_path=" + badKeyFile.Name(),
name: "BadKey",
query: url.Values{"cred_path": {badKeyFile.Name()}},
wantErr: true,
},
{
url: "azblob://foo?cred_path=/path/does/not/exist",
name: "MissingFile",
query: url.Values{"cred_path": {"/path/does/not/exist"}},
wantErr: true,
},
}

ctx := context.Background()
opener := &URLOpener{AllowURLOverrides: true}
for _, test := range tests {
t.Run(test.url, func(t *testing.T) {
u, err := url.Parse(test.url)
if err != nil {
t.Fatal(err)
}
got, err := openURL(ctx, u)
t.Run(test.name, func(t *testing.T) {
got, err := opener.forParams(ctx, test.query)
if (err != nil) != test.wantErr {
t.Errorf("got err %v want error %v", err, test.wantErr)
}
if err != nil {
return
}
gotB, ok := got.(*bucket)
if !ok {
t.Fatalf("got type %T want *bucket", got)
}
if gotB.name != test.wantName {
t.Errorf("got bucket name %q want %q", gotB.name, test.wantName)
}
if gotCreds := (gotB.opts.Credential != nil); gotCreds != test.wantCreds {
if gotCreds := (got.Options.Credential != nil); gotCreds != test.wantCreds {
t.Errorf("got creds? %v want %v", gotCreds, test.wantCreds)
}
})
Expand Down
Loading

0 comments on commit b850cab

Please sign in to comment.