diff --git a/.gitignore b/.gitignore
index 686957820..4b921bc5f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,6 +4,7 @@ examples/terraform.tfstate
examples/terraform.tfstate.backup
test/
+.vscode/
log.txt
terraform-provider-outscale
*.log
diff --git a/GNUmakefile b/GNUmakefile
index c9eacb451..791210c0b 100644
--- a/GNUmakefile
+++ b/GNUmakefile
@@ -1,47 +1,64 @@
-TEST?=$$(go list ./... |grep -v 'vendor')
+TEST?=./...
GOFMT_FILES?=$$(find . -name '*.go' |grep -v vendor)
+PKG_NAME=outscale
+WEBSITE_REPO=github.com/hashicorp/terraform-website
default: build
build: fmtcheck
go install
+
test: fmtcheck
- go test -i $(TEST) || exit 1
- echo $(TEST) | \
- xargs -t -n4 go test $(TESTARGS) -timeout=30s -parallel=4
+ go test $(TEST) -timeout=30s -parallel=4
testacc: fmtcheck
- TF_ACC=1 go test $(TEST) -v $(TESTARGS) -timeout 120m
-
-vet:
- @echo "go vet ."
- @go vet $$(go list ./... | grep -v vendor/) ; if [ $$? -eq 1 ]; then \
- echo ""; \
- echo "Vet found suspicious constructs. Please check the reported constructs"; \
- echo "and fix them if necessary before submitting the code for review."; \
- exit 1; \
- fi
+ TF_ACC=1 go test $(TEST) -v -parallel 20 $(TESTARGS) -timeout 120m -cover
fmt:
- gofmt -w $(GOFMT_FILES)
+ @echo "==> Fixing source code with gofmt..."
+ gofmt -s -w ./main.go
+ gofmt -s -w ./$(PKG_NAME)
+# Currently required by tf-deploy compile
fmtcheck:
@sh -c "'$(CURDIR)/scripts/gofmtcheck.sh'"
-errcheck:
- @sh -c "'$(CURDIR)/scripts/errcheck.sh'"
+websitefmtcheck:
+ @sh -c "'$(CURDIR)/scripts/websitefmtcheck.sh'"
-vendor-status:
- @govendor status
+lint:
+ @echo "==> Checking source code against linters..."
+ @GOGC=30 golangci-lint run ./$(PKG_NAME) --deadline=30m
+
+tools:
+ GO111MODULE=off go get -u github.com/client9/misspell/cmd/misspell
+ GO111MODULE=off go get -u github.com/golangci/golangci-lint/cmd/golangci-lint
test-compile:
@if [ "$(TEST)" = "./..." ]; then \
echo "ERROR: Set TEST to a specific package. For example,"; \
- echo " make test-compile TEST=./aws"; \
+ echo " make test-compile TEST=./$(PKG_NAME)"; \
exit 1; \
fi
go test -c $(TEST) $(TESTARGS)
-.PHONY: build test testacc vet fmt fmtcheck errcheck vendor-status test-compile
+website:
+ifeq (,$(wildcard $(GOPATH)/src/$(WEBSITE_REPO)))
+ echo "$(WEBSITE_REPO) not found in your GOPATH (necessary for layouts and assets), getting..."
+ git clone https://$(WEBSITE_REPO) $(GOPATH)/src/$(WEBSITE_REPO)
+endif
+ @$(MAKE) -C $(GOPATH)/src/$(WEBSITE_REPO) website-provider PROVIDER_PATH=$(shell pwd) PROVIDER_NAME=$(PKG_NAME)
+
+website-lint:
+ @echo "==> Checking website against linters..."
+ @misspell -error -source=text website/
+
+website-test:
+ifeq (,$(wildcard $(GOPATH)/src/$(WEBSITE_REPO)))
+ echo "$(WEBSITE_REPO) not found in your GOPATH (necessary for layouts and assets), getting..."
+ git clone https://$(WEBSITE_REPO) $(GOPATH)/src/$(WEBSITE_REPO)
+endif
+ @$(MAKE) -C $(GOPATH)/src/$(WEBSITE_REPO) website-provider-test PROVIDER_PATH=$(shell pwd) PROVIDER_NAME=$(PKG_NAME)
+.PHONY: build test testacc fmt fmtcheck lint tools test-compile website website-lint website-test
diff --git a/Gopkg.lock b/Gopkg.lock
deleted file mode 100644
index d105f4589..000000000
--- a/Gopkg.lock
+++ /dev/null
@@ -1,292 +0,0 @@
-# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
-
-
-[[projects]]
- name = "github.com/agext/levenshtein"
- packages = ["."]
- revision = "5f10fee965225ac1eecdc234c09daf5cd9e7f7b6"
- version = "v1.2.1"
-
-[[projects]]
- branch = "master"
- name = "github.com/apparentlymart/go-cidr"
- packages = ["cidr"]
- revision = "2bd8b58cf4275aeb086ade613de226773e29e853"
-
-[[projects]]
- branch = "master"
- name = "github.com/apparentlymart/go-textseg"
- packages = ["textseg"]
- revision = "b836f5c4d331d1945a2fead7188db25432d73b69"
-
-[[projects]]
- branch = "master"
- name = "github.com/armon/go-radix"
- packages = ["."]
- revision = "1fca145dffbcaa8fe914309b1ec0cfc67500fe61"
-
-[[projects]]
- name = "github.com/aws/aws-sdk-go"
- packages = ["aws","aws/awserr","aws/awsutil","aws/client","aws/client/metadata","aws/corehandlers","aws/credentials","aws/credentials/ec2rolecreds","aws/credentials/endpointcreds","aws/credentials/stscreds","aws/defaults","aws/ec2metadata","aws/endpoints","aws/request","aws/session","aws/signer/v4","internal/shareddefaults","private/protocol","private/protocol/query","private/protocol/query/queryutil","private/protocol/rest","private/protocol/restxml","private/protocol/xml/xmlutil","service/s3","service/sts"]
- revision = "1b176c5c6b57adb03bb982c21930e708ebca5a77"
- version = "v1.12.70"
-
-[[projects]]
- branch = "master"
- name = "github.com/bgentry/go-netrc"
- packages = ["netrc"]
- revision = "9fd32a8b3d3d3f9d43c341bfe098430e07609480"
-
-[[projects]]
- name = "github.com/bgentry/speakeasy"
- packages = ["."]
- revision = "4aabc24848ce5fd31929f7d1e4ea74d3709c14cd"
- version = "v0.1.0"
-
-[[projects]]
- name = "github.com/blang/semver"
- packages = ["."]
- revision = "2ee87856327ba09384cabd113bc6b5d174e9ec0f"
- version = "v3.5.1"
-
-[[projects]]
- name = "github.com/davecgh/go-spew"
- packages = ["spew"]
- revision = "346938d642f2ec3594ed81d874461961cd0faa76"
- version = "v1.1.0"
-
-[[projects]]
- name = "github.com/go-ini/ini"
- packages = ["."]
- revision = "32e4c1e6bc4e7d0d8451aa6b75200d19e37a536a"
- version = "v1.32.0"
-
-[[projects]]
- name = "github.com/golang/protobuf"
- packages = ["proto","ptypes","ptypes/any","ptypes/duration","ptypes/timestamp"]
- revision = "925541529c1fa6821df4e44ce2723319eb2be768"
- version = "v1.0.0"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/errwrap"
- packages = ["."]
- revision = "7554cd9344cec97297fa6649b055a8c98c2a1e55"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/go-cleanhttp"
- packages = ["."]
- revision = "d5fe4b57a186c716b0e00b8c301cbd9b4182694d"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/go-getter"
- packages = [".","helper/url"]
- revision = "285374cdfad63de2c43d7562f49ced6dde5a7ba0"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/go-hclog"
- packages = ["."]
- revision = "5bcb0f17e36442247290887cc914a6e507afa5c4"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/go-multierror"
- packages = ["."]
- revision = "b7773ae218740a7be65057fc60b366a49b538a44"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/go-plugin"
- packages = ["."]
- revision = "e53f54cbf51efde642d4711313e829a1ff0c236d"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/go-uuid"
- packages = ["."]
- revision = "64130c7a86d732268a38cb04cfbaf0cc987fda98"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/go-version"
- packages = ["."]
- revision = "4fe82ae3040f80a03d04d2cccb5606a626b8e1ee"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/hcl"
- packages = [".","hcl/ast","hcl/parser","hcl/scanner","hcl/strconv","hcl/token","json/parser","json/scanner","json/token"]
- revision = "23c074d0eceb2b8a5bfdbb271ab780cde70f05a8"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/hcl2"
- packages = ["gohcl","hcl","hcl/hclsyntax","hcl/json","hcldec","hclparse"]
- revision = "9f91684a1f1714c19f6d84313454f70b3773a300"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/hil"
- packages = [".","ast","parser","scanner"]
- revision = "fa9f258a92500514cc8e9c67020487709df92432"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/logutils"
- packages = ["."]
- revision = "0dc08b1671f34c4250ce212759ebd880f743d883"
-
-[[projects]]
- name = "github.com/hashicorp/terraform"
- packages = ["config","config/configschema","config/hcl2shim","config/module","dag","flatmap","helper/acctest","helper/config","helper/hashcode","helper/hilmapstructure","helper/logging","helper/mutexkv","helper/resource","helper/schema","moduledeps","plugin","plugin/discovery","registry","registry/regsrc","registry/response","svchost","svchost/auth","svchost/disco","terraform","tfdiags","version"]
- revision = "3802b14260603f90c7a1faf55994dcc8933e2069"
- version = "v0.11.3"
-
-[[projects]]
- branch = "master"
- name = "github.com/hashicorp/yamux"
- packages = ["."]
- revision = "683f49123a33db61abfb241b7ac5e4af4dc54d55"
-
-[[projects]]
- name = "github.com/jmespath/go-jmespath"
- packages = ["."]
- revision = "0b12d6b5"
-
-[[projects]]
- name = "github.com/mattn/go-isatty"
- packages = ["."]
- revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39"
- version = "v0.0.3"
-
-[[projects]]
- branch = "master"
- name = "github.com/mitchellh/cli"
- packages = ["."]
- revision = "518dc677a1e1222682f4e7db06721942cb8e9e4c"
-
-[[projects]]
- branch = "master"
- name = "github.com/mitchellh/copystructure"
- packages = ["."]
- revision = "d23ffcb85de31694d6ccaa23ccb4a03e55c1303f"
-
-[[projects]]
- branch = "master"
- name = "github.com/mitchellh/go-homedir"
- packages = ["."]
- revision = "b8bc1bf767474819792c23f32d8286a45736f1c6"
-
-[[projects]]
- branch = "master"
- name = "github.com/mitchellh/go-testing-interface"
- packages = ["."]
- revision = "a61a99592b77c9ba629d254a693acffaeb4b7e28"
-
-[[projects]]
- branch = "master"
- name = "github.com/mitchellh/go-wordwrap"
- packages = ["."]
- revision = "ad45545899c7b13c020ea92b2072220eefad42b8"
-
-[[projects]]
- branch = "master"
- name = "github.com/mitchellh/hashstructure"
- packages = ["."]
- revision = "2bca23e0e452137f789efbc8610126fd8b94f73b"
-
-[[projects]]
- branch = "master"
- name = "github.com/mitchellh/mapstructure"
- packages = ["."]
- revision = "b4575eea38cca1123ec2dc90c26529b5c5acfcff"
-
-[[projects]]
- branch = "master"
- name = "github.com/mitchellh/reflectwalk"
- packages = ["."]
- revision = "63d60e9d0dbc60cf9164e6510889b0db6683d98c"
-
-[[projects]]
- name = "github.com/oklog/run"
- packages = ["."]
- revision = "4dadeb3030eda0273a12382bb2348ffc7c9d1a39"
- version = "v1.0.0"
-
-[[projects]]
- name = "github.com/posener/complete"
- packages = [".","cmd","cmd/install","match"]
- revision = "dc2bc5a81accba8782bebea28628224643a8286a"
- version = "v1.1"
-
-[[projects]]
- name = "github.com/satori/go.uuid"
- packages = ["."]
- revision = "f58768cc1a7a7e77a3bd49e98cdd21419399b6a3"
- version = "v1.2.0"
-
-[[projects]]
- name = "github.com/ulikunitz/xz"
- packages = [".","internal/hash","internal/xlog","lzma"]
- revision = "0c6b41e72360850ca4f98dc341fd999726ea007f"
- version = "v0.5.4"
-
-[[projects]]
- branch = "master"
- name = "github.com/zclconf/go-cty"
- packages = ["cty","cty/convert","cty/function","cty/function/stdlib","cty/gocty","cty/json","cty/set"]
- revision = "709e4033eeb037dc543dbc2048065dfb814ce316"
-
-[[projects]]
- branch = "master"
- name = "golang.org/x/crypto"
- packages = ["bcrypt","blowfish","cast5","curve25519","ed25519","ed25519/internal/edwards25519","internal/chacha20","openpgp","openpgp/armor","openpgp/elgamal","openpgp/errors","openpgp/packet","openpgp/s2k","poly1305","ssh"]
- revision = "1875d0a70c90e57f11972aefd42276df65e895b9"
-
-[[projects]]
- branch = "master"
- name = "golang.org/x/net"
- packages = ["context","html","html/atom","http2","http2/hpack","idna","internal/timeseries","lex/httplex","trace"]
- revision = "2fb46b16b8dda405028c50f7c7f0f9dd1fa6bfb1"
-
-[[projects]]
- branch = "master"
- name = "golang.org/x/sys"
- packages = ["unix"]
- revision = "37707fdb30a5b38865cfb95e5aab41707daec7fd"
-
-[[projects]]
- branch = "master"
- name = "golang.org/x/text"
- packages = ["collate","collate/build","internal/colltab","internal/gen","internal/tag","internal/triegen","internal/ucd","language","secure/bidirule","transform","unicode/bidi","unicode/cldr","unicode/norm","unicode/rangetable"]
- revision = "e19ae1496984b1c655b8044a65c0300a3c878dd3"
-
-[[projects]]
- branch = "master"
- name = "google.golang.org/genproto"
- packages = ["googleapis/rpc/status"]
- revision = "4eb30f4778eed4c258ba66527a0d4f9ec8a36c45"
-
-[[projects]]
- name = "google.golang.org/grpc"
- packages = [".","balancer","balancer/base","balancer/roundrobin","codes","connectivity","credentials","encoding","grpclb/grpc_lb_v1/messages","grpclog","health","health/grpc_health_v1","internal","keepalive","metadata","naming","peer","resolver","resolver/dns","resolver/passthrough","stats","status","tap","transport"]
- revision = "6b51017f791ae1cfbec89c52efdf444b13b550ef"
- version = "v1.9.2"
-
-[solve-meta]
- analyzer-name = "dep"
- analyzer-version = 1
- inputs-digest = "194b3107506bdd00f0576a32d6fce74b19f6b4b26a48232e01e31e58689d21e6"
- solver-name = "gps-cdcl"
- solver-version = 1
-
-[[projects]]
- digest = "1:08d65904057412fc0270fc4812a1c90c594186819243160dc779a402d4b6d0bc"
- name = "github.com/spf13/cast"
- packages = ["."]
- pruneopts = "UT"
- revision = "8c9545af88b134710ab1cd196795e7f2388358d7"
- version = "v1.3.0"
diff --git a/Gopkg.toml b/Gopkg.toml
deleted file mode 100644
index 0374c57bd..000000000
--- a/Gopkg.toml
+++ /dev/null
@@ -1,42 +0,0 @@
-# Gopkg.toml example
-#
-# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
-# for detailed Gopkg.toml documentation.
-#
-# required = ["github.com/user/thing/cmd/thing"]
-# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
-#
-# [[constraint]]
-# name = "github.com/user/project"
-# version = "1.0.0"
-#
-# [[constraint]]
-# name = "github.com/user/project2"
-# branch = "dev"
-# source = "github.com/myfork/project2"
-#
-# [[override]]
-# name = "github.com/x/y"
-# version = "2.4.0"
-#
-# [prune]
-# non-go = false
-# go-tests = true
-# unused-packages = true
-
-
-[[constraint]]
- name = "github.com/aws/aws-sdk-go"
- version = "1.12.70"
-
-[[constraint]]
- name = "github.com/hashicorp/terraform"
- version = "0.11.3"
-
-[prune]
- go-tests = true
- unused-packages = true
-
-[[constraint]]
- name = "github.com/spf13/cast"
- version = "1.3.0"
diff --git a/README.md b/README.md
index a99182db5..d4de4755c 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@ Terraform Provider
- [![Gitter chat](https://badges.gitter.im/hashicorp-terraform/Lobby.png)](https://gitter.im/hashicorp-terraform/Lobby)
- Mailing list: [Google Groups](http://groups.google.com/group/terraform-tool)
-
+
Requirements
------------
@@ -20,9 +20,9 @@ Installing The Provider on Linux
Download the binary and install it in ~/.terraform.d/plugins/linux_amd64/.
```sh
-$ wget https://github.com/outscale-dev/terraform-provider-outscale/releases/download/release-0.1.0RC6.1/terraform-provider-outscale_linux_amd64_v0.1.0-rc6.1.zip
-$ unzip terraform-provider-outscale_linux_amd64_v0.1.0-rc6.1.zip
-$ mv terraform-provider-outscale_v0.1.0-rc6.1 ~/.terraform.d/plugins/linux_amd64/.
+$ wget https://github.com/outscale-dev/terraform-provider-outscale/releases/download/release-0.1.0RC7/terraform-provider-outscale_linux_amd64_v0.1.0-rc7.zip
+$ unzip terraform-provider-outscale_linux_amd64_v0.1.0-rc7.zip
+$ mv terraform-provider-outscale_v0.1.0-rc7 ~/.terraform.d/plugins/linux_amd64/.
```
Installing The Provider on MacOs
@@ -31,9 +31,9 @@ Installing The Provider on MacOs
Download the binary and install it in ~/.terraform/plugins/darwin_amd64/.
```sh
-$ wget https://github.com/outscale-dev/terraform-provider-outscale/releases/download/release-0.1.0RC6.1/terraform-provider-outscale_darwin_amd64_v0.1.0-rc6.1.zip
-$ unzip terraform-provider-outscale_darwin_amd64_v0.1.0-rc6.1.zip
-$ mv terraform-provider-outscale_v0.1.0-rc6.1 ~/.terraform.d/plugins/darwin_amd64/.
+$ wget https://github.com/outscale-dev/terraform-provider-outscale/releases/download/release-0.1.0RC7/terraform-provider-outscale_darwin_amd64_v0.1.0-rc7.zip
+$ unzip terraform-provider-outscale_darwin_amd64_v0.1.0-rc7.zip
+$ mv terraform-provider-outscale_v0.1.0-rc7 ~/.terraform.d/plugins/darwin_amd64/.
```
Building The Provider
@@ -43,14 +43,14 @@ Clone repository to: `$GOPATH/src/github.com/terraform-providers/terraform-provi
```sh
$ mkdir -p $GOPATH/src/github.com/terraform-providers; cd $GOPATH/src/github.com/terraform-providers
-$ git clone --branch release-0.1.0RC6.1 https://github.com/outscale-dev/terraform-provider-outscale
+$ git clone --branch release-0.1.0RC7 https://github.com/outscale-dev/terraform-provider-outscale
```
Enter the provider directory and build the provider
```sh
$ cd $GOPATH/src/github.com/terraform-providers/terraform-provider-outscale
-$ go build -o terraform-provider-outscale_v0.1.0-rc6.1
+$ go build -o terraform-provider-outscale_v0.1.0-rc7
```
Using the provider
@@ -59,7 +59,7 @@ Using the provider
2. Move the plugin to the repository ~/.terraform.d/plugins/linux_amd64/.
```shell
- $ mv terraform-provider-outscale_v0.1.0-rc6.1 ~/.terraform.d/plugins/linux_amd64/.
+ $ mv terraform-provider-outscale_v0.1.0-rc7 ~/.terraform.d/plugins/linux_amd64/.
```
3. Execute `terraform plan`
@@ -89,6 +89,15 @@ In order to run the full suite of Acceptance tests, run `make testacc`.
*Note:* Acceptance tests create real resources, and often cost money to run.
+*Note:* The following environment variables must be set before running the Acceptance Tests:
+
+```sh
+$ export OUTSCALE_IMAGEID="ami-xxxxxxxx" # e.g. "ami-4a7bf2b3"
+$ export OUTSCALE_ACCESSKEYID="" # e.g. "XXXXXXXXXXXXXXXXXXXX"
+$ export OUTSCALE_SECRETKEYID="" # e.g. "YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY"
+$ export OUTSCALE_REGION="" # e.g. "eu-west-2"
+```
+
```sh
$ make testacc
```
diff --git a/examples/main.tf b/examples/main.tf
index 0426f6bce..bf13911f3 100644
--- a/examples/main.tf
+++ b/examples/main.tf
@@ -297,27 +297,11 @@ resource "outscale_subnet" "subnet019" {
resource "outscale_public_ip" "public_ip019" {
}
-resource "outscale_nat_service" "nat_service019" {
- depends_on = [outscale_route.route019]
- subnet_id = outscale_subnet.subnet019.subnet_id
- public_ip_id = outscale_public_ip.public_ip019.public_ip_id
- tags {
- key = "Natservice"
- value = "019"
- }
-
-}
resource "outscale_route_table" "route_table019" {
net_id = outscale_net.net019.net_id
}
-resource "outscale_route" "route019" {
- destination_ip_range = "0.0.0.0/0"
- gateway_id = outscale_internet_service.internet_service019.internet_service_id
- route_table_id = outscale_route_table.route_table019.route_table_id
-}
-
resource "outscale_route_table_link" "route_table_link019" {
subnet_id = outscale_subnet.subnet019.subnet_id
route_table_id = outscale_route_table.route_table019.id
@@ -331,6 +315,22 @@ resource "outscale_internet_service_link" "internet_service_link019" {
internet_service_id = outscale_internet_service.internet_service019.id
}
+resource "outscale_route" "route019" {
+ destination_ip_range = "0.0.0.0/0"
+ gateway_id = outscale_internet_service.internet_service019.internet_service_id
+ route_table_id = outscale_route_table.route_table019.route_table_id
+}
+
+resource "outscale_nat_service" "nat_service019" {
+ depends_on = [outscale_route.route019]
+ subnet_id = outscale_subnet.subnet019.subnet_id
+ public_ip_id = outscale_public_ip.public_ip019.public_ip_id
+ tags {
+ key = "Natservice"
+ value = "019"
+ }
+}
+
data "outscale_nat_service" "nat_service019" {
filter {
name = "nat_service_ids"
@@ -665,7 +665,6 @@ resource "outscale_nic_private_ip" "nic_private_ip35" {
private_ips = ["10.0.0.67"]
}
-
#------------------------------------------------------------------------
#---036------------------------------------------------------------------
@@ -698,4 +697,79 @@ resource "outscale_nic_link" "nic_link036" {
}
+#-------------------------
+#---037------------------------------------------------------------------
+resource "outscale_vm" "outscale_vm37" {
+ image_id = var.image_id
+ vm_type = var.vm_type
+ keypair_name = var.keypair_name
+ block_device_mappings {
+ device_name = "/dev/sda1" # resize the boot disk volume
+ bsu = {
+ volume_size = "100"
+ volume_type = "gp2"
+ delete_on_vm_deletion = "true"
+ }
+ }
+ block_device_mappings {
+ device_name = "/dev/sdb"
+ bsu = {
+ volume_size = 30
+ volume_type = "io1"
+ iops = 150
+ snapshot_id = var.snapshot_id
+ delete_on_vm_deletion = false
+ }
+ }
+ tags {
+ key = "name"
+ value = "VM with multiple Block Device Mappings"
+ }
+}
+
+#-------------------------
+
+#---038------------------------------------------------------------------
+
+resource "outscale_net" "outscale_net38" {
+ ip_range = "10.0.0.0/16"
+}
+
+resource "outscale_subnet" "outscale_subnet38" {
+ net_id = outscale_net.outscale_net38.net_id
+ ip_range = "10.0.0.0/24"
+ subregion_name = "${var.region}a"
+}
+
+resource "outscale_security_group" "outscale_security_group38" {
+ description = "test vm with nic"
+ security_group_name = "private-sg-1"
+ net_id = outscale_net.outscale_net38.net_id
+}
+
+
+resource "outscale_nic" "outscale_nic38" {
+ subnet_id = outscale_subnet.outscale_subnet38.subnet_id
+}
+
+resource "outscale_vm" "outscale_vm38" {
+ image_id = var.image_id
+ vm_type = "tinav4.c4r4p2"
+ keypair_name = var.keypair_name
+ nics {
+ subnet_id = outscale_subnet.outscale_subnet38.subnet_id
+ security_group_ids = [outscale_security_group.outscale_security_group38.security_group_id]
+ private_ips {
+ private_ip = "10.0.0.123"
+ is_primary = true
+ }
+ device_number = "0"
+ delete_on_vm_deletion = true
+ }
+ nics {
+ nic_id = outscale_nic.outscale_nic38.nic_id
+ device_number = "1"
+ }
+}
+
#-------------------------
\ No newline at end of file
diff --git a/go.mod b/go.mod
index ac2a55f9a..8bd8631c2 100644
--- a/go.mod
+++ b/go.mod
@@ -6,22 +6,18 @@ require (
github.com/antihax/optional v1.0.0
github.com/aws/aws-sdk-go v1.26.5
github.com/davecgh/go-spew v1.1.1
+ github.com/go-test/deep v1.0.3
github.com/hashicorp/errwrap v1.0.0
- github.com/hashicorp/go-hclog v0.9.2 // indirect
- github.com/hashicorp/go-plugin v1.0.1 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
- github.com/hashicorp/terraform v0.12.17
- github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d // indirect
+ github.com/hashicorp/terraform v0.12.20 // indirect
+ github.com/hashicorp/terraform-plugin-sdk v1.6.0
github.com/marinsalinas/osc-sdk-go v0.0.0-20191219184659-536ff9c4c254
github.com/mattn/go-colorable v0.1.4 // indirect
- github.com/mitchellh/reflectwalk v1.0.1 // indirect
github.com/openlyinc/pointy v1.1.2
- github.com/outscale/osc-go v0.0.9
+ github.com/outscale/osc-go v0.0.9 // indirect
github.com/posener/complete v1.2.3 // indirect
- github.com/spf13/afero v1.2.2 // indirect
github.com/spf13/cast v1.3.0
github.com/ulikunitz/xz v0.5.6 // indirect
- github.com/zclconf/go-cty v1.1.1 // indirect
go.opencensus.io v0.22.2 // indirect
golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413 // indirect
golang.org/x/net v0.0.0-20191126235420-ef20fe5d7933 // indirect
diff --git a/go.sum b/go.sum
index 19670e1c9..67839107c 100644
--- a/go.sum
+++ b/go.sum
@@ -33,6 +33,7 @@ github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki
github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE=
github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
github.com/agl/ed25519 v0.0.0-20150830182803-278e1ec8e8a6/go.mod h1:WPjqKcmVOxf0XSf3YxCJs6N6AOSrOx3obionmG7T0y0=
+github.com/agl/ed25519 v0.0.0-20170116200512-5312a6153412/go.mod h1:WPjqKcmVOxf0XSf3YxCJs6N6AOSrOx3obionmG7T0y0=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/aliyun/alibaba-cloud-sdk-go v0.0.0-20190329064014-6e358769c32a/go.mod h1:T9M45xf79ahXVelWoOBmH0y4aC1t5kXO5BxwyakgIGA=
@@ -45,9 +46,11 @@ github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kd
github.com/apparentlymart/go-cidr v1.0.1 h1:NmIwLZ/KdsjIUlhf+/Np40atNXm/+lZ5txfTJ/SpF+U=
github.com/apparentlymart/go-cidr v1.0.1/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc=
github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM=
+github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0 h1:MzVXffFUye+ZcSR6opIgz9Co7WcDx6ZcY+RjfFHoA0I=
github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM=
github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0=
github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk=
+github.com/apparentlymart/go-versions v0.0.2-0.20180815153302-64b99f7cb171/go.mod h1:JXY95WvQrPJQtudvNARshgWajS7jNNlM90altXIPNyI=
github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
@@ -56,8 +59,6 @@ github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgI
github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM=
github.com/aws/aws-sdk-go v1.16.22/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
-github.com/aws/aws-sdk-go v1.25.33 h1:8muvpP+Bq5e0CDkM9PDZ6tN74fVUq5v3zSCRaZ93ykM=
-github.com/aws/aws-sdk-go v1.25.33/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.26.5 h1:mm59kTX51f/tU8Bas3zfwdJirxVdeQ+FRTWMoTdYN0M=
github.com/aws/aws-sdk-go v1.26.5/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc=
@@ -71,7 +72,6 @@ github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnweb
github.com/bmatcuk/doublestar v1.1.5 h1:2bNwBOmhyFEFcoB3tGvTD5xanq+4kyOZlB8wFYbMjkk=
github.com/bmatcuk/doublestar v1.1.5/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE=
github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
-github.com/bsm/go-vlq v0.0.0-20150828105119-ec6e8d4f5f4e/go.mod h1:N+BjUcTjSxc2mtRGSCPsat1kze3CUtvJN3/jTXlp29k=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
@@ -95,15 +95,16 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
-github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
+github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20180513044358-24b0969c4cb7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6 h1:ZgQEtGgCBiWRM39fZuwSd1LwSqqSW0hOdXCYYDX0R3I=
@@ -118,6 +119,7 @@ github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y
github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
+github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
@@ -125,6 +127,7 @@ github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMyw
github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
+github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
@@ -142,7 +145,6 @@ github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgf
github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/hashicorp/aws-sdk-go-base v0.4.0/go.mod h1:eRhlz3c4nhqxFZJAahJEFL7gh6Jyj5rQmQc7F9eHFyQ=
github.com/hashicorp/consul v0.0.0-20171026175957-610f3c86a089/go.mod h1:mFrjN1mfidgJfYP1xrJCF+AfRhr6Eaqhb2+sfyn/OOI=
-github.com/hashicorp/errwrap v0.0.0-20180715044906-d6c0cd880357/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-azure-helpers v0.10.0/go.mod h1:YuAtHxm2v74s+IjQwUG88dHBJPd5jL+cXr5BGVzSKhE=
@@ -152,13 +154,14 @@ github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVo
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-getter v1.4.0 h1:ENHNi8494porjD0ZhIrjlAHnveSFhY7hvOJrV/fsKkw=
github.com/hashicorp/go-getter v1.4.0/go.mod h1:7qxyCd8rBfcShwsvxgIguu4KbS3l8bUCwg2Umn7RjeY=
+github.com/hashicorp/go-getter v1.4.2-0.20200106182914-9813cbd4eb02 h1:l1KB3bHVdvegcIf5upQ5mjcHjs2qsWnKh4Yr9xgIuu8=
+github.com/hashicorp/go-getter v1.4.2-0.20200106182914-9813cbd4eb02/go.mod h1:7qxyCd8rBfcShwsvxgIguu4KbS3l8bUCwg2Umn7RjeY=
github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI=
github.com/hashicorp/go-hclog v0.0.0-20181001195459-61d530d6c27f/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-immutable-radix v0.0.0-20180129170900-7f3cd4390caa/go.mod h1:6ij3Z20p+OhOkCSrA0gImAWoHYQRGbnlcuk6XYTiaRw=
github.com/hashicorp/go-msgpack v0.5.4/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
-github.com/hashicorp/go-multierror v0.0.0-20180717150148-3d5d8f294aa0/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I=
github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-plugin v1.0.1-0.20190610192547-a1bc61569a26/go.mod h1:++UyYGoz3o5w9ZzAdZxtQKrWWP+iqPBn3cQptSMzBuY=
@@ -184,26 +187,28 @@ github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/hcl/v2 v2.0.0 h1:efQznTz+ydmQXq3BOnRa3AXzvCeTq1P4dKj/z5GLlY8=
github.com/hashicorp/hcl/v2 v2.0.0/go.mod h1:oVVDG71tEinNGYCxinCYadcmKU9bglqW9pV3txagJ90=
-github.com/hashicorp/hcl2 v0.0.0-20190821123243-0c888d1241f6 h1:JImQpEeUQ+0DPFMaWzLA0GdUNPaUlCXLpfiqkSZBUfc=
-github.com/hashicorp/hcl2 v0.0.0-20190821123243-0c888d1241f6/go.mod h1:Cxv+IJLuBiEhQ7pBYGEuORa0nr4U994pE8mYLuFd7v0=
+github.com/hashicorp/hcl/v2 v2.3.0 h1:iRly8YaMwTBAKhn1Ybk7VSdzbnopghktCD031P8ggUE=
+github.com/hashicorp/hcl/v2 v2.3.0/go.mod h1:d+FwDBbOLvpAM3Z6J7gPj/VoAGkNe/gm352ZhjJ/Zv8=
github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590 h1:2yzhWGdgQUWZUCNK+AoO35V+HTsgEmcM4J9IkArh7PI=
github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590/go.mod h1:n2TSygSNwsLJ76m8qFXTSc7beTb+auJxYdqrnoqwZWE=
github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/memberlist v0.1.0/go.mod h1:ncdBp14cuox2iFOq3kDiquKU6fqsTBc3W6JvZwjxxsE=
github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb/go.mod h1:h/Ru6tmZazX7WO/GDmwdpS975F019L4t5ng5IgwbNrE=
-github.com/hashicorp/terraform v0.12.17 h1:U4gOJWeG1/ICNS4CHSTjFsM7hxOM1t+2GDTq2TDiCLg=
-github.com/hashicorp/terraform v0.12.17/go.mod h1:LQR1l9+qbkA0ZKujo+7dk3tSKhYmG5LQjogPW6DZblI=
-github.com/hashicorp/terraform-config-inspect v0.0.0-20190821133035-82a99dc22ef4 h1:fTkL0YwjohGyN7AqsDhz6bwcGBpT+xBqi3Qhpw58Juw=
-github.com/hashicorp/terraform-config-inspect v0.0.0-20190821133035-82a99dc22ef4/go.mod h1:JDmizlhaP5P0rYTTZB0reDMefAiJyfWPEtugV4in1oI=
+github.com/hashicorp/terraform v0.12.20 h1:TnuNh1PGqnJOQBThQjygCmy5zUxLtX9/VOBHvuiKYJM=
+github.com/hashicorp/terraform v0.12.20/go.mod h1:UtGvFLi7LjM1AdiOX3mY/3+2X0Fdu2rqISHNrEquWlg=
+github.com/hashicorp/terraform-config-inspect v0.0.0-20191115094559-17f92b0546e8 h1:+RyjwU+Gnd/aTJBPZVDNm903eXVjjqhbaR4Ypx3xYyY=
+github.com/hashicorp/terraform-config-inspect v0.0.0-20191115094559-17f92b0546e8/go.mod h1:p+ivJws3dpqbp1iP84+npOyAmTTOLMgCzrXd3GSdn/A=
+github.com/hashicorp/terraform-config-inspect v0.0.0-20191212124732-c6ae6269b9d7 h1:Pc5TCv9mbxFN6UVX0LH6CpQrdTM5YjbVI2w15237Pjk=
+github.com/hashicorp/terraform-config-inspect v0.0.0-20191212124732-c6ae6269b9d7/go.mod h1:p+ivJws3dpqbp1iP84+npOyAmTTOLMgCzrXd3GSdn/A=
+github.com/hashicorp/terraform-plugin-sdk v1.6.0 h1:Um5hsAL7kKsfTHtan8lybY/d03F2bHu4fjRB1H6Ag4U=
+github.com/hashicorp/terraform-plugin-sdk v1.6.0/go.mod h1:H5QLx/uhwfxBZ59Bc5SqT19M4i+fYt7LZjHTpbLZiAg=
github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596 h1:hjyO2JsNZUKT1ym+FAdlBEkGPevazYsmVgIMw7dVELg=
github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596/go.mod h1:kNDNcF7sN4DocDLBkQYz73HGKwN1ANB1blq4lIYLYvg=
github.com/hashicorp/vault v0.10.4/go.mod h1:KfSyffbKxoVyspOdlaGVjIuwLobi07qD1bAbosPMpP0=
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ=
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
-github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
-github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
@@ -217,20 +222,18 @@ github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALr
github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
+github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
+github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lusis/go-artifactory v0.0.0-20160115162124-7e4ce345df82/go.mod h1:y54tfGmO3NKssKveTEFFzH8C/akrSOy/iW9qEAUDV84=
-github.com/marinsalinas/osc-sdk-go v0.0.0-20191112221552-df1645cbd455 h1:ql34JGvnYQ11X0ugYmWH27K5BOTujHFsp3pWqpGEynQ=
-github.com/marinsalinas/osc-sdk-go v0.0.0-20191112221552-df1645cbd455/go.mod h1:xpSNBAUUEpws9LGoll2ActMr2dwDkPHT37QNN8P+9+U=
-github.com/marinsalinas/osc-sdk-go v0.0.0-20191219182908-d9de3ac3091d h1:Ji02dPPvr4TdJKHt1ykFNjSqO6UmpJcXa3XKK5HpSsc=
-github.com/marinsalinas/osc-sdk-go v0.0.0-20191219182908-d9de3ac3091d/go.mod h1:hqNixTCJE5MVsNR+zPpM4s/fiqES/ktSbD7dA+/ZhFs=
github.com/marinsalinas/osc-sdk-go v0.0.0-20191219184659-536ff9c4c254 h1:t7LhfJoS45oMl5FHMPlz/Y9rMOA/6i81cfVIiockRYM=
github.com/marinsalinas/osc-sdk-go v0.0.0-20191219184659-536ff9c4c254/go.mod h1:hqNixTCJE5MVsNR+zPpM4s/fiqES/ktSbD7dA+/ZhFs=
-github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho=
github.com/masterzen/simplexml v0.0.0-20160608183007-4572e39b1ab9/go.mod h1:kCEbxUJlNDEBNbdQMkPSp6yaKcRXVI6f4ddk8Riv4bc=
github.com/masterzen/winrm v0.0.0-20190223112901-5e5c9a7fe54b/go.mod h1:wr1VqkwW0AB5JS0QLy5GpVMS9E3VtRoSYXUYyVk46KY=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
@@ -277,15 +280,13 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW
github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U=
github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
-github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/openlyinc/pointy v1.1.2 h1:LywVV2BWC5Sp5v7FoP4bUD+2Yn5k0VNeRbU5vq9jUMY=
github.com/openlyinc/pointy v1.1.2/go.mod h1:w2Sytx+0FVuMKn37xpXIAyBNhFNBIJGR/v2m7ik1WtM=
github.com/outscale/osc-go v0.0.9 h1:fphLDN7RCt3HJ1jsS1k3HVOaquUS8luTfqKjePDVDc0=
github.com/outscale/osc-go v0.0.9/go.mod h1:SIe8mlyWE9XUYBUrsvaNjkrFxVuUB3rC4Naq3Uzh2O0=
github.com/packer-community/winrmcp v0.0.0-20180102160824-81144009af58/go.mod h1:f6Izs6JvFTdnRbziASagjZ2vmf55NSIkC/weStxCHqk=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
+github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
@@ -305,6 +306,7 @@ github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
+github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
@@ -338,8 +340,9 @@ github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q
github.com/xlab/treeprint v0.0.0-20161029104018-1d6e34225557/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg=
github.com/zclconf/go-cty v1.0.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s=
github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s=
-github.com/zclconf/go-cty v1.1.1 h1:Shl2p9Dat0cqJfXu0DZa+cOTRPhXQjK8IYWD6GVfiqo=
-github.com/zclconf/go-cty v1.1.1/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s=
+github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8=
+github.com/zclconf/go-cty v1.2.1 h1:vGMsygfmeCl4Xb6OA5U5XVAaQZ69FvoG7X2jUtQujb8=
+github.com/zclconf/go-cty v1.2.1/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8=
github.com/zclconf/go-cty-yaml v1.0.1 h1:up11wlgAaDvlAGENcFDnZgkn0qUJurso7k6EpURKNF8=
github.com/zclconf/go-cty-yaml v1.0.1/go.mod h1:IP3Ylp0wQpYm50IHK8OZWKMu6sPJIUgKa8XhiVHura0=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
@@ -356,6 +359,7 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk
golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413 h1:ULYEB3JvPRE/IfO+9uO7vKV/xzVTO7XPAwm8xbf4w2g=
golang.org/x/crypto v0.0.0-20191206172530-e9b2fee46413/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@@ -370,7 +374,6 @@ golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
-golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -379,7 +382,6 @@ golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
-golang.org/x/net v0.0.0-20190502183928-7f726cade0ab/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
@@ -399,7 +401,6 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -414,6 +415,7 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9 h1:ZBzSG/7F4eNKz2L3GE9o300RX0Az1Bw5HF7PDraD+qU=
golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -441,6 +443,7 @@ google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsb
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.6.1 h1:QzqyMA1tlu6CgqCDUtU9V+ZKhLFT2dkJuANu5QaxI3I=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
@@ -460,12 +463,11 @@ google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQ
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
-gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
-gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
@@ -475,5 +477,4 @@ honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
-howett.net/plist v0.0.0-20181124034731-591f970eefbb/go.mod h1:vMygbs4qMhSZSc4lCUl2OEE+rDiIIJAIdR4m7MiMcm0=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
diff --git a/main.go b/main.go
index 2e6ff78a2..99563f883 100644
--- a/main.go
+++ b/main.go
@@ -1,7 +1,7 @@
package main
import (
- "github.com/hashicorp/terraform/plugin"
+ "github.com/hashicorp/terraform-plugin-sdk/plugin"
"github.com/terraform-providers/terraform-provider-outscale/outscale"
)
diff --git a/osc/common/structs.go b/osc/common/structs.go
deleted file mode 100644
index 23a49a9c4..000000000
--- a/osc/common/structs.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package common
-
-type Tag struct {
- _ struct{} `type:"structure"`
-
- Key *string `locationName:"key" type:"string"`
-
- Value *string `locationName:"value" type:"string"`
-}
diff --git a/osc/fcu/fcu.go b/osc/fcu/fcu.go
deleted file mode 100644
index 04df62774..000000000
--- a/osc/fcu/fcu.go
+++ /dev/null
@@ -1,55 +0,0 @@
-package fcu
-
-import (
- "fmt"
- "net/http"
- "net/url"
-
- "github.com/aws/aws-sdk-go/aws/credentials"
- "github.com/aws/aws-sdk-go/aws/signer/v4"
- "github.com/terraform-providers/terraform-provider-outscale/osc"
- "github.com/terraform-providers/terraform-provider-outscale/osc/handler"
-)
-
-//FCU the name of the api for url building
-const FCU = "fcu"
-
-//Client manages the FCU API
-type Client struct {
- client *osc.Client
- VM VMService
-}
-
-// NewFCUClient return a client to operate FCU resources
-func NewFCUClient(config osc.Config) (*Client, error) {
-
- s := &v4.Signer{
- Credentials: credentials.NewStaticCredentials(config.Credentials.AccessKey,
- config.Credentials.SecretKey, ""),
- }
-
- u, err := url.Parse(fmt.Sprintf(osc.DefaultBaseURL, FCU, config.Credentials.Region))
- if err != nil {
- return nil, err
- }
-
- config.Target = FCU
- config.BaseURL = u
- config.UserAgent = osc.UserAgent
- config.Client = &http.Client{}
-
- c := osc.Client{
- Config: config,
- Signer: s,
- MarshalHander: handler.URLEncodeMarshalHander,
- BuildRequestHandler: handler.BuildURLEncodedRequest,
- UnmarshalHandler: handler.UnmarshalXML,
- UnmarshalErrorHandler: handler.UnmarshalErrorHandler,
- SetHeaders: handler.SetHeaders,
- }
-
- f := &Client{client: &c,
- VM: VMOperations{client: &c},
- }
- return f, nil
-}
diff --git a/osc/fcu/fcu_structs.go b/osc/fcu/fcu_structs.go
deleted file mode 100644
index 311f57f57..000000000
--- a/osc/fcu/fcu_structs.go
+++ /dev/null
@@ -1,10773 +0,0 @@
-package fcu
-
-import (
- "fmt"
- "time"
-
- "github.com/aws/aws-sdk-go/aws/awsutil"
- "github.com/aws/aws-sdk-go/aws/request"
-)
-
-const (
- //InstanceAttributeNameUserData represents user data attribute.
- InstanceAttributeNameUserData = "userData"
-)
-
-//DescribeInstancesInput ...
-type DescribeInstancesInput struct {
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
- InstanceIds []*string `locationName:"InstanceId" locationNameList:"InstanceId" type:"list"`
- MaxResults *int64 `locationName:"maxResults" type:"integer"`
- NextToken *string `locationName:"nextToken" type:"string"`
-}
-
-type Filter struct {
- Name *string `type:"string"`
- Values []*string `locationName:"Value" locationNameList:"item" type:"list"`
-}
-
-type DescribeInstancesOutput struct {
- _ struct{} `type:"structure"`
- NextToken *string `locationName:"nextToken" type:"string"`
- OwnerId *string `locationName:"ownerId" locationNameList:"item" type:"string"`
- RequestId *string `locationName:"requestId" type:"string"`
- ReservationId *string `locationName:"reservationId" locationNameList:"item" type:"string"`
- Reservations []*Reservation `locationName:"reservationSet" locationNameList:"item" type:"list"`
- GroupSet []*GroupIdentifier `locationName:"groupSet" locationNameList:"item" type:"list"`
-}
-
-type GroupIdentifier struct {
- _ struct{} `type:"structure"`
- GroupId *string `locationName:"groupId" type:"string"`
- GroupName *string `locationName:"groupName" type:"string"`
-}
-
-type Reservation struct {
- _ struct{} `type:"structure"`
- Groups []*GroupIdentifier `locationName:"groupSet" locationNameList:"item" type:"list"`
- Instances []*Instance `locationName:"instancesSet" locationNameList:"item" type:"list"`
- OwnerId *string `locationName:"ownerId" type:"string"`
- RequestId *string `locationName:"requestId" type:"string"`
- ReservationId *string `locationName:"reservationId" type:"string"`
-}
-
-type Instance struct {
- AmiLaunchIndex *int64 `locationName:"amiLaunchIndex" type:"integer"`
- Architecture *string `locationName:"architecture" type:"string" enum:"ArchitectureValues"`
- BlockDeviceMappings []*InstanceBlockDeviceMapping `locationName:"blockDeviceMapping" locationNameList:"item" type:"list"`
- ClientToken *string `locationName:"clientToken" type:"string"`
- DnsName *string `locationName:"dnsName" type:"string"`
- EbsOptimized *bool `locationName:"ebsOptimized" type:"boolean"`
- GroupSet []*GroupIdentifier `locationName:"groupSet" locationNameList:"item" type:"list"`
- Hypervisor *string `locationName:"hypervisor" type:"string" enum:"HypervisorType"`
- IamInstanceProfile *IamInstanceProfile `locationName:"iamInstanceProfile" type:"structure"`
- ImageId *string `locationName:"imageId" type:"string"`
- InstanceId *string `locationName:"instanceId" type:"string"`
- InstanceLifecycle *string `locationName:"instanceLifecycle" type:"string" enum:"InstanceLifecycleType"`
- InstanceState *InstanceState `locationName:"instanceState" type:"structure"`
- InstanceType *string `locationName:"instanceType" type:"string" enum:"InstanceType"`
- IpAddress *string `locationName:"ipAddress" type:"string"`
- KernelId *string `locationName:"kernelId" type:"string"`
- KeyName *string `locationName:"keyName" type:"string"`
- Monitoring *Monitoring `locationName:"monitoring" type:"structure"`
- NetworkInterfaces []*InstanceNetworkInterface `locationName:"networkInterfaceSet" locationNameList:"item" type:"list"`
- Placement *Placement `locationName:"placement" type:"structure"`
- Platform *string `locationName:"platform" type:"string" enum:"PlatformValues"`
- PrivateDnsName *string `locationName:"privateDnsName" type:"string"`
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
- ProductCodes []*ProductCode `locationName:"productCodes" locationNameList:"item" type:"list"`
- RamdiskId *string `locationName:"ramdiskId" type:"string"`
- Reason *string `locationName:"reason" type:"string"`
- RootDeviceName *string `locationName:"rootDeviceName" type:"string"`
- RootDeviceType *string `locationName:"rootDeviceType" type:"string" enum:"DeviceType"`
- SourceDestCheck *bool `locationName:"sourceDestCheck" type:"boolean"`
- SpotInstanceRequestId *string `locationName:"spotInstanceRequestId" type:"string"`
- SriovNetSupport *string `locationName:"sriovNetSupport" type:"string"`
- State *InstanceState `locationName:"instanceState" type:"structure"`
- StateReason *StateReason `locationName:"stateReason" type:"structure"`
- SubnetId *string `locationName:"subnetId" type:"string"`
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
- VirtualizationType *string `locationName:"virtualizationType" type:"string" enum:"VirtualizationType"`
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-type InstanceBlockDeviceMapping struct {
- DeviceName *string `locationName:"deviceName" type:"string"`
- Ebs *EbsInstanceBlockDevice `locationName:"ebs" type:"structure"`
-}
-
-type InstanceBlockDeviceMappingSpecification struct {
- _ struct{} `type:"structure"`
- DeviceName *string `locationName:"deviceName" type:"string"`
- Ebs *EbsInstanceBlockDeviceSpecification `locationName:"ebs" type:"structure"`
- NoDevice *string `locationName:"noDevice" type:"string"`
- VirtualName *string `locationName:"virtualName" type:"string"`
-}
-
-type InstanceCapacity struct {
- _ struct{} `type:"structure"`
- AvailableCapacity *int64 `locationName:"availableCapacity" type:"integer"`
- InstanceType *string `locationName:"instanceType" type:"string"`
- TotalCapacity *int64 `locationName:"totalCapacity" type:"integer"`
-}
-
-type InstanceCount struct {
- _ struct{} `type:"structure"`
- InstanceCount *int64 `locationName:"instanceCount" type:"integer"`
- State *string `locationName:"state" type:"string" enum:"ListingState"`
-}
-
-type InstanceExportDetails struct {
- _ struct{} `type:"structure"`
- InstanceId *string `locationName:"instanceId" type:"string"`
- TargetEnvironment *string `locationName:"targetEnvironment" type:"string" enum:"ExportEnvironment"`
-}
-
-type InstanceMonitoring struct {
- _ struct{} `type:"structure"`
- InstanceId *string `locationName:"instanceId" type:"string"`
- Monitoring *Monitoring `locationName:"monitoring" type:"structure"`
-}
-
-type InstanceNetworkInterface struct {
- Association *InstanceNetworkInterfaceAssociation `locationName:"association" type:"structure"`
- Attachment *InstanceNetworkInterfaceAttachment `locationName:"attachment" type:"structure"`
- Description *string `locationName:"description" type:"string"`
- Groups []*GroupIdentifier `locationName:"groupSet" locationNameList:"item" type:"list"`
- MacAddress *string `locationName:"macAddress" type:"string"`
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
- OwnerId *string `locationName:"ownerId" type:"string"`
- PrivateDnsName *string `locationName:"privateDnsName" type:"string"`
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
- PrivateIpAddresses []*InstancePrivateIpAddress `locationName:"privateIpAddressesSet" locationNameList:"item" type:"list"`
- SourceDestCheck *bool `locationName:"sourceDestCheck" type:"boolean"`
- Status *string `locationName:"status" type:"string" enum:"NetworkInterfaceStatus"`
- SubnetId *string `locationName:"subnetId" type:"string"`
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-type InstanceNetworkInterfaceAssociation struct {
- IpOwnerId *string `locationName:"ipOwnerId" type:"string"`
- PublicDnsName *string `locationName:"publicDnsName" type:"string"`
- PublicIp *string `locationName:"publicIp" type:"string"`
-}
-
-type InstanceNetworkInterfaceAttachment struct {
- AttachmentId *string `locationName:"attachmentId" type:"string"`
- DeleteOnTermination *bool `locationName:"deleteOnTermination" type:"boolean"`
- DeviceIndex *int64 `locationName:"deviceIndex" type:"integer"`
- Status *string `locationName:"status" type:"string" enum:"AttachmentStatus"`
-}
-
-type InstanceNetworkInterfaceSpecification struct {
- _ struct{} `type:"structure"`
-
- AssociatePublicIpAddress *bool `locationName:"associatePublicIpAddress" type:"boolean"`
-
- DeleteOnTermination *bool `locationName:"deleteOnTermination" type:"boolean"`
-
- Description *string `locationName:"description" type:"string"`
-
- DeviceIndex *int64 `locationName:"deviceIndex" type:"integer"`
-
- Groups []*string `locationName:"SecurityGroupId" locationNameList:"SecurityGroupId" type:"list"`
-
- Ipv6AddressCount *int64 `locationName:"ipv6AddressCount" type:"integer"`
-
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
-
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
-
- PrivateIpAddresses []*PrivateIpAddressSpecification `locationName:"privateIpAddressesSet" queryName:"PrivateIpAddresses" locationNameList:"item" type:"list"`
-
- SecurityGroupIds []*string `locationName:"SecurityGroupId" locationNameList:"SecurityGroupId" type:"list"`
-
- SecondaryPrivateIpAddressCount *int64 `locationName:"secondaryPrivateIpAddressCount" type:"integer"`
-
- SubnetId *string `locationName:"subnetId" type:"string"`
-}
-
-type InstancePrivateIpAddress struct {
- Association *InstanceNetworkInterfaceAssociation `locationName:"association" type:"structure"`
-
- Primary *bool `locationName:"primary" type:"boolean"`
-
- PrivateDnsName *string `locationName:"privateDnsName" type:"string"`
-
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
-}
-
-type InstanceState struct {
- Code *int64 `locationName:"code" type:"integer"`
-
- Name *string `locationName:"name" type:"string" enum:"InstanceStateName"`
-}
-
-type InstanceStateChange struct {
- _ struct{} `type:"structure"`
-
- CurrentState *InstanceState `locationName:"currentState" type:"structure"`
-
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- PreviousState *InstanceState `locationName:"previousState" type:"structure"`
-}
-
-type InstanceStatus struct {
- _ struct{} `type:"structure"`
-
- AvailabilityZone *string `locationName:"availabilityZone" type:"string"`
-
- Events []*InstanceStatusEvent `locationName:"eventsSet" locationNameList:"item" type:"list"`
-
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- InstanceState *InstanceState `locationName:"instanceState" type:"structure"`
-
- InstanceStatus *InstanceStatusSummary `locationName:"instanceStatus" type:"structure"`
-
- SystemStatus *InstanceStatusSummary `locationName:"systemStatus" type:"structure"`
-}
-
-type InstanceStatusDetails struct {
- _ struct{} `type:"structure"`
-
- ImpairedSince *time.Time `locationName:"impairedSince" type:"timestamp" timestampFormat:"iso8601"`
-
- Name *string `locationName:"name" type:"string" enum:"StatusName"`
-
- Status *string `locationName:"status" type:"string" enum:"StatusType"`
-}
-
-type InstanceStatusEvent struct {
- _ struct{} `type:"structure"`
-
- Code *string `locationName:"code" type:"string" enum:"EventCode"`
-
- Description *string `locationName:"description" type:"string"`
-
- NotAfter *time.Time `locationName:"notAfter" type:"timestamp" timestampFormat:"iso8601"`
-
- NotBefore *time.Time `locationName:"notBefore" type:"timestamp" timestampFormat:"iso8601"`
-}
-
-type InstanceStatusSummary struct {
- _ struct{} `type:"structure"`
-
- Details []*InstanceStatusDetails `locationName:"details" locationNameList:"item" type:"list"`
-
- Status *string `locationName:"status" type:"string" enum:"SummaryStatus"`
-}
-
-type EbsInstanceBlockDevice struct {
- AttachTime *time.Time `locationName:"attachTime" type:"timestamp" timestampFormat:"iso8601"`
-
- DeleteOnTermination *bool `locationName:"deleteOnTermination" type:"boolean"`
-
- Status *string `locationName:"status" type:"string" enum:"AttachmentStatus"`
-
- VolumeId *string `locationName:"volumeId" type:"string"`
-}
-
-type EbsInstanceBlockDeviceSpecification struct {
- _ struct{} `type:"structure"`
-
- DeleteOnTermination *bool `locationName:"deleteOnTermination" type:"boolean"`
-
- VolumeId *string `locationName:"volumeId" type:"string"`
-}
-
-type IamInstanceProfile struct {
- Arn *string `locationName:"arn" type:"string"`
-
- Id *string `locationName:"id" type:"string"`
-}
-
-type Monitoring struct {
- _ struct{} `type:"structure"`
-
- State *string `locationName:"state" type:"string" enum:"MonitoringState"`
-}
-
-type Placement struct {
- Affinity *string `locationName:"affinity" type:"string"`
-
- AvailabilityZone *string `locationName:"availabilityZone" type:"string"`
-
- GroupName *string `locationName:"groupName" type:"string"`
-
- HostId *string `locationName:"hostId" type:"string"`
-
- Tenancy *string `locationName:"tenancy" type:"string" enum:"Tenancy"`
-}
-
-type ProductCode struct {
- ProductCode *string `locationName:"productCode" type:"string"`
-
- Type *string `locationName:"type" type:"string" enum:"ProductCodeValues"`
-}
-
-type StateReason struct {
- Code *string `locationName:"code" type:"string"`
- Message *string `locationName:"message" type:"string"`
-}
-
-type Tag struct {
- _ struct{} `type:"structure"`
-
- Key *string `locationName:"key" type:"string"`
-
- Value *string `locationName:"value" type:"string"`
-}
-
-type PrivateIpAddressSpecification struct {
- _ struct{} `type:"structure"`
-
- Primary *bool `locationName:"primary" type:"boolean"`
-
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string" required:"true"`
-}
-
-type DescribeInstanceAttributeInput struct {
- _ struct{} `type:"structure"`
-
- Attribute *string `locationName:"attribute" type:"string" required:"true" enum:"InstanceAttributeName"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- InstanceId *string `locationName:"instanceId" type:"string" required:"true"`
-}
-
-type DescribeInstanceAttributeOutput struct {
- _ struct{} `type:"structure"`
-
- BlockDeviceMappings []*InstanceBlockDeviceMapping `locationName:"blockDeviceMapping" locationNameList:"item" type:"list"`
-
- DisableApiTermination *AttributeBooleanValue `locationName:"disableApiTermination" type:"structure"`
-
- EbsOptimized *AttributeBooleanValue `locationName:"ebsOptimized" type:"structure"`
-
- EnaSupport *AttributeBooleanValue `locationName:"enaSupport" type:"structure"`
-
- Groups []*GroupIdentifier `locationName:"groupSet" locationNameList:"item" type:"list"`
-
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- InstanceInitiatedShutdownBehavior *AttributeValue `locationName:"instanceInitiatedShutdownBehavior" type:"structure"`
-
- InstanceType *AttributeValue `locationName:"instanceType" type:"structure"`
-
- KernelId *AttributeValue `locationName:"kernel" type:"structure"`
-
- ProductCodes []*ProductCode `locationName:"productCodes" locationNameList:"item" type:"list"`
-
- RamdiskId *AttributeValue `locationName:"ramdisk" type:"structure"`
-
- RootDeviceName *AttributeValue `locationName:"rootDeviceName" type:"structure"`
-
- SourceDestCheck *AttributeBooleanValue `locationName:"sourceDestCheck" type:"structure"`
-
- SriovNetSupport *AttributeValue `locationName:"sriovNetSupport" type:"structure"`
-
- UserData *AttributeValue `locationName:"userData" type:"structure"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type AttributeBooleanValue struct {
- _ struct{} `type:"structure"`
-
- Value *bool `locationName:"value" type:"boolean"`
-}
-
-type AttributeValue struct {
- _ struct{} `type:"structure"`
-
- Value *string `locationName:"value" type:"string"`
-}
-
-type RunInstancesInput struct {
- _ struct{} `type:"structure"`
-
- BlockDeviceMappings []*BlockDeviceMapping `locationName:"BlockDeviceMapping" locationNameList:"BlockDeviceMapping" type:"list"`
-
- ClientToken *string `locationName:"clientToken" type:"string"`
-
- DisableApiTermination *bool `locationName:"disableApiTermination" type:"boolean"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- EbsOptimized *bool `locationName:"ebsOptimized" type:"boolean"`
-
- ImageId *string `type:"string"`
-
- InstanceInitiatedShutdownBehavior *string `locationName:"instanceInitiatedShutdownBehavior" type:"string" enum:"ShutdownBehavior"`
-
- InstanceType *string `type:"string" enum:"InstanceType"`
-
- InstanceName *string `type:"string" enum:"InstanceName"`
-
- KeyName *string `locationName:"keyName" type:"string"`
-
- MaxCount *int64 `type:"integer" required:"true"`
-
- MinCount *int64 `type:"integer" required:"true"`
-
- NetworkInterfaces []*InstanceNetworkInterfaceSpecification `locationName:"networkInterface" locationNameList:"item" type:"list"`
-
- Placement *Placement `type:"structure"`
-
- PrivateIPAddress *string `locationName:"privateIpAddress" type:"string"`
-
- PrivateIPAddresses *string `locationName:"privateIpAddresses" type:"string"`
-
- RamdiskId *string `type:"string"`
-
- SecurityGroupIds []*string `locationName:"SecurityGroupId" locationNameList:"SecurityGroupId" type:"list"`
-
- SecurityGroups []*string `locationName:"SecurityGroup" locationNameList:"SecurityGroup" type:"list"`
-
- SubnetId *string `type:"string"`
-
- TagSpecifications []*TagSpecification `locationName:"TagSpecification" locationNameList:"item" type:"list"`
-
- UserData *string `type:"string"`
-
- OwnerId *string `type:"string"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-
- ReservationId *string `type:"string"`
-
- PasswordData *string `type:"string"`
-}
-
-type BlockDeviceMapping struct {
- _ struct{} `type:"structure"`
-
- DeviceName *string `locationName:"deviceName" type:"string"`
-
- Ebs *EbsBlockDevice `locationName:"ebs" type:"structure"`
-
- NoDevice *string `locationName:"noDevice" type:"string"`
-
- VirtualName *string `locationName:"virtualName" type:"string"`
-}
-
-type PrivateIPAddressSpecification struct {
- _ struct{} `type:"structure"`
-
- Primary *bool `locationName:"primary" type:"boolean"`
-
- PrivateIPAddress *string `locationName:"privateIpAddress" type:"string" required:"true"`
-}
-
-type ModifyInstanceKeyPairInput struct {
- _ struct{} `type:"structure"`
-
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- KeyName *string `locationName:"keyName" type:"string"`
-}
-
-type EbsBlockDevice struct {
- _ struct{} `type:"structure"`
-
- DeleteOnTermination *bool `locationName:"deleteOnTermination" type:"boolean"`
-
- Encrypted *bool `locationName:"encrypted" type:"boolean"`
-
- Iops *int64 `locationName:"iops" type:"integer"`
-
- KmsKeyId *string `type:"string"`
-
- SnapshotId *string `locationName:"snapshotId" type:"string"`
-
- VolumeSize *int64 `locationName:"volumeSize" type:"integer"`
-
- VolumeType *string `locationName:"volumeType" type:"string" enum:"VolumeType"`
-}
-
-type GetPasswordDataInput struct {
- _ struct{} `type:"structure"`
-
- InstanceId *string `type:"string" required:"true"`
-}
-
-type GetPasswordDataOutput struct {
- _ struct{} `type:"structure"`
-
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- PasswordData *string `locationName:"passwordData" type:"string"`
-
- Timestamp *time.Time `locationName:"timestamp" type:"timestamp" timestampFormat:"iso8601"`
-}
-
-type TerminateInstancesInput struct {
- InstanceIds []*string `locationName:"InstanceId" locationNameList:"InstanceId" type:"list" required:"true"`
-}
-
-type TerminateInstancesOutput struct {
- _ struct{} `type:"structure"`
-
- TerminatingInstances []*InstanceStateChange `locationName:"instancesSet" locationNameList:"item" type:"list"`
-}
-type PublicIP struct {
- AllocationId *string `locationName:"allocationId" type:"string"`
- AssociationId *string `locationName:"associationId" type:"string"`
- Domain *string `locationName:"domain" type:"string"`
- InstanceId *string `locationName:"instanceId" type:"string"`
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
- NetworkInterfaceOwnerId *string `locationName:"networkInterface_ownerId" type:"string"`
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
- PublicIp *string `locationName:"publicIp" type:"string"`
-}
-
-type AllocateAddressInput struct {
- _ struct{} `type:"structure"`
-
- Domain *string `type:"string" enum:"DomainType"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-}
-
-type AllocateAddressOutput struct {
- _ struct{} `type:"structure"`
-
- AllocationId *string `locationName:"allocationId" type:"string"`
-
- Domain *string `locationName:"domain" type:"string" enum:"DomainType"`
-
- PublicIp *string `locationName:"publicIp" type:"string"`
-}
-
-type DescribeAddressesInput struct {
- _ struct{} `type:"structure"`
-
- AllocationIds []*string `locationName:"AllocationId" locationNameList:"AllocationId" type:"list"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- PublicIps []*string `locationName:"PublicIp" locationNameList:"PublicIp" type:"list"`
-}
-
-type DescribeAddressesOutput struct {
- _ struct{} `type:"structure"`
-
- Addresses []*Address `locationName:"addressesSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-func (s DescribeAddressesOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-func (s DescribeAddressesOutput) GoString() string {
- return s.String()
-}
-
-func (s *DescribeAddressesOutput) SetAddresses(v []*Address) *DescribeAddressesOutput {
- s.Addresses = v
- return s
-}
-
-func (s *DescribeAddressesOutput) SetRequestId(v string) *DescribeAddressesOutput {
- s.RequestId = &v
- return s
-}
-
-type Address struct {
- _ struct{} `type:"structure"`
-
- AllocationId *string `locationName:"allocationId" type:"string"`
-
- AssociationId *string `locationName:"associationId" type:"string"`
-
- AllowReassociation *bool `locationName:"allowReassociation" type:"boolean"`
-
- Domain *string `locationName:"domain" type:"string" enum:"DomainType"`
-
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
-
- NetworkInterfaceOwnerId *string `locationName:"networkInterfaceOwnerId" type:"string"`
-
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
-
- PublicIp *string `locationName:"publicIp" type:"string"`
-}
-
-type ModifyInstanceAttributeInput struct {
- _ struct{} `type:"structure"`
-
- Attribute *string `locationName:"attribute" type:"string" enum:"InstanceAttributeName"`
-
- BlockDeviceMappings []*BlockDeviceMapping `locationName:"blockDeviceMapping" locationNameList:"item" type:"list"`
-
- DisableApiTermination *AttributeBooleanValue `locationName:"disableApiTermination" type:"structure"`
-
- DeleteOnTermination *AttributeBooleanValue `locationName:"deleteOnTermination" type:"structure"`
-
- EbsOptimized *AttributeBooleanValue `locationName:"ebsOptimized" type:"structure"`
-
- Groups []*string `locationName:"GroupId" locationNameList:"groupId" type:"list"`
-
- InstanceId *string `locationName:"instanceId" type:"string" required:"true"`
-
- InstanceInitiatedShutdownBehavior *AttributeValue `locationName:"instanceInitiatedShutdownBehavior" type:"structure"`
-
- InstanceType *AttributeValue `locationName:"instanceType" type:"structure"`
-
- SourceDestCheck *AttributeBooleanValue `type:"structure"`
-
- UserData *BlobAttributeValue `locationName:"userData" type:"structure"`
-
- Value *string `locationName:"value" type:"string"`
-}
-
-func (s ModifyInstanceAttributeInput) String() string {
- return awsutil.Prettify(s)
-}
-
-func (s ModifyInstanceAttributeInput) GoString() string {
- return s.String()
-}
-
-func (s *ModifyInstanceAttributeInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "ModifyInstanceAttributeInput"}
- if s.InstanceId == nil {
- invalidParams.Add(request.NewErrParamRequired("InstanceId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-func (s *ModifyInstanceAttributeInput) SetAttribute(v string) *ModifyInstanceAttributeInput {
- s.Attribute = &v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetBlockDeviceMappings(v []*BlockDeviceMapping) *ModifyInstanceAttributeInput {
- s.BlockDeviceMappings = v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetDisableApiTermination(v *AttributeBooleanValue) *ModifyInstanceAttributeInput {
- s.DisableApiTermination = v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetEbsOptimized(v *AttributeBooleanValue) *ModifyInstanceAttributeInput {
- s.EbsOptimized = v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetGroups(v []*string) *ModifyInstanceAttributeInput {
- s.Groups = v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetInstanceId(v string) *ModifyInstanceAttributeInput {
- s.InstanceId = &v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetInstanceInitiatedShutdownBehavior(v *AttributeValue) *ModifyInstanceAttributeInput {
- s.InstanceInitiatedShutdownBehavior = v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetInstanceType(v *AttributeValue) *ModifyInstanceAttributeInput {
- s.InstanceType = v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetSourceDestCheck(v *AttributeBooleanValue) *ModifyInstanceAttributeInput {
- s.SourceDestCheck = v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetUserData(v *BlobAttributeValue) *ModifyInstanceAttributeInput {
- s.UserData = v
- return s
-}
-
-func (s *ModifyInstanceAttributeInput) SetValue(v string) *ModifyInstanceAttributeInput {
- s.Value = &v
- return s
-}
-
-type BlobAttributeValue struct {
- _ struct{} `type:"structure"`
-
- Value []byte `locationName:"value" type:"blob"`
-}
-
-type StopInstancesInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- Force *bool `locationName:"force" type:"boolean"`
-
- InstanceIds []*string `locationName:"InstanceId" locationNameList:"InstanceId" type:"list" required:"true"`
-}
-
-type StopInstancesOutput struct {
- _ struct{} `type:"structure"`
-
- StoppingInstances []*InstanceStateChange `locationName:"instancesSet" locationNameList:"item" type:"list"`
-}
-type ModifyInstanceAttributeOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type StartInstancesInput struct {
- _ struct{} `type:"structure"`
-
- AdditionalInfo *string `locationName:"additionalInfo" type:"string"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- InstanceIds []*string `locationName:"InstanceId" locationNameList:"InstanceId" type:"list" required:"true"`
-}
-
-type StartInstancesOutput struct {
- _ struct{} `type:"structure"`
-
- StartingInstances []*InstanceStateChange `locationName:"instancesSet" locationNameList:"item" type:"list"`
-}
-
-type AssociateAddressInput struct {
- _ struct{} `type:"structure"`
-
- AllocationId *string `type:"string"`
-
- AllowReassociation *bool `locationName:"allowReassociation" type:"boolean"`
-
- InstanceId *string `type:"string"`
-
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
-
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
-
- PublicIp *string `type:"string"`
-}
-
-type AssociateAddressOutput struct {
- _ struct{} `type:"structure"`
-
- AssociationId *string `locationName:"associationId" type:"string"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type DisassociateAddressInput struct {
- _ struct{} `type:"structure"`
-
- AssociationId *string `type:"string"`
-
- PublicIp *string `type:"string"`
-}
-
-type DisassociateAddressOutput struct {
- _ struct{} `type:"structure"`
-
- RequestId *string `locationName:"requestId" type:"string"`
- Return *bool `locationName:"return" type:"boolean"`
-}
-
-type ReleaseAddressInput struct {
- _ struct{} `type:"structure"`
-
- AllocationId *string `type:"string"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- PublicIp *string `type:"string"`
-}
-
-type ReleaseAddressOutput struct {
- _ struct{} `type:"structure"`
-}
-type RegisterImageInput struct {
- _ struct{} `type:"structure"`
-
- Architecture *string `locationName:"architecture" type:"string" enum:"ArchitectureValues"`
-
- BillingProducts []*string `locationName:"BillingProduct" locationNameList:"item" type:"list"`
-
- BlockDeviceMappings []*BlockDeviceMapping `locationName:"BlockDeviceMapping" locationNameList:"BlockDeviceMapping" type:"list"`
-
- Description *string `locationName:"description" type:"string"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- EnaSupport *bool `locationName:"enaSupport" type:"boolean"`
-
- ImageLocation *string `type:"string"`
-
- InstanceId *string `type:"string"`
-
- NoReboot *bool `type:"boolean"`
-
- KernelId *string `locationName:"kernelId" type:"string"`
-
- Name *string `locationName:"name" type:"string" required:"true"`
-
- RamdiskId *string `locationName:"ramdiskId" type:"string"`
-
- RootDeviceName *string `locationName:"rootDeviceName" type:"string"`
-
- SriovNetSupport *string `locationName:"sriovNetSupport" type:"string"`
-
- VirtualizationType *string `locationName:"virtualizationType" type:"string"`
-}
-
-type RegisterImageOutput struct {
- _ struct{} `type:"structure"`
-
- ImageId *string `locationName:"imageId" type:"string"`
-}
-
-type DeregisterImageInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- ImageId *string `type:"string" required:"true"`
-}
-
-type DeregisterImageOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type Image struct {
- _ struct{} `type:"structure"`
-
- Architecture *string `locationName:"architecture" type:"string" enum:"ArchitectureValues"`
-
- ClientToken *string `locationName:"clientToken" type:"string"`
-
- BlockDeviceMappings []*BlockDeviceMapping `locationName:"blockDeviceMapping" locationNameList:"item" type:"list"`
-
- CreationDate *string `locationName:"creationDate" type:"string"`
-
- Description *string `locationName:"description" type:"string"`
-
- EnaSupport *bool `locationName:"enaSupport" type:"boolean"`
-
- Hypervisor *string `locationName:"hypervisor" type:"string" enum:"HypervisorType"`
-
- ImageId *string `locationName:"imageId" type:"string"`
-
- ImageLocation *string `locationName:"imageLocation" type:"string"`
-
- ImageOwnerAlias *string `locationName:"imageOwnerAlias" type:"string"`
-
- ImageType *string `locationName:"imageType" type:"string" enum:"ImageTypeValues"`
-
- KernelId *string `locationName:"kernelId" type:"string"`
-
- Name *string `locationName:"name" type:"string"`
-
- OwnerId *string `locationName:"imageOwnerId" type:"string"`
-
- Platform *string `locationName:"platform" type:"string" enum:"PlatformValues"`
-
- ProductCodes []*ProductCode `locationName:"productCodes" locationNameList:"item" type:"list"`
-
- Public *bool `locationName:"isPublic" type:"boolean"`
-
- RamdiskId *string `locationName:"ramdiskId" type:"string"`
-
- RootDeviceName *string `locationName:"rootDeviceName" type:"string"`
-
- RootDeviceType *string `locationName:"rootDeviceType" type:"string" enum:"DeviceType"`
-
- SriovNetSupport *string `locationName:"sriovNetSupport" type:"string"`
-
- State *string `locationName:"imageState" type:"string" enum:"ImageState"`
-
- StateReason *StateReason `locationName:"stateReason" type:"structure"`
-
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- VirtualizationType *string `locationName:"virtualizationType" type:"string" enum:"VirtualizationType"`
-}
-
-type DescribeImagesInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- ExecutableUsers []*string `locationName:"ExecutableBy" locationNameList:"ExecutableBy" type:"list"`
-
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- ImageIds []*string `locationName:"ImageId" locationNameList:"ImageId" type:"list"`
-
- Owners []*string `locationName:"Owner" locationNameList:"Owner" type:"list"`
-}
-
-type DescribeImagesOutput struct {
- _ struct{} `type:"structure"`
-
- Images []*Image `locationName:"imagesSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-func (s DescribeImagesOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-func (s DescribeImagesOutput) GoString() string {
- return s.String()
-}
-
-func (s *DescribeImagesOutput) SetImages(v []*Image) *DescribeImagesOutput {
- s.Images = v
- return s
-}
-func (s *DescribeImagesOutput) SetRequestId(v *string) *DescribeImagesOutput {
- s.RequestId = v
- return s
-}
-
-type ModifyImageAttributeInput struct {
- _ struct{} `type:"structure"`
-
- Attribute *string `type:"string"`
-
- Description *AttributeValue `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- ImageId *string `type:"string" required:"true"`
-
- LaunchPermission *LaunchPermissionModifications `type:"structure"`
-
- OperationType *string `type:"string" enum:"OperationType"`
-
- ProductCodes []*string `locationName:"ProductCode" locationNameList:"ProductCode" type:"list"`
-
- UserGroups []*string `locationName:"UserGroup" locationNameList:"UserGroup" type:"list"`
-
- UserIds []*string `locationName:"UserId" locationNameList:"UserId" type:"list"`
-
- Value *string `type:"string"`
-}
-
-type ModifyImageAttributeOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type LaunchPermissionModifications struct {
- _ struct{} `type:"structure"`
-
- Add []*LaunchPermission `locationNameList:"item" type:"list"`
-
- Remove []*LaunchPermission `locationNameList:"item" type:"list"`
-}
-
-type LaunchPermission struct {
- _ struct{} `type:"structure"`
-
- Group *string `locationName:"group" type:"string" enum:"PermissionGroup"`
-
- UserId *string `locationName:"userId" type:"string"`
-}
-
-type DeleteTagsInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- Resources []*string `locationName:"resourceId" type:"list" required:"true"`
-
- Tags []*Tag `locationName:"tag" locationNameList:"item" type:"list"`
-}
-
-type DeleteTagsOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type CreateTagsInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- Resources []*string `locationName:"ResourceId" type:"list" required:"true"`
-
- Tags []*Tag `locationName:"Tag" locationNameList:"item" type:"list" required:"true"`
-}
-
-type CreateTagsOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type DescribeTagsInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- MaxResults *int64 `locationName:"maxResults" type:"integer"`
-
- NextToken *string `locationName:"nextToken" type:"string"`
-}
-
-type DescribeTagsOutput struct {
- _ struct{} `type:"structure"`
-
- // The token to use to retrieve the next page of results. This value is null
- // when there are no more results to return.
- NextToken *string `locationName:"nextToken" type:"string"`
-
- // A list of tags.
- Tags []*TagDescription `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeTagsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeTagsOutput) GoString() string {
- return s.String()
-}
-
-// SetNextToken sets the NextToken field's value.
-func (s *DescribeTagsOutput) SetNextToken(v string) *DescribeTagsOutput {
- s.NextToken = &v
- return s
-}
-func (s *DescribeTagsOutput) SetRequestId(v string) *DescribeTagsOutput {
- s.RequestId = &v
- return s
-}
-
-// SetTags sets the Tags field's value.
-func (s *DescribeTagsOutput) SetTags(v []*TagDescription) *DescribeTagsOutput {
- s.Tags = v
- return s
-}
-
-type TagDescription struct {
- _ struct{} `type:"structure"`
-
- Key *string `locationName:"key" type:"string"`
-
- ResourceId *string `locationName:"resourceId" type:"string"`
-
- ResourceType *string `locationName:"resourceType" type:"string" enum:"ResourceType"`
-
- Value *string `locationName:"value" type:"string"`
-}
-
-type TagSpecification struct {
- _ struct{} `type:"structure"`
-
- ResourceType *string `locationName:"resourceType" type:"string" enum:"ResourceType"`
-
- Tags []*Tag `locationName:"Tag" locationNameList:"item" type:"list"`
-}
-
-// Contains the parameters for ImportKeyPair.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ImportKeyPairRequest
-type ImportKeyPairInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // A unique name for the key pair.
- //
- // KeyName is a required field
- KeyName *string `locationName:"keyName" type:"string" required:"true"`
-
- // The public key. For API calls, the text must be base64-encoded. For command
- // line tools, base64 encoding is performed for you.
- //
- // PublicKeyMaterial is automatically base64 encoded/decoded by the SDK.
- //
- // PublicKeyMaterial is a required field
- PublicKeyMaterial []byte `locationName:"publicKeyMaterial" type:"blob" required:"true"`
-}
-
-// Contains the output of ImportKeyPair.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ImportKeyPairResult
-type ImportKeyPairOutput struct {
- _ struct{} `type:"structure"`
-
- // The MD5 public key fingerprint as specified in section 4 of RFC 4716.
- KeyFingerprint *string `locationName:"keyFingerprint" type:"string"`
-
- // The key pair name you provided.
- KeyName *string `locationName:"keyName" type:"string"`
-}
-
-// Contains the parameters for DescribeKeyPairs.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeKeyPairsRequest
-type DescribeKeyPairsInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * fingerprint - The fingerprint of the key pair.
- //
- // * key-name - The name of the key pair.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // One or more key pair names.
- //
- // Default: Describes all your key pairs.
- KeyNames []*string `locationName:"KeyName" locationNameList:"KeyName" type:"list"`
-}
-
-// Contains the output of DescribeKeyPairs.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeKeyPairsResult
-type DescribeKeyPairsOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more key pairs.
- KeyPairs []*KeyPairInfo `locationName:"keySet" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// Describes a key pair.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/KeyPairInfo
-type KeyPairInfo struct {
- _ struct{} `type:"structure"`
-
- // If you used CreateKeyPair to create the key pair, this is the SHA-1 digest
- // of the DER encoded private key. If you used ImportKeyPair to provide AWS
- // the public key, this is the MD5 public key fingerprint as specified in section
- // 4 of RFC4716.
- KeyFingerprint *string `locationName:"keyFingerprint" type:"string"`
-
- // The name of the key pair.
- KeyName *string `locationName:"keyName" type:"string"`
-}
-
-// Contains the parameters for DeleteKeyPair.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteKeyPairRequest
-type DeleteKeyPairInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The name of the key pair.
- //
- // KeyName is a required field
- KeyName *string `type:"string" required:"true"`
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteKeyPairOutput
-type DeleteKeyPairOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type CreateKeyPairInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // A unique name for the key pair.
- //
- // Constraints: Up to 255 ASCII characters
- //
- // KeyName is a required field
- KeyName *string `type:"string" required:"true"`
-}
-
-// Describes a key pair.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/KeyPair
-type CreateKeyPairOutput struct {
- _ struct{} `type:"structure"`
-
- // The SHA-1 digest of the DER encoded private key.
- KeyFingerprint *string `locationName:"keyFingerprint" type:"string"`
-
- // An unencrypted PEM encoded RSA private key.
- KeyMaterial *string `locationName:"keyMaterial" type:"string"`
-
- // The name of the key pair.
- KeyName *string `locationName:"keyName" type:"string"`
-
- // The ID of the request.
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type CreateSecurityGroupInput struct {
- _ struct{} `type:"structure"`
-
- Description *string `locationName:"GroupDescription" type:"string" required:"true"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- GroupName *string `type:"string" required:"true"`
-
- VpcId *string `type:"string"`
-}
-
-type CreateSecurityGroupOutput struct {
- _ struct{} `type:"structure"`
-
- GroupId *string `locationName:"groupId" type:"string"`
-}
-
-type DescribeSecurityGroupsInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- GroupIds []*string `locationName:"GroupId" locationNameList:"groupId" type:"list"`
-
- GroupNames []*string `locationName:"GroupName" locationNameList:"GroupName" type:"list"`
-}
-
-type DescribeSecurityGroupsOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more security groups.
- SecurityGroups []*SecurityGroup `locationName:"securityGroupInfo" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeSecurityGroupsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeSecurityGroupsOutput) GoString() string {
- return s.String()
-}
-
-// SetSecurityGroups sets the SecurityGroups field's value.
-func (s *DescribeSecurityGroupsOutput) SetSecurityGroups(v []*SecurityGroup) *DescribeSecurityGroupsOutput {
- s.SecurityGroups = v
- return s
-}
-
-func (s *DescribeSecurityGroupsOutput) SetRequestId(v string) *DescribeSecurityGroupsOutput {
- s.RequestId = &v
- return s
-}
-
-type SecurityGroup struct {
- _ struct{} `type:"structure"`
- Description *string `locationName:"groupDescription" type:"string"`
- GroupId *string `locationName:"groupId" type:"string"`
- GroupName *string `locationName:"groupName" type:"string"`
- IpPermissions []*IpPermission `locationName:"ipPermissions" locationNameList:"item" type:"list"`
- IpPermissionsEgress []*IpPermission `locationName:"ipPermissionsEgress" locationNameList:"item" type:"list"`
- OwnerId *string `locationName:"ownerId" type:"string"`
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-type IpPermission struct {
- _ struct{} `type:"structure"`
- FromPort *int64 `locationName:"fromPort" type:"integer"`
- IpProtocol *string `locationName:"ipProtocol" type:"string"`
- IpRanges []*IpRange `locationName:"ipRanges" locationNameList:"item" type:"list"`
- Ipv6Ranges []*Ipv6Range `locationName:"ipv6Ranges" locationNameList:"item" type:"list"`
- PrefixListIds []*PrefixListId `locationName:"prefixListIds" locationNameList:"item" type:"list"`
- ToPort *int64 `locationName:"toPort" type:"integer"`
- UserIdGroupPairs []*UserIdGroupPair `locationName:"groups" locationNameList:"item" type:"list"`
-}
-
-type IpRange struct {
- _ struct{} `type:"structure"`
- CidrIp *string `locationName:"cidrIp" type:"string"`
-}
-
-type Ipv6Range struct {
- _ struct{} `type:"structure"`
- CidrIpv6 *string `locationName:"cidrIpv6" type:"string"`
-}
-
-type PrefixListId struct {
- _ struct{} `type:"structure"`
- PrefixListId *string `locationName:"prefixListId" type:"string"`
-}
-
-type UserIdGroupPair struct {
- _ struct{} `type:"structure"`
- GroupId *string `locationName:"groupId" type:"string"`
- GroupName *string `locationName:"groupName" type:"string"`
- PeeringStatus *string `locationName:"peeringStatus" type:"string"`
- UserId *string `locationName:"userId" type:"string"`
- VpcId *string `locationName:"vpcId" type:"string"`
- VpcPeeringConnectionId *string `locationName:"vpcPeeringConnectionId" type:"string"`
-}
-
-type RevokeSecurityGroupEgressInput struct {
- _ struct{} `type:"structure"`
-
- CidrIp *string `locationName:"cidrIp" type:"string"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- FromPort *int64 `locationName:"fromPort" type:"integer"`
-
- GroupId *string `locationName:"groupId" type:"string" required:"true"`
-
- IpPermissions []*IpPermission `locationName:"ipPermissions" locationNameList:"item" type:"list"`
-
- IpProtocol *string `locationName:"ipProtocol" type:"string"`
-
- SourceSecurityGroupName *string `locationName:"sourceSecurityGroupName" type:"string"`
-
- SourceSecurityGroupOwnerId *string `locationName:"sourceSecurityGroupOwnerId" type:"string"`
-
- ToPort *int64 `locationName:"toPort" type:"integer"`
-}
-
-type RevokeSecurityGroupEgressOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type RevokeSecurityGroupIngressInput struct {
- _ struct{} `type:"structure"`
-
- CidrIp *string `type:"string"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- FromPort *int64 `type:"integer"`
-
- GroupId *string `type:"string"`
-
- GroupName *string `type:"string"`
-
- IpPermissions []*IpPermission `locationNameList:"item" type:"list"`
-
- IpProtocol *string `type:"string"`
-
- SourceSecurityGroupName *string `type:"string"`
-
- SourceSecurityGroupOwnerId *string `type:"string"`
-
- ToPort *int64 `type:"integer"`
-}
-
-type RevokeSecurityGroupIngressOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type AuthorizeSecurityGroupEgressInput struct {
- _ struct{} `type:"structure"`
-
- CidrIp *string `locationName:"cidrIp" type:"string"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- FromPort *int64 `locationName:"fromPort" type:"integer"`
-
- GroupId *string `locationName:"groupId" type:"string" required:"true"`
-
- IpPermissions []*IpPermission `locationName:"ipPermissions" locationNameList:"item" type:"list"`
-
- IpProtocol *string `locationName:"ipProtocol" type:"string"`
-
- SourceSecurityGroupName *string `locationName:"sourceSecurityGroupName" type:"string"`
-
- SourceSecurityGroupOwnerId *string `locationName:"sourceSecurityGroupOwnerId" type:"string"`
-
- ToPort *int64 `locationName:"toPort" type:"integer"`
-}
-
-type AuthorizeSecurityGroupEgressOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type AuthorizeSecurityGroupIngressInput struct {
- _ struct{} `type:"structure"`
-
- CidrIp *string `type:"string"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- FromPort *int64 `type:"integer"`
-
- GroupId *string `type:"string"`
-
- GroupName *string `type:"string"`
-
- IpPermissions []*IpPermission `locationNameList:"item" type:"list"`
-
- IpProtocol *string `type:"string"`
-
- SourceSecurityGroupName *string `type:"string"`
-
- SourceSecurityGroupOwnerId *string `type:"string"`
-
- ToPort *int64 `type:"integer"`
-}
-
-type AuthorizeSecurityGroupIngressOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type DeleteSecurityGroupInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- GroupId *string `type:"string"`
-
- GroupName *string `type:"string"`
-}
-
-type DeleteSecurityGroupOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// Contains the parameters for CreateVolume.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateVolumeRequest
-type CreateVolumeInput struct {
- _ struct{} `type:"structure"`
-
- // The Availability Zone in which to create the volume. Use DescribeAvailabilityZones
- // to list the Availability Zones that are currently available to you.
- //
- // AvailabilityZone is a required field
- AvailabilityZone *string `type:"string" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // Specifies whether the volume should be encrypted. Encrypted Amazon EBS volumes
- // may only be attached to instances that support Amazon EBS encryption. Volumes
- // that are created from encrypted snapshots are automatically encrypted. There
- // is no way to create an encrypted volume from an unencrypted snapshot or vice
- // versa. If your AMI uses encrypted volumes, you can only launch it on supported
- // instance types. For more information, see Amazon EBS Encryption (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html)
- // in the Amazon Elastic Compute Cloud User Guide.
- Encrypted *bool `locationName:"encrypted" type:"boolean"`
-
- // Only valid for Provisioned IOPS SSD volumes. The number of I/O operations
- // per second (IOPS) to provision for the volume, with a maximum ratio of 50
- // IOPS/GiB.
- //
- // Constraint: Range is 100 to 20000 for Provisioned IOPS SSD volumes
- Iops *int64 `type:"integer"`
-
- // The full ARN of the AWS Key Management Service (AWS KMS) customer master
- // key (CMK) to use when creating the encrypted volume. This parameter is only
- // required if you want to use a non-default CMK; if this parameter is not specified,
- // the default CMK for EBS is used. The ARN contains the arn:aws:kms namespace,
- // followed by the region of the CMK, the AWS account ID of the CMK owner, the
- // key namespace, and then the CMK ID. For example, arn:aws:kms:us-east-1:012345678910:key/abcd1234-a123-456a-a12b-a123b4cd56ef.
- // If a KmsKeyId is specified, the Encrypted flag must also be set.
- KmsKeyId *string `type:"string"`
-
- // The size of the volume, in GiBs.
- //
- // Constraints: 1-16384 for gp2, 4-16384 for io1, 500-16384 for st1, 500-16384
- // for sc1, and 1-1024 for standard. If you specify a snapshot, the volume size
- // must be equal to or larger than the snapshot size.
- //
- // Default: If you're creating the volume from a snapshot and don't specify
- // a volume size, the default is the snapshot size.
- Size *int64 `type:"integer"`
-
- // The snapshot from which to create the volume.
- SnapshotId *string `type:"string"`
-
- // The tags to apply to the volume during creation.
- TagSpecifications []*TagSpecification `locationName:"TagSpecification" locationNameList:"item" type:"list"`
-
- // The volume type. This can be gp2 for General Purpose SSD, io1 for Provisioned
- // IOPS SSD, st1 for Throughput Optimized HDD, sc1 for Cold HDD, or standard
- // for Magnetic volumes.
- //
- // Default: standard
- VolumeType *string `type:"string" enum:"VolumeType"`
-}
-
-// Contains the parameters for DeleteVolume.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteVolumeRequest
-type DeleteVolumeInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the volume.
- //
- // VolumeId is a required field
- VolumeId *string `type:"string" required:"true"`
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteVolumeOutput
-type DeleteVolumeOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// Contains the parameters for DescribeVolumes.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVolumesRequest
-type DescribeVolumesInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * attachment.attach-time - The time stamp when the attachment initiated.
- //
- // * attachment.delete-on-termination - Whether the volume is deleted on
- // instance termination.
- //
- // * attachment.device - The device name that is exposed to the instance
- // (for example, /dev/sda1).
- //
- // * attachment.instance-id - The ID of the instance the volume is attached
- // to.
- //
- // * attachment.status - The attachment state (attaching | attached | detaching
- // | detached).
- //
- // * availability-zone - The Availability Zone in which the volume was created.
- //
- // * create-time - The time stamp when the volume was created.
- //
- // * encrypted - The encryption status of the volume.
- //
- // * size - The size of the volume, in GiB.
- //
- // * snapshot-id - The snapshot from which the volume was created.
- //
- // * status - The status of the volume (creating | available | in-use | deleting
- // | deleted | error).
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- //
- // * volume-id - The volume ID.
- //
- // * volume-type - The Amazon EBS volume type. This can be gp2 for General
- // Purpose SSD, io1 for Provisioned IOPS SSD, st1 for Throughput Optimized
- // HDD, sc1 for Cold HDD, or standard for Magnetic volumes.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // The maximum number of volume results returned by DescribeVolumes in paginated
- // output. When this parameter is used, DescribeVolumes only returns MaxResults
- // results in a single page along with a NextToken response element. The remaining
- // results of the initial request can be seen by sending another DescribeVolumes
- // request with the returned NextToken value. This value can be between 5 and
- // 500; if MaxResults is given a value larger than 500, only 500 results are
- // returned. If this parameter is not used, then DescribeVolumes returns all
- // results. You cannot specify this parameter and the volume IDs parameter in
- // the same request.
- MaxResults *int64 `locationName:"maxResults" type:"integer"`
-
- // The NextToken value returned from a previous paginated DescribeVolumes request
- // where MaxResults was used and the results exceeded the value of that parameter.
- // Pagination continues from the end of the previous results that returned the
- // NextToken value. This value is null when there are no more results to return.
- NextToken *string `locationName:"nextToken" type:"string"`
-
- // One or more volume IDs.
- VolumeIds []*string `locationName:"VolumeId" locationNameList:"VolumeId" type:"list"`
-}
-
-type DescribeVolumesOutput struct {
- _ struct{} `type:"structure"`
-
- // The NextToken value to include in a future DescribeVolumes request. When
- // the results of a DescribeVolumes request exceed MaxResults, this value can
- // be used to retrieve the next page of results. This value is null when there
- // are no more results to return.
- NextToken *string `locationName:"nextToken" type:"string"`
-
- // Information about the volumes.
- Volumes []*Volume `locationName:"volumeSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeVolumesOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVolumesOutput) GoString() string {
- return s.String()
-}
-
-// SetNextToken sets the NextToken field's value.
-func (s *DescribeVolumesOutput) SetNextToken(v string) *DescribeVolumesOutput {
- s.NextToken = &v
- return s
-}
-func (s *DescribeVolumesOutput) SetRequestId(v string) *DescribeVolumesOutput {
- s.RequestId = &v
- return s
-}
-
-// SetVolumes sets the Volumes field's value.
-func (s *DescribeVolumesOutput) SetVolumes(v []*Volume) *DescribeVolumesOutput {
- s.Volumes = v
- return s
-}
-
-// Describes a volume.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/Volume
-type Volume struct {
- _ struct{} `type:"structure"`
-
- // Information about the volume attachments.
- Attachments []*VolumeAttachment `locationName:"attachmentSet" locationNameList:"item" type:"list"`
-
- // The Availability Zone for the volume.
- AvailabilityZone *string `locationName:"availabilityZone" type:"string"`
-
- // The time stamp when volume creation was initiated.
- CreateTime *time.Time `locationName:"createTime" type:"timestamp" timestampFormat:"iso8601"`
-
- // Indicates whether the volume will be encrypted.
- Encrypted *bool `locationName:"encrypted" type:"boolean"`
-
- Iops *int64 `locationName:"iops" type:"integer"`
-
- // The full ARN of the AWS Key Management Service (AWS KMS) customer master
- // key (CMK) that was used to protect the volume encryption key for the volume.
- KmsKeyId *string `locationName:"kmsKeyId" type:"string"`
-
- // The size of the volume, in GiBs.
- Size *int64 `locationName:"size" type:"integer"`
-
- // The snapshot from which the volume was created, if applicable.
- SnapshotId *string `locationName:"snapshotId" type:"string"`
-
- // The volume state.
- State *string `locationName:"status" type:"string" enum:"VolumeState"`
-
- // Any tags assigned to the volume.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The ID of the volume.
- VolumeId *string `locationName:"volumeId" type:"string"`
-
- // The volume type. This can be gp2 for General Purpose SSD, io1 for Provisioned
- // IOPS SSD, st1 for Throughput Optimized HDD, sc1 for Cold HDD, or standard
- // for Magnetic volumes.
- VolumeType *string `locationName:"volumeType" type:"string" enum:"VolumeType"`
-}
-
-// String returns the string representation
-func (s Volume) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s Volume) GoString() string {
- return s.String()
-}
-
-// SetAttachments sets the Attachments field's value.
-func (s *Volume) SetAttachments(v []*VolumeAttachment) *Volume {
- s.Attachments = v
- return s
-}
-
-// SetAvailabilityZone sets the AvailabilityZone field's value.
-func (s *Volume) SetAvailabilityZone(v string) *Volume {
- s.AvailabilityZone = &v
- return s
-}
-
-// SetCreateTime sets the CreateTime field's value.
-func (s *Volume) SetCreateTime(v time.Time) *Volume {
- s.CreateTime = &v
- return s
-}
-
-// SetEncrypted sets the Encrypted field's value.
-func (s *Volume) SetEncrypted(v bool) *Volume {
- s.Encrypted = &v
- return s
-}
-
-// SetIops sets the Iops field's value.
-func (s *Volume) SetIops(v int64) *Volume {
- s.Iops = &v
- return s
-}
-
-// SetKmsKeyId sets the KmsKeyId field's value.
-func (s *Volume) SetKmsKeyId(v string) *Volume {
- s.KmsKeyId = &v
- return s
-}
-
-// SetSize sets the Size field's value.
-func (s *Volume) SetSize(v int64) *Volume {
- s.Size = &v
- return s
-}
-
-// SetSnapshotId sets the SnapshotId field's value.
-func (s *Volume) SetSnapshotId(v string) *Volume {
- s.SnapshotId = &v
- return s
-}
-
-// SetState sets the State field's value.
-func (s *Volume) SetState(v string) *Volume {
- s.State = &v
- return s
-}
-
-// SetTags sets the Tags field's value.
-func (s *Volume) SetTags(v []*Tag) *Volume {
- s.Tags = v
- return s
-}
-
-// SetVolumeId sets the VolumeId field's value.
-func (s *Volume) SetVolumeId(v string) *Volume {
- s.VolumeId = &v
- return s
-}
-
-// SetVolumeType sets the VolumeType field's value.
-func (s *Volume) SetVolumeType(v string) *Volume {
- s.VolumeType = &v
- return s
-}
-
-type VolumeAttachment struct {
- _ struct{} `type:"structure"`
-
- // The time stamp when the attachment initiated.
- AttachTime *time.Time `locationName:"attachTime" type:"timestamp" timestampFormat:"iso8601"`
-
- // Indicates whether the EBS volume is deleted on instance termination.
- DeleteOnTermination *bool `locationName:"deleteOnTermination" type:"boolean"`
-
- // The device name.
- Device *string `locationName:"device" type:"string"`
-
- // The ID of the instance.
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- // The attachment state of the volume.
- State *string `locationName:"status" type:"string" enum:"VolumeAttachmentState"`
-
- // The ID of the volume.
- VolumeId *string `locationName:"volumeId" type:"string"`
-}
-
-// String returns the string representation
-func (s VolumeAttachment) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s VolumeAttachment) GoString() string {
- return s.String()
-}
-
-// SetAttachTime sets the AttachTime field's value.
-func (s *VolumeAttachment) SetAttachTime(v time.Time) *VolumeAttachment {
- s.AttachTime = &v
- return s
-}
-
-// SetDeleteOnTermination sets the DeleteOnTermination field's value.
-func (s *VolumeAttachment) SetDeleteOnTermination(v bool) *VolumeAttachment {
- s.DeleteOnTermination = &v
- return s
-}
-
-// SetDevice sets the Device field's value.
-func (s *VolumeAttachment) SetDevice(v string) *VolumeAttachment {
- s.Device = &v
- return s
-}
-
-// SetInstanceId sets the InstanceId field's value.
-func (s *VolumeAttachment) SetInstanceId(v string) *VolumeAttachment {
- s.InstanceId = &v
- return s
-}
-
-// SetState sets the State field's value.
-func (s *VolumeAttachment) SetState(v string) *VolumeAttachment {
- s.State = &v
- return s
-}
-
-// SetVolumeId sets the VolumeId field's value.
-func (s *VolumeAttachment) SetVolumeId(v string) *VolumeAttachment {
- s.VolumeId = &v
- return s
-}
-
-type AttachVolumeInput struct {
- _ struct{} `type:"structure"`
-
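-	// The device name.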
- Device *string `type:"string" required:"true"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the instance.
- //
- // InstanceId is a required field
- InstanceId *string `type:"string" required:"true"`
-
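-	// The ID of the volume.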
- VolumeId *string `type:"string" required:"true"`
-}
-
-type DetachVolumeInput struct {
- _ struct{} `type:"structure"`
-
- // The device name.
- Device *string `type:"string"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- Force *bool `type:"boolean"`
-
- // The ID of the instance.
- InstanceId *string `type:"string"`
-
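-	// The ID of the volume.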
- VolumeId *string `type:"string" required:"true"`
-}
-type CreateSubnetInput struct {
- _ struct{} `type:"structure"`
-
- // The Availability Zone for the subnet.
- //
- // Default: AWS selects one for you. If you create more than one subnet in your
- // VPC, we may not necessarily select a different zone for each subnet.
- AvailabilityZone *string `type:"string"`
-
- // The IPv4 network range for the subnet, in CIDR notation. For example, 10.0.0.0/24.
- //
- // CidrBlock is a required field
- CidrBlock *string `type:"string" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The IPv6 network range for the subnet, in CIDR notation. The subnet size
- // must use a /64 prefix length.
- Ipv6CidrBlock *string `type:"string"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `type:"string" required:"true"`
-}
-
-type CreateSubnetOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the subnet.
- Subnet *Subnet `locationName:"subnet" type:"structure"`
-}
-
-type DescribeInstanceStatusInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- IncludeAllInstances *bool `locationName:"includeAllInstances" type:"boolean"`
-
- InstanceIds []*string `locationName:"InstanceId" locationNameList:"InstanceId" type:"list"`
-
- MaxResults *int64 `type:"integer"`
-
- NextToken *string `type:"string"`
-}
-
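-// String returns the string representation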
-func (s DescribeInstanceStatusInput) String() string {
- return awsutil.Prettify(s)
-}
-
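-// GoString returns the string representation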
-func (s DescribeInstanceStatusInput) GoString() string {
- return s.String()
-}
-
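-// SetDryRun sets the DryRun field's value.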
-func (s *DescribeInstanceStatusInput) SetDryRun(v bool) *DescribeInstanceStatusInput {
- s.DryRun = &v
- return s
-}
-
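-// SetFilters sets the Filters field's value.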
-func (s *DescribeInstanceStatusInput) SetFilters(v []*Filter) *DescribeInstanceStatusInput {
- s.Filters = v
- return s
-}
-
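-// SetIncludeAllInstances sets the IncludeAllInstances field's value.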
-func (s *DescribeInstanceStatusInput) SetIncludeAllInstances(v bool) *DescribeInstanceStatusInput {
- s.IncludeAllInstances = &v
- return s
-}
-
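-// SetInstanceIds sets the InstanceIds field's value.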
-func (s *DescribeInstanceStatusInput) SetInstanceIds(v []*string) *DescribeInstanceStatusInput {
- s.InstanceIds = v
- return s
-}
-
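-// SetMaxResults sets the MaxResults field's value.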
-func (s *DescribeInstanceStatusInput) SetMaxResults(v int64) *DescribeInstanceStatusInput {
- s.MaxResults = &v
- return s
-}
-
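-// SetNextToken sets the NextToken field's value.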
-func (s *DescribeInstanceStatusInput) SetNextToken(v string) *DescribeInstanceStatusInput {
- s.NextToken = &v
- return s
-}
-
-type DescribeInstanceStatusOutput struct {
- _ struct{} `type:"structure"`
-
- InstanceStatuses []*InstanceStatus `locationName:"instanceStatusSet" locationNameList:"item" type:"list"`
-
- NextToken *string `locationName:"nextToken" type:"string"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
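-// String returns the string representation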
-func (s DescribeInstanceStatusOutput) String() string {
- return awsutil.Prettify(s)
-}
-
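-// GoString returns the string representation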
-func (s DescribeInstanceStatusOutput) GoString() string {
- return s.String()
-}
-
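-// SetInstanceStatuses sets the InstanceStatuses field's value.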
-func (s *DescribeInstanceStatusOutput) SetInstanceStatuses(v []*InstanceStatus) *DescribeInstanceStatusOutput {
- s.InstanceStatuses = v
- return s
-}
-
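-// SetNextToken sets the NextToken field's value.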
-func (s *DescribeInstanceStatusOutput) SetNextToken(v string) *DescribeInstanceStatusOutput {
- s.NextToken = &v
- return s
-}
-
-// CreateInternetGatewayInput Contains the parameters for CreateInternetGateway.
-type CreateInternetGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the subnet.
- //
- // SubnetId is a required field
- SubnetId *string `type:"string" required:"true"`
-}
-
-type DeleteSubnetInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the subnet.
- //
- // SubnetId is a required field
- SubnetId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteSubnetInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteSubnetInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteSubnetInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteSubnetInput"}
- if s.SubnetId == nil {
- invalidParams.Add(request.NewErrParamRequired("SubnetId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteSubnetInput) SetDryRun(v bool) *DeleteSubnetInput {
- s.DryRun = &v
- return s
-}
-
-// SetSubnetId sets the SubnetId field's value.
-func (s *DeleteSubnetInput) SetSubnetId(v string) *DeleteSubnetInput {
- s.SubnetId = &v
- return s
-}
-
-type DeleteSubnetOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type Subnet struct {
- _ struct{} `type:"structure"`
-
- // Indicates whether a network interface created in this subnet (including a
- // network interface created by RunInstances) receives an IPv6 address.
- AssignIpv6AddressOnCreation *bool `locationName:"assignIpv6AddressOnCreation" type:"boolean"`
-
- // The Availability Zone of the subnet.
- AvailabilityZone *string `locationName:"availabilityZone" type:"string"`
-
- // The number of unused private IPv4 addresses in the subnet. Note that the
- // IPv4 addresses for any stopped instances are considered unavailable.
- AvailableIpAddressCount *int64 `locationName:"availableIpAddressCount" type:"integer"`
-
- // The IPv4 CIDR block assigned to the subnet.
- CidrBlock *string `locationName:"cidrBlock" type:"string"`
-
- // Indicates whether this is the default subnet for the Availability Zone.
- DefaultForAz *bool `locationName:"defaultForAz" type:"boolean"`
-
- // Indicates whether instances launched in this subnet receive a public IPv4
- // address.
- MapPublicIpOnLaunch *bool `locationName:"mapPublicIpOnLaunch" type:"boolean"`
-
- // The current state of the subnet.
- State *string `locationName:"state" type:"string" enum:"SubnetState"`
-
- // The ID of the subnet.
- SubnetId *string `locationName:"subnetId" type:"string"`
-
- // Any tags assigned to the subnet.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The ID of the VPC the subnet is in.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-type DescribeSubnetsInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * availabilityZone - The Availability Zone for the subnet. You can also
- // use availability-zone as the filter name.
- //
- // * available-ip-address-count - The number of IPv4 addresses in the subnet
- // that are available.
- //
- // * cidrBlock - The IPv4 CIDR block of the subnet. The CIDR block you specify
- // must exactly match the subnet's CIDR block for information to be returned
- // for the subnet. You can also use cidr or cidr-block as the filter names.
- //
- // * defaultForAz - Indicates whether this is the default subnet for the
- // Availability Zone. You can also use default-for-az as the filter name.
- //
- // * ipv6-cidr-block-association.ipv6-cidr-block - An IPv6 CIDR block associated
- // with the subnet.
- //
- // * ipv6-cidr-block-association.association-id - An association ID for an
- // IPv6 CIDR block associated with the subnet.
- //
- // * ipv6-cidr-block-association.state - The state of an IPv6 CIDR block
- // associated with the subnet.
- //
- // * state - The state of the subnet (pending | available).
- //
- // * subnet-id - The ID of the subnet.
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- //
- // * vpc-id - The ID of the VPC for the subnet.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // One or more subnet IDs.
- //
- // Default: Describes all your subnets.
- SubnetIds []*string `locationName:"SubnetId" locationNameList:"SubnetId" type:"list"`
-}
-type DescribeSubnetsOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more subnets.
- Subnets []*Subnet `locationName:"subnetSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeSubnetsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeSubnetsOutput) GoString() string {
- return s.String()
-}
-
-// SetSubnets sets the Subnets field's value.
-func (s *DescribeSubnetsOutput) SetSubnets(v []*Subnet) *DescribeSubnetsOutput {
- s.Subnets = v
- return s
-}
-
-// SetRequestId sets the RequestId field's value.
-func (s *DescribeSubnetsOutput) SetRequestId(v *string) *DescribeSubnetsOutput {
-	s.RequestId = v
-	return s
-}
-
-// Contains the output of CreateNatGateway.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateNatGatewayResult
-type CreateNatGatewayOutput struct {
- _ struct{} `type:"structure"`
-
- // Unique, case-sensitive identifier to ensure the idempotency of the request.
- // Only returned if a client token was provided in the request.
- ClientToken *string `locationName:"clientToken" type:"string"`
-
- // Information about the NAT gateway.
- NatGateway *NatGateway `locationName:"natGateway" type:"structure"`
-}
-
-// Describes a NAT gateway.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/NatGateway
-type NatGateway struct {
- _ struct{} `type:"structure"`
-
- // The date and time the NAT gateway was created.
- CreateTime *time.Time `locationName:"createTime" type:"timestamp" timestampFormat:"iso8601"`
-
- // The date and time the NAT gateway was deleted, if applicable.
- DeleteTime *time.Time `locationName:"deleteTime" type:"timestamp" timestampFormat:"iso8601"`
-
- // If the NAT gateway could not be created, specifies the error code for the
- // failure. (InsufficientFreeAddressesInSubnet | Gateway.NotAttached | InvalidAllocationID.NotFound
- // | Resource.AlreadyAssociated | InternalError | InvalidSubnetID.NotFound)
- FailureCode *string `locationName:"failureCode" type:"string"`
-
- // If the NAT gateway could not be created, specifies the error message for
- // the failure, that corresponds to the error code.
- //
- // * For InsufficientFreeAddressesInSubnet: "Subnet has insufficient free
- // addresses to create this NAT gateway"
- //
- // * For Gateway.NotAttached: "Network vpc-xxxxxxxx has no Internet gateway
- // attached"
- //
- // * For InvalidAllocationID.NotFound: "Elastic IP address eipalloc-xxxxxxxx
- // could not be associated with this NAT gateway"
- //
- // * For Resource.AlreadyAssociated: "Elastic IP address eipalloc-xxxxxxxx
- // is already associated"
- //
- // * For InternalError: "Network interface eni-xxxxxxxx, created and used
- // internally by this NAT gateway is in an invalid state. Please try again."
- //
- // * For InvalidSubnetID.NotFound: "The specified subnet subnet-xxxxxxxx
- // does not exist or could not be found."
- FailureMessage *string `locationName:"failureMessage" type:"string"`
-
- // Information about the IP addresses and network interface associated with
- // the NAT gateway.
- NatGatewayAddresses []*NatGatewayAddress `locationName:"natGatewayAddressSet" locationNameList:"item" type:"list"`
-
- // The ID of the NAT gateway.
- NatGatewayId *string `locationName:"natGatewayId" type:"string"`
-
- // Reserved. If you need to sustain traffic greater than the documented limits
- // (http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/vpc-nat-gateway.html),
- // contact us through the Support Center (https://console.aws.amazon.com/support/home?).
- ProvisionedBandwidth *ProvisionedBandwidth `locationName:"provisionedBandwidth" type:"structure"`
-
- // The state of the NAT gateway.
- //
- // * pending: The NAT gateway is being created and is not ready to process
- // traffic.
- //
- // * failed: The NAT gateway could not be created. Check the failureCode
- // and failureMessage fields for the reason.
- //
- // * available: The NAT gateway is able to process traffic. This status remains
- // until you delete the NAT gateway, and does not indicate the health of
- // the NAT gateway.
- //
- // * deleting: The NAT gateway is in the process of being terminated and
- // may still be processing traffic.
- //
- // * deleted: The NAT gateway has been terminated and is no longer processing
- // traffic.
- State *string `locationName:"state" type:"string" enum:"NatGatewayState"`
-
- // The ID of the subnet in which the NAT gateway is located.
- SubnetId *string `locationName:"subnetId" type:"string"`
-
- // The ID of the VPC in which the NAT gateway is located.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-// Describes the IP addresses and network interface associated with a NAT gateway.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/NatGatewayAddress
-type NatGatewayAddress struct {
- _ struct{} `type:"structure"`
-
- // The allocation ID of the Elastic IP address that's associated with the NAT
- // gateway.
- AllocationId *string `locationName:"allocationId" type:"string"`
-
- // The ID of the network interface associated with the NAT gateway.
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
-
- // The private IP address associated with the Elastic IP address.
- PrivateIp *string `locationName:"privateIp" type:"string"`
-
- // The Elastic IP address associated with the NAT gateway.
- PublicIp *string `locationName:"publicIp" type:"string"`
-}
-
-type ProvisionedBandwidth struct {
- _ struct{} `type:"structure"`
-
- // Reserved. If you need to sustain traffic greater than the documented limits
- // (http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/vpc-nat-gateway.html),
- // contact us through the Support Center (https://console.aws.amazon.com/support/home?).
- ProvisionTime *time.Time `locationName:"provisionTime" type:"timestamp" timestampFormat:"iso8601"`
-
- // Reserved. If you need to sustain traffic greater than the documented limits
- // (http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/vpc-nat-gateway.html),
- // contact us through the Support Center (https://console.aws.amazon.com/support/home?).
- Provisioned *string `locationName:"provisioned" type:"string"`
-
- // Reserved. If you need to sustain traffic greater than the documented limits
- // (http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/vpc-nat-gateway.html),
- // contact us through the Support Center (https://console.aws.amazon.com/support/home?).
- RequestTime *time.Time `locationName:"requestTime" type:"timestamp" timestampFormat:"iso8601"`
-
- // Reserved. If you need to sustain traffic greater than the documented limits
- // (http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/vpc-nat-gateway.html),
- // contact us through the Support Center (https://console.aws.amazon.com/support/home?).
- Requested *string `locationName:"requested" type:"string"`
-
- // Reserved. If you need to sustain traffic greater than the documented limits
- // (http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/vpc-nat-gateway.html),
- // contact us through the Support Center (https://console.aws.amazon.com/support/home?).
- Status *string `locationName:"status" type:"string"`
-}
-type DescribeNatGatewaysInput struct {
- _ struct{} `type:"structure"`
-
- // One or more filters.
- //
- // * nat-gateway-id - The ID of the NAT gateway.
- //
- // * state - The state of the NAT gateway (pending | failed | available |
- // deleting | deleted).
- //
- // * subnet-id - The ID of the subnet in which the NAT gateway resides.
- //
- // * vpc-id - The ID of the VPC in which the NAT gateway resides.
- Filter []*Filter `locationNameList:"Filter" type:"list"`
-
- // The maximum number of items to return for this request. The request returns
- // a token that you can specify in a subsequent call to get the next set of
- // results.
- //
- // Constraint: If the value specified is greater than 1000, we return only 1000
- // items.
- MaxResults *int64 `type:"integer"`
-
- // One or more NAT gateway IDs.
- NatGatewayIds []*string `locationName:"NatGatewayId" locationNameList:"item" type:"list"`
-
- // The token to retrieve the next page of results.
- NextToken *string `type:"string"`
-}
-
-type DescribeNatGatewaysOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the NAT gateways.
- NatGateways []*NatGateway `locationName:"natGatewaySet" locationNameList:"item" type:"list"`
-
- // The token to use to retrieve the next page of results. This value is null
- // when there are no more results to return.
- NextToken *string `locationName:"nextToken" type:"string"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type DeleteNatGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the NAT gateway.
- //
- // NatGatewayId is a required field
- NatGatewayId *string `type:"string" required:"true"`
-}
-
-type DeleteNatGatewayOutput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the NAT gateway.
- NatGatewayId *string `locationName:"natGatewayId" type:"string"`
-}
-
-// Contains the parameters for CreateVpc.
-type CreateVpcInput struct {
- _ struct{} `type:"structure"`
-
- // Requests an Amazon-provided IPv6 CIDR block with a /56 prefix length for
- // the VPC. You cannot specify the range of IP addresses, or the size of the
- // CIDR block.
- AmazonProvidedIpv6CidrBlock *bool `locationName:"amazonProvidedIpv6CidrBlock" type:"boolean"`
-
- // The IPv4 network range for the VPC, in CIDR notation. For example, 10.0.0.0/16.
- //
- // CidrBlock is a required field
- CidrBlock *string `type:"string" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The tenancy options for instances launched into the VPC. For default, instances
- // are launched with shared tenancy by default. You can launch instances with
- // any tenancy into a shared tenancy VPC. For dedicated, instances are launched
- // as dedicated tenancy instances by default. You can only launch instances
- // with a tenancy of dedicated or host into a dedicated tenancy VPC.
- //
- // Important: The host value cannot be used with this parameter. Use the default
- // or dedicated values only.
- //
- // Default: default
- InstanceTenancy *string `locationName:"instanceTenancy" type:"string" enum:"Tenancy"`
-}
-
-// Contains the output of CreateVpc.
-type CreateVpcOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the VPC.
- Vpc *Vpc `locationName:"vpc" type:"structure"`
-}
-
-// Contains the parameters for DescribeVpcs.
-type DescribeVpcsInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * cidr - The primary IPv4 CIDR block of the VPC. The CIDR block you specify
- // must exactly match the VPC's CIDR block for information to be returned
- // for the VPC. Must contain the slash followed by one or two digits (for
- // example, /28).
- //
- // * cidr-block-association.cidr-block - An IPv4 CIDR block associated with
- // the VPC.
- //
- // * cidr-block-association.association-id - The association ID for an IPv4
- // CIDR block associated with the VPC.
- //
- // * cidr-block-association.state - The state of an IPv4 CIDR block associated
- // with the VPC.
- //
- // * dhcp-options-id - The ID of a set of DHCP options.
- //
- // * ipv6-cidr-block-association.ipv6-cidr-block - An IPv6 CIDR block associated
- // with the VPC.
- //
- // * ipv6-cidr-block-association.association-id - The association ID for
- // an IPv6 CIDR block associated with the VPC.
- //
- // * ipv6-cidr-block-association.state - The state of an IPv6 CIDR block
- // associated with the VPC.
- //
- // * isDefault - Indicates whether the VPC is the default VPC.
- //
- // * state - The state of the VPC (pending | available).
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- //
- // * vpc-id - The ID of the VPC.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // One or more VPC IDs.
- //
- // Default: Describes all your VPCs.
- VpcIds []*string `locationName:"VpcId" locationNameList:"VpcId" type:"list"`
-}
-
-// Contains the output of DescribeVpcs.
-type DescribeVpcsOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more VPCs.
- Vpcs []*Vpc `locationName:"vpcSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// Describes a VPC.
-type Vpc struct {
- _ struct{} `type:"structure"`
-
- // The primary IPv4 CIDR block for the VPC.
- CidrBlock *string `locationName:"cidrBlock" type:"string"`
-
- // Information about the IPv4 CIDR blocks associated with the VPC.
- CidrBlockAssociationSet []*VpcCidrBlockAssociation `locationName:"cidrBlockAssociationSet" locationNameList:"item" type:"list"`
-
- // The ID of the set of DHCP options you've associated with the VPC (or default
- // if the default options are associated with the VPC).
- DhcpOptionsId *string `locationName:"dhcpOptionsId" type:"string"`
-
- // The allowed tenancy of instances launched into the VPC.
- InstanceTenancy *string `locationName:"instanceTenancy" type:"string" enum:"Tenancy"`
-
- // Information about the IPv6 CIDR blocks associated with the VPC.
- Ipv6CidrBlockAssociationSet []*VpcIpv6CidrBlockAssociation `locationName:"ipv6CidrBlockAssociationSet" locationNameList:"item" type:"list"`
-
- // Indicates whether the VPC is the default VPC.
- IsDefault *bool `locationName:"isDefault" type:"boolean"`
-
- // The current state of the VPC.
- State *string `locationName:"state" type:"string" enum:"VpcState"`
-
- // Any tags assigned to the VPC.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The ID of the VPC.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-// Describes an IPv4 CIDR block associated with a VPC.
-type VpcCidrBlockAssociation struct {
- _ struct{} `type:"structure"`
-
- // The association ID for the IPv4 CIDR block.
- AssociationId *string `locationName:"associationId" type:"string"`
-
- // The IPv4 CIDR block.
- CidrBlock *string `locationName:"cidrBlock" type:"string"`
-
- // Information about the state of the CIDR block.
- CidrBlockState *VpcCidrBlockState `locationName:"cidrBlockState" type:"structure"`
-}
-
-// Describes the state of a CIDR block.
-type VpcCidrBlockState struct {
- _ struct{} `type:"structure"`
-
- // The state of the CIDR block.
- State *string `locationName:"state" type:"string" enum:"VpcCidrBlockStateCode"`
-
- // A message about the status of the CIDR block, if applicable.
- StatusMessage *string `locationName:"statusMessage" type:"string"`
-}
-
-// Describes an IPv6 CIDR block associated with a VPC.
-type VpcIpv6CidrBlockAssociation struct {
- _ struct{} `type:"structure"`
-
- // The association ID for the IPv6 CIDR block.
- AssociationId *string `locationName:"associationId" type:"string"`
-
- // The IPv6 CIDR block.
- Ipv6CidrBlock *string `locationName:"ipv6CidrBlock" type:"string"`
-
- // Information about the state of the CIDR block.
- Ipv6CidrBlockState *VpcCidrBlockState `locationName:"ipv6CidrBlockState" type:"structure"`
-}
-
-// Contains the parameters for DeleteVpc.
-type DeleteVpcInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `type:"string" required:"true"`
-}
-
-type DeleteVpcOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// CreateInternetGatewayOutput Contains the output of CreateInternetGateway.
-type CreateInternetGatewayOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the Internet gateway.
- InternetGateway *InternetGateway `locationName:"internetGateway" type:"structure"`
-}
-
-// InternetGateway Describes an Internet gateway.
-type InternetGateway struct {
- _ struct{} `type:"structure"`
-
- // Any VPCs attached to the Internet gateway.
- Attachments []*InternetGatewayAttachment `locationName:"attachmentSet" locationNameList:"item" type:"list"`
-
- // The ID of the Internet gateway.
- InternetGatewayId *string `locationName:"internetGatewayId" type:"string"`
-
- // Any tags assigned to the Internet gateway.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-}
-
-// InternetGatewayAttachment Describes the attachment of a VPC to an Internet gateway or an egress-only
-// Internet gateway.
-type InternetGatewayAttachment struct {
- _ struct{} `type:"structure"`
-
- // The current state of the attachment. For an Internet gateway, the state is
- // available when attached to a VPC; otherwise, this value is not returned.
- State *string `locationName:"state" type:"string" enum:"AttachmentStatus"`
-
- // The ID of the VPC.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-type ModifyVpcAttributeInput struct {
- _ struct{} `type:"structure"`
-
- // Indicates whether the instances launched in the VPC get DNS hostnames. If
- // enabled, instances in the VPC get DNS hostnames; otherwise, they do not.
- //
- // You cannot modify the DNS resolution and DNS hostnames attributes in the
- // same request. Use separate requests for each attribute. You can only enable
- // DNS hostnames if you've enabled DNS support.
- EnableDnsHostnames *AttributeBooleanValue `type:"structure"`
-
- // Indicates whether the DNS resolution is supported for the VPC. If enabled,
- // queries to the Amazon provided DNS server at the 169.254.169.253 IP address,
- // or the reserved IP address at the base of the VPC network range "plus two"
- // will succeed. If disabled, the Amazon provided DNS service in the VPC that
- // resolves public DNS hostnames to IP addresses is not enabled.
- //
- // You cannot modify the DNS resolution and DNS hostnames attributes in the
- // same request. Use separate requests for each attribute.
- EnableDnsSupport *AttributeBooleanValue `type:"structure"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `locationName:"vpcId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s ModifyVpcAttributeInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ModifyVpcAttributeInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *ModifyVpcAttributeInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "ModifyVpcAttributeInput"}
- if s.VpcId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpcId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetEnableDnsHostnames sets the EnableDnsHostnames field's value.
-func (s *ModifyVpcAttributeInput) SetEnableDnsHostnames(v *AttributeBooleanValue) *ModifyVpcAttributeInput {
- s.EnableDnsHostnames = v
- return s
-}
-
-// SetEnableDnsSupport sets the EnableDnsSupport field's value.
-func (s *ModifyVpcAttributeInput) SetEnableDnsSupport(v *AttributeBooleanValue) *ModifyVpcAttributeInput {
- s.EnableDnsSupport = v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *ModifyVpcAttributeInput) SetVpcId(v string) *ModifyVpcAttributeInput {
- s.VpcId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ModifyVpcAttributeOutput
-type ModifyVpcAttributeOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s ModifyVpcAttributeOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ModifyVpcAttributeOutput) GoString() string {
- return s.String()
-}
-
-// Contains the parameters for DescribeInternetGateways.
-type DescribeInternetGatewaysInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * attachment.state - The current state of the attachment between the gateway
- // and the VPC (available). Present only if a VPC is attached.
- //
- // * attachment.vpc-id - The ID of an attached VPC.
- //
- // * internet-gateway-id - The ID of the Internet gateway.
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // One or more Internet gateway IDs.
- //
- // Default: Describes all your Internet gateways.
- InternetGatewayIds []*string `locationName:"internetGatewayId" locationNameList:"item" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeInternetGatewaysInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeInternetGatewaysInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeInternetGatewaysInput) SetDryRun(v bool) *DescribeInternetGatewaysInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeInternetGatewaysInput) SetFilters(v []*Filter) *DescribeInternetGatewaysInput {
- s.Filters = v
- return s
-}
-
-// SetInternetGatewayIds sets the InternetGatewayIds field's value.
-func (s *DescribeInternetGatewaysInput) SetInternetGatewayIds(v []*string) *DescribeInternetGatewaysInput {
- s.InternetGatewayIds = v
- return s
-}
-
-// DescribeInternetGatewaysOutput Contains the output of DescribeInternetGateways.
-type DescribeInternetGatewaysOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more Internet gateways.
-	InternetGateways []*InternetGateway `locationName:"internetGatewaySet" locationNameList:"item" type:"list"`
-
-	RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type DescribeVpcAttributeInput struct {
- _ struct{} `type:"structure"`
-
- // The VPC attribute.
- //
- // Attribute is a required field
- Attribute *string `type:"string" required:"true" enum:"VpcAttributeName"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `type:"string" required:"true"`
-
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-}
-
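-// SetFilters sets the Filters field's value.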
-func (s *DescribeVpcAttributeInput) SetFilters(v []*Filter) *DescribeVpcAttributeInput {
- s.Filters = v
- return s
-}
-
-// String returns the string representation
-func (s DescribeVpcAttributeInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVpcAttributeInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DescribeVpcAttributeInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DescribeVpcAttributeInput"}
- if s.Attribute == nil {
- invalidParams.Add(request.NewErrParamRequired("Attribute"))
- }
- if s.VpcId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpcId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAttribute sets the Attribute field's value.
-func (s *DescribeVpcAttributeInput) SetAttribute(v string) *DescribeVpcAttributeInput {
- s.Attribute = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeVpcAttributeInput) SetDryRun(v bool) *DescribeVpcAttributeInput {
- s.DryRun = &v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *DescribeVpcAttributeInput) SetVpcId(v string) *DescribeVpcAttributeInput {
- s.VpcId = &v
- return s
-}
-
-type DescribeVpcAttributeOutput struct {
- _ struct{} `type:"structure"`
-
- // Indicates whether the instances launched in the VPC get DNS hostnames. If
- // this attribute is true, instances in the VPC get DNS hostnames; otherwise,
- // they do not.
- EnableDnsHostnames *AttributeBooleanValue `locationName:"enableDnsHostnames" type:"structure"`
-
- // Indicates whether DNS resolution is enabled for the VPC. If this attribute
- // is true, the Amazon DNS server resolves DNS hostnames for your instances
- // to their corresponding IP addresses; otherwise, it does not.
- EnableDnsSupport *AttributeBooleanValue `locationName:"enableDnsSupport" type:"structure"`
-
- // The ID of the VPC.
- VpcId *string `locationName:"vpcId" type:"string"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeVpcAttributeOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVpcAttributeOutput) GoString() string {
- return s.String()
-}
-
-// SetEnableDnsHostnames sets the EnableDnsHostnames field's value.
-func (s *DescribeVpcAttributeOutput) SetEnableDnsHostnames(v *AttributeBooleanValue) *DescribeVpcAttributeOutput {
- s.EnableDnsHostnames = v
- return s
-}
-
-// SetRequestId sets the RequestId field's value.
-func (s *DescribeVpcAttributeOutput) SetRequestId(v *string) *DescribeVpcAttributeOutput {
-	s.RequestId = v
-	return s
-}
-
-// SetEnableDnsSupport sets the EnableDnsSupport field's value.
-func (s *DescribeVpcAttributeOutput) SetEnableDnsSupport(v *AttributeBooleanValue) *DescribeVpcAttributeOutput {
-	s.EnableDnsSupport = v
-	return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *DescribeVpcAttributeOutput) SetVpcId(v string) *DescribeVpcAttributeOutput {
- s.VpcId = &v
- return s
-}
-
-type AttachInternetGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the Internet gateway.
- //
- // InternetGatewayId is a required field
- InternetGatewayId *string `locationName:"internetGatewayId" type:"string" required:"true"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `locationName:"vpcId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s AttachInternetGatewayInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AttachInternetGatewayInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *AttachInternetGatewayInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "AttachInternetGatewayInput"}
- if s.InternetGatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("InternetGatewayId"))
- }
- if s.VpcId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpcId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *AttachInternetGatewayInput) SetDryRun(v bool) *AttachInternetGatewayInput {
- s.DryRun = &v
- return s
-}
-
-// SetInternetGatewayId sets the InternetGatewayId field's value.
-func (s *AttachInternetGatewayInput) SetInternetGatewayId(v string) *AttachInternetGatewayInput {
- s.InternetGatewayId = &v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *AttachInternetGatewayInput) SetVpcId(v string) *AttachInternetGatewayInput {
- s.VpcId = &v
- return s
-}
-
-type DeleteInternetGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the Internet gateway.
- //
- // InternetGatewayId is a required field
- InternetGatewayId *string `locationName:"internetGatewayId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteInternetGatewayInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteInternetGatewayInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteInternetGatewayInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteInternetGatewayInput"}
- if s.InternetGatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("InternetGatewayId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteInternetGatewayInput) SetDryRun(v bool) *DeleteInternetGatewayInput {
- s.DryRun = &v
- return s
-}
-
-// SetInternetGatewayId sets the InternetGatewayId field's value.
-func (s *DeleteInternetGatewayInput) SetInternetGatewayId(v string) *DeleteInternetGatewayInput {
- s.InternetGatewayId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteInternetGatewayOutput
-type DeleteInternetGatewayOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteInternetGatewayOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteInternetGatewayOutput) GoString() string {
- return s.String()
-}
-
-// Contains the parameters for CreateNatGateway.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateNatGatewayRequest
-type CreateNatGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // The allocation ID of an Elastic IP address to associate with the NAT gateway.
- // If the Elastic IP address is associated with another resource, you must first
- // disassociate it.
- //
- // AllocationId is a required field
- AllocationId *string `type:"string" required:"true"`
-
- // Unique, case-sensitive identifier you provide to ensure the idempotency of
- // the request. For more information, see How to Ensure Idempotency (http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html).
- //
- // Constraint: Maximum 64 ASCII characters.
- ClientToken *string `type:"string"`
-
- // The subnet in which to create the NAT gateway.
- //
- // SubnetId is a required field
- SubnetId *string `type:"string" required:"true"`
-}
-
-type AttachInternetGatewayOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s AttachInternetGatewayOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AttachInternetGatewayOutput) GoString() string {
- return s.String()
-}
-
-type DetachInternetGatewayOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type DetachInternetGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the Internet gateway.
- //
- // InternetGatewayId is a required field
- InternetGatewayId *string `locationName:"internetGatewayId" type:"string" required:"true"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `locationName:"vpcId" type:"string" required:"true"`
-}
-
-type CreateAccessKeyInput struct {
- _ struct{} `type:"structure"`
-
- AccessKeyId *string `type:"string" required:"false"`
- SecretAccessKey *string `type:"string" required:"false"`
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-}
-type CreateAccessKeyOutput struct {
- _ struct{} `type:"structure"`
- AccessKey *string `locationName:"accessKey" type:"structure"`
- ResponseMetadata *string `locationName:"responseMetaData" type:"structure"`
-}
-type DeleteAccessKeyInput struct {
- _ struct{} `type:"structure"`
- AccessKeyId *string `type:"string" required:"true"`
-}
-type DeleteAccessKeyOutput struct {
- _ struct{} `type:"structure"`
- ResponseMetadata *string `locationName:"responseMetaData" type:"structure"`
- Return *bool `locationName:"deleteAccessKey" type:"boolean"`
-}
-type UpdateAccessKeyInput struct {
- _ struct{} `type:"structure"`
- AccessKeyId *string `type:"string" required:"true"`
- Status *string `locationName:"status" type:"string" enum:"StatusType"`
-}
-type UpdateAccessKeyOutput struct {
- _ struct{} `type:"structure"`
- ResponseMetadata *string `locationName:"responseMetaData" type:"structure"`
- Return *bool `locationName:"updateAccessKey" type:"boolean"`
-}
-type DescribeAccessKeyInput struct {
- _ struct{} `type:"structure"`
- AccessKeyId *string `type:"string" required:"false"`
- SecretAccessKey *string `type:"string" required:"false"`
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-}
-type DescribeAccessKeyOutput struct {
- _ struct{} `type:"structure"`
- AccessKey *string `locationName:"accessKey" type:"structure"`
- ResponseMetadata *string `locationName:"responseMetaData" type:"structure"`
-}
-
-// Contains the parameters for DeleteDhcpOptions.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteDhcpOptionsRequest
-type DeleteDhcpOptionsInput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the DHCP options set.
- //
- // DhcpOptionsId is a required field
- DhcpOptionsId *string `type:"string" required:"true"`
-
- // The Internet-routable IP address for the customer gateway's outside interface.
- // The address must be static.
- //
- // PublicIp is a required field
- PublicIp *string `locationName:"IpAddress" type:"string" required:"true"`
-
- // The type of VPN connection that this customer gateway supports (ipsec.1).
- //
- // Type is a required field
- Type *string `type:"string" required:"true" enum:"GatewayType"`
-}
-
-// String returns the string representation
-func (s DeleteDhcpOptionsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// String returns the string representation
-func (s CreateCustomerGatewayInput) String() string {
- return awsutil.Prettify(s)
-}
-
-type CreateCustomerGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // For devices that support BGP, the customer gateway's BGP ASN.
- //
- // Default: 65000
- //
- // BgpAsn is a required field
- BgpAsn *int64 `type:"integer" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // PublicIp is a required field
- PublicIp *string `locationName:"IpAddress" type:"string" required:"true"`
-
- // The type of VPN connection that this customer gateway supports (ipsec.1).
- //
- // Type is a required field
- Type *string `type:"string" required:"true" enum:"GatewayType"`
-}
-
-// GoString returns the string representation
-func (s DeleteDhcpOptionsInput) GoString() string {
- return s.String()
-}
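-
-// GoString returns the string representation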
-func (s CreateCustomerGatewayInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteDhcpOptionsInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteDhcpOptionsInput"}
- if s.DhcpOptionsId == nil {
- invalidParams.Add(request.NewErrParamRequired("DhcpOptionsId"))
- }
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
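-
-// Validate inspects the fields of the type to determine if they are valid.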
-func (s *CreateCustomerGatewayInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CreateCustomerGatewayInput"}
- if s.BgpAsn == nil {
- invalidParams.Add(request.NewErrParamRequired("BgpAsn"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDhcpOptionsId sets the DhcpOptionsId field's value.
-func (s *DeleteDhcpOptionsInput) SetDhcpOptionsId(v string) *DeleteDhcpOptionsInput {
- s.DhcpOptionsId = &v
- return s
-}
-
-// SetBgpAsn sets the BgpAsn field's value.
-func (s *CreateCustomerGatewayInput) SetBgpAsn(v int64) *CreateCustomerGatewayInput {
- s.BgpAsn = &v
- return s
-}
-
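-// SetDryRun sets the DryRun field's value.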
-func (s *CreateCustomerGatewayInput) SetDryRun(v bool) *CreateCustomerGatewayInput {
- s.DryRun = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteDhcpOptionsOutput
-type DeleteDhcpOptionsOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteDhcpOptionsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// SetPublicIp sets the PublicIp field's value.
-func (s *CreateCustomerGatewayInput) SetPublicIp(v string) *CreateCustomerGatewayInput {
- s.PublicIp = &v
- return s
-}
-
-// SetType sets the Type field's value.
-func (s *CreateCustomerGatewayInput) SetType(v string) *CreateCustomerGatewayInput {
- s.Type = &v
- return s
-}
-
-// Contains the output of CreateCustomerGateway.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateCustomerGatewayResult
-type CreateCustomerGatewayOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the customer gateway.
- CustomerGateway *CustomerGateway `locationName:"customerGateway" type:"structure"`
-}
-
-// String returns the string representation
-func (s CreateCustomerGatewayOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteDhcpOptionsOutput) GoString() string {
- return s.String()
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/NewDhcpConfiguration
-type NewDhcpConfiguration struct {
- _ struct{} `type:"structure"`
-
- Key *string `locationName:"key" type:"string"`
-
- Values []*string `locationName:"Value" locationNameList:"item" type:"list"`
-}
-
-// String returns the string representation
-func (s NewDhcpConfiguration) String() string {
- return awsutil.Prettify(s)
-}
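-
-// GoString returns the string representation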
-func (s CreateCustomerGatewayOutput) GoString() string {
- return s.String()
-}
-
-// SetCustomerGateway sets the CustomerGateway field's value.
-func (s *CreateCustomerGatewayOutput) SetCustomerGateway(v *CustomerGateway) *CreateCustomerGatewayOutput {
- s.CustomerGateway = v
- return s
-}
-
-type CustomerGateway struct {
- _ struct{} `type:"structure"`
-
- // The customer gateway's Border Gateway Protocol (BGP) Autonomous System Number
- // (ASN).
- BgpAsn *string `locationName:"bgpAsn" type:"string"`
-
- // The ID of the customer gateway.
- CustomerGatewayId *string `locationName:"customerGatewayId" type:"string"`
-
- // The Internet-routable IP address of the customer gateway's outside interface.
- IpAddress *string `locationName:"ipAddress" type:"string"`
-
- // The current state of the customer gateway (pending | available | deleting
- // | deleted).
- State *string `locationName:"state" type:"string"`
-
- // Any tags assigned to the customer gateway.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The type of VPN connection the customer gateway supports (ipsec.1).
- Type *string `locationName:"type" type:"string"`
-}
-
-// String returns the string representation
-func (s CustomerGateway) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s NewDhcpConfiguration) GoString() string {
- return s.String()
-}
-
-// SetKey sets the Key field's value.
-func (s *NewDhcpConfiguration) SetKey(v string) *NewDhcpConfiguration {
- s.Key = &v
- return s
-}
-
-// SetValues sets the Values field's value.
-func (s *NewDhcpConfiguration) SetValues(v []*string) *NewDhcpConfiguration {
- s.Values = v
- return s
-}
-
-// Contains the parameters for CreateDhcpOptions.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateDhcpOptionsRequest
-type CreateDhcpOptionsInput struct {
- _ struct{} `type:"structure"`
-
- // A DHCP configuration option.
- //
- // DhcpConfigurations is a required field
- DhcpConfigurations []*NewDhcpConfiguration `locationName:"dhcpConfiguration" locationNameList:"item" type:"list" required:"true"`
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-}
-
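-// GoString returns the string representation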
-func (s CustomerGateway) GoString() string {
- return s.String()
-}
-
-// SetBgpAsn sets the BgpAsn field's value.
-func (s *CustomerGateway) SetBgpAsn(v string) *CustomerGateway {
- s.BgpAsn = &v
- return s
-}
-
-// SetCustomerGatewayId sets the CustomerGatewayId field's value.
-func (s *CustomerGateway) SetCustomerGatewayId(v string) *CustomerGateway {
- s.CustomerGatewayId = &v
- return s
-}
-
-// SetIpAddress sets the IpAddress field's value.
-func (s *CustomerGateway) SetIpAddress(v string) *CustomerGateway {
- s.IpAddress = &v
- return s
-}
-
-// SetState sets the State field's value.
-func (s *CustomerGateway) SetState(v string) *CustomerGateway {
- s.State = &v
- return s
-}
-
-// SetTags sets the Tags field's value.
-func (s *CustomerGateway) SetTags(v []*Tag) *CustomerGateway {
- s.Tags = v
- return s
-}
-
-// SetType sets the Type field's value.
-func (s *CustomerGateway) SetType(v string) *CustomerGateway {
- s.Type = &v
- return s
-}
-
-// Contains the parameters for DeleteCustomerGateway.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteCustomerGatewayRequest
-type DeleteCustomerGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the customer gateway.
- //
- // CustomerGatewayId is a required field
- CustomerGatewayId *string `type:"string" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-}
-
-// String returns the string representation
-func (s CreateDhcpOptionsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// String returns the string representation
-func (s DeleteCustomerGatewayInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateDhcpOptionsInput) GoString() string {
- return s.String()
-}
-
-// GoString returns the string representation
-func (s DeleteCustomerGatewayInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *CreateDhcpOptionsInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CreateDhcpOptionsInput"}
- if s.DhcpConfigurations == nil {
- invalidParams.Add(request.NewErrParamRequired("DhcpConfigurations"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteCustomerGatewayInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteCustomerGatewayInput"}
- if s.CustomerGatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("CustomerGatewayId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDhcpConfigurations sets the DhcpConfigurations field's value.
-func (s *CreateDhcpOptionsInput) SetDhcpConfigurations(v []*NewDhcpConfiguration) *CreateDhcpOptionsInput {
- s.DhcpConfigurations = v
- return s
-}
-
-// SetCustomerGatewayId sets the CustomerGatewayId field's value.
-func (s *DeleteCustomerGatewayInput) SetCustomerGatewayId(v string) *DeleteCustomerGatewayInput {
- s.CustomerGatewayId = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *CreateDhcpOptionsInput) SetDryRun(v bool) *CreateDhcpOptionsInput {
- s.DryRun = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteCustomerGatewayInput) SetDryRun(v bool) *DeleteCustomerGatewayInput {
- s.DryRun = &v
- return s
-}
-
-// Contains the output of CreateDhcpOptions.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateDhcpOptionsResult
-type CreateDhcpOptionsOutput struct {
- _ struct{} `type:"structure"`
-
- // A set of DHCP options.
- DhcpOptions *DhcpOptions `locationName:"dhcpOptions" type:"structure"`
-}
-
-// String returns the string representation
-func (s CreateDhcpOptionsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateDhcpOptionsOutput) GoString() string {
- return s.String()
-}
-
-// SetDhcpOptions sets the DhcpOptions field's value.
-func (s *CreateDhcpOptionsOutput) SetDhcpOptions(v *DhcpOptions) *CreateDhcpOptionsOutput {
- s.DhcpOptions = v
- return s
-}
-
-// Describes a set of DHCP options.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DhcpOptions
-type DhcpOptions struct {
- _ struct{} `type:"structure"`
-
- // One or more DHCP options in the set.
- DhcpConfigurations []*DhcpConfiguration `locationName:"dhcpConfigurationSet" locationNameList:"item" type:"list"`
-
- // The ID of the set of DHCP options.
- DhcpOptionsId *string `locationName:"dhcpOptionsId" type:"string"`
-
- // Any tags assigned to the DHCP options set.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-}
-
-// Describes a DHCP configuration option.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DhcpConfiguration
-type DhcpConfiguration struct {
- _ struct{} `type:"structure"`
-
- // The name of a DHCP option.
- Key *string `locationName:"key" type:"string"`
-
- // One or more values for the DHCP option.
- Values []*AttributeValue `locationName:"valueSet" locationNameList:"item" type:"list"`
-}
-
-// String returns the string representation
-func (s DhcpConfiguration) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DhcpConfiguration) GoString() string {
- return s.String()
-}
-
-// SetKey sets the Key field's value.
-func (s *DhcpConfiguration) SetKey(v string) *DhcpConfiguration {
- s.Key = &v
- return s
-}
-
-// SetValues sets the Values field's value.
-func (s *DhcpConfiguration) SetValues(v []*AttributeValue) *DhcpConfiguration {
- s.Values = v
- return s
-}
-
-// String returns the string representation
-func (s DhcpOptions) String() string {
- return awsutil.Prettify(s)
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteCustomerGatewayOutput
-type DeleteCustomerGatewayOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteCustomerGatewayOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DhcpOptions) GoString() string {
- return s.String()
-}
-
-// SetDhcpConfigurations sets the DhcpConfigurations field's value.
-func (s *DhcpOptions) SetDhcpConfigurations(v []*DhcpConfiguration) *DhcpOptions {
- s.DhcpConfigurations = v
- return s
-}
-
-// SetDhcpOptionsId sets the DhcpOptionsId field's value.
-func (s *DhcpOptions) SetDhcpOptionsId(v string) *DhcpOptions {
- s.DhcpOptionsId = &v
- return s
-}
-
-// SetTags sets the Tags field's value.
-func (s *DhcpOptions) SetTags(v []*Tag) *DhcpOptions {
- s.Tags = v
- return s
-}
-
-// Contains the parameters for DescribeDhcpOptions.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeDhcpOptionsRequest
-type DescribeDhcpOptionsInput struct {
- _ struct{} `type:"structure"`
-
- // The IDs of one or more DHCP options sets.
- //
- // Default: Describes all your DHCP options sets.
- DhcpOptionsIds []*string `locationName:"DhcpOptionsId" locationNameList:"DhcpOptionsId" type:"list"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * dhcp-options-id - The ID of a set of DHCP options.
- //
- // * key - The key for one of the options (for example, domain-name).
- //
- // * value - The value for one of the options.
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-}
-
-// GoString returns the string representation
-func (s DeleteCustomerGatewayOutput) GoString() string {
- return s.String()
-}
-
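-// Contains the parameters for DescribeCustomerGateways.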
-type DescribeCustomerGatewaysInput struct {
- _ struct{} `type:"structure"`
-
- // One or more customer gateway IDs.
- //
- // Default: Describes all your customer gateways.
- CustomerGatewayIds []*string `locationName:"CustomerGatewayId" locationNameList:"CustomerGatewayId" type:"list"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * bgp-asn - The customer gateway's Border Gateway Protocol (BGP) Autonomous
- // System Number (ASN).
- //
- // * customer-gateway-id - The ID of the customer gateway.
- //
- // * ip-address - The IP address of the customer gateway's Internet-routable
- // external interface.
- //
- // * state - The state of the customer gateway (pending | available | deleting
- // | deleted).
- //
- // * type - The type of customer gateway. Currently, the only supported type
- // is ipsec.1.
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeDhcpOptionsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// String returns the string representation
-func (s DescribeCustomerGatewaysInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeDhcpOptionsInput) GoString() string {
- return s.String()
-}
-
-// SetDhcpOptionsIds sets the DhcpOptionsIds field's value.
-func (s *DescribeDhcpOptionsInput) SetDhcpOptionsIds(v []*string) *DescribeDhcpOptionsInput {
- s.DhcpOptionsIds = v
- return s
-}
-
-// GoString returns the string representation
-func (s DescribeCustomerGatewaysInput) GoString() string {
- return s.String()
-}
-
-// SetCustomerGatewayIds sets the CustomerGatewayIds field's value.
-func (s *DescribeCustomerGatewaysInput) SetCustomerGatewayIds(v []*string) *DescribeCustomerGatewaysInput {
- s.CustomerGatewayIds = v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeDhcpOptionsInput) SetDryRun(v bool) *DescribeDhcpOptionsInput {
- s.DryRun = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeCustomerGatewaysInput) SetDryRun(v bool) *DescribeCustomerGatewaysInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeDhcpOptionsInput) SetFilters(v []*Filter) *DescribeDhcpOptionsInput {
- s.Filters = v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeCustomerGatewaysInput) SetFilters(v []*Filter) *DescribeCustomerGatewaysInput {
- s.Filters = v
- return s
-}
-
-// Contains the output of DescribeDhcpOptions.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeDhcpOptionsResult
-type DescribeDhcpOptionsOutput struct {
- _ struct{} `type:"structure"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-
- // Information about one or more DHCP options sets.
- DhcpOptions []*DhcpOptions `locationName:"dhcpOptionsSet" locationNameList:"item" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeDhcpOptionsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeDhcpOptionsOutput) GoString() string {
- return s.String()
-}
-
-// SetDhcpOptions sets the DhcpOptions field's value.
-func (s *DescribeDhcpOptionsOutput) SetDhcpOptions(v []*DhcpOptions) *DescribeDhcpOptionsOutput {
- s.DhcpOptions = v
- return s
-}
-
-// Contains the parameters for AssociateDhcpOptions.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/AssociateDhcpOptionsRequest
-type AssociateDhcpOptionsInput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the DHCP options set, or default to associate no DHCP options with
- // the VPC.
- //
- // DhcpOptionsId is a required field
- DhcpOptionsId *string `type:"string" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s AssociateDhcpOptionsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// Contains the output of DescribeCustomerGateways.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeCustomerGatewaysResult
-type DescribeCustomerGatewaysOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more customer gateways.
- CustomerGateways []*CustomerGateway `locationName:"customerGatewaySet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeCustomerGatewaysOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AssociateDhcpOptionsInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *AssociateDhcpOptionsInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "AssociateDhcpOptionsInput"}
- if s.DhcpOptionsId == nil {
- invalidParams.Add(request.NewErrParamRequired("DhcpOptionsId"))
- }
- if s.VpcId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpcId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDhcpOptionsId sets the DhcpOptionsId field's value.
-func (s *AssociateDhcpOptionsInput) SetDhcpOptionsId(v string) *AssociateDhcpOptionsInput {
- s.DhcpOptionsId = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *AssociateDhcpOptionsInput) SetDryRun(v bool) *AssociateDhcpOptionsInput {
- s.DryRun = &v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *AssociateDhcpOptionsInput) SetVpcId(v string) *AssociateDhcpOptionsInput {
- s.VpcId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/AssociateDhcpOptionsOutput
-type AssociateDhcpOptionsOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s AssociateDhcpOptionsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AssociateDhcpOptionsOutput) GoString() string {
- return s.String()
-}
-
-// GoString returns the string representation
-func (s DescribeCustomerGatewaysOutput) GoString() string {
- return s.String()
-}
-
-// SetCustomerGateways sets the CustomerGateways field's value.
-func (s *DescribeCustomerGatewaysOutput) SetCustomerGateways(v []*CustomerGateway) *DescribeCustomerGatewaysOutput {
- s.CustomerGateways = v
- return s
-}
-
-// SetRequestId sets the RequestId field's value.
-func (s *DescribeCustomerGatewaysOutput) SetRequestId(v string) *DescribeCustomerGatewaysOutput {
- s.RequestId = &v
- return s
-}
-
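-// Contains the parameters for CreateRoute.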
-type CreateRouteInput struct {
- _ struct{} `type:"structure"`
-
- // The IPv4 CIDR address block used for the destination match. Routing decisions
- // are based on the most specific match.
- DestinationCidrBlock *string `locationName:"destinationCidrBlock" type:"string"`
-
- // The IPv6 CIDR block used for the destination match. Routing decisions are
- // based on the most specific match.
- DestinationIpv6CidrBlock *string `locationName:"destinationIpv6CidrBlock" type:"string"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // [IPv6 traffic only] The ID of an egress-only Internet gateway.
- EgressOnlyInternetGatewayId *string `locationName:"egressOnlyInternetGatewayId" type:"string"`
-
- // The ID of an Internet gateway or virtual private gateway attached to your
- // VPC.
- GatewayId *string `locationName:"gatewayId" type:"string"`
-
- // The ID of a NAT instance in your VPC. The operation fails if you specify
- // an instance ID unless exactly one network interface is attached.
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- // [IPv4 traffic only] The ID of a NAT gateway.
- NatGatewayId *string `locationName:"natGatewayId" type:"string"`
-
- // The ID of a network interface.
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
-
- // The ID of the route table for the route.
- //
- // RouteTableId is a required field
- RouteTableId *string `locationName:"routeTableId" type:"string" required:"true"`
-
- // The ID of a VPC peering connection.
- VpcPeeringConnectionId *string `locationName:"vpcPeeringConnectionId" type:"string"`
-}
-
-// String returns the string representation
-func (s CreateRouteInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateRouteInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *CreateRouteInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CreateRouteInput"}
- if s.RouteTableId == nil {
- invalidParams.Add(request.NewErrParamRequired("RouteTableId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDestinationCidrBlock sets the DestinationCidrBlock field's value.
-func (s *CreateRouteInput) SetDestinationCidrBlock(v string) *CreateRouteInput {
- s.DestinationCidrBlock = &v
- return s
-}
-
-// SetDestinationIpv6CidrBlock sets the DestinationIpv6CidrBlock field's value.
-func (s *CreateRouteInput) SetDestinationIpv6CidrBlock(v string) *CreateRouteInput {
- s.DestinationIpv6CidrBlock = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *CreateRouteInput) SetDryRun(v bool) *CreateRouteInput {
- s.DryRun = &v
- return s
-}
-
-// SetEgressOnlyInternetGatewayId sets the EgressOnlyInternetGatewayId field's value.
-func (s *CreateRouteInput) SetEgressOnlyInternetGatewayId(v string) *CreateRouteInput {
- s.EgressOnlyInternetGatewayId = &v
- return s
-}
-
-// SetGatewayId sets the GatewayId field's value.
-func (s *CreateRouteInput) SetGatewayId(v string) *CreateRouteInput {
- s.GatewayId = &v
- return s
-}
-
-// SetInstanceId sets the InstanceId field's value.
-func (s *CreateRouteInput) SetInstanceId(v string) *CreateRouteInput {
- s.InstanceId = &v
- return s
-}
-
-// SetNatGatewayId sets the NatGatewayId field's value.
-func (s *CreateRouteInput) SetNatGatewayId(v string) *CreateRouteInput {
- s.NatGatewayId = &v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *CreateRouteInput) SetNetworkInterfaceId(v string) *CreateRouteInput {
- s.NetworkInterfaceId = &v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *CreateRouteInput) SetRouteTableId(v string) *CreateRouteInput {
- s.RouteTableId = &v
- return s
-}
-
-// SetVpcPeeringConnectionId sets the VpcPeeringConnectionId field's value.
-func (s *CreateRouteInput) SetVpcPeeringConnectionId(v string) *CreateRouteInput {
- s.VpcPeeringConnectionId = &v
- return s
-}
-
-// Contains the output of CreateRoute.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateRouteResult
-type CreateRouteOutput struct {
- _ struct{} `type:"structure"`
-
- // Returns true if the request succeeds; otherwise, it returns an error.
- Return *bool `locationName:"return" type:"boolean"`
-}
-
-// String returns the string representation
-func (s CreateRouteOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateRouteOutput) GoString() string {
- return s.String()
-}
-
-// SetReturn sets the Return field's value.
-func (s *CreateRouteOutput) SetReturn(v bool) *CreateRouteOutput {
- s.Return = &v
- return s
-}
-
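-// Describes a route in a route table.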
-type Route struct {
- _ struct{} `type:"structure"`
-
- // The IPv4 CIDR block used for the destination match.
- DestinationCidrBlock *string `locationName:"destinationCidrBlock" type:"string"`
-
- // The IPv6 CIDR block used for the destination match.
- DestinationIpv6CidrBlock *string `locationName:"destinationIpv6CidrBlock" type:"string"`
-
- // The prefix of the AWS service.
- DestinationPrefixListId *string `locationName:"destinationPrefixListId" type:"string"`
-
- // The ID of the egress-only Internet gateway.
- EgressOnlyInternetGatewayId *string `locationName:"egressOnlyInternetGatewayId" type:"string"`
-
- // The ID of a gateway attached to your VPC.
- GatewayId *string `locationName:"gatewayId" type:"string"`
-
- // The ID of a NAT instance in your VPC.
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- // The AWS account ID of the owner of the instance.
- InstanceOwnerId *string `locationName:"instanceOwnerId" type:"string"`
-
- // The ID of a NAT gateway.
- NatGatewayId *string `locationName:"natGatewayId" type:"string"`
-
- // The ID of the network interface.
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
-
- // Describes how the route was created.
- //
- // * CreateRouteTable - The route was automatically created when the route
- // table was created.
- //
- // * CreateRoute - The route was manually added to the route table.
- //
- // * EnableVgwRoutePropagation - The route was propagated by route propagation.
- Origin *string `locationName:"origin" type:"string" enum:"RouteOrigin"`
-
- // The state of the route. The blackhole state indicates that the route's target
- // isn't available (for example, the specified gateway isn't attached to the
- // VPC, or the specified NAT instance has been terminated).
- State *string `locationName:"state" type:"string" enum:"RouteState"`
-
- // The ID of the VPC peering connection.
- VpcPeeringConnectionId *string `locationName:"vpcPeeringConnectionId" type:"string"`
-}
-
-// String returns the string representation
-func (s Route) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s Route) GoString() string {
- return s.String()
-}
-
-// SetDestinationCidrBlock sets the DestinationCidrBlock field's value.
-func (s *Route) SetDestinationCidrBlock(v string) *Route {
- s.DestinationCidrBlock = &v
- return s
-}
-
-// SetDestinationIpv6CidrBlock sets the DestinationIpv6CidrBlock field's value.
-func (s *Route) SetDestinationIpv6CidrBlock(v string) *Route {
- s.DestinationIpv6CidrBlock = &v
- return s
-}
-
-// SetDestinationPrefixListId sets the DestinationPrefixListId field's value.
-func (s *Route) SetDestinationPrefixListId(v string) *Route {
- s.DestinationPrefixListId = &v
- return s
-}
-
-// SetEgressOnlyInternetGatewayId sets the EgressOnlyInternetGatewayId field's value.
-func (s *Route) SetEgressOnlyInternetGatewayId(v string) *Route {
- s.EgressOnlyInternetGatewayId = &v
- return s
-}
-
-// SetGatewayId sets the GatewayId field's value.
-func (s *Route) SetGatewayId(v string) *Route {
- s.GatewayId = &v
- return s
-}
-
-// SetInstanceId sets the InstanceId field's value.
-func (s *Route) SetInstanceId(v string) *Route {
- s.InstanceId = &v
- return s
-}
-
-// SetInstanceOwnerId sets the InstanceOwnerId field's value.
-func (s *Route) SetInstanceOwnerId(v string) *Route {
- s.InstanceOwnerId = &v
- return s
-}
-
-// SetNatGatewayId sets the NatGatewayId field's value.
-func (s *Route) SetNatGatewayId(v string) *Route {
- s.NatGatewayId = &v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *Route) SetNetworkInterfaceId(v string) *Route {
- s.NetworkInterfaceId = &v
- return s
-}
-
-// SetOrigin sets the Origin field's value.
-func (s *Route) SetOrigin(v string) *Route {
- s.Origin = &v
- return s
-}
-
-// SetState sets the State field's value.
-func (s *Route) SetState(v string) *Route {
- s.State = &v
- return s
-}
-
-// SetVpcPeeringConnectionId sets the VpcPeeringConnectionId field's value.
-func (s *Route) SetVpcPeeringConnectionId(v string) *Route {
- s.VpcPeeringConnectionId = &v
- return s
-}
-
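-// Contains the parameters for ReplaceRoute.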
-type ReplaceRouteInput struct {
- _ struct{} `type:"structure"`
-
- // The IPv4 CIDR address block used for the destination match. The value you
- // provide must match the CIDR of an existing route in the table.
- DestinationCidrBlock *string `locationName:"destinationCidrBlock" type:"string"`
-
- // The IPv6 CIDR address block used for the destination match. The value you
- // provide must match the CIDR of an existing route in the table.
- DestinationIpv6CidrBlock *string `locationName:"destinationIpv6CidrBlock" type:"string"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // [IPv6 traffic only] The ID of an egress-only Internet gateway.
- EgressOnlyInternetGatewayId *string `locationName:"egressOnlyInternetGatewayId" type:"string"`
-
- // The ID of an Internet gateway or virtual private gateway.
- GatewayId *string `locationName:"gatewayId" type:"string"`
-
- // The ID of a NAT instance in your VPC.
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- // [IPv4 traffic only] The ID of a NAT gateway.
- NatGatewayId *string `locationName:"natGatewayId" type:"string"`
-
- // The ID of a network interface.
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
-
- // The ID of the route table.
- //
- // RouteTableId is a required field
- RouteTableId *string `locationName:"routeTableId" type:"string" required:"true"`
-
- // The ID of a VPC peering connection.
- VpcPeeringConnectionId *string `locationName:"vpcPeeringConnectionId" type:"string"`
-}
-
-// String returns the string representation
-func (s ReplaceRouteInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ReplaceRouteInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *ReplaceRouteInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "ReplaceRouteInput"}
- if s.RouteTableId == nil {
- invalidParams.Add(request.NewErrParamRequired("RouteTableId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDestinationCidrBlock sets the DestinationCidrBlock field's value.
-func (s *ReplaceRouteInput) SetDestinationCidrBlock(v string) *ReplaceRouteInput {
- s.DestinationCidrBlock = &v
- return s
-}
-
-// SetDestinationIpv6CidrBlock sets the DestinationIpv6CidrBlock field's value.
-func (s *ReplaceRouteInput) SetDestinationIpv6CidrBlock(v string) *ReplaceRouteInput {
- s.DestinationIpv6CidrBlock = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *ReplaceRouteInput) SetDryRun(v bool) *ReplaceRouteInput {
- s.DryRun = &v
- return s
-}
-
-// SetEgressOnlyInternetGatewayId sets the EgressOnlyInternetGatewayId field's value.
-func (s *ReplaceRouteInput) SetEgressOnlyInternetGatewayId(v string) *ReplaceRouteInput {
- s.EgressOnlyInternetGatewayId = &v
- return s
-}
-
-// SetGatewayId sets the GatewayId field's value.
-func (s *ReplaceRouteInput) SetGatewayId(v string) *ReplaceRouteInput {
- s.GatewayId = &v
- return s
-}
-
-// SetInstanceId sets the InstanceId field's value.
-func (s *ReplaceRouteInput) SetInstanceId(v string) *ReplaceRouteInput {
- s.InstanceId = &v
- return s
-}
-
-// SetNatGatewayId sets the NatGatewayId field's value.
-func (s *ReplaceRouteInput) SetNatGatewayId(v string) *ReplaceRouteInput {
- s.NatGatewayId = &v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *ReplaceRouteInput) SetNetworkInterfaceId(v string) *ReplaceRouteInput {
- s.NetworkInterfaceId = &v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *ReplaceRouteInput) SetRouteTableId(v string) *ReplaceRouteInput {
- s.RouteTableId = &v
- return s
-}
-
-// SetVpcPeeringConnectionId sets the VpcPeeringConnectionId field's value.
-func (s *ReplaceRouteInput) SetVpcPeeringConnectionId(v string) *ReplaceRouteInput {
- s.VpcPeeringConnectionId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ReplaceRouteOutput
-type ReplaceRouteOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s ReplaceRouteOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ReplaceRouteOutput) GoString() string {
- return s.String()
-}
-
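-// Contains the parameters for DeleteRoute.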
-type DeleteRouteInput struct {
- _ struct{} `type:"structure"`
-
- // The IPv4 CIDR range for the route. The value you specify must match the CIDR
- // for the route exactly.
- DestinationCidrBlock *string `locationName:"destinationCidrBlock" type:"string"`
-
- // The IPv6 CIDR range for the route. The value you specify must match the CIDR
- // for the route exactly.
- DestinationIpv6CidrBlock *string `locationName:"destinationIpv6CidrBlock" type:"string"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the route table.
- //
- // RouteTableId is a required field
- RouteTableId *string `locationName:"routeTableId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteRouteInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteRouteInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteRouteInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteRouteInput"}
- if s.RouteTableId == nil {
- invalidParams.Add(request.NewErrParamRequired("RouteTableId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDestinationCidrBlock sets the DestinationCidrBlock field's value.
-func (s *DeleteRouteInput) SetDestinationCidrBlock(v string) *DeleteRouteInput {
- s.DestinationCidrBlock = &v
- return s
-}
-
-// SetDestinationIpv6CidrBlock sets the DestinationIpv6CidrBlock field's value.
-func (s *DeleteRouteInput) SetDestinationIpv6CidrBlock(v string) *DeleteRouteInput {
- s.DestinationIpv6CidrBlock = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteRouteInput) SetDryRun(v bool) *DeleteRouteInput {
- s.DryRun = &v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *DeleteRouteInput) SetRouteTableId(v string) *DeleteRouteInput {
- s.RouteTableId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteRouteOutput
-type DeleteRouteOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteRouteOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteRouteOutput) GoString() string {
- return s.String()
-}
-
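-// Contains the parameters for DescribeRouteTables.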
-type DescribeRouteTablesInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * vpc-id - The ID of the VPC for the route table.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // One or more route table IDs.
- //
- // Default: Describes all your route tables.
- RouteTableIds []*string `locationName:"RouteTableId" locationNameList:"item" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeRouteTablesInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeRouteTablesInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeRouteTablesInput) SetDryRun(v bool) *DescribeRouteTablesInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeRouteTablesInput) SetFilters(v []*Filter) *DescribeRouteTablesInput {
- s.Filters = v
- return s
-}
-
-// SetRouteTableIds sets the RouteTableIds field's value.
-func (s *DescribeRouteTablesInput) SetRouteTableIds(v []*string) *DescribeRouteTablesInput {
- s.RouteTableIds = v
- return s
-}
-
-// Contains the output of DescribeRouteTables.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeRouteTablesResult
-type DescribeRouteTablesOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more route tables.
- RouteTables []*RouteTable `locationName:"routeTableSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeRouteTablesOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeRouteTablesOutput) GoString() string {
- return s.String()
-}
-
-// SetRouteTables sets the RouteTables field's value.
-func (s *DescribeRouteTablesOutput) SetRouteTables(v []*RouteTable) *DescribeRouteTablesOutput {
- s.RouteTables = v
- return s
-}
-
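-// Describes a route table.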
-type RouteTable struct {
- _ struct{} `type:"structure"`
-
- // The associations between the route table and one or more subnets.
- Associations []*RouteTableAssociation `locationName:"associationSet" locationNameList:"item" type:"list"`
-
- // Any virtual private gateway (VGW) propagating routes.
- PropagatingVgws []*PropagatingVgw `locationName:"propagatingVgwSet" locationNameList:"item" type:"list"`
-
- // The ID of the route table.
- RouteTableId *string `locationName:"routeTableId" type:"string"`
-
- // The routes in the route table.
- Routes []*Route `locationName:"routeSet" locationNameList:"item" type:"list"`
-
- // Any tags assigned to the route table.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The ID of the VPC.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-// String returns the string representation
-func (s RouteTable) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s RouteTable) GoString() string {
- return s.String()
-}
-
-// SetAssociations sets the Associations field's value.
-func (s *RouteTable) SetAssociations(v []*RouteTableAssociation) *RouteTable {
- s.Associations = v
- return s
-}
-
-// SetPropagatingVgws sets the PropagatingVgws field's value.
-func (s *RouteTable) SetPropagatingVgws(v []*PropagatingVgw) *RouteTable {
- s.PropagatingVgws = v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *RouteTable) SetRouteTableId(v string) *RouteTable {
- s.RouteTableId = &v
- return s
-}
-
-// SetRoutes sets the Routes field's value.
-func (s *RouteTable) SetRoutes(v []*Route) *RouteTable {
- s.Routes = v
- return s
-}
-
-// SetTags sets the Tags field's value.
-func (s *RouteTable) SetTags(v []*Tag) *RouteTable {
- s.Tags = v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *RouteTable) SetVpcId(v string) *RouteTable {
- s.VpcId = &v
- return s
-}
-
-// Describes an association between a route table and a subnet.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/RouteTableAssociation
-type RouteTableAssociation struct {
- _ struct{} `type:"structure"`
-
- // Indicates whether this is the main route table.
- Main *bool `locationName:"main" type:"boolean"`
-
- // The ID of the association between a route table and a subnet.
- RouteTableAssociationId *string `locationName:"routeTableAssociationId" type:"string"`
-
- // The ID of the route table.
- RouteTableId *string `locationName:"routeTableId" type:"string"`
-
- // The ID of the subnet. A subnet ID is not returned for an implicit association.
- SubnetId *string `locationName:"subnetId" type:"string"`
-}
-
-// String returns the string representation
-func (s RouteTableAssociation) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s RouteTableAssociation) GoString() string {
- return s.String()
-}
-
-// SetMain sets the Main field's value.
-func (s *RouteTableAssociation) SetMain(v bool) *RouteTableAssociation {
- s.Main = &v
- return s
-}
-
-// SetRouteTableAssociationId sets the RouteTableAssociationId field's value.
-func (s *RouteTableAssociation) SetRouteTableAssociationId(v string) *RouteTableAssociation {
- s.RouteTableAssociationId = &v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *RouteTableAssociation) SetRouteTableId(v string) *RouteTableAssociation {
- s.RouteTableId = &v
- return s
-}
-
-// SetSubnetId sets the SubnetId field's value.
-func (s *RouteTableAssociation) SetSubnetId(v string) *RouteTableAssociation {
- s.SubnetId = &v
- return s
-}
-
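-// Describes a virtual private gateway (VGW) propagating routes.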
-type PropagatingVgw struct {
- _ struct{} `type:"structure"`
-
- // The ID of the virtual private gateway (VGW).
- GatewayId *string `locationName:"gatewayId" type:"string"`
-}
-
-// String returns the string representation
-func (s PropagatingVgw) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s PropagatingVgw) GoString() string {
- return s.String()
-}
-
-// SetGatewayId sets the GatewayId field's value.
-func (s *PropagatingVgw) SetGatewayId(v string) *PropagatingVgw {
- s.GatewayId = &v
- return s
-}
-
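-// Contains the parameters for CreateRouteTable.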
-type CreateRouteTableInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `locationName:"vpcId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s CreateRouteTableInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateRouteTableInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *CreateRouteTableInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CreateRouteTableInput"}
- if s.VpcId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpcId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *CreateRouteTableInput) SetDryRun(v bool) *CreateRouteTableInput {
- s.DryRun = &v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *CreateRouteTableInput) SetVpcId(v string) *CreateRouteTableInput {
- s.VpcId = &v
- return s
-}
-
-// Contains the output of CreateRouteTable.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateRouteTableResult
-type CreateRouteTableOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the route table.
- RouteTable *RouteTable `locationName:"routeTable" type:"structure"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s CreateRouteTableOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateRouteTableOutput) GoString() string {
- return s.String()
-}
-
-// SetRouteTable sets the RouteTable field's value.
-func (s *CreateRouteTableOutput) SetRouteTable(v *RouteTable) *CreateRouteTableOutput {
- s.RouteTable = v
- return s
-}
-
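-// Contains the parameters for DisableVgwRoutePropagation.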
-type DisableVgwRoutePropagationInput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the virtual private gateway.
- //
- // GatewayId is a required field
- GatewayId *string `type:"string" required:"true"`
-
- // The ID of the route table.
- //
- // RouteTableId is a required field
- RouteTableId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DisableVgwRoutePropagationInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DisableVgwRoutePropagationInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DisableVgwRoutePropagationInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DisableVgwRoutePropagationInput"}
- if s.GatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("GatewayId"))
- }
- if s.RouteTableId == nil {
- invalidParams.Add(request.NewErrParamRequired("RouteTableId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetGatewayId sets the GatewayId field's value.
-func (s *DisableVgwRoutePropagationInput) SetGatewayId(v string) *DisableVgwRoutePropagationInput {
- s.GatewayId = &v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *DisableVgwRoutePropagationInput) SetRouteTableId(v string) *DisableVgwRoutePropagationInput {
- s.RouteTableId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DisableVgwRoutePropagationOutput
-type DisableVgwRoutePropagationOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DisableVgwRoutePropagationOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DisableVgwRoutePropagationOutput) GoString() string {
- return s.String()
-}
-
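-// Contains the parameters for EnableVgwRoutePropagation.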
-type EnableVgwRoutePropagationInput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the virtual private gateway.
- //
- // GatewayId is a required field
- GatewayId *string `type:"string" required:"true"`
-
- // The ID of the route table.
- //
- // RouteTableId is a required field
- RouteTableId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s EnableVgwRoutePropagationInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s EnableVgwRoutePropagationInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *EnableVgwRoutePropagationInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "EnableVgwRoutePropagationInput"}
- if s.GatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("GatewayId"))
- }
- if s.RouteTableId == nil {
- invalidParams.Add(request.NewErrParamRequired("RouteTableId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetGatewayId sets the GatewayId field's value.
-func (s *EnableVgwRoutePropagationInput) SetGatewayId(v string) *EnableVgwRoutePropagationInput {
- s.GatewayId = &v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *EnableVgwRoutePropagationInput) SetRouteTableId(v string) *EnableVgwRoutePropagationInput {
- s.RouteTableId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/EnableVgwRoutePropagationOutput
-type EnableVgwRoutePropagationOutput struct {
- _ struct{} `type:"structure"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s EnableVgwRoutePropagationOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s EnableVgwRoutePropagationOutput) GoString() string {
- return s.String()
-}
-
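-// Contains the parameters for DisassociateRouteTable.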
-type DisassociateRouteTableInput struct {
- _ struct{} `type:"structure"`
-
- // The association ID representing the current association between the route
- // table and subnet.
- //
- // AssociationId is a required field
- AssociationId *string `locationName:"associationId" type:"string" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-}
-
-// String returns the string representation
-func (s DisassociateRouteTableInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DisassociateRouteTableInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DisassociateRouteTableInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DisassociateRouteTableInput"}
- if s.AssociationId == nil {
- invalidParams.Add(request.NewErrParamRequired("AssociationId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAssociationId sets the AssociationId field's value.
-func (s *DisassociateRouteTableInput) SetAssociationId(v string) *DisassociateRouteTableInput {
- s.AssociationId = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DisassociateRouteTableInput) SetDryRun(v bool) *DisassociateRouteTableInput {
- s.DryRun = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DisassociateRouteTableOutput
-type DisassociateRouteTableOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DisassociateRouteTableOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DisassociateRouteTableOutput) GoString() string {
- return s.String()
-}
-
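-// Contains the parameters for DeleteRouteTable.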
-type DeleteRouteTableInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the route table.
- //
- // RouteTableId is a required field
- RouteTableId *string `locationName:"routeTableId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteRouteTableInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteRouteTableInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteRouteTableInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteRouteTableInput"}
- if s.RouteTableId == nil {
- invalidParams.Add(request.NewErrParamRequired("RouteTableId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteRouteTableInput) SetDryRun(v bool) *DeleteRouteTableInput {
- s.DryRun = &v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *DeleteRouteTableInput) SetRouteTableId(v string) *DeleteRouteTableInput {
- s.RouteTableId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteRouteTableOutput
-type DeleteRouteTableOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteRouteTableOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteRouteTableOutput) GoString() string {
- return s.String()
-}
-
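-// Contains the parameters for AssociateRouteTable.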
-type AssociateRouteTableInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the route table.
- //
- // RouteTableId is a required field
- RouteTableId *string `locationName:"routeTableId" type:"string" required:"true"`
-
- // The ID of the subnet.
- //
- // SubnetId is a required field
- SubnetId *string `locationName:"subnetId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s AssociateRouteTableInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AssociateRouteTableInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *AssociateRouteTableInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "AssociateRouteTableInput"}
- if s.RouteTableId == nil {
- invalidParams.Add(request.NewErrParamRequired("RouteTableId"))
- }
- if s.SubnetId == nil {
- invalidParams.Add(request.NewErrParamRequired("SubnetId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *AssociateRouteTableInput) SetDryRun(v bool) *AssociateRouteTableInput {
- s.DryRun = &v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *AssociateRouteTableInput) SetRouteTableId(v string) *AssociateRouteTableInput {
- s.RouteTableId = &v
- return s
-}
-
-// SetSubnetId sets the SubnetId field's value.
-func (s *AssociateRouteTableInput) SetSubnetId(v string) *AssociateRouteTableInput {
- s.SubnetId = &v
- return s
-}
-
-// Contains the output of AssociateRouteTable.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/AssociateRouteTableResult
-type AssociateRouteTableOutput struct {
- _ struct{} `type:"structure"`
-
- // The route table association ID (needed to disassociate the route table).
- AssociationId *string `locationName:"associationId" type:"string"`
-}
-
-// String returns the string representation
-func (s AssociateRouteTableOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AssociateRouteTableOutput) GoString() string {
- return s.String()
-}
-
-// SetAssociationId sets the AssociationId field's value.
-func (s *AssociateRouteTableOutput) SetAssociationId(v string) *AssociateRouteTableOutput {
- s.AssociationId = &v
- return s
-}
-
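-// Contains the parameters for ReplaceRouteTableAssociation.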
-type ReplaceRouteTableAssociationInput struct {
- _ struct{} `type:"structure"`
-
- // The association ID.
- //
- // AssociationId is a required field
- AssociationId *string `locationName:"associationId" type:"string" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the new route table to associate with the subnet.
- //
- // RouteTableId is a required field
- RouteTableId *string `locationName:"routeTableId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s ReplaceRouteTableAssociationInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ReplaceRouteTableAssociationInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *ReplaceRouteTableAssociationInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "ReplaceRouteTableAssociationInput"}
- if s.AssociationId == nil {
- invalidParams.Add(request.NewErrParamRequired("AssociationId"))
- }
- if s.RouteTableId == nil {
- invalidParams.Add(request.NewErrParamRequired("RouteTableId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAssociationId sets the AssociationId field's value.
-func (s *ReplaceRouteTableAssociationInput) SetAssociationId(v string) *ReplaceRouteTableAssociationInput {
- s.AssociationId = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *ReplaceRouteTableAssociationInput) SetDryRun(v bool) *ReplaceRouteTableAssociationInput {
- s.DryRun = &v
- return s
-}
-
-// SetRouteTableId sets the RouteTableId field's value.
-func (s *ReplaceRouteTableAssociationInput) SetRouteTableId(v string) *ReplaceRouteTableAssociationInput {
- s.RouteTableId = &v
- return s
-}
-
-// Contains the output of ReplaceRouteTableAssociation.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ReplaceRouteTableAssociationResult
-type ReplaceRouteTableAssociationOutput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the new association.
- NewAssociationId *string `locationName:"newAssociationId" type:"string"`
-}
-
-// String returns the string representation
-func (s ReplaceRouteTableAssociationOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ReplaceRouteTableAssociationOutput) GoString() string {
- return s.String()
-}
-
-// SetNewAssociationId sets the NewAssociationId field's value.
-func (s *ReplaceRouteTableAssociationOutput) SetNewAssociationId(v string) *ReplaceRouteTableAssociationOutput {
- s.NewAssociationId = &v
- return s
-}
-
-// String returns the string representation
-func (s DescribeVpcsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVpcsInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeVpcsInput) SetDryRun(v bool) *DescribeVpcsInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeVpcsInput) SetFilters(v []*Filter) *DescribeVpcsInput {
- s.Filters = v
- return s
-}
-
-// SetVpcIds sets the VpcIds field's value.
-func (s *DescribeVpcsInput) SetVpcIds(v []*string) *DescribeVpcsInput {
- s.VpcIds = v
- return s
-}
-
-// // Contains the output of DescribeVpcs.
-// // Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpcsResult
-// type DescribeVpcsOutput struct {
-// _ struct{} `type:"structure"`
-
-// // Information about one or more VPCs.
-// Vpcs []*Vpc `locationName:"vpcSet" locationNameList:"item" type:"list"`
-// }
-
-// // String returns the string representation
-// func (s DescribeVpcsOutput) String() string {
-// return awsutil.Prettify(s)
-// }
-
-// // GoString returns the string representation
-// func (s DescribeVpcsOutput) GoString() string {
-// return s.String()
-// }
-
-// SetVpcs sets the Vpcs field's value.
-func (s *DescribeVpcsOutput) SetVpcs(v []*Vpc) *DescribeVpcsOutput {
- s.Vpcs = v
- return s
-}
-
-type VpnConnectionOptionsSpecification struct {
- _ struct{} `type:"structure"`
-
- // Indicates whether the VPN connection uses static routes only. Static routes
- // must be used for devices that don't support BGP.
- StaticRoutesOnly *bool `locationName:"staticRoutesOnly" type:"boolean"`
-}
-
-// String returns the string representation
-func (s VpnConnectionOptionsSpecification) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s VpnConnectionOptionsSpecification) GoString() string {
- return s.String()
-}
-
-// SetStaticRoutesOnly sets the StaticRoutesOnly field's value.
-func (s *VpnConnectionOptionsSpecification) SetStaticRoutesOnly(v bool) *VpnConnectionOptionsSpecification {
- s.StaticRoutesOnly = &v
- return s
-}
-
-type CreateVpnGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // The Availability Zone for the virtual private gateway.
- AvailabilityZone *string `type:"string"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The type of VPN connection this virtual private gateway supports.
- //
- // Type is a required field
- Type *string `type:"string" required:"true" enum:"GatewayType"`
-}
-
-// String returns the string representation
-func (s CreateVpnGatewayInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateVpnGatewayInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *CreateVpnGatewayInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CreateVpnGatewayInput"}
- if s.Type == nil {
- invalidParams.Add(request.NewErrParamRequired("Type"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAvailabilityZone sets the AvailabilityZone field's value.
-func (s *CreateVpnGatewayInput) SetAvailabilityZone(v string) *CreateVpnGatewayInput {
- s.AvailabilityZone = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *CreateVpnGatewayInput) SetDryRun(v bool) *CreateVpnGatewayInput {
- s.DryRun = &v
- return s
-}
-
-// SetType sets the Type field's value.
-func (s *CreateVpnGatewayInput) SetType(v string) *CreateVpnGatewayInput {
- s.Type = &v
- return s
-}
-
-type CreateVpnConnectionInput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the customer gateway.
- //
- // CustomerGatewayId is a required field
- CustomerGatewayId *string `type:"string" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // Indicates whether the VPN connection requires static routes. If you are creating
- // a VPN connection for a device that does not support BGP, you must specify
- // true.
- //
- // Default: false
- Options *VpnConnectionOptionsSpecification `locationName:"options" type:"structure"`
-
- // The type of VPN connection (ipsec.1).
- //
- // Type is a required field
- Type *string `type:"string" required:"true"`
-
- // The ID of the virtual private gateway.
- //
- // VpnGatewayId is a required field
- VpnGatewayId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s CreateVpnConnectionInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateVpnConnectionInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *CreateVpnConnectionInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CreateVpnConnectionInput"}
- if s.CustomerGatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("CustomerGatewayId"))
- }
- if s.Type == nil {
- invalidParams.Add(request.NewErrParamRequired("Type"))
- }
- if s.VpnGatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpnGatewayId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetCustomerGatewayId sets the CustomerGatewayId field's value.
-func (s *CreateVpnConnectionInput) SetCustomerGatewayId(v string) *CreateVpnConnectionInput {
- s.CustomerGatewayId = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *CreateVpnConnectionInput) SetDryRun(v bool) *CreateVpnConnectionInput {
- s.DryRun = &v
- return s
-}
-
-// SetOptions sets the Options field's value.
-func (s *CreateVpnConnectionInput) SetOptions(v *VpnConnectionOptionsSpecification) *CreateVpnConnectionInput {
- s.Options = v
- return s
-}
-
-// SetType sets the Type field's value.
-func (s *CreateVpnConnectionInput) SetType(v string) *CreateVpnConnectionInput {
- s.Type = &v
- return s
-}
-
-// SetVpnGatewayId sets the VpnGatewayId field's value.
-func (s *CreateVpnConnectionInput) SetVpnGatewayId(v string) *CreateVpnConnectionInput {
- s.VpnGatewayId = &v
- return s
-}
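-
-// Usage sketch (illustrative only, placeholder IDs): for a customer gateway
-// device that does not support BGP, the Options block must request static
-// routes only, as documented above. A minimal input can be assembled with the
-// setters defined here and checked with Validate:
-//
-//	opts := (&VpnConnectionOptionsSpecification{}).SetStaticRoutesOnly(true)
-//	input := (&CreateVpnConnectionInput{}).
-//		SetCustomerGatewayId("cgw-12345678").
-//		SetVpnGatewayId("vgw-12345678").
-//		SetType("ipsec.1").
-//		SetOptions(opts)
-//	if err := input.Validate(); err != nil {
-//		// CustomerGatewayId, VpnGatewayId, or Type is missing
-//	}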
-
-// Contains the output of CreateVpnConnection.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateVpnConnectionResult
-type CreateVpnConnectionOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the VPN connection.
- VpnConnection *VpnConnection `locationName:"vpnConnection" type:"structure"`
-}
-
-// String returns the string representation
-func (s CreateVpnConnectionOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateVpnConnectionOutput) GoString() string {
- return s.String()
-}
-
-// SetVpnConnection sets the VpnConnection field's value.
-func (s *CreateVpnConnectionOutput) SetVpnConnection(v *VpnConnection) *CreateVpnConnectionOutput {
- s.VpnConnection = v
- return s
-}
-
-// Contains the output of CreateVpnGateway.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateVpnGatewayResult
-type CreateVpnGatewayOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the virtual private gateway.
- VpnGateway *VpnGateway `locationName:"vpnGateway" type:"structure"`
-}
-
-// String returns the string representation
-func (s CreateVpnGatewayOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-type VpnConnection struct {
- _ struct{} `type:"structure"`
-
- // The configuration information for the VPN connection's customer gateway (in
- // the native XML format). This element is always present in the CreateVpnConnection
- // response; however, it's present in the DescribeVpnConnections response only
- // if the VPN connection is in the pending or available state.
- CustomerGatewayConfiguration *string `locationName:"customerGatewayConfiguration" type:"string"`
-
- // The ID of the customer gateway at your end of the VPN connection.
- CustomerGatewayId *string `locationName:"customerGatewayId" type:"string"`
-
- // The VPN connection options.
- Options *VpnConnectionOptions `locationName:"options" type:"structure"`
-
- // The static routes associated with the VPN connection.
- Routes []*VpnStaticRoute `locationName:"routes" locationNameList:"item" type:"list"`
-
- // The current state of the VPN connection.
- State *string `locationName:"state" type:"string" enum:"VpnState"`
-
- // Any tags assigned to the VPN connection.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The type of VPN connection.
- Type *string `locationName:"type" type:"string" enum:"GatewayType"`
-
- // Information about the VPN tunnel.
- VgwTelemetry []*VgwTelemetry `locationName:"vgwTelemetry" locationNameList:"item" type:"list"`
-
- // The ID of the VPN connection.
- VpnConnectionId *string `locationName:"vpnConnectionId" type:"string"`
-
- // The ID of the virtual private gateway at the AWS side of the VPN connection.
- VpnGatewayId *string `locationName:"vpnGatewayId" type:"string"`
-}
-
-// String returns the string representation
-func (s VpnConnection) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s VpnConnection) GoString() string {
- return s.String()
-}
-
-// SetCustomerGatewayConfiguration sets the CustomerGatewayConfiguration field's value.
-func (s *VpnConnection) SetCustomerGatewayConfiguration(v string) *VpnConnection {
- s.CustomerGatewayConfiguration = &v
- return s
-}
-
-// SetCustomerGatewayId sets the CustomerGatewayId field's value.
-func (s *VpnConnection) SetCustomerGatewayId(v string) *VpnConnection {
- s.CustomerGatewayId = &v
- return s
-}
-
-// SetOptions sets the Options field's value.
-func (s *VpnConnection) SetOptions(v *VpnConnectionOptions) *VpnConnection {
- s.Options = v
- return s
-}
-
-// SetRoutes sets the Routes field's value.
-func (s *VpnConnection) SetRoutes(v []*VpnStaticRoute) *VpnConnection {
- s.Routes = v
- return s
-}
-
-// SetState sets the State field's value.
-func (s *VpnConnection) SetState(v string) *VpnConnection {
- s.State = &v
- return s
-}
-
-// SetTags sets the Tags field's value.
-func (s *VpnConnection) SetTags(v []*Tag) *VpnConnection {
- s.Tags = v
- return s
-}
-
-// SetType sets the Type field's value.
-func (s *VpnConnection) SetType(v string) *VpnConnection {
- s.Type = &v
- return s
-}
-
-// SetVgwTelemetry sets the VgwTelemetry field's value.
-func (s *VpnConnection) SetVgwTelemetry(v []*VgwTelemetry) *VpnConnection {
- s.VgwTelemetry = v
- return s
-}
-
-// SetVpnConnectionId sets the VpnConnectionId field's value.
-func (s *VpnConnection) SetVpnConnectionId(v string) *VpnConnection {
- s.VpnConnectionId = &v
- return s
-}
-
-// SetVpnGatewayId sets the VpnGatewayId field's value.
-func (s *VpnConnection) SetVpnGatewayId(v string) *VpnConnection {
- s.VpnGatewayId = &v
- return s
-}
-
-// GoString returns the string representation
-func (s CreateVpnGatewayOutput) GoString() string {
- return s.String()
-}
-
-// SetVpnGateway sets the VpnGateway field's value.
-func (s *CreateVpnGatewayOutput) SetVpnGateway(v *VpnGateway) *CreateVpnGatewayOutput {
- s.VpnGateway = v
- return s
-}
-
-type VpnGateway struct {
- _ struct{} `type:"structure"`
-
- // The Availability Zone where the virtual private gateway was created, if applicable.
- // This field may be empty or not returned.
- AvailabilityZone *string `locationName:"availabilityZone" type:"string"`
-
- // The current state of the virtual private gateway.
- State *string `locationName:"state" type:"string" enum:"VpnState"`
-
- // Any tags assigned to the virtual private gateway.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The type of VPN connection the virtual private gateway supports.
- Type *string `locationName:"type" type:"string" enum:"GatewayType"`
-
- // Any VPCs attached to the virtual private gateway.
- VpcAttachments []*VpcAttachment `locationName:"attachments" locationNameList:"item" type:"list"`
-
- // The ID of the virtual private gateway.
- VpnGatewayId *string `locationName:"vpnGatewayId" type:"string"`
-}
-
-// String returns the string representation
-func (s VpnGateway) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s VpnGateway) GoString() string {
- return s.String()
-}
-
-// SetAvailabilityZone sets the AvailabilityZone field's value.
-func (s *VpnGateway) SetAvailabilityZone(v string) *VpnGateway {
- s.AvailabilityZone = &v
- return s
-}
-
-// SetState sets the State field's value.
-func (s *VpnGateway) SetState(v string) *VpnGateway {
- s.State = &v
- return s
-}
-
-// SetTags sets the Tags field's value.
-func (s *VpnGateway) SetTags(v []*Tag) *VpnGateway {
- s.Tags = v
- return s
-}
-
-// SetType sets the Type field's value.
-func (s *VpnGateway) SetType(v string) *VpnGateway {
- s.Type = &v
- return s
-}
-
-// SetVpcAttachments sets the VpcAttachments field's value.
-func (s *VpnGateway) SetVpcAttachments(v []*VpcAttachment) *VpnGateway {
- s.VpcAttachments = v
- return s
-}
-
-// SetVpnGatewayId sets the VpnGatewayId field's value.
-func (s *VpnGateway) SetVpnGatewayId(v string) *VpnGateway {
- s.VpnGatewayId = &v
- return s
-}
-
-type VpnConnectionOptions struct {
- _ struct{} `type:"structure"`
-
- // Indicates whether the VPN connection uses static routes only. Static routes
- // must be used for devices that don't support BGP.
- StaticRoutesOnly *bool `locationName:"staticRoutesOnly" type:"boolean"`
-}
-
-type VpcAttachment struct {
- _ struct{} `type:"structure"`
-
- // The current state of the attachment.
- State *string `locationName:"state" type:"string" enum:"AttachmentStatus"`
-
- // The ID of the VPC.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-// String returns the string representation
-func (s VpcAttachment) String() string {
- return awsutil.Prettify(s)
-}
-
-// SetStaticRoutesOnly sets the StaticRoutesOnly field's value.
-func (s *VpnConnectionOptions) SetStaticRoutesOnly(v bool) *VpnConnectionOptions {
- s.StaticRoutesOnly = &v
- return s
-}
-
-type VpnStaticRoute struct {
- _ struct{} `type:"structure"`
-
- // The CIDR block associated with the local subnet of the customer data center.
- DestinationCidrBlock *string `locationName:"destinationCidrBlock" type:"string"`
-
- // Indicates how the routes were provided.
- Source *string `locationName:"source" type:"string" enum:"VpnStaticRouteSource"`
-
- // The current state of the static route.
- State *string `locationName:"state" type:"string" enum:"VpnState"`
-}
-
-// String returns the string representation
-func (s VpnStaticRoute) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s VpcAttachment) GoString() string {
- return s.String()
-}
-
-// SetState sets the State field's value.
-func (s *VpcAttachment) SetState(v string) *VpcAttachment {
- s.State = &v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *VpcAttachment) SetVpcId(v string) *VpcAttachment {
- s.VpcId = &v
- return s
-}
-
-type DescribeVpnGatewaysInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * attachment.state - The current state of the attachment between the gateway
- // and the VPC (attaching | attached | detaching | detached).
- //
- // * attachment.vpc-id - The ID of an attached VPC.
- //
- // * availability-zone - The Availability Zone for the virtual private gateway
- // (if applicable).
- //
- // * state - The state of the virtual private gateway (pending | available
- // | deleting | deleted).
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- //
- // * type - The type of virtual private gateway. Currently the only supported
- // type is ipsec.1.
- //
- // * vpn-gateway-id - The ID of the virtual private gateway.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // One or more virtual private gateway IDs.
- //
- // Default: Describes all your virtual private gateways.
- VpnGatewayIds []*string `locationName:"VpnGatewayId" locationNameList:"VpnGatewayId" type:"list"`
-}
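-
-// Usage sketch (illustrative only): the filters listed above are passed as
-// name/value pairs. Assuming the Filter type defined elsewhere in this file
-// exposes Name and Values fields (as in the upstream SDK), a request for all
-// available virtual private gateways could look like:
-//
-//	name, value := "state", "available"
-//	input := (&DescribeVpnGatewaysInput{}).SetFilters([]*Filter{
-//		&Filter{Name: &name, Values: []*string{&value}},
-//	})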
-
-// String returns the string representation
-func (s DescribeVpnGatewaysInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s VpnStaticRoute) GoString() string {
- return s.String()
-}
-
-// SetDestinationCidrBlock sets the DestinationCidrBlock field's value.
-func (s *VpnStaticRoute) SetDestinationCidrBlock(v string) *VpnStaticRoute {
- s.DestinationCidrBlock = &v
- return s
-}
-
-// SetSource sets the Source field's value.
-func (s *VpnStaticRoute) SetSource(v string) *VpnStaticRoute {
- s.Source = &v
- return s
-}
-
-// SetState sets the State field's value.
-func (s *VpnStaticRoute) SetState(v string) *VpnStaticRoute {
- s.State = &v
- return s
-}
-
-type VgwTelemetry struct {
- _ struct{} `type:"structure"`
-
- // The number of accepted routes.
- AcceptedRouteCount *int64 `locationName:"acceptedRouteCount" type:"integer"`
-
- // The date and time of the last change in status.
- LastStatusChange *time.Time `locationName:"lastStatusChange" type:"timestamp" timestampFormat:"iso8601"`
-
- // The Internet-routable IP address of the virtual private gateway's outside
- // interface.
- OutsideIpAddress *string `locationName:"outsideIpAddress" type:"string"`
-
- // The status of the VPN tunnel.
- Status *string `locationName:"status" type:"string" enum:"TelemetryStatus"`
-
- // If an error occurs, a description of the error.
- StatusMessage *string `locationName:"statusMessage" type:"string"`
-}
-
-// String returns the string representation
-func (s VgwTelemetry) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVpnGatewaysInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeVpnGatewaysInput) SetDryRun(v bool) *DescribeVpnGatewaysInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeVpnGatewaysInput) SetFilters(v []*Filter) *DescribeVpnGatewaysInput {
- s.Filters = v
- return s
-}
-
-// SetVpnGatewayIds sets the VpnGatewayIds field's value.
-func (s *DescribeVpnGatewaysInput) SetVpnGatewayIds(v []*string) *DescribeVpnGatewaysInput {
- s.VpnGatewayIds = v
- return s
-}
-
-// Contains the output of DescribeVpnGateways.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpnGatewaysResult
-type DescribeVpnGatewaysOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more virtual private gateways.
- VpnGateways []*VpnGateway `locationName:"vpnGatewaySet" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeVpnGatewaysOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVpnGatewaysOutput) GoString() string {
- return s.String()
-}
-
-// SetVpnGateways sets the VpnGateways field's value.
-func (s *DescribeVpnGatewaysOutput) SetVpnGateways(v []*VpnGateway) *DescribeVpnGatewaysOutput {
- s.VpnGateways = v
- return s
-}
-
-type DeleteVpnGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the virtual private gateway.
- //
- // VpnGatewayId is a required field
- VpnGatewayId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteVpnGatewayInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s VgwTelemetry) GoString() string {
- return s.String()
-}
-
-// SetAcceptedRouteCount sets the AcceptedRouteCount field's value.
-func (s *VgwTelemetry) SetAcceptedRouteCount(v int64) *VgwTelemetry {
- s.AcceptedRouteCount = &v
- return s
-}
-
-// SetLastStatusChange sets the LastStatusChange field's value.
-func (s *VgwTelemetry) SetLastStatusChange(v time.Time) *VgwTelemetry {
- s.LastStatusChange = &v
- return s
-}
-
-// SetOutsideIpAddress sets the OutsideIpAddress field's value.
-func (s *VgwTelemetry) SetOutsideIpAddress(v string) *VgwTelemetry {
- s.OutsideIpAddress = &v
- return s
-}
-
-// SetStatus sets the Status field's value.
-func (s *VgwTelemetry) SetStatus(v string) *VgwTelemetry {
- s.Status = &v
- return s
-}
-
-// SetStatusMessage sets the StatusMessage field's value.
-func (s *VgwTelemetry) SetStatusMessage(v string) *VgwTelemetry {
- s.StatusMessage = &v
- return s
-}
-
-type DescribeVpnConnectionsInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * customer-gateway-configuration - The configuration information for the
- // customer gateway.
- //
- // * customer-gateway-id - The ID of a customer gateway associated with the
- // VPN connection.
- //
- // * state - The state of the VPN connection (pending | available | deleting
- // | deleted).
- //
- // * option.static-routes-only - Indicates whether the connection has static
- // routes only. Used for devices that do not support Border Gateway Protocol
- // (BGP).
- //
- // * route.destination-cidr-block - The destination CIDR block. This corresponds
- // to the subnet used in a customer data center.
- //
- // * bgp-asn - The BGP Autonomous System Number (ASN) associated with a BGP
- // device.
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- //
- // * type - The type of VPN connection. Currently the only supported type
- // is ipsec.1.
- //
- // * vpn-connection-id - The ID of the VPN connection.
- //
- // * vpn-gateway-id - The ID of a virtual private gateway associated with
- // the VPN connection.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // One or more VPN connection IDs.
- //
- // Default: Describes your VPN connections.
- VpnConnectionIds []*string `locationName:"VpnConnectionId" locationNameList:"VpnConnectionId" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeVpnConnectionsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVpnConnectionsInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeVpnConnectionsInput) SetDryRun(v bool) *DescribeVpnConnectionsInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeVpnConnectionsInput) SetFilters(v []*Filter) *DescribeVpnConnectionsInput {
- s.Filters = v
- return s
-}
-
-// SetVpnConnectionIds sets the VpnConnectionIds field's value.
-func (s *DescribeVpnConnectionsInput) SetVpnConnectionIds(v []*string) *DescribeVpnConnectionsInput {
- s.VpnConnectionIds = v
- return s
-}
-
-// Contains the output of DescribeVpnConnections.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpnConnectionsResult
-type DescribeVpnConnectionsOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more VPN connections.
- VpnConnections []*VpnConnection `locationName:"vpnConnectionSet" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeVpnConnectionsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVpnConnectionsOutput) GoString() string {
- return s.String()
-}
-
-// SetVpnConnections sets the VpnConnections field's value.
-func (s *DescribeVpnConnectionsOutput) SetVpnConnections(v []*VpnConnection) *DescribeVpnConnectionsOutput {
- s.VpnConnections = v
- return s
-}
-
-// GoString returns the string representation
-func (s DeleteVpnGatewayInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteVpnGatewayInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteVpnGatewayInput"}
- if s.VpnGatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpnGatewayId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteVpnGatewayInput) SetDryRun(v bool) *DeleteVpnGatewayInput {
- s.DryRun = &v
- return s
-}
-
-// SetVpnGatewayId sets the VpnGatewayId field's value.
-func (s *DeleteVpnGatewayInput) SetVpnGatewayId(v string) *DeleteVpnGatewayInput {
- s.VpnGatewayId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteVpnGatewayOutput
-type DeleteVpnGatewayOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteVpnGatewayOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteVpnGatewayOutput) GoString() string {
- return s.String()
-}
-
-type AttachVpnGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `type:"string" required:"true"`
-
- // The ID of the virtual private gateway.
- //
- // VpnGatewayId is a required field
- VpnGatewayId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s AttachVpnGatewayInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AttachVpnGatewayInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *AttachVpnGatewayInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "AttachVpnGatewayInput"}
- if s.VpcId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpcId"))
- }
- if s.VpnGatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpnGatewayId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *AttachVpnGatewayInput) SetDryRun(v bool) *AttachVpnGatewayInput {
- s.DryRun = &v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *AttachVpnGatewayInput) SetVpcId(v string) *AttachVpnGatewayInput {
- s.VpcId = &v
- return s
-}
-
-// SetVpnGatewayId sets the VpnGatewayId field's value.
-func (s *AttachVpnGatewayInput) SetVpnGatewayId(v string) *AttachVpnGatewayInput {
- s.VpnGatewayId = &v
- return s
-}
-
-// Contains the output of AttachVpnGateway.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/AttachVpnGatewayResult
-type AttachVpnGatewayOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the attachment.
- VpcAttachment *VpcAttachment `locationName:"attachment" type:"structure"`
-}
-
-// String returns the string representation
-func (s AttachVpnGatewayOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AttachVpnGatewayOutput) GoString() string {
- return s.String()
-}
-
-// SetVpcAttachment sets the VpcAttachment field's value.
-func (s *AttachVpnGatewayOutput) SetVpcAttachment(v *VpcAttachment) *AttachVpnGatewayOutput {
- s.VpcAttachment = v
- return s
-}
-
-type DeleteVpnConnectionInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the VPN connection.
- //
- // VpnConnectionId is a required field
- VpnConnectionId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteVpnConnectionInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteVpnConnectionInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteVpnConnectionInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteVpnConnectionInput"}
- if s.VpnConnectionId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpnConnectionId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteVpnConnectionInput) SetDryRun(v bool) *DeleteVpnConnectionInput {
- s.DryRun = &v
- return s
-}
-
-// SetVpnConnectionId sets the VpnConnectionId field's value.
-func (s *DeleteVpnConnectionInput) SetVpnConnectionId(v string) *DeleteVpnConnectionInput {
- s.VpnConnectionId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteVpnConnectionOutput
-type DeleteVpnConnectionOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteVpnConnectionOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteVpnConnectionOutput) GoString() string {
- return s.String()
-}
-
-type DetachVpnGatewayInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the VPC.
- //
- // VpcId is a required field
- VpcId *string `type:"string" required:"true"`
-
- // The ID of the virtual private gateway.
- //
- // VpnGatewayId is a required field
- VpnGatewayId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DetachVpnGatewayInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DetachVpnGatewayInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DetachVpnGatewayInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DetachVpnGatewayInput"}
- if s.VpcId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpcId"))
- }
- if s.VpnGatewayId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpnGatewayId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DetachVpnGatewayInput) SetDryRun(v bool) *DetachVpnGatewayInput {
- s.DryRun = &v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *DetachVpnGatewayInput) SetVpcId(v string) *DetachVpnGatewayInput {
- s.VpcId = &v
- return s
-}
-
-// SetVpnGatewayId sets the VpnGatewayId field's value.
-func (s *DetachVpnGatewayInput) SetVpnGatewayId(v string) *DetachVpnGatewayInput {
- s.VpnGatewayId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DetachVpnGatewayOutput
-type DetachVpnGatewayOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DetachVpnGatewayOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DetachVpnGatewayOutput) GoString() string {
- return s.String()
-}
-
-type DescribeSnapshotExportTasksInput struct {
- SnapshotExportTaskId []*string `locationName:"snapshotExportTaskId" locationNameList:"item" type:"list"`
-}
-
-type DescribeSnapshotExportTasksOutput struct {
- SnapshotExportTask []*SnapshotExportTask `locationName:"snapshotExportTaskSet" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type CreateSnapshotExportTaskInput struct {
- _ struct{} `type:"structure"`
- ExportToOsu *ExportToOsuTaskSpecification `locationName:"exportToOsu" type:"structure"`
- SnapshotId *string `locationName:"snapshotId" type:"string"`
-}
-
-type CreateSnapshotExportTaskOutput struct {
- _ struct{} `type:"structure"`
- SnapshotExportTask *SnapshotExportTask `locationName:"snapshotExportTask" type:"structure"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type SnapshotExportTask struct {
- _ struct{} `type:"structure"`
- Completion *int64 `locationName:"completion" type:"string"`
- ExportToOsu *ExportToOsuTaskSpecification `locationName:"exportToOsu" type:"structure"`
- SnapshotExport *SnapshotExport `locationName:"snapshotExport" type:"structure"`
- SnapshotExportTaskId *string `locationName:"snapshotExportTaskId" type:"string"`
- SnapshotId *string `locationName:"SnapshotId" type:"string"`
- State *string `locationName:"state" type:"string"`
- StatusMessage *string `locationName:"statusMessage" type:"string"`
-}
-
-type SnapshotExport struct {
- SnapshotId *string `locationName:"snapshotId" type:"string"`
-}
-
-type ExportToOsuTaskSpecification struct {
- _ struct{} `type:"structure"`
- DiskImageFormat *string `locationName:"diskImageFormat" type:"string"`
- AkSk *ExportToOsuAccessKeySpecification `locationName:"akSk" type:"structure"`
- OsuBucket *string `locationName:"osuBucket" type:"string"`
- OsuKey *string `locationName:"osuKey" type:"string"`
- OsuPrefix *string `locationName:"osuPrefix" type:"string"`
-}
-
-type CreateImageExportTaskInput struct {
- _ struct{} `type:"structure"`
- ExportToOsu *ImageExportToOsuTaskSpecification `locationName:"exportToOsu" type:"structure"`
- ImageId *string `locationName:"imageId" type:"string"`
-}
-
-type ImageExportToOsuTaskSpecification struct {
- _ struct{} `type:"structure"`
- DiskImageFormat *string `locationName:"diskImageFormat" type:"string"`
- OsuAkSk *ExportToOsuAccessKeySpecification `locationName:"osuAkSk" type:"structure"`
- OsuBucket *string `locationName:"osuBucket" type:"string"`
- OsuManifestUrl *string `locationName:"osuManifestUrl" type:"string"`
- OsuPrefix *string `locationName:"osuPrefix" type:"string"`
-}
-
-type ExportToOsuAccessKeySpecification struct {
- _ struct{} `type:"structure"`
- AccessKey *string `locationName:"accessKey" type:"string"`
- SecretKey *string `locationName:"secretKey" type:"string"`
-}
-
-type CreateImageExportTaskOutput struct {
- _ struct{} `type:"structure"`
- ImageExportTask *ImageExportTask `locationName:"imageExportTask" type:"structure"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type ImageExportTask struct {
- _ struct{} `type:"structure"`
- Completion *int64 `locationName:"completion" type:"string"`
- ExportToOsu *ImageExportToOsuTaskSpecification `locationName:"exportToOsu" type:"structure"`
- ImageExport *ImageExport `locationName:"imageExport" type:"structure"`
- ImageExportTaskId *string `locationName:"imageExportTaskId" type:"string"`
- ImageId *string `locationName:"imageId" type:"string"`
- State *string `locationName:"state" type:"string"`
- StatusMessage *string `locationName:"statusMessage" type:"string"`
-}
-
-type ImageExport struct {
- _ struct{} `type:"structure"`
- ImageId *string `locationName:"imageId" type:"string"`
-}
-
-type DescribeImageExportTasksInput struct {
- _ struct{} `type:"structure"`
- ImageExportTaskId []*string `locationName:"imageExportTaskId" locationNameList:"item" type:"list"`
-}
-
-type DescribeImageExportTasksOutput struct {
- _ struct{} `type:"structure"`
- ImageExportTask []*ImageExportTask `locationName:"imageExportTask" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// Contains the parameters for CopyImage.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CopyImageRequest
-type CopyImageInput struct {
- _ struct{} `type:"structure"`
-
- // Unique, case-sensitive identifier you provide to ensure idempotency of the
- // request. For more information, see How to Ensure Idempotency (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Run_Instance_Idempotency.html)
- // in the Amazon Elastic Compute Cloud User Guide.
- ClientToken *string `type:"string"`
-
- // A description for the new AMI in the destination region.
- Description *string `type:"string"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // Specifies whether the destination snapshots of the copied image should be
- // encrypted. The default CMK for EBS is used unless a non-default AWS Key Management
- // Service (AWS KMS) CMK is specified with KmsKeyId. For more information, see
- // Amazon EBS Encryption (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html)
- // in the Amazon Elastic Compute Cloud User Guide.
- Encrypted *bool `locationName:"encrypted" type:"boolean"`
-
- // The full ARN of the AWS Key Management Service (AWS KMS) CMK to use when
- // encrypting the snapshots of an image during a copy operation. This parameter
- // is only required if you want to use a non-default CMK; if this parameter
- // is not specified, the default CMK for EBS is used. The ARN contains the arn:aws:kms
- // namespace, followed by the region of the CMK, the AWS account ID of the CMK
- // owner, the key namespace, and then the CMK ID. For example, arn:aws:kms:us-east-1:012345678910:key/abcd1234-a123-456a-a12b-a123b4cd56ef.
- // The specified CMK must exist in the region that the snapshot is being copied
- // to. If a KmsKeyId is specified, the Encrypted flag must also be set.
- KmsKeyId *string `locationName:"kmsKeyId" type:"string"`
-
- // The name of the new AMI in the destination region.
- //
- // Name is a required field
- Name *string `type:"string" required:"true"`
-
- // The ID of the AMI to copy.
- //
- // SourceImageId is a required field
- SourceImageId *string `type:"string" required:"true"`
-
- // The name of the region that contains the AMI to copy.
- //
- // SourceRegion is a required field
- SourceRegion *string `type:"string" required:"true"`
-}
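-
-// Usage sketch (illustrative only, placeholder IDs): as noted above, supplying
-// a KmsKeyId only makes sense together with Encrypted set to true. A copy
-// request using a non-default CMK could therefore be built as:
-//
-//	input := (&CopyImageInput{}).
-//		SetName("copied-image").
-//		SetSourceImageId("ami-12345678").
-//		SetSourceRegion("eu-west-2").
-//		SetEncrypted(true).
-//		SetKmsKeyId("arn:aws:kms:us-east-1:012345678910:key/abcd1234-a123-456a-a12b-a123b4cd56ef")
-//	if err := input.Validate(); err != nil {
-//		// Name, SourceImageId, or SourceRegion is missing
-//	}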
-
-// String returns the string representation
-func (s CopyImageInput) String() string {
- return awsutil.Prettify(s)
-}
-
-type CreateVpnConnectionRouteInput struct {
- _ struct{} `type:"structure"`
-
- // The CIDR block associated with the local subnet of the customer network.
- //
- // DestinationCidrBlock is a required field
- DestinationCidrBlock *string `type:"string" required:"true"`
-
- // The ID of the VPN connection.
- //
- // VpnConnectionId is a required field
- VpnConnectionId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s CreateVpnConnectionRouteInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CopyImageInput) GoString() string {
- return s.String()
-}
-
-// GoString returns the string representation
-func (s CreateVpnConnectionRouteInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *CopyImageInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CopyImageInput"}
- if s.Name == nil {
- invalidParams.Add(request.NewErrParamRequired("Name"))
- }
- if s.SourceImageId == nil {
- invalidParams.Add(request.NewErrParamRequired("SourceImageId"))
- }
- if s.SourceRegion == nil {
- invalidParams.Add(request.NewErrParamRequired("SourceRegion"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *CreateVpnConnectionRouteInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CreateVpnConnectionRouteInput"}
- if s.DestinationCidrBlock == nil {
- invalidParams.Add(request.NewErrParamRequired("DestinationCidrBlock"))
- }
- if s.VpnConnectionId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpnConnectionId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetClientToken sets the ClientToken field's value.
-func (s *CopyImageInput) SetClientToken(v string) *CopyImageInput {
- s.ClientToken = &v
- return s
-}
-
-// SetDescription sets the Description field's value.
-func (s *CopyImageInput) SetDescription(v string) *CopyImageInput {
- s.Description = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *CopyImageInput) SetDryRun(v bool) *CopyImageInput {
- s.DryRun = &v
- return s
-}
-
-// SetEncrypted sets the Encrypted field's value.
-func (s *CopyImageInput) SetEncrypted(v bool) *CopyImageInput {
- s.Encrypted = &v
- return s
-}
-
-// SetKmsKeyId sets the KmsKeyId field's value.
-func (s *CopyImageInput) SetKmsKeyId(v string) *CopyImageInput {
- s.KmsKeyId = &v
- return s
-}
-
-// SetName sets the Name field's value.
-func (s *CopyImageInput) SetName(v string) *CopyImageInput {
- s.Name = &v
- return s
-}
-
-// SetSourceImageId sets the SourceImageId field's value.
-func (s *CopyImageInput) SetSourceImageId(v string) *CopyImageInput {
- s.SourceImageId = &v
- return s
-}
-
-// SetSourceRegion sets the SourceRegion field's value.
-func (s *CopyImageInput) SetSourceRegion(v string) *CopyImageInput {
- s.SourceRegion = &v
- return s
-}
-
-// Contains the output of CopyImage.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CopyImageResult
-type CopyImageOutput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the new AMI.
- ImageId *string `locationName:"imageId" type:"string"`
-}
-
-// String returns the string representation
-func (s CopyImageOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// SetDestinationCidrBlock sets the DestinationCidrBlock field's value.
-func (s *CreateVpnConnectionRouteInput) SetDestinationCidrBlock(v string) *CreateVpnConnectionRouteInput {
- s.DestinationCidrBlock = &v
- return s
-}
-
-// SetVpnConnectionId sets the VpnConnectionId field's value.
-func (s *CreateVpnConnectionRouteInput) SetVpnConnectionId(v string) *CreateVpnConnectionRouteInput {
- s.VpnConnectionId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateVpnConnectionRouteOutput
-type CreateVpnConnectionRouteOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s CreateVpnConnectionRouteOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CopyImageOutput) GoString() string {
- return s.String()
-}
-
-// SetImageId sets the ImageId field's value.
-func (s *CopyImageOutput) SetImageId(v string) *CopyImageOutput {
- s.ImageId = &v
- return s
-}
-
-// Contains the parameters for DescribeSnapshots.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeSnapshotsRequest
-type DescribeSnapshotsInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * description - A description of the snapshot.
- //
- // * owner-alias - Value from an Amazon-maintained list (amazon | aws-marketplace
- // | microsoft) of snapshot owners. Not to be confused with the user-configured
- // AWS account alias, which is set from the IAM console.
- //
- // * owner-id - The ID of the AWS account that owns the snapshot.
- //
- // * progress - The progress of the snapshot, as a percentage (for example,
- // 80%).
- //
- // * snapshot-id - The snapshot ID.
- //
- // * start-time - The time stamp when the snapshot was initiated.
- //
- // * status - The status of the snapshot (pending | completed | error).
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- //
- // * volume-id - The ID of the volume the snapshot is for.
- //
- // * volume-size - The size of the volume, in GiB.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // The maximum number of snapshot results returned by DescribeSnapshots in paginated
- // output. When this parameter is used, DescribeSnapshots only returns MaxResults
- // results in a single page along with a NextToken response element. The remaining
- // results of the initial request can be seen by sending another DescribeSnapshots
- // request with the returned NextToken value. This value can be between 5 and
- // 1000; if MaxResults is given a value larger than 1000, only 1000 results
- // are returned. If this parameter is not used, then DescribeSnapshots returns
- // all results. You cannot specify this parameter and the snapshot IDs parameter
- // in the same request.
- MaxResults *int64 `type:"integer"`
-
- // The NextToken value returned from a previous paginated DescribeSnapshots
- // request where MaxResults was used and the results exceeded the value of that
- // parameter. Pagination continues from the end of the previous results that
- // returned the NextToken value. This value is null when there are no more results
- // to return.
- NextToken *string `type:"string"`
-
- // Returns the snapshots owned by the specified owner. Multiple owners can be
- // specified.
- OwnerIds []*string `locationName:"Owner" locationNameList:"Owner" type:"list"`
-
- // One or more AWS account IDs that can create volumes from the snapshot.
- RestorableByUserIds []*string `locationName:"RestorableBy" type:"list"`
-
- // One or more snapshot IDs.
- //
- // Default: Describes snapshots for which you have launch permissions.
- SnapshotIds []*string `locationName:"SnapshotId" locationNameList:"SnapshotId" type:"list"`
-}
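-
-// Usage sketch (illustrative only): MaxResults and NextToken implement the
-// pagination scheme described above. Assuming a client with a DescribeSnapshots
-// method is available elsewhere in this package (the call below is
-// hypothetical), all pages could be drained as:
-//
-//	input := (&DescribeSnapshotsInput{}).SetMaxResults(1000)
-//	for {
-//		out, err := svc.DescribeSnapshots(input) // hypothetical client call
-//		if err != nil {
-//			break
-//		}
-//		// consume out.Snapshots ...
-//		if out.NextToken == nil || *out.NextToken == "" {
-//			break
-//		}
-//		input.SetNextToken(*out.NextToken)
-//	}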
-
-// String returns the string representation
-func (s DescribeSnapshotsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateVpnConnectionRouteOutput) GoString() string {
- return s.String()
-}
-
-type DeleteVpnConnectionRouteInput struct {
- _ struct{} `type:"structure"`
-
- // The CIDR block associated with the local subnet of the customer network.
- //
- // DestinationCidrBlock is a required field
- DestinationCidrBlock *string `type:"string" required:"true"`
-
- // The ID of the VPN connection.
- //
- // VpnConnectionId is a required field
- VpnConnectionId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteVpnConnectionRouteInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeSnapshotsInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeSnapshotsInput) SetDryRun(v bool) *DescribeSnapshotsInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeSnapshotsInput) SetFilters(v []*Filter) *DescribeSnapshotsInput {
- s.Filters = v
- return s
-}
-
-// SetMaxResults sets the MaxResults field's value.
-func (s *DescribeSnapshotsInput) SetMaxResults(v int64) *DescribeSnapshotsInput {
- s.MaxResults = &v
- return s
-}
-
-// SetNextToken sets the NextToken field's value.
-func (s *DescribeSnapshotsInput) SetNextToken(v string) *DescribeSnapshotsInput {
- s.NextToken = &v
- return s
-}
-
-// SetOwnerIds sets the OwnerIds field's value.
-func (s *DescribeSnapshotsInput) SetOwnerIds(v []*string) *DescribeSnapshotsInput {
- s.OwnerIds = v
- return s
-}
-
-// SetRestorableByUserIds sets the RestorableByUserIds field's value.
-func (s *DescribeSnapshotsInput) SetRestorableByUserIds(v []*string) *DescribeSnapshotsInput {
- s.RestorableByUserIds = v
- return s
-}
-
-// SetSnapshotIds sets the SnapshotIds field's value.
-func (s *DescribeSnapshotsInput) SetSnapshotIds(v []*string) *DescribeSnapshotsInput {
- s.SnapshotIds = v
- return s
-}
-
-// Contains the output of DescribeSnapshots.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeSnapshotsResult
-type DescribeSnapshotsOutput struct {
- _ struct{} `type:"structure"`
-
- // The NextToken value to include in a future DescribeSnapshots request. When
- // the results of a DescribeSnapshots request exceed MaxResults, this value
- // can be used to retrieve the next page of results. This value is null when
- // there are no more results to return.
- NextToken *string `locationName:"nextToken" type:"string"`
-
- // Information about the snapshots.
- Snapshots []*Snapshot `locationName:"snapshotSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeSnapshotsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeSnapshotsOutput) GoString() string {
- return s.String()
-}
-
-// SetNextToken sets the NextToken field's value.
-func (s *DescribeSnapshotsOutput) SetNextToken(v string) *DescribeSnapshotsOutput {
- s.NextToken = &v
- return s
-}
-
-// SetSnapshots sets the Snapshots field's value.
-func (s *DescribeSnapshotsOutput) SetSnapshots(v []*Snapshot) *DescribeSnapshotsOutput {
- s.Snapshots = v
- return s
-}
-
-// Describes a snapshot.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/Snapshot
-type Snapshot struct {
- _ struct{} `type:"structure"`
-
- // The data encryption key identifier for the snapshot. This value is a unique
- // identifier that corresponds to the data encryption key that was used to encrypt
- // the original volume or snapshot copy. Because data encryption keys are inherited
- // by volumes created from snapshots, and vice versa, if snapshots share the
- // same data encryption key identifier, then they belong to the same volume/snapshot
- // lineage. This parameter is only returned by the DescribeSnapshots API operation.
- DataEncryptionKeyId *string `locationName:"dataEncryptionKeyId" type:"string"`
-
- // The description for the snapshot.
- Description *string `locationName:"description" type:"string"`
-
- // Indicates whether the snapshot is encrypted.
- Encrypted *bool `locationName:"encrypted" type:"boolean"`
-
- // The full ARN of the AWS Key Management Service (AWS KMS) customer master
- // key (CMK) that was used to protect the volume encryption key for the parent
- // volume.
- KmsKeyId *string `locationName:"kmsKeyId" type:"string"`
-
- // Value from an Amazon-maintained list (amazon | aws-marketplace | microsoft)
- // of snapshot owners. Not to be confused with the user-configured AWS account
- // alias, which is set from the IAM console.
- OwnerAlias *string `locationName:"ownerAlias" type:"string"`
-
- // The AWS account ID of the EBS snapshot owner.
- OwnerId *string `locationName:"ownerId" type:"string"`
-
- // The progress of the snapshot, as a percentage.
- Progress *string `locationName:"progress" type:"string"`
-
- // The ID of the snapshot. Each snapshot receives a unique identifier when it
- // is created.
- SnapshotId *string `locationName:"snapshotId" type:"string"`
-
- // The time stamp when the snapshot was initiated.
- StartTime *time.Time `locationName:"startTime" type:"timestamp" timestampFormat:"iso8601"`
-
- // The snapshot state.
- State *string `locationName:"status" type:"string" enum:"SnapshotState"`
-
- // Encrypted Amazon EBS snapshots are copied asynchronously. If a snapshot copy
- // operation fails (for example, if the proper AWS Key Management Service (AWS
- // KMS) permissions are not obtained) this field displays error state details
- // to help you diagnose why the error occurred. This parameter is only returned
- // by the DescribeSnapshots API operation.
- StateMessage *string `locationName:"statusMessage" type:"string"`
-
- // Any tags assigned to the snapshot.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The ID of the volume that was used to create the snapshot. Snapshots created
- // by the CopySnapshot action have an arbitrary volume ID that should not be
- // used for any purpose.
- VolumeId *string `locationName:"volumeId" type:"string"`
-
- // The size of the volume, in GiB.
- VolumeSize *int64 `locationName:"volumeSize" type:"integer"`
-}
-
-// String returns the string representation
-func (s Snapshot) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteVpnConnectionRouteInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteVpnConnectionRouteInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteVpnConnectionRouteInput"}
- if s.DestinationCidrBlock == nil {
- invalidParams.Add(request.NewErrParamRequired("DestinationCidrBlock"))
- }
- if s.VpnConnectionId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpnConnectionId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDestinationCidrBlock sets the DestinationCidrBlock field's value.
-func (s *DeleteVpnConnectionRouteInput) SetDestinationCidrBlock(v string) *DeleteVpnConnectionRouteInput {
- s.DestinationCidrBlock = &v
- return s
-}
-
-// SetVpnConnectionId sets the VpnConnectionId field's value.
-func (s *DeleteVpnConnectionRouteInput) SetVpnConnectionId(v string) *DeleteVpnConnectionRouteInput {
- s.VpnConnectionId = &v
- return s
-}
-
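// Editor's note: illustrative sketch, not part of the vendored SDK file above.
// The setters return their receiver, so request inputs can be built fluently and
// checked with Validate before being sent. The helper is hypothetical and assumes
// it lives in the same package as these types.
func newDeleteVpnConnectionRouteInput(cidr, vpnConnectionID string) (*DeleteVpnConnectionRouteInput, error) {
	input := (&DeleteVpnConnectionRouteInput{}).
		SetDestinationCidrBlock(cidr).
		SetVpnConnectionId(vpnConnectionID)
	if err := input.Validate(); err != nil {
		return nil, err // both fields are required, so empty values fail here
	}
	return input, nil
}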
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteVpnConnectionRouteOutput
-type DeleteVpnConnectionRouteOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteVpnConnectionRouteOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s Snapshot) GoString() string {
- return s.String()
-}
-
-// SetDataEncryptionKeyId sets the DataEncryptionKeyId field's value.
-func (s *Snapshot) SetDataEncryptionKeyId(v string) *Snapshot {
- s.DataEncryptionKeyId = &v
- return s
-}
-
-// SetDescription sets the Description field's value.
-func (s *Snapshot) SetDescription(v string) *Snapshot {
- s.Description = &v
- return s
-}
-
-// SetEncrypted sets the Encrypted field's value.
-func (s *Snapshot) SetEncrypted(v bool) *Snapshot {
- s.Encrypted = &v
- return s
-}
-
-// SetKmsKeyId sets the KmsKeyId field's value.
-func (s *Snapshot) SetKmsKeyId(v string) *Snapshot {
- s.KmsKeyId = &v
- return s
-}
-
-// SetOwnerAlias sets the OwnerAlias field's value.
-func (s *Snapshot) SetOwnerAlias(v string) *Snapshot {
- s.OwnerAlias = &v
- return s
-}
-
-// SetOwnerId sets the OwnerId field's value.
-func (s *Snapshot) SetOwnerId(v string) *Snapshot {
- s.OwnerId = &v
- return s
-}
-
-// SetProgress sets the Progress field's value.
-func (s *Snapshot) SetProgress(v string) *Snapshot {
- s.Progress = &v
- return s
-}
-
-// SetSnapshotId sets the SnapshotId field's value.
-func (s *Snapshot) SetSnapshotId(v string) *Snapshot {
- s.SnapshotId = &v
- return s
-}
-
-// SetStartTime sets the StartTime field's value.
-func (s *Snapshot) SetStartTime(v time.Time) *Snapshot {
- s.StartTime = &v
- return s
-}
-
-// SetState sets the State field's value.
-func (s *Snapshot) SetState(v string) *Snapshot {
- s.State = &v
- return s
-}
-
-// SetStateMessage sets the StateMessage field's value.
-func (s *Snapshot) SetStateMessage(v string) *Snapshot {
- s.StateMessage = &v
- return s
-}
-
-// SetTags sets the Tags field's value.
-func (s *Snapshot) SetTags(v []*Tag) *Snapshot {
- s.Tags = v
- return s
-}
-
-// SetVolumeId sets the VolumeId field's value.
-func (s *Snapshot) SetVolumeId(v string) *Snapshot {
- s.VolumeId = &v
- return s
-}
-
-// SetVolumeSize sets the VolumeSize field's value.
-func (s *Snapshot) SetVolumeSize(v int64) *Snapshot {
- s.VolumeSize = &v
- return s
-}
-func (s DeleteVpnConnectionRouteOutput) GoString() string {
- return s.String()
-}
-
-type DescribeAvailabilityZonesInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * message - Information about the Availability Zone.
- //
- // * region-name - The name of the region for the Availability Zone (for
- // example, us-east-1).
- //
- // * state - The state of the Availability Zone (available | information
- // | impaired | unavailable).
- //
- // * zone-name - The name of the Availability Zone (for example, us-east-1a).
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // The names of one or more Availability Zones.
- ZoneNames []*string `locationName:"ZoneName" locationNameList:"ZoneName" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeAvailabilityZonesInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeAvailabilityZonesInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeAvailabilityZonesInput) SetDryRun(v bool) *DescribeAvailabilityZonesInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeAvailabilityZonesInput) SetFilters(v []*Filter) *DescribeAvailabilityZonesInput {
- s.Filters = v
- return s
-}
-
-// SetZoneNames sets the ZoneNames field's value.
-func (s *DescribeAvailabilityZonesInput) SetZoneNames(v []*string) *DescribeAvailabilityZonesInput {
- s.ZoneNames = v
- return s
-}
-
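// Editor's note: illustrative sketch, not part of the vendored SDK file above.
// The comments on DescribeAvailabilityZonesInput list zone-name and state among
// the supported filters; this example sticks to the ZoneNames and DryRun fields
// defined here and assumes it lives in the same package as these types.
func newDescribeAvailabilityZonesInput(zoneNames ...string) *DescribeAvailabilityZonesInput {
	names := make([]*string, 0, len(zoneNames))
	for i := range zoneNames {
		names = append(names, &zoneNames[i])
	}
	return (&DescribeAvailabilityZonesInput{}).
		SetDryRun(false).
		SetZoneNames(names)
}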
-// Contains the output of DescribeAvailabilityZones.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeAvailabilityZonesResult
-type DescribeAvailabilityZonesOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more Availability Zones.
- AvailabilityZones []*AvailabilityZone `locationName:"availabilityZoneInfo" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeAvailabilityZonesOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeAvailabilityZonesOutput) GoString() string {
- return s.String()
-}
-
-// SetAvailabilityZones sets the AvailabilityZones field's value.
-func (s *DescribeAvailabilityZonesOutput) SetAvailabilityZones(v []*AvailabilityZone) *DescribeAvailabilityZonesOutput {
- s.AvailabilityZones = v
- return s
-}
-
-type AvailabilityZone struct {
- _ struct{} `type:"structure"`
-
- // Any messages about the Availability Zone.
- Messages []*AvailabilityZoneMessage `locationName:"messageSet" locationNameList:"item" type:"list"`
-
- // The name of the region.
- RegionName *string `locationName:"regionName" type:"string"`
-
- // The state of the Availability Zone.
- State *string `locationName:"zoneState" type:"string" enum:"AvailabilityZoneState"`
-
- // The name of the Availability Zone.
- ZoneName *string `locationName:"zoneName" type:"string"`
-}
-
-// String returns the string representation
-func (s AvailabilityZone) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AvailabilityZone) GoString() string {
- return s.String()
-}
-
-// SetMessages sets the Messages field's value.
-func (s *AvailabilityZone) SetMessages(v []*AvailabilityZoneMessage) *AvailabilityZone {
- s.Messages = v
- return s
-}
-
-// SetRegionName sets the RegionName field's value.
-func (s *AvailabilityZone) SetRegionName(v string) *AvailabilityZone {
- s.RegionName = &v
- return s
-}
-
-// SetState sets the State field's value.
-func (s *AvailabilityZone) SetState(v string) *AvailabilityZone {
- s.State = &v
- return s
-}
-
-// SetZoneName sets the ZoneName field's value.
-func (s *AvailabilityZone) SetZoneName(v string) *AvailabilityZone {
- s.ZoneName = &v
- return s
-}
-
-type AvailabilityZoneMessage struct {
- _ struct{} `type:"structure"`
-
- // The message about the Availability Zone.
- Message *string `locationName:"message" type:"string"`
-}
-
-// String returns the string representation
-func (s AvailabilityZoneMessage) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AvailabilityZoneMessage) GoString() string {
- return s.String()
-}
-
-// SetMessage sets the Message field's value.
-func (s *AvailabilityZoneMessage) SetMessage(v string) *AvailabilityZoneMessage {
- s.Message = &v
- return s
-}
-
-type DescribePrefixListsInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `type:"boolean"`
-
- // One or more filters.
- //
- // * prefix-list-id: The ID of a prefix list.
- //
- // * prefix-list-name: The name of a prefix list.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // The maximum number of items to return for this request. The request returns
- // a token that you can specify in a subsequent call to get the next set of
- // results.
- //
- // Constraint: If the value specified is greater than 1000, we return only 1000
- // items.
- MaxResults *int64 `type:"integer"`
-
- // The token for the next set of items to return. (You received this token from
- // a prior call.)
- NextToken *string `type:"string"`
-
- // One or more prefix list IDs.
- PrefixListIds []*string `locationName:"PrefixListId" locationNameList:"item" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribePrefixListsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribePrefixListsInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribePrefixListsInput) SetDryRun(v bool) *DescribePrefixListsInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribePrefixListsInput) SetFilters(v []*Filter) *DescribePrefixListsInput {
- s.Filters = v
- return s
-}
-
-// SetMaxResults sets the MaxResults field's value.
-func (s *DescribePrefixListsInput) SetMaxResults(v int64) *DescribePrefixListsInput {
- s.MaxResults = &v
- return s
-}
-
-// SetNextToken sets the NextToken field's value.
-func (s *DescribePrefixListsInput) SetNextToken(v string) *DescribePrefixListsInput {
- s.NextToken = &v
- return s
-}
-
-// SetPrefixListIds sets the PrefixListIds field's value.
-func (s *DescribePrefixListsInput) SetPrefixListIds(v []*string) *DescribePrefixListsInput {
- s.PrefixListIds = v
- return s
-}
-
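// Editor's note: illustrative sketch, not part of the vendored SDK file above.
// MaxResults and NextToken implement token-based pagination as described in the
// comments: keep calling until the returned token is empty. The describe argument
// stands in for whatever client method issues the request; that client call is an
// assumption, not something defined in this file.
func listAllPrefixLists(describe func(*DescribePrefixListsInput) (*DescribePrefixListsOutput, error)) ([]*PrefixList, error) {
	var all []*PrefixList
	input := &DescribePrefixListsInput{}
	for {
		out, err := describe(input)
		if err != nil {
			return nil, err
		}
		all = append(all, out.PrefixLists...)
		if out.NextToken == nil || *out.NextToken == "" {
			return all, nil // an empty token means there are no further items
		}
		input.SetNextToken(*out.NextToken)
	}
}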
-// Contains the output of DescribePrefixLists.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribePrefixListsResult
-type DescribePrefixListsOutput struct {
- _ struct{} `type:"structure"`
-
- // The token to use when requesting the next set of items. If there are no additional
- // items to return, the string is empty.
- NextToken *string `locationName:"nextToken" type:"string"`
-
- // All available prefix lists.
- PrefixLists []*PrefixList `locationName:"prefixListSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribePrefixListsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribePrefixListsOutput) GoString() string {
- return s.String()
-}
-
-// SetNextToken sets the NextToken field's value.
-func (s *DescribePrefixListsOutput) SetNextToken(v string) *DescribePrefixListsOutput {
- s.NextToken = &v
- return s
-}
-
-// SetPrefixLists sets the PrefixLists field's value.
-func (s *DescribePrefixListsOutput) SetPrefixLists(v []*PrefixList) *DescribePrefixListsOutput {
- s.PrefixLists = v
- return s
-}
-
-type PrefixList struct {
- _ struct{} `type:"structure"`
-
- // The IP address range of the AWS service.
- Cidrs []*string `locationName:"cidrSet" locationNameList:"item" type:"list"`
-
- // The ID of the prefix.
- PrefixListId *string `locationName:"prefixListId" type:"string"`
-
- // The name of the prefix.
- PrefixListName *string `locationName:"prefixListName" type:"string"`
-}
-
-// String returns the string representation
-func (s PrefixList) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s PrefixList) GoString() string {
- return s.String()
-}
-
-// SetCidrs sets the Cidrs field's value.
-func (s *PrefixList) SetCidrs(v []*string) *PrefixList {
- s.Cidrs = v
- return s
-}
-
-// SetPrefixListId sets the PrefixListId field's value.
-func (s *PrefixList) SetPrefixListId(v string) *PrefixList {
- s.PrefixListId = &v
- return s
-}
-
-// SetPrefixListName sets the PrefixListName field's value.
-func (s *PrefixList) SetPrefixListName(v string) *PrefixList {
- s.PrefixListName = &v
- return s
-}
-
-type DescribeQuotasInput struct {
- _ struct{} `type:"structure"`
-
- DryRun *bool `type:"boolean"`
-
- // One or more filters.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // The maximum number of items to return for this request. The request returns
- // a token that you can specify in a subsequent call to get the next set of
- // results.
- //
- // Constraint: If the value specified is greater than 1000, we return only 1000
- // items.
- MaxResults *int64 `type:"integer"`
-
- // The token for the next set of items to return. (You received this token from
- // a prior call.)
- NextToken *string `type:"string"`
-
- // One or more quota names.
- QuotaName []*string `locationName:"QuotaName" locationNameList:"item" type:"list"`
-}
-
-type DescribeQuotasOutput struct {
- _ struct{} `type:"structure"`
-
- // The token to use when requesting the next set of items. If there are no additional
- // items to return, the string is empty.
- NextToken *string `locationName:"nextToken" type:"string"`
-
- // Information about one or more quotas, grouped by reference.
- ReferenceQuotaSet []*ReferenceQuota `locationName:"referenceQuotaSet" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type DescribeRegionsInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * endpoint - The endpoint of the region (for example, ec2.us-east-1.amazonaws.com).
- //
- // * region-name - The name of the region (for example, us-east-1).
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // The names of one or more regions.
- RegionNames []*string `locationName:"RegionName" locationNameList:"RegionName" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeRegionsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeRegionsInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeRegionsInput) SetDryRun(v bool) *DescribeRegionsInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeRegionsInput) SetFilters(v []*Filter) *DescribeRegionsInput {
- s.Filters = v
- return s
-}
-
-// SetRegionNames sets the RegionNames field's value.
-func (s *DescribeRegionsInput) SetRegionNames(v []*string) *DescribeRegionsInput {
- s.RegionNames = v
- return s
-}
-
-// Contains the output of DescribeRegions.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeRegionsResult
-type DescribeRegionsOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more regions.
- Regions []*Region `locationName:"regionInfo" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeRegionsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeRegionsOutput) GoString() string {
- return s.String()
-}
-
-// SetRegions sets the Regions field's value.
-func (s *DescribeRegionsOutput) SetRegions(v []*Region) *DescribeRegionsOutput {
- s.Regions = v
- return s
-}
-
-type ReferenceQuota struct {
- QuotaSet []*QuotaSet `locationName:"quotaSet" locationNameList:"item" type:"list"`
- Reference *string `locationName:"reference" type:"string"`
-}
-
-type QuotaSet struct {
- Description *string `locationName:"description" type:"string"`
- DisplayName *string `locationName:"displayName" type:"string"`
- GroupName *string `locationName:"groupName" type:"string"`
- MaxQuotaValue *string `locationName:"maxQuotaValue" type:"string"`
- Name *string `locationName:"name" type:"string"`
- OwnerId *string `locationName:"ownerId" type:"string"`
- UsedQuotaValue *string `locationName:"usedQuotaValue" type:"string"`
-}
-
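// Editor's note: illustrative sketch, not part of the vendored SDK file above.
// QuotaSet reports MaxQuotaValue and UsedQuotaValue as strings, so computing the
// remaining headroom requires parsing them (the standard-library strconv package
// is assumed to be imported); non-numeric values are simply reported as unknown.
func remainingQuota(q *QuotaSet) (int64, bool) {
	if q == nil || q.MaxQuotaValue == nil || q.UsedQuotaValue == nil {
		return 0, false
	}
	maxVal, errMax := strconv.ParseInt(*q.MaxQuotaValue, 10, 64)
	usedVal, errUsed := strconv.ParseInt(*q.UsedQuotaValue, 10, 64)
	if errMax != nil || errUsed != nil {
		return 0, false
	}
	return maxVal - usedVal, true
}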
-type Region struct {
- _ struct{} `type:"structure"`
-
- // The region service endpoint.
- Endpoint *string `locationName:"regionEndpoint" type:"string"`
-
- // The name of the region.
- RegionName *string `locationName:"regionName" type:"string"`
-}
-
-// String returns the string representation
-func (s Region) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s Region) GoString() string {
- return s.String()
-}
-
-// SetEndpoint sets the Endpoint field's value.
-func (s *Region) SetEndpoint(v string) *Region {
- s.Endpoint = &v
- return s
-}
-
-// SetRegionName sets the RegionName field's value.
-func (s *Region) SetRegionName(v string) *Region {
- s.RegionName = &v
- return s
-}
-
-type CreateSnapshotInput struct {
- _ struct{} `type:"structure"`
-
- // A description for the snapshot.
- Description *string `type:"string"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the EBS volume.
- //
- // VolumeId is a required field
- VolumeId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s CreateSnapshotInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateSnapshotInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *CreateSnapshotInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CreateSnapshotInput"}
- if s.VolumeId == nil {
- invalidParams.Add(request.NewErrParamRequired("VolumeId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDescription sets the Description field's value.
-func (s *CreateSnapshotInput) SetDescription(v string) *CreateSnapshotInput {
- s.Description = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *CreateSnapshotInput) SetDryRun(v bool) *CreateSnapshotInput {
- s.DryRun = &v
- return s
-}
-
-// SetVolumeId sets the VolumeId field's value.
-func (s *CreateSnapshotInput) SetVolumeId(v string) *CreateSnapshotInput {
- s.VolumeId = &v
- return s
-}
-
-// Describes the snapshot created from the imported disk.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/SnapshotDetail
-type SnapshotDetail struct {
- _ struct{} `type:"structure"`
-
- // A description for the snapshot.
- Description *string `locationName:"description" type:"string"`
-
- // The block device mapping for the snapshot.
- DeviceName *string `locationName:"deviceName" type:"string"`
-
- // The size of the disk in the snapshot, in GiB.
- DiskImageSize *float64 `locationName:"diskImageSize" type:"double"`
-
- // The format of the disk image from which the snapshot is created.
- Format *string `locationName:"format" type:"string"`
-
- // The percentage of progress for the task.
- Progress *string `locationName:"progress" type:"string"`
-
- // The snapshot ID of the disk being imported.
- SnapshotId *string `locationName:"snapshotId" type:"string"`
-
- // A brief status of the snapshot creation.
- Status *string `locationName:"status" type:"string"`
-
- // A detailed status message for the snapshot creation.
- StatusMessage *string `locationName:"statusMessage" type:"string"`
-
- // The URL used to access the disk image.
- Url *string `locationName:"url" type:"string"`
-
- // The S3 bucket for the disk image.
- UserBucket *UserBucketDetails `locationName:"userBucket" type:"structure"`
-}
-
-// String returns the string representation
-func (s SnapshotDetail) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s SnapshotDetail) GoString() string {
- return s.String()
-}
-
-// SetDescription sets the Description field's value.
-func (s *SnapshotDetail) SetDescription(v string) *SnapshotDetail {
- s.Description = &v
- return s
-}
-
-// SetDeviceName sets the DeviceName field's value.
-func (s *SnapshotDetail) SetDeviceName(v string) *SnapshotDetail {
- s.DeviceName = &v
- return s
-}
-
-// SetDiskImageSize sets the DiskImageSize field's value.
-func (s *SnapshotDetail) SetDiskImageSize(v float64) *SnapshotDetail {
- s.DiskImageSize = &v
- return s
-}
-
-// SetFormat sets the Format field's value.
-func (s *SnapshotDetail) SetFormat(v string) *SnapshotDetail {
- s.Format = &v
- return s
-}
-
-// SetProgress sets the Progress field's value.
-func (s *SnapshotDetail) SetProgress(v string) *SnapshotDetail {
- s.Progress = &v
- return s
-}
-
-// SetSnapshotId sets the SnapshotId field's value.
-func (s *SnapshotDetail) SetSnapshotId(v string) *SnapshotDetail {
- s.SnapshotId = &v
- return s
-}
-
-// SetStatus sets the Status field's value.
-func (s *SnapshotDetail) SetStatus(v string) *SnapshotDetail {
- s.Status = &v
- return s
-}
-
-// SetStatusMessage sets the StatusMessage field's value.
-func (s *SnapshotDetail) SetStatusMessage(v string) *SnapshotDetail {
- s.StatusMessage = &v
- return s
-}
-
-// SetUrl sets the Url field's value.
-func (s *SnapshotDetail) SetUrl(v string) *SnapshotDetail {
- s.Url = &v
- return s
-}
-
-// SetUserBucket sets the UserBucket field's value.
-func (s *SnapshotDetail) SetUserBucket(v *UserBucketDetails) *SnapshotDetail {
- s.UserBucket = v
- return s
-}
-
-// The disk container object for the import snapshot request.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/SnapshotDiskContainer
-type SnapshotDiskContainer struct {
- _ struct{} `type:"structure"`
-
- // The description of the disk image being imported.
- Description *string `type:"string"`
-
- // The format of the disk image being imported.
- //
- // Valid values: RAW | VHD | VMDK | OVA
- Format *string `type:"string"`
-
- // The URL to the Amazon S3-based disk image being imported. It can either be
- // an https URL (https://..) or an Amazon S3 URL (s3://..).
- Url *string `type:"string"`
-
- // The S3 bucket for the disk image.
- UserBucket *UserBucket `type:"structure"`
-}
-
-// String returns the string representation
-func (s SnapshotDiskContainer) String() string {
- return awsutil.Prettify(s)
-}
-
-type DescribeProductTypesInput struct {
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-}
-
-type DescribeProductTypesOutput struct {
- ProductTypeSet []*ProductType `locationName:"productTypeSet" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type ProductType struct {
- Description *string `locationName:"description" type:"string"`
- ProductTypeId *string `locationName:"productTypeId" type:"string"`
- Vendor *string `locationName:"vendor" type:"string"`
-}
-
-type DescribeReservedInstancesInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // The Availability Zone in which the Reserved Instance can be used.
- AvailabilityZone *string `type:"string" enum:"AvailabilityZone"`
-
- // Describes whether the Reserved Instance is Standard or Convertible.
- OfferingClass *string `type:"string" enum:"OfferingClassType"`
-
- // The Reserved Instance offering type. If you are using tools that predate
- // the 2011-11-01 API version, you only have access to the Medium Utilization
- // Reserved Instance offering type.
- OfferingType *string `locationName:"offeringType" type:"string" enum:"OfferingTypeValues"`
-
- // One or more Reserved Instance IDs.
- //
- // Default: Describes all your Reserved Instances, or only those otherwise specified.
- ReservedInstancesIds []*string `locationName:"ReservedInstancesId" locationNameList:"ReservedInstancesId" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeReservedInstancesInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s SnapshotDiskContainer) GoString() string {
- return s.String()
-}
-
-// SetDescription sets the Description field's value.
-func (s *SnapshotDiskContainer) SetDescription(v string) *SnapshotDiskContainer {
- s.Description = &v
- return s
-}
-
-// SetFormat sets the Format field's value.
-func (s *SnapshotDiskContainer) SetFormat(v string) *SnapshotDiskContainer {
- s.Format = &v
- return s
-}
-
-// SetUrl sets the Url field's value.
-func (s *SnapshotDiskContainer) SetUrl(v string) *SnapshotDiskContainer {
- s.Url = &v
- return s
-}
-
-// SetUserBucket sets the UserBucket field's value.
-func (s *SnapshotDiskContainer) SetUserBucket(v *UserBucket) *SnapshotDiskContainer {
- s.UserBucket = v
- return s
-}
-
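// Editor's note: illustrative sketch, not part of the vendored SDK file above.
// Per the comments on SnapshotDiskContainer, Format accepts RAW | VHD | VMDK | OVA
// and the disk image can be referenced either through Url or through UserBucket.
// The bucket and key below are placeholders; the helper assumes it lives in the
// same package as these types.
func newSnapshotDiskContainer() *SnapshotDiskContainer {
	bucket := (&UserBucket{}).
		SetS3Bucket("example-import-bucket").
		SetS3Key("images/disk.vmdk")
	return (&SnapshotDiskContainer{}).
		SetDescription("imported VMDK disk").
		SetFormat("VMDK").
		SetUserBucket(bucket)
}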
-// Details about the import snapshot task.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/SnapshotTaskDetail
-type SnapshotTaskDetail struct {
- _ struct{} `type:"structure"`
-
- // The description of the snapshot.
- Description *string `locationName:"description" type:"string"`
-
- // The size of the disk in the snapshot, in GiB.
- DiskImageSize *float64 `locationName:"diskImageSize" type:"double"`
-
- // The format of the disk image from which the snapshot is created.
- Format *string `locationName:"format" type:"string"`
-
- // The percentage of completion for the import snapshot task.
- Progress *string `locationName:"progress" type:"string"`
-
- // The snapshot ID of the disk being imported.
- SnapshotId *string `locationName:"snapshotId" type:"string"`
-
- // A brief status for the import snapshot task.
- Status *string `locationName:"status" type:"string"`
-
- // A detailed status message for the import snapshot task.
- StatusMessage *string `locationName:"statusMessage" type:"string"`
-
- // The URL of the disk image from which the snapshot is created.
- Url *string `locationName:"url" type:"string"`
-
- // The S3 bucket for the disk image.
- UserBucket *UserBucketDetails `locationName:"userBucket" type:"structure"`
-}
-
-// String returns the string representation
-func (s SnapshotTaskDetail) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s SnapshotTaskDetail) GoString() string {
- return s.String()
-}
-
-// SetDescription sets the Description field's value.
-func (s *SnapshotTaskDetail) SetDescription(v string) *SnapshotTaskDetail {
- s.Description = &v
- return s
-}
-
-// SetDiskImageSize sets the DiskImageSize field's value.
-func (s *SnapshotTaskDetail) SetDiskImageSize(v float64) *SnapshotTaskDetail {
- s.DiskImageSize = &v
- return s
-}
-
-// SetFormat sets the Format field's value.
-func (s *SnapshotTaskDetail) SetFormat(v string) *SnapshotTaskDetail {
- s.Format = &v
- return s
-}
-
-// SetProgress sets the Progress field's value.
-func (s *SnapshotTaskDetail) SetProgress(v string) *SnapshotTaskDetail {
- s.Progress = &v
- return s
-}
-
-// SetSnapshotId sets the SnapshotId field's value.
-func (s *SnapshotTaskDetail) SetSnapshotId(v string) *SnapshotTaskDetail {
- s.SnapshotId = &v
- return s
-}
-
-// SetStatus sets the Status field's value.
-func (s *SnapshotTaskDetail) SetStatus(v string) *SnapshotTaskDetail {
- s.Status = &v
- return s
-}
-
-// SetStatusMessage sets the StatusMessage field's value.
-func (s *SnapshotTaskDetail) SetStatusMessage(v string) *SnapshotTaskDetail {
- s.StatusMessage = &v
- return s
-}
-
-// SetUrl sets the Url field's value.
-func (s *SnapshotTaskDetail) SetUrl(v string) *SnapshotTaskDetail {
- s.Url = &v
- return s
-}
-
-// SetUserBucket sets the UserBucket field's value.
-func (s *SnapshotTaskDetail) SetUserBucket(v *UserBucketDetails) *SnapshotTaskDetail {
- s.UserBucket = v
- return s
-}
-
-type UserBucketDetails struct {
- _ struct{} `type:"structure"`
-
- // The S3 bucket from which the disk image was created.
- S3Bucket *string `locationName:"s3Bucket" type:"string"`
-
- // The file name of the disk image.
- S3Key *string `locationName:"s3Key" type:"string"`
-}
-
-// String returns the string representation
-func (s UserBucketDetails) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s UserBucketDetails) GoString() string {
- return s.String()
-}
-
-// SetS3Bucket sets the S3Bucket field's value.
-func (s *UserBucketDetails) SetS3Bucket(v string) *UserBucketDetails {
- s.S3Bucket = &v
- return s
-}
-
-// SetS3Key sets the S3Key field's value.
-func (s *UserBucketDetails) SetS3Key(v string) *UserBucketDetails {
- s.S3Key = &v
- return s
-}
-
-type UserBucket struct {
- _ struct{} `type:"structure"`
-
- // The name of the S3 bucket where the disk image is located.
- S3Bucket *string `type:"string"`
-
- // The file name of the disk image.
- S3Key *string `type:"string"`
-}
-
-// String returns the string representation
-func (s UserBucket) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s UserBucket) GoString() string {
- return s.String()
-}
-
-// SetS3Bucket sets the S3Bucket field's value.
-func (s *UserBucket) SetS3Bucket(v string) *UserBucket {
- s.S3Bucket = &v
- return s
-}
-
-// SetS3Key sets the S3Key field's value.
-func (s *UserBucket) SetS3Key(v string) *UserBucket {
- s.S3Key = &v
- return s
-}
-
-type DeleteSnapshotInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the EBS snapshot.
- //
- // SnapshotId is a required field
- SnapshotId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteSnapshotInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteSnapshotInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteSnapshotInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteSnapshotInput"}
- if s.SnapshotId == nil {
- invalidParams.Add(request.NewErrParamRequired("SnapshotId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteSnapshotInput) SetDryRun(v bool) *DeleteSnapshotInput {
- s.DryRun = &v
- return s
-}
-
-// SetSnapshotId sets the SnapshotId field's value.
-func (s *DeleteSnapshotInput) SetSnapshotId(v string) *DeleteSnapshotInput {
- s.SnapshotId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteSnapshotOutput
-type DeleteSnapshotOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteSnapshotOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteSnapshotOutput) GoString() string {
- return s.String()
-}
-
-func (s DescribeReservedInstancesInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeReservedInstancesInput) SetDryRun(v bool) *DescribeReservedInstancesInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeReservedInstancesInput) SetFilters(v []*Filter) *DescribeReservedInstancesInput {
- s.Filters = v
- return s
-}
-
-// SetOfferingClass sets the OfferingClass field's value.
-func (s *DescribeReservedInstancesInput) SetOfferingClass(v string) *DescribeReservedInstancesInput {
- s.OfferingClass = &v
- return s
-}
-
-// SetOfferingType sets the OfferingType field's value.
-func (s *DescribeReservedInstancesInput) SetOfferingType(v string) *DescribeReservedInstancesInput {
- s.OfferingType = &v
- return s
-}
-
-// SetReservedInstancesIds sets the ReservedInstancesIds field's value.
-func (s *DescribeReservedInstancesInput) SetReservedInstancesIds(v []*string) *DescribeReservedInstancesInput {
- s.ReservedInstancesIds = v
- return s
-}
-
-type DescribeReservedInstancesOutput struct {
- _ struct{} `type:"structure"`
-
- // A list of Reserved Instances.
- ReservedInstances []*ReservedInstances `locationName:"reservedInstancesSet" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-type ReservedInstances struct {
- _ struct{} `type:"structure"`
-
- // The Availability Zone in which the Reserved Instance can be used.
- AvailabilityZone *string `locationName:"availabilityZone" type:"string"`
-
- // The currency of the Reserved Instance. It's specified using ISO 4217 standard
- // currency codes. At this time, the only supported currency is USD.
- CurrencyCode *string `locationName:"currencyCode" type:"string" enum:"CurrencyCodeValues"`
-
- // The duration of the Reserved Instance, in seconds.
- Duration *int64 `locationName:"duration" type:"long"`
-
- // The time when the Reserved Instance expires.
- End *time.Time `locationName:"end" type:"timestamp" timestampFormat:"iso8601"`
-
- // The purchase price of the Reserved Instance.
- FixedPrice *float64 `locationName:"fixedPrice" type:"float"`
-
- // The number of reservations purchased.
- InstanceCount *int64 `locationName:"instanceCount" type:"integer"`
-
- // The tenancy of the instance.
- InstanceTenancy *string `locationName:"instanceTenancy" type:"string" enum:"Tenancy"`
-
- // The instance type on which the Reserved Instance can be used.
- InstanceType *string `locationName:"instanceType" type:"string" enum:"InstanceType"`
-
- // The offering class of the Reserved Instance.
- OfferingClass *string `locationName:"offeringClass" type:"string" enum:"OfferingClassType"`
-
- // The Reserved Instance offering type.
- OfferingType *string `locationName:"offeringType" type:"string" enum:"OfferingTypeValues"`
-
- // The Reserved Instance product platform description.
- ProductDescription *string `locationName:"productDescription" type:"string" enum:"RIProductDescription"`
-
- // The recurring charge tag assigned to the resource.
- RecurringCharges []*RecurringCharge `locationName:"recurringCharges" locationNameList:"item" type:"list"`
-
- // The ID of the Reserved Instance.
- ReservedInstancesId *string `locationName:"reservedInstancesId" type:"string"`
-
- // The scope of the Reserved Instance.
- Scope *string `locationName:"scope" type:"string" enum:"scope"`
-
- // The date and time the Reserved Instance started.
- Start *time.Time `locationName:"start" type:"timestamp" timestampFormat:"iso8601"`
-
- // The state of the Reserved Instance purchase.
- State *string `locationName:"state" type:"string" enum:"ReservedInstanceState"`
-
- // Any tags assigned to the resource.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The usage price of the Reserved Instance, per hour.
- UsagePrice *float64 `locationName:"usagePrice" type:"float"`
-}
-
-type RecurringCharge struct {
- _ struct{} `type:"structure"`
-
- // The amount of the recurring charge.
- Amount *float64 `locationName:"amount" type:"double"`
-
- // The frequency of the recurring charge.
- Frequency *string `locationName:"frequency" type:"string" enum:"RecurringChargeFrequency"`
-}
-
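// Editor's note: illustrative sketch, not part of the vendored SDK file above.
// On ReservedInstances, Duration is expressed in seconds, FixedPrice is the
// up-front cost and UsagePrice is charged per hour, so a rough total can be
// estimated as FixedPrice + UsagePrice * (Duration / 3600). RecurringCharges are
// ignored here because their frequency values are not enumerated in this file.
func estimateReservationCost(ri *ReservedInstances) float64 {
	if ri == nil {
		return 0
	}
	var total float64
	if ri.FixedPrice != nil {
		total += *ri.FixedPrice
	}
	if ri.UsagePrice != nil && ri.Duration != nil {
		hours := float64(*ri.Duration) / 3600
		total += *ri.UsagePrice * hours
	}
	return total
}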
-type DescribeInstanceTypesInput struct {
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-}
-
-type DescribeInstanceTypesOutput struct {
- InstanceTypeSet []*InstanceType `locationName:"instanceTypeSet" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type InstanceType struct {
- EbsOptimizedAvailable *bool `locationName:"ebsOptimizedAvailable" type:"boolean"`
- MaxIpAddresses *int64 `locationName:"maxIpAddresses" type:"integer"`
- Memory *int64 `locationName:"memory" type:"integer"`
- Name *string `locationName:"name" type:"string"`
- StorageCount *int64 `locationName:"storageCount" type:"integer"`
- StorageSize *int64 `locationName:"storageSize" type:"integer"`
- Vcpu *int64 `locationName:"vcpu" type:"integer"`
-}
-
-type DescribeReservedInstancesOfferingsInput struct {
- AvailabilityZone *string `locationName:"availabilityZone" type:"string"`
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
- InstanceTenancy *string `locationName:"instanceTenancy" type:"string" enum:"Tenancy"`
- InstanceType *string `locationName:"instanceType" type:"string" enum:"InstanceType"`
- OfferingType *string `locationName:"offeringType" type:"string" enum:"OfferingTypeValues"`
- ProductDescription *string `locationName:"productDescription" type:"string" enum:"RIProductDescription"`
- ReservedInstancesOfferingIds []*string `locationName:"reservedInstancesOfferingId" type:"list"`
-}
-
-type DescribeReservedInstancesOfferingsOutput struct {
- ReservedInstancesOfferingsSet []*ReservedInstancesOffering `locationName:"reservedInstancesOfferingsSet" locationNameList:"item" type:"list"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type ReservedInstancesOffering struct {
- AvailabilityZone *string `locationName:"availabilityZone" type:"string"`
- CurrencyCode *string `locationName:"currencyCode" type:"string"`
- Duration *string `locationName:"duration" type:"string"`
- FixedPrice *int64 `locationName:"fixedPrice" type:"integer"`
- InstanceTenancy *string `locationName:"instanceTenancy" type:"string" enum:"Tenancy"`
- InstanceType *string `locationName:"instanceType" type:"string" enum:"InstanceType"`
- Martketplace *bool `locationName:"martketplace" type:"boolean"`
- OfferingType *string `locationName:"offeringType" type:"string" enum:"OfferingTypeValues"`
- ProductDescription *string `locationName:"productDescription" type:"string" enum:"RIProductDescription"`
- PricingDetailsSet []*PricingDetail `locationName:"pricingDetail" locationNameList:"item" type:"list"`
- RecurringCharges []*RecurringCharge `locationName:"recurringCharges" locationNameList:"item" type:"list"`
- ReservedInstancesOfferingId *string `locationName:"reservedInstancesOfferingId" type:"string"`
- UsagePrice *int64 `locationName:"usagePrice" type:"integer"`
-}
-
-type PricingDetail struct {
- Count *int64 `locationName:"count" type:"integer"`
-}
-
-type DescribeImageAttributeInput struct {
- _ struct{} `type:"structure"`
-
- // The AMI attribute.
- //
- // Note: Depending on your account privileges, the blockDeviceMapping attribute
- // may return a Client.AuthFailure error. If this happens, use DescribeImages
- // to get information about the block device mapping for the AMI.
- //
- // Attribute is a required field
- Attribute *string `type:"string" required:"true" enum:"ImageAttributeName"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the AMI.
- //
- // ImageId is a required field
- ImageId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DescribeImageAttributeInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeImageAttributeInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DescribeImageAttributeInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DescribeImageAttributeInput"}
- if s.Attribute == nil {
- invalidParams.Add(request.NewErrParamRequired("Attribute"))
- }
- if s.ImageId == nil {
- invalidParams.Add(request.NewErrParamRequired("ImageId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAttribute sets the Attribute field's value.
-func (s *DescribeImageAttributeInput) SetAttribute(v string) *DescribeImageAttributeInput {
- s.Attribute = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeImageAttributeInput) SetDryRun(v bool) *DescribeImageAttributeInput {
- s.DryRun = &v
- return s
-}
-
-// SetImageId sets the ImageId field's value.
-func (s *DescribeImageAttributeInput) SetImageId(v string) *DescribeImageAttributeInput {
- s.ImageId = &v
- return s
-}
-
-// Describes an image attribute.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ImageAttribute
-type DescribeImageAttributeOutput struct {
- _ struct{} `type:"structure"`
-
- // One or more block device mapping entries.
- BlockDeviceMappings []*BlockDeviceMapping `locationName:"blockDeviceMapping" locationNameList:"item" type:"list"`
-
- // A description for the AMI.
- Description *AttributeValue `locationName:"description" type:"structure"`
-
- // The ID of the AMI.
- ImageId *string `locationName:"imageId" type:"string"`
-
- // The kernel ID.
- KernelId *AttributeValue `locationName:"kernel" type:"structure"`
-
- // One or more launch permissions.
- LaunchPermissions []*LaunchPermission `locationName:"launchPermission" locationNameList:"item" type:"list"`
-
- // One or more product codes.
- ProductCodes []*ProductCode `locationName:"productCodes" locationNameList:"item" type:"list"`
-
- // The RAM disk ID.
- RamdiskId *AttributeValue `locationName:"ramdisk" type:"structure"`
-
- // Indicates whether enhanced networking with the Intel 82599 Virtual Function
- // interface is enabled.
- SriovNetSupport *AttributeValue `locationName:"sriovNetSupport" type:"structure"`
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeImageAttributeOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeImageAttributeOutput) GoString() string {
- return s.String()
-}
-
-// SetBlockDeviceMappings sets the BlockDeviceMappings field's value.
-func (s *DescribeImageAttributeOutput) SetBlockDeviceMappings(v []*BlockDeviceMapping) *DescribeImageAttributeOutput {
- s.BlockDeviceMappings = v
- return s
-}
-
-// SetDescription sets the Description field's value.
-func (s *DescribeImageAttributeOutput) SetDescription(v *AttributeValue) *DescribeImageAttributeOutput {
- s.Description = v
- return s
-}
-
-// SetImageId sets the ImageId field's value.
-func (s *DescribeImageAttributeOutput) SetImageId(v string) *DescribeImageAttributeOutput {
- s.ImageId = &v
- return s
-}
-
-// SetKernelId sets the KernelId field's value.
-func (s *DescribeImageAttributeOutput) SetKernelId(v *AttributeValue) *DescribeImageAttributeOutput {
- s.KernelId = v
- return s
-}
-
-// SetLaunchPermissions sets the LaunchPermissions field's value.
-func (s *DescribeImageAttributeOutput) SetLaunchPermissions(v []*LaunchPermission) *DescribeImageAttributeOutput {
- s.LaunchPermissions = v
- return s
-}
-
-// SetProductCodes sets the ProductCodes field's value.
-func (s *DescribeImageAttributeOutput) SetProductCodes(v []*ProductCode) *DescribeImageAttributeOutput {
- s.ProductCodes = v
- return s
-}
-
-// SetRamdiskId sets the RamdiskId field's value.
-func (s *DescribeImageAttributeOutput) SetRamdiskId(v *AttributeValue) *DescribeImageAttributeOutput {
- s.RamdiskId = v
- return s
-}
-
-// SetSriovNetSupport sets the SriovNetSupport field's value.
-func (s *DescribeImageAttributeOutput) SetSriovNetSupport(v *AttributeValue) *DescribeImageAttributeOutput {
- s.SriovNetSupport = v
- return s
-}
-
-type CreateVpcPeeringConnectionInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The AWS account ID of the owner of the peer VPC.
- //
- // Default: Your AWS account ID
- PeerOwnerId *string `locationName:"peerOwnerId" type:"string"`
-
- // The ID of the VPC with which you are creating the VPC peering connection.
- PeerVpcId *string `locationName:"peerVpcId" type:"string"`
-
- // The ID of the requester VPC.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-// String returns the string representation
-func (s CreateVpcPeeringConnectionInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateVpcPeeringConnectionInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *CreateVpcPeeringConnectionInput) SetDryRun(v bool) *CreateVpcPeeringConnectionInput {
- s.DryRun = &v
- return s
-}
-
-// SetPeerOwnerId sets the PeerOwnerId field's value.
-func (s *CreateVpcPeeringConnectionInput) SetPeerOwnerId(v string) *CreateVpcPeeringConnectionInput {
- s.PeerOwnerId = &v
- return s
-}
-
-// SetPeerVpcId sets the PeerVpcId field's value.
-func (s *CreateVpcPeeringConnectionInput) SetPeerVpcId(v string) *CreateVpcPeeringConnectionInput {
- s.PeerVpcId = &v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *CreateVpcPeeringConnectionInput) SetVpcId(v string) *CreateVpcPeeringConnectionInput {
- s.VpcId = &v
- return s
-}
-
-// Contains the output of CreateVpcPeeringConnection.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateVpcPeeringConnectionResult
-type CreateVpcPeeringConnectionOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the VPC peering connection.
- VpcPeeringConnection *VpcPeeringConnection `locationName:"vpcPeeringConnection" type:"structure"`
-}
-
-// String returns the string representation
-func (s CreateVpcPeeringConnectionOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateVpcPeeringConnectionOutput) GoString() string {
- return s.String()
-}
-
-// SetVpcPeeringConnection sets the VpcPeeringConnection field's value.
-func (s *CreateVpcPeeringConnectionOutput) SetVpcPeeringConnection(v *VpcPeeringConnection) *CreateVpcPeeringConnectionOutput {
- s.VpcPeeringConnection = v
- return s
-}
-
-type VpcPeeringConnection struct {
- _ struct{} `type:"structure"`
-
- // Information about the accepter VPC. CIDR block information is not returned
- // when creating a VPC peering connection, or when describing a VPC peering
- // connection that's in the initiating-request or pending-acceptance state.
- AccepterVpcInfo *VpcPeeringConnectionVpcInfo `locationName:"accepterVpcInfo" type:"structure"`
-
- // The time that an unaccepted VPC peering connection will expire.
- ExpirationTime *time.Time `locationName:"expirationTime" type:"timestamp" timestampFormat:"iso8601"`
-
- // Information about the requester VPC.
- RequesterVpcInfo *VpcPeeringConnectionVpcInfo `locationName:"requesterVpcInfo" type:"structure"`
-
- // The status of the VPC peering connection.
- Status *VpcPeeringConnectionStateReason `locationName:"status" type:"structure"`
-
- // Any tags assigned to the resource.
- Tags []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The ID of the VPC peering connection.
- VpcPeeringConnectionId *string `locationName:"vpcPeeringConnectionId" type:"string"`
-}
-
-type VpcPeeringConnectionVpcInfo struct {
- _ struct{} `type:"structure"`
-
- // The IPv4 CIDR block for the VPC.
- CidrBlock *string `locationName:"cidrBlock" type:"string"`
-
- // The IPv6 CIDR block for the VPC.
- Ipv6CidrBlockSet []*Ipv6CidrBlock `locationName:"ipv6CidrBlockSet" locationNameList:"item" type:"list"`
-
- // The AWS account ID of the VPC owner.
- OwnerId *string `locationName:"ownerId" type:"string"`
-
- // Information about the VPC peering connection options for the accepter or
- // requester VPC.
- PeeringOptions *VpcPeeringConnectionOptionsDescription `locationName:"peeringOptions" type:"structure"`
-
- // The ID of the VPC.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-type Ipv6CidrBlock struct {
- _ struct{} `type:"structure"`
-
- // The IPv6 CIDR block.
- Ipv6CidrBlock *string `locationName:"ipv6CidrBlock" type:"string"`
-}
-
-type VpcPeeringConnectionOptionsDescription struct {
- _ struct{} `type:"structure"`
-
- // Indicates whether a local VPC can resolve public DNS hostnames to private
- // IP addresses when queried from instances in a peer VPC.
- AllowDnsResolutionFromRemoteVpc *bool `locationName:"allowDnsResolutionFromRemoteVpc" type:"boolean"`
-
- // Indicates whether a local ClassicLink connection can communicate with the
- // peer VPC over the VPC peering connection.
- AllowEgressFromLocalClassicLinkToRemoteVpc *bool `locationName:"allowEgressFromLocalClassicLinkToRemoteVpc" type:"boolean"`
-
- // Indicates whether a local VPC can communicate with a ClassicLink connection
- // in the peer VPC over the VPC peering connection.
- AllowEgressFromLocalVpcToRemoteClassicLink *bool `locationName:"allowEgressFromLocalVpcToRemoteClassicLink" type:"boolean"`
-}
-
-type VpcPeeringConnectionStateReason struct {
- _ struct{} `type:"structure"`
-
- // The status of the VPC peering connection.
- Code *string `locationName:"code" type:"string" enum:"VpcPeeringConnectionStateReasonCode"`
-
- // A message that provides more information about the status, if applicable.
- Message *string `locationName:"message" type:"string"`
-}
-
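// Editor's note: illustrative sketch, not part of the vendored SDK file above.
// The filter documentation below lists pending-acceptance among the possible
// status codes of a peering connection. This helper only reads the nested Status
// structure defined above; nothing beyond these types is assumed.
func isPendingAcceptance(conn *VpcPeeringConnection) bool {
	if conn == nil || conn.Status == nil || conn.Status.Code == nil {
		return false
	}
	return *conn.Status.Code == "pending-acceptance"
}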
-type DescribeVpcPeeringConnectionsInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * accepter-vpc-info.cidr-block - The IPv4 CIDR block of the peer VPC.
- //
- // * accepter-vpc-info.owner-id - The AWS account ID of the owner of the
- // peer VPC.
- //
- // * accepter-vpc-info.vpc-id - The ID of the peer VPC.
- //
- // * expiration-time - The expiration date and time for the VPC peering connection.
- //
- // * requester-vpc-info.cidr-block - The IPv4 CIDR block of the requester's
- // VPC.
- //
- // * requester-vpc-info.owner-id - The AWS account ID of the owner of the
- // requester VPC.
- //
- // * requester-vpc-info.vpc-id - The ID of the requester VPC.
- //
- // * status-code - The status of the VPC peering connection (pending-acceptance
- // | failed | expired | provisioning | active | deleted | rejected).
- //
- // * status-message - A message that provides more information about the
- // status of the VPC peering connection, if applicable.
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- //
- // * vpc-peering-connection-id - The ID of the VPC peering connection.
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // One or more VPC peering connection IDs.
- //
- // Default: Describes all your VPC peering connections.
- VpcPeeringConnectionIds []*string `locationName:"VpcPeeringConnectionId" locationNameList:"item" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeVpcPeeringConnectionsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVpcPeeringConnectionsInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeVpcPeeringConnectionsInput) SetDryRun(v bool) *DescribeVpcPeeringConnectionsInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeVpcPeeringConnectionsInput) SetFilters(v []*Filter) *DescribeVpcPeeringConnectionsInput {
- s.Filters = v
- return s
-}
-
-// SetVpcPeeringConnectionIds sets the VpcPeeringConnectionIds field's value.
-func (s *DescribeVpcPeeringConnectionsInput) SetVpcPeeringConnectionIds(v []*string) *DescribeVpcPeeringConnectionsInput {
- s.VpcPeeringConnectionIds = v
- return s
-}
-
-// Contains the output of DescribeVpcPeeringConnections.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeVpcPeeringConnectionsResult
-type DescribeVpcPeeringConnectionsOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the VPC peering connections.
- VpcPeeringConnections []*VpcPeeringConnection `locationName:"vpcPeeringConnectionSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeVpcPeeringConnectionsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeVpcPeeringConnectionsOutput) GoString() string {
- return s.String()
-}
-
-// SetVpcPeeringConnections sets the VpcPeeringConnections field's value.
-func (s *DescribeVpcPeeringConnectionsOutput) SetVpcPeeringConnections(v []*VpcPeeringConnection) *DescribeVpcPeeringConnectionsOutput {
- s.VpcPeeringConnections = v
- return s
-}
-
-type AcceptVpcPeeringConnectionInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the VPC peering connection.
- VpcPeeringConnectionId *string `locationName:"vpcPeeringConnectionId" type:"string"`
-}
-
-// String returns the string representation
-func (s AcceptVpcPeeringConnectionInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AcceptVpcPeeringConnectionInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *AcceptVpcPeeringConnectionInput) SetDryRun(v bool) *AcceptVpcPeeringConnectionInput {
- s.DryRun = &v
- return s
-}
-
-// SetVpcPeeringConnectionId sets the VpcPeeringConnectionId field's value.
-func (s *AcceptVpcPeeringConnectionInput) SetVpcPeeringConnectionId(v string) *AcceptVpcPeeringConnectionInput {
- s.VpcPeeringConnectionId = &v
- return s
-}
-
-// Contains the output of AcceptVpcPeeringConnection.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/AcceptVpcPeeringConnectionResult
-type AcceptVpcPeeringConnectionOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the VPC peering connection.
- VpcPeeringConnection *VpcPeeringConnection `locationName:"vpcPeeringConnection" type:"structure"`
-}
-
-// String returns the string representation
-func (s AcceptVpcPeeringConnectionOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AcceptVpcPeeringConnectionOutput) GoString() string {
- return s.String()
-}
-
-// SetVpcPeeringConnection sets the VpcPeeringConnection field's value.
-func (s *AcceptVpcPeeringConnectionOutput) SetVpcPeeringConnection(v *VpcPeeringConnection) *AcceptVpcPeeringConnectionOutput {
- s.VpcPeeringConnection = v
- return s
-}
-
-type ModifyVpcPeeringConnectionOptionsInput struct {
- _ struct{} `type:"structure"`
-
- // The VPC peering connection options for the accepter VPC.
- AccepterPeeringConnectionOptions *PeeringConnectionOptionsRequest `type:"structure"`
-
- // Checks whether you have the required permissions for the operation, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `type:"boolean"`
-
- // The VPC peering connection options for the requester VPC.
- RequesterPeeringConnectionOptions *PeeringConnectionOptionsRequest `type:"structure"`
-
- // The ID of the VPC peering connection.
- //
- // VpcPeeringConnectionId is a required field
- VpcPeeringConnectionId *string `type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s ModifyVpcPeeringConnectionOptionsInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ModifyVpcPeeringConnectionOptionsInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *ModifyVpcPeeringConnectionOptionsInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "ModifyVpcPeeringConnectionOptionsInput"}
- if s.VpcPeeringConnectionId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpcPeeringConnectionId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAccepterPeeringConnectionOptions sets the AccepterPeeringConnectionOptions field's value.
-func (s *ModifyVpcPeeringConnectionOptionsInput) SetAccepterPeeringConnectionOptions(v *PeeringConnectionOptionsRequest) *ModifyVpcPeeringConnectionOptionsInput {
- s.AccepterPeeringConnectionOptions = v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *ModifyVpcPeeringConnectionOptionsInput) SetDryRun(v bool) *ModifyVpcPeeringConnectionOptionsInput {
- s.DryRun = &v
- return s
-}
-
-// SetRequesterPeeringConnectionOptions sets the RequesterPeeringConnectionOptions field's value.
-func (s *ModifyVpcPeeringConnectionOptionsInput) SetRequesterPeeringConnectionOptions(v *PeeringConnectionOptionsRequest) *ModifyVpcPeeringConnectionOptionsInput {
- s.RequesterPeeringConnectionOptions = v
- return s
-}
-
-// SetVpcPeeringConnectionId sets the VpcPeeringConnectionId field's value.
-func (s *ModifyVpcPeeringConnectionOptionsInput) SetVpcPeeringConnectionId(v string) *ModifyVpcPeeringConnectionOptionsInput {
- s.VpcPeeringConnectionId = &v
- return s
-}
-
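// A minimal usage sketch, not part of the generated file: it assumes it lives in the same
// package as the types above, and the peering connection ID is an illustrative placeholder.
func exampleModifyVpcPeeringConnectionOptions() error {
	input := (&ModifyVpcPeeringConnectionOptionsInput{}).
		SetVpcPeeringConnectionId("pcx-12345678").
		SetAccepterPeeringConnectionOptions(&PeeringConnectionOptionsRequest{})
	// Validate reports a missing required VpcPeeringConnectionId before any request is sent.
	return input.Validate()
}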
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ModifyVpcPeeringConnectionOptionsResult
-type ModifyVpcPeeringConnectionOptionsOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the VPC peering connection options for the accepter VPC.
- AccepterPeeringConnectionOptions *PeeringConnectionOptions `locationName:"accepterPeeringConnectionOptions" type:"structure"`
-
- // Information about the VPC peering connection options for the requester VPC.
- RequesterPeeringConnectionOptions *PeeringConnectionOptions `locationName:"requesterPeeringConnectionOptions" type:"structure"`
-}
-
-// String returns the string representation
-func (s ModifyVpcPeeringConnectionOptionsOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ModifyVpcPeeringConnectionOptionsOutput) GoString() string {
- return s.String()
-}
-
-// SetAccepterPeeringConnectionOptions sets the AccepterPeeringConnectionOptions field's value.
-func (s *ModifyVpcPeeringConnectionOptionsOutput) SetAccepterPeeringConnectionOptions(v *PeeringConnectionOptions) *ModifyVpcPeeringConnectionOptionsOutput {
- s.AccepterPeeringConnectionOptions = v
- return s
-}
-
-// SetRequesterPeeringConnectionOptions sets the RequesterPeeringConnectionOptions field's value.
-func (s *ModifyVpcPeeringConnectionOptionsOutput) SetRequesterPeeringConnectionOptions(v *PeeringConnectionOptions) *ModifyVpcPeeringConnectionOptionsOutput {
- s.RequesterPeeringConnectionOptions = v
- return s
-}
-
-type PeeringConnectionOptions struct {
- _ struct{} `type:"structure"`
-
- // If true, enables a local VPC to resolve public DNS hostnames to private IP
- // addresses when queried from instances in the peer VPC.
- AllowDnsResolutionFromRemoteVpc *bool `locationName:"allowDnsResolutionFromRemoteVpc" type:"boolean"`
-
- // If true, enables outbound communication from an EC2-Classic instance that's
- // linked to a local VPC via ClassicLink to instances in a peer VPC.
- AllowEgressFromLocalClassicLinkToRemoteVpc *bool `locationName:"allowEgressFromLocalClassicLinkToRemoteVpc" type:"boolean"`
-
- // If true, enables outbound communication from instances in a local VPC to
- // an EC2-Classic instance that's linked to a peer VPC via ClassicLink.
- AllowEgressFromLocalVpcToRemoteClassicLink *bool `locationName:"allowEgressFromLocalVpcToRemoteClassicLink" type:"boolean"`
-}
-
-type PeeringConnectionOptionsRequest struct {
- _ struct{} `type:"structure"`
-
- // If true, enables a local VPC to resolve public DNS hostnames to private IP
- // addresses when queried from instances in the peer VPC.
- AllowDnsResolutionFromRemoteVpc *bool `type:"boolean"`
-
- // If true, enables outbound communication from an EC2-Classic instance that's
- // linked to a local VPC via ClassicLink to instances in a peer VPC.
- AllowEgressFromLocalClassicLinkToRemoteVpc *bool `type:"boolean"`
-
- // If true, enables outbound communication from instances in a local VPC to
- // an EC2-Classic instance that's linked to a peer VPC via ClassicLink.
- AllowEgressFromLocalVpcToRemoteClassicLink *bool `type:"boolean"`
-}
-
-type DeleteVpcPeeringConnectionInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the VPC peering connection.
- //
- // VpcPeeringConnectionId is a required field
- VpcPeeringConnectionId *string `locationName:"vpcPeeringConnectionId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteVpcPeeringConnectionInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteVpcPeeringConnectionInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteVpcPeeringConnectionInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteVpcPeeringConnectionInput"}
- if s.VpcPeeringConnectionId == nil {
- invalidParams.Add(request.NewErrParamRequired("VpcPeeringConnectionId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteVpcPeeringConnectionInput) SetDryRun(v bool) *DeleteVpcPeeringConnectionInput {
- s.DryRun = &v
- return s
-}
-
-// SetVpcPeeringConnectionId sets the VpcPeeringConnectionId field's value.
-func (s *DeleteVpcPeeringConnectionInput) SetVpcPeeringConnectionId(v string) *DeleteVpcPeeringConnectionInput {
- s.VpcPeeringConnectionId = &v
- return s
-}
-
-// Contains the output of DeleteVpcPeeringConnection.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteVpcPeeringConnectionResult
-type DeleteVpcPeeringConnectionOutput struct {
- _ struct{} `type:"structure"`
-
- // Returns true if the request succeeds; otherwise, it returns an error.
- Return *bool `locationName:"return" type:"boolean"`
-}
-
-// String returns the string representation
-func (s DeleteVpcPeeringConnectionOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteVpcPeeringConnectionOutput) GoString() string {
- return s.String()
-}
-
-// SetReturn sets the Return field's value.
-func (s *DeleteVpcPeeringConnectionOutput) SetReturn(v bool) *DeleteVpcPeeringConnectionOutput {
- s.Return = &v
- return s
-}
-
-//
-//
-// Create Network Interface
-
-// Contains the parameters for CreateNetworkInterface.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateNetworkInterfaceRequest
-type CreateNetworkInterfaceInput struct {
- _ struct{} `type:"structure"`
-
- // A description for the network interface.
- Description *string `locationName:"description" type:"string"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The IDs of one or more security groups.
- Groups []*string `locationName:"SecurityGroupId" locationNameList:"SecurityGroupId" type:"list"`
-
- // The number of IPv6 addresses to assign to a network interface. Amazon EC2
- // automatically selects the IPv6 addresses from the subnet range. You can't
- // use this option if specifying specific IPv6 addresses. If your subnet has
- // the AssignIpv6AddressOnCreation attribute set to true, you can specify 0
- // to override this setting.
- Ipv6AddressCount *int64 `locationName:"ipv6AddressCount" type:"integer"`
-
- // One or more specific IPv6 addresses from the IPv6 CIDR block range of your
- // subnet. You can't use this option if you're specifying a number of IPv6 addresses.
- Ipv6Addresses []*InstanceIpv6Address `locationName:"ipv6Addresses" locationNameList:"item" type:"list"`
-
- // The primary private IPv4 address of the network interface. If you don't specify
- // an IPv4 address, Amazon EC2 selects one for you from the subnet's IPv4 CIDR
- // range. If you specify an IP address, you cannot indicate any IP addresses
- // specified in privateIpAddresses as primary (only one IP address can be designated
- // as primary).
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
-
- // One or more private IPv4 addresses.
- PrivateIpAddresses []*PrivateIpAddressSpecification `locationName:"privateIpAddresses" locationNameList:"item" type:"list"`
-
- // The number of secondary private IPv4 addresses to assign to a network interface.
- // When you specify a number of secondary IPv4 addresses, Amazon EC2 selects
- // these IP addresses within the subnet's IPv4 CIDR range. You can't specify
- // this option and specify more than one private IP address using privateIpAddresses.
- //
- // The number of IP addresses you can assign to a network interface varies by
- // instance type. For more information, see IP Addresses Per ENI Per Instance
- // Type (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using-eni.html#AvailableIpPerENI)
- // in the Amazon Virtual Private Cloud User Guide.
- SecondaryPrivateIpAddressCount *int64 `locationName:"secondaryPrivateIpAddressCount" type:"integer"`
-
- // The ID of the subnet to associate with the network interface.
- //
- // SubnetId is a required field
- SubnetId *string `locationName:"subnetId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s CreateNetworkInterfaceInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateNetworkInterfaceInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *CreateNetworkInterfaceInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "CreateNetworkInterfaceInput"}
- if s.SubnetId == nil {
- invalidParams.Add(request.NewErrParamRequired("SubnetId"))
- }
- if s.PrivateIpAddresses != nil {
- for i, v := range s.PrivateIpAddresses {
- if v == nil {
- continue
- }
- if err := v.Validate(); err != nil {
- invalidParams.AddNested(fmt.Sprintf("%s[%v]", "PrivateIpAddresses", i), err.(request.ErrInvalidParams))
- }
- }
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDescription sets the Description field's value.
-func (s *CreateNetworkInterfaceInput) SetDescription(v string) *CreateNetworkInterfaceInput {
- s.Description = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *CreateNetworkInterfaceInput) SetDryRun(v bool) *CreateNetworkInterfaceInput {
- s.DryRun = &v
- return s
-}
-
-// SetGroups sets the Groups field's value.
-func (s *CreateNetworkInterfaceInput) SetGroups(v []*string) *CreateNetworkInterfaceInput {
- s.Groups = v
- return s
-}
-
-// SetIpv6AddressCount sets the Ipv6AddressCount field's value.
-func (s *CreateNetworkInterfaceInput) SetIpv6AddressCount(v int64) *CreateNetworkInterfaceInput {
- s.Ipv6AddressCount = &v
- return s
-}
-
-// SetIpv6Addresses sets the Ipv6Addresses field's value.
-func (s *CreateNetworkInterfaceInput) SetIpv6Addresses(v []*InstanceIpv6Address) *CreateNetworkInterfaceInput {
- s.Ipv6Addresses = v
- return s
-}
-
-// SetPrivateIpAddress sets the PrivateIpAddress field's value.
-func (s *CreateNetworkInterfaceInput) SetPrivateIpAddress(v string) *CreateNetworkInterfaceInput {
- s.PrivateIpAddress = &v
- return s
-}
-
-// SetPrivateIpAddresses sets the PrivateIpAddresses field's value.
-func (s *CreateNetworkInterfaceInput) SetPrivateIpAddresses(v []*PrivateIpAddressSpecification) *CreateNetworkInterfaceInput {
- s.PrivateIpAddresses = v
- return s
-}
-
-// SetSecondaryPrivateIpAddressCount sets the SecondaryPrivateIpAddressCount field's value.
-func (s *CreateNetworkInterfaceInput) SetSecondaryPrivateIpAddressCount(v int64) *CreateNetworkInterfaceInput {
- s.SecondaryPrivateIpAddressCount = &v
- return s
-}
-
-// SetSubnetId sets the SubnetId field's value.
-func (s *CreateNetworkInterfaceInput) SetSubnetId(v string) *CreateNetworkInterfaceInput {
- s.SubnetId = &v
- return s
-}
-
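// A brief sketch, assuming the same package as the generated types: it chains the setters
// above into a request value and runs the same Validate step; the subnet ID is a placeholder.
func exampleCreateNetworkInterfaceInput() error {
	input := (&CreateNetworkInterfaceInput{}).
		SetSubnetId("subnet-12345678").
		SetDescription("example network interface").
		SetSecondaryPrivateIpAddressCount(1)
	// Validate flags a missing SubnetId and checks each PrivateIpAddresses entry, if any.
	return input.Validate()
}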
-// Contains the output of CreateNetworkInterface.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateNetworkInterfaceResult
-type CreateNetworkInterfaceOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about the network interface.
- NetworkInterface *NetworkInterface `locationName:"networkInterface" type:"structure"`
-}
-
-// String returns the string representation
-func (s CreateNetworkInterfaceOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s CreateNetworkInterfaceOutput) GoString() string {
- return s.String()
-}
-
-// SetNetworkInterface sets the NetworkInterface field's value.
-func (s *CreateNetworkInterfaceOutput) SetNetworkInterface(v *NetworkInterface) *CreateNetworkInterfaceOutput {
- s.NetworkInterface = v
- return s
-}
-
-// Contains the parameters for DeleteNetworkInterface.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteNetworkInterfaceRequest
-type DeleteNetworkInterfaceInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the network interface.
- //
- // NetworkInterfaceId is a required field
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DeleteNetworkInterfaceInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteNetworkInterfaceInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DeleteNetworkInterfaceInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DeleteNetworkInterfaceInput"}
- if s.NetworkInterfaceId == nil {
- invalidParams.Add(request.NewErrParamRequired("NetworkInterfaceId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DeleteNetworkInterfaceInput) SetDryRun(v bool) *DeleteNetworkInterfaceInput {
- s.DryRun = &v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *DeleteNetworkInterfaceInput) SetNetworkInterfaceId(v string) *DeleteNetworkInterfaceInput {
- s.NetworkInterfaceId = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteNetworkInterfaceOutput
-type DeleteNetworkInterfaceOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DeleteNetworkInterfaceOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DeleteNetworkInterfaceOutput) GoString() string {
- return s.String()
-}
-
-// Contains the parameters for DescribeNetworkInterfaces.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeNetworkInterfacesRequest
-type DescribeNetworkInterfacesInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // One or more filters.
- //
- // * addresses.private-ip-address - The private IPv4 addresses associated
- // with the network interface.
- //
- // * addresses.primary - Whether the private IPv4 address is the primary
- // IP address associated with the network interface.
- //
- // * addresses.association.public-ip - The association ID returned when the
- // network interface was associated with the Elastic IP address (IPv4).
- //
- // * addresses.association.owner-id - The owner ID of the addresses associated
- // with the network interface.
- //
- // * association.association-id - The association ID returned when the network
- // interface was associated with an IPv4 address.
- //
- // * association.allocation-id - The allocation ID returned when you allocated
- // the Elastic IP address (IPv4) for your network interface.
- //
- // * association.ip-owner-id - The owner of the Elastic IP address (IPv4)
- // associated with the network interface.
- //
- // * association.public-ip - The address of the Elastic IP address (IPv4)
- // bound to the network interface.
- //
- // * association.public-dns-name - The public DNS name for the network interface
- // (IPv4).
- //
- // * attachment.attachment-id - The ID of the interface attachment.
- //
- // * attachment.attach.time - The time that the network interface was attached
- // to an instance.
- //
- // * attachment.delete-on-termination - Indicates whether the attachment
- // is deleted when an instance is terminated.
- //
- // * attachment.device-index - The device index to which the network interface
- // is attached.
- //
- // * attachment.instance-id - The ID of the instance to which the network
- // interface is attached.
- //
- // * attachment.instance-owner-id - The owner ID of the instance to which
- // the network interface is attached.
- //
- // * attachment.nat-gateway-id - The ID of the NAT gateway to which the network
- // interface is attached.
- //
- // * attachment.status - The status of the attachment (attaching | attached
- // | detaching | detached).
- //
- // * availability-zone - The Availability Zone of the network interface.
- //
- // * description - The description of the network interface.
- //
- // * group-id - The ID of a security group associated with the network interface.
- //
- // * group-name - The name of a security group associated with the network
- // interface.
- //
- // * ipv6-addresses.ipv6-address - An IPv6 address associated with the network
- // interface.
- //
- // * mac-address - The MAC address of the network interface.
- //
- // * network-interface-id - The ID of the network interface.
- //
- // * owner-id - The AWS account ID of the network interface owner.
- //
- // * private-ip-address - The private IPv4 address or addresses of the network
- // interface.
- //
- // * private-dns-name - The private DNS name of the network interface (IPv4).
- //
- // * requester-id - The ID of the entity that launched the instance on your
- // behalf (for example, AWS Management Console, Auto Scaling, and so on).
- //
- // * requester-managed - Indicates whether the network interface is being
- // managed by an AWS service (for example, AWS Management Console, Auto Scaling,
- // and so on).
- //
- // * source-dest-check - Indicates whether the network interface performs
- // source/destination checking. A value of true means checking is enabled,
- // and false means checking is disabled. The value must be false for the
- // network interface to perform network address translation (NAT) in your
- // VPC.
- //
- // * status - The status of the network interface. If the network interface
- // is not attached to an instance, the status is available; if a network
- // interface is attached to an instance the status is in-use.
- //
- // * subnet-id - The ID of the subnet for the network interface.
- //
- // * tag:key=value - The key/value combination of a tag assigned to the resource.
- // Specify the key of the tag in the filter name and the value of the tag
- // in the filter value. For example, for the tag Purpose=X, specify tag:Purpose
- // for the filter name and X for the filter value.
- //
- // * tag-key - The key of a tag assigned to the resource. This filter is
- // independent of the tag-value filter. For example, if you use both the
- // filter "tag-key=Purpose" and the filter "tag-value=X", you get any resources
- // assigned both the tag key Purpose (regardless of what the tag's value
- // is), and the tag value X (regardless of what the tag's key is). If you
- // want to list only resources where Purpose is X, see the tag:key=value
- // filter.
- //
- // * tag-value - The value of a tag assigned to the resource. This filter
- // is independent of the tag-key filter.
- //
- // * vpc-id - The ID of the VPC for the network interface.
- Filters []*Filter `locationName:"filter" locationNameList:"Filter" type:"list"`
-
- // One or more network interface IDs.
- //
- // Default: Describes all your network interfaces.
- NetworkInterfaceIds []*string `locationName:"NetworkInterfaceId" locationNameList:"item" type:"list"`
-}
-
-// String returns the string representation
-func (s DescribeNetworkInterfacesInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeNetworkInterfacesInput) GoString() string {
- return s.String()
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeNetworkInterfacesInput) SetDryRun(v bool) *DescribeNetworkInterfacesInput {
- s.DryRun = &v
- return s
-}
-
-// SetFilters sets the Filters field's value.
-func (s *DescribeNetworkInterfacesInput) SetFilters(v []*Filter) *DescribeNetworkInterfacesInput {
- s.Filters = v
- return s
-}
-
-// SetNetworkInterfaceIds sets the NetworkInterfaceIds field's value.
-func (s *DescribeNetworkInterfacesInput) SetNetworkInterfaceIds(v []*string) *DescribeNetworkInterfacesInput {
- s.NetworkInterfaceIds = v
- return s
-}
-
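// A short sketch, assuming the same package: it narrows DescribeNetworkInterfaces to specific
// interface IDs instead of using the filters documented above; the ID is a placeholder.
func exampleDescribeNetworkInterfacesInput() *DescribeNetworkInterfacesInput {
	id := "eni-12345678"
	return (&DescribeNetworkInterfacesInput{}).
		SetNetworkInterfaceIds([]*string{&id}).
		SetDryRun(false)
}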
-// Contains the output of DescribeNetworkInterfaces.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeNetworkInterfacesResult
-type DescribeNetworkInterfacesOutput struct {
- _ struct{} `type:"structure"`
-
- // Information about one or more network interfaces.
- NetworkInterfaces []*NetworkInterface `locationName:"networkInterfaceSet" locationNameList:"item" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// String returns the string representation
-func (s DescribeNetworkInterfacesOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeNetworkInterfacesOutput) GoString() string {
- return s.String()
-}
-
-// SetNetworkInterfaces sets the NetworkInterfaces field's value.
-func (s *DescribeNetworkInterfacesOutput) SetNetworkInterfaces(v []*NetworkInterface) *DescribeNetworkInterfacesOutput {
- s.NetworkInterfaces = v
- return s
-}
-
-// Contains the parameters for ModifyNetworkInterfaceAttribute.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ModifyNetworkInterfaceAttributeRequest
-type ModifyNetworkInterfaceAttributeInput struct {
- _ struct{} `type:"structure"`
-
- // Information about the interface attachment. If modifying the 'delete on termination'
- // attribute, you must specify the ID of the interface attachment.
- Attachment *NetworkInterfaceAttachmentChanges `locationName:"attachment" type:"structure"`
-
- // A description for the network interface.
- Description *AttributeValue `locationName:"description" type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // Changes the security groups for the network interface. The new set of groups
- // you specify replaces the current set. You must specify at least one group,
- // even if it's just the default security group in the VPC. You must specify
- // the ID of the security group, not the name.
- Groups []*string `locationName:"SecurityGroupId" locationNameList:"SecurityGroupId" type:"list"`
-
- // The ID of the network interface.
- //
- // NetworkInterfaceId is a required field
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string" required:"true"`
-
- // Indicates whether source/destination checking is enabled. A value of true
- // means checking is enabled, and false means checking is disabled. This value
- // must be false for a NAT instance to perform NAT. For more information, see
- // NAT Instances (http://docs.aws.amazon.com/AmazonVPC/latest/UserGuide/VPC_NAT_Instance.html)
- // in the Amazon Virtual Private Cloud User Guide.
- SourceDestCheck *AttributeBooleanValue `locationName:"sourceDestCheck" type:"structure"`
-}
-
-// String returns the string representation
-func (s ModifyNetworkInterfaceAttributeInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ModifyNetworkInterfaceAttributeInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *ModifyNetworkInterfaceAttributeInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "ModifyNetworkInterfaceAttributeInput"}
- if s.NetworkInterfaceId == nil {
- invalidParams.Add(request.NewErrParamRequired("NetworkInterfaceId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAttachment sets the Attachment field's value.
-func (s *ModifyNetworkInterfaceAttributeInput) SetAttachment(v *NetworkInterfaceAttachmentChanges) *ModifyNetworkInterfaceAttributeInput {
- s.Attachment = v
- return s
-}
-
-// SetDescription sets the Description field's value.
-func (s *ModifyNetworkInterfaceAttributeInput) SetDescription(v *AttributeValue) *ModifyNetworkInterfaceAttributeInput {
- s.Description = v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *ModifyNetworkInterfaceAttributeInput) SetDryRun(v bool) *ModifyNetworkInterfaceAttributeInput {
- s.DryRun = &v
- return s
-}
-
-// SetGroups sets the Groups field's value.
-func (s *ModifyNetworkInterfaceAttributeInput) SetGroups(v []*string) *ModifyNetworkInterfaceAttributeInput {
- s.Groups = v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *ModifyNetworkInterfaceAttributeInput) SetNetworkInterfaceId(v string) *ModifyNetworkInterfaceAttributeInput {
- s.NetworkInterfaceId = &v
- return s
-}
-
-// SetSourceDestCheck sets the SourceDestCheck field's value.
-func (s *ModifyNetworkInterfaceAttributeInput) SetSourceDestCheck(v *AttributeBooleanValue) *ModifyNetworkInterfaceAttributeInput {
- s.SourceDestCheck = v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ModifyNetworkInterfaceAttributeOutput
-type ModifyNetworkInterfaceAttributeOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s ModifyNetworkInterfaceAttributeOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s ModifyNetworkInterfaceAttributeOutput) GoString() string {
- return s.String()
-}
-
-// Contains the parameters for DescribeNetworkInterfaceAttribute.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeNetworkInterfaceAttributeRequest
-type DescribeNetworkInterfaceAttributeInput struct {
- _ struct{} `type:"structure"`
-
- // The attribute of the network interface. This parameter is required.
- Attribute *string `locationName:"attribute" type:"string" enum:"NetworkInterfaceAttribute"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the network interface.
- //
- // NetworkInterfaceId is a required field
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s DescribeNetworkInterfaceAttributeInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeNetworkInterfaceAttributeInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DescribeNetworkInterfaceAttributeInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DescribeNetworkInterfaceAttributeInput"}
- if s.NetworkInterfaceId == nil {
- invalidParams.Add(request.NewErrParamRequired("NetworkInterfaceId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAttribute sets the Attribute field's value.
-func (s *DescribeNetworkInterfaceAttributeInput) SetAttribute(v string) *DescribeNetworkInterfaceAttributeInput {
- s.Attribute = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DescribeNetworkInterfaceAttributeInput) SetDryRun(v bool) *DescribeNetworkInterfaceAttributeInput {
- s.DryRun = &v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *DescribeNetworkInterfaceAttributeInput) SetNetworkInterfaceId(v string) *DescribeNetworkInterfaceAttributeInput {
- s.NetworkInterfaceId = &v
- return s
-}
-
-// Contains the output of DescribeNetworkInterfaceAttribute.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DescribeNetworkInterfaceAttributeResult
-type DescribeNetworkInterfaceAttributeOutput struct {
- _ struct{} `type:"structure"`
-
- // The attachment (if any) of the network interface.
- Attachment *NetworkInterfaceAttachment `locationName:"attachment" type:"structure"`
-
- // The description of the network interface.
- Description *AttributeValue `locationName:"description" type:"structure"`
-
- // The security groups associated with the network interface.
- Groups []*GroupIdentifier `locationName:"groupSet" locationNameList:"item" type:"list"`
-
- // The ID of the network interface.
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
-
- // Indicates whether source/destination checking is enabled.
- SourceDestCheck *AttributeBooleanValue `locationName:"sourceDestCheck" type:"structure"`
-}
-
-// String returns the string representation
-func (s DescribeNetworkInterfaceAttributeOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DescribeNetworkInterfaceAttributeOutput) GoString() string {
- return s.String()
-}
-
-// SetAttachment sets the Attachment field's value.
-func (s *DescribeNetworkInterfaceAttributeOutput) SetAttachment(v *NetworkInterfaceAttachment) *DescribeNetworkInterfaceAttributeOutput {
- s.Attachment = v
- return s
-}
-
-// SetDescription sets the Description field's value.
-func (s *DescribeNetworkInterfaceAttributeOutput) SetDescription(v *AttributeValue) *DescribeNetworkInterfaceAttributeOutput {
- s.Description = v
- return s
-}
-
-// SetGroups sets the Groups field's value.
-func (s *DescribeNetworkInterfaceAttributeOutput) SetGroups(v []*GroupIdentifier) *DescribeNetworkInterfaceAttributeOutput {
- s.Groups = v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *DescribeNetworkInterfaceAttributeOutput) SetNetworkInterfaceId(v string) *DescribeNetworkInterfaceAttributeOutput {
- s.NetworkInterfaceId = &v
- return s
-}
-
-// SetSourceDestCheck sets the SourceDestCheck field's value.
-func (s *DescribeNetworkInterfaceAttributeOutput) SetSourceDestCheck(v *AttributeBooleanValue) *DescribeNetworkInterfaceAttributeOutput {
- s.SourceDestCheck = v
- return s
-}
-
-// Describes a network interface.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/NetworkInterface
-type NetworkInterface struct {
- _ struct{} `type:"structure"`
-
- // The association information for an Elastic IP address (IPv4) associated with
- // the network interface.
- Association *NetworkInterfaceAssociation `locationName:"association" type:"structure"`
-
- // The network interface attachment.
- Attachment *NetworkInterfaceAttachment `locationName:"attachment" type:"structure"`
-
- // The Availability Zone.
- AvailabilityZone *string `locationName:"availabilityZone" type:"string"`
-
- // A description.
- Description *string `locationName:"description" type:"string"`
-
- // Any security groups for the network interface.
- Groups []*GroupIdentifier `locationName:"groupSet" locationNameList:"item" type:"list"`
-
- // The type of interface.
- InterfaceType *string `locationName:"interfaceType" type:"string" enum:"NetworkInterfaceType"`
-
- // The IPv6 addresses associated with the network interface.
- Ipv6Addresses []*NetworkInterfaceIpv6Address `locationName:"ipv6AddressesSet" locationNameList:"item" type:"list"`
-
- // The MAC address.
- MacAddress *string `locationName:"macAddress" type:"string"`
-
- // The ID of the network interface.
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string"`
-
- // The AWS account ID of the owner of the network interface.
- OwnerId *string `locationName:"ownerId" type:"string"`
-
- // The private DNS name.
- PrivateDnsName *string `locationName:"privateDnsName" type:"string"`
-
- // The IPv4 address of the network interface within the subnet.
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
-
- // The private IPv4 addresses associated with the network interface.
- PrivateIpAddresses []*NetworkInterfacePrivateIpAddress `locationName:"privateIpAddressesSet" locationNameList:"item" type:"list"`
-
- // The ID of the entity that launched the instance on your behalf (for example,
- // AWS Management Console or Auto Scaling).
- RequesterId *string `locationName:"requesterId" type:"string"`
-
- // Indicates whether the network interface is being managed by AWS.
- RequesterManaged *bool `locationName:"requesterManaged" type:"boolean"`
-
- // Indicates whether traffic to or from the instance is validated.
- SourceDestCheck *bool `locationName:"sourceDestCheck" type:"boolean"`
-
- // The status of the network interface.
- Status *string `locationName:"status" type:"string" enum:"NetworkInterfaceStatus"`
-
- // The ID of the subnet.
- SubnetId *string `locationName:"subnetId" type:"string"`
-
- // Any tags assigned to the network interface.
- TagSet []*Tag `locationName:"tagSet" locationNameList:"item" type:"list"`
-
- // The ID of the VPC.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-// String returns the string representation
-func (s NetworkInterface) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s NetworkInterface) GoString() string {
- return s.String()
-}
-
-// SetAssociation sets the Association field's value.
-func (s *NetworkInterface) SetAssociation(v *NetworkInterfaceAssociation) *NetworkInterface {
- s.Association = v
- return s
-}
-
-// SetAttachment sets the Attachment field's value.
-func (s *NetworkInterface) SetAttachment(v *NetworkInterfaceAttachment) *NetworkInterface {
- s.Attachment = v
- return s
-}
-
-// SetAvailabilityZone sets the AvailabilityZone field's value.
-func (s *NetworkInterface) SetAvailabilityZone(v string) *NetworkInterface {
- s.AvailabilityZone = &v
- return s
-}
-
-// SetDescription sets the Description field's value.
-func (s *NetworkInterface) SetDescription(v string) *NetworkInterface {
- s.Description = &v
- return s
-}
-
-// SetGroups sets the Groups field's value.
-func (s *NetworkInterface) SetGroups(v []*GroupIdentifier) *NetworkInterface {
- s.Groups = v
- return s
-}
-
-// SetInterfaceType sets the InterfaceType field's value.
-func (s *NetworkInterface) SetInterfaceType(v string) *NetworkInterface {
- s.InterfaceType = &v
- return s
-}
-
-// SetIpv6Addresses sets the Ipv6Addresses field's value.
-func (s *NetworkInterface) SetIpv6Addresses(v []*NetworkInterfaceIpv6Address) *NetworkInterface {
- s.Ipv6Addresses = v
- return s
-}
-
-// SetMacAddress sets the MacAddress field's value.
-func (s *NetworkInterface) SetMacAddress(v string) *NetworkInterface {
- s.MacAddress = &v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *NetworkInterface) SetNetworkInterfaceId(v string) *NetworkInterface {
- s.NetworkInterfaceId = &v
- return s
-}
-
-// SetOwnerId sets the OwnerId field's value.
-func (s *NetworkInterface) SetOwnerId(v string) *NetworkInterface {
- s.OwnerId = &v
- return s
-}
-
-// SetPrivateDnsName sets the PrivateDnsName field's value.
-func (s *NetworkInterface) SetPrivateDnsName(v string) *NetworkInterface {
- s.PrivateDnsName = &v
- return s
-}
-
-// SetPrivateIpAddress sets the PrivateIpAddress field's value.
-func (s *NetworkInterface) SetPrivateIpAddress(v string) *NetworkInterface {
- s.PrivateIpAddress = &v
- return s
-}
-
-// SetPrivateIpAddresses sets the PrivateIpAddresses field's value.
-func (s *NetworkInterface) SetPrivateIpAddresses(v []*NetworkInterfacePrivateIpAddress) *NetworkInterface {
- s.PrivateIpAddresses = v
- return s
-}
-
-// SetRequesterId sets the RequesterId field's value.
-func (s *NetworkInterface) SetRequesterId(v string) *NetworkInterface {
- s.RequesterId = &v
- return s
-}
-
-// SetRequesterManaged sets the RequesterManaged field's value.
-func (s *NetworkInterface) SetRequesterManaged(v bool) *NetworkInterface {
- s.RequesterManaged = &v
- return s
-}
-
-// SetSourceDestCheck sets the SourceDestCheck field's value.
-func (s *NetworkInterface) SetSourceDestCheck(v bool) *NetworkInterface {
- s.SourceDestCheck = &v
- return s
-}
-
-// SetStatus sets the Status field's value.
-func (s *NetworkInterface) SetStatus(v string) *NetworkInterface {
- s.Status = &v
- return s
-}
-
-// SetSubnetId sets the SubnetId field's value.
-func (s *NetworkInterface) SetSubnetId(v string) *NetworkInterface {
- s.SubnetId = &v
- return s
-}
-
-// SetTagSet sets the TagSet field's value.
-func (s *NetworkInterface) SetTagSet(v []*Tag) *NetworkInterface {
- s.TagSet = v
- return s
-}
-
-// SetVpcId sets the VpcId field's value.
-func (s *NetworkInterface) SetVpcId(v string) *NetworkInterface {
- s.VpcId = &v
- return s
-}
-
-// Describes association information for an Elastic IP address (IPv4 only).
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/NetworkInterfaceAssociation
-type NetworkInterfaceAssociation struct {
- _ struct{} `type:"structure"`
-
- // The allocation ID.
- AllocationId *string `locationName:"allocationId" type:"string"`
-
- // The association ID.
- AssociationId *string `locationName:"associationId" type:"string"`
-
- // The ID of the Elastic IP address owner.
- IpOwnerId *string `locationName:"ipOwnerId" type:"string"`
-
- // The public DNS name.
- PublicDnsName *string `locationName:"publicDnsName" type:"string"`
-
- // The address of the Elastic IP address bound to the network interface.
- PublicIp *string `locationName:"publicIp" type:"string"`
-}
-
-// String returns the string representation
-func (s NetworkInterfaceAssociation) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s NetworkInterfaceAssociation) GoString() string {
- return s.String()
-}
-
-// SetAllocationId sets the AllocationId field's value.
-func (s *NetworkInterfaceAssociation) SetAllocationId(v string) *NetworkInterfaceAssociation {
- s.AllocationId = &v
- return s
-}
-
-// SetAssociationId sets the AssociationId field's value.
-func (s *NetworkInterfaceAssociation) SetAssociationId(v string) *NetworkInterfaceAssociation {
- s.AssociationId = &v
- return s
-}
-
-// SetIpOwnerId sets the IpOwnerId field's value.
-func (s *NetworkInterfaceAssociation) SetIpOwnerId(v string) *NetworkInterfaceAssociation {
- s.IpOwnerId = &v
- return s
-}
-
-// SetPublicDnsName sets the PublicDnsName field's value.
-func (s *NetworkInterfaceAssociation) SetPublicDnsName(v string) *NetworkInterfaceAssociation {
- s.PublicDnsName = &v
- return s
-}
-
-// SetPublicIp sets the PublicIp field's value.
-func (s *NetworkInterfaceAssociation) SetPublicIp(v string) *NetworkInterfaceAssociation {
- s.PublicIp = &v
- return s
-}
-
-// Describes a network interface attachment.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/NetworkInterfaceAttachment
-type NetworkInterfaceAttachment struct {
- _ struct{} `type:"structure"`
-
- // The timestamp indicating when the attachment initiated.
- AttachTime *time.Time `locationName:"attachTime" type:"timestamp" timestampFormat:"iso8601"`
-
- // The ID of the network interface attachment.
- AttachmentId *string `locationName:"attachmentId" type:"string"`
-
- // Indicates whether the network interface is deleted when the instance is terminated.
- DeleteOnTermination *bool `locationName:"deleteOnTermination" type:"boolean"`
-
- // The device index of the network interface attachment on the instance.
- DeviceIndex *int64 `locationName:"deviceIndex" type:"integer"`
-
- // The ID of the instance.
- InstanceId *string `locationName:"instanceId" type:"string"`
-
- // The AWS account ID of the owner of the instance.
- InstanceOwnerId *string `locationName:"instanceOwnerId" type:"string"`
-
- // The attachment state.
- Status *string `locationName:"status" type:"string" enum:"AttachmentStatus"`
-}
-
-// String returns the string representation
-func (s NetworkInterfaceAttachment) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s NetworkInterfaceAttachment) GoString() string {
- return s.String()
-}
-
-// SetAttachTime sets the AttachTime field's value.
-func (s *NetworkInterfaceAttachment) SetAttachTime(v time.Time) *NetworkInterfaceAttachment {
- s.AttachTime = &v
- return s
-}
-
-// SetAttachmentId sets the AttachmentId field's value.
-func (s *NetworkInterfaceAttachment) SetAttachmentId(v string) *NetworkInterfaceAttachment {
- s.AttachmentId = &v
- return s
-}
-
-// SetDeleteOnTermination sets the DeleteOnTermination field's value.
-func (s *NetworkInterfaceAttachment) SetDeleteOnTermination(v bool) *NetworkInterfaceAttachment {
- s.DeleteOnTermination = &v
- return s
-}
-
-// SetDeviceIndex sets the DeviceIndex field's value.
-func (s *NetworkInterfaceAttachment) SetDeviceIndex(v int64) *NetworkInterfaceAttachment {
- s.DeviceIndex = &v
- return s
-}
-
-// SetInstanceId sets the InstanceId field's value.
-func (s *NetworkInterfaceAttachment) SetInstanceId(v string) *NetworkInterfaceAttachment {
- s.InstanceId = &v
- return s
-}
-
-// SetInstanceOwnerId sets the InstanceOwnerId field's value.
-func (s *NetworkInterfaceAttachment) SetInstanceOwnerId(v string) *NetworkInterfaceAttachment {
- s.InstanceOwnerId = &v
- return s
-}
-
-// SetStatus sets the Status field's value.
-func (s *NetworkInterfaceAttachment) SetStatus(v string) *NetworkInterfaceAttachment {
- s.Status = &v
- return s
-}
-
-// Describes an IPv6 address associated with a network interface.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/NetworkInterfaceIpv6Address
-type NetworkInterfaceIpv6Address struct {
- _ struct{} `type:"structure"`
-
- // The IPv6 address.
- Ipv6Address *string `locationName:"ipv6Address" type:"string"`
-}
-
-// String returns the string representation
-func (s NetworkInterfaceIpv6Address) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s NetworkInterfaceIpv6Address) GoString() string {
- return s.String()
-}
-
-// SetIpv6Address sets the Ipv6Address field's value.
-func (s *NetworkInterfaceIpv6Address) SetIpv6Address(v string) *NetworkInterfaceIpv6Address {
- s.Ipv6Address = &v
- return s
-}
-
-// Describes the private IPv4 address of a network interface.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/NetworkInterfacePrivateIpAddress
-type NetworkInterfacePrivateIpAddress struct {
- _ struct{} `type:"structure"`
-
- // The association information for an Elastic IP address (IPv4) associated with
- // the network interface.
- Association *NetworkInterfaceAssociation `locationName:"association" type:"structure"`
-
- // Indicates whether this IPv4 address is the primary private IPv4 address of
- // the network interface.
- Primary *bool `locationName:"primary" type:"boolean"`
-
- // The private DNS name.
- PrivateDnsName *string `locationName:"privateDnsName" type:"string"`
-
- // The private IPv4 address.
- PrivateIpAddress *string `locationName:"privateIpAddress" type:"string"`
-}
-
-// String returns the string representation
-func (s NetworkInterfacePrivateIpAddress) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s NetworkInterfacePrivateIpAddress) GoString() string {
- return s.String()
-}
-
-// SetAssociation sets the Association field's value.
-func (s *NetworkInterfacePrivateIpAddress) SetAssociation(v *NetworkInterfaceAssociation) *NetworkInterfacePrivateIpAddress {
- s.Association = v
- return s
-}
-
-// SetPrimary sets the Primary field's value.
-func (s *NetworkInterfacePrivateIpAddress) SetPrimary(v bool) *NetworkInterfacePrivateIpAddress {
- s.Primary = &v
- return s
-}
-
-// SetPrivateDnsName sets the PrivateDnsName field's value.
-func (s *NetworkInterfacePrivateIpAddress) SetPrivateDnsName(v string) *NetworkInterfacePrivateIpAddress {
- s.PrivateDnsName = &v
- return s
-}
-
-// SetPrivateIpAddress sets the PrivateIpAddress field's value.
-func (s *NetworkInterfacePrivateIpAddress) SetPrivateIpAddress(v string) *NetworkInterfacePrivateIpAddress {
- s.PrivateIpAddress = &v
- return s
-}
-
-// Describes an IPv6 address.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/InstanceIpv6Address
-type InstanceIpv6Address struct {
- _ struct{} `type:"structure"`
-
- // The IPv6 address.
- Ipv6Address *string `locationName:"ipv6Address" type:"string"`
-}
-
-// String returns the string representation
-func (s InstanceIpv6Address) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s InstanceIpv6Address) GoString() string {
- return s.String()
-}
-
-// SetIpv6Address sets the Ipv6Address field's value.
-func (s *InstanceIpv6Address) SetIpv6Address(v string) *InstanceIpv6Address {
- s.Ipv6Address = &v
- return s
-}
-
-// Describes an attachment change.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/NetworkInterfaceAttachmentChanges
-type NetworkInterfaceAttachmentChanges struct {
- _ struct{} `type:"structure"`
-
- // The ID of the network interface attachment.
- AttachmentId *string `locationName:"attachmentId" type:"string"`
-
- // Indicates whether the network interface is deleted when the instance is terminated.
- DeleteOnTermination *bool `locationName:"deleteOnTermination" type:"boolean"`
-}
-
-// String returns the string representation
-func (s NetworkInterfaceAttachmentChanges) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s NetworkInterfaceAttachmentChanges) GoString() string {
- return s.String()
-}
-
-// SetAttachmentId sets the AttachmentId field's value.
-func (s *NetworkInterfaceAttachmentChanges) SetAttachmentId(v string) *NetworkInterfaceAttachmentChanges {
- s.AttachmentId = &v
- return s
-}
-
-// SetDeleteOnTermination sets the DeleteOnTermination field's value.
-func (s *NetworkInterfaceAttachmentChanges) SetDeleteOnTermination(v bool) *NetworkInterfaceAttachmentChanges {
- s.DeleteOnTermination = &v
- return s
-}
-
-// String returns the string representation
-func (s PrivateIpAddressSpecification) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s PrivateIpAddressSpecification) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *PrivateIpAddressSpecification) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "PrivateIpAddressSpecification"}
- if s.PrivateIpAddress == nil {
- invalidParams.Add(request.NewErrParamRequired("PrivateIpAddress"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetPrimary sets the Primary field's value.
-func (s *PrivateIpAddressSpecification) SetPrimary(v bool) *PrivateIpAddressSpecification {
- s.Primary = &v
- return s
-}
-
-// SetPrivateIpAddress sets the PrivateIpAddress field's value.
-func (s *PrivateIpAddressSpecification) SetPrivateIpAddress(v string) *PrivateIpAddressSpecification {
- s.PrivateIpAddress = &v
- return s
-}
-
-// Contains the parameters for DetachNetworkInterface.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DetachNetworkInterfaceRequest
-type DetachNetworkInterfaceInput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the attachment.
- //
- // AttachmentId is a required field
- AttachmentId *string `locationName:"attachmentId" type:"string" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // Specifies whether to force a detachment.
- Force *bool `locationName:"force" type:"boolean"`
-}
-
-// String returns the string representation
-func (s DetachNetworkInterfaceInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DetachNetworkInterfaceInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *DetachNetworkInterfaceInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "DetachNetworkInterfaceInput"}
- if s.AttachmentId == nil {
- invalidParams.Add(request.NewErrParamRequired("AttachmentId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAttachmentId sets the AttachmentId field's value.
-func (s *DetachNetworkInterfaceInput) SetAttachmentId(v string) *DetachNetworkInterfaceInput {
- s.AttachmentId = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *DetachNetworkInterfaceInput) SetDryRun(v bool) *DetachNetworkInterfaceInput {
- s.DryRun = &v
- return s
-}
-
-// SetForce sets the Force field's value.
-func (s *DetachNetworkInterfaceInput) SetForce(v bool) *DetachNetworkInterfaceInput {
- s.Force = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DetachNetworkInterfaceOutput
-type DetachNetworkInterfaceOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s DetachNetworkInterfaceOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s DetachNetworkInterfaceOutput) GoString() string {
- return s.String()
-}
-
-// Contains the parameters for AttachNetworkInterface.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/AttachNetworkInterfaceRequest
-type AttachNetworkInterfaceInput struct {
- _ struct{} `type:"structure"`
-
- // The index of the device for the network interface attachment.
- //
- // DeviceIndex is a required field
- DeviceIndex *int64 `locationName:"deviceIndex" type:"integer" required:"true"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the instance.
- //
- // InstanceId is a required field
- InstanceId *string `locationName:"instanceId" type:"string" required:"true"`
-
- // The ID of the network interface.
- //
- // NetworkInterfaceId is a required field
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string" required:"true"`
-}
-
-// String returns the string representation
-func (s AttachNetworkInterfaceInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AttachNetworkInterfaceInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *AttachNetworkInterfaceInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "AttachNetworkInterfaceInput"}
- if s.DeviceIndex == nil {
- invalidParams.Add(request.NewErrParamRequired("DeviceIndex"))
- }
- if s.InstanceId == nil {
- invalidParams.Add(request.NewErrParamRequired("InstanceId"))
- }
- if s.NetworkInterfaceId == nil {
- invalidParams.Add(request.NewErrParamRequired("NetworkInterfaceId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetDeviceIndex sets the DeviceIndex field's value.
-func (s *AttachNetworkInterfaceInput) SetDeviceIndex(v int64) *AttachNetworkInterfaceInput {
- s.DeviceIndex = &v
- return s
-}
-
-// SetDryRun sets the DryRun field's value.
-func (s *AttachNetworkInterfaceInput) SetDryRun(v bool) *AttachNetworkInterfaceInput {
- s.DryRun = &v
- return s
-}
-
-// SetInstanceId sets the InstanceId field's value.
-func (s *AttachNetworkInterfaceInput) SetInstanceId(v string) *AttachNetworkInterfaceInput {
- s.InstanceId = &v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *AttachNetworkInterfaceInput) SetNetworkInterfaceId(v string) *AttachNetworkInterfaceInput {
- s.NetworkInterfaceId = &v
- return s
-}
-
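// A small sketch, assuming the same package: all three required fields of
// AttachNetworkInterfaceInput are set, so Validate returns nil; the IDs are placeholders.
func exampleAttachNetworkInterfaceInput() error {
	input := (&AttachNetworkInterfaceInput{}).
		SetDeviceIndex(1).
		SetInstanceId("i-12345678").
		SetNetworkInterfaceId("eni-12345678")
	return input.Validate()
}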
-// Contains the output of AttachNetworkInterface.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/AttachNetworkInterfaceResult
-type AttachNetworkInterfaceOutput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the network interface attachment.
- AttachmentId *string `locationName:"attachmentId" type:"string"`
-}
-
-// String returns the string representation
-func (s AttachNetworkInterfaceOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AttachNetworkInterfaceOutput) GoString() string {
- return s.String()
-}
-
-// SetAttachmentId sets the AttachmentId field's value.
-func (s *AttachNetworkInterfaceOutput) SetAttachmentId(v string) *AttachNetworkInterfaceOutput {
- s.AttachmentId = &v
- return s
-}
-
-// Contains the parameters for AssignPrivateIpAddresses.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/AssignPrivateIpAddressesRequest
-type AssignPrivateIpAddressesInput struct {
- _ struct{} `type:"structure"`
-
- // Indicates whether to allow an IP address that is already assigned to another
- // network interface or instance to be reassigned to the specified network interface.
- AllowReassignment *bool `locationName:"allowReassignment" type:"boolean"`
-
- // The ID of the network interface.
- //
- // NetworkInterfaceId is a required field
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string" required:"true"`
-
- // One or more IP addresses to be assigned as a secondary private IP address
- // to the network interface. You can't specify this parameter when also specifying
- // a number of secondary IP addresses.
- //
- // If you don't specify an IP address, Amazon EC2 automatically selects an IP
- // address within the subnet range.
- PrivateIpAddresses []*string `locationName:"privateIpAddress" locationNameList:"PrivateIpAddress" type:"list"`
-
- // The number of secondary IP addresses to assign to the network interface.
- // You can't specify this parameter when also specifying private IP addresses.
- SecondaryPrivateIpAddressCount *int64 `locationName:"secondaryPrivateIpAddressCount" type:"integer"`
-}
-
-// String returns the string representation
-func (s AssignPrivateIpAddressesInput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AssignPrivateIpAddressesInput) GoString() string {
- return s.String()
-}
-
-// Validate inspects the fields of the type to determine if they are valid.
-func (s *AssignPrivateIpAddressesInput) Validate() error {
- invalidParams := request.ErrInvalidParams{Context: "AssignPrivateIpAddressesInput"}
- if s.NetworkInterfaceId == nil {
- invalidParams.Add(request.NewErrParamRequired("NetworkInterfaceId"))
- }
-
- if invalidParams.Len() > 0 {
- return invalidParams
- }
- return nil
-}
-
-// SetAllowReassignment sets the AllowReassignment field's value.
-func (s *AssignPrivateIpAddressesInput) SetAllowReassignment(v bool) *AssignPrivateIpAddressesInput {
- s.AllowReassignment = &v
- return s
-}
-
-// SetNetworkInterfaceId sets the NetworkInterfaceId field's value.
-func (s *AssignPrivateIpAddressesInput) SetNetworkInterfaceId(v string) *AssignPrivateIpAddressesInput {
- s.NetworkInterfaceId = &v
- return s
-}
-
-// SetPrivateIpAddresses sets the PrivateIpAddresses field's value.
-func (s *AssignPrivateIpAddressesInput) SetPrivateIpAddresses(v []*string) *AssignPrivateIpAddressesInput {
- s.PrivateIpAddresses = v
- return s
-}
-
-// SetSecondaryPrivateIpAddressCount sets the SecondaryPrivateIpAddressCount field's value.
-func (s *AssignPrivateIpAddressesInput) SetSecondaryPrivateIpAddressCount(v int64) *AssignPrivateIpAddressesInput {
- s.SecondaryPrivateIpAddressCount = &v
- return s
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/AssignPrivateIpAddressesOutput
-type AssignPrivateIpAddressesOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// String returns the string representation
-func (s AssignPrivateIpAddressesOutput) String() string {
- return awsutil.Prettify(s)
-}
-
-// GoString returns the string representation
-func (s AssignPrivateIpAddressesOutput) GoString() string {
- return s.String()
-}
-
-// Contains the parameters for UnassignPrivateIpAddresses.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/UnassignPrivateIpAddressesRequest
-type UnassignPrivateIpAddressesInput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the network interface.
- //
- // NetworkInterfaceId is a required field
- NetworkInterfaceId *string `locationName:"networkInterfaceId" type:"string" required:"true"`
-
- // The secondary private IP addresses to unassign from the network interface.
- // You can specify this option multiple times to unassign more than one IP address.
- //
- // PrivateIpAddresses is a required field
- PrivateIpAddresses []*string `locationName:"privateIpAddress" locationNameList:"PrivateIpAddress" type:"list" required:"true"`
-}
-
-type PurchaseReservedInstancesOfferingInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The number of Reserved Instances to purchase.
- //
- // InstanceCount is a required field
- InstanceCount *int64 `type:"integer" required:"true"`
-
- // Specified for Reserved Instance Marketplace offerings to limit the total
- // order and ensure that the Reserved Instances are not purchased at unexpected
- // prices.
- LimitPrice *ReservedInstanceLimitPrice `locationName:"limitPrice" type:"structure"`
-
- // The ID of the Reserved Instance offering to purchase.
- //
- // ReservedInstancesOfferingId is a required field
- ReservedInstancesOfferingId *string `type:"string" required:"true"`
-}
-
-type PurchaseReservedInstancesOfferingOutput struct {
- _ struct{} `type:"structure"`
-
- // The IDs of the purchased Reserved Instances.
- ReservedInstancesId *string `locationName:"reservedInstancesId" type:"string"`
-}
-
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ReservedInstanceLimitPrice
-type ReservedInstanceLimitPrice struct {
- _ struct{} `type:"structure"`
-
- // Used for Reserved Instance Marketplace offerings. Specifies the limit price
- // on the total order (instanceCount * price).
- Amount *float64 `locationName:"amount" type:"double"`
-
- // The currency in which the limitPrice amount is specified. At this time, the
- // only supported currency is USD.
- CurrencyCode *string `locationName:"currencyCode" type:"string" enum:"CurrencyCodeValues"`
-}
-
-type UnassignPrivateIpAddressesOutput struct {
- _ struct{} `type:"structure"`
-}
-
-// Contains the parameters for CreateVpcEndpoint.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateVpcEndpointRequest
-type CreateVpcEndpointInput struct {
- _ struct{} `type:"structure"`
-
- // Unique, case-sensitive identifier you provide to ensure the idempotency of
- // the request. For more information, see How to Ensure Idempotency (http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html).
- ClientToken *string `type:"string"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `type:"boolean"`
-
- // A policy to attach to the endpoint that controls access to the service. The
- // policy must be in valid JSON format. If this parameter is not specified,
- // we attach a default policy that allows full access to the service.
- PolicyDocument *string `type:"string"`
-
- // One or more route table IDs.
- RouteTableIds []*string `locationName:"RouteTableId" locationNameList:"item" type:"list"`
-
- // The AWS service name, in the form com.amazonaws.region.service. To get a
- // list of available services, use the DescribeVpcEndpointServices request.
- //
- // ServiceName is a required field
- ServiceName *string `type:"string" required:"true"`
-
- // The ID of the VPC in which the endpoint will be used.
- //
- // VpcId is a required field
- VpcId *string `type:"string" required:"true"`
-}
-
-// Contains the output of CreateVpcEndpoint.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/CreateVpcEndpointResult
-type CreateVpcEndpointOutput struct {
- _ struct{} `type:"structure"`
-
- // Unique, case-sensitive identifier you provide to ensure the idempotency of
- // the request.
- ClientToken *string `locationName:"clientToken" type:"string"`
-
- // Information about the endpoint.
- VpcEndpoint *VpcEndpoint `locationName:"vpcEndpoint" type:"structure"`
-}
-
-// Describes a VPC endpoint.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/VpcEndpoint
-type VpcEndpoint struct {
- _ struct{} `type:"structure"`
-
- // The date and time the VPC endpoint was created.
- CreationTimestamp *time.Time `locationName:"creationTimestamp" type:"timestamp" timestampFormat:"iso8601"`
-
- // The policy document associated with the endpoint.
- PolicyDocument *string `locationName:"policyDocument" type:"string"`
-
- // One or more route tables associated with the endpoint.
- RouteTableIds []*string `locationName:"routeTableIdSet" locationNameList:"item" type:"list"`
-
- // The name of the AWS service to which the endpoint is associated.
- ServiceName *string `locationName:"serviceName" type:"string"`
-
- // The state of the VPC endpoint.
- State *string `locationName:"state" type:"string" enum:"State"`
-
- // The ID of the VPC endpoint.
- VpcEndpointId *string `locationName:"vpcEndpointId" type:"string"`
-
- // The ID of the VPC to which the endpoint is associated.
- VpcId *string `locationName:"vpcId" type:"string"`
-}
-
-// Contains the parameters for DescribeVpcEndpoints.
-type DescribeVpcEndpointsInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `type:"boolean"`
-
- // One or more filters.
- //
- // * service-name: The name of the service.
- //
- // * vpc-id: The ID of the VPC in which the endpoint resides.
- //
- // * vpc-endpoint-id: The ID of the endpoint.
- //
- // * vpc-endpoint-state: The state of the endpoint. (pending | available
- // | deleting | deleted)
- Filters []*Filter `locationName:"Filter" locationNameList:"Filter" type:"list"`
-
- // The maximum number of items to return for this request. The request returns
- // a token that you can specify in a subsequent call to get the next set of
- // results.
- //
- // Constraint: If the value is greater than 1000, we return only 1000 items.
- MaxResults *int64 `type:"integer"`
-
- // The token for the next set of items to return. (You received this token from
- // a prior call.)
- NextToken *string `type:"string"`
-
- // One or more endpoint IDs.
- VpcEndpointIds []*string `locationName:"VpcEndpointId" locationNameList:"item" type:"list"`
-}
-
-// Contains the output of DescribeVpcEndpoints.
-type DescribeVpcEndpointsOutput struct {
- _ struct{} `type:"structure"`
- NextToken *string `locationName:"nextToken" type:"string"`
- VpcEndpoints []*VpcEndpoint `locationName:"vpcEndpointSet" locationNameList:"item" type:"list"` // Information about the endpoints.
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
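The MaxResults/NextToken pair on DescribeVpcEndpointsInput implies the usual continuation-token pagination loop against the DescribeVpcEndpoints call declared in the VMService interface further down in this diff. A minimal sketch of that loop over the removed types, using only fields shown above (the helper function itself is hypothetical; aws.Int64 comes from the aws-sdk-go dependency these files already import):

package fcu

import "github.com/aws/aws-sdk-go/aws"

// listAllVpcEndpoints is an illustrative sketch, not part of the deleted
// sources: call DescribeVpcEndpoints repeatedly, feeding the returned
// NextToken back in, until the service stops returning a continuation token.
func listAllVpcEndpoints(v VMService) ([]*VpcEndpoint, error) {
	var endpoints []*VpcEndpoint
	input := &DescribeVpcEndpointsInput{MaxResults: aws.Int64(1000)}

	for {
		out, err := v.DescribeVpcEndpoints(input)
		if err != nil {
			return nil, err
		}
		endpoints = append(endpoints, out.VpcEndpoints...)
		if out.NextToken == nil || *out.NextToken == "" {
			return endpoints, nil
		}
		input.NextToken = out.NextToken
	}
}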
-type ModifyVpcEndpointInput struct {
- _ struct{} `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `type:"boolean"`
- PolicyDocument *string `type:"string"` // A policy document to attach to the endpoint. The policy must be in valid JSON format.
- RemoveRouteTableIds []*string `locationName:"RemoveRouteTableId" locationNameList:"item" type:"list"` // One or more route table IDs to disassociate from the endpoint.
- ResetPolicy *bool `type:"boolean"` // Specify true to reset the policy document to the default policy. The default policy allows access to the service.
-	VpcEndpointId       *string   `type:"string" required:"true"`                                      // The ID of the endpoint. VpcEndpointId is a required field.
-	AddRouteTableIds    []*string `locationName:"AddRouteTableId" locationNameList:"item" type:"list"` // One or more route table IDs to associate with the endpoint.
-}
-
-// Contains the output of ModifyVpcEndpoint.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/ModifyVpcEndpointResult
-type ModifyVpcEndpointOutput struct {
- _ struct{} `type:"structure"`
- Return *bool `locationName:"return" type:"boolean"` // Returns true if the request succeeds; otherwise, it returns an error.
-}
-
-type DeleteVpcEndpointsInput struct {
- _ struct{} `type:"structure"`
- VpcEndpointIds []*string `locationName:"VpcEndpointId" locationNameList:"item" type:"list" required:"true"` // One or more endpoint IDs. VpcEndpointIds is a required field
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `type:"boolean"`
-}
-
-// Contains the output of DeleteVpcEndpoints.
-// Please also see https://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/DeleteVpcEndpointsResult
-type DeleteVpcEndpointsOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type ModifySnapshotAttributeInput struct {
- _ struct{} `type:"structure"`
-
- // The snapshot attribute to modify.
- //
- // Only volume creation permissions may be modified at the customer level.
- Attribute *string `type:"string" enum:"SnapshotAttributeName"`
-
- // A JSON representation of the snapshot attribute modification.
- CreateVolumePermission *CreateVolumePermissionModifications `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The group to modify for the snapshot.
- GroupNames []*string `locationName:"UserGroup" locationNameList:"GroupName" type:"list"`
-
- // The type of operation to perform to the attribute.
- OperationType *string `type:"string" enum:"OperationType"`
-
- // The ID of the snapshot.
- //
- // SnapshotId is a required field
- SnapshotId *string `type:"string" required:"true"`
-
- // The account ID to modify for the snapshot.
- UserIds []*string `locationName:"UserId" locationNameList:"UserId" type:"list"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type CreateVolumePermissionModifications struct {
- _ struct{} `type:"structure"`
-
- // Adds a specific AWS account ID or group to a volume's list of create volume
- // permissions.
- Add []*CreateVolumePermission `locationNameList:"item" type:"list"`
-
- // Removes a specific AWS account ID or group from a volume's list of create
- // volume permissions.
- Remove []*CreateVolumePermission `locationNameList:"item" type:"list"`
-}
-
-type CreateVolumePermission struct {
- _ struct{} `type:"structure"`
-
- // The specific group that is to be added or removed from a volume's list of
- // create volume permissions.
- Group *string `locationName:"group" type:"string" enum:"PermissionGroup"`
-
- // The specific AWS account ID that is to be added or removed from a volume's
- // list of create volume permissions.
- UserId *string `locationName:"userId" type:"string"`
-}
-
-type ModifySnapshotAttributeOutput struct {
- _ struct{} `type:"structure"`
-}
-
-type DescribeSnapshotAttributeInput struct {
- _ struct{} `type:"structure"`
-
- // The snapshot attribute you would like to view.
- //
- // Attribute is a required field
- Attribute *string `type:"string" required:"true" enum:"SnapshotAttributeName"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // The ID of the EBS snapshot.
- //
- // SnapshotId is a required field
- SnapshotId *string `type:"string" required:"true"`
-}
-
-type DescribeSnapshotAttributeOutput struct {
- _ struct{} `type:"structure"`
-
- // A list of permissions for creating volumes from the snapshot.
- CreateVolumePermissions []*CreateVolumePermission `locationName:"createVolumePermission" locationNameList:"item" type:"list"`
-
- // A list of product codes.
- ProductCodes []*ProductCode `locationName:"productCodes" locationNameList:"item" type:"list"`
-
- // The ID of the EBS snapshot.
- SnapshotId *string `locationName:"snapshotId" type:"string"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-type ImportSnapshotInput struct {
- _ struct{} `type:"structure"`
-
- // The client-specific data.
- ClientData *ClientData `type:"structure"`
-
- // Token to enable idempotency for VM import requests.
- ClientToken *string `type:"string"`
-
- // The description string for the import snapshot task.
- Description *string `type:"string"`
-
- // Information about the disk container.
- DiskContainer *SnapshotDiskContainer `type:"structure"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `type:"boolean"`
-
- // The name of the role to use when not using the default role, 'vmimport'.
- RoleName *string `type:"string"`
-
- SnapshotLocation *string `type:"string"`
- SnapshotSize *string `type:"string"`
-}
-
-type CopySnapshotInput struct {
- _ struct{} `type:"structure"`
-
- // A description for the EBS snapshot.
- Description *string `type:"string"`
-
- // The destination region to use in the PresignedUrl parameter of a snapshot
- // copy operation. This parameter is only valid for specifying the destination
- // region in a PresignedUrl parameter, where it is required.
- //
- // CopySnapshot sends the snapshot copy to the regional endpoint that you send
- // the HTTP request to, such as ec2.us-east-1.amazonaws.com (in the AWS CLI,
- // this is specified with the --region parameter or the default region in your
- // AWS configuration file).
- DestinationRegion *string `locationName:"destinationRegion" type:"string"`
-
- // Checks whether you have the required permissions for the action, without
- // actually making the request, and provides an error response. If you have
- // the required permissions, the error response is DryRunOperation. Otherwise,
- // it is UnauthorizedOperation.
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // Specifies whether the destination snapshot should be encrypted. You can encrypt
- // a copy of an unencrypted snapshot using this flag, but you cannot use it
- // to create an unencrypted copy from an encrypted snapshot. Your default CMK
- // for EBS is used unless a non-default AWS Key Management Service (AWS KMS)
- // CMK is specified with KmsKeyId. For more information, see Amazon EBS Encryption
- // (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html) in
- // the Amazon Elastic Compute Cloud User Guide.
- Encrypted *bool `locationName:"encrypted" type:"boolean"`
-
- // The full ARN of the AWS Key Management Service (AWS KMS) CMK to use when
- // creating the snapshot copy. This parameter is only required if you want to
- // use a non-default CMK; if this parameter is not specified, the default CMK
- // for EBS is used. The ARN contains the arn:aws:kms namespace, followed by
- // the region of the CMK, the AWS account ID of the CMK owner, the key namespace,
- // and then the CMK ID. For example, arn:aws:kms:us-east-1:012345678910:key/abcd1234-a123-456a-a12b-a123b4cd56ef.
- // The specified CMK must exist in the region that the snapshot is being copied
- // to. If a KmsKeyId is specified, the Encrypted flag must also be set.
- KmsKeyId *string `locationName:"kmsKeyId" type:"string"`
-
- // The pre-signed URL that facilitates copying an encrypted snapshot. This parameter
- // is only required when copying an encrypted snapshot with the Amazon EC2 Query
- // API; it is available as an optional parameter in all other cases. The PresignedUrl
- // should use the snapshot source endpoint, the CopySnapshot action, and include
- // the SourceRegion, SourceSnapshotId, and DestinationRegion parameters. The
- // PresignedUrl must be signed using AWS Signature Version 4. Because EBS snapshots
- // are stored in Amazon S3, the signing algorithm for this parameter uses the
- // same logic that is described in Authenticating Requests by Using Query Parameters
- // (AWS Signature Version 4) (http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html)
- // in the Amazon Simple Storage Service API Reference. An invalid or improperly
- // signed PresignedUrl will cause the copy operation to fail asynchronously,
- // and the snapshot will move to an error state.
- PresignedUrl *string `locationName:"presignedUrl" type:"string"`
-
- // The ID of the region that contains the snapshot to be copied.
- //
- // SourceRegion is a required field
- SourceRegion *string `type:"string" required:"true"`
-
- // The ID of the EBS snapshot to copy.
- //
- // SourceSnapshotId is a required field
- SourceSnapshotId *string `type:"string" required:"true"`
-}
-
-type ImportSnapshotOutput struct {
- _ struct{} `type:"structure"`
-
- // A description of the import snapshot task.
- Description *string `locationName:"description" type:"string"`
-
- // The ID of the import snapshot task.
- ImportTaskId *string `locationName:"importTaskId" type:"string"`
-
- // Information about the import snapshot task.
- SnapshotTaskDetail *SnapshotTaskDetail `locationName:"snapshotTaskDetail" type:"structure"`
-
- Id *string `locationName:"id" type:"string"`
-}
-
-type ClientData struct {
- _ struct{} `type:"structure"`
-
- // A user-defined comment about the disk upload.
- Comment *string `type:"string"`
-
- // The time that the disk upload ends.
- UploadEnd *time.Time `type:"timestamp" timestampFormat:"iso8601"`
-
- // The size of the uploaded disk image, in GiB.
- UploadSize *float64 `type:"double"`
-
- // The time that the disk upload starts.
- UploadStart *time.Time `type:"timestamp" timestampFormat:"iso8601"`
- DryRun *bool `locationName:"dryRun" type:"boolean"`
-
- // Specifies whether the destination snapshot should be encrypted. You can encrypt
- // a copy of an unencrypted snapshot using this flag, but you cannot use it
- // to create an unencrypted copy from an encrypted snapshot. Your default CMK
- // for EBS is used unless a non-default AWS Key Management Service (AWS KMS)
- // CMK is specified with KmsKeyId. For more information, see Amazon EBS Encryption
- // (http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html) in
- // the Amazon Elastic Compute Cloud User Guide.
- Encrypted *bool `locationName:"encrypted" type:"boolean"`
-
- // The full ARN of the AWS Key Management Service (AWS KMS) CMK to use when
- // creating the snapshot copy. This parameter is only required if you want to
- // use a non-default CMK; if this parameter is not specified, the default CMK
- // for EBS is used. The ARN contains the arn:aws:kms namespace, followed by
- // the region of the CMK, the AWS account ID of the CMK owner, the key namespace,
- // and then the CMK ID. For example, arn:aws:kms:us-east-1:012345678910:key/abcd1234-a123-456a-a12b-a123b4cd56ef.
- // The specified CMK must exist in the region that the snapshot is being copied
- // to. If a KmsKeyId is specified, the Encrypted flag must also be set.
- KmsKeyId *string `locationName:"kmsKeyId" type:"string"`
-
- // The pre-signed URL that facilitates copying an encrypted snapshot. This parameter
- // is only required when copying an encrypted snapshot with the Amazon EC2 Query
- // API; it is available as an optional parameter in all other cases. The PresignedUrl
- // should use the snapshot source endpoint, the CopySnapshot action, and include
- // the SourceRegion, SourceSnapshotId, and DestinationRegion parameters. The
- // PresignedUrl must be signed using AWS Signature Version 4. Because EBS snapshots
- // are stored in Amazon S3, the signing algorithm for this parameter uses the
- // same logic that is described in Authenticating Requests by Using Query Parameters
- // (AWS Signature Version 4) (http://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html)
- // in the Amazon Simple Storage Service API Reference. An invalid or improperly
- // signed PresignedUrl will cause the copy operation to fail asynchronously,
- // and the snapshot will move to an error state.
- PresignedUrl *string `locationName:"presignedUrl" type:"string"`
-
- // The ID of the region that contains the snapshot to be copied.
- //
- // SourceRegion is a required field
- SourceRegion *string `type:"string" required:"true"`
-
- // The ID of the EBS snapshot to copy.
- //
- // SourceSnapshotId is a required field
- SourceSnapshotId *string `type:"string" required:"true"`
-}
-
-type CopySnapshotOutput struct {
- _ struct{} `type:"structure"`
-
- // The ID of the new snapshot.
- SnapshotId *string `locationName:"snapshotId" type:"string"`
-
- RequestId *string `locationName:"requestId" type:"string"`
-}
-
-// DescribeVpcEndpointServicesInput represents the input of a DescribeVpcEndpointServices request.
-type DescribeVpcEndpointServicesInput struct {
- _ struct{} `type:"structure"`
- DryRun *bool `type:"boolean"`
- MaxResults *int64 `type:"integer"`
- NextToken *string `type:"string"`
-}
-
-type DescribeVpcEndpointServicesOutput struct {
- _ struct{} `type:"structure"`
- NextToken *string `locationName:"nextToken" type:"string"` // The token to use when requesting the next set of items. If there are no additional items to return, the string is empty.
- ServiceNames []*string `locationName:"serviceNameSet" locationNameList:"item" type:"list"` // A list of supported services.
- RequestID *string `locationName:"requestId" type:"string"`
-}
-
-type CreateImageInput struct {
- _ struct{} `type:"structure"`
- BlockDeviceMappings []*BlockDeviceMapping `locationName:"blockDeviceMapping" locationNameList:"BlockDeviceMapping" type:"list"`
- Description *string `locationName:"description" type:"string"`
- DryRun *bool `locationName:"dryRun" type:"boolean"`
- InstanceId *string `locationName:"instanceId" type:"string" required:"true"`
- Name *string `locationName:"name" type:"string" required:"true"`
- NoReboot *bool `locationName:"noReboot" type:"boolean"`
-}
-
-type CreateImageOutput struct {
- _ struct{} `type:"structure"`
- ImageId *string `locationName:"imageId" type:"string"`
-}
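All of the input types deleted above follow the same aws-sdk-go conventions: pointer fields, chainable Set* builders, and a Validate() method that reports missing required fields before a request is built. A minimal sketch of how a caller would have assembled one of them, using only methods that appear in the removed code (the wrapper function itself is hypothetical):

package fcu

import "fmt"

// newAttachInput is an illustrative helper, not part of the deleted file: it
// chains the Set* builders shown above and runs Validate() so that a missing
// required field (DeviceIndex, InstanceId or NetworkInterfaceId) surfaces as
// an error before any HTTP request is made.
func newAttachInput(instanceID, nicID string, deviceIndex int64) (*AttachNetworkInterfaceInput, error) {
	input := (&AttachNetworkInterfaceInput{}).
		SetDeviceIndex(deviceIndex).
		SetInstanceId(instanceID).
		SetNetworkInterfaceId(nicID)

	if err := input.Validate(); err != nil {
		return nil, fmt.Errorf("invalid AttachNetworkInterfaceInput: %w", err)
	}
	return input, nil
}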
diff --git a/osc/fcu/fcu_test.go b/osc/fcu/fcu_test.go
deleted file mode 100644
index 47674f4ba..000000000
--- a/osc/fcu/fcu_test.go
+++ /dev/null
@@ -1,25 +0,0 @@
-package fcu
-
-import (
- "testing"
-
- "github.com/terraform-providers/terraform-provider-outscale/osc"
-)
-
-func TestNewFCUClient(t *testing.T) {
- config := osc.Config{
- Credentials: &osc.Credentials{
- AccessKey: "AKID",
- SecretKey: "SecretKey",
- Region: "region",
- },
- }
-
- c, err := NewFCUClient(config)
- if err != nil {
- t.Fatalf("Got error %s", err)
- }
- if c == nil {
- t.Fatalf("Bad Client")
- }
-}
diff --git a/osc/fcu/lin.go b/osc/fcu/lin.go
deleted file mode 100644
index 4e323f963..000000000
--- a/osc/fcu/lin.go
+++ /dev/null
@@ -1,452 +0,0 @@
-package fcu
-
-import (
- "context"
- "net/http"
-)
-
-// CreateInternetGateway creates an Internet gateway via the FCU API.
-func (v VMOperations) CreateInternetGateway(input *CreateInternetGatewayInput) (*CreateInternetGatewayOutput, error) {
- inURL := "/"
- endpoint := "CreateInternetGateway"
- output := &CreateInternetGatewayOutput{}
-
- if input == nil {
- input = &CreateInternetGatewayInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeInternetGateways(input *DescribeInternetGatewaysInput) (*DescribeInternetGatewaysOutput, error) {
- inURL := "/"
- endpoint := "DescribeInternetGateways"
- output := &DescribeInternetGatewaysOutput{}
-
- if input == nil {
- input = &DescribeInternetGatewaysInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteInternetGateway(input *DeleteInternetGatewayInput) (*DeleteInternetGatewayOutput, error) {
- inURL := "/"
- endpoint := "DeleteInternetGateway"
- output := &DeleteInternetGatewayOutput{}
-
- if input == nil {
- input = &DeleteInternetGatewayInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) CreateVpc(input *CreateVpcInput) (*CreateVpcOutput, error) {
- inURL := "/"
- endpoint := "CreateVpc"
- output := &CreateVpcOutput{}
-
- if input == nil {
- input = &CreateVpcInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeVpcs(input *DescribeVpcsInput) (*DescribeVpcsOutput, error) {
- inURL := "/"
- endpoint := "DescribeVpcs"
- output := &DescribeVpcsOutput{}
-
- if input == nil {
- input = &DescribeVpcsInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteVpc(input *DeleteVpcInput) (*DeleteVpcOutput, error) {
- inURL := "/"
- endpoint := "DeleteVpc"
- output := &DeleteVpcOutput{}
-
- if input == nil {
- input = &DeleteVpcInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AttachInternetGateway(input *AttachInternetGatewayInput) (*AttachInternetGatewayOutput, error) {
- inURL := "/"
- endpoint := "AttachInternetGateway"
- output := &AttachInternetGatewayOutput{}
-
- if input == nil {
- input = &AttachInternetGatewayInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DetachInternetGateway(input *DetachInternetGatewayInput) (*DetachInternetGatewayOutput, error) {
- inURL := "/"
- endpoint := "DetachInternetGateway"
- output := &DetachInternetGatewayOutput{}
-
- if input == nil {
- input = &DetachInternetGatewayInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) ModifyVpcAttribute(input *ModifyVpcAttributeInput) (*ModifyVpcAttributeOutput, error) {
- inURL := "/"
- endpoint := "ModifyVpcAttribute"
- output := &ModifyVpcAttributeOutput{}
-
- if input == nil {
- input = &ModifyVpcAttributeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeVpcAttribute(input *DescribeVpcAttributeInput) (*DescribeVpcAttributeOutput, error) {
- inURL := "/"
- endpoint := "DescribeVpcAttribute"
- output := &DescribeVpcAttributeOutput{}
-
- if input == nil {
- input = &DescribeVpcAttributeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateVpnConnection(input *CreateVpnConnectionInput) (*CreateVpnConnectionOutput, error) {
- inURL := "/"
- endpoint := "CreateVpnConnection"
- output := &CreateVpnConnectionOutput{}
-
- if input == nil {
- input = &CreateVpnConnectionInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateVpnGateway(input *CreateVpnGatewayInput) (*CreateVpnGatewayOutput, error) {
- inURL := "/"
- endpoint := "CreateVpnGateway"
- output := &CreateVpnGatewayOutput{}
-
- if input == nil {
- input = &CreateVpnGatewayInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeVpnConnections(input *DescribeVpnConnectionsInput) (*DescribeVpnConnectionsOutput, error) {
- inURL := "/"
- endpoint := "DescribeVpnConnections"
- output := &DescribeVpnConnectionsOutput{}
-
- if input == nil {
- input = &DescribeVpnConnectionsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribeVpnGateways(input *DescribeVpnGatewaysInput) (*DescribeVpnGatewaysOutput, error) {
- inURL := "/"
- endpoint := "DescribeVpnGateways"
- output := &DescribeVpnGatewaysOutput{}
-
- if input == nil {
- input = &DescribeVpnGatewaysInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteVpnConnection(input *DeleteVpnConnectionInput) (*DeleteVpnConnectionOutput, error) {
- inURL := "/"
- endpoint := "DeleteVpnConnection"
- output := &DeleteVpnConnectionOutput{}
-
- if input == nil {
- input = &DeleteVpnConnectionInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DeleteVpnGateway(input *DeleteVpnGatewayInput) (*DeleteVpnGatewayOutput, error) {
- inURL := "/"
- endpoint := "DeleteVpnGateway"
- output := &DeleteVpnGatewayOutput{}
-
- if input == nil {
- input = &DeleteVpnGatewayInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AttachVpnGateway(input *AttachVpnGatewayInput) (*AttachVpnGatewayOutput, error) {
- inURL := "/"
- endpoint := "AttachVpnGateway"
- output := &AttachVpnGatewayOutput{}
-
- if input == nil {
- input = &AttachVpnGatewayInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DetachVpnGateway(input *DetachVpnGatewayInput) (*DetachVpnGatewayOutput, error) {
- inURL := "/"
- endpoint := "DetachVpnGateway"
- output := &DetachVpnGatewayOutput{}
-
- if input == nil {
- input = &DetachVpnGatewayInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateVpnConnectionRoute(input *CreateVpnConnectionRouteInput) (*CreateVpnConnectionRouteOutput, error) {
- inURL := "/"
- endpoint := "CreateVpnConnectionRoute"
- output := &CreateVpnConnectionRouteOutput{}
-
- if input == nil {
- input = &CreateVpnConnectionRouteInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteVpnConnectionRoute(input *DeleteVpnConnectionRouteInput) (*DeleteVpnConnectionRouteOutput, error) {
- inURL := "/"
- endpoint := "DeleteVpnConnectionRoute"
- output := &DeleteVpnConnectionRouteOutput{}
-
- if input == nil {
- input = &DeleteVpnConnectionRouteInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
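Every method in the removed lin.go (and in route.go below) repeats the same four steps: default the input, build a request with client.NewRequest, execute it with client.Do, and return the decoded output. A hedged sketch of how that boilerplate could collapse into a single helper built from the same two client calls; the helper and its interface{} parameters are an assumption, not code that existed in the repository:

package fcu

import (
	"context"
	"net/http"
)

// doCall is a hypothetical helper that captures the request/response pattern
// repeated by every operation above: build the named FCU call as a GET against
// "/", execute it, and decode the response body into output.
func (v VMOperations) doCall(endpoint string, input, output interface{}) error {
	req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, "/", input)
	if err != nil {
		return err
	}
	return v.client.Do(context.TODO(), req, output)
}

// With the helper, DeleteVpc would reduce to roughly:
//
//	func (v VMOperations) DeleteVpc(input *DeleteVpcInput) (*DeleteVpcOutput, error) {
//		if input == nil {
//			input = &DeleteVpcInput{}
//		}
//		output := &DeleteVpcOutput{}
//		if err := v.doCall("DeleteVpc", input, output); err != nil {
//			return nil, err
//		}
//		return output, nil
//	}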
diff --git a/osc/fcu/lin_test.go b/osc/fcu/lin_test.go
deleted file mode 100644
index 6ae1f78e5..000000000
--- a/osc/fcu/lin_test.go
+++ /dev/null
@@ -1,188 +0,0 @@
-package fcu
-
-import (
- "fmt"
- "net/http"
- "testing"
-
- "github.com/aws/aws-sdk-go/aws"
-)
-
-func TestVM_CreateInternetGateway(t *testing.T) {
- setup()
- defer teardown()
-
- input := CreateInternetGatewayInput{}
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
-	fmt.Fprintf(w, `
-<CreateInternetGatewayResponse xmlns="http://ec2.amazonaws.com/doc/2016-11-15/">
-  <requestId>6bc7d085-32c3-4b87-b07b-04ccc5c25f19</requestId>
-  <internetGateway>
-    <internetGatewayId>igw-349c9be7</internetGatewayId>
-  </internetGateway>
-</CreateInternetGatewayResponse>`)
- })
-
- desc, err := client.VM.CreateInternetGateway(&input)
- if err != nil {
- t.Errorf("VM.CreateInternetGateway returned error: %v", err)
- }
-
- expectedID := "igw-349c9be7"
- outputInstanceID := *desc.InternetGateway.InternetGatewayId
-
- if outputInstanceID != expectedID {
-	t.Fatalf("Expected InternetGatewayId:(%s), Got(%s)", expectedID, outputInstanceID)
- }
-
-}
-
-func TestVM_DescribeInternetGateways(t *testing.T) {
- setup()
- defer teardown()
-
- expectedID := "igw-251475c9"
-
- input := DescribeInternetGatewaysInput{
- InternetGatewayIds: []*string{&expectedID},
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
-	fmt.Fprintf(w, `
-<DescribeInternetGatewaysResponse xmlns="http://ec2.amazonaws.com/doc/2016-11-15/">
-  <requestId>89135ead-4cab-4e11-b842-7b70d0d5f583</requestId>
-  <internetGatewaySet>
-    <item>
-      <internetGatewayId>igw-251475c9</internetGatewayId>
-      <attachmentSet>
-        <item>
-          <vpcId>vpc-e9d09d63</vpcId>
-          <state>available</state>
-        </item>
-      </attachmentSet>
-      <tagSet>
-        <item>
-          <key>Name</key>
-          <value>Default</value>
-        </item>
-      </tagSet>
-    </item>
-  </internetGatewaySet>
-</DescribeInternetGatewaysResponse>
-`)
- })
-
- desc, err := client.VM.DescribeInternetGateways(&input)
- if err != nil {
- t.Errorf("VM.DescribeInternetGateways returned error: %v", err)
- }
-
- outputInstanceID := *desc.InternetGateways[0].InternetGatewayId
-
- if outputInstanceID != expectedID {
-	t.Fatalf("Expected InternetGatewayId:(%s), Got(%s)", expectedID, outputInstanceID)
- }
-
-}
-
-func TestVM_DeleteInternetGateway(t *testing.T) {
- setup()
- defer teardown()
-
- expectedID := "igw-349c9be7"
-
- input := DeleteInternetGatewayInput{
- InternetGatewayId: &expectedID,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, ``)
- })
-
- _, err := client.VM.DeleteInternetGateway(&input)
- if err != nil {
- t.Errorf("VM.DeleteInternetGateway returned error: %v", err)
- }
-
-}
-
-func TestVM_CreateVpc(t *testing.T) {
- setup()
- defer teardown()
-
- expectedID := "vpc-53769ad9"
-
- input := CreateVpcInput{
- CidrBlock: aws.String("10.0.0.0/16"),
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
-	fmt.Fprintf(w, `
-<CreateVpcResponse xmlns="http://ec2.amazonaws.com/doc/2016-11-15/">
-  <requestId>52faf8ea-65ea-46cb-896c-df33cd06e8fc</requestId>
-  <vpc>
-    <vpcId>vpc-53769ad9</vpcId>
-    <state>available</state>
-    <cidrBlock>10.0.0.0/16</cidrBlock>
-    <dhcpOptionsId>dopt-1ea5389e</dhcpOptionsId>
-    <instanceTenancy>default</instanceTenancy>
-    <isDefault>false</isDefault>
-  </vpc>
-</CreateVpcResponse>`)
- })
-
- desc, err := client.VM.CreateVpc(&input)
- if err != nil {
- t.Errorf("VM.CreateVpc returned error: %v", err)
- }
-
- outputVpcID := *desc.Vpc.VpcId
-
- if outputVpcID != expectedID {
-	t.Fatalf("Expected VpcId:(%s), Got(%s)", expectedID, outputVpcID)
- }
- expectedState := "available"
- state := *desc.Vpc.State
- if expectedState != state {
-	t.Fatalf("Expected state:(%s), Got(%s)", expectedState, state)
- }
-
- expectedCIDR := "10.0.0.0/16"
- cidr := *desc.Vpc.CidrBlock
-	if expectedCIDR != cidr {
-	t.Fatalf("Expected cidr:(%s), Got(%s)", expectedCIDR, cidr)
- }
-
-}
-
-func TestVM_DescribeVpcs(t *testing.T) {
- setup()
- defer teardown()
-
- expectedID := "vpc-53769ad9"
-
- input := DescribeVpcsInput{
- VpcIds: []*string{&expectedID},
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
-	fmt.Fprintf(w, `
-<DescribeVpcsResponse xmlns="http://ec2.amazonaws.com/doc/2016-11-15/">
-  <requestId>1ab37b1d-67fb-4edb-aeea-1cdb02e1c232</requestId>
-  <vpcSet>
-    <item>
-      <vpcId>vpc-53769ad9</vpcId>
-      <state>available</state>
-      <cidrBlock>10.0.0.0/16</cidrBlock>
-      <dhcpOptionsId>dopt-1ea5389e</dhcpOptionsId>
-      <instanceTenancy>default</instanceTenancy>
-      <isDefault>false</isDefault>
-    </item>
-  </vpcSet>
-</DescribeVpcsResponse>
-`)
- })
-
- desc, err := client.VM.DescribeVpcs(&input)
- if err != nil {
- t.Errorf("VM.DescribeVpcs returned error: %v", err)
- }
-
- outputVpcID := *desc.Vpcs[0].VpcId
-
- if outputVpcID != expectedID {
-	t.Fatalf("Expected VpcId:(%s), Got(%s)", expectedID, outputVpcID)
- }
- expectedState := "available"
- state := *desc.Vpcs[0].State
- if expectedState != state {
-	t.Fatalf("Expected state:(%s), Got(%s)", expectedState, state)
- }
-
- expectedCIDR := "10.0.0.0/16"
- cidr := *desc.Vpcs[0].CidrBlock
- if expectedCIDR != cidr {
-	t.Fatalf("Expected cidr:(%s), Got(%s)", expectedCIDR, cidr)
- }
-
-}
-
-func TestVM_DeleteVpc(t *testing.T) {
- setup()
- defer teardown()
-
- expectedID := "vpc-53769ad9"
-
- input := DeleteVpcInput{
- VpcId: &expectedID,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, ``)
- })
-
- _, err := client.VM.DeleteVpc(&input)
- if err != nil {
- t.Errorf("VM.DeleteVpc returned error: %v", err)
- }
-
-}
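The removed tests above depend on package-level setup(), teardown(), mux and client helpers that live elsewhere in the deleted test files. A sketch of the kind of httptest harness those names usually denote; the wiring of the FCU client to the test server is an assumption, since that code is not shown here:

package fcu

import (
	"net/http"
	"net/http/httptest"
)

// exampleMux routes the fake FCU endpoints registered by each test.
var exampleMux *http.ServeMux

// exampleServer serves the canned XML responses over HTTP.
var exampleServer *httptest.Server

// setupExample mirrors what a setup() helper typically does: start a local
// HTTP server backed by a fresh mux. The real helper would also construct the
// FCU client with its endpoint pointed at exampleServer.URL so that
// client.VM.* calls hit this server instead of the live API.
func setupExample() {
	exampleMux = http.NewServeMux()
	exampleServer = httptest.NewServer(exampleMux)
}

// teardownExample shuts the server down, as the deferred teardown() calls do.
func teardownExample() {
	exampleServer.Close()
}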
diff --git a/osc/fcu/route.go b/osc/fcu/route.go
deleted file mode 100644
index 9894ad8b5..000000000
--- a/osc/fcu/route.go
+++ /dev/null
@@ -1,248 +0,0 @@
-package fcu
-
-import (
- "context"
- "net/http"
-)
-
-func (v VMOperations) CreateRoute(input *CreateRouteInput) (*CreateRouteOutput, error) {
- inURL := "/"
- endpoint := "CreateRoute"
- output := &CreateRouteOutput{}
-
- if input == nil {
- input = &CreateRouteInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) ReplaceRoute(input *ReplaceRouteInput) (*ReplaceRouteOutput, error) {
- inURL := "/"
- endpoint := "ReplaceRoute"
- output := &ReplaceRouteOutput{}
-
- if input == nil {
- input = &ReplaceRouteInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteRoute(input *DeleteRouteInput) (*DeleteRouteOutput, error) {
- inURL := "/"
- endpoint := "DeleteRoute"
- output := &DeleteRouteOutput{}
-
- if input == nil {
- input = &DeleteRouteInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeRouteTables(input *DescribeRouteTablesInput) (*DescribeRouteTablesOutput, error) {
- inURL := "/"
- endpoint := "DescribeRouteTables"
- output := &DescribeRouteTablesOutput{}
-
- if input == nil {
- input = &DescribeRouteTablesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateRouteTable(input *CreateRouteTableInput) (*CreateRouteTableOutput, error) {
- inURL := "/"
- endpoint := "CreateRouteTable"
- output := &CreateRouteTableOutput{}
-
- if input == nil {
- input = &CreateRouteTableInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DisableVgwRoutePropagation(input *DisableVgwRoutePropagationInput) (*DisableVgwRoutePropagationOutput, error) {
- inURL := "/"
- endpoint := "DisableVgwRoutePropagation"
- output := &DisableVgwRoutePropagationOutput{}
-
- if input == nil {
- input = &DisableVgwRoutePropagationInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) EnableVgwRoutePropagation(input *EnableVgwRoutePropagationInput) (*EnableVgwRoutePropagationOutput, error) {
- inURL := "/"
- endpoint := "EnableVgwRoutePropagation"
- output := &EnableVgwRoutePropagationOutput{}
-
- if input == nil {
- input = &EnableVgwRoutePropagationInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DisassociateRouteTable(input *DisassociateRouteTableInput) (*DisassociateRouteTableOutput, error) {
- inURL := "/"
- endpoint := "DisassociateRouteTable"
- output := &DisassociateRouteTableOutput{}
-
- if input == nil {
- input = &DisassociateRouteTableInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteRouteTable(input *DeleteRouteTableInput) (*DeleteRouteTableOutput, error) {
- inURL := "/"
- endpoint := "DeleteRouteTable"
- output := &DeleteRouteTableOutput{}
-
- if input == nil {
- input = &DeleteRouteTableInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AssociateRouteTable(input *AssociateRouteTableInput) (*AssociateRouteTableOutput, error) {
- inURL := "/"
- endpoint := "AssociateRouteTable"
- output := &AssociateRouteTableOutput{}
-
- if input == nil {
- input = &AssociateRouteTableInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) ReplaceRouteTableAssociation(input *ReplaceRouteTableAssociationInput) (*ReplaceRouteTableAssociationOutput, error) {
- inURL := "/"
- endpoint := "ReplaceRouteTableAssociation"
- output := &ReplaceRouteTableAssociationOutput{}
-
- if input == nil {
- input = &ReplaceRouteTableAssociationInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
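Taken together, the removed route operations compose into the usual lifecycle: create a route table in a VPC, add a route, then delete the table. The sketch below uses only method names that appear in this file; the input and output field names (VpcId, RouteTableId, DestinationCidrBlock, GatewayId, RouteTable) are assumed to mirror the EC2 API this client follows and are declared in the removed type files, not shown here:

package fcu

import "github.com/aws/aws-sdk-go/aws"

// routeTableLifecycle is an illustrative sketch, not original code. Field
// names on the inputs and outputs are assumptions modelled on the EC2 API.
func routeTableLifecycle(v VMOperations, vpcID string) error {
	created, err := v.CreateRouteTable(&CreateRouteTableInput{VpcId: aws.String(vpcID)})
	if err != nil {
		return err
	}
	routeTableID := created.RouteTable.RouteTableId

	// Point the default route of the new table at an Internet gateway.
	if _, err := v.CreateRoute(&CreateRouteInput{
		RouteTableId:         routeTableID,
		DestinationCidrBlock: aws.String("0.0.0.0/0"),
		GatewayId:            aws.String("igw-12345678"),
	}); err != nil {
		return err
	}

	// Clean up: remove the table again.
	_, err = v.DeleteRouteTable(&DeleteRouteTableInput{RouteTableId: routeTableID})
	return err
}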
diff --git a/osc/fcu/vm.go b/osc/fcu/vm.go
deleted file mode 100644
index fe8fe154b..000000000
--- a/osc/fcu/vm.go
+++ /dev/null
@@ -1,2290 +0,0 @@
-package fcu
-
-import (
- "context"
- "net/http"
-
- "github.com/terraform-providers/terraform-provider-outscale/osc"
-)
-
-// VMOperations defines all the operations needed for FCU VMs.
-type VMOperations struct {
- client *osc.Client
-}
-
-// VMService defines all the actions available on the FCU VM service.
-type VMService interface {
- RunInstance(input *RunInstancesInput) (*Reservation, error)
- DescribeInstances(input *DescribeInstancesInput) (*DescribeInstancesOutput, error)
- GetPasswordData(input *GetPasswordDataInput) (*GetPasswordDataOutput, error)
- ModifyInstanceKeyPair(input *ModifyInstanceKeyPairInput) error
- ModifyInstanceAttribute(input *ModifyInstanceAttributeInput) (*ModifyInstanceAttributeOutput, error)
- TerminateInstances(input *TerminateInstancesInput) (*TerminateInstancesOutput, error)
- AllocateAddress(input *AllocateAddressInput) (*AllocateAddressOutput, error)
- DescribeAddressesRequest(input *DescribeAddressesInput) (*DescribeAddressesOutput, error)
- StopInstances(input *StopInstancesInput) (*StopInstancesOutput, error)
- StartInstances(input *StartInstancesInput) (*StartInstancesOutput, error)
- ImportKeyPair(input *ImportKeyPairInput) (*ImportKeyPairOutput, error)
- DescribeKeyPairs(input *DescribeKeyPairsInput) (*DescribeKeyPairsOutput, error)
- DeleteKeyPairs(input *DeleteKeyPairInput) (*DeleteKeyPairOutput, error)
- CreateKeyPair(input *CreateKeyPairInput) (*CreateKeyPairOutput, error)
- AssociateAddress(input *AssociateAddressInput) (*AssociateAddressOutput, error)
- DisassociateAddress(input *DisassociateAddressInput) (*DisassociateAddressOutput, error)
- ReleaseAddress(input *ReleaseAddressInput) (*ReleaseAddressOutput, error)
- RegisterImage(input *RegisterImageInput) (*RegisterImageOutput, error)
- DescribeImages(input *DescribeImagesInput) (*DescribeImagesOutput, error)
- ModifyImageAttribute(input *ModifyImageAttributeInput) (*ModifyImageAttributeOutput, error)
- DeleteTags(input *DeleteTagsInput) (*DeleteTagsOutput, error)
- CreateTags(input *CreateTagsInput) (*CreateTagsOutput, error)
- DeregisterImage(input *DeregisterImageInput) (*DeregisterImageOutput, error)
- DescribeTags(input *DescribeTagsInput) (*DescribeTagsOutput, error)
- CreateSecurityGroup(input *CreateSecurityGroupInput) (*CreateSecurityGroupOutput, error)
- DescribeSecurityGroups(input *DescribeSecurityGroupsInput) (*DescribeSecurityGroupsOutput, error)
- RevokeSecurityGroupEgress(input *RevokeSecurityGroupEgressInput) (*RevokeSecurityGroupEgressOutput, error)
- RevokeSecurityGroupIngress(input *RevokeSecurityGroupIngressInput) (*RevokeSecurityGroupIngressOutput, error)
- AuthorizeSecurityGroupEgress(input *AuthorizeSecurityGroupEgressInput) (*AuthorizeSecurityGroupEgressOutput, error)
- AuthorizeSecurityGroupIngress(input *AuthorizeSecurityGroupIngressInput) (*AuthorizeSecurityGroupIngressOutput, error)
- DeleteSecurityGroup(input *DeleteSecurityGroupInput) (*DeleteSecurityGroupOutput, error)
- CreateVolume(input *CreateVolumeInput) (*Volume, error)
- DeleteVolume(input *DeleteVolumeInput) (*DeleteVolumeOutput, error)
- DescribeVolumes(input *DescribeVolumesInput) (*DescribeVolumesOutput, error)
- AttachVolume(input *AttachVolumeInput) (*VolumeAttachment, error)
- DetachVolume(input *DetachVolumeInput) (*VolumeAttachment, error)
- CreateSubNet(input *CreateSubnetInput) (*CreateSubnetOutput, error)
- DeleteSubNet(input *DeleteSubnetInput) (*DeleteSubnetOutput, error)
- DescribeSubNet(input *DescribeSubnetsInput) (*DescribeSubnetsOutput, error)
- DescribeInstanceAttribute(input *DescribeInstanceAttributeInput) (*DescribeInstanceAttributeOutput, error)
- DescribeInstanceStatus(input *DescribeInstanceStatusInput) (*DescribeInstanceStatusOutput, error)
- CreateInternetGateway(input *CreateInternetGatewayInput) (*CreateInternetGatewayOutput, error)
- DescribeInternetGateways(input *DescribeInternetGatewaysInput) (*DescribeInternetGatewaysOutput, error)
- DeleteInternetGateway(input *DeleteInternetGatewayInput) (*DeleteInternetGatewayOutput, error)
- CreateNatGateway(input *CreateNatGatewayInput) (*CreateNatGatewayOutput, error)
- DescribeNatGateways(input *DescribeNatGatewaysInput) (*DescribeNatGatewaysOutput, error)
- DeleteNatGateway(input *DeleteNatGatewayInput) (*DeleteNatGatewayOutput, error)
- CreateVpc(input *CreateVpcInput) (*CreateVpcOutput, error)
- DescribeVpcs(input *DescribeVpcsInput) (*DescribeVpcsOutput, error)
- DeleteVpc(input *DeleteVpcInput) (*DeleteVpcOutput, error)
- AttachInternetGateway(input *AttachInternetGatewayInput) (*AttachInternetGatewayOutput, error)
- DetachInternetGateway(input *DetachInternetGatewayInput) (*DetachInternetGatewayOutput, error)
- ModifyVpcAttribute(input *ModifyVpcAttributeInput) (*ModifyVpcAttributeOutput, error)
- DescribeVpcAttribute(input *DescribeVpcAttributeInput) (*DescribeVpcAttributeOutput, error)
- CreateAccessKey(input *CreateAccessKeyInput) (*CreateAccessKeyOutput, error)
- DescribeAccessKey(input *DescribeAccessKeyInput) (*DescribeAccessKeyOutput, error)
- DeleteAccessKey(input *DeleteAccessKeyInput) (*DeleteAccessKeyOutput, error)
- UpdateAccessKey(input *UpdateAccessKeyInput) (*UpdateAccessKeyOutput, error)
- DeleteDhcpOptions(input *DeleteDhcpOptionsInput) (*DeleteDhcpOptionsOutput, error)
- CreateDhcpOptions(input *CreateDhcpOptionsInput) (*CreateDhcpOptionsOutput, error)
- DescribeDhcpOptions(input *DescribeDhcpOptionsInput) (*DescribeDhcpOptionsOutput, error)
- AssociateDhcpOptions(input *AssociateDhcpOptionsInput) (*AssociateDhcpOptionsOutput, error)
- DescribeCustomerGateways(input *DescribeCustomerGatewaysInput) (*DescribeCustomerGatewaysOutput, error)
- DeleteCustomerGateway(input *DeleteCustomerGatewayInput) (*DeleteCustomerGatewayOutput, error)
- CreateCustomerGateway(input *CreateCustomerGatewayInput) (*CreateCustomerGatewayOutput, error)
- CreateRoute(input *CreateRouteInput) (*CreateRouteOutput, error)
- ReplaceRoute(input *ReplaceRouteInput) (*ReplaceRouteOutput, error)
- DeleteRoute(input *DeleteRouteInput) (*DeleteRouteOutput, error)
- DescribeRouteTables(input *DescribeRouteTablesInput) (*DescribeRouteTablesOutput, error)
- CreateRouteTable(input *CreateRouteTableInput) (*CreateRouteTableOutput, error)
- DisableVgwRoutePropagation(input *DisableVgwRoutePropagationInput) (*DisableVgwRoutePropagationOutput, error)
- EnableVgwRoutePropagation(input *EnableVgwRoutePropagationInput) (*EnableVgwRoutePropagationOutput, error)
- DisassociateRouteTable(input *DisassociateRouteTableInput) (*DisassociateRouteTableOutput, error)
- DeleteRouteTable(input *DeleteRouteTableInput) (*DeleteRouteTableOutput, error)
- AssociateRouteTable(input *AssociateRouteTableInput) (*AssociateRouteTableOutput, error)
- ReplaceRouteTableAssociation(input *ReplaceRouteTableAssociationInput) (*ReplaceRouteTableAssociationOutput, error)
- CopyImage(input *CopyImageInput) (*CopyImageOutput, error)
- DescribeSnapshots(input *DescribeSnapshotsInput) (*DescribeSnapshotsOutput, error)
- CreateVpnConnection(input *CreateVpnConnectionInput) (*CreateVpnConnectionOutput, error)
- DescribeVpnConnections(input *DescribeVpnConnectionsInput) (*DescribeVpnConnectionsOutput, error)
- DeleteVpnConnection(input *DeleteVpnConnectionInput) (*DeleteVpnConnectionOutput, error)
- CreateVpnGateway(input *CreateVpnGatewayInput) (*CreateVpnGatewayOutput, error)
- DescribeVpnGateways(input *DescribeVpnGatewaysInput) (*DescribeVpnGatewaysOutput, error)
- DeleteVpnGateway(input *DeleteVpnGatewayInput) (*DeleteVpnGatewayOutput, error)
- AttachVpnGateway(input *AttachVpnGatewayInput) (*AttachVpnGatewayOutput, error)
- DetachVpnGateway(input *DetachVpnGatewayInput) (*DetachVpnGatewayOutput, error)
- CreateImageExportTask(input *CreateImageExportTaskInput) (*CreateImageExportTaskOutput, error)
- DescribeImageExportTasks(input *DescribeImageExportTasksInput) (*DescribeImageExportTasksOutput, error)
- CreateVpnConnectionRoute(input *CreateVpnConnectionRouteInput) (*CreateVpnConnectionRouteOutput, error)
- DeleteVpnConnectionRoute(input *DeleteVpnConnectionRouteInput) (*DeleteVpnConnectionRouteOutput, error)
- DescribeAvailabilityZones(input *DescribeAvailabilityZonesInput) (*DescribeAvailabilityZonesOutput, error)
- DescribePrefixLists(input *DescribePrefixListsInput) (*DescribePrefixListsOutput, error)
- DescribeQuotas(input *DescribeQuotasInput) (*DescribeQuotasOutput, error)
- DescribeRegions(input *DescribeRegionsInput) (*DescribeRegionsOutput, error)
- CreateNetworkInterface(input *CreateNetworkInterfaceInput) (*CreateNetworkInterfaceOutput, error)
- DeleteNetworkInterface(input *DeleteNetworkInterfaceInput) (*DeleteNetworkInterfaceOutput, error)
- DescribeNetworkInterfaces(input *DescribeNetworkInterfacesInput) (*DescribeNetworkInterfacesOutput, error)
- ModifyNetworkInterfaceAttribute(input *ModifyNetworkInterfaceAttributeInput) (*ModifyNetworkInterfaceAttributeOutput, error)
- DescribeNetworkInterfaceAttribute(input *DescribeNetworkInterfaceAttributeInput) (*DescribeNetworkInterfaceAttributeOutput, error)
- DetachNetworkInterface(input *DetachNetworkInterfaceInput) (*DetachNetworkInterfaceOutput, error)
- AttachNetworkInterface(input *AttachNetworkInterfaceInput) (*AttachNetworkInterfaceOutput, error)
- AssignPrivateIpAddresses(input *AssignPrivateIpAddressesInput) (*AssignPrivateIpAddressesOutput, error)
- UnassignPrivateIpAddresses(input *UnassignPrivateIpAddressesInput) (*UnassignPrivateIpAddressesOutput, error)
- CreateSnapshotExportTask(input *CreateSnapshotExportTaskInput) (*CreateSnapshotExportTaskOutput, error)
- DescribeSnapshotExportTasks(input *DescribeSnapshotExportTasksInput) (*DescribeSnapshotExportTasksOutput, error)
- CreateSnapshot(input *CreateSnapshotInput) (*Snapshot, error)
- DeleteSnapshot(input *DeleteSnapshotInput) (*DeleteSnapshotOutput, error)
- DescribeProductTypes(input *DescribeProductTypesInput) (*DescribeProductTypesOutput, error)
- DescribeReservedInstances(input *DescribeReservedInstancesInput) (*DescribeReservedInstancesOutput, error)
- DescribeInstanceTypes(input *DescribeInstanceTypesInput) (*DescribeInstanceTypesOutput, error)
- DescribeReservedInstancesOfferings(input *DescribeReservedInstancesOfferingsInput) (*DescribeReservedInstancesOfferingsOutput, error)
- DescribeImageAttribute(input *DescribeImageAttributeInput) (*DescribeImageAttributeOutput, error)
- CreateVpcPeeringConnection(input *CreateVpcPeeringConnectionInput) (*CreateVpcPeeringConnectionOutput, error)
- DescribeVpcPeeringConnections(input *DescribeVpcPeeringConnectionsInput) (*DescribeVpcPeeringConnectionsOutput, error)
- AcceptVpcPeeringConnection(input *AcceptVpcPeeringConnectionInput) (*AcceptVpcPeeringConnectionOutput, error)
- ModifyVpcPeeringConnectionOptions(input *ModifyVpcPeeringConnectionOptionsInput) (*ModifyVpcPeeringConnectionOptionsOutput, error)
- DeleteVpcPeeringConnection(input *DeleteVpcPeeringConnectionInput) (*DeleteVpcPeeringConnectionOutput, error)
- PurchaseReservedInstancesOffering(input *PurchaseReservedInstancesOfferingInput) (*PurchaseReservedInstancesOfferingOutput, error)
- ModifySnapshotAttribute(input *ModifySnapshotAttributeInput) (*ModifySnapshotAttributeOutput, error)
- DescribeSnapshotAttribute(input *DescribeSnapshotAttributeInput) (*DescribeSnapshotAttributeOutput, error)
- CreateVpcEndpoint(input *CreateVpcEndpointInput) (*CreateVpcEndpointOutput, error)
- DescribeVpcEndpoints(input *DescribeVpcEndpointsInput) (*DescribeVpcEndpointsOutput, error)
- ModifyVpcEndpoint(input *ModifyVpcEndpointInput) (*ModifyVpcEndpointOutput, error)
- DeleteVpcEndpoints(input *DeleteVpcEndpointsInput) (*DeleteVpcEndpointsOutput, error)
- ImportSnapshot(input *ImportSnapshotInput) (*ImportSnapshotOutput, error)
- CopySnapshot(input *CopySnapshotInput) (*CopySnapshotOutput, error)
- DescribeVpcEndpointServices(input *DescribeVpcEndpointServicesInput) (*DescribeVpcEndpointServicesOutput, error)
- CreateImage(input *CreateImageInput) (*CreateImageOutput, error)
-}
-
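For orientation, the sketch below shows how a caller exercised one of the operations listed in the interface above through the removed fcu client. It is a minimal illustration only: osc.Config, NewFCUClient, client.VM, and DescribeInstances are taken from the deleted files in this diff, while the credentials, region, instance ID, and the ExampleDescribeInstances helper itself are placeholders.

package fcu

import (
	"fmt"
	"log"

	"github.com/terraform-providers/terraform-provider-outscale/osc"
)

// ExampleDescribeInstances is a hypothetical caller of the removed FCU client,
// shown only to illustrate how the operations listed above were consumed.
func ExampleDescribeInstances() {
	config := osc.Config{
		Credentials: &osc.Credentials{
			AccessKey: "AKID",      // placeholder
			SecretKey: "SecretKey", // placeholder
			Region:    "eu-west-2", // placeholder
		},
	}

	client, err := NewFCUClient(config)
	if err != nil {
		log.Fatal(err)
	}

	id := "i-0e8ea0a2" // placeholder instance ID
	out, err := client.VM.DescribeInstances(&DescribeInstancesInput{
		InstanceIds: []*string{&id},
	})
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(len(out.Reservations))
}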
-const opRunInstances = "RunInstances"
-
-// RunInstance launches the instances described by input and returns the resulting Reservation.
-func (v VMOperations) RunInstance(input *RunInstancesInput) (*Reservation, error) {
- req, err := v.client.NewRequest(context.Background(), opRunInstances, http.MethodGet, "/", input)
- if err != nil {
- return nil, err
- }
-
- output := Reservation{}
-
- err = v.client.Do(context.Background(), req, &output)
- if err != nil {
- return nil, err
- }
-
- return &output, nil
-}
-
-const opDescribeInstances = "DescribeInstances"
-
-// DescribeInstances returns information about the specified instances.
-func (v VMOperations) DescribeInstances(input *DescribeInstancesInput) (*DescribeInstancesOutput, error) {
- inURL := "/"
- endpoint := "DescribeInstances"
- output := &DescribeInstancesOutput{}
-
- if input == nil {
- input = &DescribeInstancesInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-// ModifyInstanceKeyPair changes the key pair associated with the specified instance.
-func (v VMOperations) ModifyInstanceKeyPair(input *ModifyInstanceKeyPairInput) error {
- inURL := "/"
- endpoint := "ModifyInstanceKeypair"
-
- if input == nil {
- input = &ModifyInstanceKeyPairInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodPost, inURL, input)
-
- if err != nil {
- return err
- }
-
- err = v.client.Do(context.TODO(), req, nil)
- if err != nil {
- return err
- }
-
- return nil
-}
-
-func (v VMOperations) ModifyInstanceAttribute(input *ModifyInstanceAttributeInput) (*ModifyInstanceAttributeOutput, error) {
- inURL := "/"
- endpoint := "ModifyInstanceAttribute"
- output := &ModifyInstanceAttributeOutput{}
-
- if input == nil {
- input = &ModifyInstanceAttributeInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) GetPasswordData(input *GetPasswordDataInput) (*GetPasswordDataOutput, error) {
- inURL := "/"
- endpoint := "GetPasswordData"
- output := &GetPasswordDataOutput{}
-
- if input == nil {
- input = &GetPasswordDataInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-// TerminateInstances terminates the specified instances.
-func (v VMOperations) TerminateInstances(input *TerminateInstancesInput) (*TerminateInstancesOutput, error) {
- inURL := "/"
- endpoint := "TerminateInstances"
- output := &TerminateInstancesOutput{}
-
- if input == nil {
- input = &TerminateInstancesInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AllocateAddress(input *AllocateAddressInput) (*AllocateAddressOutput, error) {
- inURL := "/"
- endpoint := "AllocateAddress"
- output := &AllocateAddressOutput{}
-
- if input == nil {
- input = &AllocateAddressInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) StopInstances(input *StopInstancesInput) (*StopInstancesOutput, error) {
- inURL := "/"
- endpoint := "StopInstances"
- output := &StopInstancesOutput{}
-
- if input == nil {
- input = &StopInstancesInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-// DescribeAddressesRequest describes the specified Elastic IP addresses.
-func (v VMOperations) DescribeAddressesRequest(input *DescribeAddressesInput) (*DescribeAddressesOutput, error) {
- inURL := "/"
- endpoint := "DescribeAddresses"
- output := &DescribeAddressesOutput{}
-
- if input == nil {
- input = &DescribeAddressesInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) StartInstances(input *StartInstancesInput) (*StartInstancesOutput, error) {
- inURL := "/"
- endpoint := "StartInstances"
- output := &StartInstancesOutput{}
-
- if input == nil {
- input = &StartInstancesInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AssociateAddress(input *AssociateAddressInput) (*AssociateAddressOutput, error) {
- inURL := "/"
- endpoint := "AssociateAddress"
- output := &AssociateAddressOutput{}
-
- if input == nil {
- input = &AssociateAddressInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DisassociateAddress(input *DisassociateAddressInput) (*DisassociateAddressOutput, error) {
- inURL := "/"
- endpoint := "DisassociateAddress"
- output := &DisassociateAddressOutput{}
-
- if input == nil {
- input = &DisassociateAddressInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) ReleaseAddress(input *ReleaseAddressInput) (*ReleaseAddressOutput, error) {
- inURL := "/"
- endpoint := "ReleaseAddress"
- output := &ReleaseAddressOutput{}
-
- if input == nil {
- input = &ReleaseAddressInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) RegisterImage(input *RegisterImageInput) (*RegisterImageOutput, error) {
- inURL := "/"
- endpoint := "RegisterImage"
- output := &RegisterImageOutput{}
-
- if input == nil {
- input = &RegisterImageInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeImages(input *DescribeImagesInput) (*DescribeImagesOutput, error) {
- inURL := "/"
- endpoint := "DescribeImages"
- output := &DescribeImagesOutput{}
-
- if input == nil {
- input = &DescribeImagesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) ModifyImageAttribute(input *ModifyImageAttributeInput) (*ModifyImageAttributeOutput, error) {
- inURL := "/"
- endpoint := "ModifyImageAttribute"
- output := &ModifyImageAttributeOutput{}
-
- if input == nil {
- input = &ModifyImageAttributeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteTags(input *DeleteTagsInput) (*DeleteTagsOutput, error) {
- inURL := "/"
- endpoint := "DeleteTags"
- output := &DeleteTagsOutput{}
-
- if input == nil {
- input = &DeleteTagsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateTags(input *CreateTagsInput) (*CreateTagsOutput, error) {
- inURL := "/"
- endpoint := "CreateTags"
- output := &CreateTagsOutput{}
-
- if input == nil {
- input = &CreateTagsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeregisterImage(input *DeregisterImageInput) (*DeregisterImageOutput, error) {
- inURL := "/"
- endpoint := "DeregisterImage"
- output := &DeregisterImageOutput{}
-
- if input == nil {
- input = &DeregisterImageInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeTags(input *DescribeTagsInput) (*DescribeTagsOutput, error) {
- inURL := "/"
- endpoint := "DescribeTags"
- output := &DescribeTagsOutput{}
-
- if input == nil {
- input = &DescribeTagsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateSecurityGroup(input *CreateSecurityGroupInput) (*CreateSecurityGroupOutput, error) {
- inURL := "/"
- endpoint := "CreateSecurityGroup"
- output := &CreateSecurityGroupOutput{}
-
- if input == nil {
- input = &CreateSecurityGroupInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) ImportKeyPair(input *ImportKeyPairInput) (*ImportKeyPairOutput, error) {
- inURL := "/"
- endpoint := "ImportKeyPair"
- output := &ImportKeyPairOutput{}
-
- if input == nil {
- input = &ImportKeyPairInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeSecurityGroups(input *DescribeSecurityGroupsInput) (*DescribeSecurityGroupsOutput, error) {
- inURL := "/"
- endpoint := "DescribeSecurityGroups"
- output := &DescribeSecurityGroupsOutput{}
-
- if input == nil {
- input = &DescribeSecurityGroupsInput{}
-
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeKeyPairs(input *DescribeKeyPairsInput) (*DescribeKeyPairsOutput, error) {
- inURL := "/"
- endpoint := "DescribeKeyPairs"
- output := &DescribeKeyPairsOutput{}
-
- if input == nil {
- input = &DescribeKeyPairsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) RevokeSecurityGroupEgress(input *RevokeSecurityGroupEgressInput) (*RevokeSecurityGroupEgressOutput, error) {
- inURL := "/"
- endpoint := "RevokeSecurityGroupEgress"
- output := &RevokeSecurityGroupEgressOutput{}
-
- if input == nil {
- input = &RevokeSecurityGroupEgressInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) RevokeSecurityGroupIngress(input *RevokeSecurityGroupIngressInput) (*RevokeSecurityGroupIngressOutput, error) {
- inURL := "/"
- endpoint := "RevokeSecurityGroupIngress"
- output := &RevokeSecurityGroupIngressOutput{}
-
- if input == nil {
- input = &RevokeSecurityGroupIngressInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AuthorizeSecurityGroupEgress(input *AuthorizeSecurityGroupEgressInput) (*AuthorizeSecurityGroupEgressOutput, error) {
- inURL := "/"
- endpoint := "AuthorizeSecurityGroupEgress"
- output := &AuthorizeSecurityGroupEgressOutput{}
-
- if input == nil {
- input = &AuthorizeSecurityGroupEgressInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteKeyPairs(input *DeleteKeyPairInput) (*DeleteKeyPairOutput, error) {
- inURL := "/"
- endpoint := "DeleteKeyPair"
- output := &DeleteKeyPairOutput{}
-
- if input == nil {
- input = &DeleteKeyPairInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AuthorizeSecurityGroupIngress(input *AuthorizeSecurityGroupIngressInput) (*AuthorizeSecurityGroupIngressOutput, error) {
- inURL := "/"
- endpoint := "AuthorizeSecurityGroupIngress"
- output := &AuthorizeSecurityGroupIngressOutput{}
-
- if input == nil {
- input = &AuthorizeSecurityGroupIngressInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteSecurityGroup(input *DeleteSecurityGroupInput) (*DeleteSecurityGroupOutput, error) {
- inURL := "/"
- endpoint := "DeleteSecurityGroup"
- output := &DeleteSecurityGroupOutput{}
-
- if input == nil {
- input = &DeleteSecurityGroupInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateKeyPair(input *CreateKeyPairInput) (*CreateKeyPairOutput, error) {
- inURL := "/"
- endpoint := "CreateKeyPair"
- output := &CreateKeyPairOutput{}
-
- if input == nil {
- input = &CreateKeyPairInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateVolume(input *CreateVolumeInput) (*Volume, error) {
- inURL := "/"
- endpoint := "CreateVolume"
- output := &Volume{}
-
- if input == nil {
- input = &CreateVolumeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteVolume(input *DeleteVolumeInput) (*DeleteVolumeOutput, error) {
- inURL := "/"
- endpoint := "DeleteVolume"
- output := &DeleteVolumeOutput{}
-
- if input == nil {
- input = &DeleteVolumeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-
-func (v VMOperations) DescribeVolumes(input *DescribeVolumesInput) (*DescribeVolumesOutput, error) {
- inURL := "/"
- endpoint := "DescribeVolumes"
- output := &DescribeVolumesOutput{}
-
- if input == nil {
- input = &DescribeVolumesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AttachVolume(input *AttachVolumeInput) (*VolumeAttachment, error) {
- inURL := "/"
- endpoint := "AttachVolume"
- output := &VolumeAttachment{}
-
- if input == nil {
- input = &AttachVolumeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DetachVolume(input *DetachVolumeInput) (*VolumeAttachment, error) {
- inURL := "/"
- endpoint := "DetachVolume"
- output := &VolumeAttachment{}
-
- if input == nil {
- input = &DetachVolumeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeInstanceAttribute(input *DescribeInstanceAttributeInput) (*DescribeInstanceAttributeOutput, error) {
- inURL := "/"
- endpoint := "DescribeInstanceAttribute"
- output := &DescribeInstanceAttributeOutput{}
-
- if input == nil {
- input = &DescribeInstanceAttributeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-func (v VMOperations) CreateNatGateway(input *CreateNatGatewayInput) (*CreateNatGatewayOutput, error) {
- inURL := "/"
- endpoint := "CreateNatGateway"
- output := &CreateNatGatewayOutput{}
-
- if input == nil {
- input = &CreateNatGatewayInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-
-func (v VMOperations) DescribeNatGateways(input *DescribeNatGatewaysInput) (*DescribeNatGatewaysOutput, error) {
- inURL := "/"
- endpoint := "DescribeNatGateways"
- output := &DescribeNatGatewaysOutput{}
-
- if input == nil {
- input = &DescribeNatGatewaysInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeInstanceStatus(input *DescribeInstanceStatusInput) (*DescribeInstanceStatusOutput, error) {
- inURL := "/"
- endpoint := "DescribeInstanceStatus"
- output := &DescribeInstanceStatusOutput{}
-
- if input == nil {
- input = &DescribeInstanceStatusInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DeleteNatGateway(input *DeleteNatGatewayInput) (*DeleteNatGatewayOutput, error) {
- inURL := "/"
- endpoint := "DeleteNatGateway"
- output := &DeleteNatGatewayOutput{}
-
- if input == nil {
- input = &DeleteNatGatewayInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) CreateSubNet(input *CreateSubnetInput) (*CreateSubnetOutput, error) {
- inURL := "/"
- endpoint := "CreateSubnet"
- output := &CreateSubnetOutput{}
-
- if input == nil {
- input = &CreateSubnetInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteSubNet(input *DeleteSubnetInput) (*DeleteSubnetOutput, error) {
- inURL := "/"
- endpoint := "DeleteSubnet"
- output := &DeleteSubnetOutput{}
-
- if input == nil {
- input = &DeleteSubnetInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribeSubNet(input *DescribeSubnetsInput) (*DescribeSubnetsOutput, error) {
- inURL := "/"
- endpoint := "DescribeSubnets"
- output := &DescribeSubnetsOutput{}
-
- if input == nil {
- input = &DescribeSubnetsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateAccessKey(input *CreateAccessKeyInput) (*CreateAccessKeyOutput, error) {
- inURL := "/"
- endpoint := "CreateAccessKey"
- output := &CreateAccessKeyOutput{}
-
- if input == nil {
- input = &CreateAccessKeyInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DeleteDhcpOptions(input *DeleteDhcpOptionsInput) (*DeleteDhcpOptionsOutput, error) {
- inURL := "/"
- endpoint := "DescribeDhcpOptions"
- output := &DeleteDhcpOptionsOutput{}
-
- if input == nil {
- input = &DeleteDhcpOptionsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeCustomerGateways(input *DescribeCustomerGatewaysInput) (*DescribeCustomerGatewaysOutput, error) {
- inURL := "/"
- endpoint := "DescribeCustomerGateways"
- output := &DescribeCustomerGatewaysOutput{}
-
- if input == nil {
- input = &DescribeCustomerGatewaysInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
-
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeAccessKey(input *DescribeAccessKeyInput) (*DescribeAccessKeyOutput, error) {
- inURL := "/"
- endpoint := "GetAccessKey"
- output := &DescribeAccessKeyOutput{}
-
- if input == nil {
- input = &DescribeAccessKeyInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-
-func (v VMOperations) CreateDhcpOptions(input *CreateDhcpOptionsInput) (*CreateDhcpOptionsOutput, error) {
- inURL := "/"
- endpoint := "CreateDhcpOptions"
- output := &CreateDhcpOptionsOutput{}
-
- if input == nil {
- input = &CreateDhcpOptionsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteCustomerGateway(input *DeleteCustomerGatewayInput) (*DeleteCustomerGatewayOutput, error) {
- inURL := "/"
- endpoint := "DeleteCustomerGateway"
- output := &DeleteCustomerGatewayOutput{}
-
- if input == nil {
- input = &DeleteCustomerGatewayInput{}
- }
-
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteAccessKey(input *DeleteAccessKeyInput) (*DeleteAccessKeyOutput, error) {
- inURL := "/"
- endpoint := "DeleteAccessKey"
- output := &DeleteAccessKeyOutput{}
-
- if input == nil {
- input = &DeleteAccessKeyInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) UpdateAccessKey(input *UpdateAccessKeyInput) (*UpdateAccessKeyOutput, error) {
- inURL := "/"
- endpoint := "UpdateAccessKey"
- output := &UpdateAccessKeyOutput{}
-
- if input == nil {
- input = &UpdateAccessKeyInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeDhcpOptions(input *DescribeDhcpOptionsInput) (*DescribeDhcpOptionsOutput, error) {
- inURL := "/"
- endpoint := "DescribeDhcpOptions"
- output := &DescribeDhcpOptionsOutput{}
-
- if input == nil {
- input = &DescribeDhcpOptionsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AssociateDhcpOptions(input *AssociateDhcpOptionsInput) (*AssociateDhcpOptionsOutput, error) {
- inURL := "/"
- endpoint := "AssociateDhcpOptions"
- output := &AssociateDhcpOptionsOutput{}
-
- if input == nil {
- input = &AssociateDhcpOptionsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) CreateCustomerGateway(input *CreateCustomerGatewayInput) (*CreateCustomerGatewayOutput, error) {
-
- inURL := "/"
- endpoint := "CreateCustomerGateway"
- output := &CreateCustomerGatewayOutput{}
-
- if input == nil {
- input = &CreateCustomerGatewayInput{}
-
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateImageExportTask(input *CreateImageExportTaskInput) (*CreateImageExportTaskOutput, error) {
- inURL := "/"
- endpoint := "CreateImageExportTask"
- output := &CreateImageExportTaskOutput{}
-
- if input == nil {
- input = &CreateImageExportTaskInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) CopyImage(input *CopyImageInput) (*CopyImageOutput, error) {
- inURL := "/"
- endpoint := "CopyImage"
- output := &CopyImageOutput{}
-
- if input == nil {
- input = &CopyImageInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribeAvailabilityZones(input *DescribeAvailabilityZonesInput) (*DescribeAvailabilityZonesOutput, error) {
- inURL := "/"
- endpoint := "DescribeAvailabilityZones"
- output := &DescribeAvailabilityZonesOutput{}
-
- if input == nil {
- input = &DescribeAvailabilityZonesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeImageExportTasks(input *DescribeImageExportTasksInput) (*DescribeImageExportTasksOutput, error) {
- inURL := "/"
- endpoint := "DescribeImageExportTasks"
- output := &DescribeImageExportTasksOutput{}
-
- if input == nil {
- input = &DescribeImageExportTasksInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribeSnapshots(input *DescribeSnapshotsInput) (*DescribeSnapshotsOutput, error) {
- inURL := "/"
- endpoint := "DescribeSnapshots"
- output := &DescribeSnapshotsOutput{}
-
- if input == nil {
- input = &DescribeSnapshotsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribePrefixLists(input *DescribePrefixListsInput) (*DescribePrefixListsOutput, error) {
- inURL := "/"
- endpoint := "DescribePrefixLists"
- output := &DescribePrefixListsOutput{}
-
- if input == nil {
- input = &DescribePrefixListsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeQuotas(input *DescribeQuotasInput) (*DescribeQuotasOutput, error) {
- inURL := "/"
- endpoint := "DescribeQuotas"
- output := &DescribeQuotasOutput{}
-
- if input == nil {
- input = &DescribeQuotasInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) CreateSnapshotExportTask(input *CreateSnapshotExportTaskInput) (*CreateSnapshotExportTaskOutput, error) {
- inURL := "/"
- endpoint := "CreateSnapshotExportTask"
- output := &CreateSnapshotExportTaskOutput{}
-
- if input == nil {
- input = &CreateSnapshotExportTaskInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeRegions(input *DescribeRegionsInput) (*DescribeRegionsOutput, error) {
- inURL := "/"
- endpoint := "DescribeRegions"
- output := &DescribeRegionsOutput{}
-
- if input == nil {
- input = &DescribeRegionsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateNetworkInterface(input *CreateNetworkInterfaceInput) (*CreateNetworkInterfaceOutput, error) {
- inURL := "/"
- endpoint := "CreateNetworkInterface"
- output := &CreateNetworkInterfaceOutput{}
-
- if input == nil {
- input = &CreateNetworkInterfaceInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribeSnapshotExportTasks(input *DescribeSnapshotExportTasksInput) (*DescribeSnapshotExportTasksOutput, error) {
- inURL := "/"
- endpoint := "DescribeSnapshotExportTasks"
- output := &DescribeSnapshotExportTasksOutput{}
-
- if input == nil {
- input = &DescribeSnapshotExportTasksInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteNetworkInterface(input *DeleteNetworkInterfaceInput) (*DeleteNetworkInterfaceOutput, error) {
- inURL := "/"
- endpoint := "DeleteNetworkInterface"
- output := &DeleteNetworkInterfaceOutput{}
-
- if input == nil {
- input = &DeleteNetworkInterfaceInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) CreateSnapshot(input *CreateSnapshotInput) (*Snapshot, error) {
- inURL := "/"
- endpoint := "CreateSnapshot"
- output := &Snapshot{}
-
- if input == nil {
- input = &CreateSnapshotInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribeProductTypes(input *DescribeProductTypesInput) (*DescribeProductTypesOutput, error) {
- inURL := "/"
- endpoint := "DescribeProductTypes"
- output := &DescribeProductTypesOutput{}
-
- if input == nil {
- input = &DescribeProductTypesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeNetworkInterfaces(input *DescribeNetworkInterfacesInput) (*DescribeNetworkInterfacesOutput, error) {
- inURL := "/"
- endpoint := "DescribeNetworkInterfaces"
- output := &DescribeNetworkInterfacesOutput{}
-
- if input == nil {
- input = &DescribeNetworkInterfacesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribeReservedInstances(input *DescribeReservedInstancesInput) (*DescribeReservedInstancesOutput, error) {
- inURL := "/"
- endpoint := "DescribeReservedInstances"
- output := &DescribeReservedInstancesOutput{}
-
- if input == nil {
- input = &DescribeReservedInstancesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-
-func (v VMOperations) ModifyNetworkInterfaceAttribute(input *ModifyNetworkInterfaceAttributeInput) (*ModifyNetworkInterfaceAttributeOutput, error) {
-
- inURL := "/"
- endpoint := "ModifyNetworkInterfaceAttribute"
- output := &ModifyNetworkInterfaceAttributeOutput{}
-
- if input == nil {
- input = &ModifyNetworkInterfaceAttributeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeInstanceTypes(input *DescribeInstanceTypesInput) (*DescribeInstanceTypesOutput, error) {
- inURL := "/"
- endpoint := "DescribeInstanceTypes"
- output := &DescribeInstanceTypesOutput{}
-
- if input == nil {
- input = &DescribeInstanceTypesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DeleteSnapshot(input *DeleteSnapshotInput) (*DeleteSnapshotOutput, error) {
- inURL := "/"
- endpoint := "DeleteSnapshot"
- output := &DeleteSnapshotOutput{}
-
- if input == nil {
- input = &DeleteSnapshotInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribeReservedInstancesOfferings(input *DescribeReservedInstancesOfferingsInput) (*DescribeReservedInstancesOfferingsOutput, error) {
- inURL := "/"
- endpoint := "DescribeReservedInstancesOfferings"
- output := &DescribeReservedInstancesOfferingsOutput{}
-
- if input == nil {
- input = &DescribeReservedInstancesOfferingsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeNetworkInterfaceAttribute(input *DescribeNetworkInterfaceAttributeInput) (*DescribeNetworkInterfaceAttributeOutput, error) {
- inURL := "/"
- endpoint := "DescribeNetworkInterfaceAttribute"
- output := &DescribeNetworkInterfaceAttributeOutput{}
-
- if input == nil {
- input = &DescribeNetworkInterfaceAttributeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DescribeImageAttribute(input *DescribeImageAttributeInput) (*DescribeImageAttributeOutput, error) {
- inURL := "/"
- endpoint := "DescribeImageAttribute"
- output := &DescribeImageAttributeOutput{}
-
- if input == nil {
- input = &DescribeImageAttributeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DetachNetworkInterface(input *DetachNetworkInterfaceInput) (*DetachNetworkInterfaceOutput, error) {
- inURL := "/"
- endpoint := "DetachNetworkInterface"
- output := &DetachNetworkInterfaceOutput{}
-
- if input == nil {
- input = &DetachNetworkInterfaceInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) CreateVpcPeeringConnection(input *CreateVpcPeeringConnectionInput) (*CreateVpcPeeringConnectionOutput, error) {
- inURL := "/"
- endpoint := "CreateVpcPeeringConnection"
- output := &CreateVpcPeeringConnectionOutput{}
-
- if input == nil {
- input = &CreateVpcPeeringConnectionInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-
-func (v VMOperations) AttachNetworkInterface(input *AttachNetworkInterfaceInput) (*AttachNetworkInterfaceOutput, error) {
- inURL := "/"
- endpoint := "AttachNetworkInterface"
- output := &AttachNetworkInterfaceOutput{}
-
- if input == nil {
- input = &AttachNetworkInterfaceInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-
-func (v VMOperations) DescribeVpcPeeringConnections(input *DescribeVpcPeeringConnectionsInput) (*DescribeVpcPeeringConnectionsOutput, error) {
- inURL := "/"
- endpoint := "DescribeVpcPeeringConnections"
- output := &DescribeVpcPeeringConnectionsOutput{}
-
- if input == nil {
- input = &DescribeVpcPeeringConnectionsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AcceptVpcPeeringConnection(input *AcceptVpcPeeringConnectionInput) (*AcceptVpcPeeringConnectionOutput, error) {
- inURL := "/"
- endpoint := "AcceptVpcPeeringConnection"
- output := &AcceptVpcPeeringConnectionOutput{}
-
- if input == nil {
- input = &AcceptVpcPeeringConnectionInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) AssignPrivateIpAddresses(input *AssignPrivateIpAddressesInput) (*AssignPrivateIpAddressesOutput, error) {
- inURL := "/"
- endpoint := "AssignPrivateIpAddresses"
- output := &AssignPrivateIpAddressesOutput{}
-
- if input == nil {
- input = &AssignPrivateIpAddressesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) ModifyVpcPeeringConnectionOptions(input *ModifyVpcPeeringConnectionOptionsInput) (*ModifyVpcPeeringConnectionOptionsOutput, error) {
- inURL := "/"
- endpoint := "ModifyVpcPeeringConnectionOptions"
- output := &ModifyVpcPeeringConnectionOptionsOutput{}
-
- if input == nil {
- input = &ModifyVpcPeeringConnectionOptionsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) UnassignPrivateIpAddresses(input *UnassignPrivateIpAddressesInput) (*UnassignPrivateIpAddressesOutput, error) {
- inURL := "/"
- endpoint := "UnassignPrivateIpAddresses"
- output := &UnassignPrivateIpAddressesOutput{}
-
- if input == nil {
- input = &UnassignPrivateIpAddressesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-func (v VMOperations) DeleteVpcPeeringConnection(input *DeleteVpcPeeringConnectionInput) (*DeleteVpcPeeringConnectionOutput, error) {
- inURL := "/"
- endpoint := "DeleteVpcPeeringConnection"
- output := &DeleteVpcPeeringConnectionOutput{}
-
- if input == nil {
- input = &DeleteVpcPeeringConnectionInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) PurchaseReservedInstancesOffering(input *PurchaseReservedInstancesOfferingInput) (*PurchaseReservedInstancesOfferingOutput, error) {
- inURL := "/"
- endpoint := "PurchaseReservedInstancesOffering"
- output := &PurchaseReservedInstancesOfferingOutput{}
-
- if input == nil {
- input = &PurchaseReservedInstancesOfferingInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-// CreateVpcEndpoint creates a VPC endpoint.
-func (v VMOperations) CreateVpcEndpoint(input *CreateVpcEndpointInput) (*CreateVpcEndpointOutput, error) {
- inURL := "/"
- endpoint := "CreateVpcEndpoint"
- output := &CreateVpcEndpointOutput{}
-
- if input == nil {
- input = &CreateVpcEndpointInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-
-// DescribeVpcEndpoints describes the specified VPC endpoints.
-func (v VMOperations) DescribeVpcEndpoints(input *DescribeVpcEndpointsInput) (*DescribeVpcEndpointsOutput, error) {
- inURL := "/"
- endpoint := "DescribeVpcEndpoints"
- output := &DescribeVpcEndpointsOutput{}
-
- if input == nil {
- input = &DescribeVpcEndpointsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-// ModifyVpcEndpoint modifies the attributes of the specified VPC endpoint.
-func (v VMOperations) ModifyVpcEndpoint(input *ModifyVpcEndpointInput) (*ModifyVpcEndpointOutput, error) {
- inURL := "/"
- endpoint := "ModifyVpcEndpoint"
- output := &ModifyVpcEndpointOutput{}
-
- if input == nil {
- input = &ModifyVpcEndpointInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-// DeleteVpcEndpoints deletes the specified VPC endpoints.
-func (v VMOperations) DeleteVpcEndpoints(input *DeleteVpcEndpointsInput) (*DeleteVpcEndpointsOutput, error) {
- inURL := "/"
- endpoint := "DeleteVpcEndpoints"
- output := &DeleteVpcEndpointsOutput{}
-
- if input == nil {
- input = &DeleteVpcEndpointsInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-// DescribeVpcEndpointServices lists the services that can be used when creating a VPC endpoint.
-func (v VMOperations) DescribeVpcEndpointServices(input *DescribeVpcEndpointServicesInput) (*DescribeVpcEndpointServicesOutput, error) {
- inURL := "/"
- endpoint := "DescribeVpcEndpointServices"
- output := &DescribeVpcEndpointServicesOutput{}
-
- if input == nil {
- input = &DescribeVpcEndpointServicesInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) ModifySnapshotAttribute(input *ModifySnapshotAttributeInput) (*ModifySnapshotAttributeOutput, error) {
- inURL := "/"
- endpoint := "ModifySnapshotAttribute"
- output := &ModifySnapshotAttributeOutput{}
-
- if input == nil {
- input = &ModifySnapshotAttributeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) DescribeSnapshotAttribute(input *DescribeSnapshotAttributeInput) (*DescribeSnapshotAttributeOutput, error) {
- inURL := "/"
- endpoint := "DescribeSnapshotAttribute"
- output := &DescribeSnapshotAttributeOutput{}
-
- if input == nil {
- input = &DescribeSnapshotAttributeInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-
-func (v VMOperations) ImportSnapshot(input *ImportSnapshotInput) (*ImportSnapshotOutput, error) {
- inURL := "/"
- endpoint := "ImportSnapshot"
- output := &ImportSnapshotOutput{}
-
- if input == nil {
- input = &ImportSnapshotInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-
-}
-func (v VMOperations) CopySnapshot(input *CopySnapshotInput) (*CopySnapshotOutput, error) {
- inURL := "/"
- endpoint := "CopySnapshot"
- output := &CopySnapshotOutput{}
-
- if input == nil {
- input = &CopySnapshotInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
-
-func (v VMOperations) CreateImage(input *CreateImageInput) (*CreateImageOutput, error) {
- inURL := "/"
- endpoint := "CreateImage"
- output := &CreateImageOutput{}
-
- if input == nil {
- input = &CreateImageInput{}
- }
- req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, inURL, input)
-
- if err != nil {
- return nil, err
- }
-
- err = v.client.Do(context.TODO(), req, output)
- if err != nil {
- return nil, err
- }
-
- return output, nil
-}
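Every method removed from vm.go above repeats the same NewRequest/Do boilerplate around a different endpoint name and output type. The sketch below shows how that repetition could have been factored out; it is an illustration only, not code from this repository, and doCall and describeVolumesViaHelper are hypothetical names, while v.client.NewRequest and v.client.Do are the calls used throughout the removed file.

// doCall is a hypothetical helper capturing the request pattern shared by all
// of the removed VMOperations methods: build a GET request against "/" for the
// named endpoint, execute it, and decode the response into output.
func (v VMOperations) doCall(endpoint string, input, output interface{}) error {
	req, err := v.client.NewRequest(context.TODO(), endpoint, http.MethodGet, "/", input)
	if err != nil {
		return err
	}
	return v.client.Do(context.TODO(), req, output)
}

// describeVolumesViaHelper shows DescribeVolumes re-expressed with the helper.
func (v VMOperations) describeVolumesViaHelper(input *DescribeVolumesInput) (*DescribeVolumesOutput, error) {
	if input == nil {
		input = &DescribeVolumesInput{}
	}
	output := &DescribeVolumesOutput{}
	if err := v.doCall("DescribeVolumes", input, output); err != nil {
		return nil, err
	}
	return output, nil
}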
diff --git a/osc/fcu/vm_test.go b/osc/fcu/vm_test.go
deleted file mode 100644
index fe831b047..000000000
--- a/osc/fcu/vm_test.go
+++ /dev/null
@@ -1,500 +0,0 @@
-package fcu
-
-import (
- "context"
- "fmt"
- "net/http"
- "net/http/httptest"
- "net/url"
- "testing"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/terraform-providers/terraform-provider-outscale/osc"
-)
-
-var (
- mux *http.ServeMux
-
- ctx = context.TODO()
-
- client *Client
-
- server *httptest.Server
-)
-
-func setup() {
- mux = http.NewServeMux()
- server = httptest.NewServer(mux)
-
- config := osc.Config{
- Credentials: &osc.Credentials{
- AccessKey: "AKID",
- SecretKey: "SecretKey",
- Region: "region",
- },
- }
-
- client, _ = NewFCUClient(config)
-
- u, _ := url.Parse(server.URL)
- client.client.Config.BaseURL = u
-
-}
-
-func teardown() {
- server.Close()
-}
-
-func TestVM_RunInstance(t *testing.T) {
- setup()
- defer teardown()
-
- var maxC int64
- imageID := "ami-8a6a0120"
- maxC = 1
-
- input := &RunInstancesInput{
- ImageId: &imageID,
- MaxCount: &maxC,
- MinCount: &maxC,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- // Mock RunInstances XML response body (markup omitted); key fields preserved below.
- fmt.Fprintf(w, `requestId: b07d2bff-536a-4bc2-b528-da9d008638e0, reservationId: r-b4f82c1c,
- ownerId: 520679080430, groupId: sg-1385300f (default), instanceId: i-0e8ea0a2, imageId: ami-8a6a0120,
- state: pending, instanceType: t2.micro, placement: eu-west-2a, subnetId: subnet-861fbecc,
- vpcId: vpc-e9d09d63, privateIpAddress: 10.0.1.155, rootVolume: vol-9454b3cc (/dev/sda1),
- networkInterface: eni-33a7d022`)
- })
-
- server, err := client.VM.RunInstance(input)
- if err != nil {
- t.Errorf("VM.RunInstance returned error: %v", err)
- }
-
- instanceID := *server.Instances[0].InstanceId
- expectedID := "i-0e8ea0a2"
-
- if instanceID != expectedID {
-		t.Fatalf("Expected InstanceID:(%s), Got(%s)", expectedID, instanceID)
- }
-
-}
-
-func TestDescribe_Instance(t *testing.T) {
- setup()
- defer teardown()
-
- instanceID := "i-d470ce8f"
-
- input := DescribeInstancesInput{
- InstanceIds: []*string{&instanceID},
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
- 16d09939-f72c-471c-99ae-6704d888197c- r-3cedd89e520679080430
- sg-6ed31f3edefault
- i-bee7ebf3ami-8a6a0120
-0
-pendingip-10-9-8-166.eu-west-2.compute.internal0m1.small2018-02-08T01:46:45.269Zeu-west-2adefaultdisabled10.9.8.166- sg-6ed31f3edefault
-x86_64ebs/dev/sda1- /dev/sda1vol-f65f0614attaching2018-02-08T01:46:45.269Ztrue
-hvmxenfalse
- `)
- })
-
- desc, err := client.VM.DescribeInstances(&input)
- if err != nil {
-		t.Errorf("VM.DescribeInstances returned error: %v", err)
- }
-
- expectedID := "i-bee7ebf3"
- outputInstanceID := *desc.Reservations[0].Instances[0].InstanceId
-
- if outputInstanceID != expectedID {
-		t.Fatalf("Expected InstanceID:(%s), Got(%s)", expectedID, outputInstanceID)
- }
-
-}
-
-func TestVM_GetPasswordData(t *testing.T) {
- setup()
- defer teardown()
-
- instanceID := "i-9c1b9711"
-
- input := GetPasswordDataInput{
- InstanceId: &instanceID,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
- ce00bd1d-9f3f-4bfd-be6b-1b7b73454c20i-9c1b97112018-02-08T02:46:15.789Z
- `)
- })
-
- term, err := client.VM.GetPasswordData(&input)
- if err != nil {
- t.Errorf("VM.GetPasswordData returned error: %v", err)
- }
-
- expectedID := "i-9c1b9711"
- outputInstanceID := *term.InstanceId
-
- if outputInstanceID != expectedID {
-		t.Fatalf("Expected InstanceID:(%s), Got(%s)", expectedID, outputInstanceID)
- }
-}
-
-func TestVM_ModifyInstanceKeyPair(t *testing.T) {
- t.Skip()
- setup()
- defer teardown()
-
- instanceID := "i-484e76e2"
- keypair := "testkey"
-
- input := ModifyInstanceKeyPairInput{
- InstanceId: &instanceID,
- KeyName: &keypair,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, ``)
- })
-
- err := client.VM.ModifyInstanceKeyPair(&input)
- if err != nil {
- t.Errorf("VM.ModifyInstanceKeyPair returned error: %v", err)
- }
-}
-
-func TestVM_TerminateInstances(t *testing.T) {
- setup()
- defer teardown()
-
- instanceID := "i-484e76e2"
-
- input := TerminateInstancesInput{
- InstanceIds: []*string{&instanceID},
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
- f508de7e-fe4b-4572-a977-e74efb9f3b76- i-484e76e2
-32
-shutting-down0
-pending
- `)
- })
-
- term, err := client.VM.TerminateInstances(&input)
- if err != nil {
-		t.Errorf("VM.TerminateInstances returned error: %v", err)
- }
-
- expectedID := "i-484e76e2"
- outputInstanceID := *term.TerminatingInstances[0].InstanceId
-
- if outputInstanceID != expectedID {
-		t.Fatalf("Expected InstanceID:(%s), Got(%s)", expectedID, outputInstanceID)
- }
-}
-
-func TestVM_ModifyInstanceAttribute(t *testing.T) {
- setup()
- defer teardown()
-
- instanceID := "i-d742ed97"
-
- input := ModifyInstanceAttributeInput{
- InstanceId: aws.String(instanceID),
- DisableApiTermination: &AttributeBooleanValue{
- Value: aws.Bool(false),
- },
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
-
- f508de7e-fe4b-4572-a977-e74efb9f3b76
- true
-
- `)
- })
-
- _, err := client.VM.ModifyInstanceAttribute(&input)
- if err != nil {
- t.Errorf("VM.ModifyInstanceAttribute returned error: %v", err)
- }
-}
-
-func TestVM_StopInstances(t *testing.T) {
- setup()
- defer teardown()
-
- instanceID := "i-d742ed97"
-
- input := StopInstancesInput{
- InstanceIds: []*string{aws.String(instanceID)},
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
-
- f508de7e-fe4b-4572-a977-e74efb9f3b76
-
- -
- i-d742ed97
-
-
-64
- stopping
-
-
- 16
- running
-
-
-
-
- `)
- })
-
- _, err := client.VM.StopInstances(&input)
- if err != nil {
- t.Errorf("VM.StopInstances returned error: %v", err)
- }
-}
-
-func TestVM_StartInstances(t *testing.T) {
- setup()
- defer teardown()
-
- instanceID := "i-d742ed97"
-
- input := StartInstancesInput{
- InstanceIds: []*string{aws.String(instanceID)},
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
-
- f508de7e-fe4b-4572-a977-e74efb9f3b76
-
- -
- i-d742ed97
-
-
-0
- pending
-
-
- 80
- pending
-
-
-
-
- `)
- })
-
- _, err := client.VM.StartInstances(&input)
- if err != nil {
- t.Errorf("VM.StartInstances returned error: %v", err)
- }
-}
-
-func TestVM_GetOwnerId(t *testing.T) {
- setup()
- defer teardown()
-
- var maxC int64
- imageID := "ami-8a6a0120"
- maxC = 1
-
- input := &RunInstancesInput{
- ImageId: &imageID,
- MaxCount: &maxC,
- MinCount: &maxC,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
- 193ddebf-63d4-466d-9fe1-d5b74b9962f0r-071eb05d520679080430- sg-6ed31f3edefault
- i-d470ce8fami-8a6a0120
-0
-pendingip-10-9-10-212.eu-west-2.compute.internal0m1.small2018-02-08T00:51:38.866Zeu-west-2adefaultdisabled10.9.10.212- sg-6ed31f3edefault
-x86_64ebs/dev/sda1- /dev/sda1vol-ee2f2a14attaching2018-02-08T00:51:38.866Ztrue
-hvmxenfalse
- `)
- })
-
- server, err := client.VM.RunInstance(input)
- if err != nil {
- t.Errorf("VM.RunInstance returned error: %v", err)
- }
-
- ownerID := *server.OwnerId
- expectedOwnerID := "520679080430"
-
- if ownerID != expectedOwnerID {
-		t.Fatalf("Expected OwnerID:(%s), Got(%s)", expectedOwnerID, ownerID)
- }
-}
-
-func TestVM_GetRequesterID(t *testing.T) {
- setup()
- defer teardown()
-
- var maxC int64
- imageID := "ami-8a6a0120"
- maxC = 1
-
- input := &RunInstancesInput{
- ImageId: &imageID,
- MaxCount: &maxC,
- MinCount: &maxC,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
- 193ddebf-63d4-466d-9fe1-d5b74b9962f0r-071eb05d520679080430- sg-6ed31f3edefault
- i-d470ce8fami-8a6a0120
-0
-pendingip-10-9-10-212.eu-west-2.compute.internal0m1.small2018-02-08T00:51:38.866Zeu-west-2adefaultdisabled10.9.10.212- sg-6ed31f3edefault
-x86_64ebs/dev/sda1- /dev/sda1vol-ee2f2a14attaching2018-02-08T00:51:38.866Ztrue
-hvmxenfalse
- `)
- })
-
- server, err := client.VM.RunInstance(input)
- if err != nil {
- t.Errorf("VM.RunInstance returned error: %v", err)
- }
-
- requesterID := *server.RequestId
- expectedrequesterID := "193ddebf-63d4-466d-9fe1-d5b74b9962f0"
-
- if requesterID != expectedrequesterID {
-		t.Fatalf("Expected RequestID:(%s), Got(%s)", expectedrequesterID, requesterID)
- }
-}
-func TestVM_GetReservationID(t *testing.T) {
- setup()
- defer teardown()
-
- var maxC int64
- imageID := "ami-8a6a0120"
- maxC = 1
-
- input := &RunInstancesInput{
- ImageId: &imageID,
- MaxCount: &maxC,
- MinCount: &maxC,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
- 193ddebf-63d4-466d-9fe1-d5b74b9962f0r-071eb05d520679080430- sg-6ed31f3edefault
- i-d470ce8fami-8a6a0120
-0
-pendingip-10-9-10-212.eu-west-2.compute.internal0m1.small2018-02-08T00:51:38.866Zeu-west-2adefaultdisabled10.9.10.212- sg-6ed31f3edefault
-x86_64ebs/dev/sda1- /dev/sda1vol-ee2f2a14attaching2018-02-08T00:51:38.866Ztrue
-hvmxenfalse
- `)
- })
-
- server, err := client.VM.RunInstance(input)
- if err != nil {
- t.Errorf("VM.RunInstance returned error: %v", err)
- }
-
- reservationID := *server.ReservationId
- expectedReservationID := "r-071eb05d"
-
- if reservationID != expectedReservationID {
-		t.Fatalf("Expected ReservationID:(%s), Got(%s)", expectedReservationID, reservationID)
- }
-}
-
-func TestVM_CreateKeyPair(t *testing.T) {
- setup()
- defer teardown()
-
- keyName := "tf-acc-key-pair"
-
- input := &CreateKeyPairInput{
- KeyName: &keyName,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
-f3e8ff89-cf5d-4d39-a36f-d32fc213bee0tf-acc-key-pair90:f3:02:7e:00:03:c6:72:77:fd:dd:46:6f:1e:80:90key-body`)
- })
-
- key, err := client.VM.CreateKeyPair(input)
- if err != nil {
-		t.Errorf("VM.CreateKeyPair returned error: %v", err)
- }
-
- expectedFingerPrint := "90:f3:02:7e:00:03:c6:72:77:fd:dd:46:6f:1e:80:90"
- expectedKeyMaterial := "key-body"
-
- if keyName != *key.KeyName {
- t.Fatalf("Expected KeyName:(%s), Got(%s)", keyName, *key.KeyName)
- }
- if *key.KeyFingerprint != expectedFingerPrint {
-		t.Fatalf("Expected FingerPrint:(%s), Got(%s)", expectedFingerPrint, *key.KeyFingerprint)
- }
- if *key.KeyMaterial != expectedKeyMaterial {
-		t.Fatalf("Expected KeyMaterial:(%s), Got(%s)", expectedKeyMaterial, *key.KeyMaterial)
- }
-}
-
-func TestVM_DescribeKeyPair(t *testing.T) {
- setup()
- defer teardown()
-
- keyName := "tf-acc-key-pair"
- requestID := "4c534b1d-80dc-4778-a075-9d6f8d6ba22e"
-
- input := &DescribeKeyPairsInput{
- KeyNames: []*string{&keyName},
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
-4c534b1d-80dc-4778-a075-9d6f8d6ba22e- tf-acc-key-pair90:f3:02:7e:00:03:c6:72:77:fd:dd:46:6f:1e:80:90
- `)
- })
-
- key, err := client.VM.DescribeKeyPairs(input)
- if err != nil {
-		t.Errorf("VM.DescribeKeyPairs returned error: %v", err)
- }
-
- expectedFingerPrint := "90:f3:02:7e:00:03:c6:72:77:fd:dd:46:6f:1e:80:90"
-
- if keyName != *key.KeyPairs[0].KeyName {
- t.Fatalf("Expected KeyName:(%s), Got(%s)", keyName, *key.KeyPairs[0].KeyName)
- }
- if expectedFingerPrint != *key.KeyPairs[0].KeyFingerprint {
- t.Fatalf("Expected FingerPrint:(%s), Got(%s)", expectedFingerPrint, *key.KeyPairs[0].KeyFingerprint)
- }
- if requestID != *key.RequestId {
- t.Fatalf("Expected RequestId:(%s), Got(%s)", requestID, *key.RequestId)
- }
-}
-
-func TestVM_DeleteKeyPair(t *testing.T) {
- setup()
- defer teardown()
-
-	// The key pair name to delete
- keyName := "tf-acc-key-pair"
-
- input := &DeleteKeyPairInput{
- KeyName: &keyName,
- }
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
-
- fmt.Fprintf(w, `
-476a1739-406d-48c2-8189-c5939cf617a9true`)
- })
-
- _, err := client.VM.DeleteKeyPairs(input)
- if err != nil {
-		t.Errorf("VM.DeleteKeyPairs returned error: %v", err)
- }
-
-}
diff --git a/osc/handler/build_request_url_encode_request_test.go b/osc/handler/build_request_url_encode_request_test.go
deleted file mode 100644
index 3cfea0e8f..000000000
--- a/osc/handler/build_request_url_encode_request_test.go
+++ /dev/null
@@ -1,34 +0,0 @@
-package handler
-
-import (
- "net/http"
- "testing"
-)
-
-func TestBuildURLEncodedRequest(t *testing.T) {
- input := "Action=DescribeInstances&InstanceId.1=i-76536489&Version=2017-12-15"
- inputURL := "http://localhost"
-
- // Test Post
- _, _, err := BuildURLEncodedRequest(input, http.MethodPost, inputURL)
- if err != nil {
- t.Fatalf("Got error(%s)", err)
- }
-
- // Test Get
- req, _, err := BuildURLEncodedRequest(input, http.MethodGet, inputURL)
- if err != nil {
- t.Fatalf("Got error(%s)", err)
- }
-
- if req.URL.RawQuery != input {
- t.Fatalf("req.URL.RawQuery(%s) Got(%s)", req.URL.RawQuery, input)
- }
-
- // Test Unsupported
- _, _, err = BuildURLEncodedRequest(input, http.MethodDelete, inputURL)
- if err == nil {
-		t.Fatalf("Expected an error for the unsupported method, got nil")
- }
-
-}
diff --git a/osc/handler/build_url_request_encode_request.go b/osc/handler/build_url_request_encode_request.go
deleted file mode 100644
index f2829e476..000000000
--- a/osc/handler/build_url_request_encode_request.go
+++ /dev/null
@@ -1,35 +0,0 @@
-package handler
-
-import (
- "fmt"
- "io"
- "net/http"
- "strings"
-)
-
-// BuildURLEncodedRequest builds the request for the given body: if the method is POST,
-// the body is added to the request body; otherwise it is appended to the URL query.
-func BuildURLEncodedRequest(body interface{}, method, url string) (*http.Request, io.ReadSeeker, error) {
-
- if method == http.MethodPost {
- reader := strings.NewReader(body.(string))
- req, err := http.NewRequest(method, url, reader)
- if err != nil {
- return nil, nil, err
- }
- return req, reader, nil
- }
-
- if method == http.MethodGet {
- req, err := http.NewRequest(method, url, nil)
- if err != nil {
- return nil, nil, err
- }
-
- req.URL.RawQuery = body.(string)
-
- return req, nil, nil
-
- }
- return nil, nil, fmt.Errorf("Method %s not supported", method)
-}
diff --git a/osc/handler/check_xml_response.go b/osc/handler/check_xml_response.go
deleted file mode 100644
index ea45049b4..000000000
--- a/osc/handler/check_xml_response.go
+++ /dev/null
@@ -1,104 +0,0 @@
-package handler
-
-import (
- "bytes"
- "encoding/json"
- "encoding/xml"
- "errors"
- "fmt"
- "io/ioutil"
- "net/http"
- "strings"
-
- "github.com/aws/aws-sdk-go/private/protocol/xml/xmlutil"
-)
-
-// UnmarshalXML unmarshals a response body for the XML protocol.
-func UnmarshalXML(v interface{}, r *http.Response, operation string) error {
-
- defer r.Body.Close()
-
- if v == nil {
- return nil
- }
-
- decoder := xml.NewDecoder(r.Body)
- err := xmlutil.UnmarshalXML(v, decoder, "")
-
- return sendError(err)
-}
-
-// UnmarshalLBUXML unmarshals a response body for the XML protocol.
-func UnmarshalLBUXML(v interface{}, r *http.Response, operation string) error {
-
- defer r.Body.Close()
-
- if v == nil {
- return nil
- }
-
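-	// operation is the raw query string (e.g. "Action=CreateFoo&..."); strip the
-	// leading "Action=" (7 characters) and everything after the first "&" to get the name.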
- operationName := operation[7:strings.Index(operation, "&")]
-
- decoder := xml.NewDecoder(r.Body)
- err := xmlutil.UnmarshalXML(v, decoder, operationName+"Result")
-
- return sendError(err)
-}
-
-//UnmarshalDLHandler ...
-func UnmarshalDLHandler(v interface{}, r *http.Response, operation string) error {
- defer r.Body.Close()
-
- j := struct {
- RequestID string `json:"RequestId" type:"string"`
- }{
- r.Header.Get("X-Amz-Requestid"),
- }
-
- err := json.NewDecoder(r.Body).Decode(v)
- if err != nil {
- return err
- }
-
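-	// Merge the RequestId taken from the response header into the decoded payload,
-	// then unmarshal the combined JSON back into v.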
- o := commonStuctre(j, v)
-
- err = json.Unmarshal([]byte(o), v)
- if err != nil {
- return err
- }
-
- return err
-
-}
-
-//UnmarshalICUHandler ...
-func UnmarshalICUHandler(v interface{}, r *http.Response, operation string) error {
- defer r.Body.Close()
-
- err := json.NewDecoder(r.Body).Decode(v)
- if err != nil {
- return err
- }
-
- return err
-}
-
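-// debugResponse prints the raw response body for debugging and restores r.Body
-// with a fresh reader so callers can still consume it.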
-func debugResponse(r *http.Response) {
-
- var bodyBytes []byte
- if r.Body != nil {
- bodyBytes, _ = ioutil.ReadAll(r.Body)
- }
-
- r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))
- bodyString := string(bodyBytes)
- fmt.Println(bodyString)
-}
-
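-// sendError wraps any decoding error in a SerializationError message; a nil error passes through.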
-func sendError(err error) error {
- if err != nil {
-		return errors.New("SerializationError: failed decoding query response: " + fmt.Sprint(err))
- }
-
- return nil
-}
diff --git a/osc/handler/check_xml_response_test.go b/osc/handler/check_xml_response_test.go
deleted file mode 100644
index 35c56b936..000000000
--- a/osc/handler/check_xml_response_test.go
+++ /dev/null
@@ -1,54 +0,0 @@
-package handler
-
-import (
- "bytes"
- "io/ioutil"
- "log"
- "net/http"
- "testing"
-)
-
-func TestUnmarshalXML(t *testing.T) {
- buf := bytes.NewReader([]byte("myname123falsetrue1.21.3200arequest-id"))
- res := &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
- var v struct{}
- if err := UnmarshalXML(&v, res, ""); err != nil {
- t.Fatalf("err: %s", err)
- }
-}
-
-type LBUResponse struct {
- CreateAppCookieStickinessPolicyResult *CreateAppCookieStickinessPolicyResult `type:"structure"`
- ResponseMetadata *ResponseMetadata `type:"structure"`
-}
-
-type CreateAppCookieStickinessPolicyResult struct {
- _ struct{} `type:"structure"`
-}
-
-// ResponseMetadata ...
-type ResponseMetadata struct {
- RequestID *string `locationName:"RequestId" type:"string"`
-}
-
-func TestUnmarshalXMLRequestMetadata(t *testing.T) {
- buf := bytes.NewReader([]byte(`fbb49983-45f5-4284-8c8f-52b7f180946a`))
- res := &http.Response{StatusCode: 200, Body: ioutil.NopCloser(buf), Header: http.Header{}}
-
- v := LBUResponse{}
-
- if err := UnmarshalXML(&v, res, "Action=CreateAppCookieStickinessPolicy&CookieName=MyOtherAppCookie&LoadBalancerName=tf-test-lb-tqi13&PolicyName=foo-policy&Version=2018-05-14"); err != nil {
- t.Fatalf("err: %s", err)
- }
-
-	if v.ResponseMetadata == nil {
-		t.Fatalf("Cannot unmarshal with the struct %+v", v)
-	}
-
-	if v.ResponseMetadata.RequestID == nil {
-		t.Fatalf("Cannot unmarshal ResponseMetadata %+v", v.ResponseMetadata)
-	}
-
- log.Printf("[DEBUG] %s\n", *v.ResponseMetadata.RequestID)
-
-}
diff --git a/osc/handler/error_handler.go b/osc/handler/error_handler.go
deleted file mode 100644
index a19497aed..000000000
--- a/osc/handler/error_handler.go
+++ /dev/null
@@ -1,101 +0,0 @@
-package handler
-
-import (
- "encoding/json"
- "encoding/xml"
- "fmt"
- "io/ioutil"
- "net/http"
-)
-
-// ErrMsg Error lists available
-var ErrMsg = map[string]string{
-	"SerializationError": "unable to unmarshal EC2 metadata error response",
- "HTTP": "HTTP Error",
-}
-
-// Error ...
-type Error struct {
- Code string `xml:"Code"`
- Message string `xml:"Message"`
-}
-
-// XMLError ...
-type XMLError struct {
- XMLName xml.Name `xml:"Response"`
- Errors []Error `xml:"Errors>Error"`
- RequestID string `xml:"RequestID"`
-}
-
-// XMLLBUError ...
-type XMLLBUError struct {
- XMLName xml.Name `xml:"ErrorResponse"`
- Errors Error `xml:"Error"`
- RequestID string `xml:"RequestID"`
-}
-
-//JSONICUError ...
-type JSONICUError struct {
- Msj string `json:"message"`
- Type string `json:"__type"`
- Message string `json:"Message"`
-}
-
-// UnmarshalErrorHandler for HTTP Response
-func UnmarshalErrorHandler(r *http.Response) error {
- defer r.Body.Close()
- v := XMLError{}
-
- data, err := ioutil.ReadAll(r.Body)
- if err != nil {
- return fmt.Errorf("Read body: %v", err)
- }
-
- err = xml.Unmarshal(data, &v)
- if err != nil {
- return fmt.Errorf("error unmarshalling response %v", err)
- }
-
- // Response body format is not consistent between metadata endpoints.
- // Grab the error message as a string and include that as the source error
- return fmt.Errorf("%s: %s", v.Errors[0].Code, v.Errors[0].Message)
-}
-
-// UnmarshalJSONErrorHandler for HTTP Response
-func UnmarshalJSONErrorHandler(r *http.Response) error {
- defer r.Body.Close()
- v := JSONICUError{}
-
- data, err := ioutil.ReadAll(r.Body)
- if err != nil {
- return fmt.Errorf("Read body: %v", err)
- }
-
- err = json.Unmarshal(data, &v)
- if err != nil {
- return fmt.Errorf("error unmarshalling response %v", err)
- }
-
- // Response body format is not consistent between metadata endpoints.
- // Grab the error message as a string and include that as the source error
- return fmt.Errorf("%s: %s", v.Type, v.Message)
-}
-
-// UnmarshalLBUErrorHandler for HTTP Response
-func UnmarshalLBUErrorHandler(r *http.Response) error {
- defer r.Body.Close()
- v := XMLLBUError{}
-
- data, err := ioutil.ReadAll(r.Body)
- if err != nil {
- return fmt.Errorf("Read body: %v", err)
- }
-
- err = xml.Unmarshal(data, &v)
- if err != nil {
- return fmt.Errorf("error unmarshalling response %v", err)
- }
-
- return fmt.Errorf("%s: %s", v.Errors.Code, v.Errors.Message)
-
-}
diff --git a/osc/handler/error_handler_test.go b/osc/handler/error_handler_test.go
deleted file mode 100644
index 895f27c81..000000000
--- a/osc/handler/error_handler_test.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package handler
-
-import (
- "bytes"
- "io/ioutil"
- "net/http"
- "testing"
-)
-
-func TestUnmarshallErrorHandler(t *testing.T) {
- data := `
-
-
- MissingParameter
- Mensaje
-
-
-
- `
- test := &http.Response{
- Body: ioutil.NopCloser(bytes.NewReader([]byte(data))),
- }
-
-	if err := UnmarshalErrorHandler(test); err == nil {
-		t.Fatalf("Expected UnmarshalErrorHandler to return an error, got nil")
-	}
-}
diff --git a/osc/handler/jsonify_body.go b/osc/handler/jsonify_body.go
deleted file mode 100644
index fa1d31e25..000000000
--- a/osc/handler/jsonify_body.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package handler
-
-import (
- "encoding/json"
- "log"
-)
-
-// Bind ...
-func Bind(operation string, body interface{}) {}
-
-// BindICU merges the ICU Action, Version and AuthenticationMethod fields into the JSON body.
-func BindICU(operation string, body interface{}) string {
- v := struct {
- Action string `json:"Action"`
- Version string `json:"Version"`
- AuthenticationMethod string `json:"AuthenticationMethod"`
- }{operation, "2018-05-14", "accesskey"}
-
- return commonStuctre(v, body)
-}
-
-// BindDL ...
-func BindDL(operation string, body interface{}) string {
- s, err := json.Marshal(body)
- if err != nil {
- log.Printf("[WARN] Error Marshal: %s", err)
- }
- return string(s)
-}
-
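-// commonStuctre marshals both v and body to JSON, merges them into a single map
-// (fields of v win on collision), and returns the merged object as a JSON string.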
-func commonStuctre(v, body interface{}) string {
- var m map[string]interface{}
-
- jb, _ := json.Marshal(body)
- json.Unmarshal(jb, &m)
-
- ja, _ := json.Marshal(v)
- json.Unmarshal(ja, &m)
-
- jm, _ := json.Marshal(m)
-
- return string(jm)
-}
diff --git a/osc/handler/set_headers.go b/osc/handler/set_headers.go
deleted file mode 100644
index 0ca7072cb..000000000
--- a/osc/handler/set_headers.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package handler
-
-import (
- "fmt"
- "net/http"
-)
-
-const mediaTypeURLEncoded = "application/x-www-form-urlencoded"
-const mediaType = "application/x-amz-json-1.1"
-
-// SetHeaders sets the headers for the request
-func SetHeaders(agent string, req *http.Request, operation string) {
- req.Header.Add("X-Amz-Target", fmt.Sprintf("%s.%s", agent, operation))
- commonHeadres(agent, mediaTypeURLEncoded, req)
-}
-
-// SetHeadersICU sets the headers for the request
-func SetHeadersICU(agent string, req *http.Request, operation string) {
- req.Header.Add("X-Amz-Target", fmt.Sprintf("TinaIcuService.%s", operation))
- commonHeadres(agent, mediaType, req)
-}
-
-// SetHeadersDL sets the headers for the request
-func SetHeadersDL(agent string, req *http.Request, operation string) {
- req.Header.Add("X-Amz-Target", fmt.Sprintf("OvertureService.%s", operation))
- commonHeadres(agent, mediaType, req)
-}
-
-func commonHeadres(agent, media string, req *http.Request) {
- req.Header.Add("User-Agent", agent)
- req.Header.Add("Content-Type", media)
-}
diff --git a/osc/handler/url_encode_handler.go b/osc/handler/url_encode_handler.go
deleted file mode 100644
index 82d3f8780..000000000
--- a/osc/handler/url_encode_handler.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package handler
-
-import (
- "net/url"
-
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/aws/aws-sdk-go/private/protocol/query/queryutil"
-)
-
-// URLEncodeMarshalHander encodes the body to url encode
-func URLEncodeMarshalHander(v interface{}, action, version string) (string, error) {
- return marshal(v, action, version, true)
-}
-
-// URLLBUEncodeMarshalHander ...
-func URLLBUEncodeMarshalHander(v interface{}, action, version string) (string, error) {
- return marshal(v, action, version, false)
-}
-
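-// marshal serializes v plus the Action and Version fields into a URL-encoded query
-// string; the boolean is forwarded to queryutil.Parse to toggle EC2-style flattening.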
-func marshal(v interface{}, action, version string, isLBU bool) (string, error) {
- body := url.Values{
- "Action": {action},
- "Version": {version},
- }
- if err := queryutil.Parse(body, v, isLBU); err != nil {
- return "", awserr.New("SerializationError", "failed encoding query request", err)
- }
-
- return body.Encode(), nil
-}
diff --git a/osc/handler/url_encode_handler_test.go b/osc/handler/url_encode_handler_test.go
deleted file mode 100644
index a630a84dd..000000000
--- a/osc/handler/url_encode_handler_test.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package handler
-
-import (
- "testing"
-)
-
-type InputService3TestShapeStructType struct {
- _ struct{} `type:"structure"`
-
- InstanceIds []*string `locationName:"InstanceId" locationNameList:"InstanceId" type:"list"`
-}
-
-func TestURLEncodeMarshalHander(t *testing.T) {
- expOut := "Action=DescribeInstances&InstanceId.1=i-76536489&Version=2017-12-15"
-
- ID1 := "i-76536489"
- input := &InputService3TestShapeStructType{
- InstanceIds: []*string{&ID1},
- }
-
- res, err := URLEncodeMarshalHander(input, "DescribeInstances", "2017-12-15")
- if err != nil {
- t.Fatalf("Got error: %s", err)
- }
- if res != expOut {
- t.Fatalf("Error Marshal: Got:(%s), Have(%s)", expOut, res)
- }
-}
diff --git a/osc/osc.go b/osc/osc.go
deleted file mode 100644
index 9a8430991..000000000
--- a/osc/osc.go
+++ /dev/null
@@ -1,151 +0,0 @@
-// Package osc ...
-package osc
-
-import (
- "context"
- "io"
- "net/http"
- "net/url"
- "time"
-
- v4 "github.com/aws/aws-sdk-go/aws/signer/v4"
-)
-
-const (
- libraryVersion = "1.0"
- // DefaultBaseURL ...
- DefaultBaseURL = "https://%s.%s.outscale.com"
- opaqueBaseURL = "/%s.%s.outscale.com/%s"
- // UserAgent ...
- UserAgent = "osc/" + libraryVersion
- mediaTypeJSON = "application/json"
- mediaTypeWSDL = "application/wsdl+xml"
- mediaTypeURLEncoded = "application/x-www-form-urlencoded"
- signatureVersion = "4"
-)
-
-// BuildRequestHandler creates a new request and marshals the body depending on the implementation
-type BuildRequestHandler func(v interface{}, method, url string) (*http.Request, io.ReadSeeker, error)
-
-// MarshalHander marshals the incoming body to a desired format
-type MarshalHander func(v interface{}, action, version string) (string, error)
-
-// UnmarshalHandler unmarshals the body request depending on different implementations
-type UnmarshalHandler func(v interface{}, req *http.Response, operation string) error
-
-// UnmarshalErrorHandler unmarshals the errors coming from an http response
-type UnmarshalErrorHandler func(r *http.Response) error
-
-// SetHeaders sets the headers on an outgoing http request
-type SetHeaders func(agent string, req *http.Request, operation string)
-
-// BindBody binds the operation name and body into the outgoing request payload
-type BindBody func(operation string, body interface{}) string
-
-// Client manages the communication between the Outscale API's
-type Client struct {
- Config Config
- Signer *v4.Signer
-
- // Handlers
- MarshalHander MarshalHander
- BuildRequestHandler BuildRequestHandler
- UnmarshalHandler UnmarshalHandler
- UnmarshalErrorHandler UnmarshalErrorHandler
- SetHeaders SetHeaders
- BindBody BindBody
-}
-
-// Config Configuration of the client
-type Config struct {
- Target string
- Credentials *Credentials
-
- // HTTP client used to communicate with the Outscale API.
- Client *http.Client
-
- // Base URL for API requests.
- BaseURL *url.URL
-
- // User agent for client
- UserAgent string
-
-	// Optional function called after every successful request made to the Outscale APIs
- onRequestCompleted RequestCompletionCallback
-}
-
-// Credentials needed access key, secret key and region
-type Credentials struct {
- AccessKey string
- SecretKey string
- Region string
-}
-
-// RequestCompletionCallback defines the type of the request callback function.
-type RequestCompletionCallback func(*http.Request, *http.Response)
-
-// Sign HTTP Request for authentication
-func (c Client) Sign(req *http.Request, body io.ReadSeeker, timestamp time.Time, service string) (http.Header, error) {
- return c.Signer.Sign(req, body, c.Config.Target, c.Config.Credentials.Region, timestamp)
-}
-
-// NewRequest creates a request and signs it
-func (c *Client) NewRequest(ctx context.Context, operation, method, urlStr string, body interface{}) (*http.Request, error) {
- rel, errp := url.Parse(urlStr)
- if errp != nil {
- return nil, errp
- }
-
- var b interface{}
- var err error
-
- if method != http.MethodPost { // method for FCU & LBU API
- b, err = c.MarshalHander(body, operation, "2018-05-14")
- if err != nil {
- return nil, err
- }
-	} else { // POST method, used by the ICU and DL APIs
- b = c.BindBody(operation, body)
- }
-
- u := c.Config.BaseURL.ResolveReference(rel)
-
- req, reader, err := c.BuildRequestHandler(b, method, u.String())
- if err != nil {
- return nil, err
- }
-
- c.SetHeaders(c.Config.Target, req, operation)
-
- _, err = c.Sign(req, reader, time.Now(), c.Config.Target)
- if err != nil {
- return nil, err
- }
- return req, nil
-}
-
-// Do sends the request to the API's
-func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) error {
-
- req = req.WithContext(ctx)
-
- resp, err := c.Config.Client.Do(req)
- if err != nil {
- return err
- }
-
- err = c.checkResponse(resp)
- if err != nil {
- return err
- }
-
- return c.UnmarshalHandler(v, resp, req.URL.RawQuery)
-}
-
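-// checkResponse treats any 2xx status code as success; everything else is
-// delegated to the configured UnmarshalErrorHandler.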
-func (c Client) checkResponse(r *http.Response) error {
-	if status := r.StatusCode; status >= 200 && status <= 299 {
- return nil
- }
-
- return c.UnmarshalErrorHandler(r)
-}
diff --git a/osc/osc_test.go b/osc/osc_test.go
deleted file mode 100644
index 80295ca83..000000000
--- a/osc/osc_test.go
+++ /dev/null
@@ -1,219 +0,0 @@
-// Package osc ...
-package osc
-
-import (
- "context"
- "errors"
- "fmt"
- "io"
- "io/ioutil"
- "log"
- "net/http"
- "net/http/httptest"
- "net/url"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws/credentials"
- "github.com/aws/aws-sdk-go/aws/signer/v4"
-)
-
-var (
- mux *http.ServeMux
-
- ctx = context.TODO()
-
- client *Client
-
- server *httptest.Server
-)
-
-func init() {
- client = buildClient()
-}
-
-func setup() {
- mux = http.NewServeMux()
- server = httptest.NewServer(mux)
-
- client = buildClient()
- url, _ := url.Parse(server.URL)
- client.Config.BaseURL = url
-}
-
-func teardown() {
- server.Close()
-}
-
-func buildRequest(service, region, body string) (*http.Request, io.ReadSeeker) {
- reader := strings.NewReader(body)
- endpoint := fmt.Sprintf("https://%s.%s.outscale.com", service, region)
- req, _ := http.NewRequest("POST", endpoint, reader)
- req.URL.Opaque = "//example.org/bucket/key-._~,!@#$%^&*()"
- req.Header.Add("X-Amz-Target", "prefix.Operation")
- req.Header.Add("Content-Type", "application/json")
- req.Header.Add("Content-Length", fmt.Sprintf("%d", len(body)))
- return req, reader
-}
-
-func buildSigner() *v4.Signer {
- return &v4.Signer{
- Credentials: credentials.NewStaticCredentials("AKID", "SECRET", ""),
- }
-}
-
-func buildClient() *Client {
-
- baseURL, _ := url.Parse(fmt.Sprintf(DefaultBaseURL, "fcu", "eu-west-2"))
- fmt.Println(baseURL.Opaque)
-
- return &Client{
- Signer: buildSigner(),
- BuildRequestHandler: buildTestHandler,
- MarshalHander: testBuildRequestHandler,
- UnmarshalHandler: unmarshalTestHandler,
- UnmarshalErrorHandler: testUnmarshalErrorHandler,
- SetHeaders: testSetHeaders,
- Config: Config{
- UserAgent: "test",
- Target: "fcu",
- BaseURL: baseURL,
- Client: &http.Client{},
- Credentials: &Credentials{
- Region: "eu-west-1",
- },
- },
- }
-}
-
-func buildTestHandler(v interface{}, method, url string) (*http.Request, io.ReadSeeker, error) {
- reader := strings.NewReader("{}")
- req, _ := http.NewRequest(method, url, reader)
- req.Header.Add("Content-Type", mediaTypeURLEncoded)
-
- return req, reader, nil
-}
-
-func testBuildRequestHandler(v interface{}, action, version string) (string, error) {
- return "{}", nil
-}
-
-func unmarshalTestHandler(v interface{}, req *http.Response, op string) error {
- return nil
-}
-
-func testUnmarshalErrorHandler(r *http.Response) error {
- return errors.New("This is an error")
-}
-
-// SetHeaders sets the headers for the request
-func testSetHeaders(agent string, req *http.Request, operation string) {
- req.Header.Add("X-Amz-Target", fmt.Sprintf("%s.%s", agent, operation))
- req.Header.Add("User-Agent", "test")
-}
-
-func TestSign(t *testing.T) {
- req, body := buildRequest("fcu", "eu-west-1", "{}")
- client.Sign(req, body, time.Unix(0, 0), "fcu")
-
- expectedDate := "19700101T000000Z"
- expectedSig := "AWS4-HMAC-SHA256 Credential=AKID/19700101/eu-west-1/fcu/aws4_request, SignedHeaders=content-length;content-type;host;x-amz-date;x-amz-target, Signature=3e6c29372bb5d7c5ce2c605a29bd774b1be3a8d794ea31e033c50af2c5e27302"
-
- q := req.Header
-
- if e, a := expectedDate, q.Get("X-Amz-Date"); e != a {
- t.Errorf("expect %v, got %v", e, a)
- }
-
- if e, a := expectedSig, q.Get("Authorization"); e != a {
- t.Errorf("expect %v, got %v", e, a)
- }
-
-}
-
-func TestSetHeaders(t *testing.T) {
- c := buildClient()
-
- log.Printf("Client: %+v", c)
-
-	req, _ := http.NewRequest(http.MethodGet, "http://example.org/", nil)
-
- log.Printf("request: %+v", req)
- c.SetHeaders(c.Config.Target, req, "DescribeInstances")
-
- q := req.Header
- targetExpected := "fcu.DescribeInstances"
- agentExpected := "test"
-
- if e, a := targetExpected, q.Get("X-Amz-Target"); e != a {
- t.Errorf("expect %v, got %v", e, a)
- }
-
- if e, a := agentExpected, q.Get("User-Agent"); e != a {
- t.Errorf("expect %v, got %v", e, a)
- }
-}
-
-func TestNewRequest(t *testing.T) {
- c := buildClient()
-
- inURL, outURL := "foo", fmt.Sprintf(DefaultBaseURL+"/foo", "fcu", "eu-west-2")
- inBody, outBody := "{}", "{}"
- req, _ := c.NewRequest(context.TODO(), "operation", http.MethodGet, inURL, inBody)
- fmt.Println(req.URL.Opaque)
-
- // test relative URL was expanded
- if req.URL.String() != outURL {
- t.Errorf("NewRequest(%v) URL = %v, expected %v", inURL, req.URL, outURL)
- }
-
- // test body was JSON encoded
- body, _ := ioutil.ReadAll(req.Body)
- if string(body) != outBody {
- t.Errorf("NewRequest(%v)Body = %v, expected %v", inBody, string(body), outBody)
- }
-}
-
-func TestDo(t *testing.T) {
- setup()
- defer teardown()
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
- if m := http.MethodGet; m != r.Method {
- t.Errorf("Request method = %v, expected %v", r.Method, m)
- }
- fmt.Fprint(w, `{}`)
- })
-
- inURL := "/"
- inBody := "{}"
-
- req, _ := client.NewRequest(context.TODO(), "operation", http.MethodGet, inURL, inBody)
- err := client.Do(context.Background(), req, nil)
- if err != nil {
- t.Fatalf("Do(): %v", err)
- }
-}
-
-func TestDo_ErrorResponse(t *testing.T) {
- setup()
- defer teardown()
-
- mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
- if m := http.MethodGet; m != r.Method {
- t.Errorf("Request method = %v, expected %v", r.Method, m)
- }
- w.WriteHeader(http.StatusForbidden)
- fmt.Fprint(w, `{}`)
- })
-
- inURL := "/"
- inBody := "{}"
-
- req, _ := client.NewRequest(context.TODO(), "operation", http.MethodGet, inURL, inBody)
- err := client.Do(context.Background(), req, nil)
- if err == nil {
-		t.Fatalf("Expected Do() to return an error, got nil")
- }
-}
diff --git a/osc/services.go b/osc/services.go
deleted file mode 100644
index 4fc69a3bf..000000000
--- a/osc/services.go
+++ /dev/null
@@ -1,5 +0,0 @@
-package osc
-
-// FCUVMService ...
-type FCUVMService interface {
-}
diff --git a/outscale/config.go b/outscale/config.go
index 07098e830..1f7ba177b 100644
--- a/outscale/config.go
+++ b/outscale/config.go
@@ -5,12 +5,7 @@ import (
"fmt"
"net/http"
- "github.com/outscale/osc-go/oapi"
-
- "github.com/terraform-providers/terraform-provider-outscale/osc"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-
- "github.com/hashicorp/terraform/helper/logging"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/logging"
oscgo "github.com/marinsalinas/osc-sdk-go"
)
@@ -21,38 +16,16 @@ type Config struct {
SecretKeyID string
Region string
TokenID string
- OApi bool
+ Endpoints map[string]interface{}
}
//OutscaleClient client
type OutscaleClient struct {
- FCU *fcu.Client
- OAPI *oapi.Client
OSCAPI *oscgo.APIClient
}
// Client ...
func (c *Config) Client() (*OutscaleClient, error) {
- config := osc.Config{
- Credentials: &osc.Credentials{
- AccessKey: c.AccessKeyID,
- SecretKey: c.SecretKeyID,
- Region: c.Region,
- },
- }
- fcu, err := fcu.NewFCUClient(config)
- if err != nil {
- return nil, err
- }
-
- oapicfg := &oapi.Config{
- AccessKey: c.AccessKeyID,
- SecretKey: c.SecretKeyID,
- Region: c.Region,
- Service: "api",
- URL: "outscale.com/oapi/latest",
- }
-
skipClient := &http.Client{
Transport: &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
@@ -63,8 +36,14 @@ func (c *Config) Client() (*OutscaleClient, error) {
skipClient.Transport = oscgo.NewTransport(c.AccessKeyID, c.SecretKeyID, c.Region, skipClient.Transport)
+ basePath := fmt.Sprintf("https://api.%s.outscale.com/oapi/latest", c.Region)
+
+ if endpoint, ok := c.Endpoints["api"]; ok {
+ basePath = endpoint.(string)
+ }
+
oscConfig := &oscgo.Configuration{
- BasePath: fmt.Sprintf("https://api.%s.outscale.com/oapi/latest", c.Region),
+ BasePath: basePath,
DefaultHeader: make(map[string]string),
UserAgent: "terraform-provider-outscale-dev",
HTTPClient: skipClient,
@@ -72,11 +51,7 @@ func (c *Config) Client() (*OutscaleClient, error) {
oscClient := oscgo.NewAPIClient(oscConfig)
- oapiClient := oapi.NewClient(oapicfg, skipClient)
-
client := &OutscaleClient{
- FCU: fcu,
- OAPI: oapiClient,
OSCAPI: oscClient,
}
diff --git a/outscale/data_source_outscale_client_endpoint.go b/outscale/data_source_outscale_client_endpoint.go
deleted file mode 100644
index 611047a10..000000000
--- a/outscale/data_source_outscale_client_endpoint.go
+++ /dev/null
@@ -1,113 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strconv"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
-)
-
-func dataSourceOutscaleOAPICustomerGateway() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPICustomerGatewayRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersOApiSchema(),
- "bgp_asn": {
- Type: schema.TypeInt,
- Computed: true,
- },
-
- "public_ip": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "client_endpoint_id": {
- Type: schema.TypeString,
- Optional: true,
- },
-
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "tag": tagsOAPISchemaComputed(),
- },
- }
-}
-
-func dataSourceOutscaleOAPICustomerGatewayRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- req := &fcu.DescribeCustomerGatewaysInput{}
-
- filters, filtersOk := d.GetOk("filter")
- v, vOk := d.GetOk("client_endpoint_id")
-
- if filtersOk {
- req.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
- if vOk {
- req.CustomerGatewayIds = []*string{aws.String(v.(string))}
- }
-
- var resp *fcu.DescribeCustomerGatewaysOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeCustomerGateways(req)
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidCustomerGatewayID.NotFound") {
- d.SetId("")
- return nil
- }
- fmt.Printf("[ERROR] Error finding CustomerGateway: %s", err)
- return err
- }
-
- if len(resp.CustomerGateways) == 0 {
- return fmt.Errorf("Unable to find Customer Gateway")
- }
-
- if len(resp.CustomerGateways) > 1 {
-		return fmt.Errorf("multiple results returned, please use more specific criteria in your query")
- }
-
- customerGateway := resp.CustomerGateways[0]
- d.SetId(*customerGateway.CustomerGatewayId)
- d.Set("public_ip", customerGateway.IpAddress)
- d.Set("type", customerGateway.Type)
- d.Set("tag", tagsToMap(customerGateway.Tags))
-
- if *customerGateway.BgpAsn != "" {
- val, err := strconv.ParseInt(*customerGateway.BgpAsn, 0, 0)
- if err != nil {
- return fmt.Errorf("error parsing bgp_asn: %s", err)
- }
-
- d.Set("bgp_asn", int(val))
- }
-
- return nil
-}
diff --git a/outscale/data_source_outscale_client_endpoint_test.go b/outscale/data_source_outscale_client_endpoint_test.go
deleted file mode 100644
index 125b136f8..000000000
--- a/outscale/data_source_outscale_client_endpoint_test.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccOutscaleOAPIDSCustomerGateway_basic(t *testing.T) {
- t.Skip()
-
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- rInt := acctest.RandInt()
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- IDRefreshName: "outscale_client_endpoint.foo",
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPICustomerGatewayDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOAPICustomerGatewayDSConfig(rInt, rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr("data.outscale_client_endpoint.test", "ip_address", "172.0.0.1"),
- ),
- },
- },
- })
-}
-
-func testAccOAPICustomerGatewayDSConfig(rInt, rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_client_endpoint" "foo" {
- bgp_asn = %d
- public_ip = "172.0.0.1"
- type = "ipsec.1"
- tag {
- Name = "foo-gateway-%d"
- }
- }
-
- data "outscale_client_endpoint" "test" {
- client_endpoint_id = "${outscale_client_endpoint.foo.id}"
- }
- `, rBgpAsn, rInt)
-}
diff --git a/outscale/data_source_outscale_client_endpoints.go b/outscale/data_source_outscale_client_endpoints.go
deleted file mode 100644
index a8d7e004f..000000000
--- a/outscale/data_source_outscale_client_endpoints.go
+++ /dev/null
@@ -1,140 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strconv"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
-)
-
-func dataSourceOutscaleOAPICustomerGateways() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPICustomerGatewaysRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "client_endpoint_id": {
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "client_endpoint": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "bgp_asn": {
- Type: schema.TypeInt,
- Computed: true,
- },
-
- "public_ip": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "client_endpoint_id": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "tag_set": tagsSchemaComputed(),
- },
- },
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPICustomerGatewaysRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- req := &fcu.DescribeCustomerGatewaysInput{}
-
- filters, filtersOk := d.GetOk("filter")
- v, vOk := d.GetOk("client_endpoint_id")
-
- if filtersOk {
- req.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
- if vOk {
- var g []*string
- for _, s := range v.([]interface{}) {
- g = append(g, aws.String(s.(string)))
- }
- req.CustomerGatewayIds = g
- }
-
- var resp *fcu.DescribeCustomerGatewaysOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeCustomerGateways(req)
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidCustomerGatewayID.NotFound") {
- d.SetId("")
- return nil
- }
- fmt.Printf("[ERROR] Error finding CustomerGateway: %s", err)
- return err
- }
-
- if len(resp.CustomerGateways) == 0 {
- return fmt.Errorf("Unable to find Customer Gateways")
- }
-
- customerGateways := make([]map[string]interface{}, len(resp.CustomerGateways))
-
- for k, v := range resp.CustomerGateways {
- customerGateway := make(map[string]interface{})
-
- customerGateway["client_endpoint_id"] = *v.CustomerGatewayId
- customerGateway["public_ip"] = *v.IpAddress
- customerGateway["type"] = *v.Type
- customerGateway["tag_set"] = tagsToMap(v.Tags)
-
- if *v.BgpAsn != "" {
- val, err := strconv.ParseInt(*v.BgpAsn, 0, 0)
- if err != nil {
- return fmt.Errorf("error parsing bgp_asn: %s", err)
- }
- customerGateway["bgp_asn"] = int(val)
- }
-
- customerGateways[k] = customerGateway
- }
-
- d.Set("client_endpoint", customerGateways)
- d.Set("request_id", resp.RequestId)
- d.SetId(resource.UniqueId())
-
- return nil
-}
diff --git a/outscale/data_source_outscale_client_endpoints_test.go b/outscale/data_source_outscale_client_endpoints_test.go
deleted file mode 100644
index 514b37980..000000000
--- a/outscale/data_source_outscale_client_endpoints_test.go
+++ /dev/null
@@ -1,67 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPIDSCustomerGateways_basic(t *testing.T) {
- t.Skip()
-
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- rInt := acctest.RandInt()
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- IDRefreshName: "outscale_client_endpoint.foo",
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPICustomerGatewayDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOAPICustomerGatewaysDSConfig(rInt, rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleCGsDataSourceID("data.outscale_client_endpoints.test"),
- resource.TestCheckResourceAttr("data.outscale_client_endpoints.test", "customer_gateway_set.#", "1"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleCGsDataSourceID(n string) resource.TestCheckFunc {
-	// Check that the Customer Gateway data source ID is set in state
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Can't find Customer Gateway data source: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("Customer Gateway data source ID not set")
- }
- return nil
- }
-}
-
-func testAccOAPICustomerGatewaysDSConfig(rInt, rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_client_endpoint" "foo" {
- bgp_asn = %d
- ip_range = "172.0.0.1"
- type = "ipsec.1"
- tag {
- Name = "foo-gateway-%d"
- }
- }
-
- data "outscale_client_endpoints" "test" {
- client_endpoint_id = ["${outscale_client_endpoint.foo.id}"]
- }
- `, rBgpAsn, rInt)
-}
diff --git a/outscale/data_source_outscale_image.go b/outscale/data_source_outscale_image.go
index 5d1767eb8..81c7bb5a0 100644
--- a/outscale/data_source_outscale_image.go
+++ b/outscale/data_source_outscale_image.go
@@ -9,8 +9,8 @@ import (
"github.com/spf13/cast"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIImage() *schema.Resource {
@@ -213,19 +213,41 @@ func dataSourceOutscaleOAPIImageRead(d *schema.ResourceData, meta interface{}) e
image := images[0]
d.SetId(*image.ImageId)
- set("architecture", image.Architecture)
- set("creation_date", image.CreationDate)
- set("description", image.Description)
- set("image_id", image.ImageId)
- set("file_location", image.FileLocation)
- set("account_alias", image.AccountAlias)
- set("account_id", image.AccountId)
- set("image_type", image.ImageType)
- set("image_name", image.ImageName)
- set("root_device_name", image.RootDeviceName)
- set("root_device_type", image.RootDeviceType)
- set("state", image.State)
+ if err := set("architecture", image.Architecture); err != nil {
+ return err
+ }
+
+ if err := set("creation_date", image.CreationDate); err != nil {
+ return err
+ }
+ if err := set("image_id", image.ImageId); err != nil {
+ return err
+ }
+ if err := set("file_location", image.FileLocation); err != nil {
+ return err
+ }
+ if err := set("account_alias", image.AccountAlias); err != nil {
+ return err
+ }
+ if err := set("account_id", image.AccountId); err != nil {
+ return err
+ }
+ if err := set("image_type", image.ImageType); err != nil {
+ return err
+ }
+ if err := set("image_name", image.ImageName); err != nil {
+ return err
+ }
+ if err := set("root_device_name", image.RootDeviceName); err != nil {
+ return err
+ }
+ if err := set("root_device_type", image.RootDeviceType); err != nil {
+ return err
+ }
+ if err := set("state", image.State); err != nil {
+ return err
+ }
if err := set("block_device_mappings", omiOAPIBlockDeviceMappings(*image.BlockDeviceMappings)); err != nil {
return err
}
diff --git a/outscale/data_source_outscale_image_test.go b/outscale/data_source_outscale_image_test.go
index e834d78d6..b369771ba 100644
--- a/outscale/data_source_outscale_image_test.go
+++ b/outscale/data_source_outscale_image_test.go
@@ -5,20 +5,19 @@ import (
"os"
"testing"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIImageDataSource_Instance(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
imageName := fmt.Sprintf("image-test-%d", acctest.RandInt())
resource.Test(t, resource.TestCase{
PreCheck: func() {
testAccPreCheck(t)
- skipIfNoOAPI(t)
},
Providers: testAccProviders,
Steps: []resource.TestStep{
@@ -34,13 +33,12 @@ func TestAccOutscaleOAPIImageDataSource_Instance(t *testing.T) {
}
func TestAccOutscaleOAPIImageDataSource_basic(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
imageName := fmt.Sprintf("image-test-%d", acctest.RandInt())
resource.Test(t, resource.TestCase{
PreCheck: func() {
- skipIfNoOAPI(t)
testAccPreCheck(t)
},
Providers: testAccProviders,
@@ -96,7 +94,7 @@ func testAccCheckOutscaleOAPIImageDataSourceBasicConfig(omi, vmType, region, ima
func testAccCheckOutscaleOAPIImageConfigBasic(omi, vmType, region, imageName string) string {
return fmt.Sprintf(`
resource "outscale_vm" "basic" {
- image_id = "%[1]s"
+ image_id = "%[1]s"
vm_type = "%[2]s"
keypair_name = "terraform-basic"
placement_subregion_name = "%[3]sa"
diff --git a/outscale/data_source_outscale_images.go b/outscale/data_source_outscale_images.go
index b8dc43be9..80e4e70a1 100644
--- a/outscale/data_source_outscale_images.go
+++ b/outscale/data_source_outscale_images.go
@@ -1,20 +1,17 @@
package outscale
import (
- "bytes"
"fmt"
"log"
"time"
"github.com/antihax/optional"
- "github.com/aws/aws-sdk-go/aws"
oscgo "github.com/marinsalinas/osc-sdk-go"
"github.com/spf13/cast"
- "github.com/hashicorp/terraform/helper/hashcode"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIImages() *schema.Resource {
@@ -243,7 +240,10 @@ func dataSourceOutscaleOAPIImagesRead(d *schema.ResourceData, meta interface{})
}
}
- d.Set("request_id", resp.ResponseContext.RequestId)
+ if err := d.Set("request_id", resp.ResponseContext.RequestId); err != nil {
+ return err
+ }
+
return set("images", imgs)
})
}
@@ -318,25 +318,3 @@ func expandStringValueList(configured []interface{}) []string {
}
return vs
}
-
-func expandStringList(configured []interface{}) []*string {
- vs := make([]*string, 0, len(configured))
- for _, v := range configured {
- val, ok := v.(string)
- if ok && val != "" {
- vs = append(vs, aws.String(v.(string)))
- }
- }
- return vs
-}
-
-// Generates a hash for the set hash function used by the product_codes
-// attribute.
-func amiProductCodesHash(v interface{}) int {
- var buf bytes.Buffer
- m := v.(map[string]interface{})
- // All keys added in alphabetical order.
- buf.WriteString(fmt.Sprintf("%s-", m["product_code_id"].(string)))
- buf.WriteString(fmt.Sprintf("%s-", m["product_code_type"].(string)))
- return hashcode.String(buf.String())
-}
diff --git a/outscale/data_source_outscale_images_test.go b/outscale/data_source_outscale_images_test.go
index 454ec5baf..63f043e7c 100644
--- a/outscale/data_source_outscale_images_test.go
+++ b/outscale/data_source_outscale_images_test.go
@@ -5,19 +5,18 @@ import (
"os"
"testing"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIImagesDataSource_Instance(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
imageName := fmt.Sprintf("image-test-%d", acctest.RandInt())
resource.Test(t, resource.TestCase{
PreCheck: func() {
- skipIfNoOAPI(t)
testAccPreCheck(t)
},
Providers: testAccProviders,
diff --git a/outscale/data_source_outscale_internet_service.go b/outscale/data_source_outscale_internet_service.go
index 584fcee1b..6e253cb92 100644
--- a/outscale/data_source_outscale_internet_service.go
+++ b/outscale/data_source_outscale_internet_service.go
@@ -10,8 +10,8 @@ import (
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func datasourceOutscaleOAPIInternetService() *schema.Resource {
@@ -46,7 +46,7 @@ func datasourceOutscaleOAPIInternetServiceRead(d *schema.ResourceData, meta inte
filters, filtersOk := d.GetOk("filter")
internetID, insternetIDOk := d.GetOk("internet_service_id")
- if filtersOk == false && insternetIDOk == false {
+ if !filtersOk && !insternetIDOk {
return fmt.Errorf("One of filters, or instance_id must be assigned")
}
@@ -65,8 +65,8 @@ func datasourceOutscaleOAPIInternetServiceRead(d *schema.ResourceData, meta inte
}
var resp oscgo.ReadInternetServicesResponse
- var err error
- err = resource.Retry(120*time.Second, func() *resource.RetryError {
+
+ err := resource.Retry(120*time.Second, func() *resource.RetryError {
r, _, err := conn.InternetServiceApi.ReadInternetServices(context.Background(), &oscgo.ReadInternetServicesOpts{ReadInternetServicesRequest: optional.NewInterface(params)})
if err != nil {
@@ -97,10 +97,21 @@ func datasourceOutscaleOAPIInternetServiceRead(d *schema.ResourceData, meta inte
log.Printf("[DEBUG] Setting OAPI Internet Service id (%s)", err)
- d.Set("request_id", resp.ResponseContext.GetRequestId())
- d.Set("internet_service_id", result.GetInternetServiceId())
- d.Set("state", result.GetState())
- d.Set("net_id", result.GetNetId())
+ if err := d.Set("request_id", resp.ResponseContext.GetRequestId()); err != nil {
+ return err
+ }
+
+ if err := d.Set("internet_service_id", result.GetInternetServiceId()); err != nil {
+ return err
+ }
+
+ if err := d.Set("state", result.GetState()); err != nil {
+ return err
+ }
+
+ if err := d.Set("net_id", result.GetNetId()); err != nil {
+ return err
+ }
d.SetId(result.GetInternetServiceId())
diff --git a/outscale/data_source_outscale_internet_service_test.go b/outscale/data_source_outscale_internet_service_test.go
index 8d359e8a1..34bf44d31 100644
--- a/outscale/data_source_outscale_internet_service_test.go
+++ b/outscale/data_source_outscale_internet_service_test.go
@@ -3,16 +3,13 @@ package outscale
import (
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPIINternetServiceDatasource_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
diff --git a/outscale/data_source_outscale_internet_services.go b/outscale/data_source_outscale_internet_services.go
index dd3740a88..15ea7a208 100644
--- a/outscale/data_source_outscale_internet_services.go
+++ b/outscale/data_source_outscale_internet_services.go
@@ -3,14 +3,15 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func datasourceOutscaleOAPIInternetServices() *schema.Resource {
@@ -60,7 +61,7 @@ func datasourceOutscaleOAPIInternetServicesRead(d *schema.ResourceData, meta int
filters, filtersOk := d.GetOk("filter")
internetID, internetIDOk := d.GetOk("internet_service_ids")
- if filtersOk == false && internetIDOk == false {
+ if !filtersOk && !internetIDOk {
return fmt.Errorf("One of filters, or instance_id must be assigned")
}
@@ -72,9 +73,7 @@ func datasourceOutscaleOAPIInternetServicesRead(d *schema.ResourceData, meta int
if internetIDOk {
i := internetID.([]string)
in := make([]string, len(i))
- for k, v := range i {
- in[k] = v
- }
+ copy(in, i)
filter.SetInternetServiceIds(in)
params.SetFilters(filter)
}
@@ -107,7 +106,10 @@ func datasourceOutscaleOAPIInternetServicesRead(d *schema.ResourceData, meta int
log.Printf("[DEBUG] Setting OAPI LIN Internet Gateways id (%s)", err)
- d.Set("request_id", resp.ResponseContext.GetRequestId())
+ if err := d.Set("request_id", resp.ResponseContext.GetRequestId()); err != nil {
+ return err
+ }
+
d.SetId(resource.UniqueId())
result := resp.GetInternetServices()
diff --git a/outscale/data_source_outscale_internet_services_test.go b/outscale/data_source_outscale_internet_services_test.go
index f024fe241..85312a2e4 100644
--- a/outscale/data_source_outscale_internet_services_test.go
+++ b/outscale/data_source_outscale_internet_services_test.go
@@ -3,16 +3,13 @@ package outscale
import (
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPIInternetServicesDatasource_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
diff --git a/outscale/data_source_outscale_keypair.go b/outscale/data_source_outscale_keypair.go
index eb05d2ddc..c80838c44 100644
--- a/outscale/data_source_outscale_keypair.go
+++ b/outscale/data_source_outscale_keypair.go
@@ -3,14 +3,15 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func datasourceOutscaleOApiKeyPairRead(d *schema.ResourceData, meta interface{}) error {
@@ -67,12 +68,19 @@ func datasourceOutscaleOApiKeyPairRead(d *schema.ResourceData, meta interface{})
}
if resp.ResponseContext.GetRequestId() != "" {
- d.Set("request_id", resp.ResponseContext.GetRequestId())
+ if err := d.Set("request_id", resp.ResponseContext.GetRequestId()); err != nil {
+ return err
+ }
}
keypair := resp.GetKeypairs()[0]
- d.Set("keypair_name", keypair.GetKeypairName())
- d.Set("keypair_fingerprint", keypair.GetKeypairFingerprint())
+ if err := d.Set("keypair_name", keypair.GetKeypairName()); err != nil {
+ return err
+ }
+ if err := d.Set("keypair_fingerprint", keypair.GetKeypairFingerprint()); err != nil {
+ return err
+ }
+
d.SetId(keypair.GetKeypairName())
return nil
}
diff --git a/outscale/data_source_outscale_keypair_test.go b/outscale/data_source_outscale_keypair_test.go
index a34c88b4e..b5bd0dfa6 100644
--- a/outscale/data_source_outscale_keypair_test.go
+++ b/outscale/data_source_outscale_keypair_test.go
@@ -4,14 +4,12 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIKeypairDataSource_Instance(t *testing.T) {
- //t.Skip()
-
keyPairName := fmt.Sprintf("acc-test-keypair-%d", acctest.RandIntRange(0, 400))
resource.Test(t, resource.TestCase{
diff --git a/outscale/data_source_outscale_keypairs.go b/outscale/data_source_outscale_keypairs.go
index 7f67386b9..1f253d2fd 100644
--- a/outscale/data_source_outscale_keypairs.go
+++ b/outscale/data_source_outscale_keypairs.go
@@ -8,8 +8,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func datasourceOutscaleOAPiKeyPairsRead(d *schema.ResourceData, meta interface{}) error {
diff --git a/outscale/data_source_outscale_keypairs_test.go b/outscale/data_source_outscale_keypairs_test.go
index 13a5565e4..cd4eb61da 100644
--- a/outscale/data_source_outscale_keypairs_test.go
+++ b/outscale/data_source_outscale_keypairs_test.go
@@ -4,19 +4,16 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIKeypairsDataSource_Instance(t *testing.T) {
keyPairName := fmt.Sprintf("test-acc-keypair-%d", acctest.RandIntRange(0, 400))
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_nat_service.go b/outscale/data_source_outscale_nat_service.go
index 7e1efd716..0bfd140ae 100644
--- a/outscale/data_source_outscale_nat_service.go
+++ b/outscale/data_source_outscale_nat_service.go
@@ -3,14 +3,15 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPINatService() *schema.Resource {
@@ -67,7 +68,7 @@ func dataSourceOutscaleOAPINatServiceRead(d *schema.ResourceData, meta interface
filters, filtersOk := d.GetOk("filter")
natGatewayID, natGatewayIDOK := d.GetOk("nat_service_id")
- if filtersOk == false && natGatewayIDOK == false {
+ if !filtersOk && !natGatewayIDOK {
return fmt.Errorf("filters, or owner must be assigned, or nat_service_id must be provided")
}
diff --git a/outscale/data_source_outscale_nat_service_test.go b/outscale/data_source_outscale_nat_service_test.go
index f8ac23647..8d2ed2bd2 100644
--- a/outscale/data_source_outscale_nat_service_test.go
+++ b/outscale/data_source_outscale_nat_service_test.go
@@ -4,17 +4,14 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPINatServiceDataSource_Instance(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_nat_services.go b/outscale/data_source_outscale_nat_services.go
index 9bf7cab8b..e28a63a15 100644
--- a/outscale/data_source_outscale_nat_services.go
+++ b/outscale/data_source_outscale_nat_services.go
@@ -3,13 +3,14 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPINatServices() *schema.Resource {
@@ -79,7 +80,7 @@ func dataSourceOutscaleOAPINatServicesRead(d *schema.ResourceData, meta interfac
filters, filtersOk := d.GetOk("filter")
natGatewayID, natGatewayIDOK := d.GetOk("nat_service_ids")
- if filtersOk == false && natGatewayIDOK == false {
+ if !filtersOk && !natGatewayIDOK {
return fmt.Errorf("filters, or owner must be assigned, or nat_service_id must be provided")
}
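The NAT service data sources above, like the other read functions touched by this patch, wrap the API call in resource.Retry and treat throttling as retryable. A minimal sketch of that convention under the same SDK, assuming the 120-second budget used elsewhere in the patch; the readWithRetry wrapper itself is illustrative, not provider code:

package outscale

import (
	"strings"
	"time"

	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)

// readWithRetry retries a read while the API reports request throttling,
// matching the RequestLimitExceeded handling used by these data sources.
func readWithRetry(read func() error) error {
	return resource.Retry(120*time.Second, func() *resource.RetryError {
		if err := read(); err != nil {
			if strings.Contains(err.Error(), "RequestLimitExceeded") {
				return resource.RetryableError(err)
			}
			return resource.NonRetryableError(err)
		}
		return nil
	})
}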
diff --git a/outscale/data_source_outscale_nat_services_test.go b/outscale/data_source_outscale_nat_services_test.go
index 73a3f24f9..e9f13ee8a 100644
--- a/outscale/data_source_outscale_nat_services_test.go
+++ b/outscale/data_source_outscale_nat_services_test.go
@@ -3,16 +3,13 @@ package outscale
import (
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPINatServicesDataSource_Instance(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_lin.go b/outscale/data_source_outscale_net.go
similarity index 96%
rename from outscale/data_source_outscale_lin.go
rename to outscale/data_source_outscale_net.go
index 0d4cb4f04..96d45d77f 100644
--- a/outscale/data_source_outscale_lin.go
+++ b/outscale/data_source_outscale_net.go
@@ -9,8 +9,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIVpc() *schema.Resource {
diff --git a/outscale/data_source_outscale_net_api_access.go b/outscale/data_source_outscale_net_api_access.go
deleted file mode 100644
index f14a01a51..000000000
--- a/outscale/data_source_outscale_net_api_access.go
+++ /dev/null
@@ -1,183 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/errwrap"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/hashicorp/terraform/helper/structure"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIVpcEndpoint() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIVpcEndpointRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "net_api_access_id": {
- Type: schema.TypeString,
- Optional: true,
- },
- "net_id": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "prefix_list_name": {
- Type: schema.TypeString,
- Computed: true,
- },
- "route_table_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "policy": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "prefix_list_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "ip_range": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIVpcEndpointRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- req := &fcu.DescribeVpcEndpointsInput{}
-
- id, ok1 := d.GetOk("net_api_access_id")
- v, ok2 := d.GetOk("filter")
-
- if ok1 == false && ok2 == false {
- return fmt.Errorf("One of filters, or net_api_access_id must be assigned")
- }
-
- if ok1 {
- req.VpcEndpointIds = []*string{aws.String(id.(string))}
- }
-
- if ok2 {
- req.Filters = buildOutscaleDataSourceFilters(v.(*schema.Set))
- }
- var err error
- var resp *fcu.DescribeVpcEndpointsOutput
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
-
- resp, err = conn.VM.DescribeVpcEndpoints(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.VpcEndpoints) == 0 {
- return fmt.Errorf("no matching VPC found")
- }
- if len(resp.VpcEndpoints) > 1 {
- return fmt.Errorf("multiple VPCs matched; use additional constraints to reduce matches to a single VPC")
- }
-
- vpc := resp.VpcEndpoints[0]
-
- policy, err := structure.NormalizeJsonString(aws.StringValue(vpc.PolicyDocument))
- if err != nil {
- return errwrap.Wrapf("policy contains an invalid JSON: {{err}}", err)
- }
-
- plID, cidrs, err := getOAPIPrefixList(conn, aws.StringValue(vpc.ServiceName))
-
- if err != nil {
- return err
- }
-
- if plID != nil {
- d.Set("prefix_list_id", plID)
- }
-
- d.SetId(*vpc.VpcEndpointId)
- d.Set("vpc_id", vpc.VpcEndpointId)
- d.Set("prefix_list_name", vpc.ServiceName)
- d.Set("route_table_id", flattenStringList(vpc.RouteTableIds))
- d.Set("policy", policy)
- d.Set("state", vpc.State)
- d.Set("ip_ranges", cidrs)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
-
-func getOAPIPrefixList(conn *fcu.Client, serviceName string) (*string, []interface{}, error) {
- req := &fcu.DescribePrefixListsInput{}
- req.Filters = buildFCUAttributeFilterListOAPI(
- map[string]string{
- "prefix-list-name": serviceName,
- },
- )
-
- var resp *fcu.DescribePrefixListsOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribePrefixLists(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- return nil, make([]interface{}, 0), err
- }
- if resp != nil && len(resp.PrefixLists) > 0 {
- if len(resp.PrefixLists) > 1 {
- return nil, make([]interface{}, 0), fmt.Errorf("multiple prefix lists associated with the service name '%s'. Unexpected", serviceName)
- }
-
- pl := resp.PrefixLists[0]
-
- return pl.PrefixListId, flattenStringList(pl.Cidrs), nil
-
- }
- return nil, make([]interface{}, 0), nil
-
-}
-
-func flattenStringList(list []*string) []interface{} {
- vs := make([]interface{}, 0, len(list))
- for _, v := range list {
- vs = append(vs, *v)
- }
- return vs
-}
diff --git a/outscale/data_source_outscale_net_api_access_services.go b/outscale/data_source_outscale_net_api_access_services.go
deleted file mode 100644
index 41cb5cf3c..000000000
--- a/outscale/data_source_outscale_net_api_access_services.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIVpcEndpointServices() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIVpcEndpointServicesRead,
-
- Schema: map[string]*schema.Schema{
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "prefix_list_name": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Schema{
- Type: schema.TypeString,
- },
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIVpcEndpointServicesRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- params := &fcu.DescribeVpcEndpointServicesInput{}
- var res *fcu.DescribeVpcEndpointServicesOutput
- var err error
- err = resource.Retry(40*time.Minute, func() *resource.RetryError {
- res, err = conn.VM.DescribeVpcEndpointServices(params)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- fmt.Printf("[INFO] Request limit exceeded")
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return resource.RetryableError(err)
- })
-
- if err != nil {
- return err
- }
-
- if len(res.ServiceNames) < 1 {
- return fmt.Errorf("your query returned no results, please change your search criteria and try again")
- }
-
- d.SetId(resource.UniqueId())
- d.Set("request_id", res.RequestID)
-
- return d.Set("prefix_list_name", flattenStringList(res.ServiceNames))
-}
diff --git a/outscale/data_source_outscale_net_api_access_services_test.go b/outscale/data_source_outscale_net_api_access_services_test.go
deleted file mode 100644
index 78dd07972..000000000
--- a/outscale/data_source_outscale_net_api_access_services_test.go
+++ /dev/null
@@ -1,54 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strconv"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPIVpcEndpointServicesDataSource_Instance(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- {
- Config: testAccCheckOutscaleOAPIVpcEndpointServicesDataSourceConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleOAPIVpcEndpointServicesDataSourceID("data.outscale_net_api_access_services.test"),
- resource.TestCheckResourceAttrSet("data.outscale_net_api_access_services.test", "request_id"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleOAPIVpcEndpointServicesDataSourceID(n string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Can't find OMI data source: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("Key Pair data source ID not set")
- }
-
- if i, err := strconv.Atoi(rs.Primary.Attributes["prefix_list_name.#"]); err != nil || i < 1 {
- return fmt.Errorf("prefix_list_name attribute doesn't have elements")
- }
-
- return nil
- }
-}
-
-const testAccCheckOutscaleOAPIVpcEndpointServicesDataSourceConfig = `
-data "outscale_net_api_access_services" "test" {}
-`
diff --git a/outscale/data_source_outscale_net_api_access_test.go b/outscale/data_source_outscale_net_api_access_test.go
deleted file mode 100644
index 1c977e3cf..000000000
--- a/outscale/data_source_outscale_net_api_access_test.go
+++ /dev/null
@@ -1,50 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccOutscaleOAPIDSLinAPIAccess_basic(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPIDSLinAPIAccessConfig,
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(
- "data.outscale_net_api_access.test", "service_name", "com.outscale.eu-west-2.osu"),
- ),
- },
- },
- })
-}
-
-const testAccOutscaleOAPIDSLinAPIAccessConfig = `
- resource "outscale_net "foo" {
- ip_ranges = "10.1.0.0/16"
- }
-
- resource "outscale_route_table" "foo" {
- net_id = "${outscale_net.foo.id}"
- }
-
- resource "outscale_net_api_access" "link" {
- net_id = "${outscale_net.foo.id}"
- route_table_id = [
- "${outscale_route_table.foo.id}"
- ]
- service_name = "com.outscale.eu-west-2.osu"
- }
-
- data "outscale_net_api_access" "test" {
- net_api_access_id = "${outscale_net_api_access.link.id}"
- }
-`
diff --git a/outscale/data_source_outscale_net_api_accesses.go b/outscale/data_source_outscale_net_api_accesses.go
deleted file mode 100644
index fe6974a83..000000000
--- a/outscale/data_source_outscale_net_api_accesses.go
+++ /dev/null
@@ -1,204 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/errwrap"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/hashicorp/terraform/helper/structure"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
- "github.com/terraform-providers/terraform-provider-outscale/utils"
-)
-
-func dataSourceOutscaleOAPIVpcEndpoints() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIVpcEndpointsRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "net_api_access_id": {
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{
- Type: schema.TypeString,
- },
- },
- "net_api_access": {
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "net_id": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "prefix_list_name": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "route_table_id": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
-
- "policy": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "prefix_list_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "net_api_access_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "ip_ranges": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- },
- },
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIVpcEndpointsRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- req := &fcu.DescribeVpcEndpointsInput{}
-
- filters, filtersOk := d.GetOk("filter")
- vpcEndpointIDs, vpceIDsOk := d.GetOk("vpc_endpoint_id")
-
- if filtersOk == false && vpceIDsOk == false {
- return fmt.Errorf("One of filters, or vpc_endpoint_id must be assigned")
- }
-
- if filtersOk {
- req.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
-
- if vpceIDsOk {
- var ids []*string
- for _, v := range vpcEndpointIDs.([]interface{}) {
- ids = append(ids, aws.String(v.(string)))
- }
- req.VpcEndpointIds = ids
- }
-
- var err error
- var resp *fcu.DescribeVpcEndpointsOutput
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
-
- resp, err = conn.VM.DescribeVpcEndpoints(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.VpcEndpoints) == 0 {
- return fmt.Errorf("no matching VPC Endpoints found")
- }
-
- utils.PrintToJSON(resp, "VpcEndpoint Response")
-
- d.SetId(resource.UniqueId())
-
- vpcEndpoints := make([]map[string]interface{}, len(resp.VpcEndpoints))
-
- for k, v := range resp.VpcEndpoints {
- vpce := make(map[string]interface{})
-
- policy, err := structure.NormalizeJsonString(aws.StringValue(v.PolicyDocument))
- if err != nil {
- return errwrap.Wrapf("policy contains an invalid JSON: {{err}}", err)
- }
-
- plID, cidrs, err := getPrefixList(conn, aws.StringValue(v.ServiceName))
-
- if err != nil {
- return err
- }
-
- vpce["prefix_list_id"] = aws.StringValue(plID)
- vpce["net_api_access_id"] = aws.StringValue(v.VpcEndpointId)
- vpce["net_id"] = aws.StringValue(v.VpcEndpointId)
- vpce["prefix_list_name"] = aws.StringValue(v.ServiceName)
- vpce["route_table_id"] = flattenStringList(v.RouteTableIds)
- vpce["policy"] = policy
- vpce["state"] = aws.StringValue(v.State)
- vpce["ip_ranges"] = cidrs
-
- vpcEndpoints[k] = vpce
- }
-
- d.Set("request_id", resp.RequestId)
-
- return d.Set("net_api_access", vpcEndpoints)
-}
-
-func getPrefixList(conn *fcu.Client, serviceName string) (*string, []interface{}, error) {
- req := &fcu.DescribePrefixListsInput{}
- req.Filters = buildFCUAttributeFilterListOAPI(
- map[string]string{
- "prefix-list-name": serviceName,
- },
- )
-
- var resp *fcu.DescribePrefixListsOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribePrefixLists(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- return nil, make([]interface{}, 0), err
- }
- if resp != nil && len(resp.PrefixLists) > 0 {
- if len(resp.PrefixLists) > 1 {
- return nil, make([]interface{}, 0), fmt.Errorf("multiple prefix lists associated with the service name '%s'. Unexpected", serviceName)
- }
-
- pl := resp.PrefixLists[0]
-
- return pl.PrefixListId, flattenStringList(pl.Cidrs), nil
-
- }
- return nil, make([]interface{}, 0), nil
-}
diff --git a/outscale/data_source_outscale_net_api_accesses_test.go b/outscale/data_source_outscale_net_api_accesses_test.go
deleted file mode 100644
index 5fe6c9187..000000000
--- a/outscale/data_source_outscale_net_api_accesses_test.go
+++ /dev/null
@@ -1,72 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPIDSLinAPIAccesses_basic(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- {
- Config: testAccCheckOutscaleOAPIVpcEndpointsDataSourceConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleOAPIVpcEndpointsDataSourceID("data.outscale_net_api_accesses.test"),
- resource.TestCheckResourceAttr("data.outscale_net_api_accesses.test", "vpc_endpoint_set.0.service_name", "com.outscale.eu-west-2.osu"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleOAPIVpcEndpointsDataSourceID(n string) resource.TestCheckFunc {
- // Wait for IAM role
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Can't find OMI data source: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("Key Pair data source ID not set")
- }
- return nil
- }
-}
-
-const testAccCheckOutscaleOAPIVpcEndpointsDataSourceConfig = `
- resource "outscale_net" "foo" {
- ip_ranges = "10.1.0.0/16"
- }
-
- resource "outscale_route_table" "foo" {
- net_id = "${outscale_net.foo.id}"
- }
-
- resource "outscale_net_api_access" "link" {
- net_id = "${outscale_net.foo.id}"
-
- route_table_id = [
- "${outscale_route_table.foo.id}",
- ]
-
- prefix_list_name = "com.outscale.eu-west-2.osu"
- }
-
- data "outscale_net_api_accesses" "test" {
- filter {
- name = "service-name"
- values = ["${outscale_net_api_access.link.service_name}"]
- }
- }
-`
diff --git a/outscale/data_source_outscale_net_attributes.go b/outscale/data_source_outscale_net_attributes.go
index f6c1f0fc1..7fa9785a0 100644
--- a/outscale/data_source_outscale_net_attributes.go
+++ b/outscale/data_source_outscale_net_attributes.go
@@ -3,14 +3,16 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+ "github.com/terraform-providers/terraform-provider-outscale/utils"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIVpcAttr() *schema.Resource {
@@ -60,7 +62,7 @@ func dataSourceOutscaleOAPIVpcAttrRead(d *schema.ResourceData, meta interface{})
return resource.RetryableError(err)
})
if err != nil {
- log.Printf("[DEBUG] Error reading lin (%s)", err)
+ log.Printf("[DEBUG] Error reading lin (%s)", utils.GetErrorResponse(err))
}
if len(resp.GetNets()) == 0 {
diff --git a/outscale/data_source_outscale_net_attributes_test.go b/outscale/data_source_outscale_net_attributes_test.go
index f2ed4fda9..c92d7bb61 100644
--- a/outscale/data_source_outscale_net_attributes_test.go
+++ b/outscale/data_source_outscale_net_attributes_test.go
@@ -4,16 +4,13 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIDSLinAttr_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
@@ -48,9 +45,15 @@ const testAccOutscaleOAPIDSLinAttrConfig = `
ip_range = "10.0.0.0/16"
}
+ resource "outscale_net" "vpc2" {
+ ip_range = "10.0.0.0/16"
+ }
+
+
+
resource "outscale_net_attributes" "outscale_net_attributes" {
net_id = "${outscale_net.vpc.id}"
- dhcp_options_set_id = "dopt-ca98300d"
+ dhcp_options_set_id = outscale_net.vpc2.dhcp_options_set_id
}
data "outscale_net_attributes" "test" {
diff --git a/outscale/data_source_outscale_net_peering.go b/outscale/data_source_outscale_net_peering.go
index 75327acc9..c8dacad67 100644
--- a/outscale/data_source_outscale_net_peering.go
+++ b/outscale/data_source_outscale_net_peering.go
@@ -11,8 +11,8 @@ import (
"time"
"github.com/hashicorp/errwrap"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPILinPeeringConnection() *schema.Resource {
diff --git a/outscale/data_source_outscale_net_peering_test.go b/outscale/data_source_outscale_net_peering_test.go
index 3e4ebd72c..def845e7f 100644
--- a/outscale/data_source_outscale_net_peering_test.go
+++ b/outscale/data_source_outscale_net_peering_test.go
@@ -4,17 +4,14 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccDataSourceOutscaleOAPILinPeeringConnection_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
diff --git a/outscale/data_source_outscale_net_peerings.go b/outscale/data_source_outscale_net_peerings.go
index 8ba2a7fa4..1972f1286 100644
--- a/outscale/data_source_outscale_net_peerings.go
+++ b/outscale/data_source_outscale_net_peerings.go
@@ -3,13 +3,14 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPILinPeeringsConnection() *schema.Resource {
@@ -99,16 +100,14 @@ func dataSourceOutscaleOAPILinPeeringsConnectionRead(d *schema.ResourceData, met
}
func getOAPINetPeerings(peerings []oscgo.NetPeering) (res []map[string]interface{}) {
- if peerings != nil {
- for _, p := range peerings {
- res = append(res, map[string]interface{}{
- "accepter_net": getOAPINetPeeringAccepterNet(p.GetAccepterNet()),
- "net_peering_id": p.GetNetPeeringId(),
- "source_net": getOAPINetPeeringSourceNet(p.GetSourceNet()),
- "state": getOAPINetPeeringState(p.GetState()),
- //"tags": getOapiTagSet(p.Tags),
- })
- }
+ for _, p := range peerings {
+ res = append(res, map[string]interface{}{
+ "accepter_net": getOAPINetPeeringAccepterNet(p.GetAccepterNet()),
+ "net_peering_id": p.GetNetPeeringId(),
+ "source_net": getOAPINetPeeringSourceNet(p.GetSourceNet()),
+ "state": getOAPINetPeeringState(p.GetState()),
+ //"tags": getOapiTagSet(p.Tags),
+ })
}
return res
}
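Dropping the surrounding nil check above is safe because ranging over a nil slice simply runs zero iterations. A self-contained illustration of that Go behavior, unrelated to the provider code itself:

package main

import "fmt"

func main() {
	var peerings []string // nil slice
	count := 0
	for range peerings { // a nil slice ranges zero times
		count++
	}
	fmt.Println(count) // prints 0; no nil check is needed before the loop
}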
diff --git a/outscale/data_source_outscale_net_peerings_test.go b/outscale/data_source_outscale_net_peerings_test.go
index 8fc5c9545..d2206c147 100644
--- a/outscale/data_source_outscale_net_peerings_test.go
+++ b/outscale/data_source_outscale_net_peerings_test.go
@@ -3,16 +3,13 @@ package outscale
import (
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccDataSourceOutscaleOAPILinPeeringsConnection_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
diff --git a/outscale/data_source_outscale_net_test.go b/outscale/data_source_outscale_net_test.go
index 939c100a1..65d655f70 100644
--- a/outscale/data_source_outscale_net_test.go
+++ b/outscale/data_source_outscale_net_test.go
@@ -6,8 +6,8 @@ import (
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccDataSourceOutscaleOAPIVpc_basic(t *testing.T) {
@@ -16,10 +16,7 @@ func TestAccDataSourceOutscaleOAPIVpc_basic(t *testing.T) {
ipRange := fmt.Sprintf("172.%d.0.0/16", rInt)
tag := fmt.Sprintf("terraform-testacc-vpc-data-source-%d", rInt)
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_lins.go b/outscale/data_source_outscale_nets.go
similarity index 95%
rename from outscale/data_source_outscale_lins.go
rename to outscale/data_source_outscale_nets.go
index 8b9a5bb3e..44ffe5de7 100644
--- a/outscale/data_source_outscale_lins.go
+++ b/outscale/data_source_outscale_nets.go
@@ -3,13 +3,14 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIVpcs() *schema.Resource {
@@ -72,7 +73,7 @@ func dataSourceOutscaleOAPIVpcsRead(d *schema.ResourceData, meta interface{}) er
filters, filtersOk := d.GetOk("filter")
netIds, netIdsOk := d.GetOk("net_id")
- if filtersOk == false && netIdsOk == false {
+ if !filtersOk && !netIdsOk {
return fmt.Errorf("filters or net_id(s) must be provided")
}
diff --git a/outscale/data_source_outscale_lins_test.go b/outscale/data_source_outscale_nets_test.go
similarity index 88%
rename from outscale/data_source_outscale_lins_test.go
rename to outscale/data_source_outscale_nets_test.go
index 985765334..30fb5070a 100644
--- a/outscale/data_source_outscale_lins_test.go
+++ b/outscale/data_source_outscale_nets_test.go
@@ -6,7 +6,7 @@ import (
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccDataSourceOutscaleOAPIVpcs_basic(t *testing.T) {
@@ -16,10 +16,7 @@ func TestAccDataSourceOutscaleOAPIVpcs_basic(t *testing.T) {
ipRange := fmt.Sprintf("172.%d.0.0/16", rInt)
tag := fmt.Sprintf("terraform-testacc-vpc-data-source-%d", rInt)
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_nic.go b/outscale/data_source_outscale_nic.go
index 1db67b215..553c3d2ec 100644
--- a/outscale/data_source_outscale_nic.go
+++ b/outscale/data_source_outscale_nic.go
@@ -11,8 +11,8 @@ import (
"time"
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
// Creates a network interface in the specified subnet
diff --git a/outscale/data_source_outscale_nic_test.go b/outscale/data_source_outscale_nic_test.go
index 95685979d..6ea435a16 100644
--- a/outscale/data_source_outscale_nic_test.go
+++ b/outscale/data_source_outscale_nic_test.go
@@ -4,7 +4,6 @@ import (
"context"
"fmt"
"os"
- "strconv"
"strings"
"github.com/antihax/optional"
@@ -13,8 +12,8 @@ import (
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIENIDataSource_basic(t *testing.T) {
@@ -22,10 +21,7 @@ func TestAccOutscaleOAPIENIDataSource_basic(t *testing.T) {
subregion := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_nic.outscale_nic",
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIENIDestroy,
@@ -44,22 +40,11 @@ func TestAccOutscaleOAPIENIDataSource_basic(t *testing.T) {
func TestAccOutscaleOAPIENIDataSource_basicFilter(t *testing.T) {
var conf oscgo.Nic
- o := os.Getenv("OUTSCALE_OAPI")
-
subregion := os.Getenv("OUTSCALE_REGION")
if subregion == "" {
subregion = "in-west-2"
}
- oapi, err := strconv.ParseBool(o)
- if err != nil {
- oapi = false
- }
-
- if !oapi {
- t.Skip()
- }
-
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_nic.outscale_nic",
diff --git a/outscale/data_source_outscale_nics.go b/outscale/data_source_outscale_nics.go
index a5a6860fe..e2a0b5f02 100644
--- a/outscale/data_source_outscale_nics.go
+++ b/outscale/data_source_outscale_nics.go
@@ -3,13 +3,13 @@ package outscale
import (
"context"
"fmt"
+ "time"
+
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "log"
- "time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
// Creates a network interface in the specified subnet
@@ -226,7 +226,7 @@ func dataSourceOutscaleOAPINicsRead(d *schema.ResourceData, meta interface{}) er
conn := meta.(*OutscaleClient).OSCAPI
filters, filtersOk := d.GetOk("filter")
- if filtersOk == false {
+ if !filtersOk {
return fmt.Errorf("filters, or owner must be assigned, or nic_id must be provided")
}
@@ -261,7 +261,6 @@ func dataSourceOutscaleOAPINicsRead(d *schema.ResourceData, meta interface{}) er
d.SetId(resource.UniqueId())
if err := set("nics", getOAPIVMNetworkInterfaceSet(nics)); err != nil {
- log.Printf("[DEBUG] NICS ERR %+v", err)
return err
}
diff --git a/outscale/data_source_outscale_nics_test.go b/outscale/data_source_outscale_nics_test.go
index 6e5a82e33..67a00500b 100644
--- a/outscale/data_source_outscale_nics_test.go
+++ b/outscale/data_source_outscale_nics_test.go
@@ -5,17 +5,14 @@ import (
"os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPINicsDataSource(t *testing.T) {
subregion := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_prefix_list.go b/outscale/data_source_outscale_prefix_list.go
deleted file mode 100644
index f0a9b15be..000000000
--- a/outscale/data_source_outscale_prefix_list.go
+++ /dev/null
@@ -1,103 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIPrefixList() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIPrefixListRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "prefix_list_id": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "prefix_list_name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "ip_range": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIPrefixListRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
- prefix, prefixOk := d.GetOk("prefix_list_id")
-
- if !filtersOk && !prefixOk {
- return fmt.Errorf("One of prefix_list_id or filters must be assigned")
- }
-
- params := &fcu.DescribePrefixListsInput{}
-
- if filtersOk {
- params.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
-
- if prefixOk {
- params.PrefixListIds = aws.StringSlice([]string{prefix.(string)})
- }
-
- log.Printf("[DEBUG] DescribePrefixLists %s\n", params)
-
- var resp *fcu.DescribePrefixListsOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribePrefixLists(params)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
-
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.PrefixLists) == 0 {
- return fmt.Errorf("no matching prefix list found; the prefix list ID or name may be invalid or not exist in the current region")
- }
-
- if len(resp.PrefixLists) > 1 {
- return fmt.Errorf("multiple Prefix matched; use additional constraints to reduce matches to a single Prefix")
- }
-
- pl := resp.PrefixLists[0]
-
- d.SetId(*pl.PrefixListId)
- d.Set("prefix_list_id", pl.PrefixListId)
- d.Set("prefix_list_name", pl.PrefixListName)
-
- cidrs := make([]string, len(pl.Cidrs))
- for i, v := range pl.Cidrs {
- cidrs[i] = *v
- }
- d.Set("ip_range", cidrs)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_prefix_list_test.go b/outscale/data_source_outscale_prefix_list_test.go
deleted file mode 100644
index 28685eb30..000000000
--- a/outscale/data_source_outscale_prefix_list_test.go
+++ /dev/null
@@ -1,68 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strconv"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccDataSourceOutscaleOAPIPrefixList(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIPrefixListConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccDataSourceOutscaleOAPIPrefixListCheck("data.outscale_prefix_list.s3_by_id"),
- ),
- },
- },
- })
-}
-
-func testAccDataSourceOutscaleOAPIPrefixListCheck(name string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[name]
- if !ok {
- return fmt.Errorf("root module has no resource called %s", name)
- }
-
- attr := rs.Primary.Attributes
-
- if attr["prefix_list_name"] != "com.outscale.eu-west-2.osu" {
- return fmt.Errorf("bad name %s", attr["prefix_list_name"])
- }
- if attr["prefix_list_id"] != "pl-a14a8cdc" {
- return fmt.Errorf("bad id %s", attr["prefix_list_id"])
- }
-
- var (
- cidrBlockSize int
- err error
- )
-
- if cidrBlockSize, err = strconv.Atoi(attr["cidr_set.#"]); err != nil {
- return err
- }
- if cidrBlockSize < 1 {
- return fmt.Errorf("cidr_set seem suspiciously low: %d", cidrBlockSize)
- }
-
- return nil
- }
-}
-
-const testAccDataSourceOutscaleOAPIPrefixListConfig = `
- data "outscale_prefix_list" "s3_by_id" {
- prefix_list_id = "pl-a14a8cdc"
- }
-`
diff --git a/outscale/data_source_outscale_prefix_lists.go b/outscale/data_source_outscale_prefix_lists.go
deleted file mode 100644
index c8cbb3366..000000000
--- a/outscale/data_source_outscale_prefix_lists.go
+++ /dev/null
@@ -1,119 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIPrefixLists() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIPrefixListsRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "prefix_list_id": &schema.Schema{
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "prefix_list_set": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "prefix_list_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "prefix_list_name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "ip_range": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- },
- },
- },
-
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIPrefixListsRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
- prefix, prefixOk := d.GetOk("prefix_list_id")
-
- if !filtersOk && !prefixOk {
- return fmt.Errorf("One of prefix_list_id or filters must be assigned")
- }
-
- params := &fcu.DescribePrefixListsInput{}
-
- if filtersOk {
- params.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
-
- if prefixOk {
- var ids []*string
- for _, v := range prefix.([]interface{}) {
- ids = append(ids, aws.String(v.(string)))
- }
- params.PrefixListIds = ids
- }
-
- var resp *fcu.DescribePrefixListsOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribePrefixLists(params)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
-
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.PrefixLists) == 0 {
- return fmt.Errorf("no matching prefix list found; the prefix list ID or name may be invalid or not exist in the current region")
- }
-
- d.SetId(resource.UniqueId())
-
- pls := make([]map[string]interface{}, len(resp.PrefixLists))
-
- for k, v := range resp.PrefixLists {
- pl := make(map[string]interface{})
- pl["prefix_list_id"] = *v.PrefixListId
- pl["prefix_list_name"] = *v.PrefixListName
- cidrs := make([]string, len(v.Cidrs))
- for i, v1 := range v.Cidrs {
- cidrs[i] = *v1
- }
- pl["ip_range"] = cidrs
- pls[k] = pl
- }
-
- d.Set("prefix_list_set", pls)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_prefix_lists_test.go b/outscale/data_source_outscale_prefix_lists_test.go
deleted file mode 100644
index f64f4dc18..000000000
--- a/outscale/data_source_outscale_prefix_lists_test.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIPrefixLists(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIPrefixListsConfig,
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr("data.outscale_prefix_lists.s3_by_id", "prefix_list_set.#", "1"),
- ),
- },
- },
- })
-}
-
-const testAccDataSourceOutscaleOAPIPrefixListsConfig = `
- data "outscale_prefix_lists" "s3_by_id" {
- prefix_list_id = ["pl-a14a8cdc"]
- }
-`
diff --git a/outscale/data_source_outscale_product_types.go b/outscale/data_source_outscale_product_types.go
deleted file mode 100644
index d575e5540..000000000
--- a/outscale/data_source_outscale_product_types.go
+++ /dev/null
@@ -1,100 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIProductTypes() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIProductTypesRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "product_type": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "description": {
- Type: schema.TypeString,
- Computed: true,
- },
- "product_type_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "product_type_vendor": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIProductTypesRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
-
- params := &fcu.DescribeProductTypesInput{}
-
- if filtersOk {
- params.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
-
- var resp *fcu.DescribeProductTypesOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeProductTypes(params)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.ProductTypeSet) == 0 {
- return fmt.Errorf("no matching Product Types found: %#v", params)
- }
-
- vcs := make([]map[string]interface{}, len(resp.ProductTypeSet))
-
- for k, v := range resp.ProductTypeSet {
- vc := make(map[string]interface{})
- vc["description"] = *v.Description
- vc["product_type_id"] = *v.ProductTypeId
- if v.Vendor != nil {
- vc["product_type_vendor"] = *v.Vendor
- } else {
- vc["product_type_vendor"] = ""
- }
- vcs[k] = vc
- }
-
- if err := d.Set("product_type", vcs); err != nil {
- return err
- }
- d.Set("request_id", resp.RequestId)
- d.SetId(resource.UniqueId())
-
- return nil
-}
diff --git a/outscale/data_source_outscale_product_types_test.go b/outscale/data_source_outscale_product_types_test.go
deleted file mode 100644
index 9bee9de76..000000000
--- a/outscale/data_source_outscale_product_types_test.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIProductTypes(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIProductTypesConfig,
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr("data.outscale_product_types.test", "product_type_set.#", "3"),
- ),
- },
- },
- })
-}
-
-const testAccDataSourceOutscaleOAPIProductTypesConfig = `
-data "outscale_product_types" "test" {}
-`
diff --git a/outscale/data_source_outscale_public_ip.go b/outscale/data_source_outscale_public_ip.go
index 349e3dc9f..aa13de4d8 100644
--- a/outscale/data_source_outscale_public_ip.go
+++ b/outscale/data_source_outscale_public_ip.go
@@ -9,8 +9,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIPublicIP() *schema.Resource {
diff --git a/outscale/data_source_outscale_public_ip_test.go b/outscale/data_source_outscale_public_ip_test.go
index cd719f297..ed36b9a08 100644
--- a/outscale/data_source_outscale_public_ip_test.go
+++ b/outscale/data_source_outscale_public_ip_test.go
@@ -4,17 +4,14 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccDataSourceOutscaleOAPIPublicIP(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
diff --git a/outscale/data_source_outscale_public_ips.go b/outscale/data_source_outscale_public_ips.go
index 0bd4e05c9..466faf369 100644
--- a/outscale/data_source_outscale_public_ips.go
+++ b/outscale/data_source_outscale_public_ips.go
@@ -8,8 +8,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIPublicIPS() *schema.Resource {
diff --git a/outscale/data_source_outscale_public_ips_test.go b/outscale/data_source_outscale_public_ips_test.go
index a13e0b170..34c40d787 100644
--- a/outscale/data_source_outscale_public_ips_test.go
+++ b/outscale/data_source_outscale_public_ips_test.go
@@ -3,16 +3,13 @@ package outscale
import (
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccDataSourceOutscaleOAPIPublicIPS(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
diff --git a/outscale/data_source_outscale_quota.go b/outscale/data_source_outscale_quota.go
deleted file mode 100644
index 8dee58448..000000000
--- a/outscale/data_source_outscale_quota.go
+++ /dev/null
@@ -1,150 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strconv"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIQuota() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIQuotaRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "quota_name": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "quota": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "description": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "short_description": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "firewall_rules_set_name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "max_value": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "account_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "used_value": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- },
- },
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "reference": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIQuotaRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
- quota, quotaOk := d.GetOk("quota_name")
-
- if !filtersOk && !quotaOk {
- return fmt.Errorf("One of quota_name or filters must be assigned")
- }
-
- params := &fcu.DescribeQuotasInput{}
-
- if filtersOk {
- params.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
-
- if quotaOk {
- params.QuotaName = aws.StringSlice([]string{quota.(string)})
- }
-
- var resp *fcu.DescribeQuotasOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeQuotas(params)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
-
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.ReferenceQuotaSet) == 0 {
- return fmt.Errorf("no matching quotas list found; the quotas list ID or name may be invalid or not exist in the current region")
- }
-
- if len(resp.ReferenceQuotaSet) > 1 {
- return fmt.Errorf("multiple Quotas matched; use additional constraints to reduce matches to a single Quotas")
- }
-
- pl := resp.ReferenceQuotaSet[0]
-
- d.SetId(resource.UniqueId())
-
- quotas := make([]map[string]interface{}, len(pl.QuotaSet))
- for k, v := range pl.QuotaSet {
- quota := make(map[string]interface{})
- quota["description"] = aws.StringValue(v.Description)
- quota["display_name"] = aws.StringValue(v.DisplayName)
- quota["firewall_rules_set_name"] = aws.StringValue(v.GroupName)
- i, err := strconv.Atoi(*v.MaxQuotaValue)
- if err != nil {
- return err
- }
- quota["max_value"] = i
- quota["name"] = aws.StringValue(v.Name)
- quota["account_id"] = aws.StringValue(v.OwnerId)
- i2, err := strconv.Atoi(*v.MaxQuotaValue)
- if err != nil {
- return err
- }
- quota["used_value"] = i2
- quotas[k] = quota
- }
-
- if err := d.Set("quota", quotas); err != nil {
- return err
- }
- d.Set("reference", pl.Reference)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_quota_test.go b/outscale/data_source_outscale_quota_test.go
deleted file mode 100644
index 8cce50078..000000000
--- a/outscale/data_source_outscale_quota_test.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccDataSourceOutscaleOAPIQuota(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIQuotaConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccDataSourceOutscaleQuotaOAPICheck("data.outscale_quota.s3_by_id"),
- ),
- },
- },
- })
-}
-
-func testAccDataSourceOutscaleQuotaOAPICheck(name string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- _, ok := s.RootModule().Resources[name]
- if !ok {
- return fmt.Errorf("root module has no resource called %s", name)
- }
-
- return nil
- }
-}
-
-const testAccDataSourceOutscaleOAPIQuotaConfig = `
- data "outscale_quota" "s3_by_id" {
- quota_name = "vm_limit"
- }
-`
diff --git a/outscale/data_source_outscale_quotas.go b/outscale/data_source_outscale_quotas.go
deleted file mode 100644
index fdfb65037..000000000
--- a/outscale/data_source_outscale_quotas.go
+++ /dev/null
@@ -1,167 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strconv"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIQuotas() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIQuotasRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "quota_name": &schema.Schema{
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "quota_type": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "quota": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "description": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "short_description": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "firewall_rules_set_name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "max_value": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "account_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "used_value": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- },
- },
- },
- "reference": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIQuotasRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
- quota, quotaOk := d.GetOk("quota_name")
-
- if !filtersOk && !quotaOk {
- return fmt.Errorf("One of quota_name or filters must be assigned")
- }
-
- params := &fcu.DescribeQuotasInput{}
-
- if filtersOk {
- params.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
-
- if quotaOk {
- var ids []*string
- for _, v := range quota.([]interface{}) {
- ids = append(ids, aws.String(v.(string)))
- }
- params.QuotaName = ids
- }
-
- var resp *fcu.DescribeQuotasOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeQuotas(params)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
-
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.ReferenceQuotaSet) == 0 {
- return fmt.Errorf("no matching quotas list found; the quotas list ID or name may be invalid or not exist in the current region")
- }
-
- d.SetId(resource.UniqueId())
-
- qs := make([]map[string]interface{}, len(resp.ReferenceQuotaSet))
-
- for k, v := range resp.ReferenceQuotaSet {
- q := make(map[string]interface{})
- q["reference"] = *v.Reference
-
- quotas := make([]map[string]interface{}, len(v.QuotaSet))
- for k, v := range v.QuotaSet {
- quota := make(map[string]interface{})
- quota["description"] = *v.Description
- quota["short_description"] = *v.DisplayName
- quota["firewall_rules_set_name"] = *v.GroupName
- i, err := strconv.Atoi(*v.MaxQuotaValue)
- if err != nil {
- return err
- }
- quota["max_value"] = i
- quota["name"] = *v.Name
- quota["account_id"] = *v.OwnerId
- i2, err := strconv.Atoi(*v.UsedQuotaValue)
- if err != nil {
- return err
- }
- quota["used_value"] = i2
- quotas[k] = quota
- }
-
- q["quota"] = quotas
-
- qs[k] = q
- }
-
- if err := d.Set("quota_type", qs); err != nil {
- return err
- }
-
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_quotas_test.go b/outscale/data_source_outscale_quotas_test.go
deleted file mode 100644
index 61a621f1c..000000000
--- a/outscale/data_source_outscale_quotas_test.go
+++ /dev/null
@@ -1,33 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIQuotas(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIQuotasConfig,
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr("data.outscale_quotas.s3_by_id", "reference_quota_set.#", "1"),
- ),
- },
- },
- })
-}
-
-const testAccDataSourceOutscaleOAPIQuotasConfig = `
- data "outscale_quotas" "s3_by_id" {
- quota_name = ["vm_limit"]
- }
-`
diff --git a/outscale/data_source_outscale_region.go b/outscale/data_source_outscale_region.go
deleted file mode 100644
index b393919fd..000000000
--- a/outscale/data_source_outscale_region.go
+++ /dev/null
@@ -1,94 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIRegion() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIRegionRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "region_name": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
-
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
-
- "region_endpoint": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIRegionRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- rtbID, rtbOk := d.GetOk("region_name")
- filter, filterOk := d.GetOk("filter")
-
- if !filterOk && !rtbOk {
- return fmt.Errorf("One of region_name or filters must be assigned")
- }
-
- req := &fcu.DescribeRegionsInput{}
-
- if rtbOk {
- req.RegionNames = []*string{aws.String(rtbID.(string))}
- }
-
- if filterOk {
- req.Filters = buildOutscaleDataSourceFilters(filter.(*schema.Set))
- }
-
- log.Printf("[DEBUG] DescribeRegions %+v\n", req)
-
- var resp *fcu.DescribeRegionsOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeRegions(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
-
- return nil
- })
-
- if err != nil {
- return err
- }
-
- if resp == nil || len(resp.Regions) == 0 {
- return fmt.Errorf("no matching regions found")
- }
- if len(resp.Regions) > 1 {
- return fmt.Errorf("multiple regions matched; use additional constraints to reduce matches to a single region")
- }
-
- region := resp.Regions[0]
-
- d.SetId(*region.RegionName)
- d.Set("region_name", region.RegionName)
- d.Set("region_endpoint", region.Endpoint)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_region_test.go b/outscale/data_source_outscale_region_test.go
deleted file mode 100644
index e31f0345b..000000000
--- a/outscale/data_source_outscale_region_test.go
+++ /dev/null
@@ -1,56 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccDataSourceOutscaleOAPIRegion(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIRegionConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccDataSourceOutscaleOAPIRegionCheck("data.outscale_region.by_name_current", "eu-west-2", "true"),
- // testAccDataSourceOutscaleOAPIRegionCheck("data.outscale_region.by_name_other", "us-west-1", "false"),
- ),
- },
- },
- })
-}
-
-func testAccDataSourceOutscaleOAPIRegionCheck(name, region, current string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[name]
- if !ok {
- return fmt.Errorf("root module has no resource called %s", name)
- }
-
- attr := rs.Primary.Attributes
-
- if attr["region_name"] != region {
- return fmt.Errorf("bad name %s", attr["region_name"])
- }
-
- return nil
- }
-}
-
-const testAccDataSourceOutscaleOAPIRegionConfig = `
- data "outscale_region" "by_name_current" {
- filter {
- name = "region-name"
- values = ["eu-west-2"]
- }
- }
-`
diff --git a/outscale/data_source_outscale_regions.go b/outscale/data_source_outscale_regions.go
deleted file mode 100644
index 3a1161554..000000000
--- a/outscale/data_source_outscale_regions.go
+++ /dev/null
@@ -1,112 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIRegions() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIRegionsRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "region_name": &schema.Schema{
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "region": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "region_name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "region_endpoint": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIRegionsRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- rtbID, rtbOk := d.GetOk("region_name")
- filter, filterOk := d.GetOk("filter")
-
- if !filterOk && !rtbOk {
- return fmt.Errorf("One of region_name or filters must be assigned")
- }
-
- req := &fcu.DescribeRegionsInput{}
-
- if rtbOk {
- var ids []*string
- for _, v := range rtbID.([]interface{}) {
- ids = append(ids, aws.String(v.(string)))
- }
- req.RegionNames = ids
- }
-
- if filterOk {
- req.Filters = buildOutscaleDataSourceFilters(filter.(*schema.Set))
- }
-
- log.Printf("[DEBUG] DescribeRegions %+v\n", req)
-
- var resp *fcu.DescribeRegionsOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeRegions(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
-
- return nil
- })
-
- if err != nil {
- return err
- }
-
- if resp == nil || len(resp.Regions) == 0 {
- return fmt.Errorf("no matching regions found")
- }
-
- d.SetId(resource.UniqueId())
-
- ri := make([]map[string]interface{}, len(resp.Regions))
-
- for k, v := range resp.Regions {
- r := make(map[string]interface{})
- r["region_endpoint"] = *v.Endpoint
- r["region_name"] = *v.RegionName
- ri[k] = r
- }
-
- d.Set("region", ri)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_regions_test.go b/outscale/data_source_outscale_regions_test.go
deleted file mode 100644
index 8ed8a9222..000000000
--- a/outscale/data_source_outscale_regions_test.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIRegions(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIRegionsConfig,
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr("data.outscale_regions.by_name_current", "region_info.#", "1"),
- ),
- },
- },
- })
-}
-
-const testAccDataSourceOutscaleOAPIRegionsConfig = `
- data "outscale_regions" "by_name_current" {
- filter {
- name = "region-name"
- values = ["eu-west-2"]
- }
- }
-`
diff --git a/outscale/data_source_outscale_reserved_vms.go b/outscale/data_source_outscale_reserved_vms.go
deleted file mode 100644
index 0f50ba602..000000000
--- a/outscale/data_source_outscale_reserved_vms.go
+++ /dev/null
@@ -1,177 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIReservedVMS() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIReservedVMSRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "reserved_vms_id": &schema.Schema{
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "sub_region_name": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "offering_type": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "reserved_vm": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "sub_region_name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "currency_code": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "vm_count": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "tenancy": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "type": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "offering_type": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "product_type": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "recurring_charge": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "frequency": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "reserved_vms_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "state": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIReservedVMSRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- az, azok := d.GetOk("sub_region_name")
- ot, otok := d.GetOk("offering_type")
- ri, riok := d.GetOk("reserved_vms_id")
- filter, filterOk := d.GetOk("filter")
-
- req := &fcu.DescribeReservedInstancesInput{}
-
- if azok {
- req.AvailabilityZone = aws.String(az.(string))
- }
- if otok {
- req.OfferingType = aws.String(ot.(string))
- }
- if riok {
- var ids []*string
- for _, v := range ri.([]interface{}) {
- ids = append(ids, aws.String(v.(string)))
- }
- req.ReservedInstancesIds = ids
- }
- if filterOk {
- req.Filters = buildOutscaleDataSourceFilters(filter.(*schema.Set))
- }
-
- var resp *fcu.DescribeReservedInstancesOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeReservedInstances(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- if resp == nil || len(resp.ReservedInstances) == 0 {
- return fmt.Errorf("no matching reserved VMS found")
- }
-
- d.SetId(resource.UniqueId())
-
- rsi := make([]map[string]interface{}, len(resp.ReservedInstances))
-
- for k, v := range resp.ReservedInstances {
- r := make(map[string]interface{})
- r["sub_region_name"] = *v.AvailabilityZone
- r["currency_code"] = *v.CurrencyCode
- r["vm_count"] = *v.InstanceCount
- r["tenancy"] = *v.InstanceTenancy
- r["type"] = *v.InstanceType
- r["offering_type"] = *v.OfferingType
- r["product_type"] = *v.ProductDescription
-
- rcs := make([]map[string]interface{}, len(v.RecurringCharges))
- for k1, v1 := range v.RecurringCharges {
- rc := make(map[string]interface{})
- rc["frequency"] = v1.Frequency
- rcs[k1] = rc
- }
-
- r["recurring_charge"] = rcs
- r["reserved_vms_id"] = *v.ReservedInstancesId
- r["state"] = *v.State
- rsi[k] = r
- }
-
- d.Set("reserved_vm", rsi)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_reserved_vms_offer.go b/outscale/data_source_outscale_reserved_vms_offer.go
deleted file mode 100644
index 8d2d4f618..000000000
--- a/outscale/data_source_outscale_reserved_vms_offer.go
+++ /dev/null
@@ -1,186 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIReservedVMOffer() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIReservedVMOfferRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "reserved_vms_offer_id": &schema.Schema{
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "pricing_detail": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "vm_count": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "sub_region_name": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "tenancy": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "offering_type": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "product_type": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "currency_code": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "type": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "marketplace": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "recurring_charge": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "frequency": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIReservedVMOfferRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- az, azok := d.GetOk("sub_region_name")
- it, itok := d.GetOk("tenancy")
- ity, ityok := d.GetOk("type")
- pd, pdok := d.GetOk("product_type")
- ot, otok := d.GetOk("offering_type")
- ri, riok := d.GetOk("reserved_vms_offer_id")
- filter, filterOk := d.GetOk("filter")
-
- req := &fcu.DescribeReservedInstancesOfferingsInput{}
-
- if azok {
- req.AvailabilityZone = aws.String(az.(string))
- }
- if otok {
- req.OfferingType = aws.String(ot.(string))
- }
- if itok {
- req.InstanceTenancy = aws.String(it.(string))
- }
- if ityok {
- req.InstanceTenancy = aws.String(ity.(string))
- }
- if pdok {
- req.InstanceTenancy = aws.String(pd.(string))
- }
- if riok {
- var ids []*string
- for _, v := range ri.([]interface{}) {
- ids = append(ids, aws.String(v.(string)))
- }
- req.ReservedInstancesOfferingIds = ids
- }
- if filterOk {
- req.Filters = buildOutscaleDataSourceFilters(filter.(*schema.Set))
- }
-
- var resp *fcu.DescribeReservedInstancesOfferingsOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeReservedInstancesOfferings(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- if resp == nil || len(resp.ReservedInstancesOfferingsSet) == 0 {
- return fmt.Errorf("no matching reserved VMS Offer found")
- }
-
- if len(resp.ReservedInstancesOfferingsSet) > 1 {
- return fmt.Errorf("multiple VM Offer matched; use additional constraints to reduce matches to a single VM Offer")
- }
-
- d.SetId(resource.UniqueId())
-
- v := resp.ReservedInstancesOfferingsSet[0]
-
- d.Set("sub_region_name", v.AvailabilityZone)
- d.Set("currency_code", v.CurrencyCode)
- d.Set("tenancy", v.InstanceTenancy)
- d.Set("type", v.InstanceType)
- d.Set("marketplace", v.Martketplace)
- d.Set("offering_type", v.OfferingType)
- d.Set("product_type", v.ProductDescription)
- d.Set("reserved_vms_offer_id", v.ReservedInstancesOfferingId)
-
- rcs := make([]map[string]interface{}, len(v.RecurringCharges))
- for k1, v1 := range v.RecurringCharges {
- rc := make(map[string]interface{})
- rc["frequency"] = v1.Frequency
- rcs[k1] = rc
- }
-
- d.Set("recurring_charge", rcs)
-
- pds := make([]map[string]interface{}, len(v.PricingDetailsSet))
- for k1, v1 := range v.PricingDetailsSet {
- rc := make(map[string]interface{})
- rc["vm_count"] = v1.Count
- rcs[k1] = rc
- }
-
- d.Set("pricing_detail", pds)
-
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_reserved_vms_offer_test.go b/outscale/data_source_outscale_reserved_vms_offer_test.go
deleted file mode 100644
index 6127d338a..000000000
--- a/outscale/data_source_outscale_reserved_vms_offer_test.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIReservedVMSOffer(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIReservedVMSOfferConfig,
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttrSet("data.outscale_reserved_vms_offer.test", "reserved_instances_offering_id"),
- ),
- },
- },
- })
-}
-
-const testAccDataSourceOutscaleOAPIReservedVMSOfferConfig = `
- data "outscale_reserved_vms_offer" "test" {}
-`
diff --git a/outscale/data_source_outscale_reserved_vms_offers.go b/outscale/data_source_outscale_reserved_vms_offers.go
deleted file mode 100644
index 33cb819cd..000000000
--- a/outscale/data_source_outscale_reserved_vms_offers.go
+++ /dev/null
@@ -1,183 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
- "github.com/terraform-providers/terraform-provider-outscale/utils"
-)
-
-func dataSourceOutscaleOAPIReservedVMOffers() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIReservedVMOffersRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "sub_region_name": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- },
- "tenancy": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- },
- "offering_type": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- },
- "product_type": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- },
- "reserved_vms_offering_id": &schema.Schema{
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "reserved_vms_offering": &schema.Schema{
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "currency_code": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "type": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "marketplace": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "recurring_charge": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "frequency": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- },
- },
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIReservedVMOffersRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- az, azok := d.GetOk("sub_region_name")
- it, itok := d.GetOk("tenancy")
- ity, ityok := d.GetOk("type")
- pd, pdok := d.GetOk("product_type")
- ot, otok := d.GetOk("offering_type")
- ri, riok := d.GetOk("reserved_vms_offering_id")
- filter, filterOk := d.GetOk("filter")
-
- req := &fcu.DescribeReservedInstancesOfferingsInput{}
-
- if azok {
- req.AvailabilityZone = aws.String(az.(string))
- }
- if otok {
- req.OfferingType = aws.String(ot.(string))
- }
- if itok {
- req.InstanceTenancy = aws.String(it.(string))
- }
- if ityok {
- req.InstanceTenancy = aws.String(ity.(string))
- }
- if pdok {
- req.InstanceTenancy = aws.String(pd.(string))
- }
- if riok {
- var ids []*string
- for _, v := range ri.([]interface{}) {
- ids = append(ids, aws.String(v.(string)))
- }
- req.ReservedInstancesOfferingIds = ids
- }
- if filterOk {
- req.Filters = buildOutscaleDataSourceFilters(filter.(*schema.Set))
- }
-
- var resp *fcu.DescribeReservedInstancesOfferingsOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeReservedInstancesOfferings(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- if resp == nil || len(resp.ReservedInstancesOfferingsSet) == 0 {
- return fmt.Errorf("no matching reserved VMS Offer found")
- }
-
- utils.PrintToJSON(resp, "OFFERS")
-
- d.SetId(resource.UniqueId())
-
- rsi := make([]map[string]interface{}, len(resp.ReservedInstancesOfferingsSet))
-
- for k, v := range resp.ReservedInstancesOfferingsSet {
- r := make(map[string]interface{})
- r["sub_region_name"] = *v.AvailabilityZone
- r["currency_code"] = *v.CurrencyCode
- r["tenancy"] = *v.InstanceTenancy
- r["type"] = *v.InstanceType
- r["marketplace"] = *v.Martketplace
- r["offering_type"] = *v.OfferingType
- r["product_type"] = *v.ProductDescription
- r["reserved_vms_offering_id"] = *v.ReservedInstancesOfferingId
-
- rcs := make([]map[string]interface{}, len(v.RecurringCharges))
- for k1, v1 := range v.RecurringCharges {
- rc := make(map[string]interface{})
- rc["frequency"] = v1.Frequency
- rcs[k1] = rc
- }
-
- r["recurring_charge"] = rcs
-
- pds := make([]map[string]interface{}, len(v.PricingDetailsSet))
- for k1, v1 := range v.PricingDetailsSet {
- rc := make(map[string]interface{})
- rc["count"] = v1.Count
- rcs[k1] = rc
- }
-
- r["pricing_details_set"] = pds
-
- rsi[k] = r
- }
-
- d.Set("reserved_vms_offering", rsi)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_reserved_vms_offers_test.go b/outscale/data_source_outscale_reserved_vms_offers_test.go
deleted file mode 100644
index 34864701a..000000000
--- a/outscale/data_source_outscale_reserved_vms_offers_test.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIReservedVMSOffers(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIReservedVMSOffersConfig,
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttrSet("data.outscale_reserved_vms_offers.test", "reserved_instances_offerings_set"),
- ),
- },
- },
- })
-}
-
-const testAccDataSourceOutscaleOAPIReservedVMSOffersConfig = `
-data "outscale_reserved_vms_offers" "test" {}
-`
diff --git a/outscale/data_source_outscale_reserved_vms_test.go b/outscale/data_source_outscale_reserved_vms_test.go
deleted file mode 100644
index 9392249e8..000000000
--- a/outscale/data_source_outscale_reserved_vms_test.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIReservedVMS(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIReservedVMSConfig,
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttrSet("data.outscale_reserved_vms.test", "reserved_instances_set"),
- ),
- },
- },
- })
-}
-
-const testAccDataSourceOutscaleOAPIReservedVMSConfig = `
-data "outscale_reserved_vms" "test" {}
-`
diff --git a/outscale/data_source_outscale_route_table.go b/outscale/data_source_outscale_route_table.go
index 8d935da95..5d0c45d80 100644
--- a/outscale/data_source_outscale_route_table.go
+++ b/outscale/data_source_outscale_route_table.go
@@ -9,8 +9,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIRouteTable() *schema.Resource {
diff --git a/outscale/data_source_outscale_route_table_test.go b/outscale/data_source_outscale_route_table_test.go
index ed2df072c..f7554d989 100644
--- a/outscale/data_source_outscale_route_table_test.go
+++ b/outscale/data_source_outscale_route_table_test.go
@@ -4,16 +4,13 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccDataSourceOutscaleOAPIRouteTable_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_route_tables.go b/outscale/data_source_outscale_route_tables.go
index 7d8b599db..e95c3e0b4 100644
--- a/outscale/data_source_outscale_route_tables.go
+++ b/outscale/data_source_outscale_route_tables.go
@@ -8,8 +8,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIRouteTables() *schema.Resource {
diff --git a/outscale/data_source_outscale_route_tables_test.go b/outscale/data_source_outscale_route_tables_test.go
index d50b70abe..ba088e03c 100644
--- a/outscale/data_source_outscale_route_tables_test.go
+++ b/outscale/data_source_outscale_route_tables_test.go
@@ -3,15 +3,12 @@ package outscale
import (
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccDataSourceOutscaleOAPIRouteTables_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_security_group.go b/outscale/data_source_outscale_security_group.go
index b630604b5..da60026af 100644
--- a/outscale/data_source_outscale_security_group.go
+++ b/outscale/data_source_outscale_security_group.go
@@ -9,8 +9,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPISecurityGroup() *schema.Resource {
diff --git a/outscale/data_source_outscale_security_group_test.go b/outscale/data_source_outscale_security_group_test.go
index fb8fa0699..8701538b3 100644
--- a/outscale/data_source_outscale_security_group_test.go
+++ b/outscale/data_source_outscale_security_group_test.go
@@ -4,18 +4,15 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccDataSourceOutscaleOAPISecurityGroup_basic(t *testing.T) {
rInt := acctest.RandInt()
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
@@ -32,10 +29,7 @@ func TestAccDataSourceOutscaleOAPISecurityGroup_basic(t *testing.T) {
func TestAccDataSourceOutscaleOAPISecurityGroupPublic(t *testing.T) {
rInt := acctest.RandInt()
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_security_groups.go b/outscale/data_source_outscale_security_groups.go
index 70ee69e91..d8c8621eb 100644
--- a/outscale/data_source_outscale_security_groups.go
+++ b/outscale/data_source_outscale_security_groups.go
@@ -9,8 +9,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/terraform-providers/terraform-provider-outscale/utils"
)
diff --git a/outscale/data_source_outscale_security_groups_test.go b/outscale/data_source_outscale_security_groups_test.go
index 7aacd10fa..0d5f911bb 100644
--- a/outscale/data_source_outscale_security_groups_test.go
+++ b/outscale/data_source_outscale_security_groups_test.go
@@ -4,17 +4,14 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccDataSourceOutscaleOAPISecurityGroups_vpc(t *testing.T) {
rInt := acctest.RandInt()
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_snapshot.go b/outscale/data_source_outscale_snapshot.go
index 11552455b..a791ddd7e 100644
--- a/outscale/data_source_outscale_snapshot.go
+++ b/outscale/data_source_outscale_snapshot.go
@@ -3,17 +3,18 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strconv"
"strings"
"time"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
"github.com/terraform-providers/terraform-provider-outscale/utils"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPISnapshot() *schema.Resource {
@@ -93,7 +94,7 @@ func dataSourceOutscaleOAPISnapshotRead(d *schema.ResourceData, meta interface{}
snapshotIds, snapshotIdsOk := d.GetOk("snapshot_id")
owners, ownersOk := d.GetOk("account_id")
- if restorableUsers == false && filtersOk == false && snapshotIds == false && ownersOk == false {
+ if restorableUsers == false && !filtersOk && !snapshotIdsOk && !ownersOk {
return fmt.Errorf("One of snapshot_ids, filters, restorable_by_user_ids, or owners must be assigned")
}
diff --git a/outscale/data_source_outscale_snapshot_test.go b/outscale/data_source_outscale_snapshot_test.go
index 4eb31d5b1..afcfec430 100644
--- a/outscale/data_source_outscale_snapshot_test.go
+++ b/outscale/data_source_outscale_snapshot_test.go
@@ -5,18 +5,15 @@ import (
"os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPISnapshotDataSource_basic(t *testing.T) {
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
@@ -34,10 +31,7 @@ func TestAccOutscaleOAPISnapshotDataSource_multipleFilters(t *testing.T) {
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_snapshots.go b/outscale/data_source_outscale_snapshots.go
index fd99f3dc0..056ff4968 100644
--- a/outscale/data_source_outscale_snapshots.go
+++ b/outscale/data_source_outscale_snapshots.go
@@ -3,13 +3,14 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPISnapshots() *schema.Resource {
@@ -109,7 +110,7 @@ func dataSourceOutscaleOAPISnapshotsRead(d *schema.ResourceData, meta interface{
snapshotIds, snapshotIdsOk := d.GetOk("snapshot_id")
owners, ownersOk := d.GetOk("account_id")
- if restorableUsers == false && filtersOk == false && snapshotIds == false && ownersOk == false {
+ if restorableUsers == false && !filtersOk && !snapshotIdsOk && !ownersOk {
return fmt.Errorf("One of snapshot_ids, filters, restorable_by_user_ids, or owners must be assigned")
}
diff --git a/outscale/data_source_outscale_snapshots_test.go b/outscale/data_source_outscale_snapshots_test.go
index 602b5cd6d..43c968107 100644
--- a/outscale/data_source_outscale_snapshots_test.go
+++ b/outscale/data_source_outscale_snapshots_test.go
@@ -3,15 +3,12 @@ package outscale
import (
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPISnapshotsDataSource_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_sub_region.go b/outscale/data_source_outscale_sub_region.go
deleted file mode 100644
index 6029c940e..000000000
--- a/outscale/data_source_outscale_sub_region.go
+++ /dev/null
@@ -1,96 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
-
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIAvailabilityZone() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIAvailabilityZoneRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "sub_region_name": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
-
- "region_name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
-
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
-
- "state": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIAvailabilityZoneRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
- zone, zoneOk := d.GetOk("sub_region_name")
-
- if !filtersOk && !zoneOk {
- return fmt.Errorf("One of sub_region_name or filters must be assigned")
- }
-
- req := &fcu.DescribeAvailabilityZonesInput{}
-
- if zoneOk {
- req.ZoneNames = []*string{aws.String(zone.(string))}
- }
-
- if filtersOk {
- req.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
-
- var resp *fcu.DescribeAvailabilityZonesOutput
- var err error
- err = resource.Retry(60*time.Second, func() *resource.RetryError {
- resp, err = conn.VM.DescribeAvailabilityZones(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
-
- return resource.NonRetryableError(err)
- })
- if err != nil {
- return err
- }
- if resp == nil || len(resp.AvailabilityZones) == 0 {
- return fmt.Errorf("no matching AZ found")
- }
- if len(resp.AvailabilityZones) > 1 {
- return fmt.Errorf("multiple AZs matched; use additional constraints to reduce matches to a single AZ")
- }
-
- az := resp.AvailabilityZones[0]
-
- d.SetId(*az.ZoneName)
- d.Set("sub_region_name", az.ZoneName)
- d.Set("region_name", az.RegionName)
- d.Set("state", az.State)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_sub_region_test.go b/outscale/data_source_outscale_sub_region_test.go
deleted file mode 100644
index 7871897dc..000000000
--- a/outscale/data_source_outscale_sub_region_test.go
+++ /dev/null
@@ -1,55 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccDataSourceOutscaleOAPIAvailabilityZone(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIAvailabilityZoneConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccDataSourceOutscaleOAPIAvailabilityZoneCheck("data.outscale_sub_region.by_name"),
- ),
- },
- },
- })
-}
-
-func testAccDataSourceOutscaleOAPIAvailabilityZoneCheck(name string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[name]
- if !ok {
- return fmt.Errorf("root module has no resource called %s", name)
- }
-
- attr := rs.Primary.Attributes
-
- if attr["sub_region_name"] != "eu-west-2a" {
- return fmt.Errorf("bad name %s", attr["sub_region_name"])
- }
- if attr["region_name"] != "eu-west-2" {
- return fmt.Errorf("bad region %s", attr["region_name"])
- }
-
- return nil
- }
-}
-
-const testAccDataSourceOutscaleOAPIAvailabilityZoneConfig = `
-data "outscale_sub_region" "by_name" {
- sub_region_name = "eu-west-2a"
-}
-`
diff --git a/outscale/data_source_outscale_subnet.go b/outscale/data_source_outscale_subnet.go
index bb6530564..f46a5e020 100644
--- a/outscale/data_source_outscale_subnet.go
+++ b/outscale/data_source_outscale_subnet.go
@@ -3,14 +3,15 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPISubnet() *schema.Resource {
@@ -75,8 +76,8 @@ func dataSourceOutscaleOAPISubnetRead(d *schema.ResourceData, meta interface{})
}
var resp oscgo.ReadSubnetsResponse
- var err error
- err = resource.Retry(120*time.Second, func() *resource.RetryError {
+
+ err := resource.Retry(120*time.Second, func() *resource.RetryError {
r, _, err := conn.SubnetApi.ReadSubnets(context.Background(), &oscgo.ReadSubnetsOpts{ReadSubnetsRequest: optional.NewInterface(req)})
if err != nil {
diff --git a/outscale/data_source_outscale_subnet_test.go b/outscale/data_source_outscale_subnet_test.go
index 8be7590b6..3b6e690a6 100644
--- a/outscale/data_source_outscale_subnet_test.go
+++ b/outscale/data_source_outscale_subnet_test.go
@@ -4,17 +4,14 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccDataSourceOutscaleOAPISubnet(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_subnets.go b/outscale/data_source_outscale_subnets.go
index 62599f5b7..49a168004 100644
--- a/outscale/data_source_outscale_subnets.go
+++ b/outscale/data_source_outscale_subnets.go
@@ -8,8 +8,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPISubnets() *schema.Resource {
diff --git a/outscale/data_source_outscale_subnets_test.go b/outscale/data_source_outscale_subnets_test.go
index efb97076f..62051caf6 100644
--- a/outscale/data_source_outscale_subnets_test.go
+++ b/outscale/data_source_outscale_subnets_test.go
@@ -4,8 +4,8 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccDataSourceOutscaleOAPISubnets(t *testing.T) {
@@ -13,10 +13,7 @@ func TestAccDataSourceOutscaleOAPISubnets(t *testing.T) {
rInt := acctest.RandIntRange(0, 256)
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_tag.go b/outscale/data_source_outscale_tag.go
index d614c032e..46874f32c 100644
--- a/outscale/data_source_outscale_tag.go
+++ b/outscale/data_source_outscale_tag.go
@@ -10,8 +10,8 @@ import (
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPITag() *schema.Resource {
diff --git a/outscale/data_source_outscale_tag_test.go b/outscale/data_source_outscale_tag_test.go
index 354da986e..b27acb525 100644
--- a/outscale/data_source_outscale_tag_test.go
+++ b/outscale/data_source_outscale_tag_test.go
@@ -2,20 +2,17 @@ package outscale
import (
"fmt"
+ "os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPITagDataSource(t *testing.T) {
- //t.Skip()
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_tags.go b/outscale/data_source_outscale_tags.go
index 4627c3cb7..4230a01cf 100644
--- a/outscale/data_source_outscale_tags.go
+++ b/outscale/data_source_outscale_tags.go
@@ -7,8 +7,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPITags() *schema.Resource {
diff --git a/outscale/data_source_outscale_tags_test.go b/outscale/data_source_outscale_tags_test.go
index c79431f96..559d63855 100644
--- a/outscale/data_source_outscale_tags_test.go
+++ b/outscale/data_source_outscale_tags_test.go
@@ -3,15 +3,12 @@ package outscale
import (
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPITagsDataSource_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_vm.go b/outscale/data_source_outscale_vm.go
index b3a64ec04..cfaa25f38 100644
--- a/outscale/data_source_outscale_vm.go
+++ b/outscale/data_source_outscale_vm.go
@@ -5,34 +5,16 @@ import (
"errors"
"fmt"
"log"
- "strconv"
"strings"
"time"
"github.com/antihax/optional"
"github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
)
-func buildOutscaleDataSourceFilters(set *schema.Set) []*fcu.Filter {
- var filters []*fcu.Filter
- for _, v := range set.List() {
- m := v.(map[string]interface{})
- var filterValues []*string
- for _, e := range m["values"].([]interface{}) {
- filterValues = append(filterValues, aws.String(e.(string)))
- }
- filters = append(filters, &fcu.Filter{
- Name: aws.String(m["name"].(string)),
- Values: filterValues,
- })
- }
- return filters
-}
-
func dataSourceOutscaleOAPIVM() *schema.Resource {
return &schema.Resource{
Read: dataSourceOutscaleOAPIVMRead,
@@ -45,7 +27,7 @@ func dataSourceOutscaleOAPIVMRead(d *schema.ResourceData, meta interface{}) erro
filters, filtersOk := d.GetOk("filter")
instanceID, instanceIDOk := d.GetOk("vm_id")
- if filtersOk == false && instanceIDOk == false {
+ if !filtersOk && !instanceIDOk {
return fmt.Errorf("One of filters, or instance_id must be assigned")
}
@@ -135,10 +117,11 @@ func oapiVMDescriptionAttributes(set AttributeSetter, vm *oscgo.Vm) error {
set("net_id", vm.GetNetId())
if err := set("nics", getOAPIVMNetworkInterfaceLightSet(vm.GetNics())); err != nil {
- log.Printf("[DEBUG] NICS ERR %+v", err)
return err
}
+
set("os_family", vm.GetOsFamily())
+ set("performance", vm.GetPerformance())
set("placement_subregion_name", aws.StringValue(vm.GetPlacement().SubregionName))
set("placement_tenancy", aws.StringValue(vm.GetPlacement().Tenancy))
set("private_dns_name", vm.GetPrivateDnsName())
@@ -238,18 +221,6 @@ func buildOutscaleOAPIDataSourceVMFilters(set *schema.Set) *oscgo.FiltersVm {
return filters
}
-func sliceAtoi(sa []string) ([]int64, error) {
- si := make([]int64, 0, len(sa))
- for _, a := range sa {
- i, err := strconv.Atoi(a)
- if err != nil {
- return si, err
- }
- si = append(si, int64(i))
- }
- return si, nil
-}
-
func getOApiVMAttributesSchema() map[string]*schema.Schema {
return map[string]*schema.Schema{
// Attributes
@@ -444,8 +415,9 @@ func getOApiVMAttributesSchema() map[string]*schema.Schema {
Optional: true,
},
"link_nic": {
- Type: schema.TypeMap,
+ Type: schema.TypeList,
Computed: true,
+ MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"delete_on_vm_deletion": {
@@ -533,6 +505,10 @@ func getOApiVMAttributesSchema() map[string]*schema.Schema {
Type: schema.TypeString,
Computed: true,
},
+ "performance": {
+ Type: schema.TypeString,
+ Computed: true,
+ },
"placement_subregion_name": {
Type: schema.TypeString,
Optional: true,
diff --git a/outscale/data_source_outscale_vm_state.go b/outscale/data_source_outscale_vm_state.go
index 92e0ab568..ce65a2f58 100644
--- a/outscale/data_source_outscale_vm_state.go
+++ b/outscale/data_source_outscale_vm_state.go
@@ -10,8 +10,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIVMState() *schema.Resource {
diff --git a/outscale/data_source_outscale_vm_state_test.go b/outscale/data_source_outscale_vm_state_test.go
index 704de42a7..78687d766 100644
--- a/outscale/data_source_outscale_vm_state_test.go
+++ b/outscale/data_source_outscale_vm_state_test.go
@@ -2,20 +2,18 @@ package outscale
import (
"fmt"
+ "os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccDataSourceOutscaleOAPIVmState(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
diff --git a/outscale/data_source_outscale_vm_test.go b/outscale/data_source_outscale_vm_test.go
index f78235ace..7a83a624d 100644
--- a/outscale/data_source_outscale_vm_test.go
+++ b/outscale/data_source_outscale_vm_test.go
@@ -2,18 +2,16 @@ package outscale
import (
"fmt"
+ "os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPIVMDataSource_basic(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_vm_type.go b/outscale/data_source_outscale_vm_type.go
deleted file mode 100644
index ddef3a2ae..000000000
--- a/outscale/data_source_outscale_vm_type.go
+++ /dev/null
@@ -1,107 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIVMType() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIVMTypeRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "bsu_optimized": &schema.Schema{
- Type: schema.TypeBool,
- Computed: true,
- },
- "max_private_ip": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "memory_size": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "storage_count": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "storage_size": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "vcore_count": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIVMTypeRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filter, filterOk := d.GetOk("filter")
-
- req := &fcu.DescribeInstanceTypesInput{}
-
- if filterOk {
- req.Filters = buildOutscaleDataSourceFilters(filter.(*schema.Set))
- }
-
- var resp *fcu.DescribeInstanceTypesOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeInstanceTypes(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
-
- return nil
- })
-
- if err != nil {
- return err
- }
-
- if resp == nil || len(resp.InstanceTypeSet) == 0 {
- return fmt.Errorf("no matching regions found")
- }
- if len(resp.InstanceTypeSet) > 1 {
- return fmt.Errorf("multiple vm types matched; use additional constraints to reduce matches to a single vm type")
- }
-
- vm := resp.InstanceTypeSet[0]
-
- d.SetId(*vm.Name)
- d.Set("bsu_optimized", *vm.EbsOptimizedAvailable)
- d.Set("max_private_ip", *vm.MaxIpAddresses)
- d.Set("memory_size", *vm.Memory)
- d.Set("name", *vm.Name)
- d.Set("storage_count", *vm.StorageCount)
- if vm.StorageSize != nil {
- d.Set("storage_size", *vm.StorageSize)
- } else {
- d.Set("storage_size", 0)
- }
- d.Set("vcore_count", *vm.Vcpu)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_vm_type_test.go b/outscale/data_source_outscale_vm_type_test.go
deleted file mode 100644
index db2fd43b7..000000000
--- a/outscale/data_source_outscale_vm_type_test.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIVMType_basic(t *testing.T) {
- t.Skip()
-
- rInt := acctest.RandInt()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIVMType(rInt),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(
- "data.outscale_vm_type.test_by_id", "name", "t2.micro"),
- ),
- },
- },
- })
-}
-
-func testAccDataSourceOutscaleOAPIVMType(rInt int) string {
- return fmt.Sprintf(`
- data "outscale_vm_type" "test_by_id" {
- filter {
- name = "name"
- values = ["t2.micro"]
- }
- }
- `)
-}
diff --git a/outscale/data_source_outscale_vm_types.go b/outscale/data_source_outscale_vm_types.go
deleted file mode 100644
index 0edef3d22..000000000
--- a/outscale/data_source_outscale_vm_types.go
+++ /dev/null
@@ -1,123 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIVMTypes() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIVMTypesRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "type": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "bsu_optimized": &schema.Schema{
- Type: schema.TypeBool,
- Computed: true,
- },
- "max_private_ip": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "memory_size": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "name": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "storage_count": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "storage_size": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- "vcore_count": &schema.Schema{
- Type: schema.TypeInt,
- Computed: true,
- },
- },
- },
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIVMTypesRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filter, filterOk := d.GetOk("filter")
-
- req := &fcu.DescribeInstanceTypesInput{}
-
- if filterOk {
- req.Filters = buildOutscaleDataSourceFilters(filter.(*schema.Set))
- }
-
- log.Printf("[DEBUG] DescribeVMTypes %+v\n", req)
-
- var resp *fcu.DescribeInstanceTypesOutput
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeInstanceTypes(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- }
-
- return nil
- })
-
- if err != nil {
- return err
- }
-
- if resp == nil || len(resp.InstanceTypeSet) == 0 {
- return fmt.Errorf("no matching regions found")
- }
-
- vms := make([]map[string]interface{}, len(resp.InstanceTypeSet))
-
- for k, v := range resp.InstanceTypeSet {
- vm := make(map[string]interface{})
- vm["bsu_optimized"] = *v.EbsOptimizedAvailable
- vm["max_private_ip"] = *v.MaxIpAddresses
- vm["memory_size"] = *v.Memory
- vm["name"] = *v.Name
- vm["storage_count"] = *v.StorageCount
- if v.StorageSize != nil {
- vm["storage_size"] = *v.StorageSize
- } else {
- vm["storage_size"] = 0
- }
- vm["vcore_count"] = *v.Vcpu
- vms[k] = vm
- }
-
- if err := d.Set("type", vms); err != nil {
- return err
- }
- d.SetId(resource.UniqueId())
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_vm_types_test.go b/outscale/data_source_outscale_vm_types_test.go
deleted file mode 100644
index 7d819cdc2..000000000
--- a/outscale/data_source_outscale_vm_types_test.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIVMTypes_basic(t *testing.T) {
- t.Skip()
-
- rInt := acctest.RandInt()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIVMTypes(rInt),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(
- "data.outscale_vm_types.test_by_id", "instance_type_set.#", "1"),
- ),
- },
- },
- })
-}
-
-func testAccDataSourceOutscaleOAPIVMTypes(rInt int) string {
- return fmt.Sprintf(`
- data "outscale_vm_types" "test_by_id" {
- filter {
- name = "name"
- values = ["t2.micro"]
- }
- }
- `)
-}
diff --git a/outscale/data_source_outscale_vms.go b/outscale/data_source_outscale_vms.go
index 8932122f4..6732b5c55 100644
--- a/outscale/data_source_outscale_vms.go
+++ b/outscale/data_source_outscale_vms.go
@@ -9,8 +9,8 @@ import (
"time"
"github.com/antihax/optional"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
oscgo "github.com/marinsalinas/osc-sdk-go"
)
@@ -69,7 +69,7 @@ func dataSourceOutscaleOApiVMSRead(d *schema.ResourceData, meta interface{}) err
filters, filtersOk := d.GetOk("filter")
vmID, vmIDOk := d.GetOk("vm_id")
- if filtersOk == false && vmIDOk == false {
+ if !filtersOk && !vmIDOk {
return fmt.Errorf("One of filters, and vm ID must be assigned")
}
@@ -146,25 +146,3 @@ func dataSourceOAPIVMS(i []oscgo.Vm) []map[string]interface{} {
}
return vms
}
-
-func dataSourceFiltersOApiSchema() *schema.Schema {
- return &schema.Schema{
- Type: schema.TypeSet,
- Optional: true,
- ForceNew: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "name": {
- Type: schema.TypeString,
- Required: true,
- },
-
- "values": {
- Type: schema.TypeList,
- Required: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- },
- },
- }
-}
diff --git a/outscale/data_source_outscale_vms_state.go b/outscale/data_source_outscale_vms_state.go
index 51ca6c644..3b94f05fa 100644
--- a/outscale/data_source_outscale_vms_state.go
+++ b/outscale/data_source_outscale_vms_state.go
@@ -4,13 +4,14 @@ import (
"context"
"errors"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func dataSourceOutscaleOAPIVMSState() *schema.Resource {
@@ -104,7 +105,9 @@ func statusDescriptionOAPIVMSStateAttributes(d *schema.ResourceData, status []os
return nil
}
- statusDescriptionOAPIVMStateAttributes(setterFunc, &v)
+ if err := statusDescriptionOAPIVMStateAttributes(setterFunc, &v); err != nil {
+ return err
+ }
states[k] = state
}
diff --git a/outscale/data_source_outscale_vms_state_test.go b/outscale/data_source_outscale_vms_state_test.go
index 0f1d925e0..d9852e128 100644
--- a/outscale/data_source_outscale_vms_state_test.go
+++ b/outscale/data_source_outscale_vms_state_test.go
@@ -2,19 +2,17 @@ package outscale
import (
"fmt"
+ "os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccDataSourceOutscaleOAPIVMSState(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
diff --git a/outscale/data_source_outscale_vms_test.go b/outscale/data_source_outscale_vms_test.go
index eb0e6e562..3e91f0630 100644
--- a/outscale/data_source_outscale_vms_test.go
+++ b/outscale/data_source_outscale_vms_test.go
@@ -5,18 +5,14 @@ import (
"os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPIVMSDataSource_basic(t *testing.T) {
- region := os.Getenv("OUTSCALE_REGION")
- omi := getOMIByRegion(region, "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_volume.go b/outscale/data_source_outscale_volume.go
index 0cfafdb8b..09c0bf1c3 100644
--- a/outscale/data_source_outscale_volume.go
+++ b/outscale/data_source_outscale_volume.go
@@ -3,15 +3,16 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
"github.com/davecgh/go-spew/spew"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/terraform-providers/terraform-provider-outscale/utils"
)
@@ -158,30 +159,7 @@ func volumeOAPIDescriptionAttributes(d *schema.ResourceData, volume *oscgo.Volum
d.Set("iops", volume.GetIops())
if volume.LinkedVolumes != nil {
- res := make([]map[string]interface{}, len(volume.GetLinkedVolumes()))
- for k, g := range volume.GetLinkedVolumes() {
- r := make(map[string]interface{})
- if g.DeleteOnVmDeletion != nil {
- r["delete_on_vm_deletion"] = g.GetDeleteOnVmDeletion()
- }
- if g.GetDeviceName() != "" {
- r["device_name"] = g.GetDeviceName()
- }
- if g.GetVmId() != "" {
- r["vm_id"] = g.GetVmId()
- }
- if g.GetState() != "" {
- r["state"] = g.GetState()
- }
- if g.GetVolumeId() != "" {
- r["volume_id"] = g.GetVolumeId()
- }
-
- res[k] = r
-
- }
-
- if err := d.Set("linked_volumes", res); err != nil {
+ if err := d.Set("linked_volumes", getLinkedVolumes(volume.GetLinkedVolumes())); err != nil {
return err
}
} else {
diff --git a/outscale/data_source_outscale_volume_test.go b/outscale/data_source_outscale_volume_test.go
index 7838ac7f2..12bfc6b6a 100644
--- a/outscale/data_source_outscale_volume_test.go
+++ b/outscale/data_source_outscale_volume_test.go
@@ -5,18 +5,15 @@ import (
"os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIVolumeDataSource_basic(t *testing.T) {
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_volumes.go b/outscale/data_source_outscale_volumes.go
index 48d6949af..ad1a979db 100644
--- a/outscale/data_source_outscale_volumes.go
+++ b/outscale/data_source_outscale_volumes.go
@@ -3,17 +3,16 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
- "github.com/spf13/cast"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
"github.com/davecgh/go-spew/spew"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func datasourceOutscaleOAPIVolumes() *schema.Resource {
@@ -164,7 +163,7 @@ func getOAPIVolumes(volumes []oscgo.Volume) (res []map[string]interface{}) {
for _, v := range volumes {
res = append(res, map[string]interface{}{
"iops": v.Iops,
- "linked_volumes": v.LinkedVolumes,
+ "linked_volumes": getLinkedVolumes(v.GetLinkedVolumes()),
"size": v.Size,
"snapshot_id": v.SnapshotId,
"state": v.State,
@@ -177,10 +176,10 @@ func getOAPIVolumes(volumes []oscgo.Volume) (res []map[string]interface{}) {
return
}
-func getOAPILinkedVolumes(linkedVolumes []oscgo.LinkedVolume) (res []map[string]interface{}) {
+func getLinkedVolumes(linkedVolumes []oscgo.LinkedVolume) (res []map[string]interface{}) {
for _, l := range linkedVolumes {
res = append(res, map[string]interface{}{
- "delete_on_vm_deletion": cast.ToString(l.DeleteOnVmDeletion),
+ "delete_on_vm_deletion": l.DeleteOnVmDeletion,
"device_name": l.DeviceName,
"vm_id": l.VmId,
"state": l.State,
diff --git a/outscale/data_source_outscale_volumes_test.go b/outscale/data_source_outscale_volumes_test.go
index 1c68b003a..be19a620a 100644
--- a/outscale/data_source_outscale_volumes_test.go
+++ b/outscale/data_source_outscale_volumes_test.go
@@ -5,17 +5,14 @@ import (
"os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPIVolumesDataSource_multipleFilters(t *testing.T) {
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
@@ -34,10 +31,7 @@ func TestAccOutscaleOAPIVolumeDataSource_multipleVIdsFilters(t *testing.T) {
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
diff --git a/outscale/data_source_outscale_vpn_connection.go b/outscale/data_source_outscale_vpn_connection.go
deleted file mode 100644
index 2a1f7d8f7..000000000
--- a/outscale/data_source_outscale_vpn_connection.go
+++ /dev/null
@@ -1,207 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strconv"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleVpnConnection() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleVpnConnectionRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "vpn_connection_id": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "customer_gateway_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "options": {
- Type: schema.TypeMap,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "static_routes_only": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
- "vpn_gateway_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "customer_gateway_configuration": {
- Type: schema.TypeString,
- Computed: true,
- },
- "routes": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "destination_cidr_block": {
- Type: schema.TypeString,
- Computed: true,
- },
- "source": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
-
- "tag_set": tagsSchemaComputed(),
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "vgw_telemetry": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "accepted_route_count": {
- Type: schema.TypeInt,
- Computed: true,
- },
- "outside_ip_address": {
- Type: schema.TypeString,
- Computed: true,
- },
- "status": {
- Type: schema.TypeString,
- Computed: true,
- },
- "status_message": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleVpnConnectionRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
- vpn, vpnOk := d.GetOk("vpn_connection_id")
-
- if !filtersOk && !vpnOk {
- return fmt.Errorf("One of vpn_connection_id or filters must be assigned")
- }
-
- params := &fcu.DescribeVpnConnectionsInput{}
-
- if filtersOk {
- params.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
- if vpnOk {
- params.VpnConnectionIds = []*string{aws.String(vpn.(string))}
- }
-
- var resp *fcu.DescribeVpnConnectionsOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnConnections(params)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.VpnConnections) == 0 {
- return fmt.Errorf("no matching VPN connection found: %#v", params)
- }
- if len(resp.VpnConnections) > 1 {
- return fmt.Errorf("multiple VPN connections matched; use additional constraints to reduce matches to a single VPN connection")
- }
-
- vpnConnection := resp.VpnConnections[0]
-
- options := make(map[string]interface{})
- if vpnConnection.Options != nil {
- options["static_routes_only"] = strconv.FormatBool(aws.BoolValue(vpnConnection.Options.StaticRoutesOnly))
- } else {
- options["static_routes_only"] = strconv.FormatBool(false)
- }
-
- d.Set("options", options)
- d.Set("customer_gateway_configuration", vpnConnection.CustomerGatewayConfiguration)
-
- routes := make([]map[string]interface{}, len(vpnConnection.Routes))
-
- for k, v := range vpnConnection.Routes {
- route := make(map[string]interface{})
-
- route["destination_cidr_block"] = *v.DestinationCidrBlock
- route["source"] = *v.Source
- route["state"] = *v.State
-
- routes[k] = route
- }
-
- d.Set("customer_gateway_id", vpnConnection.CustomerGatewayId)
- d.Set("routes", routes)
- d.Set("tag_set", tagsToMap(vpnConnection.Tags))
-
- d.Set("state", vpnConnection.State)
-
- vgws := make([]map[string]interface{}, len(vpnConnection.VgwTelemetry))
-
- for k, v := range vpnConnection.VgwTelemetry {
- vgw := make(map[string]interface{})
-
- vgw["accepted_route_count"] = *v.AcceptedRouteCount
- vgw["outside_ip_address"] = *v.OutsideIpAddress
- vgw["status"] = *v.Status
- vgw["status_message"] = *v.StatusMessage
-
- vgws[k] = vgw
- }
-
- d.Set("vgw_telemetry", vgws)
- d.Set("vpn_connection_id", vpnConnection.VpnConnectionId)
- d.Set("vpn_gateway_id", vpnConnection.VpnGatewayId)
- d.Set("type", vpnConnection.Type)
- d.Set("request_id", resp.RequestId)
- d.SetId(resource.UniqueId())
-
- return nil
-}
diff --git a/outscale/data_source_outscale_vpn_connection_test.go b/outscale/data_source_outscale_vpn_connection_test.go
deleted file mode 100644
index 040b1703e..000000000
--- a/outscale/data_source_outscale_vpn_connection_test.go
+++ /dev/null
@@ -1,74 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "os"
- "strconv"
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccOutscaleVpnConnectionDataSource_basic(t *testing.T) {
- t.Skip()
-
- o := os.Getenv("OUTSCALE_OAPI")
-
- oapi, err := strconv.ParseBool(o)
- if err != nil {
- oapi = false
- }
-
- if oapi {
- t.Skip()
- }
-
- rBgpAsn := acctest.RandIntRange(64512, 65534)
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() { testAccPreCheck(t) },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- {
- Config: testAccOutscaleVpnConnectionDataSourceConfig(rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttrSet("data.outscale_vpn_connection.test", "vpn_gateway_id"),
- resource.TestCheckResourceAttrSet("data.outscale_vpn_connection.test", "state"),
- ),
- },
- },
- })
-}
-
-func testAccOutscaleVpnConnectionDataSourceConfig(rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_vpn_gateway" "vpn_gateway" {
- tag {
- Name = "vpn_gateway"
- }
- }
-
- resource "outscale_client_endpoint" "customer_gateway" {
- bgp_asn = %d
- ip_address = "178.0.0.1"
- type = "ipsec.1"
- tag {
- Name = "main-customer-gateway"
- }
- }
-
- resource "outscale_vpn_connection" "foo" {
- vpn_gateway_id = "${outscale_vpn_gateway.vpn_gateway.id}"
- customer_gateway_id = "${outscale_client_endpoint.customer_gateway.id}"
- type = "ipsec.1"
- options {
- static_routes_only = true
- }
- }
-
- data "outscale_vpn_connection" "test" {
- vpn_connection_id = "${outscale_vpn_connection.foo.id}"
- }
-`, rBgpAsn)
-}
diff --git a/outscale/data_source_outscale_vpn_connections.go b/outscale/data_source_outscale_vpn_connections.go
deleted file mode 100644
index 0a80efeea..000000000
--- a/outscale/data_source_outscale_vpn_connections.go
+++ /dev/null
@@ -1,226 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strconv"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIVpnConnections() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIVpnConnectionsRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "vpn_connection_id": {
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "vpn_connection": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "vpn_connection_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "client_endpoint_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "vpn_connection_option": {
- Type: schema.TypeMap,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "static_routes_only": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
- "vpn_gateway_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "client_endpoint_configuration": {
- Type: schema.TypeString,
- Computed: true,
- },
- "vpn_static_route": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "destination_ip_range": {
- Type: schema.TypeString,
- Computed: true,
- },
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
-
- "tag": tagsSchemaComputed(),
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "vpn_tunnel_description": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "accepted_routes_count": {
- Type: schema.TypeInt,
- Computed: true,
- },
- "outscale_side_ip": {
- Type: schema.TypeString,
- Computed: true,
- },
- "status": {
- Type: schema.TypeString,
- Computed: true,
- },
- "comment": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- },
- },
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIVpnConnectionsRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
- vpn, vpnOk := d.GetOk("vpn_connection_id")
-
- if !filtersOk && !vpnOk {
- return fmt.Errorf("One of vpn_connection_id or filters must be assigned")
- }
-
- params := &fcu.DescribeVpnConnectionsInput{}
-
- if filtersOk {
- params.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
- if vpnOk {
- var ids []*string
-
- for _, id := range vpn.([]interface{}) {
- ids = append(ids, aws.String(id.(string)))
- }
- params.VpnConnectionIds = ids
- }
-
- var resp *fcu.DescribeVpnConnectionsOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnConnections(params)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.VpnConnections) == 0 {
- return fmt.Errorf("no matching VPN connection found: %#v", params)
- }
-
- vcs := make([]map[string]interface{}, len(resp.VpnConnections))
-
- for k, v := range resp.VpnConnections {
- vc := make(map[string]interface{})
- vpn := make(map[string]interface{})
- if v.Options != nil {
- vpn["static_routes_only"] = strconv.FormatBool(aws.BoolValue(v.Options.StaticRoutesOnly))
- } else {
- vpn["static_routes_only"] = strconv.FormatBool(false)
- }
- vc["vpn_connection_option"] = vpn
- vc["client_endpoint_configuration"] = *v.CustomerGatewayConfiguration
- vc["client_endpoint_id"] = *v.CustomerGatewayId
-
- vr := make([]map[string]interface{}, len(v.Routes))
-
- for k1, v1 := range v.Routes {
- route := make(map[string]interface{})
-
- route["destination_ip_range"] = *v1.DestinationCidrBlock
- route["type"] = *v1.Source
- route["state"] = *v1.State
-
- vr[k1] = route
- }
- vc["vpn_static_route"] = vr
- vc["tag"] = tagsToMap(v.Tags)
- vc["state"] = *v.State
-
- vgws := make([]map[string]interface{}, len(v.VgwTelemetry))
-
- for k1, v1 := range v.VgwTelemetry {
- vgw := make(map[string]interface{})
-
- vgw["accepted_routes_count"] = *v1.AcceptedRouteCount
- vgw["outscale_side_ip"] = *v1.OutsideIpAddress
- vgw["status"] = *v1.Status
- vgw["comment"] = *v1.StatusMessage
-
- vgws[k1] = vgw
- }
- vc["vpn_tunnel_description"] = vgws
- vc["vpn_connection_id"] = *v.VpnConnectionId
- vc["vpn_gateway_id"] = *v.VpnGatewayId
- vc["type"] = *v.Type
-
- vcs[k] = vc
- }
-
- if err := d.Set("vpn_connection", vcs); err != nil {
- return err
- }
- d.Set("request_id", resp.RequestId)
- d.SetId(resource.UniqueId())
-
- return nil
-}
diff --git a/outscale/data_source_outscale_vpn_connections_test.go b/outscale/data_source_outscale_vpn_connections_test.go
deleted file mode 100644
index 93b8ab8df..000000000
--- a/outscale/data_source_outscale_vpn_connections_test.go
+++ /dev/null
@@ -1,66 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccOutscaleOAPIVpnConnectionsDataSource_basic(t *testing.T) {
- t.Skip()
-
- rBgpAsn := acctest.RandIntRange(64512, 65534)
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- {
- Config: testAccOutscaleOAPIVpnConnectionsDataSourceConfig(rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(
- "data.outscale_vpn_connections.test", "vpn_connection_set.#", "1"),
- ),
- },
- },
- })
-}
-
-func testAccOutscaleOAPIVpnConnectionsDataSourceConfig(rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_vpn_gateway" "vpn_gateway" {
- tag {
- Name = "vpn_gateway"
- }
- }
-
- resource "outscale_client_endpoint" "customer_gateway" {
- bgp_asn = %d
- ip_address = "178.0.0.1"
- type = "ipsec.1"
-
- tag {
- Name = "main-customer-gateway"
- }
- }
-
- resource "outscale_vpn_connection" "foo" {
- vpn_gateway_id = "${outscale_vpn_gateway.vpn_gateway.id}"
- client_endpoint_id = "${outscale_client_endpoint.customer_gateway.id}"
- type = "ipsec.1"
-
- vpn_connection_option {
- static_routes_only = true
- }
- }
-
- data "outscale_vpn_connections" "test" {
- vpn_connection_id = ["${outscale_vpn_connection.foo.id}"]
- }
- `, rBgpAsn)
-}
diff --git a/outscale/data_source_outscale_vpn_gateway.go b/outscale/data_source_outscale_vpn_gateway.go
deleted file mode 100644
index e08507a64..000000000
--- a/outscale/data_source_outscale_vpn_gateway.go
+++ /dev/null
@@ -1,124 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIVpnGateway() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIVpnGatewayRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "vpn_gateway_id": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "type": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "lin_to_vpn_gateway_link": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "state": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "lin_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "tag": tagsSchemaComputed(),
- },
- }
-}
-
-func dataSourceOutscaleOAPIVpnGatewayRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
- vpn, vpnOk := d.GetOk("vpn_gateway_id")
-
- if !filtersOk && !vpnOk {
- return fmt.Errorf("One of vpn_gateway_id or filters must be assigned")
- }
-
- params := &fcu.DescribeVpnGatewaysInput{}
-
- if filtersOk {
- params.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
- if vpnOk {
- params.VpnGatewayIds = []*string{aws.String(vpn.(string))}
- }
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnGateways(params)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.VpnGateways) == 0 {
- return fmt.Errorf("no matching VPN gateway found: %#v", params)
- }
- if len(resp.VpnGateways) > 1 {
- return fmt.Errorf("multiple VPN gateways matched; use additional constraints to reduce matches to a single VPN gateway")
- }
-
- vgw := resp.VpnGateways[0]
-
- d.SetId(aws.StringValue(vgw.VpnGatewayId))
- vs := make([]map[string]interface{}, len(vgw.VpcAttachments))
-
- for k, v := range vgw.VpcAttachments {
- vp := make(map[string]interface{})
-
- vp["state"] = aws.StringValue(v.State)
- vp["lin_id"] = aws.StringValue(v.VpcId)
-
- vs[k] = vp
- }
-
- d.Set("lin_to_vpn_gateway_link", vs)
- d.Set("state", aws.StringValue(vgw.State))
- d.Set("type", aws.StringValue(vgw.Type))
- d.Set("tag", tagsToMap(vgw.Tags))
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
diff --git a/outscale/data_source_outscale_vpn_gateway_test.go b/outscale/data_source_outscale_vpn_gateway_test.go
deleted file mode 100644
index e5bb5ed2a..000000000
--- a/outscale/data_source_outscale_vpn_gateway_test.go
+++ /dev/null
@@ -1,51 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIVpnGateway_unattached(t *testing.T) {
- t.Skip()
-
- rInt := acctest.RandInt()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIVpnGatewayUnattachedConfig(rInt),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttrPair(
- "data.outscale_vpn_gateway.test_by_id", "id",
- "outscale_vpn_gateway.unattached", "id"),
- resource.TestCheckResourceAttrSet("data.outscale_vpn_gateway.test_by_id", "state"),
- resource.TestCheckNoResourceAttr("data.outscale_vpn_gateway.test_by_id", "attached_vpc_id"),
- ),
- },
- },
- })
-}
-
-func testAccDataSourceOutscaleOAPIVpnGatewayUnattachedConfig(rInt int) string {
- return fmt.Sprintf(`
- resource "outscale_vpn_gateway" "unattached" {
- tag {
- Name = "terraform-testacc-vpn-gateway-data-source-unattached-%d"
- ABC = "testacc-%d"
- XYZ = "testacc-%d"
- }
- }
-
- data "outscale_vpn_gateway" "test_by_id" {
- vpn_gateway_id = "${outscale_vpn_gateway.unattached.id}"
- }
- `, rInt, rInt+1, rInt-1)
-}
diff --git a/outscale/data_source_outscale_vpn_gateways.go b/outscale/data_source_outscale_vpn_gateways.go
deleted file mode 100644
index a312c3e45..000000000
--- a/outscale/data_source_outscale_vpn_gateways.go
+++ /dev/null
@@ -1,138 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func dataSourceOutscaleOAPIVpnGateways() *schema.Resource {
- return &schema.Resource{
- Read: dataSourceOutscaleOAPIVpnGatewaysRead,
-
- Schema: map[string]*schema.Schema{
- "filter": dataSourceFiltersSchema(),
- "vpn_gateway_id": {
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "vpn_gateway": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
- "vpn_gateway_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "lin_to_vpn_gateway_link": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "state": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "lin_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "tag": tagsSchemaComputed(),
- },
- },
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func dataSourceOutscaleOAPIVpnGatewaysRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- filters, filtersOk := d.GetOk("filter")
- vpn, vpnOk := d.GetOk("vpn_gateway_id")
-
- if !filtersOk && !vpnOk {
- return fmt.Errorf("One of vpn_gateway_id or filters must be assigned")
- }
-
- params := &fcu.DescribeVpnGatewaysInput{}
-
- if filtersOk {
- params.Filters = buildOutscaleDataSourceFilters(filters.(*schema.Set))
- }
- if vpnOk {
- params.VpnGatewayIds = expandStringList(vpn.([]interface{}))
- }
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnGateways(params)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
- if resp == nil || len(resp.VpnGateways) == 0 {
- return fmt.Errorf("no matching VPN gateway found: %#v", params)
- }
-
- vpns := make([]map[string]interface{}, len(resp.VpnGateways))
-
- for k, v := range resp.VpnGateways {
- vpn := make(map[string]interface{})
-
- vs := make([]map[string]interface{}, len(v.VpcAttachments))
-
- for k, v1 := range v.VpcAttachments {
- vp := make(map[string]interface{})
- vp["state"] = aws.StringValue(v1.State)
- vp["lin_id"] = aws.StringValue(v1.VpcId)
-
- vs[k] = vp
- }
-
- vpn["lin_to_vpn_gateway_link"] = vs
- vpn["state"] = aws.StringValue(v.State)
- vpn["vpn_gateway_id"] = aws.StringValue(v.VpnGatewayId)
- vpn["tag"] = tagsToMap(v.Tags)
-
- vpns[k] = vpn
- }
-
- d.Set("vpn_gateway", vpns)
- d.Set("request_id", resp.RequestId)
- d.SetId(resource.UniqueId())
-
- return nil
-}
diff --git a/outscale/data_source_outscale_vpn_gateways_test.go b/outscale/data_source_outscale_vpn_gateways_test.go
deleted file mode 100644
index ee8c914e5..000000000
--- a/outscale/data_source_outscale_vpn_gateways_test.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccDataSourceOutscaleOAPIVpnGateways_unattached(t *testing.T) {
- t.Skip()
-
- rInt := acctest.RandInt()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDataSourceOutscaleOAPIVpnGatewaysUnattachedConfig(rInt),
- Check: resource.ComposeTestCheckFunc(
- resource.TestCheckResourceAttr(
- "data.outscale_vpn_gateways.test_by_id", "vpn_gateway_set.#", "1"),
- ),
- },
- },
- })
-}
-
-func testAccDataSourceOutscaleOAPIVpnGatewaysUnattachedConfig(rInt int) string {
- return fmt.Sprintf(`
- resource "outscale_vpn_gateway" "unattached" {
- tag {
- Name = "terraform-testacc-vpn-gateway-data-source-unattached-%d"
- ABC = "testacc-%d"
- XYZ = "testacc-%d"
- }
- }
-
- data "outscale_vpn_gateways" "test_by_id" {
- vpn_gateway_id = ["${outscale_vpn_gateway.unattached.id}"]
- }
- `, rInt, rInt+1, rInt-1)
-}
diff --git a/outscale/dictionary.go b/outscale/dictionary.go
index b693159a9..f6d1433b4 100644
--- a/outscale/dictionary.go
+++ b/outscale/dictionary.go
@@ -1,6 +1,6 @@
package outscale
-import "github.com/hashicorp/terraform/helper/schema"
+import "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
//Dictionary for the Outscale APIs maps the apis to their respective functions
type Dictionary map[string]ResourceMap
@@ -17,109 +17,75 @@ var datasources Dictionary
func init() {
resources = Dictionary{
"oapi": ResourceMap{
- "outscale_vm": resourceOutscaleOApiVM,
- "outscale_firewall_rules_set": resourceOutscaleOAPISecurityGroup,
- "outscale_security_group": resourceOutscaleOAPISecurityGroup,
- "outscale_image": resourceOutscaleOAPIImage,
- "outscale_keypair": resourceOutscaleOAPIKeyPair,
- "outscale_public_ip": resourceOutscaleOAPIPublicIP,
- "outscale_public_ip_link": resourceOutscaleOAPIPublicIPLink,
- "outscale_volume": resourceOutscaleOAPIVolume,
- "outscale_volumes_link": resourceOutscaleOAPIVolumeLink,
- "outscale_outbound_rule": resourceOutscaleOAPIOutboundRule,
- "outscale_security_group_rule": resourceOutscaleOAPIOutboundRule,
- "outscale_tag": resourceOutscaleOAPITags,
- "outscale_net_attributes": resourceOutscaleOAPILinAttributes,
- "outscale_net": resourceOutscaleOAPINet,
- "outscale_internet_service": resourceOutscaleOAPIInternetService,
- "outscale_internet_service_link": resourceOutscaleOAPIInternetServiceLink,
- "outscale_nat_service": resourceOutscaleOAPINatService,
- "outscale_subnet": resourceOutscaleOAPISubNet,
- "outscale_client_endpoint": resourceOutscaleOAPICustomerGateway,
- "outscale_route": resourceOutscaleOAPIRoute,
- "outscale_route_table": resourceOutscaleOAPIRouteTable,
- "outscale_route_table_link": resourceOutscaleOAPILinkRouteTable,
- "outscale_snapshot": resourceOutscaleOAPISnapshot,
- "outscale_keypair_importation": resourceOutscaleOAPIKeyPairImportation,
- "outscale_image_launch_permission": resourceOutscaleOAPIImageLaunchPermission,
- "outscale_net_peering": resourceOutscaleOAPILinPeeringConnection,
- "outscale_nic_private_ip": resourceOutscaleOAPINetworkInterfacePrivateIP,
- "outscale_nic_link": resourceOutscaleOAPINetworkInterfaceAttachment,
- "outscale_nic": resourceOutscaleOAPINic,
- "outscale_image_tasks": resourceOutscaleOAPIImageTasks,
- "outscale_vpn_gateway": resourceOutscaleOAPIVpnGateway,
- "outscale_vpn_gateway_route_propagation": resourceOutscaleOAPIVpnGatewayRoutePropagation,
- "outscale_snapshot_export_tasks": resourceOutscaleOAPIImageExportTasks,
- "outscale_vpn_connection_route": resourceOutscaleOAPIVpnConnectionRoute,
- "outscale_vpn_connection": resourceOutscaleOAPIVpnConnection,
- "outscale_image_copy": resourceOutscaleOAPIImageCopy,
- "outscale_vpn_gateway_link": resourceOutscaleOAPIVpnGatewayLink,
- "outscale_snapshot_attributes": resourcedOutscaleOAPISnapshotAttributes,
- "outscale_net_api_access": resourceOutscaleOAPIVpcEndpoint,
- "outscale_image_register": resourceOutscaleOAPIImageRegister,
- "outscale_net_peering_acceptation": resourceOutscaleOAPILinPeeringConnectionAccepter,
- "outscale_snapshot_import": resourcedOutscaleOAPISnapshotImport,
+ "outscale_vm": resourceOutscaleOApiVM,
+ "outscale_firewall_rules_set": resourceOutscaleOAPISecurityGroup,
+ "outscale_security_group": resourceOutscaleOAPISecurityGroup,
+ "outscale_image": resourceOutscaleOAPIImage,
+ "outscale_keypair": resourceOutscaleOAPIKeyPair,
+ "outscale_public_ip": resourceOutscaleOAPIPublicIP,
+ "outscale_public_ip_link": resourceOutscaleOAPIPublicIPLink,
+ "outscale_volume": resourceOutscaleOAPIVolume,
+ "outscale_volumes_link": resourceOutscaleOAPIVolumeLink,
+ "outscale_outbound_rule": resourceOutscaleOAPIOutboundRule,
+ "outscale_security_group_rule": resourceOutscaleOAPIOutboundRule,
+ "outscale_tag": resourceOutscaleOAPITags,
+ "outscale_net_attributes": resourceOutscaleOAPILinAttributes,
+ "outscale_net": resourceOutscaleOAPINet,
+ "outscale_internet_service": resourceOutscaleOAPIInternetService,
+ "outscale_internet_service_link": resourceOutscaleOAPIInternetServiceLink,
+ "outscale_nat_service": resourceOutscaleOAPINatService,
+ "outscale_subnet": resourceOutscaleOAPISubNet,
+ "outscale_route": resourceOutscaleOAPIRoute,
+ "outscale_route_table": resourceOutscaleOAPIRouteTable,
+ "outscale_route_table_link": resourceOutscaleOAPILinkRouteTable,
+ "outscale_snapshot": resourceOutscaleOAPISnapshot,
+ "outscale_image_launch_permission": resourceOutscaleOAPIImageLaunchPermission,
+ "outscale_net_peering": resourceOutscaleOAPILinPeeringConnection,
+ "outscale_nic_private_ip": resourceOutscaleOAPINetworkInterfacePrivateIP,
+ "outscale_nic_link": resourceOutscaleOAPINetworkInterfaceAttachment,
+ "outscale_nic": resourceOutscaleOAPINic,
+ "outscale_snapshot_attributes": resourcedOutscaleOAPISnapshotAttributes,
+ "outscale_net_peering_acceptation": resourceOutscaleOAPILinPeeringConnectionAccepter,
},
}
datasources = Dictionary{
"oapi": ResourceMap{
- "outscale_vm": dataSourceOutscaleOAPIVM,
- "outscale_vms": datasourceOutscaleOApiVMS,
- "outscale_firewall_rules_sets": dataSourceOutscaleOAPISecurityGroups,
- "outscale_security_groups": dataSourceOutscaleOAPISecurityGroups,
- "outscale_images": dataSourceOutscaleOAPIImages,
- "outscale_firewall_rules_set": dataSourceOutscaleOAPISecurityGroup,
- "outscale_security_group": dataSourceOutscaleOAPISecurityGroup,
- "outscale_tag": dataSourceOutscaleOAPITag,
- "outscale_tags": dataSourceOutscaleOAPITags,
- "outscale_volume": datasourceOutscaleOAPIVolume,
- "outscale_volumes": datasourceOutscaleOAPIVolumes,
- "outscale_keypair": datasourceOutscaleOAPIKeyPair,
- "outscale_keypairs": datasourceOutscaleOAPIKeyPairs,
- "outscale_internet_service": datasourceOutscaleOAPIInternetService,
- "outscale_internet_services": datasourceOutscaleOAPIInternetServices,
- "outscale_subnet": dataSourceOutscaleOAPISubnet,
- "outscale_subnets": dataSourceOutscaleOAPISubnets,
- "outscale_vm_state": dataSourceOutscaleOAPIVMState,
- "outscale_vms_state": dataSourceOutscaleOAPIVMSState,
- "outscale_net": dataSourceOutscaleOAPIVpc,
- "outscale_nets": dataSourceOutscaleOAPIVpcs,
- "outscale_net_attributes": dataSourceOutscaleOAPIVpcAttr,
- "outscale_client_endpoint": dataSourceOutscaleOAPICustomerGateway,
- "outscale_client_endpoints": dataSourceOutscaleOAPICustomerGateways,
- "outscale_route_table": dataSourceOutscaleOAPIRouteTable,
- "outscale_route_tables": dataSourceOutscaleOAPIRouteTables,
- "outscale_snapshot": dataSourceOutscaleOAPISnapshot,
- "outscale_snapshots": dataSourceOutscaleOAPISnapshots,
- "outscale_net_peering": dataSourceOutscaleOAPILinPeeringConnection,
- "outscale_net_peerings": dataSourceOutscaleOAPILinPeeringsConnection,
- "outscale_nic": dataSourceOutscaleOAPINic,
- "outscale_nics": dataSourceOutscaleOAPINics,
- "outscale_regions": dataSourceOutscaleOAPIRegions,
- "outscale_region": dataSourceOutscaleOAPIRegion,
- "outscale_reserved_vms_offer": dataSourceOutscaleOAPIReservedVMOffer,
- "outscale_reserved_vms_offers": dataSourceOutscaleOAPIReservedVMOffers,
- "outscale_reserved_vms": dataSourceOutscaleOAPIReservedVMS,
- "outscale_vpn_gateways": dataSourceOutscaleOAPIVpnGateways,
- "outscale_vpn_gateway": dataSourceOutscaleOAPIVpnGateway,
- "outscale_vm_type": dataSourceOutscaleOAPIVMType,
- "outscale_vm_types": dataSourceOutscaleOAPIVMTypes,
- "outscale_quotas": dataSourceOutscaleOAPIQuotas,
- "outscale_quota": dataSourceOutscaleOAPIQuota,
- "outscale_prefix_lists": dataSourceOutscaleOAPIPrefixLists,
- "outscale_prefix_list": dataSourceOutscaleOAPIPrefixList,
- "outscale_vpn_connections": dataSourceOutscaleOAPIVpnConnections,
- "outscale_sub_region": dataSourceOutscaleOAPIAvailabilityZone,
- "outscale_product_types": dataSourceOutscaleOAPIProductTypes,
- "outscale_image": dataSourceOutscaleOAPIImage,
- "outscale_net_api_access_services": dataSourceOutscaleOAPIVpcEndpointServices,
- "outscale_public_ip": dataSourceOutscaleOAPIPublicIP,
- "outscale_public_ips": dataSourceOutscaleOAPIPublicIPS,
- "outscale_nat_service": dataSourceOutscaleOAPINatService,
- "outscale_nat_services": dataSourceOutscaleOAPINatServices,
- "outscale_net_api_access": dataSourceOutscaleOAPIVpcEndpoint,
- "outscale_net_api_accesses": dataSourceOutscaleOAPIVpcEndpoints,
+ "outscale_vm": dataSourceOutscaleOAPIVM,
+ "outscale_vms": datasourceOutscaleOApiVMS,
+ "outscale_firewall_rules_sets": dataSourceOutscaleOAPISecurityGroups,
+ "outscale_security_groups": dataSourceOutscaleOAPISecurityGroups,
+ "outscale_images": dataSourceOutscaleOAPIImages,
+ "outscale_firewall_rules_set": dataSourceOutscaleOAPISecurityGroup,
+ "outscale_security_group": dataSourceOutscaleOAPISecurityGroup,
+ "outscale_tag": dataSourceOutscaleOAPITag,
+ "outscale_tags": dataSourceOutscaleOAPITags,
+ "outscale_volume": datasourceOutscaleOAPIVolume,
+ "outscale_volumes": datasourceOutscaleOAPIVolumes,
+ "outscale_keypair": datasourceOutscaleOAPIKeyPair,
+ "outscale_keypairs": datasourceOutscaleOAPIKeyPairs,
+ "outscale_internet_service": datasourceOutscaleOAPIInternetService,
+ "outscale_internet_services": datasourceOutscaleOAPIInternetServices,
+ "outscale_subnet": dataSourceOutscaleOAPISubnet,
+ "outscale_subnets": dataSourceOutscaleOAPISubnets,
+ "outscale_vm_state": dataSourceOutscaleOAPIVMState,
+ "outscale_vms_state": dataSourceOutscaleOAPIVMSState,
+ "outscale_net": dataSourceOutscaleOAPIVpc,
+ "outscale_nets": dataSourceOutscaleOAPIVpcs,
+ "outscale_net_attributes": dataSourceOutscaleOAPIVpcAttr,
+ "outscale_route_table": dataSourceOutscaleOAPIRouteTable,
+ "outscale_route_tables": dataSourceOutscaleOAPIRouteTables,
+ "outscale_snapshot": dataSourceOutscaleOAPISnapshot,
+ "outscale_snapshots": dataSourceOutscaleOAPISnapshots,
+ "outscale_net_peering": dataSourceOutscaleOAPILinPeeringConnection,
+ "outscale_net_peerings": dataSourceOutscaleOAPILinPeeringsConnection,
+ "outscale_nic": dataSourceOutscaleOAPINic,
+ "outscale_nics": dataSourceOutscaleOAPINics,
+ "outscale_image": dataSourceOutscaleOAPIImage,
+ "outscale_public_ip": dataSourceOutscaleOAPIPublicIP,
+ "outscale_public_ips": dataSourceOutscaleOAPIPublicIPS,
+ "outscale_nat_service": dataSourceOutscaleOAPINatService,
+ "outscale_nat_services": dataSourceOutscaleOAPINatServices,
},
}
}
diff --git a/outscale/import_outscale_client_endpoint_test.go b/outscale/import_outscale_client_endpoint_test.go
deleted file mode 100644
index e7941679b..000000000
--- a/outscale/import_outscale_client_endpoint_test.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package outscale
-
-import (
- "os"
- "strconv"
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccOutscaleClientEndpoint_importBasic(t *testing.T) {
- t.Skip()
-
- o := os.Getenv("OUTSCALE_OAPI")
-
- oapi, err := strconv.ParseBool(o)
- if err != nil {
- oapi = false
- }
-
- if oapi {
- t.Skip()
- }
- resourceName := "outscale_client_endpoint.foo"
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- rInt := acctest.RandInt()
- resource.Test(t, resource.TestCase{
- PreCheck: func() { testAccPreCheck(t) },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPICustomerGatewayDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOAPICustomerGatewayConfig(rInt, rBgpAsn),
- },
-
- resource.TestStep{
- ResourceName: resourceName,
- ImportState: true,
- ImportStateVerify: true,
- ImportStateVerifyIgnore: []string{"associate_public_ip_address", "user_data", "security_group", "tag"},
- },
- },
- })
-}
diff --git a/outscale/import_outscale_net_attributes_test.go b/outscale/import_outscale_net_attributes_test.go
index 31fd5b1fc..4c13b8228 100644
--- a/outscale/import_outscale_net_attributes_test.go
+++ b/outscale/import_outscale_net_attributes_test.go
@@ -1,37 +1,22 @@
package outscale
import (
- "os"
- "strconv"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
-func TestAccOutscaleLinAttr_import(t *testing.T) {
- t.Skip()
-
- o := os.Getenv("OUTSCALE_OAPI")
-
- oapi, err := strconv.ParseBool(o)
- if err != nil {
- oapi = false
- }
-
- if oapi {
- t.Skip()
- }
- resourceName := "outscale_lin_attributes.outscale_lin_attributes"
+func TestAccOutscaleNetAttr_import(t *testing.T) {
+ resourceName := "outscale_net_attributes.outscale_net_attributes"
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
- resource.TestStep{
+ {
Config: testAccOutscaleOAPILinAttrConfig,
},
-
- resource.TestStep{
+ {
ResourceName: resourceName,
ImportState: true,
ImportStateVerify: true,
diff --git a/outscale/import_outscale_route_table.go b/outscale/import_outscale_route_table.go
index 7975169fd..68e695e3c 100644
--- a/outscale/import_outscale_route_table.go
+++ b/outscale/import_outscale_route_table.go
@@ -3,49 +3,43 @@ package outscale
import (
"fmt"
- "github.com/hashicorp/terraform/helper/hashcode"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+
+ oscgo "github.com/marinsalinas/osc-sdk-go"
)
-func routeIDHash(d *schema.ResourceData, r *fcu.Route) string {
- return fmt.Sprintf("r-%s%d", d.Get("route_table_id").(string), hashcode.String(*r.DestinationCidrBlock))
+func routeIDHash(d *schema.ResourceData, r *oscgo.Route) string {
+ return fmt.Sprintf("r-%s%d", d.Get("route_table_id").(string),
+ hashcode.String(r.GetDestinationIpRange()))
}
// Route table import also imports all the rules
func resourceOutscaleRouteTableImportState(
d *schema.ResourceData,
meta interface{}) ([]*schema.ResourceData, error) {
- conn := meta.(*OutscaleClient).FCU
+ conn := meta.(*OutscaleClient).OSCAPI
// First query the resource itself
id := d.Id()
- resp, err := conn.VM.DescribeRouteTables(&fcu.DescribeRouteTablesInput{
- RouteTableIds: []*string{&id},
- })
- if err != nil {
- return nil, err
- }
- if len(resp.RouteTables) < 1 || resp.RouteTables[0] == nil {
- return nil, fmt.Errorf("route table %s is not found", id)
- }
- table := resp.RouteTables[0]
+ tableRaw, _, _ := readOAPIRouteTable(conn, id)
+ table := tableRaw.(oscgo.RouteTable)
// Start building our results
results := make([]*schema.ResourceData, 1,
- 2+len(table.Associations)+len(table.Routes))
+ 2+len(table.GetLinkRouteTables())+len(table.GetRoutes()))
results[0] = d
{
// Construct the routes
subResource := resourceOutscaleOAPIRoute()
- for _, route := range table.Routes {
+ for _, route := range table.GetRoutes() {
// Ignore the local/default route
if route.GatewayId != nil && *route.GatewayId == "local" {
continue
}
- if route.DestinationPrefixListId != nil {
+ if route.DestinationServiceId != nil {
// Skipping because VPC endpoint routes are handled separately
// See aws_vpc_endpoint
continue
@@ -55,8 +49,8 @@ func resourceOutscaleRouteTableImportState(
d := subResource.Data(nil)
d.SetType("outscale_route")
d.Set("route_table_id", id)
- d.Set("destination_cidr_block", route.DestinationCidrBlock)
- d.SetId(routeIDHash(d, route))
+ d.Set("destination_cidr_block", route.DestinationIpRange)
+ d.SetId(routeIDHash(d, &route))
results = append(results, d)
}
}
@@ -64,7 +58,7 @@ func resourceOutscaleRouteTableImportState(
{
// Construct the associations
subResource := resourceOutscaleOAPILinkRouteTable()
- for _, assoc := range table.Associations {
+ for _, assoc := range table.GetLinkRouteTables() {
if *assoc.Main {
// Ignore
continue
@@ -74,7 +68,7 @@ func resourceOutscaleRouteTableImportState(
d := subResource.Data(nil)
d.SetType("outscale_route_table_link")
d.Set("route_table_id", assoc.RouteTableId)
- d.SetId(*assoc.RouteTableAssociationId)
+ d.SetId(*assoc.LinkRouteTableId)
results = append(results, d)
}
}
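
For context, a sketch that is not part of the patch: routeIDHash above derives a synthetic ID for each imported route from the route table ID plus a hash of the destination IP range, so an imported route ends up with an ID such as the one produced below. The table ID and CIDR are assumed example values.

package outscale

import (
	"fmt"

	"github.com/hashicorp/terraform-plugin-sdk/helper/hashcode"
)

// exampleImportedRouteID mirrors routeIDHash with hard-coded, illustrative
// values; neither value is taken from the repository.
func exampleImportedRouteID() string {
	return fmt.Sprintf("r-%s%d", "rtb-12345678", hashcode.String("10.0.0.0/16"))
}
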
diff --git a/outscale/import_outscale_vpn_connection_route_test.go b/outscale/import_outscale_vpn_connection_route_test.go
deleted file mode 100644
index 76bb79d3d..000000000
--- a/outscale/import_outscale_vpn_connection_route_test.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccOutscaleVpnConnectionRoute_importBasic(t *testing.T) {
- t.Skip()
- resourceName := "outscale_vpn_connection_route.foo"
- rBgpAsn := acctest.RandIntRange(64512, 65534)
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() { testAccPreCheck(t) },
- Providers: testAccProviders,
- CheckDestroy: testAccOutscaleOAPIVpnConnectionRouteDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPIVpnConnectionRouteConfig(rBgpAsn),
- },
-
- resource.TestStep{
- ResourceName: resourceName,
- ImportState: true,
- ImportStateVerify: true,
- ImportStateVerifyIgnore: []string{"associate_public_ip_address", "user_data", "security_group", "request_id"},
- },
- },
- })
-}
diff --git a/outscale/import_outscale_vpn_gateway_test.go b/outscale/import_outscale_vpn_gateway_test.go
deleted file mode 100644
index 6bd30e9f8..000000000
--- a/outscale/import_outscale_vpn_gateway_test.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package outscale
-
-import (
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
-)
-
-func TestAccOutscaleVpnGateway_importBasic(t *testing.T) {
- t.Skip()
- resourceName := "outscale_vpn_gateway.foo"
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() { testAccPreCheck(t) },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPIVpnGatewayDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOAPIVpnGatewayConfig,
- },
-
- resource.TestStep{
- ResourceName: resourceName,
- ImportState: true,
- ImportStateVerify: true,
- ImportStateVerifyIgnore: []string{"associate_public_ip_address", "user_data", "security_group", "request_id"},
- },
- },
- })
-}
diff --git a/outscale/instance_set_set.go b/outscale/instance_set_set.go
index 650d50d31..83d66ce11 100644
--- a/outscale/instance_set_set.go
+++ b/outscale/instance_set_set.go
@@ -5,144 +5,11 @@ import (
"fmt"
"strconv"
- "github.com/hashicorp/terraform/helper/hashcode"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
)
-func getPlacement(placement *fcu.Placement) map[string]interface{} {
- res := map[string]interface{}{}
-
- if placement != nil {
- if placement.Affinity != nil {
- res["affinity"] = *placement.Affinity
- }
- res["availability_zone"] = *placement.AvailabilityZone
- res["group_name"] = *placement.GroupName
- if placement.HostId != nil {
- res["host_id"] = *placement.HostId
- }
- res["tenancy"] = *placement.Tenancy
- }
-
- return res
-}
-
-func getProductCodes(codes []*fcu.ProductCode) []map[string]interface{} {
- var res []map[string]interface{}
-
- if len(codes) > 0 {
- res = make([]map[string]interface{}, len(codes))
- for _, c := range codes {
- code := map[string]interface{}{}
-
- code["product_code"] = *c.ProductCode
- code["type"] = *c.Type
-
- res = append(res, code)
- }
- } else {
- res = make([]map[string]interface{}, 0)
- }
-
- return res
-}
-
-func getStateReason(reason *fcu.StateReason) map[string]interface{} {
- res := map[string]interface{}{}
- if reason != nil {
- res["code"] = reason.Code
- res["message"] = reason.Message
- }
- return res
-}
-
-func getTagSet(tags []*fcu.Tag) []map[string]interface{} {
- res := []map[string]interface{}{}
-
- if tags != nil {
- for _, t := range tags {
- tag := map[string]interface{}{}
-
- tag["key"] = *t.Key
- tag["value"] = *t.Value
-
- res = append(res, tag)
- }
- }
-
- return res
-}
-
-func getTagDescriptionSet(tags []*fcu.TagDescription) []map[string]interface{} {
- res := []map[string]interface{}{}
-
- if tags != nil {
- for _, t := range tags {
- tag := map[string]interface{}{}
-
- tag["key"] = *t.Key
- tag["value"] = *t.Value
- tag["resourceId"] = *t.ResourceId
- tag["resourceType"] = *t.ResourceType
-
- res = append(res, tag)
- }
- }
-
- return res
-}
-
-func flattenEBS(ebs *fcu.EbsInstanceBlockDevice) map[string]interface{} {
-
- res := map[string]interface{}{
- "delete_on_termination": fmt.Sprintf("%t", *ebs.DeleteOnTermination),
- "status": *ebs.Status,
- "volume_id": *ebs.VolumeId,
- }
-
- return res
-}
-
-func getBlockDeviceMapping(blockDeviceMappings []*fcu.InstanceBlockDeviceMapping) []map[string]interface{} {
- var blockDeviceMapping []map[string]interface{}
-
- if len(blockDeviceMappings) > 0 {
- blockDeviceMapping = make([]map[string]interface{}, len(blockDeviceMappings))
- for _, mapping := range blockDeviceMappings {
- r := map[string]interface{}{}
- r["device_name"] = *mapping.DeviceName
-
- e := map[string]interface{}{}
- e["delete_on_termination"] = *mapping.Ebs.DeleteOnTermination
- e["status"] = *mapping.Ebs.Status
- e["volume_id"] = *mapping.Ebs.VolumeId
- r["ebs"] = e
-
- blockDeviceMapping = append(blockDeviceMapping, r)
- }
- } else {
- blockDeviceMapping = make([]map[string]interface{}, 0)
- }
-
- return blockDeviceMapping
-}
-
-func getGroupSet(groupSet []*fcu.GroupIdentifier) []map[string]interface{} {
- res := []map[string]interface{}{}
- for _, g := range groupSet {
-
- r := map[string]interface{}{
- "group_id": *g.GroupId,
- "group_name": *g.GroupName,
- }
- res = append(res, r)
- }
-
- return res
-}
-
func getOAPISecurityGroups(groups []oscgo.SecurityGroupLight) (SecurityGroup []map[string]interface{}, SecurityGroupIds []string) {
for _, g := range groups {
SecurityGroup = append(SecurityGroup, map[string]interface{}{
@@ -154,13 +21,13 @@ func getOAPISecurityGroups(groups []oscgo.SecurityGroupLight) (SecurityGroup []m
return
}
-func getOAPILinkNicLight(l oscgo.LinkNicLight) map[string]interface{} {
- return map[string]interface{}{
- "delete_on_vm_deletion": strconv.FormatBool(l.GetDeleteOnVmDeletion()),
+func getOAPILinkNicLight(l oscgo.LinkNicLight) []map[string]interface{} {
+ return []map[string]interface{}{{
+ "delete_on_vm_deletion": l.GetDeleteOnVmDeletion(),
"device_number": fmt.Sprintf("%d", l.GetDeviceNumber()),
"link_nic_id": l.GetLinkNicId(),
"state": l.GetState(),
- }
+ }}
}
func getOAPILinkNic(l oscgo.LinkNic) map[string]interface{} {
@@ -217,10 +84,14 @@ func getOAPIPrivateIPsLight(privateIPs []oscgo.PrivateIpLightForVm) *schema.Set
for _, p := range privateIPs {
r := map[string]interface{}{
"is_primary": p.GetIsPrimary(),
- "link_public_ip": getOAPILinkPublicIPLight(p.GetLinkPublicIp()),
"private_dns_name": p.GetPrivateDnsName(),
"private_ip": p.GetPrivateIp(),
}
+
+ if p.HasLinkPublicIp() {
+ r["link_public_ip"] = getOAPILinkPublicIPLight(p.GetLinkPublicIp())
+ }
+
res.Add(r)
}
return res
@@ -239,59 +110,65 @@ func getOAPIPrivateIPs(privateIPs []oscgo.PrivateIp) (res []map[string]interface
}
func getOAPIVMNetworkInterfaceLightSet(nics []oscgo.NicLight) (res []map[string]interface{}) {
- if nics != nil {
- for _, nic := range nics {
- securityGroups, securityGroupIds := getOAPISecurityGroups(nic.GetSecurityGroups())
+ for _, nic := range nics {
+ securityGroups, securityGroupIds := getOAPISecurityGroups(nic.GetSecurityGroups())
+
+ nicMap := map[string]interface{}{
+ "delete_on_vm_deletion": nic.LinkNic.GetDeleteOnVmDeletion(), // Workaround.
+ "device_number": nic.LinkNic.GetDeviceNumber(),
+ "account_id": nic.GetAccountId(),
+ "is_source_dest_checked": nic.GetIsSourceDestChecked(),
+ "mac_address": nic.GetMacAddress(),
+ "net_id": nic.GetNetId(),
+ "nic_id": nic.GetNicId(),
+ "private_dns_name": nic.GetPrivateDnsName(),
+ "security_groups": securityGroups,
+ "security_group_ids": securityGroupIds,
+ "state": nic.GetState(),
+ "subnet_id": nic.GetSubnetId(),
+ }
- nicMap := map[string]interface{}{
- "delete_on_vm_deletion": *nic.GetLinkNic().DeleteOnVmDeletion, // Workaround.
- "account_id": nic.GetAccountId(),
- "description": nic.GetDescription(),
- "is_source_dest_checked": nic.GetIsSourceDestChecked(),
- "link_nic": getOAPILinkNicLight(nic.GetLinkNic()),
- "mac_address": nic.GetMacAddress(),
- "net_id": nic.GetNetId(),
- "nic_id": nic.GetNicId(),
- "private_dns_name": nic.GetPrivateDnsName(),
- "private_ips": getOAPIPrivateIPsLight(nic.GetPrivateIps()),
- "security_groups": securityGroups,
- "security_group_ids": securityGroupIds,
- "state": nic.GetState(),
- "subnet_id": nic.GetSubnetId(),
- }
+ if nic.HasDescription() {
+ nicMap["description"] = nic.GetDescription()
+ }
- if nic.HasLinkPublicIp() {
- nicMap["link_public_ip"] = getOAPILinkPublicIPLight(nic.GetLinkPublicIp())
- }
+ if nic.HasLinkPublicIp() {
+ nicMap["link_public_ip"] = getOAPILinkPublicIPLight(nic.GetLinkPublicIp())
+ }
+
+ if nic.HasPrivateIps() {
+ nicMap["private_ips"] = getOAPIPrivateIPsLight(nic.GetPrivateIps())
+ }
- res = append(res, nicMap)
+ if nic.HasLinkNic() {
+ nicMap["link_nic"] = getOAPILinkNicLight(nic.GetLinkNic())
}
+
+ res = append(res, nicMap)
}
return
}
func getOAPIVMNetworkInterfaceSet(nics []oscgo.Nic) (res []map[string]interface{}) {
- if nics != nil {
- for _, nic := range nics {
- //securityGroups, _ := getOAPISecurityGroups(nic.SecurityGroups)
- res = append(res, map[string]interface{}{
- "account_id": nic.GetAccountId(),
- "description": nic.GetDescription(),
- "is_source_dest_checked": nic.GetIsSourceDestChecked(),
- "link_nic": getOAPILinkNic(nic.GetLinkNic()),
- "link_public_ip": getOAPILinkPublicIP(nic.GetLinkPublicIp()),
- "mac_address": nic.GetMacAddress(),
- "net_id": nic.GetNetId(),
- "nic_id": nic.GetNicId(),
- "private_dns_name": nic.GetPrivateDnsName(),
- "private_ips": getOAPIPrivateIPs(nic.GetPrivateIps()),
- //"security_groups": securityGroups,
- "state": nic.GetState(),
- "subnet_id": nic.GetSubnetId(),
- "subregion_name": nic.GetSubregionName(),
- // "tags": getOapiTagSet(nic.Tags),
- })
- }
+ for _, nic := range nics {
+ //securityGroups, _ := getOAPISecurityGroups(nic.SecurityGroups)
+ res = append(res, map[string]interface{}{
+ "account_id": nic.GetAccountId(),
+ "description": nic.GetDescription(),
+ "is_source_dest_checked": nic.GetIsSourceDestChecked(),
+ "link_nic": getOAPILinkNic(nic.GetLinkNic()),
+ "link_public_ip": getOAPILinkPublicIP(nic.GetLinkPublicIp()),
+ "mac_address": nic.GetMacAddress(),
+ "net_id": nic.GetNetId(),
+ "nic_id": nic.GetNicId(),
+ "private_dns_name": nic.GetPrivateDnsName(),
+ "private_ips": getOAPIPrivateIPs(nic.GetPrivateIps()),
+ //"security_groups": securityGroups,
+ "state": nic.GetState(),
+ "subnet_id": nic.GetSubnetId(),
+ "subregion_name": nic.GetSubregionName(),
+ // "tags": getOapiTagSet(nic.Tags),
+ })
}
return
}
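
A minimal sketch (not part of the patch) of the optional-field pattern the rewritten flatteners rely on: the generated SDK exposes HasX()/GetX() accessors, so a key is only written into the Terraform map when the API actually returned that field.

package outscale

import oscgo "github.com/marinsalinas/osc-sdk-go"

// flattenNicDescriptionExample is a hypothetical helper showing the guard
// used above for description, link_public_ip and private_ips.
func flattenNicDescriptionExample(nic oscgo.NicLight) map[string]interface{} {
	m := map[string]interface{}{
		"nic_id": nic.GetNicId(),
	}
	if nic.HasDescription() {
		m["description"] = nic.GetDescription()
	}
	return m
}
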
diff --git a/outscale/oapi_tags.go b/outscale/oapi_tags.go
index dcd6c28ab..6cf95fb35 100644
--- a/outscale/oapi_tags.go
+++ b/outscale/oapi_tags.go
@@ -9,13 +9,9 @@ import (
"time"
"github.com/antihax/optional"
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/outscale/osc-go/oapi"
- "github.com/terraform-providers/terraform-provider-outscale/osc/common"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
)
func tagsSchemaComputed() *schema.Schema {
@@ -138,23 +134,6 @@ func tagsListOAPISchema() *schema.Schema {
}
}
-// tagsOAPI ToMap turns the list of tag into a map.
-func tagsOAPIToMap(ts []oapi.ResourceTag) []map[string]string {
- result := make([]map[string]string, len(ts))
- if len(ts) > 0 {
- for k, t := range ts {
- tag := make(map[string]string)
- tag["key"] = t.Key
- tag["value"] = t.Value
- result[k] = tag
- }
- } else {
- result = make([]map[string]string, 0)
- }
-
- return result
-}
-
// tagsOSCAPIToMap turns the list of tags into a map.
func tagsOSCAPIToMap(ts []oscgo.ResourceTag) []map[string]string {
result := make([]map[string]string, len(ts))
@@ -172,19 +151,6 @@ func tagsOSCAPIToMap(ts []oscgo.ResourceTag) []map[string]string {
return result
}
-func tagsOAPIFromMap(m map[string]interface{}) []oapi.ResourceTag {
- result := make([]oapi.ResourceTag, 0, len(m))
- for k, v := range m {
- t := oapi.ResourceTag{
- Key: k,
- Value: v.(string),
- }
- result = append(result, t)
- }
-
- return result
-}
-
func tagsOSCAPIFromMap(m map[string]interface{}) []oscgo.ResourceTag {
result := make([]oscgo.ResourceTag, 0, len(m))
for k, v := range m {
@@ -198,28 +164,9 @@ func tagsOSCAPIFromMap(m map[string]interface{}) []oscgo.ResourceTag {
return result
}
-// diffOAPITags takes our tag locally and the ones remotely and returns
+// diffOSCAPITags takes our local tags and the remote ones and returns
// the set of tags that must be created and the set of tags that must
// be destroyed.
-func diffOAPITags(oldTags, newTags []oapi.ResourceTag) ([]oapi.ResourceTag, []oapi.ResourceTag) {
- // First, we're creating everything we have
- create := make(map[string]interface{})
- for _, t := range newTags {
- create[t.Key] = t.Value
- }
-
- // Build the list of what to remove
- var remove []oapi.ResourceTag
- for _, t := range oldTags {
- old, ok := create[t.Key]
- if !ok || old != t.Value {
- remove = append(remove, t)
- }
- }
-
- return tagsOAPIFromMap(create), remove
-}
-
func diffOSCAPITags(oldTags, newTags []oscgo.ResourceTag) ([]oscgo.ResourceTag, []oscgo.ResourceTag) {
// First, we're creating everything we have
create := make(map[string]interface{})
@@ -239,20 +186,6 @@ func diffOSCAPITags(oldTags, newTags []oscgo.ResourceTag) ([]oscgo.ResourceTag,
return tagsOSCAPIFromMap(create), remove
}
-func tagsOAPIFromSliceMap(m []interface{}) []oapi.ResourceTag {
- result := make([]oapi.ResourceTag, 0, len(m))
- for _, v := range m {
- tag := v.(map[string]interface{})
- t := oapi.ResourceTag{
- Key: tag["key"].(string),
- Value: tag["value"].(string),
- }
- result = append(result, t)
- }
-
- return result
-}
-
func tagsFromSliceMap(m []interface{}) []oscgo.ResourceTag {
result := make([]oscgo.ResourceTag, 0, len(m))
for _, v := range m {
@@ -286,52 +219,13 @@ func oapiTagsDescToList(ts []oscgo.Tag) []map[string]interface{} {
func oapiTagDescIgnored(t *oscgo.Tag) bool {
filter := []string{"^outscale:"}
for _, v := range filter {
- if r, _ := regexp.MatchString(v, t.GetKey()); r == true {
+ if r, _ := regexp.MatchString(v, t.GetKey()); r {
return true
}
}
return false
}
-func assignOapiTags(tag []interface{}, resourceID string, conn *oapi.Client) error {
- request := oapi.CreateTagsRequest{}
- request.Tags = tagsOAPIFromSliceMap(tag)
- request.ResourceIds = []string{resourceID}
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- _, err := conn.POST_CreateTags(request)
- if err != nil {
- if strings.Contains(fmt.Sprint(err), ".NotFound") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
- if err != nil {
- return err
- }
- return nil
-}
-
-func unAssignOapiTags(tag []interface{}, resourceID string, conn *oapi.Client) error {
- request := oapi.DeleteTagsRequest{}
- request.Tags = tagsOAPIFromSliceMap(tag)
- request.ResourceIds = []string{resourceID}
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- _, err := conn.POST_DeleteTags(request)
- if err != nil {
- if strings.Contains(fmt.Sprint(err), ".NotFound") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
- if err != nil {
- return err
- }
- return nil
-}
func assignTags(tag []interface{}, resourceID string, conn *oscgo.APIClient) error {
request := oscgo.CreateTagsRequest{}
request.Tags = tagsFromSliceMap(tag)
@@ -355,86 +249,6 @@ func assignTags(tag []interface{}, resourceID string, conn *oscgo.APIClient) err
return nil
}
-//TODO: remove the following function after oapi integration
-
-func tagsToMapC(ts []*common.Tag) []map[string]string {
- result := make([]map[string]string, len(ts))
- if len(ts) > 0 {
- for k, t := range ts {
- tag := make(map[string]string)
- tag["key"] = *t.Key
- tag["value"] = *t.Value
- result[k] = tag
- }
- } else {
- result = make([]map[string]string, 0)
- }
-
- return result
-}
-
-func tagsDescToMap(ts []*fcu.TagDescription) map[string]string {
- result := make(map[string]string)
- for _, t := range ts {
- if !tagDescIgnored(t) {
- result[*t.Key] = *t.Value
- }
- }
-
- return result
-}
-
-func tagsDescToList(ts []*fcu.TagDescription) []map[string]string {
- result := make([]map[string]string, len(ts))
- for k, t := range ts {
- if !tagDescIgnored(t) {
- r := map[string]string{}
- r["load_balancer_name"] = *t.Key
- r["value"] = *t.Value
- r["resource_id"] = *t.ResourceId
- r["resource_type"] = *t.ResourceType
-
- result[k] = r
- }
- }
-
- return result
-}
-
-func tagSetDescToList(ts []*fcu.TagDescription) []map[string]string {
- result := make([]map[string]string, len(ts))
- for k, t := range ts {
- if !tagDescIgnored(t) {
- r := map[string]string{}
- r["key"] = *t.Key
- r["value"] = *t.Value
- r["resource_id"] = *t.ResourceId
- r["resource_type"] = *t.ResourceType
-
- result[k] = r
- }
- }
-
- return result
-}
-
-// tagsToMap turns the list of tag into a map.
-func tagsToMap(ts []*fcu.Tag) []map[string]string {
- result := make([]map[string]string, len(ts))
- if len(ts) > 0 {
- for k, t := range ts {
- tag := make(map[string]string)
- tag["key"] = *t.Key
- tag["value"] = *t.Value
- result[k] = tag
- }
- } else {
- result = make([]map[string]string, 0)
- }
-
- return result
-}
-
func dataSourceTagsSchema() *schema.Schema {
return &schema.Schema{
Type: schema.TypeSet,
@@ -462,135 +276,6 @@ func tagsSchema() *schema.Schema {
}
}
-func tagDescIgnored(t *fcu.TagDescription) bool {
- filter := []string{"^outscale:"}
- for _, v := range filter {
- if r, _ := regexp.MatchString(v, *t.Key); r == true {
- return true
- }
- }
- return false
-}
-
-func setTags(conn *fcu.Client, d *schema.ResourceData) error {
-
- if d.HasChange("tag") {
- oraw, nraw := d.GetChange("tag")
- o := oraw.(map[string]interface{})
- n := nraw.(map[string]interface{})
- create, remove := diffTags(tagsFromMap(o), tagsFromMap(n))
-
- // Set tag
- if len(remove) > 0 {
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- _, err := conn.VM.DeleteTags(&fcu.DeleteTagsInput{
- Resources: []*string{aws.String(d.Id())},
- Tags: remove,
- })
- if err != nil {
- if strings.Contains(fmt.Sprint(err), ".NotFound") {
- return resource.RetryableError(err) // retry
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
- if err != nil {
- return err
- }
- }
- if len(create) > 0 {
- err := resource.Retry(60*time.Second, func() *resource.RetryError {
- _, err := conn.VM.CreateTags(&fcu.CreateTagsInput{
- Resources: []*string{aws.String(d.Id())},
- Tags: create,
- })
- if err != nil {
- if strings.Contains(fmt.Sprint(err), ".NotFound") {
- return resource.RetryableError(err) // retry
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
- if err != nil {
- return err
- }
- }
- }
-
- return nil
-}
-
-// diffTags takes our tag locally and the ones remotely and returns
-// the set of tag that must be created, and the set of tag that must
-// be destroyed.
-func diffTags(oldTags, newTags []*fcu.Tag) ([]*fcu.Tag, []*fcu.Tag) {
- // First, we're creating everything we have
- create := make(map[string]interface{})
- for _, t := range newTags {
- create[*t.Key] = *t.Value
- }
-
- // Build the list of what to remove
- var remove []*fcu.Tag
- for _, t := range oldTags {
- old, ok := create[*t.Key]
- if !ok || old != *t.Value {
- remove = append(remove, t)
- }
- }
-
- return tagsFromMap(create), remove
-}
-
-func tagsFromMap(m map[string]interface{}) []*fcu.Tag {
- result := make([]*fcu.Tag, 0, len(m))
- for k, v := range m {
- t := &fcu.Tag{
- Key: aws.String(k),
- Value: aws.String(v.(string)),
- }
- result = append(result, t)
- }
-
- return result
-}
-
-func diffTagsCommon(oldTags, newTags []*common.Tag) ([]*common.Tag, []*common.Tag) {
- // First, we're creating everything we have
- create := make(map[string]interface{})
- for _, t := range newTags {
- create[*t.Key] = *t.Value
- }
-
- // Build the list of what to remove
- var remove []*common.Tag
- for _, t := range oldTags {
- old, ok := create[*t.Key]
- if !ok || old != *t.Value {
- remove = append(remove, t)
- }
- }
-
- return tagsFromMapCommon(create), remove
-}
-
-// tagsFromMap returns the tag for the given map of data.
-
-func tagsFromMapCommon(m map[string]interface{}) []*common.Tag {
- result := make([]*common.Tag, 0, len(m))
- for k, v := range m {
- t := &common.Tag{
- Key: aws.String(k),
- Value: aws.String(v.(string)),
- }
- result = append(result, t)
- }
-
- return result
-}
-
func getOapiTagSet(tags *[]oscgo.ResourceTag) []map[string]interface{} {
res := []map[string]interface{}{}
@@ -611,15 +296,13 @@ func getOapiTagSet(tags *[]oscgo.ResourceTag) []map[string]interface{} {
func getOscAPITagSet(tags []oscgo.ResourceTag) []map[string]interface{} {
res := []map[string]interface{}{}
- if tags != nil {
- for _, t := range tags {
- tag := map[string]interface{}{}
+ for _, t := range tags {
+ tag := map[string]interface{}{}
- tag["key"] = t.Key
- tag["value"] = t.Value
+ tag["key"] = t.Key
+ tag["value"] = t.Value
- res = append(res, tag)
- }
+ res = append(res, tag)
}
return res
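
A usage sketch (not part of the patch) for the diffOSCAPITags helper kept above, built only from functions defined in this file: tags in the desired map are (re)created, while old tags whose key disappeared or whose value changed are returned for deletion.

package outscale

import oscgo "github.com/marinsalinas/osc-sdk-go"

// exampleTagDiff plans a tag update with hypothetical values: create holds
// the Name=web-2 tag, remove holds both old tags (Name changed, Team dropped).
func exampleTagDiff() (create, remove []oscgo.ResourceTag) {
	oldTags := tagsOSCAPIFromMap(map[string]interface{}{"Name": "web-1", "Team": "ops"})
	newTags := tagsOSCAPIFromMap(map[string]interface{}{"Name": "web-2"})
	return diffOSCAPITags(oldTags, newTags)
}
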
diff --git a/outscale/provider.go b/outscale/provider.go
index bd5953689..a70d97cce 100644
--- a/outscale/provider.go
+++ b/outscale/provider.go
@@ -1,10 +1,18 @@
package outscale
import (
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
+var endpointServiceNames []string
+
+func init() {
+ endpointServiceNames = []string{
+ "api",
+ }
+}
+
// Provider ...
func Provider() terraform.ResourceProvider {
return &schema.Provider{
@@ -27,116 +35,73 @@ func Provider() terraform.ResourceProvider {
DefaultFunc: schema.EnvDefaultFunc("OUTSCALE_REGION", nil),
Description: "The Region for API operations.",
},
- "oapi": {
- Type: schema.TypeBool,
- Optional: true,
- DefaultFunc: schema.EnvDefaultFunc("OUTSCALE_OAPI", false),
- Description: "Enable oAPI Usage",
- },
+ "endpoints": endpointsSchema(),
},
ResourcesMap: map[string]*schema.Resource{
- "outscale_vm": resourceOutscaleOApiVM(),
- "outscale_keypair": resourceOutscaleOAPIKeyPair(),
- "outscale_image": resourceOutscaleOAPIImage(),
- "outscale_internet_service_link": resourceOutscaleOAPIInternetServiceLink(),
- "outscale_internet_service": resourceOutscaleOAPIInternetService(),
- "outscale_net": resourceOutscaleOAPINet(),
- "outscale_security_group": resourceOutscaleOAPISecurityGroup(),
- "outscale_outbound_rule": resourceOutscaleOAPIOutboundRule(),
- "outscale_security_group_rule": resourceOutscaleOAPIOutboundRule(),
- "outscale_tag": resourceOutscaleOAPITags(),
- "outscale_public_ip": resourceOutscaleOAPIPublicIP(),
- "outscale_public_ip_link": resourceOutscaleOAPIPublicIPLink(),
- "outscale_volume": resourceOutscaleOAPIVolume(),
- "outscale_volumes_link": resourceOutscaleOAPIVolumeLink(),
- "outscale_net_attributes": resourceOutscaleOAPILinAttributes(),
- "outscale_nat_service": resourceOutscaleOAPINatService(),
- "outscale_subnet": resourceOutscaleOAPISubNet(),
- "outscale_route": resourceOutscaleOAPIRoute(),
- "outscale_route_table": resourceOutscaleOAPIRouteTable(),
- "outscale_route_table_link": resourceOutscaleOAPILinkRouteTable(),
- "outscale_image_copy": resourceOutscaleOAPIImageCopy(),
- "outscale_vpn_connection": resourceOutscaleOAPIVpnConnection(),
- "outscale_vpn_gateway": resourceOutscaleOAPIVpnGateway(),
- "outscale_image_tasks": resourceOutscaleOAPIVpnGateway(),
- "outscale_vpn_connection_route": resourceOutscaleOAPIVpnConnectionRoute(),
- "outscale_vpn_gateway_route_propagation": resourceOutscaleOAPIVpnGatewayRoutePropagation(),
- "outscale_vpn_gateway_link": resourceOutscaleOAPIVpnGatewayLink(),
- "outscale_nic": resourceOutscaleOAPINic(),
- "outscale_snapshot_export_tasks": resourceOutscaleOAPIImageExportTasks(),
- "outscale_snapshot": resourceOutscaleOAPISnapshot(),
- "outscale_image_register": resourceOutscaleOAPIImageRegister(),
- "outscale_keypair_importation": resourceOutscaleOAPIKeyPairImportation(),
- "outscale_image_launch_permission": resourceOutscaleOAPIImageLaunchPermission(),
- "outscale_net_peering": resourceOutscaleOAPILinPeeringConnection(),
- "outscale_net_peering_acceptation": resourceOutscaleOAPILinPeeringConnectionAccepter(),
- "outscale_nic_link": resourceOutscaleOAPINetworkInterfaceAttachment(),
- "outscale_nic_private_ip": resourceOutscaleOAPINetworkInterfacePrivateIP(),
- "outscale_snapshot_attributes": resourcedOutscaleOAPISnapshotAttributes(),
- "outscale_net_api_access": resourceOutscaleOAPIVpcEndpoint(),
- "outscale_snapshot_import": resourcedOutscaleOAPISnapshotImport(),
- "outscale_client_endpoint": resourceOutscaleOAPICustomerGateway(),
- "outscale_dhcp_option": resourceOutscaleDHCPOption(), //TODO: OAPI
- "outscale_dhcp_option_link": resourceOutscaleDHCPOptionLink(), //TODO: OAPI
+ "outscale_vm": resourceOutscaleOApiVM(),
+ "outscale_keypair": resourceOutscaleOAPIKeyPair(),
+ "outscale_image": resourceOutscaleOAPIImage(),
+ "outscale_internet_service_link": resourceOutscaleOAPIInternetServiceLink(),
+ "outscale_internet_service": resourceOutscaleOAPIInternetService(),
+ "outscale_net": resourceOutscaleOAPINet(),
+ "outscale_security_group": resourceOutscaleOAPISecurityGroup(),
+ "outscale_outbound_rule": resourceOutscaleOAPIOutboundRule(),
+ "outscale_security_group_rule": resourceOutscaleOAPIOutboundRule(),
+ "outscale_tag": resourceOutscaleOAPITags(),
+ "outscale_public_ip": resourceOutscaleOAPIPublicIP(),
+ "outscale_public_ip_link": resourceOutscaleOAPIPublicIPLink(),
+ "outscale_volume": resourceOutscaleOAPIVolume(),
+ "outscale_volumes_link": resourceOutscaleOAPIVolumeLink(),
+ "outscale_net_attributes": resourceOutscaleOAPILinAttributes(),
+ "outscale_nat_service": resourceOutscaleOAPINatService(),
+ "outscale_subnet": resourceOutscaleOAPISubNet(),
+ "outscale_route": resourceOutscaleOAPIRoute(),
+ "outscale_route_table": resourceOutscaleOAPIRouteTable(),
+ "outscale_route_table_link": resourceOutscaleOAPILinkRouteTable(),
+ "outscale_nic": resourceOutscaleOAPINic(),
+ "outscale_snapshot": resourceOutscaleOAPISnapshot(),
+ "outscale_image_launch_permission": resourceOutscaleOAPIImageLaunchPermission(),
+ "outscale_net_peering": resourceOutscaleOAPILinPeeringConnection(),
+ "outscale_net_peering_acceptation": resourceOutscaleOAPILinPeeringConnectionAccepter(),
+ "outscale_nic_link": resourceOutscaleOAPINetworkInterfaceAttachment(),
+ "outscale_nic_private_ip": resourceOutscaleOAPINetworkInterfacePrivateIP(),
+ "outscale_snapshot_attributes": resourcedOutscaleOAPISnapshotAttributes(),
},
DataSourcesMap: map[string]*schema.Resource{
- "outscale_vm": dataSourceOutscaleOAPIVM(),
- "outscale_vms": datasourceOutscaleOApiVMS(),
- "outscale_security_group": dataSourceOutscaleOAPISecurityGroup(),
- "outscale_security_groups": dataSourceOutscaleOAPISecurityGroups(),
- "outscale_image": dataSourceOutscaleOAPIImage(),
- "outscale_images": dataSourceOutscaleOAPIImages(),
- "outscale_tag": dataSourceOutscaleOAPITag(),
- "outscale_tags": dataSourceOutscaleOAPITags(),
- "outscale_public_ip": dataSourceOutscaleOAPIPublicIP(),
- "outscale_public_ips": dataSourceOutscaleOAPIPublicIPS(),
- "outscale_volume": datasourceOutscaleOAPIVolume(),
- "outscale_volumes": datasourceOutscaleOAPIVolumes(),
- "outscale_nat_service": dataSourceOutscaleOAPINatService(),
- "outscale_nat_services": dataSourceOutscaleOAPINatServices(),
- "outscale_keypair": datasourceOutscaleOAPIKeyPair(),
- "outscale_keypairs": datasourceOutscaleOAPIKeyPairs(),
- "outscale_vm_state": dataSourceOutscaleOAPIVMState(),
- "outscale_vms_state": dataSourceOutscaleOAPIVMSState(),
- "outscale_internet_service": datasourceOutscaleOAPIInternetService(),
- "outscale_internet_services": datasourceOutscaleOAPIInternetServices(),
- "outscale_subnet": dataSourceOutscaleOAPISubnet(),
- "outscale_subnets": dataSourceOutscaleOAPISubnets(),
- "outscale_net": dataSourceOutscaleOAPIVpc(),
- "outscale_nets": dataSourceOutscaleOAPIVpcs(),
- "outscale_net_attributes": dataSourceOutscaleOAPIVpcAttr(),
- "outscale_client_endpoint": dataSourceOutscaleOAPICustomerGateway(),
- "outscale_client_endpoints": dataSourceOutscaleOAPICustomerGateways(),
- "outscale_route_table": dataSourceOutscaleOAPIRouteTable(),
- "outscale_route_tables": dataSourceOutscaleOAPIRouteTables(),
- "outscale_vpn_gateway": dataSourceOutscaleOAPIVpnGateway(),
- "outscale_vpn_gateways": dataSourceOutscaleOAPIVpnGateways(),
- "outscale_vpn_connection": dataSourceOutscaleVpnConnection(), //TODO: OAPI
- "outscale_sub_region": dataSourceOutscaleOAPIAvailabilityZone(),
- "outscale_prefix_list": dataSourceOutscaleOAPIPrefixList(),
- "outscale_quota": dataSourceOutscaleOAPIQuota(),
- "outscale_quotas": dataSourceOutscaleOAPIQuotas(),
- "outscale_prefix_lists": dataSourceOutscaleOAPIPrefixLists(),
- "outscale_region": dataSourceOutscaleOAPIRegion(),
- "outscale_sub_regions": dataSourceOutscaleOAPIAvailabilityZone(),
- "outscale_regions": dataSourceOutscaleOAPIRegions(),
- "outscale_vpn_connections": dataSourceOutscaleOAPIVpnConnections(),
- "outscale_product_types": dataSourceOutscaleOAPIProductTypes(),
- "outscale_reserved_vms": dataSourceOutscaleOAPIReservedVMS(),
- "outscale_vm_type": dataSourceOutscaleOAPIVMType(),
- "outscale_vm_types": dataSourceOutscaleOAPIVMTypes(),
- "outscale_reserved_vms_offer": dataSourceOutscaleOAPIReservedVMOffer(),
- "outscale_reserved_vms_offers": dataSourceOutscaleOAPIReservedVMOffers(),
- "outscale_snapshot": dataSourceOutscaleOAPISnapshot(),
- "outscale_snapshots": dataSourceOutscaleOAPISnapshots(),
- "outscale_net_peering": dataSourceOutscaleOAPILinPeeringConnection(),
- "outscale_net_peerings": dataSourceOutscaleOAPILinPeeringsConnection(),
- "outscale_nics": dataSourceOutscaleOAPINics(),
- "outscale_nic": dataSourceOutscaleOAPINic(),
- "outscale_net_api_access": dataSourceOutscaleOAPIVpcEndpoint(),
- "outscale_net_api_accesses": dataSourceOutscaleOAPIVpcEndpoints(),
- "outscale_net_api_access_services": dataSourceOutscaleOAPIVpcEndpointServices(),
+ "outscale_vm": dataSourceOutscaleOAPIVM(),
+ "outscale_vms": datasourceOutscaleOApiVMS(),
+ "outscale_security_group": dataSourceOutscaleOAPISecurityGroup(),
+ "outscale_security_groups": dataSourceOutscaleOAPISecurityGroups(),
+ "outscale_image": dataSourceOutscaleOAPIImage(),
+ "outscale_images": dataSourceOutscaleOAPIImages(),
+ "outscale_tag": dataSourceOutscaleOAPITag(),
+ "outscale_tags": dataSourceOutscaleOAPITags(),
+ "outscale_public_ip": dataSourceOutscaleOAPIPublicIP(),
+ "outscale_public_ips": dataSourceOutscaleOAPIPublicIPS(),
+ "outscale_volume": datasourceOutscaleOAPIVolume(),
+ "outscale_volumes": datasourceOutscaleOAPIVolumes(),
+ "outscale_nat_service": dataSourceOutscaleOAPINatService(),
+ "outscale_nat_services": dataSourceOutscaleOAPINatServices(),
+ "outscale_keypair": datasourceOutscaleOAPIKeyPair(),
+ "outscale_keypairs": datasourceOutscaleOAPIKeyPairs(),
+ "outscale_vm_state": dataSourceOutscaleOAPIVMState(),
+ "outscale_vms_state": dataSourceOutscaleOAPIVMSState(),
+ "outscale_internet_service": datasourceOutscaleOAPIInternetService(),
+ "outscale_internet_services": datasourceOutscaleOAPIInternetServices(),
+ "outscale_subnet": dataSourceOutscaleOAPISubnet(),
+ "outscale_subnets": dataSourceOutscaleOAPISubnets(),
+ "outscale_net": dataSourceOutscaleOAPIVpc(),
+ "outscale_nets": dataSourceOutscaleOAPIVpcs(),
+ "outscale_net_attributes": dataSourceOutscaleOAPIVpcAttr(),
+ "outscale_route_table": dataSourceOutscaleOAPIRouteTable(),
+ "outscale_route_tables": dataSourceOutscaleOAPIRouteTables(),
+ "outscale_snapshot": dataSourceOutscaleOAPISnapshot(),
+ "outscale_snapshots": dataSourceOutscaleOAPISnapshots(),
+ "outscale_net_peering": dataSourceOutscaleOAPILinPeeringConnection(),
+ "outscale_net_peerings": dataSourceOutscaleOAPILinPeeringsConnection(),
+ "outscale_nics": dataSourceOutscaleOAPINics(),
+ "outscale_nic": dataSourceOutscaleOAPINic(),
},
ConfigureFunc: providerConfigureClient,
@@ -148,7 +113,38 @@ func providerConfigureClient(d *schema.ResourceData) (interface{}, error) {
AccessKeyID: d.Get("access_key_id").(string),
SecretKeyID: d.Get("secret_key_id").(string),
Region: d.Get("region").(string),
- OApi: d.Get("oapi").(bool),
+ Endpoints: make(map[string]interface{}),
+ }
+
+ endpointsSet := d.Get("endpoints").(*schema.Set)
+
+ for _, endpointsSetI := range endpointsSet.List() {
+ endpoints := endpointsSetI.(map[string]interface{})
+ for _, endpointServiceName := range endpointServiceNames {
+ config.Endpoints[endpointServiceName] = endpoints[endpointServiceName].(string)
+ }
}
+
return config.Client()
}
+
+func endpointsSchema() *schema.Schema {
+ endpointsAttributes := make(map[string]*schema.Schema)
+
+ for _, endpointServiceName := range endpointServiceNames {
+ endpointsAttributes[endpointServiceName] = &schema.Schema{
+ Type: schema.TypeString,
+ Optional: true,
+ Default: "",
+ Description: "Use this to override the default service endpoint URL",
+ }
+ }
+
+ return &schema.Schema{
+ Type: schema.TypeSet,
+ Optional: true,
+ Elem: &schema.Resource{
+ Schema: endpointsAttributes,
+ },
+ }
+}
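
A hypothetical unit test, not included in the patch, that pins down the contract of the new endpoints block: every service registered in endpointServiceNames must appear as an optional string attribute, since providerConfigureClient reads those keys back into the client config.

package outscale

import (
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)

// TestEndpointsSchemaExposesServices is illustrative only; the test name and
// expectations are assumptions layered on top of endpointsSchema above.
func TestEndpointsSchemaExposesServices(t *testing.T) {
	attrs := endpointsSchema().Elem.(*schema.Resource).Schema
	for _, name := range endpointServiceNames {
		if _, ok := attrs[name]; !ok {
			t.Fatalf("endpoints schema is missing service %q", name)
		}
	}
}
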
diff --git a/outscale/provider_test.go b/outscale/provider_test.go
index a56c11a29..0b60b43a5 100644
--- a/outscale/provider_test.go
+++ b/outscale/provider_test.go
@@ -2,14 +2,11 @@ package outscale
import (
"os"
- "strconv"
"testing"
- "time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
var testAccProviders map[string]terraform.ResourceProvider
@@ -25,22 +22,6 @@ func init() {
}
-func TestGetOMIByRegion(t *testing.T) {
- if omi := getOMIByRegion("eu-west-2", "ubuntu"); omi.OMI != "ami-abe953fa" {
- t.Fatalf("expected %s, but got %s", "ami-abe953fa", omi.OMI)
- }
- if omi := getOMIByRegion("eu-west-2", "centos"); omi.OMI != "ami-4a7bf2b3" {
- t.Fatalf("expected %s, but got %s", "ami-4a7bf2b3", omi.OMI)
- }
- if omi := getOMIByRegion("cn-southeast-1", "ubuntu"); omi.OMI != "ami-d0abdc85" {
- t.Fatalf("expected %s, but got %s", "ami-d0abdc85", omi.OMI)
- }
- // default is centos6 eu-west-2
- if omi := getOMIByRegion("", ""); omi.OMI != "ami-4a7bf2b3" {
- t.Fatalf("expected %s, but got %s", "ami-4a7bf2b3", omi.OMI)
- }
-}
-
func TestProvider(t *testing.T) {
if err := Provider().(*schema.Provider).InternalValidate(); err != nil {
t.Fatalf("err: %s", err)
@@ -51,49 +32,12 @@ func TestProvider_impl(t *testing.T) {
var _ terraform.ResourceProvider = Provider()
}
-func skipIfNoOAPI(t *testing.T) {
- isOAPI, err := strconv.ParseBool(os.Getenv("OUTSCALE_OAPI"))
- if err != nil || !isOAPI {
- t.Skip()
- }
-}
-
func testAccPreCheck(t *testing.T) {
-}
-
-func testAccWait(n time.Duration) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- time.Sleep(n)
- return nil
- }
-}
-
-type Item struct {
- Platform string
- OMI string
-}
-
-func getOMIByRegion(region, platform string) Item {
- if region == "" {
- region = "eu-west-2"
- }
- omis := make(map[string][]Item)
- omis["eu-west-2"] = []Item{Item{Platform: "centos", OMI: "ami-4a7bf2b3"}}
- omis["eu-west-2"] = append(omis["eu-west-2"], Item{Platform: "ubuntu", OMI: "ami-abe953fa"})
-
- omis["us-east-2"] = []Item{Item{Platform: "centos", OMI: "ami-8ceca82d"}}
- omis["us-east-2"] = append(omis["us-east-2"], Item{Platform: "ubuntu", OMI: "ami-f2ea59af"})
-
- omis["us-west-1"] = []Item{Item{Platform: "centos", OMI: "ami-6e94897f"}}
- omis["us-west-1"] = append(omis["us-west-1"], Item{Platform: "ubuntu", OMI: "ami-b1d1f100"})
-
- omis["cn-southeast-1"] = []Item{Item{Platform: "centos", OMI: "ami-9c559f7b"}}
- omis["cn-southeast-1"] = append(omis["cn-southeast-1"], Item{Platform: "ubuntu", OMI: "ami-d0abdc85"})
-
- for _, omi := range omis[region] {
- if omi.Platform == platform {
- return omi
- }
+ if os.Getenv("OUTSCALE_ACCESSKEYID") == "" ||
+ os.Getenv("OUTSCALE_REGION") == "" ||
+ os.Getenv("OUTSCALE_SECRETKEYID") == "" ||
+ os.Getenv("OUTSCALE_IMAGEID") == "" ||
+ os.Getenv("OUTSCALE_ACCOUNT") == "" {
+ t.Fatal("`OUTSCALE_ACCESSKEYID`, `OUTSCALE_SECRETKEYID`, `OUTSCALE_REGION`, `OUTSCALE_ACCOUNT` and `OUTSCALE_IMAGEID` must be set for acceptance testing")
}
- return omis[region][0]
}
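
An alternative shape for the precheck above (a sketch, not what the patch does): iterating over the required variables reports exactly which one is missing.

package outscale

import (
	"os"
	"testing"
)

// testAccPreCheckVerbose is a hypothetical variant of testAccPreCheck.
func testAccPreCheckVerbose(t *testing.T) {
	required := []string{
		"OUTSCALE_ACCESSKEYID",
		"OUTSCALE_SECRETKEYID",
		"OUTSCALE_REGION",
		"OUTSCALE_ACCOUNT",
		"OUTSCALE_IMAGEID",
	}
	for _, v := range required {
		if os.Getenv(v) == "" {
			t.Fatalf("%s must be set for acceptance testing", v)
		}
	}
}
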
diff --git a/outscale/resource_outscale_client_endpoint.go b/outscale/resource_outscale_client_endpoint.go
deleted file mode 100644
index e6ca24497..000000000
--- a/outscale/resource_outscale_client_endpoint.go
+++ /dev/null
@@ -1,350 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strconv"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
-)
-
-func resourceOutscaleOAPICustomerGateway() *schema.Resource {
- return &schema.Resource{
- Create: resourceOutscaleOAPICustomerGatewayCreate,
- Read: resourceOutscaleOAPICustomerGatewayRead,
- Delete: resourceOutscaleOAPICustomerGatewayDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Schema: map[string]*schema.Schema{
- "bgp_asn": {
- Type: schema.TypeInt,
- Required: true,
- ForceNew: true,
- },
-
- "public_ip": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
-
- "type": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
-
- "client_endpoint_id": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "tags": tagsSchemaComputed(),
- "tag": tagsSchema(),
- },
- }
-}
-
-func resourceOutscaleOAPICustomerGatewayCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- ipAddress := d.Get("public_ip").(string)
- vpnType := d.Get("type").(string)
- bgpAsn := d.Get("bgp_asn").(int)
-
- alreadyExists, err := resourceOutscaleOAPICustomerGatewayExists(vpnType, ipAddress, bgpAsn, conn)
- if err != nil {
- return err
- }
-
- if alreadyExists {
- return fmt.Errorf("An existing customer gateway for IpAddress: %s, VpnType: %s, BGP ASN: %d has been found", ipAddress, vpnType, bgpAsn)
- }
-
- createOpts := &fcu.CreateCustomerGatewayInput{
- BgpAsn: aws.Int64(int64(bgpAsn)),
- PublicIp: aws.String(ipAddress),
- Type: aws.String(vpnType),
- }
-
- // Create the Customer Gateway.
- log.Printf("[DEBUG] Creating customer gateway")
-
- var resp *fcu.CreateCustomerGatewayOutput
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.CreateCustomerGateway(createOpts)
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
- if err != nil {
- return fmt.Errorf("Error creating customer gateway: %s", err)
- }
-
- // Store the ID
- customerGateway := resp.CustomerGateway
- d.SetId(*customerGateway.CustomerGatewayId)
- fmt.Printf("[INFO] Customer gateway ID: %s", *customerGateway.CustomerGatewayId)
-
- // Wait for the CustomerGateway to be available.
- stateConf := &resource.StateChangeConf{
- Pending: []string{"pending"},
- Target: []string{"available"},
- Refresh: customerOAPIGatewayRefreshFunc(conn, *customerGateway.CustomerGatewayId),
- Timeout: 10 * time.Minute,
- Delay: 10 * time.Second,
- MinTimeout: 3 * time.Second,
- }
-
- _, stateErr := stateConf.WaitForState()
- if stateErr != nil {
- return fmt.Errorf(
- "Error waiting for customer gateway (%s) to become ready: %s",
- *customerGateway.CustomerGatewayId, err)
- }
-
- // Create tags.
- if err := setTags(conn, d); err != nil {
- return err
- }
-
- t := make([]map[string]interface{}, 0)
-
- d.Set("tags", t)
-
- return nil
-}
-
-func customerOAPIGatewayRefreshFunc(conn *fcu.Client, gatewayID string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
- gatewayFilter := &fcu.Filter{
- Name: aws.String("customer-gateway-id"),
- Values: []*string{aws.String(gatewayID)},
- }
-
- var resp *fcu.DescribeCustomerGatewaysOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeCustomerGateways(&fcu.DescribeCustomerGatewaysInput{
- Filters: []*fcu.Filter{gatewayFilter},
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidCustomerGatewayID.NotFound") {
- resp = nil
- } else {
- fmt.Printf("Error on CustomerGatewayRefresh: %s", err)
- return nil, "", err
- }
- }
-
- if resp == nil || len(resp.CustomerGateways) == 0 {
- // handle consistency issues
- return nil, "", nil
- }
-
- gateway := resp.CustomerGateways[0]
- return gateway, *gateway.State, nil
- }
-}
-
-func resourceOutscaleOAPICustomerGatewayExists(vpnType, ipAddress string, bgpAsn int, conn *fcu.Client) (bool, error) {
- ipAddressFilter := &fcu.Filter{
- Name: aws.String("ip-address"),
- Values: []*string{aws.String(ipAddress)},
- }
-
- typeFilter := &fcu.Filter{
- Name: aws.String("type"),
- Values: []*string{aws.String(vpnType)},
- }
-
- bgp := strconv.Itoa(bgpAsn)
- bgpAsnFilter := &fcu.Filter{
- Name: aws.String("bgp-asn"),
- Values: []*string{aws.String(bgp)},
- }
-
- var resp *fcu.DescribeCustomerGatewaysOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeCustomerGateways(&fcu.DescribeCustomerGatewaysInput{
- Filters: []*fcu.Filter{ipAddressFilter, typeFilter, bgpAsnFilter},
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return false, err
- }
-
- if len(resp.CustomerGateways) > 0 && *resp.CustomerGateways[0].State != "deleted" {
- return true, nil
- }
-
- return false, nil
-}
-
-func resourceOutscaleOAPICustomerGatewayRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- gatewayFilter := &fcu.Filter{
- Name: aws.String("customer-gateway-id"),
- Values: []*string{aws.String(d.Id())},
- }
-
- var resp *fcu.DescribeCustomerGatewaysOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeCustomerGateways(&fcu.DescribeCustomerGatewaysInput{
- Filters: []*fcu.Filter{gatewayFilter},
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidCustomerGatewayID.NotFound") {
- d.SetId("")
- return nil
- }
- fmt.Printf("[ERROR] Error finding CustomerGateway: %s", err)
- return err
- }
-
- if len(resp.CustomerGateways) != 1 {
- return fmt.Errorf("[ERROR] Error finding CustomerGateway: %s", d.Id())
- }
-
- if *resp.CustomerGateways[0].State == "deleted" {
- fmt.Printf("[INFO] Customer Gateway is in `deleted` state: %s", d.Id())
- d.SetId("")
- return nil
- }
-
- customerGateway := resp.CustomerGateways[0]
- d.Set("public_ip", customerGateway.IpAddress)
- d.Set("type", customerGateway.Type)
- d.Set("tags", tagsToMap(customerGateway.Tags))
-
- if *customerGateway.BgpAsn != "" {
- val, err := strconv.ParseInt(*customerGateway.BgpAsn, 0, 0)
- if err != nil {
- return fmt.Errorf("error parsing bgp_asn: %s", err)
- }
-
- d.Set("bgp_asn", int(val))
- }
-
- return nil
-}
-
-func resourceOutscaleOAPICustomerGatewayDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var err error
- err = resource.Retry(15*time.Minute, func() *resource.RetryError {
- _, err := conn.VM.DeleteCustomerGateway(&fcu.DeleteCustomerGatewayInput{
- CustomerGatewayId: aws.String(d.Id()),
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidCustomerGatewayID.NotFound") {
- d.SetId("")
- return nil
- }
- fmt.Printf("[ERROR] Error deleting CustomerGateway: %s", err)
- return err
- }
-
- gatewayFilter := &fcu.Filter{
- Name: aws.String("customer-gateway-id"),
- Values: []*string{aws.String(d.Id())},
- }
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err := conn.VM.DescribeCustomerGateways(&fcu.DescribeCustomerGatewaysInput{
- Filters: []*fcu.Filter{gatewayFilter},
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidCustomerGatewayID.NotFound") {
- return nil
- }
- return resource.NonRetryableError(err)
- }
-
- if len(resp.CustomerGateways) != 1 {
- return resource.RetryableError(fmt.Errorf("[ERROR] Error finding CustomerGateway for delete: %s", d.Id()))
- }
-
- switch *resp.CustomerGateways[0].State {
- case "pending", "available", "deleting":
- return resource.RetryableError(fmt.Errorf("[DEBUG] Gateway (%s) in state (%s), retrying", d.Id(), *resp.CustomerGateways[0].State))
- case "deleted":
- return nil
- default:
- return resource.RetryableError(fmt.Errorf("[DEBUG] Unrecognized state (%s) for Customer Gateway delete on (%s)", *resp.CustomerGateways[0].State, d.Id()))
- }
- })
-
- if err != nil {
- return err
- }
-
- return nil
-}
diff --git a/outscale/resource_outscale_client_endpoint_test.go b/outscale/resource_outscale_client_endpoint_test.go
deleted file mode 100644
index ea63259b8..000000000
--- a/outscale/resource_outscale_client_endpoint_test.go
+++ /dev/null
@@ -1,305 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "regexp"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPICustomerGateway_basic(t *testing.T) {
- t.Skip()
-
- var gateway fcu.CustomerGateway
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- rInt := acctest.RandInt()
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- IDRefreshName: "outscale_client_endpoint.foo",
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPICustomerGatewayDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOAPICustomerGatewayConfig(rInt, rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOAPICheckCustomerGateway("outscale_client_endpoint.foo", &gateway),
- ),
- },
- {
- Config: testAccOAPICustomerGatewayConfigUpdateTags(rInt, rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOAPICheckCustomerGateway("outscale_client_endpoint.foo", &gateway),
- ),
- },
- {
- Config: testAccOAPICustomerGatewayConfigForceReplace(rInt, rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOAPICheckCustomerGateway("outscale_client_endpoint.foo", &gateway),
- ),
- },
- },
- })
-}
-
-func TestAccOutscaleOAPICustomerGateway_similarAlreadyExists(t *testing.T) {
- t.Skip()
-
- var gateway fcu.CustomerGateway
- rInt := acctest.RandInt()
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- IDRefreshName: "outscale_client_endpoint.foo",
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPICustomerGatewayDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOAPICustomerGatewayConfig(rInt, rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOAPICheckCustomerGateway("outscale_client_endpoint.foo", &gateway),
- ),
- },
- {
- Config: testAccOAPICustomerGatewayConfigIdentical(rInt, rBgpAsn),
- ExpectError: regexp.MustCompile("An existing customer gateway"),
- },
- },
- })
-}
-
-func TestAccOutscaleOAPICustomerGateway_disappears(t *testing.T) {
- t.Skip()
-
- rInt := acctest.RandInt()
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- var gateway fcu.CustomerGateway
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPICustomerGatewayDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOAPICustomerGatewayConfig(rInt, rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOAPICheckCustomerGateway("outscale_client_endpoint.foo", &gateway),
- testAccOutscaleOAPICustomerGatewayDisappears(&gateway),
- ),
- ExpectNonEmptyPlan: true,
- },
- },
- })
-}
-
-func testAccOutscaleOAPICustomerGatewayDisappears(gateway *fcu.CustomerGateway) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
-
- opts := &fcu.DeleteCustomerGatewayInput{
- CustomerGatewayId: gateway.CustomerGatewayId,
- }
-
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err := conn.VM.DeleteCustomerGateway(opts)
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- return resource.Retry(40*time.Minute, func() *resource.RetryError {
- opts := &fcu.DescribeCustomerGatewaysInput{
- CustomerGatewayIds: []*string{gateway.CustomerGatewayId},
- }
- resp, err := conn.VM.DescribeCustomerGateways(opts)
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidCustomerGatewayID.NotFound") {
- return nil
- }
- return resource.NonRetryableError(
- fmt.Errorf("Error retrieving Customer Gateway: %s", err))
- }
- if *resp.CustomerGateways[0].State == "deleted" {
- return nil
- }
- return resource.RetryableError(fmt.Errorf(
- "Waiting for Customer Gateway: %v", gateway.CustomerGatewayId))
- })
- }
-}
-
-func testAccCheckOAPICustomerGatewayDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_customer_endpoint" {
- continue
- }
-
- gatewayFilter := &fcu.Filter{
- Name: aws.String("customer-gateway-id"),
- Values: []*string{aws.String(rs.Primary.ID)},
- }
-
- resp, err := conn.VM.DescribeCustomerGateways(&fcu.DescribeCustomerGatewaysInput{
- Filters: []*fcu.Filter{gatewayFilter},
- })
-
- if strings.Contains(fmt.Sprint(err), "InvalidCustomerGatewayID.NotFound") {
- continue
- }
-
- if err == nil {
- if len(resp.CustomerGateways) > 0 {
- return fmt.Errorf("Customer gateway still exists: %v", resp.CustomerGateways)
- }
-
- if *resp.CustomerGateways[0].State == "deleted" {
- continue
- }
- }
-
- return err
- }
-
- return nil
-}
-
-func testAccOAPICheckCustomerGateway(gatewayResource string, cgw *fcu.CustomerGateway) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[gatewayResource]
- if !ok {
- return fmt.Errorf("Not found: %s", gatewayResource)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
-
- gateway, ok := s.RootModule().Resources[gatewayResource]
- if !ok {
- return fmt.Errorf("Not found: %s", gatewayResource)
- }
-
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- gatewayFilter := &fcu.Filter{
- Name: aws.String("customer-gateway-id"),
- Values: []*string{aws.String(gateway.Primary.ID)},
- }
-
- var resp *fcu.DescribeCustomerGatewaysOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeCustomerGateways(&fcu.DescribeCustomerGatewaysInput{
- Filters: []*fcu.Filter{gatewayFilter},
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- respGateway := resp.CustomerGateways[0]
- *cgw = *respGateway
-
- return nil
- }
-}
-
-func testAccOAPICustomerGatewayConfig(rInt, rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_client_endpoint" "foo" {
- bgp_asn = %d
- public_ip = "172.0.0.1"
- type = "ipsec.1"
- tag {
- Name = "foo-gateway-%d"
- }
- }
- `, rBgpAsn, rInt)
-}
-
-func testAccOAPICustomerGatewayConfigIdentical(randInt, rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_client_endpoint" "foo" {
- bgp_asn = %d
- public_ip = "172.0.0.1"
- type = "ipsec.1"
- tag {
- Name = "foo-gateway-%d"
- }
- }
-
- resource "outscale_client_endpoint" "identical" {
- bgp_asn = %d
- public_ip = "172.0.0.1"
- type = "ipsec.1"
- tag {
- Name = "foo-gateway-identical-%d"
- }
- }
- `, rBgpAsn, randInt, rBgpAsn, randInt)
-}
-
-// Add the Another: "tag" tag.
-func testAccOAPICustomerGatewayConfigUpdateTags(rInt, rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_client_endpoint" "foo" {
- bgp_asn = %d
- public_ip = "172.0.0.1"
- type = "ipsec.1"
- tag {
- Name = "foo-gateway-%d"
- Another = "tag"
- }
- }
- `, rBgpAsn, rInt)
-}
-
-// Change the public_ip.
-func testAccOAPICustomerGatewayConfigForceReplace(rInt, rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_client_endpoint" "foo" {
- bgp_asn = %d
- public_ip = "172.10.10.1"
- type = "ipsec.1"
- tag {
- Name = "foo-gateway-%d"
- Another = "tag"
- }
- }
- `, rBgpAsn, rInt)
-}
diff --git a/outscale/resource_outscale_dhcp_options.go b/outscale/resource_outscale_dhcp_options.go
deleted file mode 100644
index a54444437..000000000
--- a/outscale/resource_outscale_dhcp_options.go
+++ /dev/null
@@ -1,388 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleDHCPOption() *schema.Resource {
- return &schema.Resource{
- Create: resourceOutscaleDHCPOptionCreate,
- Read: resourceOutscaleDHCPOptionRead,
- Delete: resourceOutscaleDHCPOptionDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Timeouts: &schema.ResourceTimeout{
- Create: schema.DefaultTimeout(30 * time.Minute),
- Delete: schema.DefaultTimeout(30 * time.Minute),
- },
-
- Schema: getDHCPOptionSchema(),
- }
-}
-
-func getDHCPOptionSchema() map[string]*schema.Schema {
- return map[string]*schema.Schema{
- // Attributes
- "dhcp_configuration": {
- Type: schema.TypeList,
- Optional: true,
- ForceNew: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "key": {
- Type: schema.TypeString,
- Optional: true,
- },
- "value": {
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- },
- },
- },
- "dhcp_configuration_set": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "key": {
- Type: schema.TypeString,
- Computed: true,
- },
- "value_set": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "value": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- },
- },
- },
- "dhcp_options_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "tag_set": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "key": {
- Type: schema.TypeString,
- Computed: true,
- },
- "value": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- }
-}
-
-func resourceOutscaleDHCPOptionCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- setDHCPOption := func(key string) *fcu.NewDhcpConfiguration {
- log.Printf("[DEBUG] Setting DHCP option %s...", key)
- tfKey := strings.Replace(key, "-", "_", -1)
-
- value, ok := d.GetOk(tfKey)
- if !ok {
- return nil
- }
-
- if v, ok := value.(string); ok {
- return &fcu.NewDhcpConfiguration{
- Key: aws.String(key),
- Values: []*string{
- aws.String(v),
- },
- }
- }
-
- if v, ok := value.([]interface{}); ok {
- var s []*string
- for _, attr := range v {
- s = append(s, aws.String(attr.(string)))
- }
-
- return &fcu.NewDhcpConfiguration{
- Key: aws.String(key),
- Values: s,
- }
- }
-
- return nil
- }
-
- var createOpts *fcu.CreateDhcpOptionsInput
-
- if v := setDHCPOption("dhcp-configuration"); v != nil {
-
- log.Printf("[DEBUG] INPUT %s", v)
-
- createOpts = &fcu.CreateDhcpOptionsInput{
- DhcpConfigurations: []*fcu.NewDhcpConfiguration{
- v,
- },
- }
- } else {
- createOpts = &fcu.CreateDhcpOptionsInput{}
- createOpts.DhcpConfigurations = []*fcu.NewDhcpConfiguration{
- &fcu.NewDhcpConfiguration{
- Key: aws.String(""),
- Values: []*string{},
- },
- }
- }
-
- log.Printf("[DEBUG] VALUE => %s", createOpts)
-
- var resp *fcu.CreateDhcpOptionsOutput
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.CreateDhcpOptions(createOpts)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
- if err != nil {
- return fmt.Errorf("Error creating DHCP Options Set: %s", err)
- }
-
- dos := resp.DhcpOptions
- d.SetId(*dos.DhcpOptionsId)
- log.Printf("[INFO] DHCP Options Set ID: %s", d.Id())
-
- // Wait for the DHCP Options to become available
- log.Printf("[DEBUG] Waiting for DHCP Options (%s) to become available", d.Id())
- stateConf := &resource.StateChangeConf{
- Pending: []string{"pending"},
- Target: []string{"created"},
- Refresh: resourceDHCPOptionsStateRefreshFunc(conn, d.Id()),
- Timeout: 1 * time.Minute,
- }
- if _, err := stateConf.WaitForState(); err != nil {
- return fmt.Errorf(
- "Error waiting for DHCP Options (%s) to become available: %s",
- d.Id(), err)
- }
-
- dhcp := make([]map[string]interface{}, 0)
- d.Set("dhcp_configuration_set", dhcp)
-
- return resourceOutscaleDHCPOptionRead(d, meta)
-}
-
-func resourceOutscaleDHCPOptionRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
- req := &fcu.DescribeDhcpOptionsInput{
- DhcpOptionsIds: []*string{
- aws.String(d.Id()),
- },
- }
-
- var resp *fcu.DescribeDhcpOptionsOutput
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeDhcpOptions(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error describing DHCP Options: %s", err)
- }
-
- opts := resp.DhcpOptions[0]
- d.Set("tag_set", tagsToMap(opts.Tags))
-
- var dhcpConfiguration []map[string]interface{}
-
- for _, cfg := range opts.DhcpConfigurations {
- dhcp := make(map[string]interface{})
- if cfg.Key != nil {
- var values []map[string]interface{}
- for _, v := range cfg.Values {
- values = append(values, map[string]interface{}{
- "value": *v.Value,
- })
- }
-
- dhcp["key"] = *cfg.Key
- dhcp["value_set"] = values
-
- dhcpConfiguration = append(dhcpConfiguration, dhcp)
- }
- }
- d.Set("dhcp_options_id", d.Id())
- d.Set("request_id", resp.RequestId)
-
- return d.Set("dhcp_configuration_set", dhcpConfiguration)
-}
-
-func resourceOutscaleDHCPOptionDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- return resource.Retry(3*time.Minute, func() *resource.RetryError {
- log.Printf("[INFO] Deleting DHCP Options ID %s...", d.Id())
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- _, err = conn.VM.DeleteDhcpOptions(&fcu.DeleteDhcpOptionsInput{
- DhcpOptionsId: aws.String(d.Id()),
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err == nil {
- return nil
- }
-
- log.Printf("[WARN] %s", err)
-
- ec2err, ok := err.(awserr.Error)
- if !ok {
- return resource.RetryableError(err)
- }
-
- switch ec2err.Code() {
- case "InvalidDhcpOptionsID.NotFound":
- return nil
- case "DependencyViolation":
- vpcs, err2 := findVPCsByDHCPOptionsID(conn, d.Id())
- if err2 != nil {
- log.Printf("[ERROR] %s", err2)
- return resource.RetryableError(err2)
- }
-
- for _, vpc := range vpcs {
- log.Printf("[INFO] Disassociating DHCP Options Set %s from VPC %s...", d.Id(), *vpc.VpcId)
- if _, err := conn.VM.AssociateDhcpOptions(&fcu.AssociateDhcpOptionsInput{
- DhcpOptionsId: aws.String("default"),
- VpcId: vpc.VpcId,
- }); err != nil {
- return resource.RetryableError(err)
- }
- }
- return resource.RetryableError(err)
- default:
- return resource.NonRetryableError(err)
- }
- })
-}
-
-func resourceDHCPOptionsStateRefreshFunc(conn *fcu.Client, id string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
- DescribeDhcpOpts := &fcu.DescribeDhcpOptionsInput{
- DhcpOptionsIds: []*string{
- aws.String(id),
- },
- }
-
- var resp *fcu.DescribeDhcpOptionsOutput
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeDhcpOptions(DescribeDhcpOpts)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
-
- if strings.Contains(fmt.Sprint(err), "InvalidDhcpOptionsID.NotFound") {
- resp = nil
- } else {
- log.Printf("Error on DHCPOptionsStateRefresh: %s", err)
- return nil, "", err
- }
- }
-
- if resp == nil {
- return nil, "", nil
- }
-
- dos := resp.DhcpOptions[0]
- return dos, "created", nil
- }
-}
-
-func findVPCsByDHCPOptionsID(conn *fcu.Client, id string) ([]*fcu.Vpc, error) {
- req := &fcu.DescribeVpcsInput{
- Filters: []*fcu.Filter{
- &fcu.Filter{
- Name: aws.String("dhcp-options-id"),
- Values: []*string{
- aws.String(id),
- },
- },
- },
- }
-
- var resp *fcu.DescribeVpcsOutput
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeVpcs(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return nil, fmt.Errorf("InvalidVpcID.NotFound: %s", err)
- }
-
- return resp.Vpcs, nil
-}
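
Every FCU call in the deleted resource above (create, read, delete, state refresh, VPC lookup) repeats the same five-minute retry loop keyed on RequestLimitExceeded. A minimal sketch of how that boilerplate could be factored into one helper; the fcuRetry name is an assumption, not something the original code defines:

package outscale

import (
	"strings"
	"time"

	"github.com/hashicorp/terraform/helper/resource"
)

// fcuRetry (hypothetical) wraps a single FCU call and retries it for up to
// five minutes while the API keeps answering RequestLimitExceeded, which is
// exactly what the inline loops in the deleted resource do by hand.
func fcuRetry(call func() error) error {
	return resource.Retry(5*time.Minute, func() *resource.RetryError {
		if err := call(); err != nil {
			if strings.Contains(err.Error(), "RequestLimitExceeded") {
				return resource.RetryableError(err)
			}
			return resource.NonRetryableError(err)
		}
		return nil
	})
}

With such a helper, the create path above would reduce to err := fcuRetry(func() error { var e error; resp, e = conn.VM.CreateDhcpOptions(createOpts); return e }).
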
diff --git a/outscale/resource_outscale_dhcp_options_link.go b/outscale/resource_outscale_dhcp_options_link.go
deleted file mode 100644
index fef6e9190..000000000
--- a/outscale/resource_outscale_dhcp_options_link.go
+++ /dev/null
@@ -1,178 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleDHCPOptionLink() *schema.Resource {
- return &schema.Resource{
- Create: resourceOutscaleDHCPOptionLinkCreate,
- Read: resourceOutscaleDHCPOptionLinkRead,
- Delete: resourceOutscaleDHCPOptionLinkDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Timeouts: &schema.ResourceTimeout{
- Create: schema.DefaultTimeout(30 * time.Minute),
- Delete: schema.DefaultTimeout(30 * time.Minute),
- },
-
- Schema: getDHCPOptionLinkSchema(),
- }
-}
-
-func getDHCPOptionLinkSchema() map[string]*schema.Schema {
- return map[string]*schema.Schema{
- "dhcp_options_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "vpc_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- }
-}
-
-func resourceOutscaleDHCPOptionLinkCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- fmt.Printf(
- "[INFO] Creating DHCP Options Link: %s => %s",
- d.Get("vpc_id").(string),
- d.Get("dhcp_options_id").(string))
-
- optsID := aws.String(d.Get("dhcp_options_id").(string))
- vpcID := aws.String(d.Get("vpc_id").(string))
-
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.AssociateDhcpOptions(&fcu.AssociateDhcpOptionsInput{
- DhcpOptionsId: optsID,
- VpcId: vpcID,
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- // Set the ID and return
- d.SetId(*optsID + "-" + *vpcID)
- fmt.Printf("[INFO] Association ID: %s", d.Id())
-
- return resourceOutscaleDHCPOptionLinkRead(d, meta)
-
-}
-
-func resourceOutscaleDHCPOptionLinkRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
- var resp *fcu.DescribeVpcsOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- DescribeVpcOpts := &fcu.DescribeVpcsInput{
- VpcIds: []*string{aws.String(d.Get("vpc_id").(string))},
- }
- resp, err = conn.VM.DescribeVpcs(DescribeVpcOpts)
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- vpc := resp.Vpcs[0]
- if *vpc.VpcId != d.Get("vpc_id") || *vpc.DhcpOptionsId != d.Get("dhcp_options_id") {
- fmt.Printf("[INFO] It seems the DHCP Options Link is gone. Deleting reference from Graph...")
- d.SetId("")
- }
-
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
-
-func resourceOutscaleDHCPOptionLinkDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- fmt.Printf("[INFO] Disassociating DHCP Options Set %s from VPC %s...", d.Get("dhcp_options_id"), d.Get("vpc_id"))
-
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.AssociateDhcpOptions(&fcu.AssociateDhcpOptionsInput{
- DhcpOptionsId: aws.String(d.Get("dhcp_options_id").(string)),
- VpcId: aws.String(d.Get("vpc_id").(string)),
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- d.SetId("")
- return nil
-
-}
-
-// VPCStateRefreshFunc ...
-func VPCStateRefreshFunc(conn *fcu.Client, ID string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
- DescribeVpcOpts := &fcu.DescribeVpcsInput{
- VpcIds: []*string{aws.String(ID)},
- }
- resp, err := conn.VM.DescribeVpcs(DescribeVpcOpts)
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpcID.NotFound") {
- resp = nil
- } else {
- log.Printf("Error on VPCStateRefresh: %s", err)
- return nil, "", err
- }
- }
-
- if resp == nil {
- return nil, "", nil
- }
-
- vpc := resp.Vpcs[0]
- return vpc, *vpc.State, nil
- }
-}
diff --git a/outscale/resource_outscale_dhcp_options_link_test.go b/outscale/resource_outscale_dhcp_options_link_test.go
deleted file mode 100644
index 4552514e2..000000000
--- a/outscale/resource_outscale_dhcp_options_link_test.go
+++ /dev/null
@@ -1,90 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleDHCPOptionsAssociation_basic(t *testing.T) {
- t.Skip()
- var v fcu.Vpc
- var d fcu.DhcpOptions
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() { testAccPreCheck(t) },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckDHCPOptionsAssociationDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDHCPOptionsAssociationConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckDHCPOptionsExists("outscale_dhcp_option.foo", &d),
- //testAccCheckOutscaleOAPILinExists("outscale_lin.foo", &v), //TODO: fix once we refactor this resourceTestGetOMIByRegion
- testAccCheckDHCPOptionsAssociationExist("outscale_dhcp_option_link.foo", &v),
- ),
- },
- },
- })
-}
-
-func testAccCheckDHCPOptionsAssociationDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_dhcp_option_link" {
- continue
- }
-
- // Try to find the VPC associated to the DHCP Options set
- vpcs, err := findVPCsByDHCPOptionsID(conn, rs.Primary.Attributes["dhcp_options_id"])
- if err != nil {
- return err
- }
-
- if len(vpcs) > 0 {
- return fmt.Errorf("DHCP Options association is still associated to %d VPCs", len(vpcs))
- }
- }
-
- return nil
-}
-
-func testAccCheckDHCPOptionsAssociationExist(n string, vpc *fcu.Vpc) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No DHCP Options Set association ID is set")
- }
-
- if *vpc.DhcpOptionsId != rs.Primary.Attributes["dhcp_options_id"] {
- return fmt.Errorf("VPC %s does not have DHCP Options Set %s associated", *vpc.VpcId, rs.Primary.Attributes["dhcp_options_id"])
- }
-
- if *vpc.VpcId != rs.Primary.Attributes["vpc_id"] {
- return fmt.Errorf("DHCP Options Set %s is not associated with VPC %s", rs.Primary.Attributes["dhcp_options_id"], *vpc.VpcId)
- }
-
- return nil
- }
-}
-
-const testAccDHCPOptionsAssociationConfig = `
-resource "outscale_lin" "foo" {
- cidr_block = "10.1.0.0/16"
-}
-
-resource "outscale_dhcp_option" "foo" {}
-
-resource "outscale_dhcp_option_link" "foo" {
- vpc_id = "${outscale_lin.foo.id}"
- dhcp_options_id = "${outscale_dhcp_option.foo.id}"
-}
-`
diff --git a/outscale/resource_outscale_dhcp_options_test.go b/outscale/resource_outscale_dhcp_options_test.go
deleted file mode 100644
index d52aa89da..000000000
--- a/outscale/resource_outscale_dhcp_options_test.go
+++ /dev/null
@@ -1,193 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "os"
- "strconv"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleDHCPOptions_basic(t *testing.T) {
- t.Skip()
-
- var d fcu.DhcpOptions
- o := os.Getenv("OUTSCALE_OAPI")
-
- oapi, err := strconv.ParseBool(o)
- if err != nil {
- oapi = false
- }
-
- if oapi {
- t.Skip()
- }
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() { testAccPreCheck(t) },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckDHCPOptionsDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDHCPOptionsConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckDHCPOptionsExists("outscale_dhcp_option.foo", &d),
- ),
- },
- },
- })
-}
-
-func TestAccOutscaleDHCPOptions_deleteOptions(t *testing.T) {
- t.Skip()
- var d fcu.DhcpOptions
- o := os.Getenv("OUTSCALE_OAPI")
-
- oapi, err := strconv.ParseBool(o)
- if err != nil {
- oapi = false
- }
-
- if oapi {
- t.Skip()
- }
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() { testAccPreCheck(t) },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckDHCPOptionsDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccDHCPOptionsConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckDHCPOptionsExists("outscale_dhcp_option.foo", &d),
- testAccCheckDHCPOptionsDelete("outscale_dhcp_option.foo"),
- ),
- ExpectNonEmptyPlan: true,
- },
- },
- })
-}
-
-func testAccCheckDHCPOptionsDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_dhcp_option" {
- continue
- }
-
- var resp *fcu.DescribeDhcpOptionsOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeDhcpOptions(&fcu.DescribeDhcpOptionsInput{
- DhcpOptionsIds: []*string{
- aws.String(rs.Primary.ID),
- },
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if strings.Contains(fmt.Sprint(err), "InvalidDhcpOptionsID.NotFound") {
- return nil
- }
- if err == nil {
- if len(resp.DhcpOptions) > 0 {
- return fmt.Errorf("still exists")
- }
- return nil
- }
- }
-
- return nil
-}
-
-func testAccCheckDHCPOptionsExists(n string, d *fcu.DhcpOptions) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
-
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- // // resp, err := conn.VM.DescribeDhcpOptions(&fcu.DescribeDhcpOptionsInput{
- // DhcpOptionsIds: []*string{
- // aws.String(rs.Primary.ID),
- // },
- // })
-
- var resp *fcu.DescribeDhcpOptionsOutput
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeDhcpOptions(&fcu.DescribeDhcpOptionsInput{
- DhcpOptionsIds: []*string{
- aws.String(rs.Primary.ID),
- },
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
-
- return fmt.Errorf("DHCP Options not found: %s", err)
- }
-
- // if err != nil {
- // return err
- // }
- // if len(resp.DhcpOptions) == 0 {
- // return fmt.Errorf("DHCP Options not found")
- // }
-
- *d = *resp.DhcpOptions[0]
-
- return nil
- }
-}
-
-func testAccCheckDHCPOptionsDelete(n string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
-
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- _, err := conn.VM.DeleteDhcpOptions(&fcu.DeleteDhcpOptionsInput{
- DhcpOptionsId: aws.String(rs.Primary.ID),
- })
-
- return err
- }
-}
-
-const testAccDHCPOptionsConfig = `
-resource "outscale_dhcp_option" "foo" {}
-`
diff --git a/outscale/resource_outscale_firewall_rules_set.go b/outscale/resource_outscale_firewall_rules_set.go
deleted file mode 100644
index 5431805d4..000000000
--- a/outscale/resource_outscale_firewall_rules_set.go
+++ /dev/null
@@ -1,200 +0,0 @@
-package outscale
-
-import (
- "bytes"
- "fmt"
- "strconv"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/hashcode"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func getIPPerms() *schema.Schema {
- return &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "from_port": {
- Type: schema.TypeInt,
- Computed: true,
- },
- "to_port": {
- Type: schema.TypeInt,
- Computed: true,
- },
- "ip_protocol": {
- Type: schema.TypeString,
- Computed: true,
- },
- "ip_ranges": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeMap},
- },
- "prefix_list_ids": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeMap},
- },
- "groups": {
- Type: schema.TypeList,
- Optional: true,
- Elem: &schema.Schema{Type: schema.TypeMap},
- },
- },
- },
- }
-}
-
-func resourceOutscaleSecurityGroupDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- return resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err := conn.VM.DeleteSecurityGroup(&fcu.DeleteSecurityGroupInput{
- GroupId: aws.String(d.Id()),
- })
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") || strings.Contains(err.Error(), "DependencyViolation") {
- return resource.RetryableError(err)
- } else if strings.Contains(err.Error(), "InvalidGroup.NotFound") {
- return nil
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-}
-
-func idHash(rType, protocol string, toPort, fromPort int64, self bool) string {
- var buf bytes.Buffer
- buf.WriteString(fmt.Sprintf("%s-", rType))
- buf.WriteString(fmt.Sprintf("%d-", toPort))
- buf.WriteString(fmt.Sprintf("%d-", fromPort))
- buf.WriteString(fmt.Sprintf("%s-", strings.ToLower(protocol)))
- buf.WriteString(fmt.Sprintf("%t-", self))
-
- return fmt.Sprintf("rule-%d", hashcode.String(buf.String()))
-}
-
-func flattenSecurityGroups(list []*fcu.UserIdGroupPair, ownerID *string) []*fcu.GroupIdentifier {
- result := make([]*fcu.GroupIdentifier, 0, len(list))
- for _, g := range list {
- var userID *string
- if g.UserId != nil && *g.UserId != "" && (ownerID == nil || *ownerID != *g.UserId) {
- userID = g.UserId
- }
-
- vpc := g.GroupName == nil || *g.GroupName == ""
- var ID *string
- if vpc {
- ID = g.GroupId
- } else {
- ID = g.GroupName
- }
-
- if userID != nil {
- ID = aws.String(*userID + "/" + *ID)
- }
-
- if vpc {
- result = append(result, &fcu.GroupIdentifier{
- GroupId: ID,
- })
- } else {
- result = append(result, &fcu.GroupIdentifier{
- GroupId: g.GroupId,
- GroupName: ID,
- })
- }
- }
- return result
-}
-
-// SGStateRefreshFunc ...
-func SGStateRefreshFunc(conn *fcu.Client, ID string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
- req := &fcu.DescribeSecurityGroupsInput{
- GroupIds: []*string{aws.String(ID)},
- }
-
- var err error
- var resp *fcu.DescribeSecurityGroupsOutput
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeSecurityGroups(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- if ec2err, ok := err.(awserr.Error); ok {
- if ec2err.Code() == "InvalidSecurityGroupID.NotFound" ||
- ec2err.Code() == "InvalidGroup.NotFound" {
- resp = nil
- err = nil
- }
- }
-
- if err != nil {
- fmt.Printf("\n\nError on SGStateRefresh: %s", err)
- return nil, "", err
- }
- }
-
- if resp == nil {
- return nil, "", nil
- }
-
- group := resp.SecurityGroups[0]
- return group, "exists", nil
- }
-}
-
-func protocolForValue(v string) string {
- protocol := strings.ToLower(v)
- if protocol == "-1" || protocol == "all" {
- return "-1"
- }
- if _, ok := sgProtocolIntegers()[protocol]; ok {
- return protocol
- }
- p, err := strconv.Atoi(protocol)
- if err != nil {
- fmt.Printf("\n\n[WARN] Unable to determine valid protocol: %s", err)
- return protocol
- }
-
- for k, v := range sgProtocolIntegers() {
- if p == v {
- return strings.ToLower(k)
- }
- }
-
- return protocol
-}
-
-func sgProtocolIntegers() map[string]int {
- var protocolIntegers = make(map[string]int)
- protocolIntegers = map[string]int{
- "udp": 17,
- "tcp": 6,
- "icmp": 1,
- "all": -1,
- }
- return protocolIntegers
-}
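
The deleted protocolForValue helper normalizes whatever the user supplies (a name, a number, or "all") to either "-1" or a lowercase protocol name, and passes unknown numbers through unchanged. A small illustrative table test of that mapping (hypothetical, not part of the original suite):

package outscale

import "testing"

// Hypothetical table test documenting the normalization performed by the
// deleted protocolForValue helper above.
func TestProtocolForValueSketch(t *testing.T) {
	cases := map[string]string{
		"all": "-1", // "all" and "-1" both mean every protocol
		"-1":  "-1",
		"TCP": "tcp", // names are lowercased
		"6":   "tcp", // known numbers map back to their name
		"17":  "udp",
		"1":   "icmp",
		"50":  "50", // unknown numbers are passed through unchanged
	}
	for in, want := range cases {
		if got := protocolForValue(in); got != want {
			t.Errorf("protocolForValue(%q) = %q, want %q", in, got, want)
		}
	}
}
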
diff --git a/outscale/resource_outscale_firewall_rules_set_test.go b/outscale/resource_outscale_firewall_rules_set_test.go
deleted file mode 100644
index 603da9968..000000000
--- a/outscale/resource_outscale_firewall_rules_set_test.go
+++ /dev/null
@@ -1,139 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "os"
- "strconv"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleSecurityGroup_DefaultEgress_Classic(t *testing.T) {
- t.Skip()
-
- o := os.Getenv("OUTSCALE_OAPI")
-
- oapi, err := strconv.ParseBool(o)
- if err != nil {
- oapi = false
- }
-
- if oapi {
- t.Skip()
- }
- // Classic
- var group fcu.SecurityGroup
- resource.Test(t, resource.TestCase{
- PreCheck: func() { testAccPreCheck(t) },
- IDRefreshName: "outscale_firewall_rules_set.web",
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOutscaleSecurityGroupDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOutscaleSecurityGroupConfigClassic,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleSecurityGroupExists("outscale_firewall_rules_set.web", &group),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleSecurityGroupExists(n string, group *fcu.SecurityGroup) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No Security Group is set")
- }
-
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- req := &fcu.DescribeSecurityGroupsInput{
- GroupIds: []*string{aws.String(rs.Primary.ID)},
- }
- var err error
- var resp *fcu.DescribeSecurityGroupsOutput
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeSecurityGroups(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
- if err != nil {
- return err
- }
-
- if len(resp.SecurityGroups) > 0 && *resp.SecurityGroups[0].GroupId == rs.Primary.ID {
- *group = *resp.SecurityGroups[0]
- return nil
- }
-
- return fmt.Errorf("Security Group not found")
- }
-}
-
-func testAccCheckOutscaleSecurityGroupDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_firewall_rules_set" {
- continue
- }
-
- // Retrieve our group
- req := &fcu.DescribeSecurityGroupsInput{
- GroupIds: []*string{aws.String(rs.Primary.ID)},
- }
-
- var err error
- var resp *fcu.DescribeSecurityGroupsOutput
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeSecurityGroups(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err == nil {
- if len(resp.SecurityGroups) > 0 && *resp.SecurityGroups[0].GroupId == rs.Primary.ID {
- return fmt.Errorf("Security Group (%s) still exists", rs.Primary.ID)
- }
-
- return nil
- }
-
- if strings.Contains(err.Error(), "InvalidGroup.NotFound") {
- return nil
- }
- }
-
- return nil
-}
-
-const testAccOutscaleSecurityGroupConfigClassic = `
-resource "outscale_firewall_rules_set" "web" {
- group_name = "terraform_acceptance_test_example_1"
- group_description = "Used in the terraform acceptance tests"
-}
-`
diff --git a/outscale/resource_outscale_image.go b/outscale/resource_outscale_image.go
index 0955d2edd..bda48f01e 100644
--- a/outscale/resource_outscale_image.go
+++ b/outscale/resource_outscale_image.go
@@ -13,8 +13,8 @@ import (
"github.com/openlyinc/pointy"
"github.com/spf13/cast"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
const (
@@ -249,7 +249,7 @@ func resourceOAPIImageCreate(d *schema.ResourceData, meta interface{}) error {
image := resp.GetImage()
- fmt.Printf("Waiting for OMI %s to become available...", *image.ImageId)
+ log.Printf("[DEBUG] Waiting for OMI %s to become available...", *image.ImageId)
filterReq := &oscgo.ReadImagesOpts{
ReadImagesRequest: optional.NewInterface(oscgo.ReadImagesRequest{
diff --git a/outscale/resource_outscale_image_copy.go b/outscale/resource_outscale_image_copy.go
deleted file mode 100644
index 25aafa335..000000000
--- a/outscale/resource_outscale_image_copy.go
+++ /dev/null
@@ -1,210 +0,0 @@
-package outscale
-
-import (
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIImageCopy() *schema.Resource {
- return &schema.Resource{
- Create: resourceOAPIImageCopyCreate,
- Read: resourceOAPIImageRead,
-
- Delete: resourceOAPIImageDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Schema: map[string]*schema.Schema{
- "vm_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "name": {
- Type: schema.TypeString,
- Computed: true,
- Optional: true,
- ForceNew: true,
- },
- "architecture": {
- Type: schema.TypeString,
- Computed: true,
- },
- "creation_date": {
- Type: schema.TypeString,
- Computed: true,
- },
- "image_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "osu_location": {
- Type: schema.TypeString,
- Computed: true,
- },
- "account_alias": {
- Type: schema.TypeString,
- Computed: true,
- },
- "account_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
- "is_public": {
- Type: schema.TypeBool,
- Computed: true,
- },
- "root_device_name": {
- Type: schema.TypeString,
- Computed: true,
- },
- "root_device_type": {
- Type: schema.TypeString,
- Computed: true,
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- // Complex computed values
- "block_device_mappings": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "device_name": {
- Type: schema.TypeString,
- Computed: true,
- },
- "no_device": {
- Type: schema.TypeString,
- Computed: true,
- },
- "virtual_device_name": {
- Type: schema.TypeString,
- Computed: true,
- },
- "bsu": {
- Type: schema.TypeMap,
- Computed: true,
- },
- },
- },
- },
- "product_codes": {
- Type: schema.TypeSet,
- Computed: true,
- Set: amiProductCodesHash,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "product_code": {
- Type: schema.TypeString,
- Computed: true,
- },
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "state_comment": {
- Type: schema.TypeMap,
- Computed: true,
- },
- "tag": dataSourceTagsSchema(),
-
- //Argument
- "token": {
- Type: schema.TypeString,
- Optional: true,
- ForceNew: true,
- Computed: true,
- },
- "description": {
- Type: schema.TypeString,
- Optional: true,
- ForceNew: true,
- Computed: true,
- },
-
- "source_image_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "source_region_name": {
- Type: schema.TypeString,
- Optional: true,
- ForceNew: true,
- Computed: true,
- },
- },
- }
-}
-
-func resourceOAPIImageCopyCreate(d *schema.ResourceData, meta interface{}) error {
- client := meta.(*OutscaleClient).FCU
- req := &fcu.CopyImageInput{}
- if v, ok := d.GetOk("name"); ok {
- req.Name = aws.String(v.(string))
- }
- if v, ok := d.GetOk("description"); ok {
- req.Description = aws.String(v.(string))
- }
- if v, ok := d.GetOk("source_image_id"); ok {
- req.SourceImageId = aws.String(v.(string))
- }
- if v, ok := d.GetOk("source_region_name"); ok {
- req.SourceRegion = aws.String(v.(string))
- }
- if v, ok := d.GetOk("token"); ok {
- req.ClientToken = aws.String(v.(string))
- }
-
- var res *fcu.CopyImageOutput
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- res, err = client.VM.CopyImage(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
-
- return err
- }
-
- id := *res.ImageId
- d.SetId(id)
- d.Partial(true) // make sure we record the id even if the rest of this gets interrupted
- d.Set("image_id", id)
- d.SetPartial("image_id")
- d.Partial(false)
- //TODO
- //_, err = resourceOutscaleOAPIImageWaitForAvailable(id, client, 1)
- // if err != nil {
- // return err
- // }
-
- return resourceOAPIImageUpdate(d, meta)
-}
diff --git a/outscale/resource_outscale_image_copy_test.go b/outscale/resource_outscale_image_copy_test.go
deleted file mode 100644
index 290bd398d..000000000
--- a/outscale/resource_outscale_image_copy_test.go
+++ /dev/null
@@ -1,212 +0,0 @@
-package outscale
-
-import (
- "errors"
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleOAPIImageCopy(t *testing.T) {
- t.Skip()
- var amiID string
-
- snapshots := []string{}
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPIImageCopyConfig,
- Check: func(state *terraform.State) error {
- rs, ok := state.RootModule().Resources["outscale_image_copy.test"]
- if !ok {
- return fmt.Errorf("Image resource not found")
- }
-
- amiID = rs.Primary.ID
-
- if amiID == "" {
- return fmt.Errorf("Image id is not set")
- }
-
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- req := &fcu.DescribeImagesInput{
- ImageIds: []*string{aws.String(amiID)},
- }
-
- var describe *fcu.DescribeImagesOutput
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- describe, err = conn.VM.DescribeImages(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
-
- return err
- }
-
- if len(describe.Images) != 1 ||
- *describe.Images[0].ImageId != rs.Primary.ID {
- return fmt.Errorf("Image not found")
- }
-
- image := describe.Images[0]
- if expected := "available"; *image.State != expected {
- return fmt.Errorf("invalid image state; expected %v, got %v", expected, image.State)
- }
- if expected := "machine"; *image.ImageType != expected {
- return fmt.Errorf("wrong image type; expected %v, got %v", expected, image.ImageType)
- }
- // if expected := "terraform-acc-ami-copy"; *image.Name != expected {
- // return fmt.Errorf("wrong name; expected %v, got %v", expected, image.Name)
- // }
-
- for _, bdm := range image.BlockDeviceMappings {
- // The snapshot ID might not be set,
- // even for a block device that is an
- // EBS volume.
- if bdm.Ebs != nil && bdm.Ebs.SnapshotId != nil {
- snapshots = append(snapshots, *bdm.Ebs.SnapshotId)
- }
- }
-
- if expected := 1; len(snapshots) != expected {
- return fmt.Errorf("wrong number of snapshots; expected %v, got %v", expected, len(snapshots))
- }
-
- return nil
- },
- },
- },
- CheckDestroy: func(state *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- diReq := &fcu.DescribeImagesInput{
- ImageIds: []*string{aws.String(amiID)},
- }
-
- var diRes *fcu.DescribeImagesOutput
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- diRes, err = conn.VM.DescribeImages(diReq)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidAMIID.NotFound") {
- return nil
- }
- return err
- }
-
- if len(diRes.Images) > 0 {
- state := diRes.Images[0].State
- return fmt.Errorf("Image %v remains in state %v", amiID, state)
- }
-
- stillExist := make([]string, 0, len(snapshots))
- checkErrors := make(map[string]error)
- for _, snapshotID := range snapshots {
- dsReq := &fcu.DescribeSnapshotsInput{
- SnapshotIds: []*string{aws.String(snapshotID)},
- }
-
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
-
- _, err = conn.VM.DescribeSnapshots(dsReq)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err == nil {
- stillExist = append(stillExist, snapshotID)
- continue
- }
-
- awsErr, ok := err.(awserr.Error)
- if !ok {
- checkErrors[snapshotID] = err
- continue
- }
-
- if awsErr.Code() != "InvalidSnapshot.NotFound" {
- checkErrors[snapshotID] = err
- continue
- }
- }
-
- if len(stillExist) > 0 || len(checkErrors) > 0 {
- errParts := []string{
- "Expected all snapshots to be gone, but:",
- }
- for _, snapshotID := range stillExist {
- errParts = append(
- errParts,
- fmt.Sprintf("- %v still exists", snapshotID),
- )
- }
- for snapshotID, err := range checkErrors {
- errParts = append(
- errParts,
- fmt.Sprintf("- checking %v gave error: %v", snapshotID, err),
- )
- }
- return errors.New(strings.Join(errParts, "\n"))
- }
-
- return nil
- },
- })
-}
-
-var testAccOutscaleOAPIImageCopyConfig = `
- resource "outscale_vm" "outscale_vm" {
- count = 1
- image_id = "ami-880caa66"
- type = "c4.large"
- }
-
- resource "outscale_image" "outscale_image" {
- name = "image_${outscale_vm.outscale_vm.id}"
- vm_id = "${outscale_vm.outscale_vm.id}"
- }
-
- resource "outscale_image_copy" "test" {
- count = 1
-
- source_image_id = "${outscale_image.outscale_image.image_id}"
- source_region_name = "eu-west-2"
- }
-`
diff --git a/outscale/resource_outscale_image_launch_permission.go b/outscale/resource_outscale_image_launch_permission.go
index f0cf0d7e2..d0679a0b8 100644
--- a/outscale/resource_outscale_image_launch_permission.go
+++ b/outscale/resource_outscale_image_launch_permission.go
@@ -12,8 +12,8 @@ import (
"github.com/spf13/cast"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPIImageLaunchPermission() *schema.Resource {
diff --git a/outscale/resource_outscale_image_launch_permission_test.go b/outscale/resource_outscale_image_launch_permission_test.go
index 579f0901e..c83a8610b 100644
--- a/outscale/resource_outscale_image_launch_permission_test.go
+++ b/outscale/resource_outscale_image_launch_permission_test.go
@@ -3,33 +3,31 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"os"
"strings"
"testing"
"time"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
"github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/acctest"
- r "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ r "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIImageLaunchPermission_Basic(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
+ accountID := os.Getenv("OUTSCALE_ACCOUNT")
imageID := ""
- accountID := "520679080430"
rInt := acctest.RandInt()
r.Test(t, r.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []r.TestStep{
// Scaffold everything
@@ -67,17 +65,14 @@ func TestAccOutscaleOAPIImageLaunchPermission_Basic(t *testing.T) {
}
func TestAccOutscaleOAPIImageLaunchPermissionDestruction_Basic(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
var imageID string
rInt := acctest.RandInt()
r.Test(t, r.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []r.TestStep{
// Scaffold everything
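
The updated launch-permission tests now read OUTSCALE_IMAGEID and OUTSCALE_ACCOUNT from the environment instead of hard-coding an OMI and account ID. A hedged sketch of a pre-check that skips the tests when those variables are unset; the helper name is hypothetical and not part of this change:

package outscale

import (
	"os"
	"testing"
)

// testAccPreCheckLaunchPermission (hypothetical) skips the acceptance tests
// above when the environment variables they now depend on are missing, so a
// partially configured environment skips cleanly instead of running with
// empty configuration values.
func testAccPreCheckLaunchPermission(t *testing.T) {
	for _, k := range []string{"OUTSCALE_IMAGEID", "OUTSCALE_ACCOUNT", "OUTSCALE_REGION"} {
		if os.Getenv(k) == "" {
			t.Skipf("%s must be set for launch permission acceptance tests", k)
		}
	}
}
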
diff --git a/outscale/resource_outscale_image_register.go b/outscale/resource_outscale_image_register.go
deleted file mode 100644
index f7aa09149..000000000
--- a/outscale/resource_outscale_image_register.go
+++ /dev/null
@@ -1,299 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
-
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIImageRegister() *schema.Resource {
- return &schema.Resource{
- Create: resourceOAPIImageRegisterCreate,
- Read: resourceOAPIImageRead,
- Delete: resourceOAPIImageRegisterDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Timeouts: &schema.ResourceTimeout{
- Create: schema.DefaultTimeout(10 * time.Minute),
- Delete: schema.DefaultTimeout(10 * time.Minute),
- },
-
- Schema: map[string]*schema.Schema{
- //Image
- "instance_id": {
- Type: schema.TypeString,
- Optional: true,
- ForceNew: true,
- },
- "name": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "dry_run": {
- Type: schema.TypeBool,
- Optional: true,
- ForceNew: true,
- },
- "no_reboot": {
- Type: schema.TypeBool,
- Optional: true,
- Computed: true,
- },
- "description": {
- Type: schema.TypeString,
- Optional: true,
- ForceNew: true,
- },
-
- "architecture": {
- Type: schema.TypeString,
- Computed: true,
- Optional: true,
- ForceNew: true,
- },
- "creation_date": {
- Type: schema.TypeString,
- Computed: true,
- },
- "image_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "osu_location": {
- Type: schema.TypeString,
- Computed: true,
- ForceNew: true,
- Optional: true,
- },
- "image_owner_alias": {
- Type: schema.TypeString,
- Computed: true,
- },
- "image_owner_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "image_state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "image_type": {
- Type: schema.TypeString,
- Computed: true,
- },
- "is_public": {
- Type: schema.TypeBool,
- Computed: true,
- },
- "root_device_name": {
- Type: schema.TypeString,
- Computed: true,
- Optional: true,
- ForceNew: true,
- },
- "root_device_type": {
- Type: schema.TypeString,
- Computed: true,
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- // Complex computed values
- "block_device_mapping": {
- Type: schema.TypeList,
- Computed: true,
- Optional: true,
- ForceNew: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "device_name": {
- Type: schema.TypeString,
- Computed: true,
- Optional: true,
- ForceNew: true,
- },
- "no_device": {
- Type: schema.TypeString,
- Computed: true,
- Optional: true,
- ForceNew: true,
- },
- "virtual_device_name": {
- Type: schema.TypeString,
- Computed: true,
- Optional: true,
- ForceNew: true,
- },
- "bsu": {
- Type: schema.TypeMap,
- Computed: true,
- Optional: true,
- ForceNew: true,
- },
- },
- },
- },
- "product_codes": {
- Type: schema.TypeSet,
- Computed: true,
- Set: amiProductCodesHash,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "product_code": {
- Type: schema.TypeString,
- Computed: true,
- },
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "state_reason": {
- Type: schema.TypeMap,
- Computed: true,
- },
- "tag_set": dataSourceTagsSchema(),
-
- // Image Register
- "arquitecture": {
- Type: schema.TypeString,
- Optional: true,
- ForceNew: true,
- Computed: true,
- },
- },
- }
-}
-
-func resourceOAPIImageRegisterCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- request := &fcu.RegisterImageInput{}
-
- architecture, architectureOk := d.GetOk("architecture")
- blockDeviceMapping, blockDeviceMappingOk := d.GetOk("block_device_mapping")
- description, descriptionOk := d.GetOk("description")
- imageLocation, imageLocationOk := d.GetOk("osu_location")
- name, nameOk := d.GetOk("name")
- rootDeviceName, rootDeviceNameOk := d.GetOk("root_device_name")
- instanceID, instanceIDOk := d.GetOk("instance_id")
-
- if !nameOk && !instanceIDOk {
- return fmt.Errorf("please provide the required attributes name and instance_id")
- }
-
- if architectureOk {
- request.Architecture = aws.String(architecture.(string))
- }
- if blockDeviceMappingOk {
- maps := blockDeviceMapping.([]interface{})
- mappings := []*fcu.BlockDeviceMapping{}
-
- for _, m := range maps {
- f := m.(map[string]interface{})
- mapping := &fcu.BlockDeviceMapping{
- DeviceName: aws.String(f["device_name"].(string)),
- }
-
- e := f["ebs"].(map[string]interface{})
- var del bool
- if e["delete_on_termination"].(string) == "0" {
- del = false
- } else {
- del = true
- }
-
- ebs := &fcu.EbsBlockDevice{
- DeleteOnTermination: aws.Bool(del),
- }
-
- mapping.Ebs = ebs
-
- mappings = append(mappings, mapping)
- }
-
- request.BlockDeviceMappings = mappings
- }
- if descriptionOk {
- request.Description = aws.String(description.(string))
- }
- if imageLocationOk {
- request.ImageLocation = aws.String(imageLocation.(string))
- }
- if rootDeviceNameOk {
- request.RootDeviceName = aws.String(rootDeviceName.(string))
- }
- if instanceIDOk {
- request.InstanceId = aws.String(instanceID.(string))
- }
-
- request.Name = aws.String(name.(string))
-
- var registerResp *fcu.RegisterImageOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
-
- registerResp, err = conn.VM.RegisterImage(request)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error register image %s", err)
- }
-
- d.SetId(*registerResp.ImageId)
- d.Set("image_id", *registerResp.ImageId)
-
- //TODO
- // _, err = resourceOutscaleOAPIImageWaitForAvailable(*registerResp.ImageId, conn, 1)
- // if err != nil {
- // return err
- // }
-
- return resourceOAPIImageRead(d, meta)
-}
-
-func resourceOAPIImageRegisterDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
-
- _, err = conn.VM.DeregisterImage(&fcu.DeregisterImageInput{
- ImageId: aws.String(d.Id()),
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
-
- return fmt.Errorf("Error Deregister image %s", err)
- }
- return nil
-}
diff --git a/outscale/resource_outscale_image_register_test.go b/outscale/resource_outscale_image_register_test.go
deleted file mode 100644
index d1fccfa61..000000000
--- a/outscale/resource_outscale_image_register_test.go
+++ /dev/null
@@ -1,102 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleOAPIImageRegister_basic(t *testing.T) {
- t.Skip()
-
- r := acctest.RandInt()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOutscaleOAPIImageRegisterDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPIImageRegisterConfig(r),
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleOAPIImageRegisterExists("outscale_image_register.outscale_image_register"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleOAPIImageRegisterDestroy(s *terraform.State) error {
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_image_register" {
- continue
- }
- amiID := rs.Primary.ID
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- diReq := &fcu.DescribeImagesInput{
- ImageIds: []*string{aws.String(amiID)},
- }
-
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.DescribeImages(diReq)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidAMIID.NotFound") {
- return nil
- }
- return fmt.Errorf("[DEBUG TES] Error register image %s", err)
- }
-
- }
-
- return nil
-}
-
-func testAccCheckOutscaleOAPIImageRegisterExists(n string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No Role name is set")
- }
- return nil
- }
-}
-
-func testAccOutscaleOAPIImageRegisterConfig(r int) string {
- return fmt.Sprintf(`
- resource "outscale_vm" "outscale_vm" {
- count = 1
- image_id = "ami-880caa66"
- type = "c4.large"
- }
-
- resource "outscale_image_register" "outscale_image_register" {
- name = "image_%d"
- vm_id = "${outscale_vm.outscale_vm.id}"
- }
- `, r)
-}
diff --git a/outscale/resource_outscale_image_tasks.go b/outscale/resource_outscale_image_tasks.go
deleted file mode 100644
index d7b765273..000000000
--- a/outscale/resource_outscale_image_tasks.go
+++ /dev/null
@@ -1,349 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIImageTasks() *schema.Resource {
- return &schema.Resource{
- Create: resourceOAPIImageTasksCreate,
- Read: resourceOAPIImageTasksRead,
- Delete: resourceOAPIImageTasksDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Timeouts: &schema.ResourceTimeout{
- Create: schema.DefaultTimeout(40 * time.Minute),
- Delete: schema.DefaultTimeout(40 * time.Minute),
- },
-
- Schema: map[string]*schema.Schema{
- "osu_export": {
- Type: schema.TypeMap,
- Required: true,
- ForceNew: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "disk_image_format": {
- Type: schema.TypeString,
- Required: true,
- },
- "manifest_url": {
- Type: schema.TypeString,
- Optional: true,
- },
- "osu_api_key": {
- Type: schema.TypeMap,
- Optional: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "api_key_id": {
- Type: schema.TypeString,
- Required: true,
- },
- "secret_key": {
- Type: schema.TypeString,
- Required: true,
- },
- },
- },
- },
- "osu_bucket": {
- Type: schema.TypeString,
- Optional: true,
- },
- },
- },
- },
- "image_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "image_export_task": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "completion": {
- Type: schema.TypeString,
- Computed: true,
- },
- "osu_export": {
- Type: schema.TypeMap,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "disk_image_format": {
- Type: schema.TypeString,
- Computed: true,
- },
- "manifest_url": {
- Type: schema.TypeString,
- Computed: true,
- },
- "osu_api_key": {
- Type: schema.TypeMap,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "api_key_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "secret_key": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "osu_bucket": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "image_export": {
- Type: schema.TypeMap,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "image_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "task_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "image_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "comment": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- },
- }
-}
-
-func resourceOAPIImageTasksCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- eto, etoOk := d.GetOk("osu_export")
- v, ok := d.GetOk("image_id")
- request := &fcu.CreateImageExportTaskInput{}
-
- if !etoOk && !ok {
- return fmt.Errorf("Please provide the required attributes osu_export and image_id")
- }
-
- request.ImageId = aws.String(v.(string))
-
- if etoOk {
- e := eto.(map[string]interface{})
- et := &fcu.ImageExportToOsuTaskSpecification{}
- if v, ok := e["disk_image_format"]; ok {
- et.DiskImageFormat = aws.String(v.(string))
- }
- if v, ok := e["manifest_url"]; ok {
- et.OsuManifestUrl = aws.String(v.(string))
- }
- if v, ok := e["osu_bucket"]; ok {
- et.OsuBucket = aws.String(v.(string))
- }
- if v, ok := e["osu_api_key"]; ok {
- w := v.(map[string]interface{})
- et.OsuAkSk = &fcu.ExportToOsuAccessKeySpecification{
- AccessKey: aws.String(w["api_key_id"].(string)),
- SecretKey: aws.String(w["secret_key"].(string)),
- }
- }
- request.ExportToOsu = et
- }
-
- var resp *fcu.CreateImageExportTaskOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.CreateImageExportTask(request)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("[DEBUG] Error image task %s", err)
- }
-
- ID := *resp.ImageExportTask.ImageExportTaskId
- d.SetId(ID)
-
- _, err = resourceOutscaleImageTaskWaitForAvailable(ID, conn, 1)
- if err != nil {
- return err
- }
-
- return resourceOAPIImageTasksRead(d, meta)
-}
-
-func resourceOutscaleImageTaskWaitForAvailable(ID string, client *fcu.Client, i int) (*fcu.Image, error) {
- fmt.Printf("Waiting for Image Task %s to become available...", ID)
-
- stateConf := &resource.StateChangeConf{
- Pending: []string{"pending", "pending/queued", "queued"},
- Target: []string{"available"},
- Refresh: OAPIImageTaskStateRefreshFunc(client, ID),
- Timeout: OutscaleImageRetryTimeout,
- Delay: OutscaleImageRetryDelay,
- MinTimeout: OutscaleImageRetryMinTimeout,
- }
-
- info, err := stateConf.WaitForState()
- if err != nil {
- return nil, fmt.Errorf("Error waiting for OMI (%s) to be ready: %v", ID, err)
- }
- return info.(*fcu.Image), nil
-}
-
-func resourceOAPIImageTasksRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var resp *fcu.DescribeImageExportTasksOutput
- var err error
-
- log.Printf("[DEBUG] DESCRIBE IMAGE TASK")
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeImageExportTasks(&fcu.DescribeImageExportTasksInput{
- ImageExportTaskId: []*string{aws.String(d.Id())},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error reading task image %s", err)
- }
-
- imageExportTask := make([]map[string]interface{}, len(resp.ImageExportTask))
- for k, v := range resp.ImageExportTask {
- i := make(map[string]interface{})
- i["completion"] = *v.Completion
- i["task_id"] = *v.ImageExportTaskId
- i["image_id"] = *v.ImageId
- i["state"] = *v.State
- i["comment"] = *v.StatusMessage
-
- exportToOsu := make(map[string]interface{})
- exportToOsu["disk_image_format"] = *v.ExportToOsu.DiskImageFormat
- exportToOsu["osu_bucket"] = *v.ExportToOsu.OsuBucket
- exportToOsu["manifest_url"] = *v.ExportToOsu.OsuManifestUrl
- exportToOsu["osu_prefix"] = *v.ExportToOsu.OsuPrefix
-
- osuAkSk := make(map[string]interface{})
- osuAkSk["api_key_id"] = *v.ExportToOsu.OsuAkSk.AccessKey
- osuAkSk["secret_key"] = *v.ExportToOsu.OsuAkSk.SecretKey
-
- exportToOsu["osu_api_key"] = osuAkSk
-
- i["osu_export"] = exportToOsu
-
- imageExportTask[k] = i
- }
-
- if err := d.Set("image_export_task", imageExportTask); err != nil {
- return err
- }
-
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
-
-func resourceOAPIImageTasksDelete(d *schema.ResourceData, meta interface{}) error {
-
- d.SetId("")
-
- return nil
-}
-
-// OAPIImageTaskStateRefreshFunc ...
-func OAPIImageTaskStateRefreshFunc(client *fcu.Client, ID string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
- emptyResp := &fcu.DescribeImageExportTasksOutput{}
-
- var resp *fcu.DescribeImageExportTasksOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = client.VM.DescribeImageExportTasks(&fcu.DescribeImageExportTasksInput{
- ImageExportTaskId: []*string{aws.String(ID)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if e := fmt.Sprint(err); strings.Contains(e, "InvalidAMIID.NotFound") {
- log.Printf("[INFO] OMI %s state %s", ID, "destroyed")
- return emptyResp, "destroyed", nil
-
- } else if resp != nil && len(resp.ImageExportTask) == 0 {
- log.Printf("[INFO] OMI %s state %s", ID, "destroyed")
- return emptyResp, "destroyed", nil
- } else {
- return emptyResp, "", fmt.Errorf("Error on refresh: %+v", err)
- }
- }
-
- if resp == nil || resp.ImageExportTask == nil || len(resp.ImageExportTask) == 0 {
- return emptyResp, "destroyed", nil
- }
-
- log.Printf("[INFO] OMI %s state %s", *resp.ImageExportTask[0].ImageId, *resp.ImageExportTask[0].State)
-
-		// OMI is valid, so return its state
- return resp.ImageExportTask[0], *resp.ImageExportTask[0].State, nil
- }
-}
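Note: the deleted resource above wraps every FCU call in the same retry-on-"RequestLimitExceeded:" closure, and the pattern recurs throughout the surviving oscgo-based code as well. A minimal sketch of how such a wrapper can be factored out with the terraform-plugin-sdk helpers follows; retryOnThrottle is a hypothetical name used only for illustration, not a helper defined in this provider.

package outscale

import (
	"strings"
	"time"

	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)

// retryOnThrottle retries an API call while the endpoint is throttling and
// stops immediately on any other error.
func retryOnThrottle(timeout time.Duration, call func() error) error {
	return resource.Retry(timeout, func() *resource.RetryError {
		if err := call(); err != nil {
			if strings.Contains(err.Error(), "RequestLimitExceeded") {
				return resource.RetryableError(err) // throttled: keep polling until the timeout
			}
			return resource.NonRetryableError(err) // real failure: surface it right away
		}
		return nil // success: stop retrying
	})
}

A call site that still needs the response can capture it in the closure, e.g. err := retryOnThrottle(5*time.Minute, func() error { var e error; resp, e = conn.VM.CreateImageExportTask(request); return e }).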
diff --git a/outscale/resource_outscale_image_tasks_test.go b/outscale/resource_outscale_image_tasks_test.go
deleted file mode 100644
index 6211444e6..000000000
--- a/outscale/resource_outscale_image_tasks_test.go
+++ /dev/null
@@ -1,69 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPIImageTask_basic(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPIImageTaskConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleOAPIImageTaskExists("outscale_image_tasks.outscale_image_tasks"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleOAPIImageTaskExists(n string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No image task id is set")
- }
-
- return nil
- }
-}
-
-var testAccOutscaleOAPIImageTaskConfig = `
- resource "outscale_vm" "outscale_vm" {
- count = 1
-
- image_id = "ami-880caa66"
- type = "c4.large"
- }
-
- resource "outscale_image" "outscale_image" {
- name = "image_${outscale_vm.outscale_vm.id}"
- vm_id = "${outscale_vm.outscale_vm.id}"
- }
-
- resource "outscale_image_tasks" "outscale_image_tasks" {
- count = 1
-
- osu_export {
- disk_image_format = "raw"
- osu_bucket = "test"
- }
-
- image_id = "${outscale_image.outscale_image.image_id}"
- }
-`
diff --git a/outscale/resource_outscale_image_test.go b/outscale/resource_outscale_image_test.go
index fb18cb51a..668d03630 100644
--- a/outscale/resource_outscale_image_test.go
+++ b/outscale/resource_outscale_image_test.go
@@ -3,28 +3,26 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- "github.com/marinsalinas/osc-sdk-go"
"os"
"testing"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIImage_basic(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
var ami oscgo.Image
rInt := acctest.RandInt()
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOAPIImageDestroy,
Steps: []resource.TestStep{
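The rewritten test reads the OMI from OUTSCALE_IMAGEID instead of the old getOMIByRegion lookup, so a missing variable now only surfaces when the API rejects an empty image ID. A small, hypothetical guard could fail fast in PreCheck; checkRequiredEnvVars is not a helper in this repository, it is a sketch assuming the usual acceptance-test setup.

package outscale

import (
	"os"
	"testing"
)

// checkRequiredEnvVars fails the test early when a required variable is unset.
func checkRequiredEnvVars(t *testing.T, names ...string) {
	for _, name := range names {
		if os.Getenv(name) == "" {
			t.Fatalf("%s must be set for acceptance tests", name)
		}
	}
}

It would be wired in as PreCheck: func() { testAccPreCheck(t); checkRequiredEnvVars(t, "OUTSCALE_IMAGEID", "OUTSCALE_REGION") }.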
diff --git a/outscale/resource_outscale_internet_service.go b/outscale/resource_outscale_internet_service.go
index bff150a95..09909a580 100644
--- a/outscale/resource_outscale_internet_service.go
+++ b/outscale/resource_outscale_internet_service.go
@@ -9,8 +9,8 @@ import (
"github.com/antihax/optional"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
oscgo "github.com/marinsalinas/osc-sdk-go"
)
@@ -80,8 +80,8 @@ func resourceOutscaleOAPIInternetServiceRead(d *schema.ResourceData, meta interf
}
var resp oscgo.ReadInternetServicesResponse
- var err error
- err = resource.Retry(120*time.Second, func() *resource.RetryError {
+
+ err := resource.Retry(120*time.Second, func() *resource.RetryError {
r, _, err := conn.InternetServiceApi.ReadInternetServices(context.Background(), &oscgo.ReadInternetServicesOpts{ReadInternetServicesRequest: optional.NewInterface(req)})
if err != nil {
diff --git a/outscale/resource_outscale_internet_service_link.go b/outscale/resource_outscale_internet_service_link.go
index 32151294a..8b34363f0 100644
--- a/outscale/resource_outscale_internet_service_link.go
+++ b/outscale/resource_outscale_internet_service_link.go
@@ -10,8 +10,8 @@ import (
"github.com/antihax/optional"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPIInternetServiceLink() *schema.Resource {
diff --git a/outscale/resource_outscale_internet_service_link_test.go b/outscale/resource_outscale_internet_service_link_test.go
index 4d5b535a2..6e95200b3 100644
--- a/outscale/resource_outscale_internet_service_link_test.go
+++ b/outscale/resource_outscale_internet_service_link_test.go
@@ -8,16 +8,13 @@ import (
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIInternetServiceLink_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOSCAPIInternetServiceLinkDestroyed,
Steps: []resource.TestStep{
diff --git a/outscale/resource_outscale_internet_service_test.go b/outscale/resource_outscale_internet_service_test.go
index 055dae2c7..a8d4bbea6 100644
--- a/outscale/resource_outscale_internet_service_test.go
+++ b/outscale/resource_outscale_internet_service_test.go
@@ -8,16 +8,13 @@ import (
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIInternetService_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleInternetServiceDestroyed,
Steps: []resource.TestStep{
diff --git a/outscale/resource_outscale_keypair.go b/outscale/resource_outscale_keypair.go
index 5ec761bf2..2eeb50dd1 100644
--- a/outscale/resource_outscale_keypair.go
+++ b/outscale/resource_outscale_keypair.go
@@ -8,8 +8,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPIKeyPair() *schema.Resource {
diff --git a/outscale/resource_outscale_keypair_importation.go b/outscale/resource_outscale_keypair_importation.go
deleted file mode 100644
index 8a76583fd..000000000
--- a/outscale/resource_outscale_keypair_importation.go
+++ /dev/null
@@ -1,188 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIKeyPairImportation() *schema.Resource {
- return &schema.Resource{
- Create: resourceOAPIKeyPairImportationCreate,
- Read: resourceOAPIKeyPairImportationRead,
- Delete: resourceOAPIKeyPairImportationDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Timeouts: &schema.ResourceTimeout{
- Create: schema.DefaultTimeout(10 * time.Minute),
- Read: schema.DefaultTimeout(10 * time.Minute),
- Delete: schema.DefaultTimeout(10 * time.Minute),
- },
-
- Schema: getOAPIKeyPairImportationSchema(),
- }
-}
-
-func resourceOAPIKeyPairImportationCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var keyName string
- if v, ok := d.GetOk("public_key_material"); ok {
- keyName = v.(string)
- } else {
- keyName = resource.UniqueId()
- d.Set("public_key_material", keyName)
- }
- if publicKey, ok := d.GetOk("public_key_material"); ok {
- req := &fcu.ImportKeyPairInput{
- KeyName: aws.String(keyName),
- PublicKeyMaterial: []byte(publicKey.(string)),
- }
-
- var resp *fcu.ImportKeyPairOutput
- err := resource.Retry(120*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.ImportKeyPair(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.RetryableError(err)
- })
-
- if err != nil {
- return fmt.Errorf("Error import KeyPair: %s", err)
- }
- d.SetId(*resp.KeyName)
-
- } else {
- req := &fcu.CreateKeyPairInput{
- KeyName: aws.String(keyName),
- }
-
- var resp *fcu.CreateKeyPairOutput
- err := resource.Retry(120*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.CreateKeyPair(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.RetryableError(err)
- })
- if err != nil {
- return fmt.Errorf("Error creating OAPIKeyPairImportation: %s", err)
- }
- d.SetId(*resp.KeyName)
- d.Set("public_key_material", *resp.KeyMaterial)
- }
- return nil
-}
-
-func resourceOAPIKeyPairImportationRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
- req := &fcu.DescribeKeyPairsInput{
- KeyNames: []*string{aws.String(d.Id())},
- }
-
- var resp *fcu.DescribeKeyPairsOutput
- err := resource.Retry(120*time.Second, func() *resource.RetryError {
- var err error
- resp, err = conn.VM.DescribeKeyPairs(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.RetryableError(err)
- })
-
- if err != nil {
- return err
- }
-
- if err != nil {
- awsErr, ok := err.(awserr.Error)
- if ok && awsErr.Code() == "InvalidKeyPair.NotFound" {
- d.SetId("")
- return nil
- }
- return fmt.Errorf("Error retrieving KeyPair: %s", err)
- }
-
- for _, keyPair := range resp.KeyPairs {
- if *keyPair.KeyName == d.Id() {
- d.Set("public_key_material", keyPair.KeyName)
- d.Set("fingerprint", keyPair.KeyFingerprint)
- return nil
- }
- }
-
- return fmt.Errorf("Unable to find key pair within: %#v", resp.KeyPairs)
-}
-
-func resourceOAPIKeyPairImportationDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- _, err = conn.VM.DeleteKeyPairs(&fcu.DeleteKeyPairInput{
- KeyName: aws.String(d.Id()),
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
-
- return err
-}
-
-func getOAPIKeyPairImportationSchema() map[string]*schema.Schema {
- return map[string]*schema.Schema{
- // Attributes
- "key_fingerprint": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "public_key_material": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "key_name": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "request_id": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- }
-}
diff --git a/outscale/resource_outscale_keypair_importation_test.go b/outscale/resource_outscale_keypair_importation_test.go
deleted file mode 100644
index 01f23c3bb..000000000
--- a/outscale/resource_outscale_keypair_importation_test.go
+++ /dev/null
@@ -1,275 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPIKeyPairImportation_basic(t *testing.T) {
- t.Skip()
-
- rInt := acctest.RandInt()
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOutscaleOAPIKeyPairImportationDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPIKeyPairConfig(rInt),
- Check: resource.ComposeTestCheckFunc(
- // testAccCheckOutscaleOAPIKeyPairExists("outscale_keypair_importation.a_key_pair", &conf),
- // testAccCheckOutscaleOAPIKeyPairFingerprint("8a:47:95:bb:b1:45:66:ef:99:f5:80:91:cc:be:94:48", &conf),
- ),
- },
- },
- })
-}
-
-func TestAccOutscaleOAPIKeyPairImportation_basic_name(t *testing.T) {
- t.Skip()
-
- rInt := acctest.RandInt()
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOutscaleOAPIKeyPairImportationDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPIKeyPairConfigRetrieveName(rInt),
- Check: resource.ComposeTestCheckFunc(
- //testAccCheckOutscaleOAPIKeyPairExists("outscale_keypair_importation.a_key_pair", &conf),
- resource.TestCheckResourceAttr(
- "outscale_keypair_importation.a_key_pair", "key_name", "tf-acc-key-pair",
- ),
- ),
- },
- },
- })
-}
-
-func TestAccOutscaleOAPIKeyPairImportation_generatedName(t *testing.T) {
- t.Skip()
- var conf fcu.KeyPairInfo
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOutscaleOAPIKeyPairDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPIKeyPairConfigGeneratedName,
- Check: resource.ComposeTestCheckFunc(
- //testAccCheckOutscaleOAPIKeyPairExists("outscale_keypair_importation.a_key_pair", &conf),
- //testAccCheckOutscaleOAPIKeyPairFingerprint("8a:47:95:bb:b1:45:66:ef:99:f5:80:91:cc:be:94:48", &conf),
- func(s *terraform.State) error {
- if conf.KeyName == nil {
- return fmt.Errorf("bad: No SG name")
- }
- if !strings.HasPrefix(*conf.KeyName, "terraform-") {
- return fmt.Errorf("No terraform- prefix: %s", *conf.KeyName)
- }
- return nil
- },
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleOAPIKeyPairImportationDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient)
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_keypair_importation" {
- continue
- }
-
- // Try to find key pair
- var resp *fcu.DescribeKeyPairsOutput
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- resp, err = conn.FCU.VM.DescribeKeyPairs(&fcu.DescribeKeyPairsInput{
- KeyNames: []*string{aws.String(rs.Primary.ID)},
- })
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return resource.RetryableError(err)
- })
-
- if resp == nil {
- return nil
- }
-
- if err == nil {
- if len(resp.KeyPairs) > 0 {
- return fmt.Errorf("still exist")
- }
- return nil
- }
-
- // Verify the error is what we want
- ec2err, ok := err.(awserr.Error)
- if !ok {
- return err
- }
- if ec2err.Code() != "InvalidKeyPair.NotFound" {
- return err
- }
- }
-
- return nil
-}
-
-func testAccCheckOutscaleOAPIKeyPairImportationFingerprint(expectedFingerprint string, conf *fcu.KeyPairInfo) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- if *conf.KeyFingerprint != expectedFingerprint {
- return fmt.Errorf("incorrect fingerprint. expected %s, got %s", expectedFingerprint, *conf.KeyFingerprint)
- }
- return nil
- }
-}
-
-func testAccCheckOutscaleOAPIKeyPairImportationExists(n string, res *fcu.KeyPairInfo) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No KeyPair name is set")
- }
- var resp *fcu.DescribeKeyPairsOutput
- conn := testAccProvider.Meta().(*OutscaleClient)
-
- err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- var err error
- resp, err = conn.FCU.VM.DescribeKeyPairs(&fcu.DescribeKeyPairsInput{
- KeyNames: []*string{aws.String(rs.Primary.ID)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
- if err != nil {
- return err
- }
- if len(resp.KeyPairs) != 1 ||
- *resp.KeyPairs[0].KeyName != rs.Primary.ID {
- return fmt.Errorf("KeyPair not found")
- }
-
- *res = *resp.KeyPairs[0]
-
- return nil
- }
-}
-
-func testAccCheckOutscaleOAPIKeyPairImportationNamePrefix(t *testing.T) {
- t.Skip()
- //var conf fcu.KeyPairInfo
-
- rInt := acctest.RandInt()
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- IDRefreshName: "outscale_keypair_importation.a_key_pair",
- IDRefreshIgnore: []string{"key_name_prefix"},
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOutscaleOAPIKeyPairDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccCheckOutscaleOAPIKeyPairPrefixNameConfig(rInt),
- Check: resource.ComposeTestCheckFunc(
- //testAccCheckOutscaleOAPIKeyPairExists("outscale_keypair_importation.a_key_pair", &conf),
- testAccCheckOutscaleOAPIKeyPairGeneratedNamePrefix(
- "outscale_keypair_importation.a_key_pair", "baz-"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleOAPIKeyPairImportationGeneratedNamePrefix(
- resource, prefix string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- r, ok := s.RootModule().Resources[resource]
- if !ok {
- return fmt.Errorf("Resource not found")
- }
- name, ok := r.Primary.Attributes["name"]
- if !ok {
- return fmt.Errorf("Name attr not found: %#v", r.Primary.Attributes)
- }
- if !strings.HasPrefix(name, prefix) {
- return fmt.Errorf("Name: %q, does not have prefix: %q", name, prefix)
- }
- return nil
- }
-}
-
-func testAccOutscaleOAPIKeyPairImportationConfig(r int) string {
- return fmt.Sprintf(
- `
-resource "outscale_keypair_importation" "a_key_pair" {
- key_name = "tf-acc-key-pair-%d"
- public_key_material = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD3F6tyPEFEzV0LX3X8BsXdMsQz1x2cEikKDEY0aIj41qgxMCP/iteneqXSIFZBp5vizPvaoIR3Um9xK7PGoW8giupGn+EPuxIA4cDM4vzOqOkiMPhz5XK0whEjkVzTo4+S0puvDZuwIsdiW9mxhJc7tgBNL0cYlWSYVkz4G/fslNfRPW5mYAM49f4fhtxPb5ok4Q2Lg9dPKVHO/Bgeu5woMc7RY0p1ej6D4CKFE6lymSDJpW0YHX/wqE9+cfEauh7xZcG0q9t2ta6F6fmX0agvpFyZo8aFbXeUBr7osSCJNgvavWbM/06niWrOvYX2xwWdhXmXSrbX8ZbabVohBK41 phodgson@thoughtworks.com"
-}
-`, r)
-}
-
-func testAccOutscaleOAPIKeyPairImportationConfigRetrieveName(r int) string {
- return fmt.Sprintf(
- `
-resource "outscale_keypair_importation" "a_key_pair" {
- key_name = "tf-acc-key-pair"
-}
-`)
-}
-
-const testAccOutscaleOAPIKeyPairImportationConfigGeneratedName = `
-resource "outscale_keypair_importation" "a_key_pair" {
- public_key_material = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD3F6tyPEFEzV0LX3X8BsXdMsQz1x2cEikKDEY0aIj41qgxMCP/iteneqXSIFZBp5vizPvaoIR3Um9xK7PGoW8giupGn+EPuxIA4cDM4vzOqOkiMPhz5XK0whEjkVzTo4+S0puvDZuwIsdiW9mxhJc7tgBNL0cYlWSYVkz4G/fslNfRPW5mYAM49f4fhtxPb5ok4Q2Lg9dPKVHO/Bgeu5woMc7RY0p1ej6D4CKFE6lymSDJpW0YHX/wqE9+cfEauh7xZcG0q9t2ta6F6fmX0agvpFyZo8aFbXeUBr7osSCJNgvavWbM/06niWrOvYX2xwWdhXmXSrbX8ZbabVohBK41 phodgson@thoughtworks.com"
-}
-`
-
-func testAccCheckOutscaleOAPIKeyPairImportationPrefixNameConfig(r int) string {
- return fmt.Sprintf(
- `
-resource "outscale_keypair_importation" "a_key_pair" {
- key_name_prefix = "baz-%d"
- public_key_material = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD3F6tyPEFEzV0LX3X8BsXdMsQz1x2cEikKDEY0aIj41qgxMCP/iteneqXSIFZBp5vizPvaoIR3Um9xK7PGoW8giupGn+EPuxIA4cDM4vzOqOkiMPhz5XK0whEjkVzTo4+S0puvDZuwIsdiW9mxhJc7tgBNL0cYlWSYVkz4G/fslNfRPW5mYAM49f4fhtxPb5ok4Q2Lg9dPKVHO/Bgeu5woMc7RY0p1ej6D4CKFE6lymSDJpW0YHX/wqE9+cfEauh7xZcG0q9t2ta6F6fmX0agvpFyZo8aFbXeUBr7osSCJNgvavWbM/06niWrOvYX2xwWdhXmXSrbX8ZbabVohBK41 phodgson@thoughtworks.com"
-}
-`, r)
-}
diff --git a/outscale/resource_outscale_keypair_test.go b/outscale/resource_outscale_keypair_test.go
index d36acf820..08615bb0e 100644
--- a/outscale/resource_outscale_keypair_test.go
+++ b/outscale/resource_outscale_keypair_test.go
@@ -3,16 +3,17 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"strings"
"testing"
"time"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIKeyPair_basic(t *testing.T) {
@@ -20,10 +21,7 @@ func TestAccOutscaleOAPIKeyPair_basic(t *testing.T) {
rInt := acctest.RandInt()
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIKeyPairDestroy,
Steps: []resource.TestStep{
@@ -43,10 +41,7 @@ func TestAccOutscaleOAPIKeyPair_retrieveName(t *testing.T) {
rInt := acctest.RandInt()
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIKeyPairDestroy,
Steps: []resource.TestStep{
@@ -67,10 +62,7 @@ func TestAccOutscaleOAPIKeyPair_generatedName(t *testing.T) {
var conf oscgo.Keypair
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIKeyPairDestroy,
Steps: []resource.TestStep{
@@ -190,34 +182,7 @@ func testAccCheckOutscaleOAPIKeyPairExists(n string, res *oscgo.Keypair) resourc
}
}
-func testAccCheckOutscaleOAPIKeyPairNamePrefix(t *testing.T) {
- var conf oscgo.Keypair
-
- rInt := acctest.RandInt()
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- IDRefreshName: "outscale_keypair.a_key_pair",
- IDRefreshIgnore: []string{"keypair_name_prefix"},
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOutscaleOAPIKeyPairDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccCheckOutscaleOAPIKeyPairPrefixNameConfig(rInt),
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleOAPIKeyPairExists("outscale_keypair.a_key_pair", &conf),
- testAccCheckOutscaleOAPIKeyPairGeneratedNamePrefix(
- "outscale_keypair.a_key_pair", "baz-"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleOAPIKeyPairGeneratedNamePrefix(
- resource, prefix string) resource.TestCheckFunc {
+func testAccCheckOutscaleOAPIKeyPairGeneratedNamePrefix(resource, prefix string) resource.TestCheckFunc {
return func(s *terraform.State) error {
r, ok := s.RootModule().Resources[resource]
if !ok {
@@ -256,12 +221,3 @@ const testAccOutscaleOAPIKeyPairConfigGeneratedName = `
public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD3F6tyPEFEzV0LX3X8BsXdMsQz1x2cEikKDEY0aIj41qgxMCP/iteneqXSIFZBp5vizPvaoIR3Um9xK7PGoW8giupGn+EPuxIA4cDM4vzOqOkiMPhz5XK0whEjkVzTo4+S0puvDZuwIsdiW9mxhJc7tgBNL0cYlWSYVkz4G/fslNfRPW5mYAM49f4fhtxPb5ok4Q2Lg9dPKVHO/Bgeu5woMc7RY0p1ej6D4CKFE6lymSDJpW0YHX/wqE9+cfEauh7xZcG0q9t2ta6F6fmX0agvpFyZo8aFbXeUBr7osSCJNgvavWbM/06niWrOvYX2xwWdhXmXSrbX8ZbabVohBK41 phodgson@thoughtworks.com"
}
`
-
-func testAccCheckOutscaleOAPIKeyPairPrefixNameConfig(r int) string {
- return fmt.Sprintf(`
- resource "outscale_keypair" "a_key_pair" {
- keypair_name_prefix = "baz-%d"
- public_key = "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQD3F6tyPEFEzV0LX3X8BsXdMsQz1x2cEikKDEY0aIj41qgxMCP/iteneqXSIFZBp5vizPvaoIR3Um9xK7PGoW8giupGn+EPuxIA4cDM4vzOqOkiMPhz5XK0whEjkVzTo4+S0puvDZuwIsdiW9mxhJc7tgBNL0cYlWSYVkz4G/fslNfRPW5mYAM49f4fhtxPb5ok4Q2Lg9dPKVHO/Bgeu5woMc7RY0p1ej6D4CKFE6lymSDJpW0YHX/wqE9+cfEauh7xZcG0q9t2ta6F6fmX0agvpFyZo8aFbXeUBr7osSCJNgvavWbM/06niWrOvYX2xwWdhXmXSrbX8ZbabVohBK41 phodgson@thoughtworks.com"
- }
- `, r)
-}
diff --git a/outscale/resource_outscale_nat_service.go b/outscale/resource_outscale_nat_service.go
index ab6446154..b28797881 100644
--- a/outscale/resource_outscale_nat_service.go
+++ b/outscale/resource_outscale_nat_service.go
@@ -10,8 +10,8 @@ import (
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPINatService() *schema.Resource {
@@ -27,12 +27,10 @@ func resourceOutscaleOAPINatService() *schema.Resource {
"public_ip_id": {
Type: schema.TypeString,
Required: true,
- ForceNew: true,
},
"subnet_id": {
Type: schema.TypeString,
Required: true,
- ForceNew: true,
},
"nat_service_id": {
@@ -159,11 +157,9 @@ func resourceOAPINatServiceRead(d *schema.ResourceData, meta interface{}) error
set("nat_service_id", natService.NatServiceId)
set("net_id", natService.NetId)
set("state", natService.State)
+ set("subnet_id", natService.SubnetId)
- if err := set("public_ips", getOSCPublicIPs(*natService.PublicIps)); err != nil {
- return err
- }
- if err := set("tags", getOapiTagSet(natService.Tags)); err != nil {
+ if err := set("public_ips", getOSCPublicIPs(natService.GetPublicIps())); err != nil {
return err
}
@@ -260,59 +256,3 @@ func getOSCPublicIPs(publicIps []oscgo.PublicIpLight) (res []map[string]interfac
}
return
}
-
-func getOAPINatServiceSchema() map[string]*schema.Schema {
- return map[string]*schema.Schema{
- // Arguments
- "public_ip_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "token": {
- Type: schema.TypeString,
- Optional: true,
- ForceNew: true,
- Computed: true,
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "subnet_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- // Attributes
- "public_ips": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "public_ip_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "public_ip": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "nat_service_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "net_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "tags": tagsListOAPISchema(),
- }
-}
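The read function above now calls natService.GetPublicIps() instead of dereferencing *natService.PublicIps. In OpenAPI-generated clients such as osc-sdk-go, optional fields are pointers and the Get* accessors return a zero value when the field is unset, which avoids a nil-pointer panic on sparse API responses. A stand-alone sketch of that behaviour (the struct below is a stand-in, not the real oscgo.NatService):

package main

import "fmt"

// natServiceExample mimics a generated model with an optional field.
type natServiceExample struct {
	PublicIps *[]string // nil when the API omits the field
}

// GetPublicIps mirrors the generated accessor style: zero value instead of a panic.
func (n *natServiceExample) GetPublicIps() []string {
	if n == nil || n.PublicIps == nil {
		return nil
	}
	return *n.PublicIps
}

func main() {
	n := &natServiceExample{}          // PublicIps never set
	fmt.Println(len(n.GetPublicIps())) // prints 0
	// fmt.Println(len(*n.PublicIps))  // would panic: nil pointer dereference
}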
diff --git a/outscale/resource_outscale_nat_service_test.go b/outscale/resource_outscale_nat_service_test.go
index dcd4ffc0d..4900c5f01 100644
--- a/outscale/resource_outscale_nat_service_test.go
+++ b/outscale/resource_outscale_nat_service_test.go
@@ -3,22 +3,20 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- "github.com/marinsalinas/osc-sdk-go"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPINatService_basic(t *testing.T) {
var natService oscgo.NatService
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOAPINatGatewayDestroy,
Steps: []resource.TestStep{
@@ -36,10 +34,7 @@ func TestAccOutscaleOAPINatService_basicWithDataSource(t *testing.T) {
var natService oscgo.NatService
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOAPINatGatewayDestroy,
Steps: []resource.TestStep{
diff --git a/outscale/resource_outscale_lin.go b/outscale/resource_outscale_net.go
similarity index 75%
rename from outscale/resource_outscale_lin.go
rename to outscale/resource_outscale_net.go
index 8c04825c9..24639b47f 100644
--- a/outscale/resource_outscale_lin.go
+++ b/outscale/resource_outscale_net.go
@@ -3,16 +3,15 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
- "log"
"strings"
"time"
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+ "github.com/terraform-providers/terraform-provider-outscale/utils"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPINet() *schema.Resource {
@@ -59,12 +58,8 @@ func resourceOutscaleOAPINetCreate(d *schema.ResourceData, meta interface{}) err
return resource.RetryableError(err)
})
- var errString string
-
if err != nil {
- errString = err.Error()
-
- return fmt.Errorf("Error creating Outscale Net: %s", errString)
+ return fmt.Errorf("error creating Outscale Net: %s", utils.GetErrorResponse(err))
}
//SetTags
@@ -80,52 +75,6 @@ func resourceOutscaleOAPINetCreate(d *schema.ResourceData, meta interface{}) err
return resourceOutscaleOAPINetRead(d, meta)
}
-func resourceOutscaleLinRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- id := d.Id()
-
- req := &fcu.DescribeVpcsInput{
- VpcIds: []*string{aws.String(id)},
- }
-
- var resp *fcu.DescribeVpcsOutput
- var err error
- err = resource.Retry(120*time.Second, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpcs(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.RetryableError(err)
- })
- if err != nil {
- log.Printf("[DEBUG] Error reading lin (%s)", err)
- }
-
- if resp == nil {
- d.SetId("")
- return fmt.Errorf("Lin not found")
- }
-
- if len(resp.Vpcs) == 0 {
- d.SetId("")
- return fmt.Errorf("Lin not found")
- }
-
- d.Set("cidr_block", resp.Vpcs[0].CidrBlock)
- d.Set("instance_tenancy", resp.Vpcs[0].InstanceTenancy)
- d.Set("dhcp_options_id", resp.Vpcs[0].DhcpOptionsId)
- d.Set("request_id", resp.RequestId)
- d.Set("state", resp.Vpcs[0].State)
- d.Set("vpc_id", resp.Vpcs[0].VpcId)
-
- return d.Set("tag_set", tagsToMap(resp.Vpcs[0].Tags))
-}
-
func resourceOutscaleOAPINetRead(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*OutscaleClient).OSCAPI
diff --git a/outscale/resource_outscale_net_api_access.go b/outscale/resource_outscale_net_api_access.go
deleted file mode 100644
index 44967f33f..000000000
--- a/outscale/resource_outscale_net_api_access.go
+++ /dev/null
@@ -1,417 +0,0 @@
-package outscale
-
-import (
- "errors"
- "fmt"
- "log"
- "sort"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/errwrap"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/hashicorp/terraform/helper/structure"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIVpcEndpoint() *schema.Resource {
- return &schema.Resource{
- Create: resourceOutscaleOAPIVpcEndpointCreate,
- Read: resourceOutscaleOAPIVpcEndpointRead,
- Update: resourceOutscaleOAPIVpcEndpointUpdate,
- Delete: resourceOutscaleOAPIVpcEndpointDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Schema: map[string]*schema.Schema{
- "net_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "prefix_list_name": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "policy": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "route_table_id": {
- Type: schema.TypeSet,
- Optional: true,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- Set: schema.HashString,
- },
- "net_api_access_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "prefix_list_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "ip_ranges": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
- "request_id": {
- Type: schema.TypeString,
- Optional: true,
- },
- },
- }
-}
-
-func resourceOutscaleOAPIVpcEndpointCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- req := &fcu.CreateVpcEndpointInput{
- VpcId: aws.String(d.Get("net_id").(string)),
- ServiceName: aws.String(d.Get("prefix_list_name").(string)),
- }
-
- setVpcEndpointCreateListOAPI(d, "route_table_id", &req.RouteTableIds)
-
- log.Printf("[DEBUG] Creating VPC Endpoint: %#v", req)
-
- var err error
- var resp *fcu.CreateVpcEndpointOutput
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.CreateVpcEndpoint(req)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error creating VPC Endpoint: %s", err.Error())
- }
-
- vpce := resp.VpcEndpoint
- d.SetId(aws.StringValue(vpce.VpcEndpointId))
-
- if err := vpcEndpointWaitUntilAvailableOAPI(d, conn); err != nil {
- return err
- }
-
- return resourceOutscaleOAPIVpcEndpointRead(d, meta)
-}
-
-func resourceOutscaleOAPIVpcEndpointRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var resp *fcu.DescribeVpcEndpointsOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpcEndpoints(&fcu.DescribeVpcEndpointsInput{
- VpcEndpointIds: aws.StringSlice([]string{d.Id()}),
- })
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- vpce := resp.VpcEndpoints[0]
- state := *vpce.State
-
- if err != nil && state != "failed" {
- return fmt.Errorf("Error reading VPC Endpoint: %s", err.Error())
- }
-
- terminalStates := map[string]bool{
- "deleted": true,
- "deleting": true,
- "failed": true,
- "expired": true,
- "rejected": true,
- }
- if _, ok := terminalStates[state]; ok {
- log.Printf("[WARN] VPC Endpoint (%s) in state (%s), removing from state", d.Id(), state)
- d.SetId("")
- return nil
- }
-
- d.Set("request_id", *resp.RequestId)
- return vpcEndpointAttributesOAPI(d, vpce, conn)
-}
-
-func resourceOutscaleOAPIVpcEndpointUpdate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- req := &fcu.ModifyVpcEndpointInput{
- VpcEndpointId: aws.String(d.Id()),
- }
-
- if d.HasChange("policy") {
- policy, err := structure.NormalizeJsonString(d.Get("policy"))
- if err != nil {
- return errwrap.Wrapf("policy contains an invalid JSON: {{err}}", err)
- }
-
- if policy == "" {
- req.ResetPolicy = aws.Bool(true)
- } else {
- req.PolicyDocument = aws.String(policy)
- }
- }
-
- setVpcEndpointUpdateListsOAPI(d, "route_table_id", &req.AddRouteTableIds, &req.RemoveRouteTableIds)
-
- log.Printf("[DEBUG] Updating VPC Endpoint: %#v", req)
-
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.ModifyVpcEndpoint(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error updating VPC Endpoint: %s", err.Error())
- }
-
- if err := vpcEndpointWaitUntilAvailableOAPI(d, conn); err != nil {
- return err
- }
-
- return resourceOutscaleOAPIVpcEndpointRead(d, meta)
-}
-
-func resourceOutscaleOAPIVpcEndpointDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- log.Printf("[DEBUG] Deleting VPC Endpoint: %s", d.Id())
-
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.DeleteVpcEndpoints(&fcu.DeleteVpcEndpointsInput{
- VpcEndpointIds: aws.StringSlice([]string{d.Id()}),
- })
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpcEndpointId.NotFound") {
- log.Printf("[DEBUG] VPC Endpoint %s is already gone", d.Id())
- } else {
- return fmt.Errorf("Error deleting VPC Endpoint: %s", err.Error())
- }
- }
-
- stateConf := &resource.StateChangeConf{
- Pending: []string{"available", "pending", "deleting"},
- Target: []string{"deleted"},
- Refresh: vpcEndpointStateRefreshOAPI(conn, d.Id()),
- Timeout: 10 * time.Minute,
- Delay: 5 * time.Second,
- MinTimeout: 5 * time.Second,
- }
- if _, err = stateConf.WaitForState(); err != nil {
- return fmt.Errorf("Error waiting for VPC Endpoint %s to delete: %s", d.Id(), err.Error())
- }
-
- return nil
-}
-
-func vpcEndpointStateRefreshOAPI(conn *fcu.Client, vpceID string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
- log.Printf("[DEBUG] Reading VPC Endpoint: %s", vpceID)
-
- var resp *fcu.DescribeVpcEndpointsOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpcEndpoints(&fcu.DescribeVpcEndpointsInput{
- VpcEndpointIds: aws.StringSlice([]string{vpceID}),
- })
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpcEndpointId.NotFound") {
- return false, "deleted", nil
- }
-
- return nil, "", err
- }
-
- vpce := resp.VpcEndpoints[0]
- state := aws.StringValue(vpce.State)
- // No use in retrying if the endpoint is in a failed state.
- if state == "failed" {
- return nil, state, errors.New("VPC Endpoint is in a failed state")
- }
- return vpce, state, nil
- }
-}
-
-func vpcEndpointWaitUntilAvailableOAPI(d *schema.ResourceData, conn *fcu.Client) error {
- stateConf := &resource.StateChangeConf{
- Pending: []string{"pending"},
- Target: []string{"available", "pendingAcceptance"},
- Refresh: vpcEndpointStateRefreshOAPI(conn, d.Id()),
- Timeout: 10 * time.Minute,
- Delay: 5 * time.Second,
- MinTimeout: 5 * time.Second,
- }
- if _, err := stateConf.WaitForState(); err != nil {
- return fmt.Errorf("Error waiting for VPC Endpoint %s to become available: %s", d.Id(), err.Error())
- }
-
- return nil
-}
-
-func vpcEndpointAttributesOAPI(d *schema.ResourceData, vpce *fcu.VpcEndpoint, conn *fcu.Client) error {
- d.Set("state", vpce.State)
- d.Set("net_id", vpce.VpcId)
-
- serviceName := aws.StringValue(vpce.ServiceName)
- d.Set("prefix_list_name", serviceName)
- d.Set("net_api_access_id", aws.StringValue(vpce.VpcEndpointId))
-
- policy, err := structure.NormalizeJsonString(aws.StringValue(vpce.PolicyDocument))
- if err != nil {
- return errwrap.Wrapf("policy contains an invalid JSON: {{err}}", err)
- }
- d.Set("policy", policy)
-
- d.Set("route_table_id", flattenStringList(vpce.RouteTableIds))
-
- req := &fcu.DescribePrefixListsInput{}
- req.Filters = buildFCUAttributeFilterListOAPI(
- map[string]string{
- "prefix-list-name": serviceName,
- },
- )
-
- var resp *fcu.DescribePrefixListsOutput
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribePrefixLists(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- return err
- }
- if resp != nil && len(resp.PrefixLists) > 0 {
- if len(resp.PrefixLists) > 1 {
- return fmt.Errorf("multiple prefix lists associated with the service name '%s'. Unexpected", serviceName)
- }
-
- pl := resp.PrefixLists[0]
- d.Set("prefix_list_id", pl.PrefixListId)
- d.Set("ip_ranges", flattenStringList(pl.Cidrs))
- } else {
- d.Set("ip_ranges", make([]string, 0))
- }
-
- return nil
-}
-
-func setVpcEndpointCreateListOAPI(d *schema.ResourceData, key string, c *[]*string) {
- if v, ok := d.GetOk(key); ok {
- list := v.(*schema.Set).List()
- if len(list) > 0 {
- *c = expandStringList(list)
- }
- }
-}
-
-func setVpcEndpointUpdateListsOAPI(d *schema.ResourceData, key string, a, r *[]*string) {
- if d.HasChange(key) {
- o, n := d.GetChange(key)
- os := o.(*schema.Set)
- ns := n.(*schema.Set)
-
- add := expandStringList(ns.Difference(os).List())
- if len(add) > 0 {
- *a = add
- }
-
- remove := expandStringList(os.Difference(ns).List())
- if len(remove) > 0 {
- *r = remove
- }
- }
-}
-
-func buildFCUAttributeFilterListOAPI(attrs map[string]string) []*fcu.Filter {
- var filters []*fcu.Filter
-
- // sort the filters by name to make the output deterministic
- var names []string
- for filterName := range attrs {
- names = append(names, filterName)
- }
-
- sort.Strings(names)
-
- for _, filterName := range names {
- value := attrs[filterName]
- if value == "" {
- continue
- }
-
- filters = append(filters, &fcu.Filter{
- Name: aws.String(filterName),
- Values: []*string{aws.String(value)},
- })
- }
-
- return filters
-}
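The deleted endpoint code (like the image-task resource earlier in this diff) builds a resource.StateChangeConf by hand at every call site. A compact sketch of the underlying pattern, with waitForState as an illustrative name rather than an existing helper in this provider:

package outscale

import (
	"fmt"
	"time"

	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)

// waitForState polls refresh until the resource reports one of the target states
// or the timeout elapses.
func waitForState(refresh resource.StateRefreshFunc, pending, target []string, timeout time.Duration) (interface{}, error) {
	stateConf := &resource.StateChangeConf{
		Pending:    pending,
		Target:     target,
		Refresh:    refresh,
		Timeout:    timeout,
		Delay:      5 * time.Second, // wait before the first poll
		MinTimeout: 5 * time.Second, // minimum interval between polls
	}
	outcome, err := stateConf.WaitForState()
	if err != nil {
		return nil, fmt.Errorf("error waiting for state %v: %s", target, err)
	}
	return outcome, nil
}

The refresh function carries the per-resource logic: it returns the current object, its state string (for example "pending", "available", "deleted"), and any terminal error.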
diff --git a/outscale/resource_outscale_net_api_access_test.go b/outscale/resource_outscale_net_api_access_test.go
deleted file mode 100644
index dd456a1f1..000000000
--- a/outscale/resource_outscale_net_api_access_test.go
+++ /dev/null
@@ -1,107 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleLinOAPIAccess_basic(t *testing.T) {
- t.Skip()
- var conf fcu.VpcEndpoint
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOutscaleLinOAPIAccessDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleLinOAPIAccessConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleLinOAPIAccessExists("outscale_net_api_access.link", &conf),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleLinOAPIAccessExists(n string, res *fcu.VpcEndpoint) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No api_access id is set")
- }
-
- return nil
- }
-}
-
-func testAccCheckOutscaleLinOAPIAccessDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient)
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_net_api_access" {
- continue
- }
-
- id := rs.Primary.Attributes["net_id"]
-
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
-
- _, err = conn.FCU.VM.DescribeVpcEndpoints(&fcu.DescribeVpcEndpointsInput{
- VpcEndpointIds: []*string{aws.String(id)},
- })
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if strings.Contains(fmt.Sprint(err), "InvalidVpcEndpointId.NotFound") {
- return nil
- }
-
- if err != nil {
- return err
- }
-
- }
- return nil
-}
-
-const testAccOutscaleLinOAPIAccessConfig = `
-resource "outscale_net" "foo" {
- ip_ranges = "10.1.0.0/16"
-}
-
-resource "outscale_route_table" "foo" {
- net_id = "${outscale_net.foo.id}"
-}
-
-resource "outscale_net_api_access" "link" {
- net_id = "${outscale_net.foo.id}"
- route_table_id = [
- "${outscale_route_table.foo.id}"
- ]
- prefix_list_name = "com.outscale.eu-west-2.osu"
-}
-`
diff --git a/outscale/resource_outscale_net_attributes.go b/outscale/resource_outscale_net_attributes.go
index 28eede4e8..910e49e30 100644
--- a/outscale/resource_outscale_net_attributes.go
+++ b/outscale/resource_outscale_net_attributes.go
@@ -3,14 +3,16 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+ "github.com/terraform-providers/terraform-provider-outscale/utils"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPILinAttributes() *schema.Resource {
@@ -66,16 +68,13 @@ func resourceOutscaleOAPILinAttrCreate(d *schema.ResourceData, meta interface{})
return nil
})
- var errString string
-
if err != nil {
- errString = err.Error()
- return fmt.Errorf("[DEBUG] Error creating net attribute. Details: %s", errString)
+		return fmt.Errorf("error creating net attribute: %s", utils.GetErrorResponse(err))
}
d.Set("request_id", resp.ResponseContext.GetRequestId())
- d.SetId(resource.UniqueId())
+ d.SetId(resp.Net.GetNetId())
return resourceOutscaleOAPILinAttrRead(d, meta)
}
@@ -105,8 +104,8 @@ func resourceOutscaleOAPILinAttrUpdate(d *schema.ResourceData, meta interface{})
return nil
})
if err != nil {
- log.Printf("[DEBUG] Error creating lin (%s)", err)
- return err
+		return fmt.Errorf("error updating net attribute: %s", utils.GetErrorResponse(err))
+
}
return resourceOutscaleOAPILinAttrRead(d, meta)
@@ -116,7 +115,7 @@ func resourceOutscaleOAPILinAttrRead(d *schema.ResourceData, meta interface{}) e
conn := meta.(*OutscaleClient).OSCAPI
filters := oscgo.FiltersNet{
- NetIds: &[]string{d.Get("net_id").(string)},
+ NetIds: &[]string{d.Id()},
}
req := oscgo.ReadNetsRequest{
@@ -137,12 +136,12 @@ func resourceOutscaleOAPILinAttrRead(d *schema.ResourceData, meta interface{}) e
return resource.RetryableError(err)
})
if err != nil {
- log.Printf("[DEBUG] Error reading lin (%s)", err)
+ log.Printf("[DEBUG] Error reading lin (%s)", utils.GetErrorResponse(err))
}
if len(resp.GetNets()) == 0 {
d.SetId("")
- return fmt.Errorf("oAPI Lin not found")
+		return fmt.Errorf("network not found")
}
d.Set("net_id", resp.GetNets()[0].GetNetId())
diff --git a/outscale/resource_outscale_net_attributes_test.go b/outscale/resource_outscale_net_attributes_test.go
index 84fb1f030..3ae8306a3 100644
--- a/outscale/resource_outscale_net_attributes_test.go
+++ b/outscale/resource_outscale_net_attributes_test.go
@@ -3,16 +3,13 @@ package outscale
import (
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPILinAttr_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
diff --git a/outscale/resource_outscale_net_peering.go b/outscale/resource_outscale_net_peering.go
index 83d482153..05a1b2ea8 100644
--- a/outscale/resource_outscale_net_peering.go
+++ b/outscale/resource_outscale_net_peering.go
@@ -12,8 +12,8 @@ import (
"time"
"github.com/hashicorp/errwrap"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPILinPeeringConnection() *schema.Resource {
diff --git a/outscale/resource_outscale_net_peering_acceptation.go b/outscale/resource_outscale_net_peering_acceptation.go
index 95b4c0f98..7ecba4b53 100644
--- a/outscale/resource_outscale_net_peering_acceptation.go
+++ b/outscale/resource_outscale_net_peering_acceptation.go
@@ -9,8 +9,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPILinPeeringConnectionAccepter() *schema.Resource {
diff --git a/outscale/resource_outscale_net_peering_acceptation_test.go b/outscale/resource_outscale_net_peering_acceptation_test.go
index 14b34ea19..d2400bc26 100644
--- a/outscale/resource_outscale_net_peering_acceptation_test.go
+++ b/outscale/resource_outscale_net_peering_acceptation_test.go
@@ -4,16 +4,13 @@ import (
"fmt"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPILinPeeringConnectionAccepter_sameAccount(t *testing.T) {
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccOutscaleOAPILinPeeringConnectionAccepterDestroy,
Steps: []resource.TestStep{
diff --git a/outscale/resource_outscale_net_peering_test.go b/outscale/resource_outscale_net_peering_test.go
index e7bf19794..dfcff90cb 100644
--- a/outscale/resource_outscale_net_peering_test.go
+++ b/outscale/resource_outscale_net_peering_test.go
@@ -3,25 +3,23 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPILinPeeringConnection_basic(t *testing.T) {
var connection oscgo.NetPeering
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_net_peering.foo",
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPILinPeeringConnectionDestroy,
@@ -55,10 +53,7 @@ func TestAccOutscaleOAPILinPeeringConnection_plan(t *testing.T) {
}
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPILinPeeringConnectionDestroy,
Steps: []resource.TestStep{
diff --git a/outscale/resource_outscale_net_test.go b/outscale/resource_outscale_net_test.go
index 2d9428c1f..75f1340eb 100644
--- a/outscale/resource_outscale_net_test.go
+++ b/outscale/resource_outscale_net_test.go
@@ -3,14 +3,15 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPILin_basic(t *testing.T) {
@@ -18,10 +19,7 @@ func TestAccOutscaleOAPILin_basic(t *testing.T) {
var conf2 oscgo.Net
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
// CheckDestroy: testAccCheckOutscaleLinDestroyed, // we need to create the destroyed test case
Steps: []resource.TestStep{
@@ -99,57 +97,6 @@ func testAccCheckOutscaleOAPILinExists(n string, res *oscgo.Net) resource.TestCh
}
}
-//Missing on Swagger Spec
-// func testAccCheckOutscaleOAPILinDestroyed(s *terraform.State) error {
-// conn := testAccProvider.Meta().(*OutscaleClient)
-
-// for _, rs := range s.RootModule().Resources {
-// if rs.Type != "outscale_net" {
-// continue
-// }
-
-// // Try to find an internet gateway
-// var resp *oscgo.ReadGate
-// err := resource.Retry(5*time.Minute, func() *resource.RetryError {
-// var err error
-// resp, err = conn.FCU.VM.DescribeInternetGateways(&fcu.DescribeInternetGatewaysInput{
-// InternetGatewayIds: []*string{aws.String(rs.Primary.ID)},
-// })
-
-// if err != nil {
-// if strings.Contains(err.Error(), "RequestLimitExceeded:") {
-// return resource.RetryableError(err)
-// }
-// return resource.NonRetryableError(err)
-// }
-
-// return resource.RetryableError(err)
-// })
-
-// if resp == nil {
-// return nil
-// }
-
-// if err == nil {
-// if len(resp.InternetGateways) > 0 {
-// return fmt.Errorf("still exist")
-// }
-// return nil
-// }
-
-// // Verify the error is what we want
-// ec2err, ok := err.(awserr.Error)
-// if !ok {
-// return err
-// }
-// if ec2err.Code() != "InvalidVPC.NotFound" {
-// return err
-// }
-// }
-
-// return nil
-// }
-
const testAccOutscaleOAPILinConfig = `
resource "outscale_net" "vpc" {
ip_range = "10.0.0.0/16"
diff --git a/outscale/resource_outscale_nic.go b/outscale/resource_outscale_nic.go
index abf17607e..7d7bbaff2 100644
--- a/outscale/resource_outscale_nic.go
+++ b/outscale/resource_outscale_nic.go
@@ -3,17 +3,18 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"math"
"strconv"
"strings"
"time"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/openlyinc/pointy"
)
@@ -603,32 +604,6 @@ func resourceOutscaleOAPINicUpdate(d *schema.ResourceData, meta interface{}) err
d.SetPartial("private_ip")
}
- // Missing Sourcedestcheck
- // request := oscgo.UpdateNicRequest{
- // NicId: d.Id(),
- // SourceDestCheck: &fcu.AttributeBooleanValue{Value: aws.Bool(d.Get("is_source_dest_checked").(bool))},
- // }
-
- // _, err := conn.VM.ModifyNetworkInterfaceAttribute(request)
-
- // err := resource.Retry(5*time.Minute, func() *resource.RetryError {
- // var err error
- // _, err = conn.POST_UpdateNic(request)
- // if err != nil {
- // if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- // return resource.RetryableError(err)
- // }
- // return resource.NonRetryableError(err)
- // }
- // return nil
- // })
-
- // if err != nil {
- // return fmt.Errorf("Failure updating ENI: %s", err)
- // }
-
- // d.SetPartial("is_source_dest_checked")
-
if d.HasChange("private_ips_count") {
o, n := d.GetChange("private_ips_count")
pips := d.Get("pips").(*schema.Set).List()
@@ -723,7 +698,7 @@ func resourceOutscaleOAPINicUpdate(d *schema.ResourceData, meta interface{}) err
if d.HasChange("description") {
request := oscgo.UpdateNicRequest{
NicId: d.Id(),
- Description: d.Get("description").(*string),
+ Description: pointy.String(d.Get("description").(string)),
}
var err error
diff --git a/outscale/resource_outscale_nic_link.go b/outscale/resource_outscale_nic_link.go
index 23bb007b7..a54fed3d2 100644
--- a/outscale/resource_outscale_nic_link.go
+++ b/outscale/resource_outscale_nic_link.go
@@ -10,8 +10,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPINetworkInterfaceAttachment() *schema.Resource {
diff --git a/outscale/resource_outscale_nic_link_test.go b/outscale/resource_outscale_nic_link_test.go
index 5d543ac0c..887290a17 100644
--- a/outscale/resource_outscale_nic_link_test.go
+++ b/outscale/resource_outscale_nic_link_test.go
@@ -2,24 +2,21 @@ package outscale
import (
"fmt"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPINetworkInterfaceAttachmentBasic(t *testing.T) {
- //t.Skip()
var conf oscgo.Nic
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_nic.outscale_nic",
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPINICDestroy,
diff --git a/outscale/resource_outscale_nic_private_ip.go b/outscale/resource_outscale_nic_private_ip.go
index 389a69648..2050d299f 100644
--- a/outscale/resource_outscale_nic_private_ip.go
+++ b/outscale/resource_outscale_nic_private_ip.go
@@ -8,8 +8,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPINetworkInterfacePrivateIP() *schema.Resource {
diff --git a/outscale/resource_outscale_nic_private_ip_test.go b/outscale/resource_outscale_nic_private_ip_test.go
index 12c1d602a..6b55f5be5 100644
--- a/outscale/resource_outscale_nic_private_ip_test.go
+++ b/outscale/resource_outscale_nic_private_ip_test.go
@@ -2,11 +2,12 @@ package outscale
import (
"fmt"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPINetworkInterfacePrivateIPBasic(t *testing.T) {
@@ -14,10 +15,7 @@ func TestAccOutscaleOAPINetworkInterfacePrivateIPBasic(t *testing.T) {
var conf oscgo.Nic
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_nic.outscale_nic",
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIENIDestroy,
diff --git a/outscale/resource_outscale_nic_test.go b/outscale/resource_outscale_nic_test.go
index 3128594a2..b0eb28df6 100644
--- a/outscale/resource_outscale_nic_test.go
+++ b/outscale/resource_outscale_nic_test.go
@@ -3,16 +3,17 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"os"
"reflect"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIENI_basic(t *testing.T) {
@@ -20,10 +21,7 @@ func TestAccOutscaleOAPIENI_basic(t *testing.T) {
subregion := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_nic.outscale_nic",
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPINICDestroy,
diff --git a/outscale/resource_outscale_public_ip.go b/outscale/resource_outscale_public_ip.go
index 0264d56f9..11ad05c94 100644
--- a/outscale/resource_outscale_public_ip.go
+++ b/outscale/resource_outscale_public_ip.go
@@ -3,15 +3,17 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+ "github.com/terraform-providers/terraform-provider-outscale/utils"
+
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPIPublicIP() *schema.Resource {
@@ -41,7 +43,7 @@ func resourceOutscaleOAPIPublicIPCreate(d *schema.ResourceData, meta interface{}
log.Printf("[DEBUG] EIP create configuration: %#v", allocOpts)
resp, _, err := conn.PublicIpApi.CreatePublicIp(context.Background(), &oscgo.CreatePublicIpOpts{CreatePublicIpRequest: optional.NewInterface(allocOpts)})
if err != nil {
- return fmt.Errorf("Error creating EIP: %s", err)
+ return fmt.Errorf("error creating EIP: %s", utils.GetErrorResponse(err))
}
allocResp := resp
@@ -80,7 +82,11 @@ func resourceOutscaleOAPIPublicIPRead(d *schema.ResourceData, meta interface{})
return nil
}
- return fmt.Errorf("Error retrieving EIP: %s", err)
+ return fmt.Errorf("Error retrieving EIP: %s", utils.GetErrorResponse(err))
+ }
+
+ if len(response.GetPublicIps()) == 0 {
+ return fmt.Errorf("Error retrieving EIP: not found")
}
if len(response.GetPublicIps()) != 1 ||
@@ -177,7 +183,7 @@ func resourceOutscaleOAPIPublicIPUpdate(d *schema.ResourceData, meta interface{}
if err != nil {
d.Set("vm_id", "")
d.Set("nic_id", "")
- return fmt.Errorf("Failure associating EIP: %s", err)
+ return fmt.Errorf("Failure associating EIP: %s", utils.GetErrorResponse(err))
}
d.Partial(true)
@@ -213,12 +219,14 @@ func resourceOutscaleOAPIPublicIPDelete(d *schema.ResourceData, meta interface{}
var err error
switch resourceOutscaleOAPIPublicIPDomain(d) {
case "vpc":
+ lppiId := d.Get("link_public_ip_id").(string)
_, _, err = conn.PublicIpApi.UnlinkPublicIp(context.Background(), &oscgo.UnlinkPublicIpOpts{UnlinkPublicIpRequest: optional.NewInterface(oscgo.UnlinkPublicIpRequest{
- LinkPublicIpId: d.Get("link_public_ip_id").(*string),
+ LinkPublicIpId: &lppiId,
})})
case "standard":
+ pIP := d.Get("public_ip").(string)
_, _, err = conn.PublicIpApi.UnlinkPublicIp(context.Background(), &oscgo.UnlinkPublicIpOpts{UnlinkPublicIpRequest: optional.NewInterface(oscgo.UnlinkPublicIpRequest{
- PublicIp: d.Get("public_ip").(*string),
+ PublicIp: &pIP,
})})
}
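
Editor's note on the UnlinkPublicIp change above: `d.Get(...)` returns a plain string boxed in an interface, never a `*string`, so the old assertion could only panic; the fix copies the value into a local and passes its address. A minimal, self-contained sketch of that pattern, with a stand-in request type (only the field names come from the hunk, everything else is illustrative):

package main

import "fmt"

// UnlinkPublicIpRequest loosely mirrors the request shape used above.
type UnlinkPublicIpRequest struct {
	LinkPublicIpId *string
	PublicIp       *string
}

func main() {
	// Simulates d.Get("link_public_ip_id"): an interface{} whose dynamic
	// type is string, never *string.
	var raw interface{} = "eipassoc-12345678"

	// raw.(*string) would panic; assert to string, copy into a local,
	// then take the local's address, which is the pattern applied here.
	lppiID := raw.(string)
	req := UnlinkPublicIpRequest{LinkPublicIpId: &lppiID}

	fmt.Println(*req.LinkPublicIpId)
}
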
diff --git a/outscale/resource_outscale_public_ip_link.go b/outscale/resource_outscale_public_ip_link.go
index b29ad108c..4ad4b5dc9 100644
--- a/outscale/resource_outscale_public_ip_link.go
+++ b/outscale/resource_outscale_public_ip_link.go
@@ -9,8 +9,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPIPublicIPLink() *schema.Resource {
diff --git a/outscale/resource_outscale_public_ip_link_test.go b/outscale/resource_outscale_public_ip_link_test.go
index c1d2fd726..cda956e93 100644
--- a/outscale/resource_outscale_public_ip_link_test.go
+++ b/outscale/resource_outscale_public_ip_link_test.go
@@ -3,28 +3,26 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"os"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIPublicIPLink_basic(t *testing.T) {
var a oscgo.PublicIp
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIPublicIPLinkDestroy,
Steps: []resource.TestStep{
@@ -176,16 +174,6 @@ func testAccCheckOutscaleOAPIPublicIPLExists(n string, res *oscgo.PublicIp) reso
return err
}
- if err != nil {
-
- // Verify the error is what we want
- if e := fmt.Sprint(err); strings.Contains(e, "InvalidAllocationID.NotFound") || strings.Contains(e, "InvalidAddress.NotFound") {
- return nil
- }
-
- return err
- }
-
if len(response.GetPublicIps()) != 1 ||
response.GetPublicIps()[0].GetPublicIp() != rs.Primary.ID {
return fmt.Errorf("PublicIP not found")
diff --git a/outscale/resource_outscale_public_ip_test.go b/outscale/resource_outscale_public_ip_test.go
index 7f077a6bc..2626af004 100644
--- a/outscale/resource_outscale_public_ip_test.go
+++ b/outscale/resource_outscale_public_ip_test.go
@@ -3,25 +3,23 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"os"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIPublicIP_basic(t *testing.T) {
var conf oscgo.PublicIp
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_public_ip.bar",
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIPublicIPDestroy,
@@ -39,15 +37,12 @@ func TestAccOutscaleOAPIPublicIP_basic(t *testing.T) {
func TestAccOutscaleOAPIPublicIP_instance(t *testing.T) {
var conf oscgo.PublicIp
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
//rInt := acctest.RandInt()
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_public_ip.bar",
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIPublicIPDestroy,
@@ -75,14 +70,11 @@ func TestAccOutscaleOAPIPublicIP_instance(t *testing.T) {
// // associated Private PublicIPs of two instances
func TestAccOutscaleOAPIPublicIP_associated_user_private_ip(t *testing.T) {
var one oscgo.PublicIp
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_public_ip.bar",
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIPublicIPDestroy,
@@ -246,16 +238,6 @@ func testAccCheckOutscaleOAPIPublicIPExists(n string, res *oscgo.PublicIp) resou
return err
}
- if err != nil {
-
- // Verify the error is what we want
- if e := fmt.Sprint(err); strings.Contains(e, "InvalidAllocationID.NotFound") || strings.Contains(e, "InvalidPublicIps.NotFound") {
- return nil
- }
-
- return err
- }
-
if len(response.GetPublicIps()) != 1 ||
response.GetPublicIps()[0].GetPublicIp() != rs.Primary.ID {
return fmt.Errorf("PublicIP not found")
diff --git a/outscale/resource_outscale_reserved_vms_offer_purchase_test.go b/outscale/resource_outscale_reserved_vms_offer_purchase_test.go
deleted file mode 100644
index fd6a5b69b..000000000
--- a/outscale/resource_outscale_reserved_vms_offer_purchase_test.go
+++ /dev/null
@@ -1,86 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleOAPIReservedVmsOfferPurchase_basic(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- {
- Config: testAccOutscaleOAPIReservedVmsOfferPurchaseEgressConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleOAPIReservedVmsOfferPurchaseExists("outscale_reserved_vms_offer_purchase.test"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleOAPIReservedVmsOfferPurchaseExists(n string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No Security Group is set")
- }
-
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
-
- req := &fcu.DescribeReservedInstancesOfferingsInput{
- ReservedInstancesOfferingIds: []*string{aws.String(rs.Primary.ID)},
- }
-
- var resp *fcu.DescribeReservedInstancesOfferingsOutput
- var err error
- err = resource.Retry(120*time.Second, func() *resource.RetryError {
- resp, err = conn.VM.DescribeReservedInstancesOfferings(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.RetryableError(err)
- })
- if err != nil {
- log.Printf("[DEBUG] Error reading lin (%s)", err)
- }
- if err != nil {
- return err
- }
-
- if len(resp.ReservedInstancesOfferingsSet) > 0 && *resp.ReservedInstancesOfferingsSet[0].ReservedInstancesOfferingId == rs.Primary.ID {
- return nil
- }
-
- return fmt.Errorf("Security Group not found")
- }
-}
-
-const testAccOutscaleOAPIReservedVmsOfferPurchaseEgressConfig = `
- resource "outscale_reserved_vms_offer_purchase" "test" {
- instance_count = 1
- reserved_vm_offering_id = ""
- }
- `
diff --git a/outscale/resource_outscale_route.go b/outscale/resource_outscale_route.go
index e464d074b..f582eb25e 100644
--- a/outscale/resource_outscale_route.go
+++ b/outscale/resource_outscale_route.go
@@ -4,16 +4,19 @@ import (
"context"
"errors"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"reflect"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/hashcode"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+ "github.com/openlyinc/pointy"
+ "github.com/terraform-providers/terraform-provider-outscale/utils"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
var errOAPIRoute = errors.New("Error: more than 1 target specified. Only 1 of gateway_id, " +
@@ -45,6 +48,7 @@ func resourceOutscaleOAPIRoute() *schema.Resource {
"destination_ip_range": {
Type: schema.TypeString,
Required: true,
+ ForceNew: true,
},
"destination_service_id": {
Type: schema.TypeString,
@@ -53,12 +57,10 @@ func resourceOutscaleOAPIRoute() *schema.Resource {
"gateway_id": {
Type: schema.TypeString,
Optional: true,
- Computed: true,
},
"nat_service_id": {
Type: schema.TypeString,
Optional: true,
- Computed: true,
},
"nat_access_point": {
Type: schema.TypeString,
@@ -67,7 +69,6 @@ func resourceOutscaleOAPIRoute() *schema.Resource {
"net_peering_id": {
Type: schema.TypeString,
Optional: true,
- Computed: true,
},
"nic_id": {
Type: schema.TypeString,
@@ -85,7 +86,6 @@ func resourceOutscaleOAPIRoute() *schema.Resource {
"vm_id": {
Type: schema.TypeString,
Optional: true,
- Computed: true,
},
"route_table_id": {
Type: schema.TypeString,
@@ -253,31 +253,31 @@ func resourceOutscaleOAPIRouteUpdate(d *schema.ResourceData, meta interface{}) e
replaceOpts = &oscgo.UpdateRouteRequest{
RouteTableId: d.Get("route_table_id").(string),
DestinationIpRange: d.Get("destination_ip_range").(string),
- GatewayId: d.Get("gateway_id").(*string),
+ GatewayId: pointy.String(d.Get("gateway_id").(string)),
}
case "nat_service_id":
replaceOpts = &oscgo.UpdateRouteRequest{
RouteTableId: d.Get("route_table_id").(string),
DestinationIpRange: d.Get("destination_ip_range").(string),
- GatewayId: d.Get("nat_service_id").(*string),
+ GatewayId: pointy.String(d.Get("nat_service_id").(string)),
}
case "vm_id":
replaceOpts = &oscgo.UpdateRouteRequest{
RouteTableId: d.Get("route_table_id").(string),
DestinationIpRange: d.Get("destination_ip_range").(string),
- VmId: d.Get("vm_id").(*string),
+ VmId: pointy.String(d.Get("vm_id").(string)),
}
case "nic_id":
replaceOpts = &oscgo.UpdateRouteRequest{
RouteTableId: d.Get("route_table_id").(string),
DestinationIpRange: d.Get("destination_ip_range").(string),
- NicId: d.Get("nic_id").(*string),
+ NicId: pointy.String(d.Get("nic_id").(string)),
}
case "net_peering_id":
replaceOpts = &oscgo.UpdateRouteRequest{
RouteTableId: d.Get("route_table_id").(string),
DestinationIpRange: d.Get("destination_ip_range").(string),
- NetPeeringId: d.Get("net_peering_id").(*string),
+ NetPeeringId: pointy.String(d.Get("net_peering_id").(string)),
}
default:
return fmt.Errorf("An invalid target type specified: %s", target)
@@ -286,7 +286,7 @@ func resourceOutscaleOAPIRouteUpdate(d *schema.ResourceData, meta interface{}) e
var err error
err = resource.Retry(2*time.Minute, func() *resource.RetryError {
- _, _, err = conn.RouteApi.UpdateRoute(context.Background(), &oscgo.UpdateRouteOpts{UpdateRouteRequest: optional.NewInterface(replaceOpts)})
+ _, _, err = conn.RouteApi.UpdateRoute(context.Background(), &oscgo.UpdateRouteOpts{UpdateRouteRequest: optional.NewInterface(*replaceOpts)})
if err != nil {
if strings.Contains(fmt.Sprint(err), "InvalidParameterException") {
@@ -300,7 +300,7 @@ func resourceOutscaleOAPIRouteUpdate(d *schema.ResourceData, meta interface{}) e
return nil
})
if err != nil {
- return err
+ return fmt.Errorf("error updating route: %s", utils.GetErrorResponse(err))
}
return nil
@@ -315,10 +315,8 @@ func resourceOutscaleOAPIRouteDelete(d *schema.ResourceData, meta interface{}) e
if v, ok := d.GetOk("destination_ip_range"); ok {
deleteOpts.SetDestinationIpRange(v.(string))
}
- log.Printf("[DEBUG] Route delete opts: %+v", deleteOpts)
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
+ err := resource.Retry(5*time.Minute, func() *resource.RetryError {
log.Printf("[DEBUG] Trying to delete route with opts %+v", deleteOpts)
resp, _, err := conn.RouteApi.DeleteRoute(context.Background(), &oscgo.DeleteRouteOpts{DeleteRouteRequest: optional.NewInterface(deleteOpts)})
log.Printf("[DEBUG] Route delete result: %+v", resp)
diff --git a/outscale/resource_outscale_route_table.go b/outscale/resource_outscale_route_table.go
index 67ddb18ff..3d5c521f7 100644
--- a/outscale/resource_outscale_route_table.go
+++ b/outscale/resource_outscale_route_table.go
@@ -4,15 +4,14 @@ import (
"context"
"fmt"
"log"
- "sort"
"strings"
"time"
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPIRouteTable() *schema.Resource {
@@ -235,13 +234,14 @@ func readOAPIRouteTable(conn *oscgo.APIClient, routeTableID string, linkIds ...s
//Fix for OAPI issue when passing routeTableIds and routeTableLinkIds
rts := resp.GetRouteTables()[0].GetLinkRouteTables()
+
if len(linkIds) > 0 {
for _, linkID := range linkIds {
- i := sort.Search(len(rts), func(i int) bool { return rts[i].GetLinkRouteTableId() == linkID })
- if len(rts) > 0 && rts[i].GetLinkRouteTableId() == linkID {
- return resp.GetRouteTables()[0], resp.ResponseContext.GetRequestId(), err
+ for _, rt := range rts {
+ if rt.GetLinkRouteTableId() == linkID {
+ return resp.GetRouteTables()[0], resp.ResponseContext.GetRequestId(), err
+ }
}
-
}
return nil, resp.ResponseContext.GetRequestId(), fmt.Errorf("Error getting route table: LinkRouteTables didn't match with provided (%+v)", linkIds)
}
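
Editor's note: the lookup above drops sort.Search, which presumes a sorted slice with a monotone predicate and returns len(s) when nothing matches (so indexing its result can panic), in favour of a plain linear scan over the link list. A small self-contained sketch of the replacement logic, with stand-in types:

package main

import "fmt"

type LinkRouteTable struct{ LinkRouteTableId string }

// findLink is a linear equality search; no ordering assumption needed.
func findLink(links []LinkRouteTable, linkID string) (LinkRouteTable, bool) {
	for _, l := range links {
		if l.LinkRouteTableId == linkID {
			return l, true
		}
	}
	return LinkRouteTable{}, false
}

func main() {
	links := []LinkRouteTable{{"rtbassoc-bbb"}, {"rtbassoc-aaa"}}
	if l, ok := findLink(links, "rtbassoc-aaa"); ok {
		fmt.Println("found", l.LinkRouteTableId)
	}
}
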
diff --git a/outscale/resource_outscale_route_table_link.go b/outscale/resource_outscale_route_table_link.go
index 21329dad8..6553322ba 100644
--- a/outscale/resource_outscale_route_table_link.go
+++ b/outscale/resource_outscale_route_table_link.go
@@ -9,8 +9,8 @@ import (
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPILinkRouteTable() *schema.Resource {
diff --git a/outscale/resource_outscale_route_table_link_test.go b/outscale/resource_outscale_route_table_link_test.go
index dd3217f7e..d942d28fa 100644
--- a/outscale/resource_outscale_route_table_link_test.go
+++ b/outscale/resource_outscale_route_table_link_test.go
@@ -3,24 +3,22 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPILinkRouteTable_basic(t *testing.T) {
var v oscgo.RouteTable
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOAPILinkRouteTableDestroy,
Steps: []resource.TestStep{
diff --git a/outscale/resource_outscale_route_table_test.go b/outscale/resource_outscale_route_table_test.go
index 51a9a637e..c59184ad3 100644
--- a/outscale/resource_outscale_route_table_test.go
+++ b/outscale/resource_outscale_route_table_test.go
@@ -3,16 +3,17 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"os"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIRouteTable_basic(t *testing.T) {
@@ -51,10 +52,7 @@ func TestAccOutscaleOAPIRouteTable_basic(t *testing.T) {
}
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_route_table.foo",
Providers: testAccProviders,
CheckDestroy: testAccCheckOAPIRouteTableDestroy,
@@ -62,7 +60,7 @@ func TestAccOutscaleOAPIRouteTable_basic(t *testing.T) {
{
Config: testAccOAPIRouteTableConfig,
Check: resource.ComposeTestCheckFunc(
- testAccCheckOAPIRouteTableExists("outscale_route_table.foo", &v),
+ testAccCheckOAPIRouteTableExists("outscale_route_table.foo", &v, nil),
testCheck,
),
},
@@ -70,7 +68,7 @@ func TestAccOutscaleOAPIRouteTable_basic(t *testing.T) {
{
Config: testAccOAPIRouteTableConfigChange,
Check: resource.ComposeTestCheckFunc(
- testAccCheckOAPIRouteTableExists("outscale_route_table.foo", &v),
+ testAccCheckOAPIRouteTableExists("outscale_route_table.foo", &v, nil),
testCheckChange,
),
},
@@ -79,7 +77,7 @@ func TestAccOutscaleOAPIRouteTable_basic(t *testing.T) {
}
func TestAccOutscaleOAPIRouteTable_instance(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
var v oscgo.RouteTable
@@ -101,10 +99,7 @@ func TestAccOutscaleOAPIRouteTable_instance(t *testing.T) {
}
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_route_table.foo",
Providers: testAccProviders,
CheckDestroy: testAccCheckOAPIRouteTableDestroy,
@@ -113,7 +108,7 @@ func TestAccOutscaleOAPIRouteTable_instance(t *testing.T) {
Config: testAccOAPIRouteTableConfigInstance(omi, "c4.large", region),
Check: resource.ComposeTestCheckFunc(
testAccCheckOAPIRouteTableExists(
- "outscale_route_table.foo", &v),
+ "outscale_route_table.foo", &v, nil),
testCheck,
),
},
@@ -122,8 +117,6 @@ func TestAccOutscaleOAPIRouteTable_instance(t *testing.T) {
}
func TestAccOutscaleOAPIRouteTable_tags(t *testing.T) {
- t.Skip()
-
value1 := `
tags {
key = "name"
@@ -141,10 +134,10 @@ func TestAccOutscaleOAPIRouteTable_tags(t *testing.T) {
}`
var rt oscgo.RouteTable
+ rtTags := make([]oscgo.ResourceTag, 0)
resource.Test(t, resource.TestCase{
PreCheck: func() {
- //skipIfNoOAPI(t)
testAccPreCheck(t)
},
Providers: testAccProviders,
@@ -153,15 +146,17 @@ func TestAccOutscaleOAPIRouteTable_tags(t *testing.T) {
{
Config: testAccOAPIRouteTableConfigTags(value1),
Check: resource.ComposeTestCheckFunc(
- testAccCheckOAPIRouteTableExists("outscale_route_table.foo", &rt),
- testAccCheckOAPITags(rt.GetTags(), "name", "Terraform-RT"),
+ testAccCheckOAPIRouteTableExists("outscale_route_table.foo", &rt, &rtTags),
+
+ testAccCheckOAPITags(&rtTags, "name", "Terraform-nic"),
),
},
{
Config: testAccOAPIRouteTableConfigTags(value2),
Check: resource.ComposeTestCheckFunc(
- testAccCheckOAPIRouteTableExists("outscale_route_table.foo", &rt),
- testAccCheckOAPITags(rt.GetTags(), "name", "Terraform-RT"),
+ testAccCheckOAPIRouteTableExists("outscale_route_table.foo", &rt, &rtTags),
+ testAccCheckOAPITags(&rtTags, "name", "Terraform-RT"),
+ testAccCheckOAPITags(&rtTags, "name2", "Terraform-RT2"),
),
},
},
@@ -214,7 +209,7 @@ func testAccCheckOAPIRouteTableDestroy(s *terraform.State) error {
return nil
}
-func testAccCheckOAPIRouteTableExists(n string, v *oscgo.RouteTable) resource.TestCheckFunc {
+func testAccCheckOAPIRouteTableExists(n string, v *oscgo.RouteTable, t *[]oscgo.ResourceTag) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
if !ok {
@@ -257,6 +252,11 @@ func testAccCheckOAPIRouteTableExists(n string, v *oscgo.RouteTable) resource.Te
*v = resp.GetRouteTables()[0]
+ if t != nil {
+ *t = resp.GetRouteTables()[0].GetTags()
+ log.Printf("[DEBUG] Route Table Tags= %+v", t)
+ }
+
log.Printf("[DEBUG] RouteTable in Exist %+v", resp.GetRouteTables())
return nil
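
Editor's note: testAccCheckOAPIRouteTableExists now accepts an optional tag out-parameter so later steps can assert against the tags captured during the exists check. A minimal sketch of that out-parameter pattern, with stand-in types in place of the SDK ones:

package main

import "fmt"

type ResourceTag struct{ Key, Value string }

type RouteTable struct {
	Tags []ResourceTag
}

// captureTags copies the freshly read tags into the caller's slice only
// when a non-nil pointer is supplied; callers passing nil skip the capture.
func captureTags(rt RouteTable, out *[]ResourceTag) {
	if out != nil {
		*out = rt.Tags
	}
}

func main() {
	read := RouteTable{Tags: []ResourceTag{{Key: "name", Value: "Terraform-RT"}}}
	var got []ResourceTag
	captureTags(read, &got) // later checks can now assert against got
	fmt.Printf("%+v\n", got)
}
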
diff --git a/outscale/resource_outscale_route_test.go b/outscale/resource_outscale_route_test.go
index e5ab375e0..12f461808 100644
--- a/outscale/resource_outscale_route_test.go
+++ b/outscale/resource_outscale_route_test.go
@@ -2,11 +2,12 @@ package outscale
import (
"fmt"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIRoute_noopdiff(t *testing.T) {
@@ -23,7 +24,7 @@ func TestAccOutscaleOAPIRoute_noopdiff(t *testing.T) {
resource.Test(t, resource.TestCase{
PreCheck: func() {
testAccPreCheck(t)
- skipIfNoOAPI(t)
+
},
Providers: testAccProviders,
CheckDestroy: testAccCheckOAPIOutscaleRouteDestroy,
diff --git a/outscale/resource_outscale_security_group.go b/outscale/resource_outscale_security_group.go
index 23ce9d5ff..828f0511a 100644
--- a/outscale/resource_outscale_security_group.go
+++ b/outscale/resource_outscale_security_group.go
@@ -10,8 +10,8 @@ import (
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPISecurityGroup() *schema.Resource {
diff --git a/outscale/resource_outscale_security_group_rule.go b/outscale/resource_outscale_security_group_rule.go
index a1e9ecace..9659c15b7 100644
--- a/outscale/resource_outscale_security_group_rule.go
+++ b/outscale/resource_outscale_security_group_rule.go
@@ -4,25 +4,58 @@ import (
"bytes"
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"sort"
+ "strconv"
"strings"
"time"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/hashcode"
- "github.com/hashicorp/terraform/helper/mutexkv"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/hashicorp/terraform/helper/validation"
- "github.com/outscale/osc-go/oapi"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/mutexkv"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)
const OAPI_INBOUND_RULE = "Inbound"
const OAPI_OUTBOUND_RULE = "Outbound"
+func sgProtocolIntegers() map[string]int {
+ return map[string]int{
+ "udp": 17,
+ "tcp": 6,
+ "icmp": 1,
+ "all": -1,
+ }
+}
+
+func protocolForValue(v string) string {
+ protocol := strings.ToLower(v)
+ if protocol == "-1" || protocol == "all" {
+ return "-1"
+ }
+ if _, ok := sgProtocolIntegers()[protocol]; ok {
+ return protocol
+ }
+ p, err := strconv.Atoi(protocol)
+ if err != nil {
+ fmt.Printf("\n\n[WARN] Unable to determine valid protocol: %s", err)
+ return protocol
+ }
+
+ for k, v := range sgProtocolIntegers() {
+ if p == v {
+ return strings.ToLower(k)
+ }
+ }
+
+ return protocol
+}
+
func resourceOutscaleOAPIOutboundRule() *schema.Resource {
return &schema.Resource{
Create: resourceOutscaleOAPIOutboundRuleCreate,
@@ -651,153 +684,6 @@ func validateOAPISecurityGroupRule(ippems []interface{}) error {
return nil
}
-func ipOAPIPermissionIDHash(ruleType, sgID string, ips []oapi.SecurityGroupRule) string {
- var buf bytes.Buffer
- buf.WriteString(fmt.Sprintf("%s-", sgID))
-
- for _, ip := range ips {
- if ip.FromPortRange > 0 {
- buf.WriteString(fmt.Sprintf("%d-", ip.FromPortRange))
- }
- if ip.ToPortRange > 0 {
- buf.WriteString(fmt.Sprintf("%d-", ip.ToPortRange))
- }
- buf.WriteString(fmt.Sprintf("%s-", ip.IpProtocol))
- buf.WriteString(fmt.Sprintf("%s-", ruleType))
-
- // We need to make sure to sort the strings below so that we always
- // generate the same hash code no matter what is in the set.
- if len(ip.IpRanges) > 0 {
- s := make([]string, len(ip.IpRanges))
- copy(s, ip.IpRanges)
- sort.Strings(s)
-
- for _, v := range s {
- buf.WriteString(fmt.Sprintf("%s-", v))
- }
- }
-
- if len(ip.PrefixListIds) > 0 {
- s := make([]string, len(ip.PrefixListIds))
- copy(s, ip.PrefixListIds)
- sort.Strings(s)
-
- for _, v := range s {
- buf.WriteString(fmt.Sprintf("%s-", v))
- }
- }
-
- if len(ip.SecurityGroupsMembers) > 0 {
- //sort.Sort(ByGroupsMember(ip.SecurityGroupsMembers))
- for _, pair := range ip.SecurityGroupsMembers {
- if pair.SecurityGroupId != "" {
- buf.WriteString(fmt.Sprintf("%s-", pair.SecurityGroupId))
- } else {
- buf.WriteString("-")
- }
- if pair.SecurityGroupName != "" {
- buf.WriteString(fmt.Sprintf("%s-", pair.SecurityGroupName))
- } else {
- buf.WriteString("-")
- }
- }
- }
- }
-
- return fmt.Sprintf("sgrule-%d", hashcode.String(buf.String()))
-}
-
-func findOAPIRuleMatch(p []oapi.SecurityGroupRule, rules []oapi.SecurityGroupRule) []oapi.SecurityGroupRule {
- var rule = make([]oapi.SecurityGroupRule, 0)
- //fmt.Printf("Rules (from config) -> %+v\n", p)
- //fmt.Printf("Rules (from service) -> %+v\n", rules)
- for _, i := range p {
- for _, r := range rules {
-
- //fmt.Printf("Rule (from config) -> %+v\nRule (from service) -> %+v\n", i, r)
- if i.ToPortRange != r.ToPortRange {
- continue
- }
-
- if i.FromPortRange != r.FromPortRange {
- continue
- }
-
- if i.IpProtocol != r.IpProtocol {
- continue
- }
-
- remaining := len(i.IpRanges)
- for _, ip := range i.IpRanges {
- for _, rip := range r.IpRanges {
- if ip == rip {
- remaining--
- }
- }
- }
-
- if remaining > 0 {
- continue
- }
-
- remaining = len(i.PrefixListIds)
- for _, pl := range i.PrefixListIds {
- for _, rpl := range r.PrefixListIds {
- if pl == rpl {
- remaining--
- }
- }
- }
-
- if remaining > 0 {
- continue
- }
-
- remaining = len(i.SecurityGroupsMembers)
- for _, ip := range i.SecurityGroupsMembers {
- for _, rip := range r.SecurityGroupsMembers {
- if ip.SecurityGroupId == rip.SecurityGroupId {
- remaining--
- }
- }
- }
-
- if remaining > 0 {
- continue
- }
-
- rule = append(rule, r)
- }
- }
- return rule
-}
-
-func setOAPIFromIPPerm(d *schema.ResourceData, sg *oapi.SecurityGroup, rules []oapi.SecurityGroupRule) ([]map[string]interface{}, error) {
- ips := make([]map[string]interface{}, len(rules))
-
- for k, rule := range rules {
- ip := make(map[string]interface{})
-
- ip["from_port_range"] = rule.FromPortRange
- ip["to_port_range"] = rule.ToPortRange
- ip["ip_protocol"] = rule.IpProtocol
- ip["ip_ranges"] = rule.IpRanges
- ip["service_ids"] = rule.PrefixListIds
-
- if len(rule.SecurityGroupsMembers) > 0 {
- s := rule.SecurityGroupsMembers[0]
-
- d.Set("account_id", s.AccountId)
- d.Set("security_group_id", s.SecurityGroupId)
- d.Set("security_group_name", s.SecurityGroupName)
- }
-
- ips[k] = ip
- }
-
- return ips, nil
-}
-
func ipOSCAPIPermissionIDHash(ruleType, sgID string, ips []oscgo.SecurityGroupRule) string {
var buf bytes.Buffer
buf.WriteString(fmt.Sprintf("%s-", sgID))
@@ -951,7 +837,7 @@ type oapiSecurityGroupNotFound struct {
}
func (err oapiSecurityGroupNotFound) Error() string {
- if err.securityGroups == nil {
+ if len(err.securityGroups) == 0 {
return fmt.Sprintf("No security group with ID %q", err.id)
}
return fmt.Sprintf("Expected to find one security group with ID %q, got: %#v",
diff --git a/outscale/resource_outscale_security_group_rule_test.go b/outscale/resource_outscale_security_group_rule_test.go
index adc1f0ee9..fd6990bb9 100644
--- a/outscale/resource_outscale_security_group_rule_test.go
+++ b/outscale/resource_outscale_security_group_rule_test.go
@@ -9,11 +9,11 @@ import (
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/outscale/osc-go/oapi"
+ "github.com/terraform-providers/terraform-provider-outscale/utils"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIOutboundRule(t *testing.T) {
@@ -21,10 +21,7 @@ func TestAccOutscaleOAPIOutboundRule(t *testing.T) {
rInt := acctest.RandInt()
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPISecurityGroupRuleDestroy,
Steps: []resource.TestStep{
@@ -40,47 +37,27 @@ func TestAccOutscaleOAPIOutboundRule(t *testing.T) {
}
func testAccCheckOutscaleOAPISecurityGroupRuleDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).OAPI
+ conn := testAccProvider.Meta().(*OutscaleClient).OSCAPI
for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_firewall_rules_set" {
+ if rs.Type != "outscale_security_group_rule" {
continue
}
- // Retrieve our group
- req := oapi.ReadSecurityGroupsRequest{
- Filters: oapi.FiltersSecurityGroup{
- SecurityGroupIds: []string{rs.Primary.ID},
- },
- }
- var resp *oapi.POST_ReadSecurityGroupsResponses
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.POST_ReadSecurityGroups(req)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- fmt.Printf("\n\n[INFO] Request limit exceeded")
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
+ resp, _, err := findOSCAPIResourceSecurityGroup(conn, rs.Primary.ID)
- return nil
- })
if err == nil {
- if len(resp.OK.SecurityGroups) > 0 && resp.OK.SecurityGroups[0].SecurityGroupId == rs.Primary.ID {
+ if *resp.SecurityGroupId == rs.Primary.ID {
return fmt.Errorf("Security Group (%s) still exists", rs.Primary.ID)
}
-
return nil
}
- if strings.Contains(fmt.Sprint(err), "InvalidGroup.NotFound") {
+ if strings.Contains(fmt.Sprint(err), "No security group with ID") {
return nil
}
- return err
+ return utils.GetErrorResponse(err)
}
return nil
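
Editor's note: the rewritten CheckDestroy above replaces the hand-rolled retry around ReadSecurityGroups with the provider's own finder and treats its "No security group with ID" error as proof of destruction. A minimal sketch of that shape, with a stand-in state record and a stand-in finder (hypothetical names, not the real test helpers):

package main

import (
	"fmt"
	"strings"
)

type resourceState struct {
	Type string
	ID   string
}

// findSecurityGroup stands in for findOSCAPIResourceSecurityGroup; here it
// always reports "not found", as a destroyed group would.
func findSecurityGroup(id string) (string, error) {
	return "", fmt.Errorf("No security group with ID %q", id)
}

func checkDestroy(states []resourceState) error {
	for _, rs := range states {
		if rs.Type != "outscale_security_group_rule" {
			continue
		}
		gotID, err := findSecurityGroup(rs.ID)
		if err == nil {
			if gotID == rs.ID {
				// A successful read of the same ID means the group leaked.
				return fmt.Errorf("Security Group (%s) still exists", rs.ID)
			}
			return nil
		}
		if strings.Contains(err.Error(), "No security group with ID") {
			// "Not found" is exactly what a destroy check wants to see.
			return nil
		}
		return err
	}
	return nil
}

func main() {
	err := checkDestroy([]resourceState{{Type: "outscale_security_group_rule", ID: "sg-12345678"}})
	fmt.Println("destroy verified:", err == nil)
}
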
diff --git a/outscale/resource_outscale_security_group_test.go b/outscale/resource_outscale_security_group_test.go
index bfcc1e582..e7907e5c9 100644
--- a/outscale/resource_outscale_security_group_test.go
+++ b/outscale/resource_outscale_security_group_test.go
@@ -3,15 +3,16 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/acctest"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPISecurityGroup(t *testing.T) {
@@ -19,10 +20,7 @@ func TestAccOutscaleOAPISecurityGroup(t *testing.T) {
rInt := acctest.RandInt()
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPISGRuleDestroy,
Steps: []resource.TestStep{
diff --git a/outscale/resource_outscale_snapshot.go b/outscale/resource_outscale_snapshot.go
index a30d87b29..cc81ed40d 100644
--- a/outscale/resource_outscale_snapshot.go
+++ b/outscale/resource_outscale_snapshot.go
@@ -3,15 +3,16 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPISnapshot() *schema.Resource {
@@ -250,11 +251,9 @@ func resourceOutscaleOAPISnapshotDelete(d *schema.ResourceData, meta interface{}
conn := meta.(*OutscaleClient).OSCAPI
return resource.Retry(5*time.Minute, func() *resource.RetryError {
- request := oscgo.DeleteSnapshotRequest{
- SnapshotId: d.Id(),
- }
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
+ request := oscgo.DeleteSnapshotRequest{SnapshotId: d.Id()}
+
+ err := resource.Retry(5*time.Minute, func() *resource.RetryError {
_, _, err := conn.SnapshotApi.DeleteSnapshot(context.Background(), &oscgo.DeleteSnapshotOpts{DeleteSnapshotRequest: optional.NewInterface(request)})
if err != nil {
diff --git a/outscale/resource_outscale_snapshot_attributes.go b/outscale/resource_outscale_snapshot_attributes.go
index 2dbfc70b4..7c10e54e8 100644
--- a/outscale/resource_outscale_snapshot_attributes.go
+++ b/outscale/resource_outscale_snapshot_attributes.go
@@ -3,14 +3,15 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"log"
"strings"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourcedOutscaleOAPISnapshotAttributes() *schema.Resource {
@@ -74,14 +75,6 @@ func resourcedOutscaleOAPISnapshotAttributes() *schema.Resource {
}
}
-func expandAccountIds(param interface{}) []string {
- var values []string
- for _, v := range param.([]interface{}) {
- values = append(values, v.(string))
- }
- return values
-}
-
func resourcedOutscaleOAPISnapshotAttributesCreate(d *schema.ResourceData, meta interface{}) error {
conn := meta.(*OutscaleClient).OSCAPI
diff --git a/outscale/resource_outscale_snapshot_attributes_test.go b/outscale/resource_outscale_snapshot_attributes_test.go
index bd4bda57d..c1853ab6f 100644
--- a/outscale/resource_outscale_snapshot_attributes_test.go
+++ b/outscale/resource_outscale_snapshot_attributes_test.go
@@ -5,29 +5,26 @@ import (
"os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestAccOutscaleOAPISnapshotAttributes_Basic(t *testing.T) {
- //t.Skip()
+ t.Skip()
var snapshotID string
accountID := os.Getenv("OUTSCALE_ACCOUNT")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
resource.TestStep{
- Config: testAccOutscaleOAPISnapshotAttributesAdditionsConfig(true, accountID),
+ Config: testAccOutscaleOAPISnapshotAttributesConfig(true, false, accountID),
Check: resource.ComposeTestCheckFunc(
testCheckResourceGetAttr("outscale_snapshot.test", "id", &snapshotID),
),
},
resource.TestStep{
- Config: testAccOutscaleOAPISnapshotAttributesRemovalsConfig(true, accountID),
+ Config: testAccOutscaleOAPISnapshotAttributesConfig(true, true, accountID),
Check: resource.ComposeTestCheckFunc(
testCheckResourceGetAttr("outscale_snapshot.test", "id", &snapshotID),
),
@@ -36,8 +33,8 @@ func TestAccOutscaleOAPISnapshotAttributes_Basic(t *testing.T) {
})
}
-func testAccOutscaleOAPISnapshotAttributesAdditionsConfig(includeCreateVolumePermission bool, aid string) string {
- return fmt.Sprintf(`
+func testAccOutscaleOAPISnapshotAttributesConfig(includeAddition, includeRemoval bool, aid string) string {
+ base := fmt.Sprintf(`
resource "outscale_volume" "description_test" {
subregion_name = "eu-west-2a"
size = 1
@@ -51,31 +48,34 @@ func testAccOutscaleOAPISnapshotAttributesAdditionsConfig(includeCreateVolumePer
resource "outscale_snapshot_attributes" "self-test" {
snapshot_id = "${outscale_snapshot.test.id}"
- permissions_to_create_volume_additions {
+ permissions_to_create_volume_removals {
account_ids = ["%s"]
}
}
`, aid)
-}
-func testAccOutscaleOAPISnapshotAttributesRemovalsConfig(includeCreateVolumePermission bool, aid string) string {
- return fmt.Sprintf(`
- resource "outscale_volume" "description_test" {
- subregion_name = "eu-west-2a"
- size = 1
- }
-
- resource "outscale_snapshot" "test" {
- volume_id = "${outscale_volume.description_test.id}"
- description = "Snapshot Acceptance Test"
- }
-
- resource "outscale_snapshot_attributes" "self-test" {
+ if includeAddition {
+ return base + fmt.Sprintf(`
+ resource "outscale_snapshot_attributes" "additions" {
+ snapshot_id = "${outscale_snapshot.test.id}"
+
+ permissions_to_create_volume_additions {
+ account_ids = ["%s"]
+ }
+ }
+ `, aid)
+ }
+
+ if includeRemoval {
+ return base + fmt.Sprintf(`
+ resource "outscale_snapshot_attributes" "removals" {
snapshot_id = "${outscale_snapshot.test.id}"
permissions_to_create_volume_removals {
account_ids = ["%s"]
}
}
- `, aid)
+ `, aid)
+ }
+ return base
}
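
Editor's note: the two near-identical fixture builders above collapse into one parameterized function that appends an additions or removals block to a shared base config. A compact sketch of that builder pattern; the HCL is shortened and purely illustrative:

package main

import "fmt"

// snapshotAttributesConfig returns the shared base fixture plus, on demand,
// an additions or removals block, instead of duplicating the whole config.
func snapshotAttributesConfig(includeAddition, includeRemoval bool, accountID string) string {
	base := `
resource "outscale_snapshot" "test" {}
`
	if includeAddition {
		return base + fmt.Sprintf(`
resource "outscale_snapshot_attributes" "additions" {
  snapshot_id = "${outscale_snapshot.test.id}"
  permissions_to_create_volume_additions { account_ids = ["%s"] }
}
`, accountID)
	}
	if includeRemoval {
		return base + fmt.Sprintf(`
resource "outscale_snapshot_attributes" "removals" {
  snapshot_id = "${outscale_snapshot.test.id}"
  permissions_to_create_volume_removals { account_ids = ["%s"] }
}
`, accountID)
	}
	return base
}

func main() {
	fmt.Println(snapshotAttributesConfig(true, false, "123456789012"))
}
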
diff --git a/outscale/resource_outscale_snapshot_copy.go b/outscale/resource_outscale_snapshot_copy.go
deleted file mode 100644
index fae6fc732..000000000
--- a/outscale/resource_outscale_snapshot_copy.go
+++ /dev/null
@@ -1,97 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/outscale/osc-go/oapi"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
-)
-
-func resourcedOutscaleOAPISnapshotCopy() *schema.Resource {
- return &schema.Resource{
- Create: resourcedOutscaleOAPISnapshotCopyCreate,
- Read: resourcedOutscaleOAPISnapshotCopyRead,
- Delete: resourcedOutscaleOAPISnapshotCopyDelete,
-
- Schema: map[string]*schema.Schema{
- "description": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- ForceNew: true,
- },
- "source_region_name": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "snapshot_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "source_snapshot_id": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- },
- }
-}
-
-func resourcedOutscaleOAPISnapshotCopyCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).OAPI
-
- req := oapi.CreateSnapshotRequest{
- SourceRegionName: d.Get("source_region_name").(string),
- SourceSnapshotId: d.Get("source_snapshot_id").(string),
- }
-
- if v, ok := d.GetOk("description"); ok {
- req.Description = v.(string)
- }
-
- var o *oapi.POST_CreateSnapshotResponses
- var err error
- err = resource.Retry(2*time.Minute, func() *resource.RetryError {
- o, err = conn.POST_CreateSnapshot(req)
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded") {
- log.Printf("[DEBUG] Error: %q", err)
- return resource.RetryableError(err)
- }
-
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error copying snapshot: %s", err)
- }
-
- d.SetId(resource.UniqueId())
- d.Set("snapshot_id", o.OK.Snapshot.SnapshotId)
- d.Set("request_id", o.OK.ResponseContext.RequestId)
-
- return nil
-}
-
-func resourcedOutscaleOAPISnapshotCopyRead(d *schema.ResourceData, meta interface{}) error {
- return nil
-}
-
-func resourcedOutscaleOAPISnapshotCopyDelete(d *schema.ResourceData, meta interface{}) error {
- d.SetId("")
-
- return nil
-}
diff --git a/outscale/resource_outscale_snapshot_export_task.go b/outscale/resource_outscale_snapshot_export_task.go
deleted file mode 100644
index e419112f2..000000000
--- a/outscale/resource_outscale_snapshot_export_task.go
+++ /dev/null
@@ -1,333 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIImageExportTasks() *schema.Resource {
- return &schema.Resource{
- Create: resourceOAPIImageExportTasksCreate,
- Read: resourceOAPIImageExportTasksRead,
- Delete: resourceOAPIImageExportTasksDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Timeouts: &schema.ResourceTimeout{
- Create: schema.DefaultTimeout(40 * time.Minute),
- Delete: schema.DefaultTimeout(40 * time.Minute),
- },
-
- Schema: map[string]*schema.Schema{
- "osu_export": {
- Type: schema.TypeList,
- Required: true,
- ForceNew: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "disk_image_format": {
- Type: schema.TypeString,
- Required: true,
- },
- "osu_bucket": {
- Type: schema.TypeString,
- Required: true,
- },
- "osu_key": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "osu_prefix": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- "osu_api_key": {
- Type: schema.TypeList,
- Optional: true,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "api_key_id": {
- Type: schema.TypeString,
- Required: true,
- },
- "secret_key": {
- Type: schema.TypeString,
- Required: true,
- },
- },
- },
- },
- },
- },
- },
- "snapshot_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "completion": {
- Type: schema.TypeString,
- Computed: true,
- },
- "snapshot_description": {
- Type: schema.TypeMap,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "snapshot_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "task_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "comment": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func resourceOAPIImageExportTasksCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- eto, etoOk := d.GetOk("osu_export")
- v, ok := d.GetOk("snapshot_id")
- request := &fcu.CreateSnapshotExportTaskInput{}
-
- if !etoOk && !ok {
- return fmt.Errorf("Please provide the required attributes osu_export and image_id")
- }
-
- request.SnapshotId = aws.String(v.(string))
-
- if etoOk {
- exp := eto.([]interface{})
- e := exp[0].(map[string]interface{})
-
- et := &fcu.ExportToOsuTaskSpecification{}
-
- if v, ok := e["disk_image_format"]; ok {
- et.DiskImageFormat = aws.String(v.(string))
- }
- if v, ok := e["osu_key"]; ok {
- et.OsuKey = aws.String(v.(string))
- }
- if v, ok := e["osu_bucket"]; ok {
- et.OsuBucket = aws.String(v.(string))
- }
- if v, ok := e["osu_prefix"]; ok {
- et.OsuPrefix = aws.String(v.(string))
- }
- if v, ok := e["osu_api_key"]; ok {
- a := v.([]interface{})
- if len(a) > 0 {
- w := a[0].(map[string]interface{})
- et.AkSk = &fcu.ExportToOsuAccessKeySpecification{
- AccessKey: aws.String(w["api_key_id"].(string)),
- SecretKey: aws.String(w["secret_key"].(string)),
- }
- }
- }
- request.ExportToOsu = et
- }
-
- var resp *fcu.CreateSnapshotExportTaskOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.CreateSnapshotExportTask(request)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("[DEBUG] Error image task %s", err)
- }
-
- id := *resp.SnapshotExportTask.SnapshotExportTaskId
- d.SetId(id)
-
- _, err = resourceOutscaleSnapshotTaskWaitForAvailable(id, conn, 1)
- if err != nil {
- return err
- }
-
- return resourceOAPIImageExportTasksRead(d, meta)
-}
-
-func resourceOAPIImageExportTasksRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var resp *fcu.DescribeSnapshotExportTasksOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeSnapshotExportTasks(&fcu.DescribeSnapshotExportTasksInput{
- SnapshotExportTaskId: []*string{aws.String(d.Id())},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error reading task image %s", err)
- }
-
- v := resp.SnapshotExportTask[0]
-
- d.Set("completion", v.Completion)
- d.Set("task_id", v.SnapshotExportTaskId)
- d.Set("state", v.State)
- d.Set("completion", v.Completion)
- if v.StatusMessage != nil {
- d.Set("comment", v.StatusMessage)
- } else {
- d.Set("comment", "")
- }
-
- exp := make([]map[string]interface{}, 1)
- exportToOsu := make(map[string]interface{})
- exportToOsu["disk_image_format"] = *v.ExportToOsu.DiskImageFormat
- exportToOsu["osu_bucket"] = *v.ExportToOsu.OsuBucket
- exportToOsu["osu_key"] = *v.ExportToOsu.OsuKey
- if v.ExportToOsu.OsuPrefix != nil {
- exportToOsu["osu_prefix"] = *v.ExportToOsu.OsuPrefix
- } else {
- exportToOsu["osu_prefix"] = ""
- }
-
- apk := make([]map[string]interface{}, 1)
- osuAkSk := make(map[string]interface{})
- if v.ExportToOsu.AkSk != nil {
- osuAkSk["api_key_id"] = *v.ExportToOsu.AkSk.AccessKey
- osuAkSk["secret_key"] = *v.ExportToOsu.AkSk.SecretKey
- } else {
- osuAkSk["api_key_id"] = ""
- osuAkSk["secret_key"] = ""
- }
- apk[0] = osuAkSk
- exportToOsu["osu_api_key"] = apk
-
- snapExp := make(map[string]interface{})
- snapExp["snapshot_id"] = *v.SnapshotExport.SnapshotId
-
- d.Set("snapshot_description", snapExp)
- exp[0] = exportToOsu
- if err := d.Set("osu_export", exp); err != nil {
- return err
- }
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
-
-func resourceOutscaleSnapshotTaskWaitForAvailable(id string, client *fcu.Client, i int) (*fcu.SnapshotExportTask, error) {
- log.Printf("Waiting for Image Task %s to become available...", id)
-
- stateConf := &resource.StateChangeConf{
- Pending: []string{"pending", "pending/queued", "queued"},
- Target: []string{"active"},
- Refresh: SnapshotTaskStateRefreshFunc(client, id),
- Timeout: OutscaleImageRetryTimeout,
- Delay: OutscaleImageRetryDelay,
- MinTimeout: OutscaleImageRetryMinTimeout,
- }
-
- info, err := stateConf.WaitForState()
- if err != nil {
- return nil, fmt.Errorf("Error waiting for OMI (%s) to be ready: %s", id, err)
- }
- return info.(*fcu.SnapshotExportTask), nil
-}
-
-func resourceOAPIImageExportTasksDelete(d *schema.ResourceData, meta interface{}) error {
-
- d.SetId("")
- d.Set("snapshot_description", nil)
- d.Set("osu_export", nil)
- d.Set("request_id", nil)
-
- return nil
-}
-
-// SnapshotTaskStateRefreshFunc ...
-func SnapshotTaskStateRefreshFunc(client *fcu.Client, id string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
- emptyResp := &fcu.DescribeSnapshotExportTasksOutput{}
-
- var resp *fcu.DescribeSnapshotExportTasksOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = client.VM.DescribeSnapshotExportTasks(&fcu.DescribeSnapshotExportTasksInput{
- SnapshotExportTaskId: []*string{aws.String(id)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if e := fmt.Sprint(err); strings.Contains(e, "InvalidAMIID.NotFound") {
- log.Printf("[INFO] OMI %s state %s", id, "destroyed")
- return emptyResp, "destroyed", nil
-
- } else if resp != nil && len(resp.SnapshotExportTask) == 0 {
- log.Printf("[INFO] OMI %s state %s", id, "destroyed")
- return emptyResp, "destroyed", nil
- } else {
- return emptyResp, "", fmt.Errorf("Error on refresh: %+v", err)
- }
- }
-
- if resp == nil || resp.SnapshotExportTask == nil || len(resp.SnapshotExportTask) == 0 {
- return emptyResp, "destroyed", nil
- }
-
- if *resp.SnapshotExportTask[0].State == "failed" {
- return resp.SnapshotExportTask[0], *resp.SnapshotExportTask[0].State, fmt.Errorf(*resp.SnapshotExportTask[0].StatusMessage)
- }
-
- // OMI is valid, so return its state
- return resp.SnapshotExportTask[0], *resp.SnapshotExportTask[0].State, nil
- }
-}
diff --git a/outscale/resource_outscale_snapshot_export_task_test.go b/outscale/resource_outscale_snapshot_export_task_test.go
deleted file mode 100644
index 29008bdf3..000000000
--- a/outscale/resource_outscale_snapshot_export_task_test.go
+++ /dev/null
@@ -1,65 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPISnapshotExportTask_basic(t *testing.T) {
- t.Skip()
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPISnapshotExportTaskConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOutscaleOAPISnapshotExportTaskExists("outscale_snapshot_export_task.outscale_snapshot_export_task"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOutscaleOAPISnapshotExportTaskExists(n string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No image task id is set")
- }
-
- return nil
- }
-}
-
-var testAccOutscaleOAPISnapshotExportTaskConfig = `
-resource "outscale_volume" "test" {
- sub_region = "eu-west-2a"
- size = 1
-}
-
-resource "outscale_snapshot" "test" {
- volume_id = "${outscale_volume.test.id}"
-}
-
-resource "outscale_snapshot_export_task" "outscale_snapshot_export_task" {
- count = 1
-
- osu_export {
- disk_image_format = "raw"
- osu_bucket = "test"
- }
- snapshot_id = "${outscale_snapshot.test.id}"
-}
-`
diff --git a/outscale/resource_outscale_snapshot_import.go b/outscale/resource_outscale_snapshot_import.go
deleted file mode 100644
index 59ce06056..000000000
--- a/outscale/resource_outscale_snapshot_import.go
+++ /dev/null
@@ -1,230 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourcedOutscaleOAPISnapshotImport() *schema.Resource {
- return &schema.Resource{
- Create: resourcedOutscaleOAPISnapshotImportCreate,
- Read: resourcedOutscaleOAPISnapshotImportRead,
- Delete: resourcedOutscaleOAPISnapshotImportDelete,
-
- Schema: map[string]*schema.Schema{
- "description": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- ForceNew: true,
- },
- "osu_location": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "snapshot_size": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "is_encrypted": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "vm_profile_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "account_alias": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "completion": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "state": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "volume_size": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func resourcedOutscaleOAPISnapshotImportCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- req := &fcu.ImportSnapshotInput{
- SnapshotLocation: aws.String(d.Get("osu_location").(string)),
- SnapshotSize: aws.String(d.Get("snapshot_size").(string)),
- }
-
- if v, ok := d.GetOk("description"); ok {
- req.Description = aws.String(v.(string))
- }
-
- var resp *fcu.ImportSnapshotOutput
- var err error
- err = resource.Retry(2*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.ImportSnapshot(req)
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded") {
- log.Printf("[DEBUG] Error: %q", err)
- return resource.RetryableError(err)
- }
-
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error adding snapshot createVolumePermission: %s", err)
- }
-
- d.Set("vm_profile_id", resp.ImportTaskId)
- d.SetId(*resp.Id)
-
- // Wait for the snapshot import task to complete
- stateConf := &resource.StateChangeConf{
- Pending: []string{"pending"},
- Target: []string{"completed"},
- Refresh: resourcedOutscaleOAPISnapshotImportStateRefreshFunc(d, conn, *resp.ImportTaskId),
- Timeout: 5 * time.Minute,
- Delay: 10 * time.Second,
- MinTimeout: 10 * time.Second,
- }
- if _, err := stateConf.WaitForState(); err != nil {
- return fmt.Errorf(
- "Error waiting for snapshot createVolumePermission (%s) to be added: %s",
- d.Id(), err)
- }
-
- return resourcedOutscaleOAPISnapshotImportRead(d, meta)
-}
-
-func resourcedOutscaleOAPISnapshotImportRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var attrs *fcu.DescribeSnapshotsOutput
- var err error
- err = resource.Retry(2*time.Minute, func() *resource.RetryError {
- attrs, err = conn.VM.DescribeSnapshots(&fcu.DescribeSnapshotsInput{
- SnapshotIds: []*string{aws.String(d.Id())},
- })
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded") {
- log.Printf("[DEBUG] Error: %q", err)
- return resource.RetryableError(err)
- }
-
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error refreshing snapshot state: %s", err)
- }
-
- s := attrs.Snapshots[0]
-
- d.Set("description", s.Description)
- d.Set("is_encrypted", s.Encrypted)
- d.Set("account_alias", s.OwnerAlias)
- d.Set("completion", s.Progress)
- d.Set("state", s.State)
- d.Set("volume_size", s.VolumeSize)
- d.Set("request_id", attrs.RequestId)
-
- return nil
-}
-
-func resourcedOutscaleOAPISnapshotImportDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- return resource.Retry(5*time.Minute, func() *resource.RetryError {
- request := &fcu.DeleteSnapshotInput{
- SnapshotId: aws.String(d.Id()),
- }
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err := conn.VM.DeleteSnapshot(request)
-
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
- if err == nil {
- return nil
- }
-
- ebsErr, ok := err.(awserr.Error)
- if ebsErr.Code() == "SnapshotInUse" {
- return resource.RetryableError(fmt.Errorf("EBS SnapshotInUse - trying again while it detaches"))
- }
-
- if !ok {
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-}
-
-func resourcedOutscaleOAPISnapshotImportStateRefreshFunc(d *schema.ResourceData, conn *fcu.Client, id string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
-
- var attrs *fcu.DescribeSnapshotsOutput
- var err error
- err = resource.Retry(2*time.Minute, func() *resource.RetryError {
- attrs, err = conn.VM.DescribeSnapshots(&fcu.DescribeSnapshotsInput{
- SnapshotIds: []*string{aws.String(id)},
- })
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded") {
- log.Printf("[DEBUG] Error: %q", err)
- return resource.RetryableError(err)
- }
-
- return resource.NonRetryableError(err)
- }
-
- return nil
- })
-
- if err != nil {
- return nil, "", fmt.Errorf("Error refreshing snapshot state: %s", err)
- }
-
- s := attrs.Snapshots[0]
-
- d.Set("completion", s.Progress)
-
- return attrs, "error", nil
- }
-}
diff --git a/outscale/resource_outscale_snapshot_import_test.go b/outscale/resource_outscale_snapshot_import_test.go
deleted file mode 100644
index e302586a8..000000000
--- a/outscale/resource_outscale_snapshot_import_test.go
+++ /dev/null
@@ -1,79 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "os"
- "strconv"
- "testing"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPISnapshotImport_Basic(t *testing.T) {
- t.Skip()
- o := os.Getenv("OUTSCALE_OAPI")
-
- oapi, err := strconv.ParseBool(o)
- if err != nil {
- oapi = false
- }
-
- if !oapi {
- t.Skip()
- }
- resource.Test(t, resource.TestCase{
- Providers: testAccProviders,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPISnapshotCopyConfig(),
- Check: resource.ComposeTestCheckFunc(
- testAccOutscaleOAPISnapshotCopyExists("outscale_snapshot_import"),
- ),
- },
- },
- })
-}
-
-func testAccOutscaleOAPISnapshotCopyExists(n string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
-
- return nil
- }
-}
-
-func testAccOutscaleOAPISnapshotImportConfig() string {
- return fmt.Sprintf(`
-resource "outscale_snapshot_import" "test" {
- osu_location = ""
- snapshot_size = ""
-}
-`)
-}
-
-func testAccOutscaleOAPISnapshotCopyConfig() string {
- return fmt.Sprintf(`
-resource "outscale_volume" "test" {
- sub_region_name = "eu-west-2a"
- size = 1
-}
-
-resource "outscale_snapshot" "test" {
- volume_id = "${outscale_volume.test.id}"
- description = "Snapshot Acceptance Test"
-}
-
-resource "outscale_snapshot_copy" "test" {
- source_region_name = "eu-west-2b"
- source_snapshot_id = "${outscale_snapshot.test.id}"
-}
-`)
-}
diff --git a/outscale/resource_outscale_snapshot_test.go b/outscale/resource_outscale_snapshot_test.go
index e55253822..cf8c91a13 100644
--- a/outscale/resource_outscale_snapshot_test.go
+++ b/outscale/resource_outscale_snapshot_test.go
@@ -3,15 +3,16 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"os"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPISnapshot_basic(t *testing.T) {
@@ -19,10 +20,7 @@ func TestAccOutscaleOAPISnapshot_basic(t *testing.T) {
var v oscgo.Snapshot
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
@@ -40,10 +38,7 @@ func TestAccOutscaleOAPISnapshot_withDescription(t *testing.T) {
var v oscgo.Snapshot
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
@@ -62,10 +57,7 @@ func TestAccOutscaleOAPISnapshot_CopySnapshot(t *testing.T) {
var v oscgo.Snapshot
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
@@ -84,10 +76,7 @@ func TestAccOutscaleOAPISnapshot_UpdateTags(t *testing.T) {
//var v oscgo.Snapshot
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
@@ -197,12 +186,16 @@ func testAccOutscaleOAPISnapshotConfigCopySnapshot(region string) string {
func testAccOutscaleOAPISnapshotConfigUpdateTags(region, value string) string {
return fmt.Sprintf(`
resource "outscale_volume" "outscale_volume" {
- subregion_name = "%sa" size = 10
- }
- resource "outscale_snapshot" "outscale_snapshot" {
- volume_id = "${outscale_volume.outscale_volume.volume_id}"
- tags = {
- key = "Name"
- value = "%s" }
- } `, region, value)
+ subregion_name = "%sa"
+ size = 10
+ }
+ resource "outscale_snapshot" "outscale_snapshot" {
+ volume_id = "${outscale_volume.outscale_volume.volume_id}"
+
+ tags {
+ key = "Name"
+ value = "%s"
+ }
+ }
+ `, region, value)
}
diff --git a/outscale/resource_outscale_subnet.go b/outscale/resource_outscale_subnet.go
index 6b1231a6a..c9ab57c7f 100644
--- a/outscale/resource_outscale_subnet.go
+++ b/outscale/resource_outscale_subnet.go
@@ -8,8 +8,8 @@ import (
"time"
"github.com/antihax/optional"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
oscgo "github.com/marinsalinas/osc-sdk-go"
)
@@ -95,8 +95,7 @@ func resourceOutscaleOAPISubNetRead(d *schema.ResourceData, meta interface{}) er
},
}
var resp oscgo.ReadSubnetsResponse
- var err error
- err = resource.Retry(120*time.Second, func() *resource.RetryError {
+ err := resource.Retry(120*time.Second, func() *resource.RetryError {
r, _, err := conn.SubnetApi.ReadSubnets(context.Background(), &oscgo.ReadSubnetsOpts{ReadSubnetsRequest: optional.NewInterface(req)})
if err != nil {
if strings.Contains(err.Error(), "RequestLimitExceeded:") {
diff --git a/outscale/resource_outscale_subnet_test.go b/outscale/resource_outscale_subnet_test.go
index d2faf370b..536339b1b 100644
--- a/outscale/resource_outscale_subnet_test.go
+++ b/outscale/resource_outscale_subnet_test.go
@@ -9,8 +9,8 @@ import (
"time"
"github.com/antihax/optional"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
oscgo "github.com/marinsalinas/osc-sdk-go"
)
@@ -19,10 +19,7 @@ func TestAccOutscaleOAPISubNet_basic(t *testing.T) {
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPISubNetDestroyed, // we need to create the destroyed test case
Steps: []resource.TestStep{
diff --git a/outscale/resource_outscale_tags.go b/outscale/resource_outscale_tags.go
index d7eb4a654..f5e579d70 100644
--- a/outscale/resource_outscale_tags.go
+++ b/outscale/resource_outscale_tags.go
@@ -3,14 +3,15 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"strings"
"time"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPITags() *schema.Resource {
@@ -144,7 +145,7 @@ func resourceOutscaleOAPITagsDelete(d *schema.ResourceData, meta interface{}) er
resourceIds, resourceIdsOk := d.GetOk("resource_ids")
- if tagsOk == false && resourceIdsOk == false {
+ if !tagsOk && !resourceIdsOk {
return fmt.Errorf("One tag and resource id, must be assigned")
}
diff --git a/outscale/resource_outscale_tags_test.go b/outscale/resource_outscale_tags_test.go
index 45435b8c8..f11fe7e87 100644
--- a/outscale/resource_outscale_tags_test.go
+++ b/outscale/resource_outscale_tags_test.go
@@ -3,29 +3,28 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
"log"
"os"
"testing"
"time"
+ "github.com/antihax/optional"
+ "github.com/go-test/deep"
+
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIVM_tags(t *testing.T) {
v := &oscgo.Vm{}
- omi := getOMIByRegion("eu-west-2", "ubuntu").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -118,23 +117,20 @@ func oapiTestAccCheckOutscaleVMExistsWithProviders(n string, i *oscgo.Vm, provid
}
func testAccCheckOAPITags(
- ts []oscgo.ResourceTag, key string, value string) resource.TestCheckFunc {
- log.Printf("[DEBUG] testAccCheckOAPITags %+v", ts)
+ ts *[]oscgo.ResourceTag, key string, value string) resource.TestCheckFunc {
return func(s *terraform.State) error {
- m := tagsOSCAPIToMap(ts)
- v, ok := m[0]["Key"]
- if value != "" && !ok {
- return fmt.Errorf("Missing tag: %s", key)
- } else if value == "" && ok {
- return fmt.Errorf("Extra tag: %s", key)
+ expected := map[string]string{
+ "key": key,
+ "value": value,
}
- if value == "" {
+ tags := tagsOSCAPIToMap(*ts)
+ for _, tag := range tags {
+ if diff := deep.Equal(tag, expected); diff != nil {
+ continue
+ }
return nil
}
- if v != value {
- return fmt.Errorf("%s: bad value: %s", key, v)
- }
- return nil
+ return fmt.Errorf("error checking tags expected tag %+v is not found in %+v", expected, tags)
}
}
diff --git a/outscale/resource_outscale_vm.go b/outscale/resource_outscale_vm.go
index a2cf7930d..3a11e1118 100644
--- a/outscale/resource_outscale_vm.go
+++ b/outscale/resource_outscale_vm.go
@@ -9,13 +9,14 @@ import (
"time"
"github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
"github.com/spf13/cast"
"github.com/antihax/optional"
oscgo "github.com/marinsalinas/osc-sdk-go"
+ "github.com/terraform-providers/terraform-provider-outscale/utils"
)
func resourceOutscaleOApiVM() *schema.Resource {
@@ -107,15 +108,18 @@ func resourceOutscaleOApiVM() *schema.Resource {
Computed: true,
},
"nics": {
- Type: schema.TypeList,
+ Type: schema.TypeSet,
Optional: true,
Computed: true,
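+ // Hash each NIC by its device_number so set ordering does not produce spurious diffs.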
+ Set: func(v interface{}) int {
+ return v.(map[string]interface{})["device_number"].(int)
+ },
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"delete_on_vm_deletion": {
Type: schema.TypeBool,
- Default: true,
Optional: true,
+ Computed: true,
},
"description": {
Type: schema.TypeString,
@@ -124,8 +128,7 @@ func resourceOutscaleOApiVM() *schema.Resource {
},
"device_number": {
Type: schema.TypeInt,
- Computed: true,
- Optional: true,
+ Required: true,
},
"nic_id": {
Type: schema.TypeString,
@@ -196,7 +199,8 @@ func resourceOutscaleOApiVM() *schema.Resource {
Optional: true,
},
"link_nic": {
- Type: schema.TypeMap,
+ Type: schema.TypeList,
+ MaxItems: 1,
Computed: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
@@ -257,12 +261,6 @@ func resourceOutscaleOApiVM() *schema.Resource {
Optional: true,
Elem: &schema.Schema{Type: schema.TypeString},
},
- "security_groups_names": {
- Type: schema.TypeList,
- Optional: true,
- Computed: true,
- Elem: &schema.Schema{Type: schema.TypeString},
- },
"security_groups": {
Type: schema.TypeList,
Computed: true,
@@ -397,6 +395,10 @@ func resourceOutscaleOApiVM() *schema.Resource {
Type: schema.TypeString,
Computed: true,
},
+ "performance": {
+ Type: schema.TypeString,
+ Computed: true,
+ },
"private_dns_name": {
Type: schema.TypeString,
Computed: true,
@@ -497,7 +499,7 @@ func resourceOAPIVMCreate(d *schema.ResourceData, meta interface{}) error {
})
if err != nil {
- return fmt.Errorf("Error launching source VM: %s", err)
+ return fmt.Errorf("Error launching source VM: %s", utils.GetErrorResponse(err))
}
if !resp.HasVms() || len(resp.GetVms()) == 0 {
@@ -630,13 +632,10 @@ func resourceOAPIVMUpdate(d *schema.ResourceData, meta interface{}) error {
id := d.Get("vm_id").(string)
- var stateConf *resource.StateChangeConf
- var err error
if d.HasChange("vm_type") && !d.IsNewResource() ||
d.HasChange("user_data") && !d.IsNewResource() ||
d.HasChange("bsu_optimized") && !d.IsNewResource() {
- stateConf, err = stopVM(id, conn)
- if err != nil {
+ if err := stopVM(id, conn); err != nil {
return err
}
}
@@ -759,7 +758,7 @@ func resourceOAPIVMUpdate(d *schema.ResourceData, meta interface{}) error {
d.Partial(false)
- if err := startVM(id, stateConf, conn); err != nil {
+ if err := startVM(id, conn); err != nil {
return err
}
@@ -925,11 +924,10 @@ func expandBlockDeviceBSU(bsu map[string]interface{}) oscgo.BsuToCreate {
func buildNetworkOApiInterfaceOpts(d *schema.ResourceData) []oscgo.NicForVmCreation {
- nics := d.Get("nics").([]interface{})
- log.Printf("[DEBUG] NICS TO CREATE -> %+v", nics)
+ nics := d.Get("nics").(*schema.Set).List()
networkInterfaces := []oscgo.NicForVmCreation{}
- for _, v := range nics {
+ for i, v := range nics {
nic := v.(map[string]interface{})
ni := oscgo.NicForVmCreation{
@@ -944,8 +942,9 @@ func buildNetworkOApiInterfaceOpts(d *schema.ResourceData) []oscgo.NicForVmCreat
ni.SetSecondaryPrivateIpCount(int64(v))
}
- if d, dOk := nic["delete_on_vm_deletion"]; dOk {
- ni.SetDeleteOnVmDeletion(d.(bool))
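+ // d.GetOk skips unset and zero values, so delete_on_vm_deletion is only forwarded when set to true.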
+ if deleteOnDeletion, deleteOK := d.GetOk(fmt.Sprintf("nics.%d.delete_on_vm_deletion", i)); deleteOK {
+ log.Printf("[DEBUG] deleteOnDeletion=%+v, deleteOK=%+v", deleteOnDeletion, deleteOK)
+ ni.SetDeleteOnVmDeletion(deleteOnDeletion.(bool))
}
ni.SetDescription(nic["description"].(string))
@@ -1026,7 +1025,7 @@ func vmStateRefreshFunc(conn *oscgo.APIClient, instanceID, failState string) res
}
}
-func stopVM(vmID string, conn *oscgo.APIClient) (*resource.StateChangeConf, error) {
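+// stopVM stops the VM and waits for it to reach the "stopped" state.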
+func stopVM(vmID string, conn *oscgo.APIClient) error {
_, _, err := conn.VmApi.StopVms(context.Background(), &oscgo.StopVmsOpts{
StopVmsRequest: optional.NewInterface(oscgo.StopVmsRequest{
VmIds: []string{vmID},
@@ -1034,7 +1033,7 @@ func stopVM(vmID string, conn *oscgo.APIClient) (*resource.StateChangeConf, erro
})
if err != nil {
- return nil, fmt.Errorf("error stopping vms %s", err)
+ return fmt.Errorf("error stopping vms %s", err)
}
stateConf := &resource.StateChangeConf{
@@ -1048,14 +1047,13 @@ func stopVM(vmID string, conn *oscgo.APIClient) (*resource.StateChangeConf, erro
_, err = stateConf.WaitForState()
if err != nil {
- return nil, fmt.Errorf(
- "Error waiting for instance (%s) to stop: %s", vmID, err)
+ return fmt.Errorf("Error waiting for instance (%s) to stop: %s", vmID, err)
}
- return stateConf, nil
+ return nil
}
-func startVM(vmID string, stateConf *resource.StateChangeConf, conn *oscgo.APIClient) error {
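+// startVM starts the VM and waits for it to reach the "running" state.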
+func startVM(vmID string, conn *oscgo.APIClient) error {
_, _, err := conn.VmApi.StartVms(context.Background(), &oscgo.StartVmsOpts{
StartVmsRequest: optional.NewInterface(oscgo.StartVmsRequest{
VmIds: []string{vmID},
@@ -1066,7 +1064,7 @@ func startVM(vmID string, stateConf *resource.StateChangeConf, conn *oscgo.APICl
return fmt.Errorf("error starting vm %s", err)
}
- stateConf = &resource.StateChangeConf{
+ stateConf := &resource.StateChangeConf{
Pending: []string{"pending", "stopped"},
Target: []string{"running"},
Refresh: vmStateRefreshFunc(conn, vmID, ""),
@@ -1091,12 +1089,6 @@ func updateVmAttr(conn *oscgo.APIClient, instanceAttrOpts oscgo.UpdateVmRequest)
return nil
}
-func getOSCVMsFilterByVMID(vmID string) *oscgo.FiltersVm {
- return &oscgo.FiltersVm{
- VmIds: &[]string{vmID},
- }
-}
-
// AttributeSetter you can use this function to set the attributes
type AttributeSetter func(key string, value interface{}) error
diff --git a/outscale/resource_outscale_vm_test.go b/outscale/resource_outscale_vm_test.go
index c4050db41..ae5cb3b94 100644
--- a/outscale/resource_outscale_vm_test.go
+++ b/outscale/resource_outscale_vm_test.go
@@ -13,21 +13,18 @@ import (
oscgo "github.com/marinsalinas/osc-sdk-go"
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIVM_Basic(t *testing.T) {
var server oscgo.Vm
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -46,16 +43,38 @@ func TestAccOutscaleOAPIVM_Basic(t *testing.T) {
})
}
-func TestAccOutscaleOAPIVM_BasicTags(t *testing.T) {
+func TestAccOutscaleOAPIVM_BasicWithNicAttached(t *testing.T) {
var server oscgo.Vm
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
+ PreCheck: func() { testAccPreCheck(t) },
+ Providers: testAccProviders,
+ CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
+ Steps: []resource.TestStep{
+ {
+ Config: testAccCheckOutscaleOAPIVMConfigBasicWithNicAttached(omi, "c4.large", region),
+ Check: resource.ComposeTestCheckFunc(
+ testAccCheckOutscaleOAPIVMExists("outscale_vm.basic", &server),
+ testAccCheckOutscaleOAPIVMAttributes(t, &server, omi),
+ resource.TestCheckResourceAttr(
+ "outscale_vm.basic", "image_id", omi),
+ resource.TestCheckResourceAttr(
+ "outscale_vm.basic", "vm_type", "c4.large"),
+ ),
+ },
},
+ })
+}
+
+func TestAccOutscaleOAPIVM_BasicTags(t *testing.T) {
+ var server oscgo.Vm
+ omi := os.Getenv("OUTSCALE_IMAGEID")
+ region := os.Getenv("OUTSCALE_REGION")
+
+ resource.Test(t, resource.TestCase{
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -76,13 +95,10 @@ func TestAccOutscaleOAPIVM_BasicTags(t *testing.T) {
func TestAccOutscaleOAPIVM_BasicWithNics(t *testing.T) {
var server oscgo.Vm
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -103,17 +119,13 @@ func TestAccOutscaleOAPIVM_BasicWithNics(t *testing.T) {
func TestAccOutscaleOAPIVM_Update(t *testing.T) {
region := os.Getenv("OUTSCALE_REGION")
- omi := getOMIByRegion(region, "centos").OMI
- omi2 := getOMIByRegion(region, "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
var before oscgo.Vm
var after oscgo.Vm
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -127,10 +139,11 @@ func TestAccOutscaleOAPIVM_Update(t *testing.T) {
),
},
{
- Config: testAccVmsConfigUpdateOAPIVMKey(omi2, "c4.large", region),
+ Config: testAccVmsConfigUpdateOAPIVMKey(omi, "t2.micro", region),
Check: resource.ComposeTestCheckFunc(
testAccCheckOAPIVMExists("outscale_vm.basic", &after),
testAccCheckOAPIVMNotRecreated(t, &before, &after),
+ resource.TestCheckResourceAttr("outscale_vm.basic", "vm_type", "t2.micro"),
),
},
},
@@ -139,14 +152,11 @@ func TestAccOutscaleOAPIVM_Update(t *testing.T) {
func TestAccOutscaleOAPIVM_WithSubnet(t *testing.T) {
var server oscgo.Vm
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -167,15 +177,12 @@ func TestAccOutscaleOAPIVM_WithSubnet(t *testing.T) {
func TestAccOutscaleOAPIVM_WithBlockDeviceMappings(t *testing.T) {
var server oscgo.Vm
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
vmType := "t2.micro"
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -195,13 +202,10 @@ func TestAccOutscaleOAPIVM_WithBlockDeviceMappings(t *testing.T) {
}
func TestAccOutscaleOAPIVM_DeletionProtectionUpdate(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -222,18 +226,12 @@ func TestAccOutscaleOAPIVM_DeletionProtectionUpdate(t *testing.T) {
}
func TestAccOutscaleOAPIVMTags_Update(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "centos").OMI
- //omi2 := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
- //var before oscgo.Vm
- //var after oscgo.Vm
-
+ //TODO: check tags
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -251,15 +249,12 @@ func TestAccOutscaleOAPIVMTags_Update(t *testing.T) {
func TestAccOutscaleOAPIVM_WithNet(t *testing.T) {
var server oscgo.Vm
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
vmType := "t2.micro"
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckOutscaleOAPIVMDestroy,
Steps: []resource.TestStep{
@@ -308,11 +303,6 @@ func testAccCheckOAPIVMExists(n string, i *oscgo.Vm) resource.TestCheckFunc {
return testAccCheckOAPIVMExistsWithProviders(n, i, &providers)
}
-func testAccCheckOSCAPIVMExists(n string, i *oscgo.Vm) resource.TestCheckFunc {
- providers := []*schema.Provider{testAccProvider}
- return testAccCheckOSCAPIVMExistsWithProviders(n, i, &providers)
-}
-
func getVMsFilterByVMID(vmID string) *oscgo.FiltersVm {
return &oscgo.FiltersVm{
VmIds: &[]string{vmID},
@@ -361,48 +351,6 @@ func testAccCheckOAPIVMExistsWithProviders(n string, i *oscgo.Vm, providers *[]*
}
}
-func testAccCheckOSCAPIVMExistsWithProviders(n string, i *oscgo.Vm, providers *[]*schema.Provider) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
- for _, provider := range *providers {
- // Ignore if Meta is empty, this can happen for validation providers
- if provider.Meta() == nil {
- continue
- }
-
- client := provider.Meta().(*OutscaleClient)
-
- var resp oscgo.ReadVmsResponse
- var err error
- for {
- resp, _, err = client.OSCAPI.VmApi.ReadVms(context.Background(), &oscgo.ReadVmsOpts{ReadVmsRequest: optional.NewInterface(oscgo.ReadVmsRequest{
- Filters: getOSCVMsFilterByVMID(rs.Primary.ID),
- })})
- if err != nil {
- time.Sleep(10 * time.Second)
- } else {
- break
- }
-
- }
-
- if len(resp.GetVms()) > 0 {
- *i = resp.GetVms()[0]
- return nil
- }
- }
-
- return fmt.Errorf("Vms not found")
- }
-}
-
func testAccCheckOAPIVMNotRecreated(t *testing.T, before, after *oscgo.Vm) resource.TestCheckFunc {
return func(s *terraform.State) error {
assertNotEqual(t, before.VmId, after.VmId, "Outscale VM IDs have changed.")
@@ -531,7 +479,7 @@ func testAccCheckOutscaleOAPIVMExistsWithProviders(n string, i *oscgo.Vm, provid
func testAccCheckOutscaleOAPIVMAttributes(t *testing.T, server *oscgo.Vm, omi string) resource.TestCheckFunc {
return func(s *terraform.State) error {
- assertEqual(t, omi, server.ImageId, "Bad image_id.")
+ assertEqual(t, omi, server.GetImageId(), "Bad image_id.")
return nil
}
}
@@ -549,15 +497,61 @@ func testAccCheckOutscaleOAPIVMConfigBasic(omi, vmType, region string) string {
}
resource "outscale_vm" "basic" {
- image_id = "%s"
+ image_id = "%s"
vm_type = "%s"
- keypair_name = "terraform-basic"
+ keypair_name = "terraform-basic"
placement_subregion_name = "%sa"
subnet_id = "${outscale_subnet.outscale_subnet.subnet_id}"
private_ips = ["10.0.0.12"]
}`, omi, vmType, region)
}
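+// testAccCheckOutscaleOAPIVMConfigBasicWithNicAttached returns a config that declares one inline NIC and attaches a second, pre-created NIC to the VM.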
+func testAccCheckOutscaleOAPIVMConfigBasicWithNicAttached(omi, vmType, region string) string {
+ return fmt.Sprintf(`
+ resource "outscale_net" "outscale_net" {
+ ip_range = "10.0.0.0/16"
+ }
+
+ resource "outscale_subnet" "outscale_subnet" {
+ net_id = "${outscale_net.outscale_net.net_id}"
+ ip_range = "10.0.0.0/24"
+ subregion_name = "eu-west-2a"
+ }
+
+ resource "outscale_security_group" "outscale_security_group8" {
+ description = "test vm with nic"
+ security_group_name = "private-sg-1"
+ net_id = outscale_net.outscale_net.net_id
+ }
+
+ resource "outscale_nic" "outscale_nic5" {
+ subnet_id = outscale_subnet.outscale_subnet.subnet_id
+ }
+
+ resource "outscale_vm" "basic" {
+ image_id = "%s"
+ vm_type = "%s"
+ keypair_name = "terraform-basic"
+ placement_subregion_name = "%sa"
+
+ nics {
+ subnet_id = outscale_subnet.outscale_subnet.subnet_id
+ security_group_ids = [outscale_security_group.outscale_security_group8.security_group_id]
+ private_ips {
+ private_ip ="10.0.0.123"
+ is_primary = true
+ }
+ device_number = 0
+ }
+
+ nics {
+ nic_id = outscale_nic.outscale_nic5.nic_id
+ device_number = 1
+ }
+
+ }`, omi, vmType, region)
+}
+
func testAccCheckOutscaleOAPIVMConfigBasicWithNics(omi, vmType string) string {
return fmt.Sprintf(`resource "outscale_net" "outscale_net" {
ip_range = "10.0.0.0/16"
diff --git a/outscale/resource_outscale_volume.go b/outscale/resource_outscale_volume.go
index eac14035d..58d71e9ef 100644
--- a/outscale/resource_outscale_volume.go
+++ b/outscale/resource_outscale_volume.go
@@ -12,8 +12,8 @@ import (
"github.com/terraform-providers/terraform-provider-outscale/utils"
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPIVolume() *schema.Resource {
@@ -183,9 +183,8 @@ func resourceOAPIVolumeRead(d *schema.ResourceData, meta interface{}) error {
}
var resp oscgo.ReadVolumesResponse
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
+ err := resource.Retry(5*time.Minute, func() *resource.RetryError {
r, _, err := conn.VolumeApi.ReadVolumes(context.Background(), &oscgo.ReadVolumesOpts{ReadVolumesRequest: optional.NewInterface(request)})
if err != nil {
if strings.Contains(err.Error(), "RequestLimitExceeded:") {
@@ -288,7 +287,7 @@ func readOAPIVolume(d *schema.ResourceData, volume oscgo.Volume) error {
d.Set("subregion_name", volume.GetSubregionName())
//Commented until backend issues is resolved.
- //d.Set("size", volume.Size)
+ d.Set("size", volume.Size)
d.Set("snapshot_id", volume.GetSnapshotId())
if volume.GetVolumeType() != "" {
diff --git a/outscale/resource_outscale_volume_link.go b/outscale/resource_outscale_volume_link.go
index da6a11ecd..c46151339 100644
--- a/outscale/resource_outscale_volume_link.go
+++ b/outscale/resource_outscale_volume_link.go
@@ -12,9 +12,9 @@ import (
oscgo "github.com/marinsalinas/osc-sdk-go"
"github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/hashcode"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func resourceOutscaleOAPIVolumeLink() *schema.Resource {
@@ -136,9 +136,7 @@ func resourceOAPIVolumeLinkCreate(d *schema.ResourceData, meta interface{}) erro
log.Printf("[DEBUG] Attaching Volume (%s) to Instance (%s)", vID, iID)
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
+ err := resource.Retry(5*time.Minute, func() *resource.RetryError {
var err error
_, _, err = conn.VolumeApi.LinkVolume(context.Background(), &oscgo.LinkVolumeOpts{LinkVolumeRequest: optional.NewInterface(opts)})
if err != nil {
@@ -194,10 +192,6 @@ func isElegibleToLink(volumes []oscgo.Volume, instanceID string) bool {
return elegible
}
-func isElegibleToUnLink(volumes []oscgo.Volume, instanceID string) bool {
- return !isElegibleToLink(volumes, instanceID)
-}
-
func volumeOAPIAttachmentStateRefreshFunc(conn *oscgo.APIClient, volumeID, instanceID string) resource.StateRefreshFunc {
return func() (interface{}, string, error) {
diff --git a/outscale/resource_outscale_volume_link_test.go b/outscale/resource_outscale_volume_link_test.go
index 8b81283bd..9c85a3d75 100644
--- a/outscale/resource_outscale_volume_link_test.go
+++ b/outscale/resource_outscale_volume_link_test.go
@@ -7,34 +7,33 @@ import (
oscgo "github.com/marinsalinas/osc-sdk-go"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIVolumeAttachment_basic(t *testing.T) {
- omi := getOMIByRegion("eu-west-2", "centos").OMI
+ omi := os.Getenv("OUTSCALE_IMAGEID")
region := os.Getenv("OUTSCALE_REGION")
- //var i oscgo.Vm
- //var v oscgo.Volume
+ var i oscgo.Vm
+ var v oscgo.Volume
resource.Test(t, resource.TestCase{
PreCheck: func() {
testAccPreCheck(t)
- skipIfNoOAPI(t)
+
},
Providers: testAccProviders,
CheckDestroy: testAccCheckOAPIVolumeAttachmentDestroy,
Steps: []resource.TestStep{
{
Config: testAccOAPIVolumeAttachmentConfig(omi, "c4.large", region),
- Check: resource.ComposeTestCheckFunc( /*
+ Check: resource.ComposeTestCheckFunc(
resource.TestCheckResourceAttr(
"outscale_volumes_link.ebs_att", "device_name", "/dev/sdh"),
- testAccCheckOSCAPIVMExists(
- "outscale_vm.web", &i),
+ testAccCheckOutscaleOAPIVMExists("outscale_vm.web", &i),
testAccCheckOAPIVolumeAttachmentExists(
- "outscale_volumes_link.ebs_att", &i, &v),*/
+ "outscale_volumes_link.ebs_att", &i, &v),
),
},
},
diff --git a/outscale/resource_outscale_volume_test.go b/outscale/resource_outscale_volume_test.go
index d02bba0b7..8ed94893a 100644
--- a/outscale/resource_outscale_volume_test.go
+++ b/outscale/resource_outscale_volume_test.go
@@ -3,15 +3,16 @@ package outscale
import (
"context"
"fmt"
- "github.com/antihax/optional"
- oscgo "github.com/marinsalinas/osc-sdk-go"
"os"
"strings"
"testing"
"time"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/antihax/optional"
+ oscgo "github.com/marinsalinas/osc-sdk-go"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
func TestAccOutscaleOAPIVolume_basic(t *testing.T) {
@@ -19,10 +20,7 @@ func TestAccOutscaleOAPIVolume_basic(t *testing.T) {
var v oscgo.Volume
resource.Test(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- skipIfNoOAPI(t)
- },
+ PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "outscale_volume.test",
Providers: testAccProviders,
Steps: []resource.TestStep{
@@ -32,6 +30,11 @@ func TestAccOutscaleOAPIVolume_basic(t *testing.T) {
testAccCheckOAPIVolumeExists("outscale_volume.test", &v),
),
},
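+ // Exercise terraform import of the volume created in the previous step.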
+ {
+ ResourceName: "outscale_volume.test",
+ ImportState: true,
+ //ImportStateVerify: true,
+ },
},
})
}
@@ -43,7 +46,7 @@ func TestAccOutscaleOAPIVolume_updateSize(t *testing.T) {
resource.Test(t, resource.TestCase{
PreCheck: func() {
testAccPreCheck(t)
- skipIfNoOAPI(t)
+
},
IDRefreshName: "outscale_volume.test",
Providers: testAccProviders,
@@ -73,7 +76,7 @@ func TestAccOutscaleOAPIVolume_io1Type(t *testing.T) {
resource.Test(t, resource.TestCase{
PreCheck: func() {
testAccPreCheck(t)
- skipIfNoOAPI(t)
+
},
IDRefreshName: "outscale_volume.test-io",
Providers: testAccProviders,
@@ -168,16 +171,3 @@ func testOutscaleOAPIVolumeConfigIO1Type(region string) string {
}
`, region)
}
-
-func testAccOutscaleOAPIVolumeConfigUpdateTags(region, value string) string {
- return fmt.Sprintf(`
- resource "outscale_volume" "outscale_volume" {
- volume_type = "gp2"
- subregion_name = "%sa"
- size = 10
- tags {
- key = "name"
- value = "%s"
- }
- }`, region, value)
-}
diff --git a/outscale/resource_outscale_vpn_connection.go b/outscale/resource_outscale_vpn_connection.go
deleted file mode 100644
index f3bb6d069..000000000
--- a/outscale/resource_outscale_vpn_connection.go
+++ /dev/null
@@ -1,379 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strconv"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIVpnConnection() *schema.Resource {
- return &schema.Resource{
- Create: resourceOutscaleOAPIVpnConnectionCreate,
- Read: resourceOutscaleOAPIVpnConnectionRead,
- Delete: resourceOutscaleOAPIVpnConnectionDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Schema: map[string]*schema.Schema{
- // Arguments
- "client_endpoint_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
-
- "vpn_connection_option": {
- Type: schema.TypeMap,
- Optional: true,
- Computed: true,
- ForceNew: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "static_vpn_static_route_only": {
- Type: schema.TypeString,
- Optional: true,
- Computed: true,
- },
- },
- },
- },
-
- "type": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
-
- "vpn_gateway_id": {
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
-
- // Attributes
-
- "client_endpoint_configuration": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "vpn_static_route": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "destination_ip_range": {
- Type: schema.TypeString,
- Computed: true,
- },
- "type": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
-
- "tag": tagsSchemaComputed(),
- "tags": tagsSchema(),
-
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "vpn_tunnel_description": {
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "accepted_routes_count": {
- Type: schema.TypeInt,
- Computed: true,
- },
- "outscale_side_ip": {
- Type: schema.TypeString,
- Computed: true,
- },
- "state": {
- Type: schema.TypeString,
- Computed: true,
- },
- "comment": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
-
- "vpn_connection_id": {
- Type: schema.TypeString,
- Computed: true,
- },
-
- "request_id": {
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func resourceOutscaleOAPIVpnConnectionCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- cgid, ok := d.GetOk("client_endpoint_id")
- vpngid, ok2 := d.GetOk("vpn_gateway_id")
- typev, ok3 := d.GetOk("type")
- vpn, ok4 := d.GetOk("vpn_connection_option")
-
- if !ok && !ok2 && ok3 {
- return fmt.Errorf("please provide the required attributes client_endpoint_id, vpn_gateway_id and type")
- }
-
- createOpts := &fcu.CreateVpnConnectionInput{
- CustomerGatewayId: aws.String(cgid.(string)),
- Type: aws.String(typev.(string)),
- VpnGatewayId: aws.String(vpngid.(string)),
- }
-
- if ok4 {
- opt := vpn.(map[string]interface{})
- option := opt["static_vpn_static_route_only"]
-
- b, err := strconv.ParseBool(option.(string))
- if err != nil {
- return err
- }
-
- createOpts.Options = &fcu.VpnConnectionOptionsSpecification{
- StaticRoutesOnly: aws.Bool(b),
- }
- }
-
- var resp *fcu.CreateVpnConnectionOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.CreateVpnConnection(createOpts)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return fmt.Errorf("Error creating vpn connection: %s", err)
- }
-
- vpnConnection := resp.VpnConnection
- d.SetId(*vpnConnection.VpnConnectionId)
-
- stateConf := &resource.StateChangeConf{
- Pending: []string{"pending"},
- Target: []string{"available"},
- Refresh: vpnConnectionRefreshFunc(conn, *vpnConnection.VpnConnectionId),
- Timeout: 30 * time.Minute,
- Delay: 10 * time.Second,
- MinTimeout: 10 * time.Second,
- }
-
- _, stateErr := stateConf.WaitForState()
- if stateErr != nil {
- return fmt.Errorf(
- "Error waiting for VPN connection (%s) to become ready: %s",
- *vpnConnection.VpnConnectionId, err)
- }
-
- if err := setTags(conn, d); err != nil {
- return err
- }
-
- return resourceOutscaleOAPIVpnConnectionRead(d, meta)
-}
-
-func vpnConnectionRefreshFunc(conn *fcu.Client, connectionID string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
-
- var resp *fcu.DescribeVpnConnectionsOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnConnections(&fcu.DescribeVpnConnectionsInput{
- VpnConnectionIds: []*string{aws.String(connectionID)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnConnectionID.NotFound") {
- resp = nil
- } else {
- log.Printf("Error on VPNConnectionRefresh: %s", err)
- return nil, "", err
- }
- }
-
- if resp == nil || len(resp.VpnConnections) == 0 {
- return nil, "", nil
- }
-
- connection := resp.VpnConnections[0]
- return connection, *connection.State, nil
- }
-}
-
-func resourceOutscaleOAPIVpnConnectionRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var resp *fcu.DescribeVpnConnectionsOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnConnections(&fcu.DescribeVpnConnectionsInput{
- VpnConnectionIds: []*string{aws.String(d.Id())},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnConnectionID.NotFound") {
- d.SetId("")
- return nil
- }
- log.Printf("[ERROR] Error finding VPN connection: %s", err)
- return err
- }
-
- if len(resp.VpnConnections) != 1 {
- return fmt.Errorf("[ERROR] Error finding VPN connection: %s", d.Id())
- }
-
- vpnConnection := resp.VpnConnections[0]
- if vpnConnection == nil || *vpnConnection.State == "deleted" {
- d.SetId("")
- return nil
- }
- vpn := make(map[string]interface{})
- if vpnConnection.Options != nil {
- vpn["static_vpn_static_route_only"] = strconv.FormatBool(aws.BoolValue(vpnConnection.Options.StaticRoutesOnly))
- } else {
- vpn["static_vpn_static_route_only"] = strconv.FormatBool(false)
- }
- if err := d.Set("vpn_connection_option", vpn); err != nil {
- return err
- }
- d.Set("client_endpoint_configuration", vpnConnection.CustomerGatewayConfiguration)
-
- vpns := make([]map[string]interface{}, len(vpnConnection.Routes))
-
- for k, v := range vpnConnection.Routes {
- route := make(map[string]interface{})
-
- route["destination_ip_range"] = *v.DestinationCidrBlock
- route["type"] = *v.Source
- route["state"] = *v.State
-
- vpns[k] = route
- }
-
- d.Set("vpn_static_route", vpns)
- d.Set("tag", tagsToMap(vpnConnection.Tags))
-
- d.Set("state", vpnConnection.State)
-
- vgws := make([]map[string]interface{}, len(vpnConnection.VgwTelemetry))
-
- for k, v := range vpnConnection.VgwTelemetry {
- vgw := make(map[string]interface{})
-
- vgw["accepted_routes_count"] = *v.AcceptedRouteCount
- vgw["outscale_side_ip"] = *v.OutsideIpAddress
- vgw["state"] = *v.Status
- vgw["comment"] = *v.StatusMessage
-
- vgws[k] = vgw
- }
-
- d.Set("vpn_tunnel_description", vgws)
- d.Set("vpn_connection_id", vpnConnection.VpnConnectionId)
- d.Set("vpn_gateway_id", vpnConnection.VpnGatewayId)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
-
-func resourceOutscaleOAPIVpnConnectionDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.DeleteVpnConnection(&fcu.DeleteVpnConnectionInput{
- VpnConnectionId: aws.String(d.Id()),
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnConnectionID.NotFound") {
- d.SetId("")
- return nil
- }
- fmt.Printf("[ERROR] Error deleting VPN connection: %s", err)
- return err
- }
-
- stateConf := &resource.StateChangeConf{
- Pending: []string{"deleting"},
- Target: []string{"deleted"},
- Refresh: vpnConnectionRefreshFunc(conn, d.Id()),
- Timeout: 30 * time.Minute,
- Delay: 10 * time.Second,
- MinTimeout: 10 * time.Second,
- }
-
- _, stateErr := stateConf.WaitForState()
- if stateErr != nil {
-
- return fmt.Errorf(
- "Error waiting for VPN connection (%s) to delete: %s", d.Id(), stateErr)
- }
-
- return nil
-}
diff --git a/outscale/resource_outscale_vpn_connection_route.go b/outscale/resource_outscale_vpn_connection_route.go
deleted file mode 100644
index 31de7c0b8..000000000
--- a/outscale/resource_outscale_vpn_connection_route.go
+++ /dev/null
@@ -1,174 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
-)
-
-func resourceOutscaleOAPIVpnConnectionRoute() *schema.Resource {
- return &schema.Resource{
- Create: resourceOutscaleOAPIVpnConnectionRouteCreate,
- Read: resourceOutscaleOAPIVpnConnectionRouteRead,
- Delete: resourceOutscaleOAPIVpnConnectionRouteDelete,
-
- Schema: map[string]*schema.Schema{
- "destination_ip_range": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
-
- "vpn_connection_id": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func resourceOutscaleOAPIVpnConnectionRouteCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- createOpts := &fcu.CreateVpnConnectionRouteInput{
- DestinationCidrBlock: aws.String(d.Get("destination_ip_range").(string)),
- VpnConnectionId: aws.String(d.Get("vpn_connection_id").(string)),
- }
-
- // Create the route.
- log.Printf("[DEBUG] Creating VPN connection route")
-
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.CreateVpnConnectionRoute(createOpts)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return fmt.Errorf("Error creating VPN connection route: %s", err)
- }
-
- // Store the ID by the only two data we have available to us.
- d.SetId(fmt.Sprintf("%s:%s", *createOpts.DestinationCidrBlock, *createOpts.VpnConnectionId))
-
- return resourceOutscaleOAPIVpnConnectionRouteRead(d, meta)
-}
-
-func resourceOutscaleOAPIVpnConnectionRouteRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- cidrBlock, vpnConnectionID := resourceOutscaleOAPIVpnConnectionRouteParseID(d.Id())
-
- routeFilters := []*fcu.Filter{
- &fcu.Filter{
- Name: aws.String("route.destination-cidr-block"),
- Values: []*string{aws.String(cidrBlock)},
- },
- &fcu.Filter{
- Name: aws.String("vpn-connection-ID"),
- Values: []*string{aws.String(vpnConnectionID)},
- },
- }
-
- var err error
- var resp *fcu.DescribeVpnConnectionsOutput
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnConnections(&fcu.DescribeVpnConnectionsInput{
- Filters: routeFilters,
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnConnectionID.NotFound") {
- d.SetId("")
- return nil
- }
- log.Printf("[ERROR] Error finding VPN connection route: %s", err)
- return err
- }
- if resp == nil || len(resp.VpnConnections) == 0 {
- return fmt.Errorf("No VPN connections returned")
- }
-
- vpnConnection := resp.VpnConnections[0]
-
- var found bool
- for _, r := range vpnConnection.Routes {
- if *r.DestinationCidrBlock == cidrBlock {
- d.Set("destination_ip_range", *r.DestinationCidrBlock)
- d.Set("vpn_connection_id", *vpnConnection.VpnConnectionId)
- found = true
- }
- }
- if !found {
- // Something other than Terraform eliminated the route.
- d.SetId("")
- }
-
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
-
-func resourceOutscaleOAPIVpnConnectionRouteDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.DeleteVpnConnectionRoute(&fcu.DeleteVpnConnectionRouteInput{
- DestinationCidrBlock: aws.String(d.Get("destination_ip_range").(string)),
- VpnConnectionId: aws.String(d.Get("vpn_connection_id").(string)),
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnConnectionID.NotFound") {
- d.SetId("")
- return nil
- }
- log.Printf("[ERROR] Error deleting VPN connection route: %s", err)
- return err
- }
-
- return nil
-}
-
-func resourceOutscaleOAPIVpnConnectionRouteParseID(ID string) (string, string) {
- parts := strings.SplitN(ID, ":", 2)
- return parts[0], parts[1]
-}
diff --git a/outscale/resource_outscale_vpn_connection_route_test.go b/outscale/resource_outscale_vpn_connection_route_test.go
deleted file mode 100644
index 55fb0b6ca..000000000
--- a/outscale/resource_outscale_vpn_connection_route_test.go
+++ /dev/null
@@ -1,241 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPIVpnConnectionRoute_basic(t *testing.T) {
- t.Skip()
-
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- skipIfNoOAPI(t)
- },
- Providers: testAccProviders,
- CheckDestroy: testAccOutscaleOAPIVpnConnectionRouteDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOutscaleOAPIVpnConnectionRouteConfig(rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOutscaleOAPIVpnConnectionRoute(
- "outscale_vpn_gateway.vpn_gateway",
- "outscale_client_endpoint.customer_gateway",
- "outscale_vpn_connection.vpn_connection",
- "outscale_vpn_connection_route.foo",
- ),
- ),
- },
- resource.TestStep{
- Config: testAccOutscaleOAPIVpnConnectionRouteConfigUpdate(rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOutscaleOAPIVpnConnectionRoute(
- "outscale_vpn_gateway.vpn_gateway",
- "outscale_client_endpoint.customer_gateway",
- "outscale_vpn_connection.vpn_connection",
- "outscale_vpn_connection_route.foo",
- ),
- ),
- },
- },
- })
-}
-
-func testAccOutscaleOAPIVpnConnectionRouteDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_vpn_connection_route" {
- continue
- }
-
- cidrBlock, vpnConnectionID := resourceOutscaleOAPIVpnConnectionRouteParseID(rs.Primary.ID)
-
- routeFilters := []*fcu.Filter{
- &fcu.Filter{
- Name: aws.String("route.destination-cidr-block"),
- Values: []*string{aws.String(cidrBlock)},
- },
- &fcu.Filter{
- Name: aws.String("vpn-connection-id"),
- Values: []*string{aws.String(vpnConnectionID)},
- },
- }
-
- var resp *fcu.DescribeVpnConnectionsOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnConnections(&fcu.DescribeVpnConnectionsInput{
- Filters: routeFilters,
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- if ec2err, ok := err.(awserr.Error); ok && ec2err.Code() == "InvalidVpnConnectionID.NotFound" {
- // not found, all good
- return nil
- }
- return err
- }
-
- var vpnc *fcu.VpnConnection
- if resp != nil {
- // range over the connections and isolate the one we created
- for _, v := range resp.VpnConnections {
- if *v.VpnConnectionId == vpnConnectionID {
- vpnc = v
- }
- }
-
- if vpnc == nil {
- // vpn connection not found, so that's good...
- return nil
- }
-
- if vpnc.State != nil && *vpnc.State == "deleted" {
- return nil
- }
- }
-
- }
- return fmt.Errorf("Fall through error, Check Destroy criteria not met")
-}
-
-func testAccOutscaleOAPIVpnConnectionRoute(
- vpnGatewayResource string,
- customerGatewayResource string,
- vpnConnectionResource string,
- vpnConnectionRouteResource string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[vpnConnectionRouteResource]
- if !ok {
- return fmt.Errorf("Not found: %s", vpnConnectionRouteResource)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
- route, ok := s.RootModule().Resources[vpnConnectionRouteResource]
- if !ok {
- return fmt.Errorf("Not found: %s", vpnConnectionRouteResource)
- }
-
- cidrBlock, vpnConnectionID := resourceOutscaleOAPIVpnConnectionRouteParseID(route.Primary.ID)
-
- routeFilters := []*fcu.Filter{
- &fcu.Filter{
- Name: aws.String("route.destination-cidr-block"),
- Values: []*string{aws.String(cidrBlock)},
- },
- &fcu.Filter{
- Name: aws.String("vpn-connection-id"),
- Values: []*string{aws.String(vpnConnectionID)},
- },
- }
-
- FCU := testAccProvider.Meta().(*OutscaleClient).FCU
-
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = FCU.VM.DescribeVpnConnections(&fcu.DescribeVpnConnectionsInput{
- Filters: routeFilters,
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
-
- return nil
- }
-}
-
-func testAccOutscaleOAPIVpnConnectionRouteConfig(rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_vpn_gateway" "vpn_gateway" {
- tag {
- Name = "vpn_gateway"
- }
- }
-
- resource "outscale_client_endpoint" "customer_gateway" {
- bgp_asn = %d
- ip_address = "182.0.0.1"
- type = "ipsec.1"
- }
-
- resource "outscale_vpn_connection" "vpn_connection" {
- vpn_gateway_id = "${outscale_vpn_gateway.vpn_gateway.id}"
- customer_gateway_id = "${outscale_client_endpoint.customer_gateway.id}"
- type = "ipsec.1"
-
- options {
- static_routes_only = true
- }
- }
-
- resource "outscale_vpn_connection_route" "foo" {
- destination_ip_range = "172.168.10.0/24"
- vpn_connection_id = "${outscale_vpn_connection.vpn_connection.id}"
- }
- `, rBgpAsn)
-}
-
-// Change destination_ip_range
-func testAccOutscaleOAPIVpnConnectionRouteConfigUpdate(rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_vpn_gateway" "vpn_gateway" {
- tag {
- Name = "vpn_gateway"
- }
- }
-
- resource "outscale_client_endpoint" "customer_gateway" {
- bgp_asn = %d
- ip_address = "182.0.0.1"
- type = "ipsec.1"
- }
-
- resource "outscale_vpn_connection" "vpn_connection" {
- vpn_gateway_id = "${outscale_vpn_gateway.vpn_gateway.id}"
- customer_gateway_id = "${outscale_client_endpoint.customer_gateway.id}"
- type = "ipsec.1"
-
- options {
- static_routes_only = true
- }
- }
-
- resource "outscale_vpn_connection_route" "foo" {
- destination_ip_range = "172.168.20.0/24"
- vpn_connection_id = "${outscale_vpn_connection.vpn_connection.id}"
- }
- `, rBgpAsn)
-}
diff --git a/outscale/resource_outscale_vpn_connection_test.go b/outscale/resource_outscale_vpn_connection_test.go
deleted file mode 100644
index 6767be9b6..000000000
--- a/outscale/resource_outscale_vpn_connection_test.go
+++ /dev/null
@@ -1,334 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
-)
-
-func TestAccOutscaleOAPIVpnConnection_basic(t *testing.T) {
- t.Skip()
-
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- var vpn fcu.VpnConnection
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- skipIfNoOAPI(t)
- },
- IDRefreshName: "outscale_vpn_connection.foo",
- Providers: testAccProviders,
- CheckDestroy: testAccOutscaleOAPIVpnConnectionDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOutscaleOAPIVpnConnectionConfig(rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOutscaleOAPIVpnConnection(
- "outscale_net.vpc",
- "outscale_vpn_gateway.vpn_gateway",
- "outscale_client_endpoint.customer_gateway",
- "outscale_vpn_connection.foo",
- &vpn,
- ),
- ),
- },
- },
- })
-}
-
-func TestAccOutscaleOAPIVpnConnection_withoutStaticRoutes(t *testing.T) {
- t.Skip()
-
- rInt := acctest.RandInt()
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- var vpn fcu.VpnConnection
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- skipIfNoOAPI(t)
- },
- IDRefreshName: "outscale_vpn_connection.foo",
- Providers: testAccProviders,
- CheckDestroy: testAccOutscaleOAPIVpnConnectionDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOutscaleOAPIVpnConnectionConfigUpdate(rInt, rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOutscaleOAPIVpnConnection(
- "outscale_net.vpc",
- "outscale_vpn_gateway.vpn_gateway",
- "outscale_client_endpoint.customer_gateway",
- "outscale_vpn_connection.foo",
- &vpn,
- ),
- resource.TestCheckResourceAttr("outscale_vpn_connection.foo", "static_routes_only", "false"),
- ),
- },
- },
- })
-}
-
-func TestAccOutscaleOAPIVpnConnection_disappears(t *testing.T) {
- t.Skip()
-
- rBgpAsn := acctest.RandIntRange(64512, 65534)
- var vpn fcu.VpnConnection
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- skipIfNoOAPI(t)
- },
- Providers: testAccProviders,
- CheckDestroy: testAccOutscaleOAPIVpnConnectionDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOutscaleOAPIVpnConnectionConfig(rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOutscaleOAPIVpnConnection(
- "outscale_net.vpc",
- "outscale_vpn_gateway.vpn_gateway",
- "outscale_client_endpoint.customer_gateway",
- "outscale_vpn_connection.foo",
- &vpn,
- ),
- testAccOutscaleOAPIVpnConnectionDisappears(&vpn),
- ),
- ExpectNonEmptyPlan: true,
- },
- },
- })
-}
-
-func testAccOutscaleOAPIVpnConnectionDisappears(connection *fcu.VpnConnection) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
-
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.DeleteVpnConnection(&fcu.DeleteVpnConnectionInput{
- VpnConnectionId: connection.VpnConnectionId,
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- if fcuerr, ok := err.(awserr.Error); ok && fcuerr.Code() == "InvalidVpnConnectionID.NotFound" {
- return nil
- }
- return err
- }
-
- return resource.Retry(40*time.Minute, func() *resource.RetryError {
- opts := &fcu.DescribeVpnConnectionsInput{
- VpnConnectionIds: []*string{connection.VpnConnectionId},
- }
-
- var resp *fcu.DescribeVpnConnectionsOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnConnections(opts)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- cgw, ok := err.(awserr.Error)
- if ok && cgw.Code() == "InvalidVpnConnectionID.NotFound" {
- return nil
- }
- if ok && cgw.Code() == "IncorrectState" {
- return resource.RetryableError(fmt.Errorf(
- "Waiting for VPN Connection to be in the correct state: %v", connection.VpnConnectionId))
- }
- return resource.NonRetryableError(
- fmt.Errorf("Error retrieving VPN Connection: %s", err))
- }
- if *resp.VpnConnections[0].State == "deleted" {
- return nil
- }
- return resource.RetryableError(fmt.Errorf(
- "Waiting for VPN Connection: %v", connection.VpnConnectionId))
- })
- }
-}
-
-func testAccOutscaleOAPIVpnConnectionDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_vpn_connection" {
- continue
- }
-
- var resp *fcu.DescribeVpnConnectionsOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnConnections(&fcu.DescribeVpnConnectionsInput{
- VpnConnectionIds: []*string{aws.String(rs.Primary.ID)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- if fcuerr, ok := err.(awserr.Error); ok && fcuerr.Code() == "InvalidVpnConnectionID.NotFound" {
- // not found
- return nil
- }
- return err
- }
-
- var vpn *fcu.VpnConnection
- for _, v := range resp.VpnConnections {
- if v.VpnConnectionId != nil && *v.VpnConnectionId == rs.Primary.ID {
- vpn = v
- }
- }
-
- if vpn == nil {
- // vpn connection not found
- return nil
- }
-
- if vpn.State != nil && *vpn.State == "deleted" {
- return nil
- }
-
- }
-
- return nil
-}
-
-func testAccOutscaleOAPIVpnConnection(
- vpcResource string,
- vpnGatewayResource string,
- customerGatewayResource string,
- vpnConnectionResource string,
- vpnConnection *fcu.VpnConnection) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[vpnConnectionResource]
- if !ok {
- return fmt.Errorf("Not found: %s", vpnConnectionResource)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
- connection, ok := s.RootModule().Resources[vpnConnectionResource]
- if !ok {
- return fmt.Errorf("Not found: %s", vpnConnectionResource)
- }
-
- fcuconn := testAccProvider.Meta().(*OutscaleClient).FCU
-
- var resp *fcu.DescribeVpnConnectionsOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = fcuconn.VM.DescribeVpnConnections(&fcu.DescribeVpnConnectionsInput{
- VpnConnectionIds: []*string{aws.String(connection.Primary.ID)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
-
- *vpnConnection = *resp.VpnConnections[0]
-
- return nil
- }
-}
-
-func testAccOutscaleOAPIVpnConnectionConfig(rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_vpn_gateway" "vpn_gateway" {
- tag {
- Name = "vpn_gateway"
- }
- }
-
- resource "outscale_client_endpoint" "customer_gateway" {
- bgp_asn = %d
- ip_address = "178.0.0.1"
- type = "ipsec.1"
- tag {
- Name = "main-customer-gateway"
- }
- }
-
- resource "outscale_vpn_connection" "foo" {
- vpn_gateway_id = "${outscale_vpn_gateway.vpn_gateway.id}"
- customer_gateway_id = "${outscale_client_endpoint.customer_gateway.id}"
- type = "ipsec.1"
- options {
- static_routes_only = true
- }
- }
- `, rBgpAsn)
-}
-
-// Change static_routes_only to be false, forcing a refresh.
-func testAccOutscaleOAPIVpnConnectionConfigUpdate(rInt, rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_vpn_gateway" "vpn_gateway" {
- tag {
- Name = "vpn_gateway"
- }
- }
-
- resource "outscale_client_endpoint" "customer_gateway" {
- bgp_asn = %d
- ip_address = "178.0.0.1"
- type = "ipsec.1"
- tag {
- Name = "main-customer-gateway-%d"
- }
- }
-
- resource "outscale_vpn_connection" "foo" {
- vpn_gateway_id = "${outscale_vpn_gateway.vpn_gateway.id}"
- customer_gateway_id = "${outscale_client_endpoint.customer_gateway.id}"
- type = "ipsec.1"
- static_routes_only = false
- }
- `, rBgpAsn, rInt)
-}
diff --git a/outscale/resource_outscale_vpn_gateway.go b/outscale/resource_outscale_vpn_gateway.go
deleted file mode 100644
index 4298b4a86..000000000
--- a/outscale/resource_outscale_vpn_gateway.go
+++ /dev/null
@@ -1,248 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIVpnGateway() *schema.Resource {
- return &schema.Resource{
- Create: resourceOutscaleOAPIVpnGatewayCreate,
- Read: resourceOutscaleOAPIVpnGatewayRead,
- Delete: resourceOutscaleOAPIVpnGatewayDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Schema: map[string]*schema.Schema{
- "type": &schema.Schema{
- Type: schema.TypeString,
- Optional: true,
- ForceNew: true,
- },
-
- "lin_to_vpn_gateway_link": &schema.Schema{
- Type: schema.TypeList,
- Computed: true,
- Elem: &schema.Resource{
- Schema: map[string]*schema.Schema{
- "state": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "lin_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- },
- },
- "state": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "vpn_gateway_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "lin_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "tag": tagsSchemaComputed(),
- "tags": tagsSchema(),
- },
- }
-}
-
-func resourceOutscaleOAPIVpnGatewayCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- createOpts := &fcu.CreateVpnGatewayInput{
- Type: aws.String("ipsec.1"),
- }
-
- var resp *fcu.CreateVpnGatewayOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.CreateVpnGateway(createOpts)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
- if err != nil {
- return fmt.Errorf("Error creating VPN gateway: %s", err)
- }
-
- vpnGateway := resp.VpnGateway
- d.SetId(*vpnGateway.VpnGatewayId)
-
- if d.IsNewResource() {
- if err := setTags(conn, d); err != nil {
- return err
- }
- d.SetPartial("tag")
- }
-
- return resourceOutscaleOAPIVpnGatewayRead(d, meta)
-}
-
-func resourceOutscaleOAPIVpnGatewayRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnGateways(&fcu.DescribeVpnGatewaysInput{
- VpnGatewayIds: []*string{aws.String(d.Id())},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnGatewayID.NotFound") {
- d.SetId("")
- return nil
- }
- fmt.Printf("\n\n[ERROR] Error finding VpnGateway: %s", err)
- return err
- }
-
- vpnGateway := resp.VpnGateways[0]
- if vpnGateway == nil || *vpnGateway.State == "deleted" {
- d.SetId("")
- return nil
- }
-
- vpnAttachment := oapiVpnGatewayGetAttachment(vpnGateway)
- if len(vpnGateway.VpcAttachments) == 0 || *vpnAttachment.State == "detached" {
- d.Set("lin_id", "")
- } else {
- d.Set("lin_id", *vpnAttachment.VpcId)
- }
-
- vs := make([]map[string]interface{}, len(vpnGateway.VpcAttachments))
-
- for k, v := range vpnGateway.VpcAttachments {
- vp := make(map[string]interface{})
-
- vp["state"] = *v.State
- vp["lin_id"] = *v.VpcId
-
- vs[k] = vp
- }
-
- d.Set("vpn_gateway_id", vpnGateway.VpnGatewayId)
- d.Set("lin_to_vpn_gateway_link", vs)
- d.Set("state", vpnGateway.State)
- d.Set("tag", tagsToMap(vpnGateway.Tags))
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
-
-func resourceOutscaleOAPIVpnGatewayDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- return resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err := conn.VM.DeleteVpnGateway(&fcu.DeleteVpnGatewayInput{
- VpnGatewayId: aws.String(d.Id()),
- })
- if err == nil {
- return nil
- }
-
- ec2err, ok := err.(awserr.Error)
- if !ok {
- return resource.RetryableError(err)
- }
-
- switch ec2err.Code() {
- case "InvalidVpnGatewayID.NotFound":
- return nil
- case "IncorrectState":
- return resource.RetryableError(err)
- }
-
- return resource.NonRetryableError(err)
- })
-}
-
-// vpnGatewayAttachStateRefreshFunc returns a resource.StateRefreshFunc that is used to watch
-// the state of a VPN gateway's attachment
-func vpnGatewayAttachStateRefreshFunc(conn *fcu.Client, id string, expected string) resource.StateRefreshFunc {
- var start time.Time
- return func() (interface{}, string, error) {
- if start.IsZero() {
- start = time.Now()
- }
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnGateways(&fcu.DescribeVpnGatewaysInput{
- VpnGatewayIds: []*string{aws.String(id)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnGatewayID.NotFound") {
- resp = nil
- } else {
- fmt.Printf("[ERROR] Error on VpnGatewayStateRefresh: %s", err)
- return nil, "", err
- }
- }
-
- if resp == nil {
- return nil, "", nil
- }
-
- vpnGateway := resp.VpnGateways[0]
- if len(vpnGateway.VpcAttachments) == 0 {
- return vpnGateway, "detached", nil
- }
-
- vpnAttachment := oapiVpnGatewayGetAttachment(vpnGateway)
- return vpnGateway, *vpnAttachment.State, nil
- }
-}
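
As the doc comment above notes, `vpnGatewayAttachStateRefreshFunc` is meant to be plugged into a `resource.StateChangeConf`. The sketch below shows that wiring in isolation; it assumes the same imports as this file (`time`, the `resource` helper package, and the `fcu` client) and mirrors the Pending/Target/WaitForState pattern the link resource below uses. It is not provider code.

```go
// Sketch only: block until the gateway's attachment state, as reported by the
// refresh function above, reaches "detached".
func waitForVpnGatewayDetach(conn *fcu.Client, vgwID string) error {
	stateConf := &resource.StateChangeConf{
		Pending:    []string{"attached", "detaching", "available"},
		Target:     []string{"detached"},
		Refresh:    vpnGatewayAttachStateRefreshFunc(conn, vgwID, "detached"),
		Timeout:    5 * time.Minute,
		MinTimeout: 5 * time.Second,
	}
	_, err := stateConf.WaitForState()
	return err
}
```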
-
-func oapiVpnGatewayGetAttachment(vgw *fcu.VpnGateway) *fcu.VpcAttachment {
- for _, v := range vgw.VpcAttachments {
- if *v.State == "attached" {
- return v
- }
- }
- return &fcu.VpcAttachment{State: aws.String("detached")}
-}
diff --git a/outscale/resource_outscale_vpn_gateway_link.go b/outscale/resource_outscale_vpn_gateway_link.go
deleted file mode 100644
index 8fe6ac03c..000000000
--- a/outscale/resource_outscale_vpn_gateway_link.go
+++ /dev/null
@@ -1,276 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/hashcode"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIVpnGatewayLink() *schema.Resource {
- return &schema.Resource{
- Create: resourceOutscaleOAPIVpnGatewayLinkCreate,
- Read: resourceOutscaleOAPIVpnGatewayLinkRead,
- Delete: resourceOutscaleOAPIVpnGatewayLinkDelete,
- Importer: &schema.ResourceImporter{
- State: schema.ImportStatePassthrough,
- },
-
- Schema: map[string]*schema.Schema{
- "lin_id": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "vpn_gateway_id": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "state": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func resourceOutscaleOAPIVpnGatewayLinkRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- vgwID := d.Get("vpn_gateway_id").(string)
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnGateways(&fcu.DescribeVpnGatewaysInput{
- VpnGatewayIds: []*string{aws.String(vgwID)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- awsErr, ok := err.(awserr.Error)
- if ok && awsErr.Code() == "InvalidVPNGatewayID.NotFound" {
- log.Printf("[WARN] VPN Gateway %q not found.", vgwID)
- d.SetId("")
- return nil
- }
- return err
- }
-
- vgw := resp.VpnGateways[0]
- if *vgw.State == "deleted" {
- log.Printf("[INFO] VPN Gateway %q appears to have been deleted.", vgwID)
- d.SetId("")
- return nil
- }
-
- vga := oapiVpnGatewayGetAttachment(vgw)
- if len(vgw.VpcAttachments) == 0 || *vga.State == "detached" {
- d.Set("lin_id", "")
- return nil
- }
-
- d.Set("lin_id", *vga.VpcId)
- d.Set("state", vga.State)
- d.Set("request_id", resp.RequestId)
-
- return nil
-}
-
-func resourceOutscaleOAPIVpnGatewayLinkCreate(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- vpcID := d.Get("lin_id").(string)
- vgwID := d.Get("vpn_gateway_id").(string)
-
- createOpts := &fcu.AttachVpnGatewayInput{
- VpcId: aws.String(vpcID),
- VpnGatewayId: aws.String(vgwID),
- }
- log.Printf("[DEBUG] VPN Gateway attachment options: %#v", *createOpts)
-
- var err error
-
- err = resource.Retry(30*time.Second, func() *resource.RetryError {
- _, err = conn.VM.AttachVpnGateway(createOpts)
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnGatewayID.NotFound") {
- return resource.RetryableError(
- fmt.Errorf("Gateway not found, retry for eventual consistancy"))
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("Error attaching VPN Gateway %q to VPC %q: %s",
- vgwID, vpcID, err)
- }
-
- d.SetId(vpnGatewayAttachmentID(vpcID, vgwID))
-
- stateConf := &resource.StateChangeConf{
- Pending: []string{"detached", "attaching"},
- Target: []string{"attached"},
- Refresh: vpnGatewayAttachmentStateRefresh(conn, vpcID, vgwID),
- Timeout: 15 * time.Minute,
- Delay: 10 * time.Second,
- MinTimeout: 5 * time.Second,
- }
-
- _, err = stateConf.WaitForState()
- if err != nil {
- return fmt.Errorf("Error waiting for VPN Gateway %q to attach to VPC %q: %s",
- vgwID, vpcID, err)
- }
- log.Printf("[DEBUG] VPN Gateway %q attached to VPC %q.", vgwID, vpcID)
-
- return resourceOutscaleOAPIVpnGatewayLinkRead(d, meta)
-}
-
-func resourceOutscaleOAPIVpnGatewayLinkDelete(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- // Get the old VPC ID to detach from
- vpcID, _ := d.GetChange("lin_id")
-
- if vpcID.(string) == "" {
- fmt.Printf(
- "[DEBUG] Not detaching VPN Gateway '%s' as no VPC ID is set",
- d.Get("vpn_gateway_id").(string))
- return nil
- }
-
- fmt.Printf(
- "[INFO] Detaching VPN Gateway '%s' from VPC '%s'",
- d.Get("vpn_gateway_id").(string),
- vpcID.(string))
-
- wait := true
-
- var err error
- err = resource.Retry(30*time.Second, func() *resource.RetryError {
- _, err = conn.VM.DetachVpnGateway(&fcu.DetachVpnGatewayInput{
- VpnGatewayId: aws.String(d.Get("vpn_gateway_id").(string)),
- VpcId: aws.String(vpcID.(string)),
- })
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnGatewayID.NotFound") {
- return resource.RetryableError(
- fmt.Errorf("Gateway not found, retry for eventual consistancy"))
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnGatewayID.NotFound") {
- err = nil
- wait = false
- } else if strings.Contains(fmt.Sprint(err), "InvalidVpnGatewayAttachment.NotFound") {
- err = nil
- wait = false
- }
-
- if err != nil {
- return err
- }
- }
-
- if !wait {
- return nil
- }
-
- // Wait for it to be fully detached before continuing
- log.Printf("[DEBUG] Waiting for VPN gateway (%s) to detach", d.Get("vpn_gateway_id").(string))
- stateConf := &resource.StateChangeConf{
- Pending: []string{"attached", "detaching", "available"},
- Target: []string{"detached"},
- Refresh: vpnGatewayAttachStateRefreshFunc(conn, d.Get("vpn_gateway_id").(string), "detached"),
- Timeout: 5 * time.Minute,
- }
- if _, err := stateConf.WaitForState(); err != nil {
- return fmt.Errorf(
- "Error waiting for vpn gateway (%s) to detach: %s",
- d.Get("vpn_gateway_id").(string), err)
- }
-
- return nil
-}
-
-func vpnGatewayAttachmentStateRefresh(conn *fcu.Client, vpcID, vgwID string) resource.StateRefreshFunc {
- return func() (interface{}, string, error) {
-
- var err error
- var resp *fcu.DescribeVpnGatewaysOutput
- err = resource.Retry(30*time.Second, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnGateways(&fcu.DescribeVpnGatewaysInput{
- Filters: []*fcu.Filter{
- &fcu.Filter{
- Name: aws.String("attachment.vpc-id"),
- Values: []*string{aws.String(vpcID)},
- },
- },
- VpnGatewayIds: []*string{aws.String(vgwID)},
- })
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "InvalidVpnGatewayID.NotFound") {
- return resource.RetryableError(
- fmt.Errorf("Gateway not found, retry for eventual consistancy"))
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- awsErr, ok := err.(awserr.Error)
- if ok {
- switch awsErr.Code() {
- case "InvalidVPNGatewayID.NotFound":
- fallthrough
- case "InvalidVpnGatewayAttachment.NotFound":
- return nil, "", nil
- }
- }
-
- return nil, "", err
- }
-
- vgw := resp.VpnGateways[0]
- if len(vgw.VpcAttachments) == 0 {
- return vgw, "detached", nil
- }
-
- vga := oapiVpnGatewayGetAttachment(vgw)
-
- log.Printf("[DEBUG] VPN Gateway %q attachment status: %s", vgwID, *vga.State)
- return vgw, *vga.State, nil
- }
-}
-
-func vpnGatewayAttachmentID(vpcID, vgwID string) string {
- return fmt.Sprintf("vpn-attachment-%x", hashcode.String(fmt.Sprintf("%s-%s", vpcID, vgwID)))
-}
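
Every FCU call in these deleted files repeats the same inline retry on `RequestLimitExceeded`. Purely as an illustration of how that duplication could be factored out (the `retryOnThrottle` helper is hypothetical, not something the provider defines), assuming the same `strings`/`time`/`resource` imports used above:

```go
// retryOnThrottle is a hypothetical helper: it factors out the
// retry-on-"RequestLimitExceeded" wrapping that every FCU call above repeats.
func retryOnThrottle(timeout time.Duration, call func() error) error {
	return resource.Retry(timeout, func() *resource.RetryError {
		if err := call(); err != nil {
			if strings.Contains(err.Error(), "RequestLimitExceeded:") {
				return resource.RetryableError(err)
			}
			return resource.NonRetryableError(err)
		}
		return nil
	})
}

// Example usage against the same client the resources above use:
//   err := retryOnThrottle(5*time.Minute, func() error {
//       _, e := conn.VM.DeleteVpnGateway(&fcu.DeleteVpnGatewayInput{VpnGatewayId: aws.String(id)})
//       return e
//   })
```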
diff --git a/outscale/resource_outscale_vpn_gateway_link_test.go b/outscale/resource_outscale_vpn_gateway_link_test.go
deleted file mode 100644
index 52bca7789..000000000
--- a/outscale/resource_outscale_vpn_gateway_link_test.go
+++ /dev/null
@@ -1,190 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleOAPIVpnGatewayAttachment_basic(t *testing.T) {
- t.Skip()
-
- var vpc fcu.Vpc
- var vgw fcu.VpnGateway
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- skipIfNoOAPI(t)
- },
- IDRefreshName: "outscale_vpn_gateway_link.test",
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPIVpnGatewayAttachmentDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOAPIVpnGatewayAttachmentConfig,
- Check: resource.ComposeTestCheckFunc(
- // testAccCheckOutscaleOAPILinExists(
- // "outscale_net.test",
- // &vpc), TODO: fix once we develop this resource
- testAccCheckOAPIVpnGatewayExists(
- "outscale_vpn_gateway.test",
- &vgw),
- testAccCheckOAPIVpnGatewayAttachmentExists(
- "outscale_vpn_gateway_link.test",
- &vpc, &vgw),
- ),
- },
- },
- })
-}
-
-func TestAccOutscaleOAPIVpnGatewayAttachment_deleted(t *testing.T) {
- t.Skip()
-
- var vpc fcu.Vpc
- var vgw fcu.VpnGateway
-
- testDeleted := func(n string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- _, ok := s.RootModule().Resources[n]
- if ok {
- return fmt.Errorf("expected vpn gateway attachment resource %q to be deleted", n)
- }
- return nil
- }
- }
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- skipIfNoOAPI(t)
- },
- IDRefreshName: "outscale_vpn_gateway_link.test",
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPIVpnGatewayAttachmentDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOAPIVpnGatewayAttachmentConfig,
- Check: resource.ComposeTestCheckFunc(
- // testAccCheckOutscaleOAPILinExists(
- // "outscale_net.test",
- // &vpc), TODO: Fix once we develop this resource
- testAccCheckOAPIVpnGatewayExists(
- "outscale_vpn_gateway.test",
- &vgw),
- testAccCheckOAPIVpnGatewayAttachmentExists(
- "outscale_vpn_gateway_link.test",
- &vpc, &vgw),
- ),
- },
- resource.TestStep{
- Config: testAccNoOAPIVpnGatewayAttachmentConfig,
- Check: resource.ComposeTestCheckFunc(
- testDeleted("outscale_vpn_gateway_link.test"),
- ),
- },
- },
- })
-}
-
-func testAccCheckOAPIVpnGatewayAttachmentExists(n string, vpc *fcu.Vpc, vgw *fcu.VpnGateway) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
-
- vpcID := rs.Primary.Attributes["lin_id"]
- vgwID := rs.Primary.Attributes["vpn_gateway_id"]
-
- if len(vgw.VpcAttachments) == 0 {
- return fmt.Errorf("vpn gateway %q has no attachments", vgwID)
- }
-
- if *vgw.VpcAttachments[0].State != "attached" {
- return fmt.Errorf("Expected VPN Gateway %q to be in attached state, but got: %q",
- vgwID, *vgw.VpcAttachments[0].State)
- }
-
- if *vgw.VpcAttachments[0].VpcId != *vpc.VpcId {
- return fmt.Errorf("Expected VPN Gateway %q to be attached to VPC %q, but got: %q",
- vgwID, vpcID, *vgw.VpcAttachments[0].VpcId)
- }
-
- return nil
- }
-}
-
-func testAccCheckOAPIVpnGatewayAttachmentDestroy(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_vpn_gateway_link" {
- continue
- }
-
- vgwID := rs.Primary.Attributes["vpn_gateway_id"]
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnGateways(&fcu.DescribeVpnGatewaysInput{
- VpnGatewayIds: []*string{aws.String(vgwID)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
-
- vgw := resp.VpnGateways[0]
- if *vgw.VpcAttachments[0].State != "detached" {
- return fmt.Errorf("Expected VPN Gateway %q to be in detached state, but got: %q",
- vgwID, *vgw.VpcAttachments[0].State)
- }
- }
-
- return nil
-}
-
-const testAccNoOAPIVpnGatewayAttachmentConfig = `
- resource "outscale_net" "test" {
- cidr_block = "10.0.0.0/16"
- }
-
- resource "outscale_vpn_gateway" "test" {}
-`
-
-const testAccOAPIVpnGatewayAttachmentConfig = `
- resource "outscale_net" "test" {
- ip_range = "10.0.0.0/16"
- }
-
- resource "outscale_vpn_gateway" "test" {
- type = "ipsec.1"
- }
-
- resource "outscale_vpn_gateway_link" "test" {
- net_id = "${outscale_net.test.id}"
- vpn_gateway_id = "${outscale_vpn_gateway.test.id}"
- }
-`
diff --git a/outscale/resource_outscale_vpn_gateway_route_propagation.go b/outscale/resource_outscale_vpn_gateway_route_propagation.go
deleted file mode 100644
index 108322e72..000000000
--- a/outscale/resource_outscale_vpn_gateway_route_propagation.go
+++ /dev/null
@@ -1,147 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "log"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func resourceOutscaleOAPIVpnGatewayRoutePropagation() *schema.Resource {
- return &schema.Resource{
- Create: resourceOutscaleOAPIVpnGatewayRoutePropagationEnable,
- Read: resourceOutscaleOAPIVpnGatewayRoutePropagationRead,
- Delete: resourceOutscaleOAPIVpnGatewayRoutePropagationDisable,
-
- Schema: map[string]*schema.Schema{
- "vpn_gateway_id": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "route_table_id": &schema.Schema{
- Type: schema.TypeString,
- Required: true,
- ForceNew: true,
- },
- "request_id": &schema.Schema{
- Type: schema.TypeString,
- Computed: true,
- },
- },
- }
-}
-
-func resourceOutscaleOAPIVpnGatewayRoutePropagationEnable(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- gwID := d.Get("gateway_id").(string)
- rtID := d.Get("route_table_id").(string)
-
- log.Printf("\n\n[INFO] Enabling VGW propagation from %s to %s", gwID, rtID)
-
- var err error
- var resp *fcu.EnableVgwRoutePropagationOutput
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.EnableVgwRoutePropagation(&fcu.EnableVgwRoutePropagationInput{
- GatewayId: aws.String(gwID),
- RouteTableId: aws.String(rtID),
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("error enabling VGW propagation: %s", err)
- }
-
- d.SetId(fmt.Sprintf("%s_%s", gwID, rtID))
- d.Set("request_id", *resp.RequestId)
-
- return nil
-}
-
-func resourceOutscaleOAPIVpnGatewayRoutePropagationDisable(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- gwID := d.Get("gateway_id").(string)
- rtID := d.Get("route_table_id").(string)
-
- log.Printf("\n\n[INFO] Disabling VGW propagation from %s to %s", gwID, rtID)
-
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.DisableVgwRoutePropagation(&fcu.DisableVgwRoutePropagationInput{
- GatewayId: aws.String(gwID),
- RouteTableId: aws.String(rtID),
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return fmt.Errorf("error disabling VGW propagation: %s", err)
- }
-
- d.SetId("")
- return nil
-}
-
-func resourceOutscaleOAPIVpnGatewayRoutePropagationRead(d *schema.ResourceData, meta interface{}) error {
- conn := meta.(*OutscaleClient).FCU
-
- gwID := d.Get("gateway_id").(string)
- rtID := d.Get("route_table_id").(string)
-
- var resp *fcu.DescribeRouteTablesOutput
- var err error
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeRouteTables(&fcu.DescribeRouteTablesInput{
- RouteTableIds: []*string{aws.String(rtID)},
- })
- if err != nil {
- if strings.Contains(fmt.Sprint(err), "RequestLimitExceeded") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- rt := resp.RouteTables[0]
-
- exists := false
- for _, vgw := range rt.PropagatingVgws {
- if *vgw.GatewayId == gwID {
- exists = true
- }
- }
- if !exists {
- log.Printf("\n\n[INFO] %s is no longer propagating to %s, so dropping route propagation from state", rtID, gwID)
- d.SetId("")
- return nil
- }
-
- return nil
-}
diff --git a/outscale/resource_outscale_vpn_gateway_route_propagation_test.go b/outscale/resource_outscale_vpn_gateway_route_propagation_test.go
deleted file mode 100644
index 998f253ad..000000000
--- a/outscale/resource_outscale_vpn_gateway_route_propagation_test.go
+++ /dev/null
@@ -1,151 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
-
- "github.com/hashicorp/terraform/helper/acctest"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleOAPIVpnRoutePropagation_basic(t *testing.T) {
- t.Skip()
- rBgpAsn := acctest.RandIntRange(64512, 65534)
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- testAccPreCheck(t)
- skipIfNoOAPI(t)
- },
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPIVpnRoutePropagationDestroy,
- Steps: []resource.TestStep{
- {
- Config: testAccOutscaleOAPIVpnRoutePropagationConfig(rBgpAsn),
- Check: resource.ComposeTestCheckFunc(
- testAccOutscaleOAPIVpnRoutePropagation(
- "outscale_vpn_gateway_route_propagation.foo",
- ),
- ),
- },
- },
- })
-}
-
-func testAccCheckOAPIVpnRoutePropagationDestroy(s *terraform.State) error {
- FCU := testAccProvider.Meta().(*OutscaleClient).FCU
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_vpn_gateway_route_propagation" {
- continue
- }
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = FCU.VM.DescribeVpnGateways(&fcu.DescribeVpnGatewaysInput{
- VpnGatewayIds: []*string{aws.String(rs.Primary.Attributes["gateway_id"])},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return nil
- })
-
- if err != nil {
- return err
- }
-
- if len(resp.VpnGateways) > 0 {
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err := FCU.VM.DeleteVpnGateway(&fcu.DeleteVpnGatewayInput{
- VpnGatewayId: resp.VpnGateways[0].VpnGatewayId,
- })
- if err == nil {
- return nil
- }
-
- ec2err, ok := err.(awserr.Error)
- if !ok {
- return resource.RetryableError(err)
- }
-
- switch ec2err.Code() {
- case "InvalidVpnGatewayID.NotFound":
- return nil
- case "IncorrectState":
- return resource.RetryableError(err)
- }
-
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return fmt.Errorf("ERROR => %s", err)
- }
-
- } else {
- return nil
- }
- }
-
- return nil
-}
-
-func testAccOutscaleOAPIVpnRoutePropagation(routeProp string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[routeProp]
- if !ok {
- return fmt.Errorf("Not found: %s", routeProp)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
-
- return nil
- }
-}
-
-func testAccOutscaleOAPIVpnRoutePropagationConfig(rBgpAsn int) string {
- return fmt.Sprintf(`
- resource "outscale_net" "outscale_net" {
- count = 1
- ip_range = "10.0.0.0/16"
- }
-
- resource "outscale_vpn_gateway" "outscale_vpn_gateway" {
- type = "ipsec.1"
- type = "ipsec.1"
- type = "ipsec.1"
- }
-
- resource "outscale_vpn_gateway_link" "test" {
- lin_id = "${outscale_net.outscale_net.id}"
- vpn_gateway_id = "${outscale_vpn_gateway.outscale_vpn_gateway.id}"
- }
-
- resource "outscale_route_table" "outscale_route_table" {
- net_id = "${outscale_net.outscale_net.id}"
- }
-
- resource "outscale_vpn_gateway_route_propagation" "foo" {
- vpn_gateway_id = "${outscale_vpn_gateway.outscale_vpn_gateway.vpn_gateway_id}"
- route_table_id = "${outscale_route_table.outscale_route_table.route_table_id}"
- }
- `)
-}
diff --git a/outscale/resource_outscale_vpn_gateway_test.go b/outscale/resource_outscale_vpn_gateway_test.go
deleted file mode 100644
index d5ba460ff..000000000
--- a/outscale/resource_outscale_vpn_gateway_test.go
+++ /dev/null
@@ -1,263 +0,0 @@
-package outscale
-
-import (
- "fmt"
- "strings"
- "testing"
- "time"
-
- "github.com/aws/aws-sdk-go/aws"
- "github.com/aws/aws-sdk-go/aws/awserr"
- "github.com/hashicorp/terraform/helper/resource"
- "github.com/hashicorp/terraform/terraform"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func TestAccOutscaleOAPIVpnGateway_basic(t *testing.T) {
- t.Skip()
- var v, v2 fcu.VpnGateway
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- IDRefreshName: "outscale_vpn_gateway.foo",
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPIVpnGatewayDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOAPIVpnGatewayConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOAPIVpnGatewayExists(
- "outscale_vpn_gateway.foo", &v),
- ),
- },
-
- resource.TestStep{
- Config: testAccOAPIVpnGatewayConfigChangeVPC,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOAPIVpnGatewayExists(
- "outscale_vpn_gateway.foo", &v2),
- ),
- },
- },
- })
-}
-func TestAccOutscaleOAPIVpnGateway_delete(t *testing.T) {
- t.Skip()
-
- var vpnGateway fcu.VpnGateway
-
- testDeleted := func(r string) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- _, ok := s.RootModule().Resources[r]
- if ok {
- return fmt.Errorf("VPN Gateway %q should have been deleted", r)
- }
- return nil
- }
- }
-
- resource.Test(t, resource.TestCase{
- PreCheck: func() {
- skipIfNoOAPI(t)
- testAccPreCheck(t)
- },
- IDRefreshName: "outscale_vpn_gateway.foo",
- Providers: testAccProviders,
- CheckDestroy: testAccCheckOAPIVpnGatewayDestroy,
- Steps: []resource.TestStep{
- resource.TestStep{
- Config: testAccOAPIVpnGatewayConfig,
- Check: resource.ComposeTestCheckFunc(
- testAccCheckOAPIVpnGatewayExists("outscale_vpn_gateway.foo", &vpnGateway)),
- },
- resource.TestStep{
- Config: testAccOAPINoVpnGatewayConfig,
- Check: resource.ComposeTestCheckFunc(testDeleted("outscale_vpn_gateway.foo")),
- },
- },
- })
-}
-
-func testAccOutscaleOAPIVpnGatewayDisappears(gateway *fcu.VpnGateway) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- conn := testAccProvider.Meta().(*OutscaleClient).FCU
- var err error
-
- opts := &fcu.DeleteVpnGatewayInput{
- VpnGatewayId: gateway.VpnGatewayId,
- }
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- _, err = conn.VM.DeleteVpnGateway(opts)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
-
- if err != nil {
- return err
- }
-
- return resource.Retry(40*time.Minute, func() *resource.RetryError {
- opts := &fcu.DescribeVpnGatewaysInput{
- VpnGatewayIds: []*string{gateway.VpnGatewayId},
- }
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = conn.VM.DescribeVpnGateways(opts)
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
- if err != nil {
- cgw, ok := err.(awserr.Error)
- if ok && cgw.Code() == "InvalidVpnGatewayID.NotFound" {
- return nil
- }
- if ok && cgw.Code() == "IncorrectState" {
- return resource.RetryableError(fmt.Errorf(
- "Waiting for VPN Gateway to be in the correct state: %v", gateway.VpnGatewayId))
- }
- return resource.NonRetryableError(
- fmt.Errorf("Error retrieving VPN Gateway: %s", err))
- }
- if *resp.VpnGateways[0].State == "deleted" {
- return nil
- }
- return resource.RetryableError(fmt.Errorf(
- "Waiting for VPN Gateway: %v", gateway.VpnGatewayId))
- })
- }
-}
-
-func testAccCheckOAPIVpnGatewayDestroy(s *terraform.State) error {
- FCU := testAccProvider.Meta().(*OutscaleClient).FCU
-
- for _, rs := range s.RootModule().Resources {
- if rs.Type != "outscale_vpn_gateway" {
- continue
- }
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = FCU.VM.DescribeVpnGateways(&fcu.DescribeVpnGatewaysInput{
- VpnGatewayIds: []*string{aws.String(rs.Primary.ID)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
- if err == nil {
- var v *fcu.VpnGateway
- for _, g := range resp.VpnGateways {
- if *g.VpnGatewayId == rs.Primary.ID {
- v = g
- }
- }
-
- if v == nil {
- // wasn't found
- return nil
- }
-
- if *v.State != "deleted" {
- return fmt.Errorf("Expected VPN Gateway to be in deleted state, but was not: %s", v)
- }
- return nil
- }
-
- // Verify the error is what we want
- ec2err, ok := err.(awserr.Error)
- if !ok {
- return err
- }
- if ec2err.Code() != "InvalidVpnGatewayID.NotFound" {
- return err
- }
- }
-
- return nil
-}
-
-func testAccCheckOAPIVpnGatewayExists(n string, ig *fcu.VpnGateway) resource.TestCheckFunc {
- return func(s *terraform.State) error {
- rs, ok := s.RootModule().Resources[n]
- if !ok {
- return fmt.Errorf("Not found: %s", n)
- }
-
- if rs.Primary.ID == "" {
- return fmt.Errorf("No ID is set")
- }
-
- FCU := testAccProvider.Meta().(*OutscaleClient).FCU
-
- var resp *fcu.DescribeVpnGatewaysOutput
- var err error
-
- err = resource.Retry(5*time.Minute, func() *resource.RetryError {
- resp, err = FCU.VM.DescribeVpnGateways(&fcu.DescribeVpnGatewaysInput{
- VpnGatewayIds: []*string{aws.String(rs.Primary.ID)},
- })
- if err != nil {
- if strings.Contains(err.Error(), "RequestLimitExceeded:") {
- return resource.RetryableError(err)
- }
- return resource.NonRetryableError(err)
- }
- return resource.NonRetryableError(err)
- })
- if err != nil {
- return err
- }
- if len(resp.VpnGateways) == 0 {
- return fmt.Errorf("VPN Gateway not found")
- }
-
- *ig = *resp.VpnGateways[0]
-
- return nil
- }
-}
-
-const testAccOAPINoVpnGatewayConfig = `
- resource "outscale_net" "foo" {
- ip_range = "10.1.0.0/16"
- }
-`
-
-const testAccOAPIVpnGatewayConfig = `
- resource "outscale_net" "foo" {
- ip_range = "10.1.0.0/16"
- }
-
- resource "outscale_vpn_gateway" "foo" {}
-`
-
-const testAccOAPIVpnGatewayConfigChangeVPC = `
- resource "outscale_net" "bar" {
- ip_range = "10.2.0.0/16"
- }
-
- resource "outscale_vpn_gateway" "foo" {}
-`
diff --git a/outscale/sweeper_test.go b/outscale/sweeper_test.go
index 20799db17..a15927576 100644
--- a/outscale/sweeper_test.go
+++ b/outscale/sweeper_test.go
@@ -1,24 +1,11 @@
package outscale
import (
- "fmt"
- "os"
"testing"
- "github.com/hashicorp/terraform/helper/resource"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)
func TestMain(m *testing.M) {
resource.TestMain(m)
}
-
-func sharedConfigForRegion(region string) (interface{}, error) {
- if os.Getenv("OUTSCALE_ACCESSKEYID") == "" {
- return nil, fmt.Errorf("empty OUTSCALE_ACCESSKEYID")
- }
- if os.Getenv("OUTSCALE_SECRETKEYID") == "" {
- return nil, fmt.Errorf("empty OUTSCALE_SECRETKEYID")
- }
-
- return nil, nil
-}
diff --git a/test/main.go b/test/main.go
deleted file mode 100644
index 1af82e5e3..000000000
--- a/test/main.go
+++ /dev/null
@@ -1,137 +0,0 @@
-package main
-
-import (
- "fmt"
- "os"
-
- "github.com/aws/aws-sdk-go/aws"
-
- "github.com/terraform-providers/terraform-provider-outscale/osc"
- "github.com/terraform-providers/terraform-provider-outscale/osc/fcu"
-)
-
-func main() {
-
- ak := os.Getenv("OUTSCALE_ACCESSKEYID")
- sk := os.Getenv("OUTSCALE_SECRETKEYID")
-
- config := osc.Config{
- Credentials: &osc.Credentials{
- AccessKey: ak,
- SecretKey: sk,
- Region: "eu-west-2",
- },
- }
-
- c, err := fcu.NewFCUClient(config)
- if err != nil {
- fmt.Println(err)
- }
- // keyname := "TestKey"
- // var maxC int64
- // imageID := "ami-8a6a0120"
- // maxC = 1
- // instanceType := "t2.micro"
- // input := fcu.RunInstancesInput{
- // ImageId: &imageID,
- // MaxCount: &maxC,
- // MinCount: &maxC,
- // KeyName: &keyname,
- // InstanceType: &instanceType,
- // }
- // output, err := c.VM.RunInstance(&input)
- // fmt.Println(err)
- // fmt.Println(output)
-
- // input2 := fcu.DescribeInstancesInput{
- // InstanceIds: []*string{output.Instances[0].InstanceId},
- // }
-
- // output2, err := c.VM.DescribeInstances(&input2)
- // fmt.Println(err)
- // fmt.Println(output2)
- // // id := "i-751ebdf6"
- // // input3 := fcu.GetPasswordDataInput{
- // // InstanceId: &id,
- // // }
- // //
- // time.Sleep(120 * time.Second)
- //
-
- // output3, err := c.VM.GetPasswordData(&input3)
- // fmt.Println(err)
- // fmt.Println(output3)
-
- // fmt.Printf("Key (%+v)\n", output3)
- // fmt.Printf("ID (%+v)\n", *output3.InstanceId)
- // fmt.Printf("Passw (%+v)\n", *output3.PasswordData)
-
- // output3, err := c.VM.StopInstances(&fcu.StopInstancesInput{
- // InstanceIds: []*string{output.Instances[0].InstanceId},
- // })
-
- // fmt.Println(output3)
- // fmt.Println(err)
-
- // output3, err := c.VM.StartInstances(&fcu.StartInstancesInput{
- // InstanceIds: []*string{aws.String("i-bab4810b")},
- // })
-
- // fmt.Println(output3)
- // fmt.Println(err)
-
- output4, err := c.VM.ModifyInstanceAttribute(&fcu.ModifyInstanceAttributeInput{
- InstanceId: aws.String("i-bab4810b"),
- DisableApiTermination: &fcu.AttributeBooleanValue{
- Value: aws.Bool(false),
- },
- })
-
- fmt.Println(output4)
- fmt.Println(err)
-
- //
- // input4 := fcu.ModifyInstanceKeyPairInput{
- // InstanceId: output.Instances[0].InstanceId,
- // KeyName: &keyname,
- // }
- // err = c.VM.ModifyInstanceKeyPair(&input4)
- // fmt.Println(err)
- //
- // input6 := fcu.TerminateInstancesInput{
- // InstanceIds: []*string{&id},
- // }
- //
- // output6, err := c.VM.TerminateInstances(&input6)
- // fmt.Println(err)
- // fmt.Println(output6)
-
- // var runResp *ec2.Reservation
- // err = resource.Retry(30*time.Second, func() *resource.RetryError {
- // var err error
- // runResp, err = conn.RunInstances(runOpts)
- // // IAM instance profiles can take ~10 seconds to propagate in AWS:
- // // http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html#launch-instance-with-role-console
- // if isAWSErr(err, "InvalidParameterValue", "Invalid IAM Instance Profile") {
- // log.Print("[DEBUG] Invalid IAM Instance Profile referenced, retrying...")
- // return resource.RetryableError(err)
- // }
- // // IAM roles can also take time to propagate in AWS:
- // if isAWSErr(err, "InvalidParameterValue", " has no associated IAM Roles") {
- // log.Print("[DEBUG] IAM Instance Profile appears to have no IAM roles, retrying...")
- // return resource.RetryableError(err)
- // }
- // return resource.NonRetryableError(err)
- // })
-
- // Read the content
- // var bodyBytes []byte
- // if r.Body != nil {
- // bodyBytes, _ = ioutil.ReadAll(r.Body)
- // }
- // // Restore the io.ReadCloser to its original state
- // r.Body = ioutil.NopCloser(bytes.NewBuffer(bodyBytes))
- // // Use the content
- // bodyString := string(bodyBytes)
- // fmt.Println(bodyString)
-}
diff --git a/utils/utils.go b/utils/utils.go
index 41799b3aa..1116d0e82 100644
--- a/utils/utils.go
+++ b/utils/utils.go
@@ -23,7 +23,10 @@ func ToJSONString(v interface{}) string {
}
func GetErrorResponse(err error) error {
- return fmt.Errorf("%s %s", err, ToJSONString(err.(oscgo.GenericOpenAPIError).Model().(oscgo.ErrorResponse)))
+ if e, ok := err.(oscgo.GenericOpenAPIError); ok {
+ return fmt.Errorf("%s %s", err, ToJSONString(e.Model().(oscgo.ErrorResponse)))
+ }
+ return err
}
// StringSliceToPtrInt64Slice ...
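
The guard added to `GetErrorResponse` means callers can pass it any error: `oscgo.GenericOpenAPIError` values still get the decoded `ErrorResponse` appended as JSON, while anything else is returned unchanged instead of triggering a failed type assertion. A small sketch of the non-oscgo case (the import path is assumed from the repository layout):

```go
package main

import (
	"errors"
	"fmt"

	"github.com/terraform-providers/terraform-provider-outscale/utils"
)

func main() {
	// A plain error, not an oscgo.GenericOpenAPIError: previously the
	// unconditional type assertion would panic; with the guard the error
	// is simply passed through.
	err := errors.New("connection reset by peer")
	fmt.Println(utils.GetErrorResponse(err))
}
```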
diff --git a/vendor/github.com/blang/semver/.travis.yml b/vendor/github.com/blang/semver/.travis.yml
deleted file mode 100644
index 102fb9a69..000000000
--- a/vendor/github.com/blang/semver/.travis.yml
+++ /dev/null
@@ -1,21 +0,0 @@
-language: go
-matrix:
- include:
- - go: 1.4.3
- - go: 1.5.4
- - go: 1.6.3
- - go: 1.7
- - go: tip
- allow_failures:
- - go: tip
-install:
-- go get golang.org/x/tools/cmd/cover
-- go get github.com/mattn/goveralls
-script:
-- echo "Test and track coverage" ; $HOME/gopath/bin/goveralls -package "." -service=travis-ci
- -repotoken $COVERALLS_TOKEN
-- echo "Build examples" ; cd examples && go build
-- echo "Check if gofmt'd" ; diff -u <(echo -n) <(gofmt -d -s .)
-env:
- global:
- secure: HroGEAUQpVq9zX1b1VIkraLiywhGbzvNnTZq2TMxgK7JHP8xqNplAeF1izrR2i4QLL9nsY+9WtYss4QuPvEtZcVHUobw6XnL6radF7jS1LgfYZ9Y7oF+zogZ2I5QUMRLGA7rcxQ05s7mKq3XZQfeqaNts4bms/eZRefWuaFZbkw=
diff --git a/vendor/github.com/blang/semver/LICENSE b/vendor/github.com/blang/semver/LICENSE
deleted file mode 100644
index 5ba5c86fc..000000000
--- a/vendor/github.com/blang/semver/LICENSE
+++ /dev/null
@@ -1,22 +0,0 @@
-The MIT License
-
-Copyright (c) 2014 Benedikt Lang
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
-
diff --git a/vendor/github.com/blang/semver/README.md b/vendor/github.com/blang/semver/README.md
deleted file mode 100644
index 08b2e4a3d..000000000
--- a/vendor/github.com/blang/semver/README.md
+++ /dev/null
@@ -1,194 +0,0 @@
-semver for golang [![Build Status](https://travis-ci.org/blang/semver.svg?branch=master)](https://travis-ci.org/blang/semver) [![GoDoc](https://godoc.org/github.com/blang/semver?status.png)](https://godoc.org/github.com/blang/semver) [![Coverage Status](https://img.shields.io/coveralls/blang/semver.svg)](https://coveralls.io/r/blang/semver?branch=master)
-======
-
-semver is a [Semantic Versioning](http://semver.org/) library written in golang. It fully covers spec version `2.0.0`.
-
-Usage
------
-```bash
-$ go get github.com/blang/semver
-```
-Note: Always vendor your dependencies or fix on a specific version tag.
-
-```go
-import github.com/blang/semver
-v1, err := semver.Make("1.0.0-beta")
-v2, err := semver.Make("2.0.0-beta")
-v1.Compare(v2)
-```
-
-Also check the [GoDocs](http://godoc.org/github.com/blang/semver).
-
-Why should I use this lib?
------
-
-- Fully spec compatible
-- No reflection
-- No regex
-- Fully tested (Coverage >99%)
-- Readable parsing/validation errors
-- Fast (See [Benchmarks](#benchmarks))
-- Only Stdlib
-- Uses values instead of pointers
-- Many features, see below
-
-
-Features
------
-
-- Parsing and validation at all levels
-- Comparator-like comparisons
-- Compare Helper Methods
-- InPlace manipulation
-- Ranges `>=1.0.0 <2.0.0 || >=3.0.0 !3.0.1-beta.1`
-- Wildcards `>=1.x`, `<=2.5.x`
-- Sortable (implements sort.Interface)
-- database/sql compatible (sql.Scanner/Valuer)
-- encoding/json compatible (json.Marshaler/Unmarshaler)
-
-Ranges
-------
-
-A `Range` is a set of conditions which specify which versions satisfy the range.
-
-A condition is composed of an operator and a version. The supported operators are:
-
-- `<1.0.0` Less than `1.0.0`
-- `<=1.0.0` Less than or equal to `1.0.0`
-- `>1.0.0` Greater than `1.0.0`
-- `>=1.0.0` Greater than or equal to `1.0.0`
-- `1.0.0`, `=1.0.0`, `==1.0.0` Equal to `1.0.0`
-- `!1.0.0`, `!=1.0.0` Not equal to `1.0.0`. Excludes version `1.0.0`.
-
-Note that spaces between the operator and the version will be gracefully tolerated.
-
-A `Range` can link multiple `Ranges` separated by space:
-
-Ranges can be linked by logical AND:
-
- - `>1.0.0 <2.0.0` would match between both ranges, so `1.1.1` and `1.8.7` but not `1.0.0` or `2.0.0`
- - `>1.0.0 <3.0.0 !2.0.3-beta.2` would match every version between `1.0.0` and `3.0.0` except `2.0.3-beta.2`
-
-Ranges can also be linked by logical OR:
-
- - `<2.0.0 || >=3.0.0` would match `1.x.x` and `3.x.x` but not `2.x.x`
-
-AND has a higher precedence than OR. It's not possible to use brackets.
-
-Ranges can be combined by both AND and OR
-
- - `>1.0.0 <2.0.0 || >3.0.0 !4.2.1` would match `1.2.3`, `1.9.9`, `3.1.1`, but not `4.2.1`, `2.1.1`
-
-Range usage:
-
-```go
-v, err := semver.Parse("1.2.3")
-validRange, err := semver.ParseRange(">1.0.0 <2.0.0 || >=3.0.0")
-if validRange(v) {
-  // valid
-}
-```
-
-Example
------
-
-Have a look at full examples in [examples/main.go](examples/main.go)
-
-```go
-import "github.com/blang/semver"
-
-v, err := semver.Make("0.0.1-alpha.preview+123.github")
-fmt.Printf("Major: %d\n", v.Major)
-fmt.Printf("Minor: %d\n", v.Minor)
-fmt.Printf("Patch: %d\n", v.Patch)
-fmt.Printf("Pre: %s\n", v.Pre)
-fmt.Printf("Build: %s\n", v.Build)
-
-// Prerelease versions array
-if len(v.Pre) > 0 {
- fmt.Println("Prerelease versions:")
- for i, pre := range v.Pre {
- fmt.Printf("%d: %q\n", i, pre)
- }
-}
-
-// Build meta data array
-if len(v.Build) > 0 {
- fmt.Println("Build meta data:")
- for i, build := range v.Build {
- fmt.Printf("%d: %q\n", i, build)
- }
-}
-
-v001, err := semver.Make("0.0.1")
-// Compare using helpers: v.GT(v2), v.LT, v.GTE, v.LTE
-v001.GT(v) == true
-v.LT(v001) == true
-v.GTE(v) == true
-v.LTE(v) == true
-
-// Or use v.Compare(v2) for comparisons (-1, 0, 1):
-v001.Compare(v) == 1
-v.Compare(v001) == -1
-v.Compare(v) == 0
-
-// Manipulate Version in place:
-v.Pre[0], err = semver.NewPRVersion("beta")
-if err != nil {
- fmt.Printf("Error parsing pre release version: %q", err)
-}
-
-fmt.Println("\nValidate versions:")
-v.Build[0] = "?"
-
-err = v.Validate()
-if err != nil {
- fmt.Printf("Validation failed: %s\n", err)
-}
-```
-
-
-Benchmarks
------
-
- BenchmarkParseSimple-4 5000000 390 ns/op 48 B/op 1 allocs/op
- BenchmarkParseComplex-4 1000000 1813 ns/op 256 B/op 7 allocs/op
- BenchmarkParseAverage-4 1000000 1171 ns/op 163 B/op 4 allocs/op
- BenchmarkStringSimple-4 20000000 119 ns/op 16 B/op 1 allocs/op
- BenchmarkStringLarger-4 10000000 206 ns/op 32 B/op 2 allocs/op
- BenchmarkStringComplex-4 5000000 324 ns/op 80 B/op 3 allocs/op
- BenchmarkStringAverage-4 5000000 273 ns/op 53 B/op 2 allocs/op
- BenchmarkValidateSimple-4 200000000 9.33 ns/op 0 B/op 0 allocs/op
- BenchmarkValidateComplex-4 3000000 469 ns/op 0 B/op 0 allocs/op
- BenchmarkValidateAverage-4 5000000 256 ns/op 0 B/op 0 allocs/op
- BenchmarkCompareSimple-4 100000000 11.8 ns/op 0 B/op 0 allocs/op
- BenchmarkCompareComplex-4 50000000 30.8 ns/op 0 B/op 0 allocs/op
- BenchmarkCompareAverage-4 30000000 41.5 ns/op 0 B/op 0 allocs/op
- BenchmarkSort-4 3000000 419 ns/op 256 B/op 2 allocs/op
- BenchmarkRangeParseSimple-4 2000000 850 ns/op 192 B/op 5 allocs/op
- BenchmarkRangeParseAverage-4 1000000 1677 ns/op 400 B/op 10 allocs/op
- BenchmarkRangeParseComplex-4 300000 5214 ns/op 1440 B/op 30 allocs/op
- BenchmarkRangeMatchSimple-4 50000000 25.6 ns/op 0 B/op 0 allocs/op
- BenchmarkRangeMatchAverage-4 30000000 56.4 ns/op 0 B/op 0 allocs/op
- BenchmarkRangeMatchComplex-4 10000000 153 ns/op 0 B/op 0 allocs/op
-
-See benchmark cases at [semver_test.go](semver_test.go)
-
-
-Motivation
------
-
-I simply couldn't find any lib supporting the full spec. Others were just wrong or used reflection and regex, which I don't like.
-
-
-Contribution
------
-
-Feel free to make a pull request. For bigger changes, create an issue first so it can be discussed.
-
-
-License
------
-
-See [LICENSE](LICENSE) file.
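The README above shows `Make` and field access but not the comparison helpers or the exclusion ranges it advertises; a minimal, self-contained sketch of those two features, based only on the API in the files removed below:

```go
package main

import (
	"fmt"

	"github.com/blang/semver"
)

func main() {
	a := semver.MustParse("1.2.3")
	b := semver.MustParse("1.9.0-rc.1")

	// Helper comparisons (GT/LT/EQ wrap Compare).
	fmt.Println(a.LT(b))                         // true
	fmt.Println(b.Compare(a))                    // 1
	fmt.Println(a.EQ(semver.MustParse("1.2.3"))) // true

	// An exclusion range: anything in 1.x except 1.4.2.
	r := semver.MustParseRange(">=1.0.0 <2.0.0 !1.4.2")
	fmt.Println(r(a))                         // true
	fmt.Println(r(semver.MustParse("1.4.2"))) // false
}
```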
diff --git a/vendor/github.com/blang/semver/json.go b/vendor/github.com/blang/semver/json.go
deleted file mode 100644
index a74bf7c44..000000000
--- a/vendor/github.com/blang/semver/json.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package semver
-
-import (
- "encoding/json"
-)
-
-// MarshalJSON implements the encoding/json.Marshaler interface.
-func (v Version) MarshalJSON() ([]byte, error) {
- return json.Marshal(v.String())
-}
-
-// UnmarshalJSON implements the encoding/json.Unmarshaler interface.
-func (v *Version) UnmarshalJSON(data []byte) (err error) {
- var versionString string
-
- if err = json.Unmarshal(data, &versionString); err != nil {
- return
- }
-
- *v, err = Parse(versionString)
-
- return
-}
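The `MarshalJSON`/`UnmarshalJSON` pair above means a `Version` can sit directly in a JSON-tagged struct; a small round-trip sketch:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/blang/semver"
)

type release struct {
	Name    string         `json:"name"`
	Version semver.Version `json:"version"`
}

func main() {
	in := release{Name: "demo", Version: semver.MustParse("1.4.0-beta.2+build.7")}

	// Versions marshal to their canonical string form.
	raw, err := json.Marshal(in)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(raw)) // {"name":"demo","version":"1.4.0-beta.2+build.7"}

	// ...and unmarshal back through Parse, so invalid strings are rejected.
	var out release
	if err := json.Unmarshal(raw, &out); err != nil {
		panic(err)
	}
	fmt.Println(out.Version.Equals(in.Version)) // true
}
```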
diff --git a/vendor/github.com/blang/semver/package.json b/vendor/github.com/blang/semver/package.json
deleted file mode 100644
index 1cf8ebdd9..000000000
--- a/vendor/github.com/blang/semver/package.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
- "author": "blang",
- "bugs": {
- "URL": "https://github.com/blang/semver/issues",
- "url": "https://github.com/blang/semver/issues"
- },
- "gx": {
- "dvcsimport": "github.com/blang/semver"
- },
- "gxVersion": "0.10.0",
- "language": "go",
- "license": "MIT",
- "name": "semver",
- "releaseCmd": "git commit -a -m \"gx publish $VERSION\"",
- "version": "3.5.1"
-}
-
diff --git a/vendor/github.com/blang/semver/range.go b/vendor/github.com/blang/semver/range.go
deleted file mode 100644
index fca406d47..000000000
--- a/vendor/github.com/blang/semver/range.go
+++ /dev/null
@@ -1,416 +0,0 @@
-package semver
-
-import (
- "fmt"
- "strconv"
- "strings"
- "unicode"
-)
-
-type wildcardType int
-
-const (
- noneWildcard wildcardType = iota
- majorWildcard wildcardType = 1
- minorWildcard wildcardType = 2
- patchWildcard wildcardType = 3
-)
-
-func wildcardTypefromInt(i int) wildcardType {
- switch i {
- case 1:
- return majorWildcard
- case 2:
- return minorWildcard
- case 3:
- return patchWildcard
- default:
- return noneWildcard
- }
-}
-
-type comparator func(Version, Version) bool
-
-var (
- compEQ comparator = func(v1 Version, v2 Version) bool {
- return v1.Compare(v2) == 0
- }
- compNE = func(v1 Version, v2 Version) bool {
- return v1.Compare(v2) != 0
- }
- compGT = func(v1 Version, v2 Version) bool {
- return v1.Compare(v2) == 1
- }
- compGE = func(v1 Version, v2 Version) bool {
- return v1.Compare(v2) >= 0
- }
- compLT = func(v1 Version, v2 Version) bool {
- return v1.Compare(v2) == -1
- }
- compLE = func(v1 Version, v2 Version) bool {
- return v1.Compare(v2) <= 0
- }
-)
-
-type versionRange struct {
- v Version
- c comparator
-}
-
-// rangeFunc creates a Range from the given versionRange.
-func (vr *versionRange) rangeFunc() Range {
- return Range(func(v Version) bool {
- return vr.c(v, vr.v)
- })
-}
-
-// Range represents a range of versions.
-// A Range can be used to check if a Version satisfies it:
-//
-// range, err := semver.ParseRange(">1.0.0 <2.0.0")
-// range(semver.MustParse("1.1.1") // returns true
-type Range func(Version) bool
-
-// OR combines the existing Range with another Range using logical OR.
-func (rf Range) OR(f Range) Range {
- return Range(func(v Version) bool {
- return rf(v) || f(v)
- })
-}
-
-// AND combines the existing Range with another Range using logical AND.
-func (rf Range) AND(f Range) Range {
- return Range(func(v Version) bool {
- return rf(v) && f(v)
- })
-}
-
-// ParseRange parses a range and returns a Range.
-// If the range could not be parsed an error is returned.
-//
-// Valid ranges are:
-// - "<1.0.0"
-// - "<=1.0.0"
-// - ">1.0.0"
-// - ">=1.0.0"
-// - "1.0.0", "=1.0.0", "==1.0.0"
-// - "!1.0.0", "!=1.0.0"
-//
-// A Range can consist of multiple ranges separated by space:
-// Ranges can be linked by logical AND:
-// - ">1.0.0 <2.0.0" would match between both ranges, so "1.1.1" and "1.8.7" but not "1.0.0" or "2.0.0"
-// - ">1.0.0 <3.0.0 !2.0.3-beta.2" would match every version between 1.0.0 and 3.0.0 except 2.0.3-beta.2
-//
-// Ranges can also be linked by logical OR:
-// - "<2.0.0 || >=3.0.0" would match "1.x.x" and "3.x.x" but not "2.x.x"
-//
-// AND has a higher precedence than OR. It's not possible to use brackets.
-//
-// Ranges can be combined by both AND and OR
-//
-// - `>1.0.0 <2.0.0 || >3.0.0 !4.2.1` would match `1.2.3`, `1.9.9`, `3.1.1`, but not `4.2.1`, `2.1.1`
-func ParseRange(s string) (Range, error) {
- parts := splitAndTrim(s)
- orParts, err := splitORParts(parts)
- if err != nil {
- return nil, err
- }
- expandedParts, err := expandWildcardVersion(orParts)
- if err != nil {
- return nil, err
- }
- var orFn Range
- for _, p := range expandedParts {
- var andFn Range
- for _, ap := range p {
- opStr, vStr, err := splitComparatorVersion(ap)
- if err != nil {
- return nil, err
- }
- vr, err := buildVersionRange(opStr, vStr)
- if err != nil {
- return nil, fmt.Errorf("Could not parse Range %q: %s", ap, err)
- }
- rf := vr.rangeFunc()
-
- // Set function
- if andFn == nil {
- andFn = rf
- } else { // Combine with existing function
- andFn = andFn.AND(rf)
- }
- }
- if orFn == nil {
- orFn = andFn
- } else {
- orFn = orFn.OR(andFn)
- }
-
- }
- return orFn, nil
-}
-
-// splitORParts splits the already cleaned parts by '||'.
-// Checks for invalid positions of the operator and returns an
-// error if found.
-func splitORParts(parts []string) ([][]string, error) {
- var ORparts [][]string
- last := 0
- for i, p := range parts {
- if p == "||" {
- if i == 0 {
- return nil, fmt.Errorf("First element in range is '||'")
- }
- ORparts = append(ORparts, parts[last:i])
- last = i + 1
- }
- }
- if last == len(parts) {
- return nil, fmt.Errorf("Last element in range is '||'")
- }
- ORparts = append(ORparts, parts[last:])
- return ORparts, nil
-}
-
-// buildVersionRange takes a slice of 2: operator and version
-// and builds a versionRange, otherwise an error.
-func buildVersionRange(opStr, vStr string) (*versionRange, error) {
- c := parseComparator(opStr)
- if c == nil {
- return nil, fmt.Errorf("Could not parse comparator %q in %q", opStr, strings.Join([]string{opStr, vStr}, ""))
- }
- v, err := Parse(vStr)
- if err != nil {
- return nil, fmt.Errorf("Could not parse version %q in %q: %s", vStr, strings.Join([]string{opStr, vStr}, ""), err)
- }
-
- return &versionRange{
- v: v,
- c: c,
- }, nil
-
-}
-
-// inArray checks if a byte is contained in an array of bytes
-func inArray(s byte, list []byte) bool {
- for _, el := range list {
- if el == s {
- return true
- }
- }
- return false
-}
-
-// splitAndTrim splits a range string by spaces and cleans whitespaces
-func splitAndTrim(s string) (result []string) {
- last := 0
- var lastChar byte
- excludeFromSplit := []byte{'>', '<', '='}
- for i := 0; i < len(s); i++ {
- if s[i] == ' ' && !inArray(lastChar, excludeFromSplit) {
- if last < i-1 {
- result = append(result, s[last:i])
- }
- last = i + 1
- } else if s[i] != ' ' {
- lastChar = s[i]
- }
- }
- if last < len(s)-1 {
- result = append(result, s[last:])
- }
-
- for i, v := range result {
- result[i] = strings.Replace(v, " ", "", -1)
- }
-
- // parts := strings.Split(s, " ")
- // for _, x := range parts {
- // if s := strings.TrimSpace(x); len(s) != 0 {
- // result = append(result, s)
- // }
- // }
- return
-}
-
-// splitComparatorVersion splits the comparator from the version.
-// Input must be free of leading or trailing spaces.
-func splitComparatorVersion(s string) (string, string, error) {
- i := strings.IndexFunc(s, unicode.IsDigit)
- if i == -1 {
- return "", "", fmt.Errorf("Could not get version from string: %q", s)
- }
- return strings.TrimSpace(s[0:i]), s[i:], nil
-}
-
-// getWildcardType will return the type of wildcard that the
-// passed version contains
-func getWildcardType(vStr string) wildcardType {
- parts := strings.Split(vStr, ".")
- nparts := len(parts)
- wildcard := parts[nparts-1]
-
- possibleWildcardType := wildcardTypefromInt(nparts)
- if wildcard == "x" {
- return possibleWildcardType
- }
-
- return noneWildcard
-}
-
-// createVersionFromWildcard will convert a wildcard version
-// into a regular version, replacing 'x's with '0's, handling
-// special cases like '1.x.x' and '1.x'
-func createVersionFromWildcard(vStr string) string {
- // handle 1.x.x
- vStr2 := strings.Replace(vStr, ".x.x", ".x", 1)
- vStr2 = strings.Replace(vStr2, ".x", ".0", 1)
- parts := strings.Split(vStr2, ".")
-
- // handle 1.x
- if len(parts) == 2 {
- return vStr2 + ".0"
- }
-
- return vStr2
-}
-
-// incrementMajorVersion will increment the major version
-// of the passed version
-func incrementMajorVersion(vStr string) (string, error) {
- parts := strings.Split(vStr, ".")
- i, err := strconv.Atoi(parts[0])
- if err != nil {
- return "", err
- }
- parts[0] = strconv.Itoa(i + 1)
-
- return strings.Join(parts, "."), nil
-}
-
-// incrementMajorVersion will increment the minor version
-// of the passed version
-func incrementMinorVersion(vStr string) (string, error) {
- parts := strings.Split(vStr, ".")
- i, err := strconv.Atoi(parts[1])
- if err != nil {
- return "", err
- }
- parts[1] = strconv.Itoa(i + 1)
-
- return strings.Join(parts, "."), nil
-}
-
-// expandWildcardVersion will expand wildcards inside versions
-// following these rules:
-//
-// * when dealing with patch wildcards:
-// >= 1.2.x will become >= 1.2.0
-// <= 1.2.x will become < 1.3.0
-// > 1.2.x will become >= 1.3.0
-// < 1.2.x will become < 1.2.0
-// != 1.2.x will become < 1.2.0 >= 1.3.0
-//
-// * when dealing with minor wildcards:
-// >= 1.x will become >= 1.0.0
-// <= 1.x will become < 2.0.0
-// > 1.x will become >= 2.0.0
-// < 1.0 will become < 1.0.0
-// != 1.x will become < 1.0.0 >= 2.0.0
-//
-// * when dealing with wildcards without
-// version operator:
-// 1.2.x will become >= 1.2.0 < 1.3.0
-// 1.x will become >= 1.0.0 < 2.0.0
-func expandWildcardVersion(parts [][]string) ([][]string, error) {
- var expandedParts [][]string
- for _, p := range parts {
- var newParts []string
- for _, ap := range p {
- if strings.Index(ap, "x") != -1 {
- opStr, vStr, err := splitComparatorVersion(ap)
- if err != nil {
- return nil, err
- }
-
- versionWildcardType := getWildcardType(vStr)
- flatVersion := createVersionFromWildcard(vStr)
-
- var resultOperator string
- var shouldIncrementVersion bool
- switch opStr {
- case ">":
- resultOperator = ">="
- shouldIncrementVersion = true
- case ">=":
- resultOperator = ">="
- case "<":
- resultOperator = "<"
- case "<=":
- resultOperator = "<"
- shouldIncrementVersion = true
- case "", "=", "==":
- newParts = append(newParts, ">="+flatVersion)
- resultOperator = "<"
- shouldIncrementVersion = true
- case "!=", "!":
- newParts = append(newParts, "<"+flatVersion)
- resultOperator = ">="
- shouldIncrementVersion = true
- }
-
- var resultVersion string
- if shouldIncrementVersion {
- switch versionWildcardType {
- case patchWildcard:
- resultVersion, _ = incrementMinorVersion(flatVersion)
- case minorWildcard:
- resultVersion, _ = incrementMajorVersion(flatVersion)
- }
- } else {
- resultVersion = flatVersion
- }
-
- ap = resultOperator + resultVersion
- }
- newParts = append(newParts, ap)
- }
- expandedParts = append(expandedParts, newParts)
- }
-
- return expandedParts, nil
-}
-
-func parseComparator(s string) comparator {
- switch s {
- case "==":
- fallthrough
- case "":
- fallthrough
- case "=":
- return compEQ
- case ">":
- return compGT
- case ">=":
- return compGE
- case "<":
- return compLT
- case "<=":
- return compLE
- case "!":
- fallthrough
- case "!=":
- return compNE
- }
-
- return nil
-}
-
-// MustParseRange is like ParseRange but panics if the range cannot be parsed.
-func MustParseRange(s string) Range {
- r, err := ParseRange(s)
- if err != nil {
- panic(`semver: ParseRange(` + s + `): ` + err.Error())
- }
- return r
-}
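The wildcard handling documented in `expandWildcardVersion` above rewrites `x` components into concrete bounds (for example `1.2.x` becomes `>=1.2.0 <1.3.0`); a short sketch of the resulting range behaviour:

```go
package main

import (
	"fmt"

	"github.com/blang/semver"
)

func main() {
	// "1.2.x" is expanded internally to ">=1.2.0 <1.3.0".
	patch := semver.MustParseRange("1.2.x")
	fmt.Println(patch(semver.MustParse("1.2.7"))) // true
	fmt.Println(patch(semver.MustParse("1.3.0"))) // false

	// ">=1.x" is expanded to ">=1.0.0".
	minor := semver.MustParseRange(">=1.x")
	fmt.Println(minor(semver.MustParse("0.9.9"))) // false
	fmt.Println(minor(semver.MustParse("2.0.0"))) // true
}
```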
diff --git a/vendor/github.com/blang/semver/semver.go b/vendor/github.com/blang/semver/semver.go
deleted file mode 100644
index 8ee0842e6..000000000
--- a/vendor/github.com/blang/semver/semver.go
+++ /dev/null
@@ -1,418 +0,0 @@
-package semver
-
-import (
- "errors"
- "fmt"
- "strconv"
- "strings"
-)
-
-const (
- numbers string = "0123456789"
- alphas = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-"
- alphanum = alphas + numbers
-)
-
-// SpecVersion is the latest fully supported spec version of semver
-var SpecVersion = Version{
- Major: 2,
- Minor: 0,
- Patch: 0,
-}
-
-// Version represents a semver compatible version
-type Version struct {
- Major uint64
- Minor uint64
- Patch uint64
- Pre []PRVersion
- Build []string // No precedence
-}
-
-// Version to string
-func (v Version) String() string {
- b := make([]byte, 0, 5)
- b = strconv.AppendUint(b, v.Major, 10)
- b = append(b, '.')
- b = strconv.AppendUint(b, v.Minor, 10)
- b = append(b, '.')
- b = strconv.AppendUint(b, v.Patch, 10)
-
- if len(v.Pre) > 0 {
- b = append(b, '-')
- b = append(b, v.Pre[0].String()...)
-
- for _, pre := range v.Pre[1:] {
- b = append(b, '.')
- b = append(b, pre.String()...)
- }
- }
-
- if len(v.Build) > 0 {
- b = append(b, '+')
- b = append(b, v.Build[0]...)
-
- for _, build := range v.Build[1:] {
- b = append(b, '.')
- b = append(b, build...)
- }
- }
-
- return string(b)
-}
-
-// Equals checks if v is equal to o.
-func (v Version) Equals(o Version) bool {
- return (v.Compare(o) == 0)
-}
-
-// EQ checks if v is equal to o.
-func (v Version) EQ(o Version) bool {
- return (v.Compare(o) == 0)
-}
-
-// NE checks if v is not equal to o.
-func (v Version) NE(o Version) bool {
- return (v.Compare(o) != 0)
-}
-
-// GT checks if v is greater than o.
-func (v Version) GT(o Version) bool {
- return (v.Compare(o) == 1)
-}
-
-// GTE checks if v is greater than or equal to o.
-func (v Version) GTE(o Version) bool {
- return (v.Compare(o) >= 0)
-}
-
-// GE checks if v is greater than or equal to o.
-func (v Version) GE(o Version) bool {
- return (v.Compare(o) >= 0)
-}
-
-// LT checks if v is less than o.
-func (v Version) LT(o Version) bool {
- return (v.Compare(o) == -1)
-}
-
-// LTE checks if v is less than or equal to o.
-func (v Version) LTE(o Version) bool {
- return (v.Compare(o) <= 0)
-}
-
-// LE checks if v is less than or equal to o.
-func (v Version) LE(o Version) bool {
- return (v.Compare(o) <= 0)
-}
-
-// Compare compares Versions v to o:
-// -1 == v is less than o
-// 0 == v is equal to o
-// 1 == v is greater than o
-func (v Version) Compare(o Version) int {
- if v.Major != o.Major {
- if v.Major > o.Major {
- return 1
- }
- return -1
- }
- if v.Minor != o.Minor {
- if v.Minor > o.Minor {
- return 1
- }
- return -1
- }
- if v.Patch != o.Patch {
- if v.Patch > o.Patch {
- return 1
- }
- return -1
- }
-
- // Quick comparison if a version has no prerelease versions
- if len(v.Pre) == 0 && len(o.Pre) == 0 {
- return 0
- } else if len(v.Pre) == 0 && len(o.Pre) > 0 {
- return 1
- } else if len(v.Pre) > 0 && len(o.Pre) == 0 {
- return -1
- }
-
- i := 0
- for ; i < len(v.Pre) && i < len(o.Pre); i++ {
- if comp := v.Pre[i].Compare(o.Pre[i]); comp == 0 {
- continue
- } else if comp == 1 {
- return 1
- } else {
- return -1
- }
- }
-
- // If all prerelease versions are equal but one has additional prerelease identifiers, that one is greater
- if i == len(v.Pre) && i == len(o.Pre) {
- return 0
- } else if i == len(v.Pre) && i < len(o.Pre) {
- return -1
- } else {
- return 1
- }
-
-}
-
-// Validate validates v and returns error in case
-func (v Version) Validate() error {
- // Major, Minor, Patch already validated using uint64
-
- for _, pre := range v.Pre {
- if !pre.IsNum { //Numeric prerelease versions already uint64
- if len(pre.VersionStr) == 0 {
- return fmt.Errorf("Prerelease can not be empty %q", pre.VersionStr)
- }
- if !containsOnly(pre.VersionStr, alphanum) {
- return fmt.Errorf("Invalid character(s) found in prerelease %q", pre.VersionStr)
- }
- }
- }
-
- for _, build := range v.Build {
- if len(build) == 0 {
- return fmt.Errorf("Build meta data can not be empty %q", build)
- }
- if !containsOnly(build, alphanum) {
- return fmt.Errorf("Invalid character(s) found in build meta data %q", build)
- }
- }
-
- return nil
-}
-
-// New is an alias for Parse and returns a pointer, parses version string and returns a validated Version or error
-func New(s string) (vp *Version, err error) {
- v, err := Parse(s)
- vp = &v
- return
-}
-
-// Make is an alias for Parse, parses version string and returns a validated Version or error
-func Make(s string) (Version, error) {
- return Parse(s)
-}
-
-// ParseTolerant allows for certain version specifications that do not strictly adhere to semver
-// specs to be parsed by this library. It does so by normalizing versions before passing them to
-// Parse(). It currently trims spaces, removes a "v" prefix, and adds a 0 patch number to versions
-// with only major and minor components specified
-func ParseTolerant(s string) (Version, error) {
- s = strings.TrimSpace(s)
- s = strings.TrimPrefix(s, "v")
-
- // Split into major.minor.(patch+pr+meta)
- parts := strings.SplitN(s, ".", 3)
- if len(parts) < 3 {
- if strings.ContainsAny(parts[len(parts)-1], "+-") {
- return Version{}, errors.New("Short version cannot contain PreRelease/Build meta data")
- }
- for len(parts) < 3 {
- parts = append(parts, "0")
- }
- s = strings.Join(parts, ".")
- }
-
- return Parse(s)
-}
-
-// Parse parses version string and returns a validated Version or error
-func Parse(s string) (Version, error) {
- if len(s) == 0 {
- return Version{}, errors.New("Version string empty")
- }
-
- // Split into major.minor.(patch+pr+meta)
- parts := strings.SplitN(s, ".", 3)
- if len(parts) != 3 {
- return Version{}, errors.New("No Major.Minor.Patch elements found")
- }
-
- // Major
- if !containsOnly(parts[0], numbers) {
- return Version{}, fmt.Errorf("Invalid character(s) found in major number %q", parts[0])
- }
- if hasLeadingZeroes(parts[0]) {
- return Version{}, fmt.Errorf("Major number must not contain leading zeroes %q", parts[0])
- }
- major, err := strconv.ParseUint(parts[0], 10, 64)
- if err != nil {
- return Version{}, err
- }
-
- // Minor
- if !containsOnly(parts[1], numbers) {
- return Version{}, fmt.Errorf("Invalid character(s) found in minor number %q", parts[1])
- }
- if hasLeadingZeroes(parts[1]) {
- return Version{}, fmt.Errorf("Minor number must not contain leading zeroes %q", parts[1])
- }
- minor, err := strconv.ParseUint(parts[1], 10, 64)
- if err != nil {
- return Version{}, err
- }
-
- v := Version{}
- v.Major = major
- v.Minor = minor
-
- var build, prerelease []string
- patchStr := parts[2]
-
- if buildIndex := strings.IndexRune(patchStr, '+'); buildIndex != -1 {
- build = strings.Split(patchStr[buildIndex+1:], ".")
- patchStr = patchStr[:buildIndex]
- }
-
- if preIndex := strings.IndexRune(patchStr, '-'); preIndex != -1 {
- prerelease = strings.Split(patchStr[preIndex+1:], ".")
- patchStr = patchStr[:preIndex]
- }
-
- if !containsOnly(patchStr, numbers) {
- return Version{}, fmt.Errorf("Invalid character(s) found in patch number %q", patchStr)
- }
- if hasLeadingZeroes(patchStr) {
- return Version{}, fmt.Errorf("Patch number must not contain leading zeroes %q", patchStr)
- }
- patch, err := strconv.ParseUint(patchStr, 10, 64)
- if err != nil {
- return Version{}, err
- }
-
- v.Patch = patch
-
- // Prerelease
- for _, prstr := range prerelease {
- parsedPR, err := NewPRVersion(prstr)
- if err != nil {
- return Version{}, err
- }
- v.Pre = append(v.Pre, parsedPR)
- }
-
- // Build meta data
- for _, str := range build {
- if len(str) == 0 {
- return Version{}, errors.New("Build meta data is empty")
- }
- if !containsOnly(str, alphanum) {
- return Version{}, fmt.Errorf("Invalid character(s) found in build meta data %q", str)
- }
- v.Build = append(v.Build, str)
- }
-
- return v, nil
-}
-
-// MustParse is like Parse but panics if the version cannot be parsed.
-func MustParse(s string) Version {
- v, err := Parse(s)
- if err != nil {
- panic(`semver: Parse(` + s + `): ` + err.Error())
- }
- return v
-}
-
-// PRVersion represents a PreRelease Version
-type PRVersion struct {
- VersionStr string
- VersionNum uint64
- IsNum bool
-}
-
-// NewPRVersion creates a new valid prerelease version
-func NewPRVersion(s string) (PRVersion, error) {
- if len(s) == 0 {
- return PRVersion{}, errors.New("Prerelease is empty")
- }
- v := PRVersion{}
- if containsOnly(s, numbers) {
- if hasLeadingZeroes(s) {
- return PRVersion{}, fmt.Errorf("Numeric PreRelease version must not contain leading zeroes %q", s)
- }
- num, err := strconv.ParseUint(s, 10, 64)
-
- // Might never be hit, but just in case
- if err != nil {
- return PRVersion{}, err
- }
- v.VersionNum = num
- v.IsNum = true
- } else if containsOnly(s, alphanum) {
- v.VersionStr = s
- v.IsNum = false
- } else {
- return PRVersion{}, fmt.Errorf("Invalid character(s) found in prerelease %q", s)
- }
- return v, nil
-}
-
-// IsNumeric checks if prerelease-version is numeric
-func (v PRVersion) IsNumeric() bool {
- return v.IsNum
-}
-
-// Compare compares two PreRelease Versions v and o:
-// -1 == v is less than o
-// 0 == v is equal to o
-// 1 == v is greater than o
-func (v PRVersion) Compare(o PRVersion) int {
- if v.IsNum && !o.IsNum {
- return -1
- } else if !v.IsNum && o.IsNum {
- return 1
- } else if v.IsNum && o.IsNum {
- if v.VersionNum == o.VersionNum {
- return 0
- } else if v.VersionNum > o.VersionNum {
- return 1
- } else {
- return -1
- }
- } else { // both are Alphas
- if v.VersionStr == o.VersionStr {
- return 0
- } else if v.VersionStr > o.VersionStr {
- return 1
- } else {
- return -1
- }
- }
-}
-
-// PreRelease version to string
-func (v PRVersion) String() string {
- if v.IsNum {
- return strconv.FormatUint(v.VersionNum, 10)
- }
- return v.VersionStr
-}
-
-func containsOnly(s string, set string) bool {
- return strings.IndexFunc(s, func(r rune) bool {
- return !strings.ContainsRune(set, r)
- }) == -1
-}
-
-func hasLeadingZeroes(s string) bool {
- return len(s) > 1 && s[0] == '0'
-}
-
-// NewBuildVersion creates a new valid build version
-func NewBuildVersion(s string) (string, error) {
- if len(s) == 0 {
- return "", errors.New("Buildversion is empty")
- }
- if !containsOnly(s, alphanum) {
- return "", fmt.Errorf("Invalid character(s) found in build meta data %q", s)
- }
- return s, nil
-}
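A brief sketch of the difference between `Parse`, `ParseTolerant`, and `Validate` as implemented above:

```go
package main

import (
	"fmt"

	"github.com/blang/semver"
)

func main() {
	// ParseTolerant trims spaces, drops a leading "v", and pads missing
	// components, so " v1.2 " normalises to "1.2.0".
	v, err := semver.ParseTolerant(" v1.2 ")
	fmt.Println(v, err) // 1.2.0 <nil>

	// Strict Parse rejects the same input.
	_, err = semver.Parse("v1.2")
	fmt.Println(err != nil) // true

	// Validate catches fields set to illegal values after manipulation.
	v.Build = append(v.Build, "meta?data")
	fmt.Println(v.Validate()) // Invalid character(s) found in build meta data "meta?data"
}
```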
diff --git a/vendor/github.com/blang/semver/sort.go b/vendor/github.com/blang/semver/sort.go
deleted file mode 100644
index e18f88082..000000000
--- a/vendor/github.com/blang/semver/sort.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package semver
-
-import (
- "sort"
-)
-
-// Versions represents multiple versions.
-type Versions []Version
-
-// Len returns length of version collection
-func (s Versions) Len() int {
- return len(s)
-}
-
-// Swap swaps two versions inside the collection by its indices
-func (s Versions) Swap(i, j int) {
- s[i], s[j] = s[j], s[i]
-}
-
-// Less checks if version at index i is less than version at index j
-func (s Versions) Less(i, j int) bool {
- return s[i].LT(s[j])
-}
-
-// Sort sorts a slice of versions
-func Sort(versions []Version) {
- sort.Sort(Versions(versions))
-}
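Because `Versions` implements `sort.Interface`, the `Sort` helper above is interchangeable with the standard library; for example:

```go
package main

import (
	"fmt"
	"sort"

	"github.com/blang/semver"
)

func main() {
	vs := []semver.Version{
		semver.MustParse("1.10.0"),
		semver.MustParse("1.2.0"),
		semver.MustParse("1.2.0-alpha"),
	}

	semver.Sort(vs) // equivalent to sort.Sort(semver.Versions(vs))
	fmt.Println(vs) // [1.2.0-alpha 1.2.0 1.10.0]

	// The sorted slice also works with sort.Search.
	i := sort.Search(len(vs), func(i int) bool {
		return vs[i].GTE(semver.MustParse("1.2.0"))
	})
	fmt.Println(i) // 1
}
```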
diff --git a/vendor/github.com/blang/semver/sql.go b/vendor/github.com/blang/semver/sql.go
deleted file mode 100644
index eb4d80266..000000000
--- a/vendor/github.com/blang/semver/sql.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package semver
-
-import (
- "database/sql/driver"
- "fmt"
-)
-
-// Scan implements the database/sql.Scanner interface.
-func (v *Version) Scan(src interface{}) (err error) {
- var str string
- switch src := src.(type) {
- case string:
- str = src
- case []byte:
- str = string(src)
- default:
- return fmt.Errorf("Version.Scan: cannot convert %T to string.", src)
- }
-
- if t, err := Parse(str); err == nil {
- *v = t
- }
-
- return
-}
-
-// Value implements the database/sql/driver.Valuer interface.
-func (v Version) Value() (driver.Value, error) {
- return v.String(), nil
-}
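The `Scanner`/`Valuer` implementations above can be exercised without a database handle, since `Scan` accepts the `string`/`[]byte` forms drivers typically return:

```go
package main

import (
	"fmt"

	"github.com/blang/semver"
)

func main() {
	// Scan parses the raw column value into the Version.
	var v semver.Version
	if err := v.Scan([]byte("2.1.7-rc.3")); err != nil {
		panic(err)
	}
	fmt.Println(v.Minor, v.Pre) // 1 [rc 3]

	// Value hands the canonical string form back to the driver.
	val, _ := v.Value()
	fmt.Println(val) // 2.1.7-rc.3
}
```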
diff --git a/vendor/github.com/bmatcuk/doublestar/.gitignore b/vendor/github.com/bmatcuk/doublestar/.gitignore
deleted file mode 100644
index af212ecc2..000000000
--- a/vendor/github.com/bmatcuk/doublestar/.gitignore
+++ /dev/null
@@ -1,32 +0,0 @@
-# vi
-*~
-*.swp
-*.swo
-
-# Compiled Object files, Static and Dynamic libs (Shared Objects)
-*.o
-*.a
-*.so
-
-# Folders
-_obj
-_test
-
-# Architecture specific extensions/prefixes
-*.[568vq]
-[568vq].out
-
-*.cgo1.go
-*.cgo2.c
-_cgo_defun.c
-_cgo_gotypes.go
-_cgo_export.*
-
-_testmain.go
-
-*.exe
-*.test
-*.prof
-
-# test directory
-test/
diff --git a/vendor/github.com/bmatcuk/doublestar/.travis.yml b/vendor/github.com/bmatcuk/doublestar/.travis.yml
deleted file mode 100644
index ec4fee889..000000000
--- a/vendor/github.com/bmatcuk/doublestar/.travis.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-language: go
-
-go:
- - 1.11
- - 1.12
-
-before_install:
- - go get -t -v ./...
-
-script:
- - go test -race -coverprofile=coverage.txt -covermode=atomic
-
-after_success:
- - bash <(curl -s https://codecov.io/bash)
-
diff --git a/vendor/github.com/bmatcuk/doublestar/LICENSE b/vendor/github.com/bmatcuk/doublestar/LICENSE
deleted file mode 100644
index 309c9d1d1..000000000
--- a/vendor/github.com/bmatcuk/doublestar/LICENSE
+++ /dev/null
@@ -1,22 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2014 Bob Matcuk
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
diff --git a/vendor/github.com/bmatcuk/doublestar/README.md b/vendor/github.com/bmatcuk/doublestar/README.md
deleted file mode 100644
index 8e365c5e3..000000000
--- a/vendor/github.com/bmatcuk/doublestar/README.md
+++ /dev/null
@@ -1,109 +0,0 @@
-![Release](https://img.shields.io/github/release/bmatcuk/doublestar.svg?branch=master)
-[![Build Status](https://travis-ci.org/bmatcuk/doublestar.svg?branch=master)](https://travis-ci.org/bmatcuk/doublestar)
-[![codecov.io](https://img.shields.io/codecov/c/github/bmatcuk/doublestar.svg?branch=master)](https://codecov.io/github/bmatcuk/doublestar?branch=master)
-
-# doublestar
-
-**doublestar** is a [golang](http://golang.org/) implementation of path pattern
-matching and globbing with support for "doublestar" (aka globstar: `**`)
-patterns.
-
-doublestar patterns match files and directories recursively. For example, if
-you had the following directory structure:
-
-```
-grandparent
-`-- parent
- |-- child1
- `-- child2
-```
-
-You could find the children with patterns such as: `**/child*`,
-`grandparent/**/child?`, `**/parent/*`, or even just `**` by itself (which will
-return all files and directories recursively).
-
-Bash's globstar is doublestar's inspiration and, as such, works similarly.
-Note that the doublestar must appear as a path component by itself. A pattern
-such as `/path**` is invalid and will be treated the same as `/path*`, but
-`/path*/**` should achieve the desired result. Additionally, `/path/**` will
-match all directories and files under the path directory, but `/path/**/` will
-only match directories.
-
-## Installation
-
-**doublestar** can be installed via `go get`:
-
-```bash
-go get github.com/bmatcuk/doublestar
-```
-
-To use it in your code, you must import it:
-
-```go
-import "github.com/bmatcuk/doublestar"
-```
-
-## Functions
-
-### Match
-```go
-func Match(pattern, name string) (bool, error)
-```
-
-Match returns true if `name` matches the file name `pattern`
-([see below](#patterns)). `name` and `pattern` are split on forward slash (`/`)
-characters and may be relative or absolute.
-
-Note: `Match()` is meant to be a drop-in replacement for `path.Match()`. As
-such, it always uses `/` as the path separator. If you are writing code that
-will run on systems where `/` is not the path separator (such as Windows), you
-want to use `PathMatch()` (below) instead.
-
-
-### PathMatch
-```go
-func PathMatch(pattern, name string) (bool, error)
-```
-
-PathMatch returns true if `name` matches the file name `pattern`
-([see below](#patterns)). The difference between Match and PathMatch is that
-PathMatch will automatically use your system's path separator to split `name`
-and `pattern`.
-
-`PathMatch()` is meant to be a drop-in replacement for `filepath.Match()`.
-
-### Glob
-```go
-func Glob(pattern string) ([]string, error)
-```
-
-Glob finds all files and directories in the filesystem that match `pattern`
-([see below](#patterns)). `pattern` may be relative (to the current working
-directory), or absolute.
-
-`Glob()` is meant to be a drop-in replacement for `filepath.Glob()`.
-
-## Patterns
-
-**doublestar** supports the following special terms in the patterns:
-
-Special Terms | Meaning
-------------- | -------
-`*` | matches any sequence of non-path-separators
-`**` | matches any sequence of characters, including path separators
-`?` | matches any single non-path-separator character
-`[class]` | matches any single non-path-separator character against a class of characters ([see below](#character-classes))
-`{alt1,...}` | matches a sequence of characters if one of the comma-separated alternatives matches
-
-Any character with a special meaning can be escaped with a backslash (`\`).
-
-### Character Classes
-
-Character classes support the following:
-
-Class | Meaning
----------- | -------
-`[abc]` | matches any single character within the set
-`[a-z]` | matches any single character in the range
-`[^class]` | matches any single character which does *not* match the class
-
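For reference, a minimal sketch of the three entry points described above (`Match`, `PathMatch`, `Glob`); the `Glob` output naturally depends on what is on disk:

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/bmatcuk/doublestar"
)

func main() {
	// Match always splits on '/', like path.Match.
	ok, err := doublestar.Match("grandparent/**/child?", "grandparent/parent/child1")
	fmt.Println(ok, err) // true <nil>

	// Alternatives and character classes from the tables above.
	ok, _ = doublestar.Match("*.{png,jpg}", "logo.jpg")
	fmt.Println(ok) // true
	ok, _ = doublestar.Match("report-[0-9][0-9].txt", "report-07.txt")
	fmt.Println(ok) // true

	// PathMatch uses the platform's path separator instead.
	ok, _ = doublestar.PathMatch(filepath.FromSlash("**/*.txt"), filepath.FromSlash("a/b/c.txt"))
	fmt.Println(ok) // true

	// Glob walks the real filesystem relative to the working directory,
	// so its output depends on what actually exists on disk.
	matches, err := doublestar.Glob("**/*.go")
	fmt.Println(matches, err)
}
```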
diff --git a/vendor/github.com/bmatcuk/doublestar/doublestar.go b/vendor/github.com/bmatcuk/doublestar/doublestar.go
deleted file mode 100644
index 0044dfa83..000000000
--- a/vendor/github.com/bmatcuk/doublestar/doublestar.go
+++ /dev/null
@@ -1,476 +0,0 @@
-package doublestar
-
-import (
- "fmt"
- "os"
- "path"
- "path/filepath"
- "strings"
- "unicode/utf8"
-)
-
-// ErrBadPattern indicates a pattern was malformed.
-var ErrBadPattern = path.ErrBadPattern
-
-// Split a path on the given separator, respecting escaping.
-func splitPathOnSeparator(path string, separator rune) (ret []string) {
- idx := 0
- if separator == '\\' {
- // if the separator is '\\', then we can just split...
- ret = strings.Split(path, string(separator))
- idx = len(ret)
- } else {
- // otherwise, we need to be careful of situations where the separator was escaped
- cnt := strings.Count(path, string(separator))
- if cnt == 0 {
- return []string{path}
- }
-
- ret = make([]string, cnt+1)
- pathlen := len(path)
- separatorLen := utf8.RuneLen(separator)
- emptyEnd := false
- for start := 0; start < pathlen; {
- end := indexRuneWithEscaping(path[start:], separator)
- if end == -1 {
- emptyEnd = false
- end = pathlen
- } else {
- emptyEnd = true
- end += start
- }
- ret[idx] = path[start:end]
- start = end + separatorLen
- idx++
- }
-
- // If the last rune is a path separator, we need to append an empty string to
- // represent the last, empty path component. By default, the strings from
- // make([]string, ...) will be empty, so we just need to increment the count
- if emptyEnd {
- idx++
- }
- }
-
- return ret[:idx]
-}
-
-// Find the first index of a rune in a string,
-// ignoring any times the rune is escaped using "\".
-func indexRuneWithEscaping(s string, r rune) int {
- end := strings.IndexRune(s, r)
- if end == -1 {
- return -1
- }
- if end > 0 && s[end-1] == '\\' {
- start := end + utf8.RuneLen(r)
- end = indexRuneWithEscaping(s[start:], r)
- if end != -1 {
- end += start
- }
- }
- return end
-}
-
-// Match returns true if name matches the shell file name pattern.
-// The pattern syntax is:
-//
-// pattern:
-// { term }
-// term:
-// '*' matches any sequence of non-path-separators
-// '**' matches any sequence of characters, including
-// path separators.
-// '?' matches any single non-path-separator character
-// '[' [ '^' ] { character-range } ']'
-// character class (must be non-empty)
-// '{' { term } [ ',' { term } ... ] '}'
-// c matches character c (c != '*', '?', '\\', '[')
-// '\\' c matches character c
-//
-// character-range:
-// c matches character c (c != '\\', '-', ']')
-// '\\' c matches character c
-// lo '-' hi matches character c for lo <= c <= hi
-//
-// Match requires pattern to match all of name, not just a substring.
-// The path-separator defaults to the '/' character. The only possible
-// returned error is ErrBadPattern, when pattern is malformed.
-//
-// Note: this is meant as a drop-in replacement for path.Match() which
-// always uses '/' as the path separator. If you want to support systems
-// which use a different path separator (such as Windows), what you want
-// is the PathMatch() function below.
-//
-func Match(pattern, name string) (bool, error) {
- return matchWithSeparator(pattern, name, '/')
-}
-
-// PathMatch is like Match except that it uses your system's path separator.
-// For most systems, this will be '/'. However, for Windows, it would be '\\'.
-// Note that for systems where the path separator is '\\', escaping is
-// disabled.
-//
-// Note: this is meant as a drop-in replacement for filepath.Match().
-//
-func PathMatch(pattern, name string) (bool, error) {
- return matchWithSeparator(pattern, name, os.PathSeparator)
-}
-
-// Match returns true if name matches the shell file name pattern.
-// The pattern syntax is:
-//
-// pattern:
-// { term }
-// term:
-// '*' matches any sequence of non-path-separators
-// '**' matches any sequence of characters, including
-// path separators.
-// '?' matches any single non-path-separator character
-// '[' [ '^' ] { character-range } ']'
-// character class (must be non-empty)
-// '{' { term } [ ',' { term } ... ] '}'
-// c matches character c (c != '*', '?', '\\', '[')
-// '\\' c matches character c
-//
-// character-range:
-// c matches character c (c != '\\', '-', ']')
-// '\\' c matches character c, unless separator is '\\'
-// lo '-' hi matches character c for lo <= c <= hi
-//
-// Match requires pattern to match all of name, not just a substring.
-// The only possible returned error is ErrBadPattern, when pattern
-// is malformed.
-//
-func matchWithSeparator(pattern, name string, separator rune) (bool, error) {
- patternComponents := splitPathOnSeparator(pattern, separator)
- nameComponents := splitPathOnSeparator(name, separator)
- return doMatching(patternComponents, nameComponents)
-}
-
-func doMatching(patternComponents, nameComponents []string) (matched bool, err error) {
- // check for some base-cases
- patternLen, nameLen := len(patternComponents), len(nameComponents)
- if patternLen == 0 && nameLen == 0 {
- return true, nil
- }
- if patternLen == 0 || nameLen == 0 {
- return false, nil
- }
-
- patIdx, nameIdx := 0, 0
- for patIdx < patternLen && nameIdx < nameLen {
- if patternComponents[patIdx] == "**" {
- // if our last pattern component is a doublestar, we're done -
- // doublestar will match any remaining name components, if any.
- if patIdx++; patIdx >= patternLen {
- return true, nil
- }
-
- // otherwise, try matching remaining components
- for ; nameIdx < nameLen; nameIdx++ {
- if m, _ := doMatching(patternComponents[patIdx:], nameComponents[nameIdx:]); m {
- return true, nil
- }
- }
- return false, nil
- }
-
- // try matching components
- matched, err = matchComponent(patternComponents[patIdx], nameComponents[nameIdx])
- if !matched || err != nil {
- return
- }
-
- patIdx++
- nameIdx++
- }
- return patIdx >= patternLen && nameIdx >= nameLen, nil
-}
-
-// Glob returns the names of all files matching pattern or nil
-// if there is no matching file. The syntax of pattern is the same
-// as in Match. The pattern may describe hierarchical names such as
-// /usr/*/bin/ed (assuming the Separator is '/').
-//
-// Glob ignores file system errors such as I/O errors reading directories.
-// The only possible returned error is ErrBadPattern, when pattern
-// is malformed.
-//
-// Your system path separator is automatically used. This means on
-// systems where the separator is '\\' (Windows), escaping will be
-// disabled.
-//
-// Note: this is meant as a drop-in replacement for filepath.Glob().
-//
-func Glob(pattern string) (matches []string, err error) {
- patternComponents := splitPathOnSeparator(filepath.ToSlash(pattern), '/')
- if len(patternComponents) == 0 {
- return nil, nil
- }
-
- // On Windows systems, this will return the drive name ('C:') for filesystem
- // paths, or \\\ for UNC paths. On other systems, it will
- // return an empty string. Since absolute paths on non-Windows systems start
- // with a slash, patternComponent[0] == volumeName will return true for both
- // absolute Windows paths and absolute non-Windows paths, but we need a
- // separate check for UNC paths.
- volumeName := filepath.VolumeName(pattern)
- isWindowsUNC := strings.HasPrefix(pattern, `\\`)
- if isWindowsUNC || patternComponents[0] == volumeName {
- startComponentIndex := 1
- if isWindowsUNC {
- startComponentIndex = 4
- }
- return doGlob(fmt.Sprintf("%s%s", volumeName, string(os.PathSeparator)), patternComponents[startComponentIndex:], matches)
- }
-
- // otherwise, it's a relative pattern
- return doGlob(".", patternComponents, matches)
-}
-
-// Perform a glob
-func doGlob(basedir string, components, matches []string) (m []string, e error) {
- m = matches
- e = nil
-
- // figure out how many components we don't need to glob because they're
- // just names without patterns - we'll use os.Lstat below to check if that
- // path actually exists
- patLen := len(components)
- patIdx := 0
- for ; patIdx < patLen; patIdx++ {
- if strings.IndexAny(components[patIdx], "*?[{\\") >= 0 {
- break
- }
- }
- if patIdx > 0 {
- basedir = filepath.Join(basedir, filepath.Join(components[0:patIdx]...))
- }
-
- // Lstat will return an error if the file/directory doesn't exist
- fi, err := os.Lstat(basedir)
- if err != nil {
- return
- }
-
- // if there are no more components, we've found a match
- if patIdx >= patLen {
- m = append(m, basedir)
- return
- }
-
- // otherwise, we need to check each item in the directory...
- // first, if basedir is a symlink, follow it...
- if (fi.Mode() & os.ModeSymlink) != 0 {
- fi, err = os.Stat(basedir)
- if err != nil {
- return
- }
- }
-
- // confirm it's a directory...
- if !fi.IsDir() {
- return
- }
-
- // read directory
- dir, err := os.Open(basedir)
- if err != nil {
- return
- }
- defer dir.Close()
-
- files, _ := dir.Readdir(-1)
- lastComponent := (patIdx + 1) >= patLen
- if components[patIdx] == "**" {
- // if the current component is a doublestar, we'll try depth-first
- for _, file := range files {
- // if symlink, we may want to follow
- if (file.Mode() & os.ModeSymlink) != 0 {
- file, err = os.Stat(filepath.Join(basedir, file.Name()))
- if err != nil {
- continue
- }
- }
-
- if file.IsDir() {
- // recurse into directories
- if lastComponent {
- m = append(m, filepath.Join(basedir, file.Name()))
- }
- m, e = doGlob(filepath.Join(basedir, file.Name()), components[patIdx:], m)
- } else if lastComponent {
- // if the pattern's last component is a doublestar, we match filenames, too
- m = append(m, filepath.Join(basedir, file.Name()))
- }
- }
- if lastComponent {
- return // we're done
- }
- patIdx++
- lastComponent = (patIdx + 1) >= patLen
- }
-
- // check items in current directory and recurse
- var match bool
- for _, file := range files {
- match, e = matchComponent(components[patIdx], file.Name())
- if e != nil {
- return
- }
- if match {
- if lastComponent {
- m = append(m, filepath.Join(basedir, file.Name()))
- } else {
- m, e = doGlob(filepath.Join(basedir, file.Name()), components[patIdx+1:], m)
- }
- }
- }
- return
-}
-
-// Attempt to match a single pattern component with a path component
-func matchComponent(pattern, name string) (bool, error) {
- // check some base cases
- patternLen, nameLen := len(pattern), len(name)
- if patternLen == 0 && nameLen == 0 {
- return true, nil
- }
- if patternLen == 0 {
- return false, nil
- }
- if nameLen == 0 && pattern != "*" {
- return false, nil
- }
-
- // check for matches one rune at a time
- patIdx, nameIdx := 0, 0
- for patIdx < patternLen && nameIdx < nameLen {
- patRune, patAdj := utf8.DecodeRuneInString(pattern[patIdx:])
- nameRune, nameAdj := utf8.DecodeRuneInString(name[nameIdx:])
- if patRune == '\\' {
- // handle escaped runes
- patIdx += patAdj
- patRune, patAdj = utf8.DecodeRuneInString(pattern[patIdx:])
- if patRune == utf8.RuneError {
- return false, ErrBadPattern
- } else if patRune == nameRune {
- patIdx += patAdj
- nameIdx += nameAdj
- } else {
- return false, nil
- }
- } else if patRune == '*' {
- // handle stars
- if patIdx += patAdj; patIdx >= patternLen {
- // a star at the end of a pattern will always
- // match the rest of the path
- return true, nil
- }
-
- // check if we can make any matches
- for ; nameIdx < nameLen; nameIdx += nameAdj {
- if m, _ := matchComponent(pattern[patIdx:], name[nameIdx:]); m {
- return true, nil
- }
- }
- return false, nil
- } else if patRune == '[' {
- // handle character sets
- patIdx += patAdj
- endClass := indexRuneWithEscaping(pattern[patIdx:], ']')
- if endClass == -1 {
- return false, ErrBadPattern
- }
- endClass += patIdx
- classRunes := []rune(pattern[patIdx:endClass])
- classRunesLen := len(classRunes)
- if classRunesLen > 0 {
- classIdx := 0
- matchClass := false
- if classRunes[0] == '^' {
- classIdx++
- }
- for classIdx < classRunesLen {
- low := classRunes[classIdx]
- if low == '-' {
- return false, ErrBadPattern
- }
- classIdx++
- if low == '\\' {
- if classIdx < classRunesLen {
- low = classRunes[classIdx]
- classIdx++
- } else {
- return false, ErrBadPattern
- }
- }
- high := low
- if classIdx < classRunesLen && classRunes[classIdx] == '-' {
- // we have a range of runes
- if classIdx++; classIdx >= classRunesLen {
- return false, ErrBadPattern
- }
- high = classRunes[classIdx]
- if high == '-' {
- return false, ErrBadPattern
- }
- classIdx++
- if high == '\\' {
- if classIdx < classRunesLen {
- high = classRunes[classIdx]
- classIdx++
- } else {
- return false, ErrBadPattern
- }
- }
- }
- if low <= nameRune && nameRune <= high {
- matchClass = true
- }
- }
- if matchClass == (classRunes[0] == '^') {
- return false, nil
- }
- } else {
- return false, ErrBadPattern
- }
- patIdx = endClass + 1
- nameIdx += nameAdj
- } else if patRune == '{' {
- // handle alternatives such as {alt1,alt2,...}
- patIdx += patAdj
- endOptions := indexRuneWithEscaping(pattern[patIdx:], '}')
- if endOptions == -1 {
- return false, ErrBadPattern
- }
- endOptions += patIdx
- options := splitPathOnSeparator(pattern[patIdx:endOptions], ',')
- patIdx = endOptions + 1
- for _, o := range options {
- m, e := matchComponent(o+pattern[patIdx:], name[nameIdx:])
- if e != nil {
- return false, e
- }
- if m {
- return true, nil
- }
- }
- return false, nil
- } else if patRune == '?' || patRune == nameRune {
- // handle single-rune wildcard
- patIdx += patAdj
- nameIdx += nameAdj
- } else {
- return false, nil
- }
- }
- if patIdx >= patternLen && nameIdx >= nameLen {
- return true, nil
- }
- if nameIdx >= nameLen && pattern[patIdx:] == "*" || pattern[patIdx:] == "**" {
- return true, nil
- }
- return false, nil
-}
diff --git a/vendor/github.com/bmatcuk/doublestar/go.mod b/vendor/github.com/bmatcuk/doublestar/go.mod
deleted file mode 100644
index ce1688f73..000000000
--- a/vendor/github.com/bmatcuk/doublestar/go.mod
+++ /dev/null
@@ -1,3 +0,0 @@
-module github.com/bmatcuk/doublestar
-
-go 1.12
diff --git a/vendor/github.com/hashicorp/go-getter/.travis.yml b/vendor/github.com/hashicorp/go-getter/.travis.yml
deleted file mode 100644
index 4fe9176aa..000000000
--- a/vendor/github.com/hashicorp/go-getter/.travis.yml
+++ /dev/null
@@ -1,24 +0,0 @@
-sudo: false
-
-addons:
- apt:
- sources:
- - sourceline: 'ppa:git-core/ppa'
- packages:
- - git
-
-language: go
-
-os:
- - linux
- - osx
-
-go:
- - "1.11.x"
-
-before_script:
- - go build ./cmd/go-getter
-
-branches:
- only:
- - master
diff --git a/vendor/github.com/hashicorp/go-getter/README.md b/vendor/github.com/hashicorp/go-getter/README.md
index 3de23c709..bbcd15de9 100644
--- a/vendor/github.com/hashicorp/go-getter/README.md
+++ b/vendor/github.com/hashicorp/go-getter/README.md
@@ -1,10 +1,10 @@
# go-getter
-[![Build Status](http://img.shields.io/travis/hashicorp/go-getter.svg?style=flat-square)][travis]
+[![CircleCI](https://circleci.com/gh/hashicorp/go-getter/tree/master.svg?style=svg)][circleci]
[![Build status](https://ci.appveyor.com/api/projects/status/ulq3qr43n62croyq/branch/master?svg=true)][appveyor]
[![Go Documentation](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)][godocs]
-[travis]: http://travis-ci.org/hashicorp/go-getter
+[circleci]: https://circleci.com/gh/hashicorp/go-getter/tree/master
[godocs]: http://godoc.org/github.com/hashicorp/go-getter
[appveyor]: https://ci.appveyor.com/project/hashicorp/go-getter/branch/master
@@ -356,3 +356,7 @@ In order to access to GCS, authentication credentials should be provided. More i
- gcs::https://www.googleapis.com/storage/v1/bucket
- gcs::https://www.googleapis.com/storage/v1/bucket/foo.zip
- www.googleapis.com/storage/v1/bucket/foo
+
+#### GCS Testing
+
+The tests for `get_gcs.go` require GCP credentials to be set in your environment. The credentials can have any level of permissions on any project; they just need to exist. This means setting either `GOOGLE_APPLICATION_CREDENTIALS="~/path/to/credentials.json"` or `GOOGLE_CREDENTIALS="{stringified-credentials-json}"`. Because of this requirement, `get_gcs_test.go` will fail for external contributors in CircleCI.
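A hypothetical guard for running those tests locally; the test name and skip message are illustrative only and not part of go-getter:

```go
package getter

import (
	"os"
	"testing"
)

// TestGCSCredentialHint documents the environment the GCS tests expect and
// skips when the credentials described in the README are absent.
func TestGCSCredentialHint(t *testing.T) {
	if os.Getenv("GOOGLE_APPLICATION_CREDENTIALS") == "" && os.Getenv("GOOGLE_CREDENTIALS") == "" {
		t.Skip("set GOOGLE_APPLICATION_CREDENTIALS or GOOGLE_CREDENTIALS to run the get_gcs tests")
	}
}
```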
diff --git a/vendor/github.com/hashicorp/go-getter/client.go b/vendor/github.com/hashicorp/go-getter/client.go
index 007a78ba7..38fb43b8f 100644
--- a/vendor/github.com/hashicorp/go-getter/client.go
+++ b/vendor/github.com/hashicorp/go-getter/client.go
@@ -19,7 +19,7 @@ import (
// Using a client directly allows more fine-grained control over how downloading
// is done, as well as customizing the protocols supported.
type Client struct {
- // Ctx for cancellation
+ // Ctx for cancellation
Ctx context.Context
// Src is the source URL to get.
diff --git a/vendor/github.com/hashicorp/go-getter/get_git.go b/vendor/github.com/hashicorp/go-getter/get_git.go
index bb1ec316d..1b9f4be81 100644
--- a/vendor/github.com/hashicorp/go-getter/get_git.go
+++ b/vendor/github.com/hashicorp/go-getter/get_git.go
@@ -1,6 +1,7 @@
package getter
import (
+ "bytes"
"context"
"encoding/base64"
"fmt"
@@ -9,6 +10,7 @@ import (
"os"
"os/exec"
"path/filepath"
+ "regexp"
"runtime"
"strconv"
"strings"
@@ -24,6 +26,8 @@ type GitGetter struct {
getter
}
+var defaultBranchRegexp = regexp.MustCompile(`\s->\sorigin/(.*)`)
+
func (g *GitGetter) ClientMode(_ *url.URL) (ClientMode, error) {
return ClientModeDir, nil
}
@@ -182,10 +186,10 @@ func (g *GitGetter) update(ctx context.Context, dst, sshKeyFile, ref string, dep
cmd.Dir = dst
if getRunCommand(cmd) != nil {
- // Not a branch, switch to master. This will also catch non-existent
- // branches, in which case we want to switch to master and then
- // checkout the proper branch later.
- ref = "master"
+ // Not a branch, switch to default branch. This will also catch
+ // non-existent branches, in which case we want to switch to default
+ // and then checkout the proper branch later.
+ ref = findDefaultBranch(dst)
}
// We have to be on a branch to pull
@@ -216,6 +220,22 @@ func (g *GitGetter) fetchSubmodules(ctx context.Context, dst, sshKeyFile string,
return getRunCommand(cmd)
}
+// findDefaultBranch checks the repo's origin remote for its default branch
+// (generally "master"). "master" is returned if an origin default branch
+// can't be determined.
+func findDefaultBranch(dst string) string {
+ var stdoutbuf bytes.Buffer
+ cmd := exec.Command("git", "branch", "-r", "--points-at", "refs/remotes/origin/HEAD")
+ cmd.Dir = dst
+ cmd.Stdout = &stdoutbuf
+ err := cmd.Run()
+ matches := defaultBranchRegexp.FindStringSubmatch(stdoutbuf.String())
+ if err != nil || matches == nil {
+ return "master"
+ }
+ return matches[len(matches)-1]
+}
+
// setupGitEnv sets up the environment for the given command. This is used to
// pass configuration data to git and ssh and enables advanced cloning methods.
func setupGitEnv(cmd *exec.Cmd, sshKeyFile string) {
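The `findDefaultBranch` added above extracts the branch name from git's `origin/HEAD -> origin/<branch>` output; a standalone sketch of just that parsing step, using a canned sample of the command output:

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the defaultBranchRegexp introduced in the hunk above.
var defaultBranchRegexp = regexp.MustCompile(`\s->\sorigin/(.*)`)

func main() {
	// Typical output of: git branch -r --points-at refs/remotes/origin/HEAD
	out := "  origin/HEAD -> origin/main\n"

	matches := defaultBranchRegexp.FindStringSubmatch(out)
	if matches == nil {
		fmt.Println("master") // fallback used by findDefaultBranch
		return
	}
	fmt.Println(matches[len(matches)-1]) // main
}
```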
diff --git a/vendor/github.com/hashicorp/go-getter/get_http.go b/vendor/github.com/hashicorp/go-getter/get_http.go
index 7c4541c6e..9ffdba78a 100644
--- a/vendor/github.com/hashicorp/go-getter/get_http.go
+++ b/vendor/github.com/hashicorp/go-getter/get_http.go
@@ -9,7 +9,6 @@ import (
"net/url"
"os"
"path/filepath"
- "strconv"
"strings"
safetemp "github.com/hashicorp/go-safetemp"
@@ -88,7 +87,10 @@ func (g *HttpGetter) Get(dst string, u *url.URL) error {
return err
}
- req.Header = g.Header
+ if g.Header != nil {
+ req.Header = g.Header
+ }
+
resp, err := g.Client.Do(req)
if err != nil {
return err
@@ -128,6 +130,12 @@ func (g *HttpGetter) Get(dst string, u *url.URL) error {
return g.getSubdir(ctx, dst, source, subDir)
}
+// GetFile fetches the file from src and stores it at dst.
+// If the server supports Accept-Ranges, HttpGetter will attempt a range
+// request. This means it is the caller's responsibility to ensure that an
+// older version of the destination file does not exist, else it will either be
+// falsely identified as already complete or corrupted with extra bytes
+// appended.
func (g *HttpGetter) GetFile(dst string, src *url.URL) error {
ctx := g.Context()
if g.Netrc {
@@ -136,7 +144,6 @@ func (g *HttpGetter) GetFile(dst string, src *url.URL) error {
return err
}
}
-
// Create all the parent directories if needed
if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil {
return err
@@ -165,18 +172,17 @@ func (g *HttpGetter) GetFile(dst string, src *url.URL) error {
req.Header = g.Header
}
headResp, err := g.Client.Do(req)
- if err == nil && headResp != nil {
+ if err == nil {
headResp.Body.Close()
if headResp.StatusCode == 200 {
// If the HEAD request succeeded, then attempt to set the range
// query if we can.
- if headResp.Header.Get("Accept-Ranges") == "bytes" {
+ if headResp.Header.Get("Accept-Ranges") == "bytes" && headResp.ContentLength >= 0 {
if fi, err := f.Stat(); err == nil {
- if _, err = f.Seek(0, os.SEEK_END); err == nil {
- req.Header.Set("Range", fmt.Sprintf("bytes=%d-", fi.Size()))
+ if _, err = f.Seek(0, io.SeekEnd); err == nil {
currentFileSize = fi.Size()
- totalFileSize, _ := strconv.ParseInt(headResp.Header.Get("Content-Length"), 10, 64)
- if currentFileSize >= totalFileSize {
+ req.Header.Set("Range", fmt.Sprintf("bytes=%d-", currentFileSize))
+ if currentFileSize >= headResp.ContentLength {
// file already present
return nil
}
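For context, a simplified standalone sketch of the resume flow this hunk implements (HEAD first, then a `Range` request keyed off the local file size); it is not the getter's code and omits netrc, checksums, and subdir handling:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

// resume appends the missing tail of url to dst. As the new GetFile doc
// comment warns, stale partial files are the caller's responsibility.
func resume(dst, url string) error {
	f, err := os.OpenFile(dst, os.O_RDWR|os.O_CREATE, 0666)
	if err != nil {
		return err
	}
	defer f.Close()

	var offset int64
	head, err := http.Head(url)
	if err == nil {
		head.Body.Close()
		// Only resume when the server advertises byte ranges and a known length.
		if head.StatusCode == 200 && head.Header.Get("Accept-Ranges") == "bytes" && head.ContentLength >= 0 {
			if fi, err := f.Stat(); err == nil {
				if fi.Size() >= head.ContentLength {
					return nil // file already present
				}
				offset = fi.Size()
			}
		}
	}

	req, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return err
	}
	if offset > 0 {
		// Ask for the remaining bytes and append them to the local file.
		req.Header.Set("Range", fmt.Sprintf("bytes=%d-", offset))
		if _, err := f.Seek(0, io.SeekEnd); err != nil {
			return err
		}
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	_, err = io.Copy(f, resp.Body)
	return err
}

func main() {
	fmt.Println(resume("index.html", "https://example.com/"))
}
```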
diff --git a/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md b/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md
index ccb46bbd8..4c644fcfb 100644
--- a/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md
+++ b/vendor/github.com/hashicorp/hcl/v2/CHANGELOG.md
@@ -1,5 +1,38 @@
# HCL Changelog
+## v2.3.0 (Jan 3, 2020)
+
+### Enhancements
+
+* ext/tryfunc: Optional functions `try` and `can` to include in your `hcl.EvalContext` when evaluating expressions, which allow users to make decisions based on the success of expressions. ([#330](https://github.com/hashicorp/hcl/pull/330))
+* ext/typeexpr: Now has an optional function `convert` which you can include in your `hcl.EvalContext` when evaluating expressions, allowing users to convert values to specific type constraints using the type constraint expression syntax. ([#330](https://github.com/hashicorp/hcl/pull/330))
+* ext/typeexpr: A new `cty` capsule type `typeexpr.TypeConstraintType` which, when used as either a type constraint for a function parameter or as a type constraint for a `hcldec` attribute specification will cause the given expression to be interpreted as a type constraint expression rather than a value expression. ([#330](https://github.com/hashicorp/hcl/pull/330))
+* ext/customdecode: An optional extension that allows overriding the static decoding behavior for expressions either in function arguments or `hcldec` attribute specifications. ([#330](https://github.com/hashicorp/hcl/pull/330))
+* ext/customdecode: New `cty` capsule types `customdecode.ExpressionType` and `customdecode.ExpressionClosureType` which, when used as either a type constraint for a function parameter or as a type constraint for a `hcldec` attribute specification, will cause the given expression (and, for the closure type, also the `hcl.EvalContext` it was evaluated in) to be captured for later analysis, rather than immediately evaluated. ([#330](https://github.com/hashicorp/hcl/pull/330))
+
+## v2.2.0 (Dec 11, 2019)
+
+### Enhancements
+
+* hcldec: Attribute evaluation (as part of `AttrSpec` or `BlockAttrsSpec`) now captures expression evaluation metadata in any errors it produces during type conversions, allowing for better feedback in calling applications that are able to make use of this metadata when printing diagnostic messages. ([#329](https://github.com/hashicorp/hcl/pull/329))
+
+### Bugs Fixed
+
+* hclsyntax: `IndexExpr`, `SplatExpr`, and `RelativeTraversalExpr` will now report a source range that covers all of their child expression nodes. Previously they would report only the operator part, such as `["foo"]`, `[*]`, or `.foo`, which was problematic for callers using source ranges for code analysis. ([#328](https://github.com/hashicorp/hcl/pull/328))
+* hclwrite: Parser will no longer panic when the input includes index, splat, or relative traversal syntax. ([#328](https://github.com/hashicorp/hcl/pull/328))
+
+## v2.1.0 (Nov 19, 2019)
+
+### Enhancements
+
+* gohcl: When decoding into a struct value with some fields already populated, those values will be retained if not explicitly overwritten in the given HCL body, with overriding/merging behavior similar to that of `json.Unmarshal` in the Go standard library.
+* hclwrite: New interface to set the expression for an attribute to be a raw token sequence, with no special processing. This has some caveats, so if you intend to use it please refer to the godoc comments. ([#320](https://github.com/hashicorp/hcl/pull/320))
+
+### Bugs Fixed
+
+* hclwrite: The `Body.Blocks` method was returning the blocks in an undefined order, rather than preserving the order of declaration in the source input. ([#313](https://github.com/hashicorp/hcl/pull/313))
+* hclwrite: The `TokensForTraversal` function (and thus in turn the `Body.SetAttributeTraversal` method) was not correctly handling index steps in traversals, and thus producing invalid results. ([#319](https://github.com/hashicorp/hcl/pull/319))
+
## v2.0.0 (Oct 2, 2019)
Initial release of HCL 2, which is a new implementation combining the HCL 1
diff --git a/vendor/github.com/hashicorp/hcl/v2/README.md b/vendor/github.com/hashicorp/hcl/v2/README.md
index d807a4245..3d0d509d5 100644
--- a/vendor/github.com/hashicorp/hcl/v2/README.md
+++ b/vendor/github.com/hashicorp/hcl/v2/README.md
@@ -8,7 +8,7 @@ towards devops tools, servers, etc.
> **NOTE:** This is major version 2 of HCL, whose Go API is incompatible with
> major version 1. Both versions are available for selection in Go Modules
> projects. HCL 2 _cannot_ be imported from Go projects that are not using Go Modules. For more information, see
-> [our version selection guide](https://github.com/golang/go/wiki/Version-Selection).
+> [our version selection guide](https://github.com/hashicorp/hcl/wiki/Version-Selection).
HCL has both a _native syntax_, intended to be pleasant to read and write for
humans, and a JSON-based variant that is easier for machines to generate
@@ -51,7 +51,8 @@ func main() {
```
A lower-level API is available for applications that need more control over
-the parsing, decoding, and evaluation of configuration.
+the parsing, decoding, and evaluation of configuration. For more information,
+see [the package documentation](https://pkg.go.dev/github.com/hashicorp/hcl/v2).
## Why?
@@ -156,9 +157,9 @@ syntax allows use of arbitrary expressions within JSON strings:
For more information, see the detailed specifications:
-* [Syntax-agnostic Information Model](hcl/spec.md)
-* [HCL Native Syntax](hcl/hclsyntax/spec.md)
-* [JSON Representation](hcl/json/spec.md)
+* [Syntax-agnostic Information Model](spec.md)
+* [HCL Native Syntax](hclsyntax/spec.md)
+* [JSON Representation](json/spec.md)
## Changes in 2.0
diff --git a/vendor/github.com/hashicorp/hcl/v2/appveyor.yml b/vendor/github.com/hashicorp/hcl/v2/appveyor.yml
new file mode 100644
index 000000000..e382f8f57
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/v2/appveyor.yml
@@ -0,0 +1,13 @@
+build: off
+
+clone_folder: c:\gopath\src\github.com\hashicorp\hcl
+
+environment:
+ GOPATH: c:\gopath
+ GO111MODULE: on
+ GOPROXY: https://goproxy.io
+
+stack: go 1.12
+
+test_script:
+ - go test ./...
diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/README.md b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/README.md
new file mode 100644
index 000000000..1636f577a
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/README.md
@@ -0,0 +1,209 @@
+# HCL Custom Static Decoding Extension
+
+This HCL extension provides a mechanism for defining arguments in an HCL-based
+language whose values are derived using custom decoding rules against the
+HCL expression syntax, overriding the usual behavior of normal expression
+evaluation.
+
+"Arguments", for the purpose of this extension, currently includes the
+following two contexts:
+
+* For applications using `hcldec` for dynamic decoding, a `hcldec.AttrSpec`
+ or `hcldec.BlockAttrsSpec` can be given a special type constraint that
+ opts in to custom decoding behavior for the attribute(s) that are selected
+ by that specification.
+
+* When working with the HCL native expression syntax, a function given in
+ the `hcl.EvalContext` during evaluation can have parameters with special
+ type constraints that opt in to custom decoding behavior for the argument
+ expression associated with that parameter in any call.
+
+The above use-cases are rather abstract, so we'll consider a motivating
+real-world example: sometimes we (language designers) need to allow users
+to specify type constraints directly in the language itself, such as in
+[Terraform's Input Variables](https://www.terraform.io/docs/configuration/variables.html).
+Terraform's `variable` blocks include an argument called `type` which takes
+a type constraint given using HCL expression building-blocks as defined by
+[the HCL `typeexpr` extension](../typeexpr/README.md).
+
+A "type constraint expression" of that sort is not an expression intended to
+be evaluated in the usual way. Instead, the physical expression is
+deconstructed using [the static analysis operations](../../spec.md#static-analysis)
+to produce a `cty.Type` as the result, rather than a `cty.Value`.
+
+The purpose of this Custom Static Decoding Extension, then, is to provide a
+bridge to allow that sort of custom decoding to be used via mechanisms that
+normally deal in `cty.Value`, such as `hcldec` and native syntax function
+calls as listed above.
+
+(Note: [`gohcl`](https://pkg.go.dev/github.com/hashicorp/hcl/v2/gohcl) has
+its own mechanism to support this use case, exploiting the fact that it is
+working directly with "normal" Go types. Decoding into a struct field of
+type `hcl.Expression` obtains the expression directly without evaluating it
+first. The Custom Static Decoding Extension is not necessary for that `gohcl`
+technique. You can also implement custom decoding by working directly with
+the lowest-level HCL API, which separates extraction of and evaluation of
+expressions into two steps.)
+
+## Custom Decoding Types
+
+This extension relies on a convention implemented in terms of
+[_Capsule Types_ in the underlying `cty` type system](https://github.com/zclconf/go-cty/blob/master/docs/types.md#capsule-types). `cty` allows a capsule type to carry arbitrary
+extension metadata values as an aid to creating higher-level abstractions like
+this extension.
+
+A custom argument decoding mode, then, is implemented by creating a new `cty`
+capsule type that implements the `ExtensionData` custom operation to return
+a decoding function when requested. For example:
+
+```go
+var keywordType cty.Type
+keywordType = cty.CapsuleWithOps("keyword", reflect.TypeOf(""), &cty.CapsuleOps{
+ ExtensionData: func(key interface{}) interface{} {
+ switch key {
+ case customdecode.CustomExpressionDecoder:
+ return customdecode.CustomExpressionDecoderFunc(
+ func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+ var diags hcl.Diagnostics
+ kw := hcl.ExprAsKeyword(expr)
+ if kw == "" {
+ diags = append(diags, &hcl.Diagnostic{
+ Severity: hcl.DiagError,
+ Summary: "Invalid keyword",
+ Detail: "A keyword is required",
+ Subject: expr.Range().Ptr(),
+ })
+ return cty.UnknownVal(keywordType), diags
+ }
+ return cty.CapsuleVal(keywordType, &kw), nil
+ },
+ )
+ default:
+ return nil
+ }
+ },
+})
+```
+
+The boilerplate here is a bit fussy, but the important part for our purposes
+is the `case customdecode.CustomExpressionDecoder:` clause, which uses
+a custom extension key type defined in this package to recognize when a
+component implementing this extension is checking to see if a target type
+has a custom decode implementation.
+
+In the above case we've defined a type that decodes expressions as static
+keywords, so a keyword like `foo` would decode as an encapsulated `"foo"`
+string, while any other sort of expression like `"baz"` or `1 + 1` would
+return an error.
+
+We could then use `keywordType` as a type constraint either for a function
+parameter or a `hcldec` attribute specification, which would require the
+argument for that function parameter or the expression for the matching
+attributes to be a static keyword, rather than an arbitrary expression.
+For example, in a `hcldec.AttrSpec`:
+
+```go
+keywordSpec := &hcldec.AttrSpec{
+ Name: "keyword",
+ Type: keywordType,
+}
+```
+
+The above would accept input like the following and would set its result to
+a `cty.Value` of `keywordType`, after decoding:
+
+```hcl
+keyword = foo
+```
+
+## The Expression and Expression Closure `cty` types
+
+Building on the above, this package also includes two capsule types that use
+the above mechanism to allow calling applications to capture expressions
+directly and thus defer analysis to a later step, after initial decoding.
+
+The `customdecode.ExpressionType` type encapsulates an `hcl.Expression` alone,
+for situations like our type constraint expression example above where it's
+the static structure of the expression we want to inspect, and thus any
+variables and functions defined in the evaluation context are irrelevant.
+
+The `customdecode.ExpressionClosureType` type encapsulates a
+`*customdecode.ExpressionClosure` value, which binds the given expression to
+the `hcl.EvalContext` it was asked to evaluate against and thus allows the
+receiver of that result to later perform normal evaluation of the expression
+with all the same variables and functions that would've been available to it
+naturally.
+
+Both of these types can be used as type constraints either for `hcldec`
+attribute specifications or for function arguments. Here's an example of
+`ExpressionClosureType` to implement a function that can evaluate
+an expression with some additional variables defined locally, which we'll
+call the `with(...)` function:
+
+```go
+var WithFunc = function.New(&function.Spec{
+ Params: []function.Parameter{
+ {
+ Name: "variables",
+ Type: cty.DynamicPseudoType,
+ },
+ {
+ Name: "expression",
+ Type: customdecode.ExpressionClosureType,
+ },
+ },
+ Type: func(args []cty.Value) (cty.Type, error) {
+ varsVal := args[0]
+ exprVal := args[1]
+ if !varsVal.Type().IsObjectType() {
+ return cty.NilVal, function.NewArgErrorf(0, "must be an object defining local variables")
+ }
+ if !varsVal.IsKnown() {
+ // We can't predict our result type until the variables object
+ // is known.
+ return cty.DynamicPseudoType, nil
+ }
+ vars := varsVal.AsValueMap()
+ closure := customdecode.ExpressionClosureFromVal(exprVal)
+ result, err := evalWithLocals(vars, closure)
+ if err != nil {
+ return cty.NilVal, err
+ }
+ return result.Type(), nil
+ },
+ Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+ varsVal := args[0]
+ exprVal := args[1]
+ vars := varsVal.AsValueMap()
+ closure := customdecode.ExpressionClosureFromVal(exprVal)
+ return evalWithLocals(vars, closure)
+ },
+})
+
+func evalWithLocals(locals map[string]cty.Value, closure *customdecode.ExpressionClosure) (cty.Value, error) {
+ childCtx := closure.EvalContext.NewChild()
+ childCtx.Variables = locals
+ val, diags := closure.Expression.Value(childCtx)
+ if diags.HasErrors() {
+ return cty.NilVal, function.NewArgErrorf(1, "couldn't evaluate expression: %s", diags.Error())
+ }
+ return val, nil
+}
+```
+
+If the above function were placed into an `hcl.EvalContext` as `with`, it
+could be used in a native syntax call to that function as follows:
+
+```hcl
+ foo = with({name = "Cory"}, "${greeting}, ${name}!")
+```
+
+The above assumes a variable in the main context called `greeting`, to which
+the `with` function adds `name` before evaluating the expression given in
+its second argument. This makes that second argument context-sensitive -- it
+would behave differently if the user wrote the same thing somewhere else -- so
+this capability should be used with care to make sure it doesn't cause confusion
+for the end-users of your language.
+
+There are some other examples of this capability to evaluate expressions in
+unusual ways in the `tryfunc` directory that is a sibling of this one.
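
Tying the keyword example in this new README together: a caller parses a body containing `keyword = foo` and decodes it with the `keywordSpec` shown earlier, getting back a capsule value of `keywordType` that wraps the string `"foo"`. A minimal sketch, assuming the `keywordType` and `keywordSpec` definitions above are in scope along with the usual `hcl`, `hclsyntax`, `hcldec`, `fmt`, and `log` imports:

```go
src := []byte(`keyword = foo`)
f, diags := hclsyntax.ParseConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1})
if diags.HasErrors() {
	log.Fatal(diags)
}

// keywordSpec routes the attribute expression through keywordType's custom
// decoder instead of normal evaluation, so `foo` is read as a keyword.
val, diags := hcldec.Decode(f.Body, keywordSpec, nil)
if diags.HasErrors() {
	log.Fatal(diags)
}

kw := *(val.EncapsulatedValue().(*string))
fmt.Println(kw) // foo
```
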
diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/customdecode.go b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/customdecode.go
new file mode 100644
index 000000000..c9d7a1efb
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/customdecode.go
@@ -0,0 +1,56 @@
+// Package customdecode contains an HCL extension that allows, in certain
+// contexts, expression evaluation to be overridden by custom static analysis.
+//
+// This mechanism is only supported in certain specific contexts where
+// expressions are decoded with a specific target type in mind. For more
+// information, see the documentation on CustomExpressionDecoder.
+package customdecode
+
+import (
+ "github.com/hashicorp/hcl/v2"
+ "github.com/zclconf/go-cty/cty"
+)
+
+type customDecoderImpl int
+
+// CustomExpressionDecoder is a value intended to be used as a cty capsule
+// type ExtensionData key for capsule types whose values are to be obtained
+// by static analysis of an expression rather than normal evaluation of that
+// expression.
+//
+// When a cooperating capsule type is asked for ExtensionData with this key,
+// it must return a non-nil CustomExpressionDecoderFunc value.
+//
+// This mechanism is not universally supported; instead, it's handled in a few
+// specific places where expressions are evaluated with the intent of producing
+// a cty.Value of a type given by the calling application.
+//
+// Specifically, this currently works for type constraints given in
+// hcldec.AttrSpec and hcldec.BlockAttrsSpec, and it works for arguments to
+// function calls in the HCL native syntax. HCL extensions implemented outside
+// of the main HCL module may also implement this; consult their own
+// documentation for details.
+const CustomExpressionDecoder = customDecoderImpl(1)
+
+// CustomExpressionDecoderFunc is the type of value that must be returned by
+// a capsule type handling the key CustomExpressionDecoder in its ExtensionData
+// implementation.
+//
+// If no error diagnostics are returned, the result value MUST be of the
+// capsule type that the decoder function was derived from. If the returned
+// error diagnostics prevent producing a value at all, return cty.NilVal.
+type CustomExpressionDecoderFunc func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics)
+
+// CustomExpressionDecoderForType takes any cty type and returns its
+// custom expression decoder implementation if it has one. If it is not a
+// capsule type or it does not implement a custom expression decoder, this
+// function returns nil.
+func CustomExpressionDecoderForType(ty cty.Type) CustomExpressionDecoderFunc {
+ if !ty.IsCapsuleType() {
+ return nil
+ }
+ if fn, ok := ty.CapsuleExtensionData(CustomExpressionDecoder).(CustomExpressionDecoderFunc); ok {
+ return fn
+ }
+ return nil
+}
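
Consumers of this package follow the pattern that the hcldec and hclsyntax hunks later in this diff adopt: ask `CustomExpressionDecoderForType` whether the target type opts in, and only fall back to normal evaluation plus conversion otherwise. A condensed sketch of that shape (assuming the usual `hcl`, `cty`, and `convert` imports; the function name is illustrative):

```go
func decodeForType(expr hcl.Expression, ctx *hcl.EvalContext, ty cty.Type) (cty.Value, hcl.Diagnostics) {
	// If ty is a capsule type implementing the handshake, delegate to it.
	if decodeFn := customdecode.CustomExpressionDecoderForType(ty); decodeFn != nil {
		v, diags := decodeFn(expr, ctx)
		if v == cty.NilVal {
			v = cty.UnknownVal(ty)
		}
		return v, diags
	}

	// Otherwise evaluate normally and convert to the wanted type.
	v, diags := expr.Value(ctx)
	v, err := convert.Convert(v, ty)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid value",
			Detail:   err.Error(),
			Subject:  expr.Range().Ptr(),
		})
		return cty.UnknownVal(ty), diags
	}
	return v, diags
}
```
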
diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/expression_type.go b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/expression_type.go
new file mode 100644
index 000000000..af7c66c23
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/v2/ext/customdecode/expression_type.go
@@ -0,0 +1,146 @@
+package customdecode
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/hashicorp/hcl/v2"
+ "github.com/zclconf/go-cty/cty"
+)
+
+// ExpressionType is a cty capsule type that carries hcl.Expression values.
+//
+// This type implements custom decoding in the most general way possible: it
+// just captures whatever expression is given to it, with no further processing
+// whatsoever. It could therefore be useful in situations where an application
+// must defer processing of the expression content until a later step.
+//
+// ExpressionType only captures the expression, not the evaluation context it
+// was destined to be evaluated in. That means this type can be fine for
+// situations where the recipient of the value only intends to do static
+// analysis, but ExpressionClosureType is more appropriate in situations where
+// the recipient will eventually evaluate the given expression.
+var ExpressionType cty.Type
+
+// ExpressionVal returns a new cty value of type ExpressionType, wrapping the
+// given expression.
+func ExpressionVal(expr hcl.Expression) cty.Value {
+ return cty.CapsuleVal(ExpressionType, &expr)
+}
+
+// ExpressionFromVal returns the expression encapsulated in the given value, or
+// panics if the value is not a known value of ExpressionType.
+func ExpressionFromVal(v cty.Value) hcl.Expression {
+ if !v.Type().Equals(ExpressionType) {
+ panic("value is not of ExpressionType")
+ }
+ ptr := v.EncapsulatedValue().(*hcl.Expression)
+ return *ptr
+}
+
+// ExpressionClosureType is a cty capsule type that carries hcl.Expression
+// values along with their original evaluation contexts.
+//
+// This is similar to ExpressionType except that during custom decoding it
+// also captures the hcl.EvalContext that was provided, allowing callers to
+// evaluate the expression later in the same context where it would originally
+// have been evaluated, or a context derived from that one.
+var ExpressionClosureType cty.Type
+
+// ExpressionClosure is the type encapsulated in ExpressionClosureType
+type ExpressionClosure struct {
+ Expression hcl.Expression
+ EvalContext *hcl.EvalContext
+}
+
+// ExpressionClosureVal returns a new cty value of type ExpressionClosureType,
+// wrapping the given expression closure.
+func ExpressionClosureVal(closure *ExpressionClosure) cty.Value {
+ return cty.CapsuleVal(ExpressionClosureType, closure)
+}
+
+// Value evaluates the closure's expression using the closure's EvalContext,
+// returning the result.
+func (c *ExpressionClosure) Value() (cty.Value, hcl.Diagnostics) {
+ return c.Expression.Value(c.EvalContext)
+}
+
+// ExpressionClosureFromVal returns the expression closure encapsulated in the
+// given value, or panics if the value is not a known value of
+// ExpressionClosureType.
+//
+// The caller MUST NOT modify the returned closure or the EvalContext inside
+// it. To derive a new EvalContext, either create a child context or make
+// a copy.
+func ExpressionClosureFromVal(v cty.Value) *ExpressionClosure {
+ if !v.Type().Equals(ExpressionClosureType) {
+ panic("value is not of ExpressionClosureType")
+ }
+ return v.EncapsulatedValue().(*ExpressionClosure)
+}
+
+func init() {
+ // Getting hold of a reflect.Type for hcl.Expression is a bit tricky because
+ // it's an interface type, but we can do it with some indirection.
+ goExpressionType := reflect.TypeOf((*hcl.Expression)(nil)).Elem()
+
+ ExpressionType = cty.CapsuleWithOps("expression", goExpressionType, &cty.CapsuleOps{
+ ExtensionData: func(key interface{}) interface{} {
+ switch key {
+ case CustomExpressionDecoder:
+ return CustomExpressionDecoderFunc(
+ func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+ return ExpressionVal(expr), nil
+ },
+ )
+ default:
+ return nil
+ }
+ },
+ TypeGoString: func(_ reflect.Type) string {
+ return "customdecode.ExpressionType"
+ },
+ GoString: func(raw interface{}) string {
+ exprPtr := raw.(*hcl.Expression)
+ return fmt.Sprintf("customdecode.ExpressionVal(%#v)", *exprPtr)
+ },
+ RawEquals: func(a, b interface{}) bool {
+ aPtr := a.(*hcl.Expression)
+ bPtr := b.(*hcl.Expression)
+ return reflect.DeepEqual(*aPtr, *bPtr)
+ },
+ })
+ ExpressionClosureType = cty.CapsuleWithOps("expression closure", reflect.TypeOf(ExpressionClosure{}), &cty.CapsuleOps{
+ ExtensionData: func(key interface{}) interface{} {
+ switch key {
+ case CustomExpressionDecoder:
+ return CustomExpressionDecoderFunc(
+ func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+ return ExpressionClosureVal(&ExpressionClosure{
+ Expression: expr,
+ EvalContext: ctx,
+ }), nil
+ },
+ )
+ default:
+ return nil
+ }
+ },
+ TypeGoString: func(_ reflect.Type) string {
+ return "customdecode.ExpressionClosureType"
+ },
+ GoString: func(raw interface{}) string {
+ closure := raw.(*ExpressionClosure)
+ return fmt.Sprintf("customdecode.ExpressionClosureVal(%#v)", closure)
+ },
+ RawEquals: func(a, b interface{}) bool {
+ closureA := a.(*ExpressionClosure)
+ closureB := b.(*ExpressionClosure)
+ // The expression itself compares by deep equality, but EvalContexts
+ // conventionally compare by pointer identity, so we'll comply
+ // with both conventions here by testing them separately.
+ return closureA.EvalContext == closureB.EvalContext &&
+ reflect.DeepEqual(closureA.Expression, closureB.Expression)
+ },
+ })
+}
diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/README.md b/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/README.md
index ec7094702..058f1e3d8 100644
--- a/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/README.md
+++ b/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/README.md
@@ -65,3 +65,71 @@ type checking it will be one that has identifiers as its attributes; object
types with weird attributes generally show up only from arbitrary object
constructors in configuration files, which are usually treated either as maps
or as the dynamic pseudo-type.
+
+## Type Constraints as Values
+
+Along with defining a convention for writing down types using HCL expression
+constructs, this package also includes a mechanism for representing types as
+values that can be used as data within an HCL-based language.
+
+`typeexpr.TypeConstraintType` is a
+[`cty` capsule type](https://github.com/zclconf/go-cty/blob/master/docs/types.md#capsule-types)
+that encapsulates `cty.Type` values. You can construct such a value directly
+using the `TypeConstraintVal` function:
+
+```go
+tyVal := typeexpr.TypeConstraintVal(cty.String)
+
+// We can unpack the type from a value using TypeConstraintFromVal
+ty := typeexpr.TypeConstraintFromVal(tyVal)
+```
+
+However, the primary purpose of `typeexpr.TypeConstraintType` is to be
+specified as the type constraint for an argument, in which case it serves
+as a signal for HCL to treat the argument expression as a type constraint
+expression as defined above, rather than as a normal value expression.
+
+"An argument" in the above in practice means the following two locations:
+
+* As the type constraint for a parameter of a cty function that will be
+ used in an `hcl.EvalContext`. In that case, function calls in the HCL
+ native expression syntax will require the argument to be valid type constraint
+ expression syntax and the function implementation will receive a
+ `TypeConstraintType` value as the argument value for that parameter.
+
+* As the type constraint for a `hcldec.AttrSpec` or `hcldec.BlockAttrsSpec`
+ when decoding an HCL body using `hcldec`. In that case, the attributes
+ with that type constraint will be required to be valid type constraint
+ expression syntax and the result will be a `TypeConstraintType` value.
+
+Note that the special handling of these arguments means that an argument
+marked in this way must use the type constraint syntax directly. It is not
+valid to pass in a value of `TypeConstraintType` that has been obtained
+dynamically via some other expression result.
+
+`TypeConstraintType` is provided with the intent of using it internally within
+application code when incorporating type constraint expression syntax into
+an HCL-based language, not to be used for dynamic "programming with types". A
+calling application could support programming with types by defining its _own_
+capsule type, but that is not the purpose of `TypeConstraintType`.
+
+## The "convert" `cty` Function
+
+Building on the `TypeConstraintType` described in the previous section, this
+package also provides `typeexpr.ConvertFunc` which is a cty function that
+can be placed into an `hcl.EvalContext` (conventionally named "convert") in
+order to provide a general type conversion function in an HCL-based language:
+
+```hcl
+ foo = convert("true", bool)
+```
+
+The second parameter uses the mechanism described in the previous section to
+require its argument to be a type constraint expression rather than a value
+expression. In doing so, it allows converting with any type constraint that
+can be expressed in this package's type constraint syntax. In the above example,
+the `foo` argument would receive a boolean true, or `cty.True` in `cty` terms.
+
+The target type constraint must always be provided statically using inline
+type constraint syntax. There is no way to _dynamically_ select a type
+constraint using this function.
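
To use the function described above, an application registers `typeexpr.ConvertFunc` under the name `convert` in its evaluation context; the second argument is then parsed as a type constraint expression rather than evaluated. A minimal sketch, assuming the `hclsyntax` parser and the usual `hcl`, `cty`, `function`, `fmt`, and `log` imports:

```go
ctx := &hcl.EvalContext{
	Functions: map[string]function.Function{
		"convert": typeexpr.ConvertFunc,
	},
}

expr, diags := hclsyntax.ParseExpression(
	[]byte(`convert("true", bool)`), "example.hcl", hcl.Pos{Line: 1, Column: 1},
)
if diags.HasErrors() {
	log.Fatal(diags)
}

v, diags := expr.Value(ctx)
if diags.HasErrors() {
	log.Fatal(diags)
}
fmt.Println(v.True()) // true, i.e. cty.True
```
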
diff --git a/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/type_type.go b/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/type_type.go
new file mode 100644
index 000000000..5462d82c3
--- /dev/null
+++ b/vendor/github.com/hashicorp/hcl/v2/ext/typeexpr/type_type.go
@@ -0,0 +1,118 @@
+package typeexpr
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/hashicorp/hcl/v2"
+ "github.com/hashicorp/hcl/v2/ext/customdecode"
+ "github.com/zclconf/go-cty/cty"
+ "github.com/zclconf/go-cty/cty/convert"
+ "github.com/zclconf/go-cty/cty/function"
+)
+
+// TypeConstraintType is a cty capsule type that allows cty type constraints to
+// be used as values.
+//
+// If TypeConstraintType is used in a context supporting the
+// customdecode.CustomExpressionDecoder extension then it will implement
+// expression decoding using the TypeConstraint function, thus allowing
+// type expressions to be used in contexts where value expressions might
+// normally be expected, such as in arguments to function calls.
+var TypeConstraintType cty.Type
+
+// TypeConstraintVal constructs a cty.Value whose type is
+// TypeConstraintType.
+func TypeConstraintVal(ty cty.Type) cty.Value {
+ return cty.CapsuleVal(TypeConstraintType, &ty)
+}
+
+// TypeConstraintFromVal extracts the type from a cty.Value of
+// TypeConstraintType that was previously constructed using TypeConstraintVal.
+//
+// If the given value isn't a known, non-null value of TypeConstraintType
+// then this function will panic.
+func TypeConstraintFromVal(v cty.Value) cty.Type {
+ if !v.Type().Equals(TypeConstraintType) {
+ panic("value is not of TypeConstraintType")
+ }
+ ptr := v.EncapsulatedValue().(*cty.Type)
+ return *ptr
+}
+
+// ConvertFunc is a cty function that implements type conversions.
+//
+// Its signature is as follows:
+// convert(value, type_constraint)
+//
+// ...where type_constraint is a type constraint expression as defined by
+// typeexpr.TypeConstraint.
+//
+// It relies on HCL's customdecode extension and so it's not suitable for use
+// in non-HCL contexts or if you are using an HCL syntax implementation that
+// does not support customdecode for function arguments. However, it _is_
+// supported for function calls in the HCL native expression syntax.
+var ConvertFunc function.Function
+
+func init() {
+ TypeConstraintType = cty.CapsuleWithOps("type constraint", reflect.TypeOf(cty.Type{}), &cty.CapsuleOps{
+ ExtensionData: func(key interface{}) interface{} {
+ switch key {
+ case customdecode.CustomExpressionDecoder:
+ return customdecode.CustomExpressionDecoderFunc(
+ func(expr hcl.Expression, ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
+ ty, diags := TypeConstraint(expr)
+ if diags.HasErrors() {
+ return cty.NilVal, diags
+ }
+ return TypeConstraintVal(ty), nil
+ },
+ )
+ default:
+ return nil
+ }
+ },
+ TypeGoString: func(_ reflect.Type) string {
+ return "typeexpr.TypeConstraintType"
+ },
+ GoString: func(raw interface{}) string {
+ tyPtr := raw.(*cty.Type)
+ return fmt.Sprintf("typeexpr.TypeConstraintVal(%#v)", *tyPtr)
+ },
+ RawEquals: func(a, b interface{}) bool {
+ aPtr := a.(*cty.Type)
+ bPtr := b.(*cty.Type)
+ return (*aPtr).Equals(*bPtr)
+ },
+ })
+
+ ConvertFunc = function.New(&function.Spec{
+ Params: []function.Parameter{
+ {
+ Name: "value",
+ Type: cty.DynamicPseudoType,
+ AllowNull: true,
+ AllowDynamicType: true,
+ },
+ {
+ Name: "type",
+ Type: TypeConstraintType,
+ },
+ },
+ Type: func(args []cty.Value) (cty.Type, error) {
+ wantTypePtr := args[1].EncapsulatedValue().(*cty.Type)
+ got, err := convert.Convert(args[0], *wantTypePtr)
+ if err != nil {
+ return cty.NilType, function.NewArgError(0, err)
+ }
+ return got.Type(), nil
+ },
+ Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
+ v, err := convert.Convert(args[0], retType)
+ if err != nil {
+ return cty.NilVal, function.NewArgError(0, err)
+ }
+ return v, nil
+ },
+ })
+}
diff --git a/vendor/github.com/hashicorp/hcl/v2/go.mod b/vendor/github.com/hashicorp/hcl/v2/go.mod
index c152e6016..d80c99d9b 100644
--- a/vendor/github.com/hashicorp/hcl/v2/go.mod
+++ b/vendor/github.com/hashicorp/hcl/v2/go.mod
@@ -6,7 +6,7 @@ require (
github.com/apparentlymart/go-textseg v1.0.0
github.com/davecgh/go-spew v1.1.1
github.com/go-test/deep v1.0.3
- github.com/google/go-cmp v0.2.0
+ github.com/google/go-cmp v0.3.1
github.com/kr/pretty v0.1.0
github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7
@@ -14,7 +14,7 @@ require (
github.com/sergi/go-diff v1.0.0
github.com/spf13/pflag v1.0.2
github.com/stretchr/testify v1.2.2 // indirect
- github.com/zclconf/go-cty v1.1.0
+ github.com/zclconf/go-cty v1.2.0
golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734
golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82 // indirect
golang.org/x/text v0.3.2 // indirect
diff --git a/vendor/github.com/hashicorp/hcl/v2/go.sum b/vendor/github.com/hashicorp/hcl/v2/go.sum
index b3b95415f..76b135fb4 100644
--- a/vendor/github.com/hashicorp/hcl/v2/go.sum
+++ b/vendor/github.com/hashicorp/hcl/v2/go.sum
@@ -9,8 +9,8 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/go-test/deep v1.0.3 h1:ZrJSEWsXzPOxaZnFteGEfooLba+ju3FYIbOrS+rQd68=
github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
-github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
@@ -29,8 +29,8 @@ github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
-github.com/zclconf/go-cty v1.1.0 h1:uJwc9HiBOCpoKIObTQaLR+tsEXx1HBHnOsOOpcdhZgw=
-github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s=
+github.com/zclconf/go-cty v1.2.0 h1:sPHsy7ADcIZQP3vILvTjrh74ZA175TFP5vqiNK1UmlI=
+github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734 h1:p/H982KKEjUnLJkM3tt/LemDnOc1GiZL5FCVlORJ5zo=
golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
diff --git a/vendor/github.com/hashicorp/hcl/v2/gohcl/decode.go b/vendor/github.com/hashicorp/hcl/v2/gohcl/decode.go
index 7ba08eee0..f0d589d77 100644
--- a/vendor/github.com/hashicorp/hcl/v2/gohcl/decode.go
+++ b/vendor/github.com/hashicorp/hcl/v2/gohcl/decode.go
@@ -147,7 +147,9 @@ func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value)
if len(blocks) == 0 {
if isSlice || isPtr {
- val.Field(fieldIdx).Set(reflect.Zero(field.Type))
+ if val.Field(fieldIdx).IsNil() {
+ val.Field(fieldIdx).Set(reflect.Zero(field.Type))
+ }
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
@@ -166,11 +168,20 @@ func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value)
if isPtr {
elemType = reflect.PtrTo(ty)
}
- sli := reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks))
+ sli := val.Field(fieldIdx)
+ if sli.IsNil() {
+ sli = reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks))
+ }
for i, block := range blocks {
if isPtr {
- v := reflect.New(ty)
+ if i >= sli.Len() {
+ sli = reflect.Append(sli, reflect.New(ty))
+ }
+ v := sli.Index(i)
+ if v.IsNil() {
+ v = reflect.New(ty)
+ }
diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
sli.Index(i).Set(v)
} else {
@@ -178,12 +189,19 @@ func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value)
}
}
+ if sli.Len() > len(blocks) {
+ sli.SetLen(len(blocks))
+ }
+
val.Field(fieldIdx).Set(sli)
default:
block := blocks[0]
if isPtr {
- v := reflect.New(ty)
+ v := val.Field(fieldIdx)
+ if v.IsNil() {
+ v = reflect.New(ty)
+ }
diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
val.Field(fieldIdx).Set(v)
} else {
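
This gohcl change implements the merging behaviour noted in the v2.1.0 changelog entry above: block fields that the caller pre-populated are now decoded into in place rather than replaced with freshly zeroed values. A hedged sketch of what that buys the caller (the `Config`/`Service` types and the default port are hypothetical):

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/gohcl"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

type Service struct {
	Name string `hcl:"name"`
	Port int    `hcl:"port,optional"`
}

type Config struct {
	Service *Service `hcl:"service,block"`
}

func main() {
	src := []byte("service { name = \"web\" }\n")
	f, diags := hclsyntax.ParseConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		log.Fatal(diags)
	}

	// Pre-populate a default. With the change above, the existing *Service
	// pointer is decoded into, so Port keeps its default because the body
	// never sets it; previously the block was replaced with a zero value.
	cfg := Config{Service: &Service{Port: 8080}}
	if diags := gohcl.DecodeBody(f.Body, nil, &cfg); diags.HasErrors() {
		log.Fatal(diags)
	}

	fmt.Println(cfg.Service.Name, cfg.Service.Port) // web 8080
}
```
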
diff --git a/vendor/github.com/hashicorp/hcl/v2/hcldec/spec.go b/vendor/github.com/hashicorp/hcl/v2/hcldec/spec.go
index 6f2d9732c..a70818e1b 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hcldec/spec.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hcldec/spec.go
@@ -6,6 +6,7 @@ import (
"sort"
"github.com/hashicorp/hcl/v2"
+ "github.com/hashicorp/hcl/v2/ext/customdecode"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
"github.com/zclconf/go-cty/cty/function"
@@ -193,6 +194,14 @@ func (s *AttrSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ct
return cty.NullVal(s.Type), nil
}
+ if decodeFn := customdecode.CustomExpressionDecoderForType(s.Type); decodeFn != nil {
+ v, diags := decodeFn(attr.Expr, ctx)
+ if v == cty.NilVal {
+ v = cty.UnknownVal(s.Type)
+ }
+ return v, diags
+ }
+
val, diags := attr.Expr.Value(ctx)
convVal, err := convert.Convert(val, s.Type)
@@ -204,8 +213,10 @@ func (s *AttrSpec) decode(content *hcl.BodyContent, blockLabels []blockLabel, ct
"Inappropriate value for attribute %q: %s.",
s.Name, err.Error(),
),
- Subject: attr.Expr.StartRange().Ptr(),
- Context: hcl.RangeBetween(attr.NameRange, attr.Expr.StartRange()).Ptr(),
+ Subject: attr.Expr.Range().Ptr(),
+ Context: hcl.RangeBetween(attr.NameRange, attr.Expr.Range()).Ptr(),
+ Expression: attr.Expr,
+ EvalContext: ctx,
})
// We'll return an unknown value of the _correct_ type so that the
// incomplete result can still be used for some analysis use-cases.
@@ -1221,16 +1232,29 @@ func (s *BlockAttrsSpec) decode(content *hcl.BodyContent, blockLabels []blockLab
vals := make(map[string]cty.Value, len(attrs))
for name, attr := range attrs {
+ if decodeFn := customdecode.CustomExpressionDecoderForType(s.ElementType); decodeFn != nil {
+ attrVal, attrDiags := decodeFn(attr.Expr, ctx)
+ diags = append(diags, attrDiags...)
+ if attrVal == cty.NilVal {
+ attrVal = cty.UnknownVal(s.ElementType)
+ }
+ vals[name] = attrVal
+ continue
+ }
+
attrVal, attrDiags := attr.Expr.Value(ctx)
diags = append(diags, attrDiags...)
attrVal, err := convert.Convert(attrVal, s.ElementType)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid attribute value",
- Detail: fmt.Sprintf("Invalid value for attribute of %q block: %s.", s.TypeName, err),
- Subject: attr.Expr.Range().Ptr(),
+ Severity: hcl.DiagError,
+ Summary: "Invalid attribute value",
+ Detail: fmt.Sprintf("Invalid value for attribute of %q block: %s.", s.TypeName, err),
+ Subject: attr.Expr.Range().Ptr(),
+ Context: hcl.RangeBetween(attr.NameRange, attr.Expr.Range()).Ptr(),
+ Expression: attr.Expr,
+ EvalContext: ctx,
})
attrVal = cty.UnknownVal(s.ElementType)
}
diff --git a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go
index 963ed7752..3fe84ddc3 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/expression.go
@@ -5,6 +5,7 @@ import (
"sync"
"github.com/hashicorp/hcl/v2"
+ "github.com/hashicorp/hcl/v2/ext/customdecode"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
"github.com/zclconf/go-cty/cty/function"
@@ -350,26 +351,38 @@ func (e *FunctionCallExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnosti
param = varParam
}
- val, argDiags := argExpr.Value(ctx)
- if len(argDiags) > 0 {
+ var val cty.Value
+ if decodeFn := customdecode.CustomExpressionDecoderForType(param.Type); decodeFn != nil {
+ var argDiags hcl.Diagnostics
+ val, argDiags = decodeFn(argExpr, ctx)
diags = append(diags, argDiags...)
- }
+ if val == cty.NilVal {
+ val = cty.UnknownVal(param.Type)
+ }
+ } else {
+ var argDiags hcl.Diagnostics
+ val, argDiags = argExpr.Value(ctx)
+ if len(argDiags) > 0 {
+ diags = append(diags, argDiags...)
+ }
- // Try to convert our value to the parameter type
- val, err := convert.Convert(val, param.Type)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid function argument",
- Detail: fmt.Sprintf(
- "Invalid value for %q parameter: %s.",
- param.Name, err,
- ),
- Subject: argExpr.StartRange().Ptr(),
- Context: e.Range().Ptr(),
- Expression: argExpr,
- EvalContext: ctx,
- })
+ // Try to convert our value to the parameter type
+ var err error
+ val, err = convert.Convert(val, param.Type)
+ if err != nil {
+ diags = append(diags, &hcl.Diagnostic{
+ Severity: hcl.DiagError,
+ Summary: "Invalid function argument",
+ Detail: fmt.Sprintf(
+ "Invalid value for %q parameter: %s.",
+ param.Name, err,
+ ),
+ Subject: argExpr.StartRange().Ptr(),
+ Context: e.Range().Ptr(),
+ Expression: argExpr,
+ EvalContext: ctx,
+ })
+ }
}
argVals[i] = val
@@ -615,8 +628,9 @@ type IndexExpr struct {
Collection Expression
Key Expression
- SrcRange hcl.Range
- OpenRange hcl.Range
+ SrcRange hcl.Range
+ OpenRange hcl.Range
+ BracketRange hcl.Range
}
func (e *IndexExpr) walkChildNodes(w internalWalkFunc) {
@@ -631,7 +645,7 @@ func (e *IndexExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
diags = append(diags, collDiags...)
diags = append(diags, keyDiags...)
- val, indexDiags := hcl.Index(coll, key, &e.SrcRange)
+ val, indexDiags := hcl.Index(coll, key, &e.BracketRange)
setDiagEvalContext(indexDiags, e, ctx)
diags = append(diags, indexDiags...)
return val, diags
diff --git a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go
index 6fb284a8f..f67d989e5 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hclsyntax/parser.go
@@ -760,7 +760,7 @@ Traversal:
Each: travExpr,
Item: itemExpr,
- SrcRange: hcl.RangeBetween(dot.Range, lastRange),
+ SrcRange: hcl.RangeBetween(from.Range(), lastRange),
MarkerRange: hcl.RangeBetween(dot.Range, marker.Range),
}
@@ -819,7 +819,7 @@ Traversal:
Each: travExpr,
Item: itemExpr,
- SrcRange: hcl.RangeBetween(open.Range, travExpr.Range()),
+ SrcRange: hcl.RangeBetween(from.Range(), travExpr.Range()),
MarkerRange: hcl.RangeBetween(open.Range, close.Range),
}
@@ -867,8 +867,9 @@ Traversal:
Collection: ret,
Key: keyExpr,
- SrcRange: rng,
- OpenRange: open.Range,
+ SrcRange: hcl.RangeBetween(from.Range(), rng),
+ OpenRange: open.Range,
+ BracketRange: rng,
}
}
}
@@ -899,7 +900,7 @@ func makeRelativeTraversal(expr Expression, next hcl.Traverser, rng hcl.Range) E
return &RelativeTraversalExpr{
Source: expr,
Traversal: hcl.Traversal{next},
- SrcRange: rng,
+ SrcRange: hcl.RangeBetween(expr.Range(), rng),
}
}
}
diff --git a/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_body.go b/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_body.go
index c16d13e3a..119f53e62 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_body.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_body.go
@@ -60,7 +60,7 @@ func (b *Body) Attributes() map[string]*Attribute {
// Blocks returns a new slice of all the blocks in the body.
func (b *Body) Blocks() []*Block {
ret := make([]*Block, 0, len(b.items))
- for n := range b.items {
+ for _, n := range b.items.List() {
if block, isBlock := n.content.(*Block); isBlock {
ret = append(ret, block)
}
@@ -134,6 +134,26 @@ func (b *Body) RemoveBlock(block *Block) bool {
return false
}
+// SetAttributeRaw either replaces the expression of an existing attribute
+// of the given name or adds a new attribute definition to the end of the block,
+// using the given tokens verbatim as the expression.
+//
+// The same caveats apply to this function as for NewExpressionRaw on which
+// it is based. If possible, prefer to use SetAttributeValue or
+// SetAttributeTraversal.
+func (b *Body) SetAttributeRaw(name string, tokens Tokens) *Attribute {
+ attr := b.GetAttribute(name)
+ expr := NewExpressionRaw(tokens)
+ if attr != nil {
+ attr.expr = attr.expr.ReplaceWith(expr)
+ } else {
+ attr := newAttribute()
+ attr.init(name, expr)
+ b.appendItem(attr)
+ }
+ return attr
+}
+
// SetAttributeValue either replaces the expression of an existing attribute
// of the given name or adds a new attribute definition to the end of the block.
//
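
`SetAttributeRaw` (together with `NewExpressionRaw`, added in the next file) writes the supplied tokens verbatim, so the caller must ensure they form a valid expression. A small illustrative sketch that emits a function call, something hclwrite cannot otherwise generate; token construction and output spacing are approximate, and the `hclwrite` and `hclsyntax` imports are assumed:

```go
f := hclwrite.NewEmptyFile()

// Tokens for the expression `timestamp()`, emitted verbatim.
tokens := hclwrite.Tokens{
	{Type: hclsyntax.TokenIdent, Bytes: []byte("timestamp")},
	{Type: hclsyntax.TokenOParen, Bytes: []byte("(")},
	{Type: hclsyntax.TokenCParen, Bytes: []byte(")")},
}
f.Body().SetAttributeRaw("created_at", tokens)

fmt.Printf("%s", f.Bytes())
// created_at = timestamp()
```
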
diff --git a/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_expression.go b/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_expression.go
index 854e71690..073c30871 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_expression.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hclwrite/ast_expression.go
@@ -21,6 +21,29 @@ func newExpression() *Expression {
}
}
+// NewExpressionRaw constructs an expression containing the given raw tokens.
+//
+// There is no automatic validation that the given tokens produce a valid
+// expression. Callers of this function must take care not to produce invalid
+// expression tokens. Where possible, use the higher-level functions
+// NewExpressionLiteral or NewExpressionAbsTraversal instead.
+//
+// Because NewExpressionRaw does not interpret the given tokens in any way,
+// an expression created by NewExpressionRaw will produce an empty result
+// for calls to its method Variables, even if the given token sequence
+// contains a subslice that would normally be interpreted as a traversal under
+// parsing.
+func NewExpressionRaw(tokens Tokens) *Expression {
+ expr := newExpression()
+ // We copy the tokens here in order to make sure that later mutations
+ // by the caller don't inadvertently cause our expression to become
+ // invalid.
+ copyTokens := make(Tokens, len(tokens))
+ copy(copyTokens, tokens)
+ expr.children.AppendUnstructuredTokens(copyTokens)
+ return expr
+}
+
// NewExpressionLiteral constructs an expression that represents the given
// literal value.
//
diff --git a/vendor/github.com/hashicorp/hcl/v2/hclwrite/generate.go b/vendor/github.com/hashicorp/hcl/v2/hclwrite/generate.go
index 289a30d68..4d439acd7 100644
--- a/vendor/github.com/hashicorp/hcl/v2/hclwrite/generate.go
+++ b/vendor/github.com/hashicorp/hcl/v2/hclwrite/generate.go
@@ -159,12 +159,12 @@ func appendTokensForValue(val cty.Value, toks Tokens) Tokens {
func appendTokensForTraversal(traversal hcl.Traversal, toks Tokens) Tokens {
for _, step := range traversal {
- appendTokensForTraversalStep(step, toks)
+ toks = appendTokensForTraversalStep(step, toks)
}
return toks
}
-func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) {
+func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) Tokens {
switch ts := step.(type) {
case hcl.TraverseRoot:
toks = append(toks, &Token{
@@ -188,7 +188,7 @@ func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) {
Type: hclsyntax.TokenOBrack,
Bytes: []byte{'['},
})
- appendTokensForValue(ts.Key, toks)
+ toks = appendTokensForValue(ts.Key, toks)
toks = append(toks, &Token{
Type: hclsyntax.TokenCBrack,
Bytes: []byte{']'},
@@ -196,6 +196,8 @@ func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) {
default:
panic(fmt.Sprintf("unsupported traversal step type %T", step))
}
+
+ return toks
}
func escapeQuotedStringLit(s string) []byte {
diff --git a/vendor/github.com/hashicorp/hcl2/LICENSE b/vendor/github.com/hashicorp/hcl2/LICENSE
deleted file mode 100644
index 82b4de97c..000000000
--- a/vendor/github.com/hashicorp/hcl2/LICENSE
+++ /dev/null
@@ -1,353 +0,0 @@
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. “Contributor”
-
- means each individual or legal entity that creates, contributes to the
- creation of, or owns Covered Software.
-
-1.2. “Contributor Version”
-
- means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor’s Contribution.
-
-1.3. “Contribution”
-
- means Covered Software of a particular Contributor.
-
-1.4. “Covered Software”
-
- means Source Code Form to which the initial Contributor has attached the
- notice in Exhibit A, the Executable Form of such Source Code Form, and
- Modifications of such Source Code Form, in each case including portions
- thereof.
-
-1.5. “Incompatible With Secondary Licenses”
- means
-
- a. that the initial Contributor has attached the notice described in
- Exhibit B to the Covered Software; or
-
- b. that the Covered Software was made available under the terms of version
- 1.1 or earlier of the License, but not also under the terms of a
- Secondary License.
-
-1.6. “Executable Form”
-
- means any form of the work other than Source Code Form.
-
-1.7. “Larger Work”
-
- means a work that combines Covered Software with other material, in a separate
- file or files, that is not Covered Software.
-
-1.8. “License”
-
- means this document.
-
-1.9. “Licensable”
-
- means having the right to grant, to the maximum extent possible, whether at the
- time of the initial grant or subsequently, any and all of the rights conveyed by
- this License.
-
-1.10. “Modifications”
-
- means any of the following:
-
- a. any file in Source Code Form that results from an addition to, deletion
- from, or modification of the contents of Covered Software; or
-
- b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. “Patent Claims” of a Contributor
-
- means any patent claim(s), including without limitation, method, process,
- and apparatus claims, in any patent Licensable by such Contributor that
- would be infringed, but for the grant of the License, by the making,
- using, selling, offering for sale, having made, import, or transfer of
- either its Contributions or its Contributor Version.
-
-1.12. “Secondary License”
-
- means either the GNU General Public License, Version 2.0, the GNU Lesser
- General Public License, Version 2.1, the GNU Affero General Public
- License, Version 3.0, or any later versions of those licenses.
-
-1.13. “Source Code Form”
-
- means the form of the work preferred for making modifications.
-
-1.14. “You” (or “Your”)
-
- means an individual or a legal entity exercising rights under this
- License. For legal entities, “You” includes any entity that controls, is
- controlled by, or is under common control with You. For purposes of this
- definition, “control” means (a) the power, direct or indirect, to cause
- the direction or management of such entity, whether by contract or
- otherwise, or (b) ownership of more than fifty percent (50%) of the
- outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
- Each Contributor hereby grants You a world-wide, royalty-free,
- non-exclusive license:
-
- a. under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or as
- part of a Larger Work; and
-
- b. under Patent Claims of such Contributor to make, use, sell, offer for
- sale, have made, import, and otherwise transfer either its Contributions
- or its Contributor Version.
-
-2.2. Effective Date
-
- The licenses granted in Section 2.1 with respect to any Contribution become
- effective for each Contribution on the date the Contributor first distributes
- such Contribution.
-
-2.3. Limitations on Grant Scope
-
- The licenses granted in this Section 2 are the only rights granted under this
- License. No additional rights or licenses will be implied from the distribution
- or licensing of Covered Software under this License. Notwithstanding Section
- 2.1(b) above, no patent license is granted by a Contributor:
-
- a. for any code that a Contributor has removed from Covered Software; or
-
- b. for infringements caused by: (i) Your and any other third party’s
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
- c. under Patent Claims infringed by Covered Software in the absence of its
- Contributions.
-
- This License does not grant any rights in the trademarks, service marks, or
- logos of any Contributor (except as may be necessary to comply with the
- notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
- No Contributor makes additional grants as a result of Your choice to
- distribute the Covered Software under a subsequent version of this License
- (see Section 10.2) or under the terms of a Secondary License (if permitted
- under the terms of Section 3.3).
-
-2.5. Representation
-
- Each Contributor represents that the Contributor believes its Contributions
- are its original creation(s) or it has sufficient rights to grant the
- rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
- This License is not intended to limit any rights You have under applicable
- copyright doctrines of fair use, fair dealing, or other equivalents.
-
-2.7. Conditions
-
- Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
- Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
- All distribution of Covered Software in Source Code Form, including any
- Modifications that You create or to which You contribute, must be under the
- terms of this License. You must inform recipients that the Source Code Form
- of the Covered Software is governed by the terms of this License, and how
- they can obtain a copy of this License. You may not attempt to alter or
- restrict the recipients’ rights in the Source Code Form.
-
-3.2. Distribution of Executable Form
-
- If You distribute Covered Software in Executable Form then:
-
- a. such Covered Software must also be made available in Source Code Form,
- as described in Section 3.1, and You must inform recipients of the
- Executable Form how they can obtain a copy of such Source Code Form by
- reasonable means in a timely manner, at a charge no more than the cost
- of distribution to the recipient; and
-
- b. You may distribute such Executable Form under the terms of this License,
- or sublicense it under different terms, provided that the license for
- the Executable Form does not attempt to limit or alter the recipients’
- rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
- You may create and distribute a Larger Work under terms of Your choice,
- provided that You also comply with the requirements of this License for the
- Covered Software. If the Larger Work is a combination of Covered Software
- with a work governed by one or more Secondary Licenses, and the Covered
- Software is not Incompatible With Secondary Licenses, this License permits
- You to additionally distribute such Covered Software under the terms of
- such Secondary License(s), so that the recipient of the Larger Work may, at
- their option, further distribute the Covered Software under the terms of
- either this License or such Secondary License(s).
-
-3.4. Notices
-
- You may not remove or alter the substance of any license notices (including
- copyright notices, patent notices, disclaimers of warranty, or limitations
- of liability) contained within the Source Code Form of the Covered
- Software, except that You may alter any license notices to the extent
- required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
- You may choose to offer, and to charge a fee for, warranty, support,
- indemnity or liability obligations to one or more recipients of Covered
- Software. However, You may do so only on Your own behalf, and not on behalf
- of any Contributor. You must make it absolutely clear that any such
- warranty, support, indemnity, or liability obligation is offered by You
- alone, and You hereby agree to indemnify every Contributor for any
- liability incurred by such Contributor as a result of warranty, support,
- indemnity or liability terms You offer. You may include additional
- disclaimers of warranty and limitations of liability specific to any
- jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
- If it is impossible for You to comply with any of the terms of this License
- with respect to some or all of the Covered Software due to statute, judicial
- order, or regulation then You must: (a) comply with the terms of this License
- to the maximum extent possible; and (b) describe the limitations and the code
- they affect. Such description must be placed in a text file included with all
- distributions of the Covered Software under this License. Except to the
- extent prohibited by statute or regulation, such description must be
- sufficiently detailed for a recipient of ordinary skill to be able to
- understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
- fail to comply with any of its terms. However, if You become compliant,
- then the rights granted under this License from a particular Contributor
- are reinstated (a) provisionally, unless and until such Contributor
- explicitly and finally terminates Your grants, and (b) on an ongoing basis,
- if such Contributor fails to notify You of the non-compliance by some
- reasonable means prior to 60 days after You have come back into compliance.
- Moreover, Your grants from a particular Contributor are reinstated on an
- ongoing basis if such Contributor notifies You of the non-compliance by
- some reasonable means, this is the first time You have received notice of
- non-compliance with this License from such Contributor, and You become
- compliant prior to 30 days after Your receipt of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
- infringement claim (excluding declaratory judgment actions, counter-claims,
- and cross-claims) alleging that a Contributor Version directly or
- indirectly infringes any patent, then the rights granted to You by any and
- all Contributors for the Covered Software under Section 2.1 of this License
- shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
- license agreements (excluding distributors and resellers) which have been
- validly granted by You or Your distributors under this License prior to
- termination shall survive termination.
-
-6. Disclaimer of Warranty
-
- Covered Software is provided under this License on an “as is” basis, without
- warranty of any kind, either expressed, implied, or statutory, including,
- without limitation, warranties that the Covered Software is free of defects,
- merchantable, fit for a particular purpose or non-infringing. The entire
- risk as to the quality and performance of the Covered Software is with You.
- Should any Covered Software prove defective in any respect, You (not any
- Contributor) assume the cost of any necessary servicing, repair, or
- correction. This disclaimer of warranty constitutes an essential part of this
- License. No use of any Covered Software is authorized under this License
- except under this disclaimer.
-
-7. Limitation of Liability
-
- Under no circumstances and under no legal theory, whether tort (including
- negligence), contract, or otherwise, shall any Contributor, or anyone who
- distributes Covered Software as permitted above, be liable to You for any
- direct, indirect, special, incidental, or consequential damages of any
- character including, without limitation, damages for lost profits, loss of
- goodwill, work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses, even if such party shall have been
- informed of the possibility of such damages. This limitation of liability
- shall not apply to liability for death or personal injury resulting from such
- party’s negligence to the extent applicable law prohibits such limitation.
- Some jurisdictions do not allow the exclusion or limitation of incidental or
- consequential damages, so this exclusion and limitation may not apply to You.
-
-8. Litigation
-
- Any litigation relating to this License may be brought only in the courts of
- a jurisdiction where the defendant maintains its principal place of business
- and such litigation shall be governed by laws of that jurisdiction, without
- reference to its conflict-of-law provisions. Nothing in this Section shall
- prevent a party’s ability to bring cross-claims or counter-claims.
-
-9. Miscellaneous
-
- This License represents the complete agreement concerning the subject matter
- hereof. If any provision of this License is held to be unenforceable, such
- provision shall be reformed only to the extent necessary to make it
- enforceable. Any law or regulation which provides that the language of a
- contract shall be construed against the drafter shall not be used to construe
- this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
- Mozilla Foundation is the license steward. Except as provided in Section
- 10.3, no one other than the license steward has the right to modify or
- publish new versions of this License. Each version will be given a
- distinguishing version number.
-
-10.2. Effect of New Versions
-
- You may distribute the Covered Software under the terms of the version of
- the License under which You originally received the Covered Software, or
- under the terms of any subsequent version published by the license
- steward.
-
-10.3. Modified Versions
-
- If you create software not governed by this License, and you want to
- create a new license for such software, you may create and use a modified
- version of this License if you rename the license and remove any
- references to the name of the license steward (except to note that such
- modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
- If You choose to distribute Source Code Form that is Incompatible With
- Secondary Licenses under the terms of this version of the License, the
- notice described in Exhibit B of this License must be attached.
-
-Exhibit A - Source Code Form License Notice
-
- This Source Code Form is subject to the
- terms of the Mozilla Public License, v.
- 2.0. If a copy of the MPL was not
- distributed with this file, You can
- obtain one at
- http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file, then
-You may include the notice in a location (such as a LICENSE file in a relevant
-directory) where a recipient would be likely to look for such a notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - “Incompatible With Secondary Licenses” Notice
-
- This Source Code Form is “Incompatible
- With Secondary Licenses”, as defined by
- the Mozilla Public License, v. 2.0.
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/decode.go b/vendor/github.com/hashicorp/hcl2/gohcl/decode.go
deleted file mode 100644
index 3a149a8c2..000000000
--- a/vendor/github.com/hashicorp/hcl2/gohcl/decode.go
+++ /dev/null
@@ -1,304 +0,0 @@
-package gohcl
-
-import (
- "fmt"
- "reflect"
-
- "github.com/zclconf/go-cty/cty"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/zclconf/go-cty/cty/convert"
- "github.com/zclconf/go-cty/cty/gocty"
-)
-
-// DecodeBody extracts the configuration within the given body into the given
-// value. This value must be a non-nil pointer to either a struct or
-// a map, where in the former case the configuration will be decoded using
-// struct tags and in the latter case only attributes are allowed and their
-// values are decoded into the map.
-//
-// The given EvalContext is used to resolve any variables or functions in
-// expressions encountered while decoding. This may be nil to require only
-// constant values, for simple applications that do not support variables or
-// functions.
-//
-// The returned diagnostics should be inspected with its HasErrors method to
-// determine if the populated value is valid and complete. If error diagnostics
-// are returned then the given value may have been partially-populated but
-// may still be accessed by a careful caller for static analysis and editor
-// integration use-cases.
-func DecodeBody(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
- rv := reflect.ValueOf(val)
- if rv.Kind() != reflect.Ptr {
- panic(fmt.Sprintf("target value must be a pointer, not %s", rv.Type().String()))
- }
-
- return decodeBodyToValue(body, ctx, rv.Elem())
-}
-
-func decodeBodyToValue(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics {
- et := val.Type()
- switch et.Kind() {
- case reflect.Struct:
- return decodeBodyToStruct(body, ctx, val)
- case reflect.Map:
- return decodeBodyToMap(body, ctx, val)
- default:
- panic(fmt.Sprintf("target value must be pointer to struct or map, not %s", et.String()))
- }
-}
-
-func decodeBodyToStruct(body hcl.Body, ctx *hcl.EvalContext, val reflect.Value) hcl.Diagnostics {
- schema, partial := ImpliedBodySchema(val.Interface())
-
- var content *hcl.BodyContent
- var leftovers hcl.Body
- var diags hcl.Diagnostics
- if partial {
- content, leftovers, diags = body.PartialContent(schema)
- } else {
- content, diags = body.Content(schema)
- }
- if content == nil {
- return diags
- }
-
- tags := getFieldTags(val.Type())
-
- if tags.Remain != nil {
- fieldIdx := *tags.Remain
- field := val.Type().Field(fieldIdx)
- fieldV := val.Field(fieldIdx)
- switch {
- case bodyType.AssignableTo(field.Type):
- fieldV.Set(reflect.ValueOf(leftovers))
- case attrsType.AssignableTo(field.Type):
- attrs, attrsDiags := leftovers.JustAttributes()
- if len(attrsDiags) > 0 {
- diags = append(diags, attrsDiags...)
- }
- fieldV.Set(reflect.ValueOf(attrs))
- default:
- diags = append(diags, decodeBodyToValue(leftovers, ctx, fieldV)...)
- }
- }
-
- for name, fieldIdx := range tags.Attributes {
- attr := content.Attributes[name]
- field := val.Type().Field(fieldIdx)
- fieldV := val.Field(fieldIdx)
-
- if attr == nil {
- if !exprType.AssignableTo(field.Type) {
- continue
- }
-
- // As a special case, if the target is of type hcl.Expression then
-			// we'll assign an actual expression that evaluates to a cty null,
- // so the caller can deal with it within the cty realm rather
- // than within the Go realm.
- synthExpr := hcl.StaticExpr(cty.NullVal(cty.DynamicPseudoType), body.MissingItemRange())
- fieldV.Set(reflect.ValueOf(synthExpr))
- continue
- }
-
- switch {
- case attrType.AssignableTo(field.Type):
- fieldV.Set(reflect.ValueOf(attr))
- case exprType.AssignableTo(field.Type):
- fieldV.Set(reflect.ValueOf(attr.Expr))
- default:
- diags = append(diags, DecodeExpression(
- attr.Expr, ctx, fieldV.Addr().Interface(),
- )...)
- }
- }
-
- blocksByType := content.Blocks.ByType()
-
- for typeName, fieldIdx := range tags.Blocks {
- blocks := blocksByType[typeName]
- field := val.Type().Field(fieldIdx)
-
- ty := field.Type
- isSlice := false
- isPtr := false
- if ty.Kind() == reflect.Slice {
- isSlice = true
- ty = ty.Elem()
- }
- if ty.Kind() == reflect.Ptr {
- isPtr = true
- ty = ty.Elem()
- }
-
- if len(blocks) > 1 && !isSlice {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Duplicate %s block", typeName),
- Detail: fmt.Sprintf(
- "Only one %s block is allowed. Another was defined at %s.",
- typeName, blocks[0].DefRange.String(),
- ),
- Subject: &blocks[1].DefRange,
- })
- continue
- }
-
- if len(blocks) == 0 {
- if isSlice || isPtr {
- val.Field(fieldIdx).Set(reflect.Zero(field.Type))
- } else {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Missing %s block", typeName),
- Detail: fmt.Sprintf("A %s block is required.", typeName),
- Subject: body.MissingItemRange().Ptr(),
- })
- }
- continue
- }
-
- switch {
-
- case isSlice:
- elemType := ty
- if isPtr {
- elemType = reflect.PtrTo(ty)
- }
- sli := reflect.MakeSlice(reflect.SliceOf(elemType), len(blocks), len(blocks))
-
- for i, block := range blocks {
- if isPtr {
- v := reflect.New(ty)
- diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
- sli.Index(i).Set(v)
- } else {
- diags = append(diags, decodeBlockToValue(block, ctx, sli.Index(i))...)
- }
- }
-
- val.Field(fieldIdx).Set(sli)
-
- default:
- block := blocks[0]
- if isPtr {
- v := reflect.New(ty)
- diags = append(diags, decodeBlockToValue(block, ctx, v.Elem())...)
- val.Field(fieldIdx).Set(v)
- } else {
- diags = append(diags, decodeBlockToValue(block, ctx, val.Field(fieldIdx))...)
- }
-
- }
-
- }
-
- return diags
-}
-
-func decodeBodyToMap(body hcl.Body, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics {
- attrs, diags := body.JustAttributes()
- if attrs == nil {
- return diags
- }
-
- mv := reflect.MakeMap(v.Type())
-
- for k, attr := range attrs {
- switch {
- case attrType.AssignableTo(v.Type().Elem()):
- mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr))
- case exprType.AssignableTo(v.Type().Elem()):
- mv.SetMapIndex(reflect.ValueOf(k), reflect.ValueOf(attr.Expr))
- default:
- ev := reflect.New(v.Type().Elem())
- diags = append(diags, DecodeExpression(attr.Expr, ctx, ev.Interface())...)
- mv.SetMapIndex(reflect.ValueOf(k), ev.Elem())
- }
- }
-
- v.Set(mv)
-
- return diags
-}
-
-func decodeBlockToValue(block *hcl.Block, ctx *hcl.EvalContext, v reflect.Value) hcl.Diagnostics {
- var diags hcl.Diagnostics
-
- ty := v.Type()
-
- switch {
- case blockType.AssignableTo(ty):
- v.Elem().Set(reflect.ValueOf(block))
- case bodyType.AssignableTo(ty):
- v.Elem().Set(reflect.ValueOf(block.Body))
- case attrsType.AssignableTo(ty):
- attrs, attrsDiags := block.Body.JustAttributes()
- if len(attrsDiags) > 0 {
- diags = append(diags, attrsDiags...)
- }
- v.Elem().Set(reflect.ValueOf(attrs))
- default:
- diags = append(diags, decodeBodyToValue(block.Body, ctx, v)...)
-
- if len(block.Labels) > 0 {
- blockTags := getFieldTags(ty)
- for li, lv := range block.Labels {
- lfieldIdx := blockTags.Labels[li].FieldIndex
- v.Field(lfieldIdx).Set(reflect.ValueOf(lv))
- }
- }
-
- }
-
- return diags
-}
-
-// DecodeExpression extracts the value of the given expression into the given
-// value. This value must be something that gocty is able to decode into,
-// since the final decoding is delegated to that package.
-//
-// The given EvalContext is used to resolve any variables or functions in
-// expressions encountered while decoding. This may be nil to require only
-// constant values, for simple applications that do not support variables or
-// functions.
-//
-// The returned diagnostics should be inspected with its HasErrors method to
-// determine if the populated value is valid and complete. If error diagnostics
-// are returned then the given value may have been partially-populated but
-// may still be accessed by a careful caller for static analysis and editor
-// integration use-cases.
-func DecodeExpression(expr hcl.Expression, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
- srcVal, diags := expr.Value(ctx)
-
- convTy, err := gocty.ImpliedType(val)
- if err != nil {
- panic(fmt.Sprintf("unsuitable DecodeExpression target: %s", err))
- }
-
- srcVal, err = convert.Convert(srcVal, convTy)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unsuitable value type",
- Detail: fmt.Sprintf("Unsuitable value: %s", err.Error()),
- Subject: expr.StartRange().Ptr(),
- Context: expr.Range().Ptr(),
- })
- return diags
- }
-
- err = gocty.FromCtyValue(srcVal, val)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unsuitable value type",
- Detail: fmt.Sprintf("Unsuitable value: %s", err.Error()),
- Subject: expr.StartRange().Ptr(),
- Context: expr.Range().Ptr(),
- })
- }
-
- return diags
-}
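
For reference, a minimal sketch of how the DecodeBody API above is typically consumed; the import paths follow the vendored module being removed here, and the Config/Service schema is purely hypothetical:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/gohcl"
	"github.com/hashicorp/hcl2/hclparse"
)

// Hypothetical configuration schema, described via the struct tags that
// gohcl understands (the full tag vocabulary is documented in doc.go below).
type Service struct {
	Name       string   `hcl:"name,label"`         // block label
	ListenAddr string   `hcl:"listen_addr"`        // required attribute
	Protocols  []string `hcl:"protocols,optional"` // optional attribute
}

type Config struct {
	IOMode   string    `hcl:"io_mode"`
	Services []Service `hcl:"service,block"`
}

func main() {
	src := `
io_mode = "async"

service "http" {
  listen_addr = "127.0.0.1:8080"
}
`
	parser := hclparse.NewParser()
	f, diags := parser.ParseHCL([]byte(src), "example.hcl")
	if diags.HasErrors() {
		log.Fatal(diags)
	}

	var cfg Config
	// A nil EvalContext means only constant expressions are accepted.
	diags = gohcl.DecodeBody(f.Body, nil, &cfg)
	if diags.HasErrors() {
		log.Fatal(diags)
	}
	fmt.Printf("%+v\n", cfg)
}
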
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/doc.go b/vendor/github.com/hashicorp/hcl2/gohcl/doc.go
deleted file mode 100644
index aa3c6ea9e..000000000
--- a/vendor/github.com/hashicorp/hcl2/gohcl/doc.go
+++ /dev/null
@@ -1,53 +0,0 @@
-// Package gohcl allows decoding HCL configurations into Go data structures.
-//
-// It provides a convenient and concise way of describing the schema for
-// configuration and then accessing the resulting data via native Go
-// types.
-//
-// A struct field tag scheme is used, similar to other decoding and
-// unmarshalling libraries. The tags are formatted as in the following example:
-//
-// ThingType string `hcl:"thing_type,attr"`
-//
-// Within each tag there are two comma-separated tokens. The first is the
-// name of the corresponding construct in configuration, while the second
-// is a keyword giving the kind of construct expected. The following
-// kind keywords are supported:
-//
-// attr (the default) indicates that the value is to be populated from an attribute
-//    block indicates that the value is to be populated from a block
-//    label indicates that the value is to be populated from a block label
-// remain indicates that the value is to be populated from the remaining body after populating other fields
-//
-// "attr" fields may either be of type *hcl.Expression, in which case the raw
-// expression is assigned, or of any type accepted by gocty, in which case
-// gocty will be used to assign the value to a native Go type.
-//
-// "block" fields may be of type *hcl.Block or hcl.Body, in which case the
-// corresponding raw value is assigned, or may be a struct that recursively
-// uses the same tags. Block fields may also be slices of any of these types,
-// in which case multiple blocks of the corresponding type are decoded into
-// the slice.
-//
-// "label" fields are considered only in a struct used as the type of a field
-// marked as "block", and are used sequentially to capture the labels of
-// the blocks being decoded. In this case, the name token is used only as
-// an identifier for the label in diagnostic messages.
-//
-// "remain" can be placed on a single field that may be either of type
-// hcl.Body or hcl.Attributes, in which case any remaining body content is
-// placed into this field for delayed processing. If no "remain" field is
-// present then any attributes or blocks not matched by another valid tag
-// will cause an error diagnostic.
-//
-// Only a subset of this tagging/typing vocabulary is supported for the
-// "Encode" family of functions. See the EncodeIntoBody docs for full details
-// on the constraints there.
-//
-// Broadly-speaking this package deals with two types of error. The first is
-// errors in the configuration itself, which are returned as diagnostics
-// written with the configuration author as the target audience. The second
-// is bugs in the calling program, such as invalid struct tags, which are
-// surfaced via panics since there can be no useful runtime handling of such
-// errors and they should certainly not be returned to the user as diagnostics.
-package gohcl
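
The "label" and "remain" kinds described above combine naturally for two-phase decoding; a short sketch with invented package and type names:

package config

import "github.com/hashicorp/hcl2/hcl"

type Step struct {
	Type    string   `hcl:"type,label"`       // first block label, e.g. step "copy" { ... }
	Enabled *bool    `hcl:"enabled,optional"` // optional attribute
	Config  hcl.Body `hcl:",remain"`          // leftover content, decoded later
}

type Pipeline struct {
	Name  string `hcl:"name"`       // required attribute
	Steps []Step `hcl:"step,block"` // zero or more "step" blocks
}

// Each Step.Config can later be passed to gohcl.DecodeBody again once the
// step's concrete schema is known from Step.Type.
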
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/encode.go b/vendor/github.com/hashicorp/hcl2/gohcl/encode.go
deleted file mode 100644
index 3cbf7e48a..000000000
--- a/vendor/github.com/hashicorp/hcl2/gohcl/encode.go
+++ /dev/null
@@ -1,191 +0,0 @@
-package gohcl
-
-import (
- "fmt"
- "reflect"
- "sort"
-
- "github.com/hashicorp/hcl2/hclwrite"
- "github.com/zclconf/go-cty/cty/gocty"
-)
-
-// EncodeIntoBody replaces the contents of the given hclwrite Body with
-// attributes and blocks derived from the given value, which must be a
-// struct value or a pointer to a struct value with the struct tags defined
-// in this package.
-//
-// This function can work only with fully-decoded data. It will ignore any
-// fields tagged as "remain", any fields that decode attributes into either
-// hcl.Attribute or hcl.Expression values, and any fields that decode blocks
-// into hcl.Attributes values. This function does not have enough information
-// to complete the decoding of these types.
-//
-// Any fields tagged as "label" are ignored by this function. Use EncodeAsBlock
-// to produce a whole hclwrite.Block including block labels.
-//
-// As long as a suitable value is given to encode and the destination body
-// is non-nil, this function will always complete. It will panic in case of
-// any errors in the calling program, such as passing an inappropriate type
-// or a nil body.
-//
-// The layout of the resulting HCL source is derived from the ordering of
-// the struct fields, with blank lines around nested blocks of different types.
-// Fields representing attributes should usually precede those representing
-// blocks so that the attributes can group together in the result. For more
-// control, use the hclwrite API directly.
-func EncodeIntoBody(val interface{}, dst *hclwrite.Body) {
- rv := reflect.ValueOf(val)
- ty := rv.Type()
- if ty.Kind() == reflect.Ptr {
- rv = rv.Elem()
- ty = rv.Type()
- }
- if ty.Kind() != reflect.Struct {
- panic(fmt.Sprintf("value is %s, not struct", ty.Kind()))
- }
-
- tags := getFieldTags(ty)
- populateBody(rv, ty, tags, dst)
-}
-
-// EncodeAsBlock creates a new hclwrite.Block populated with the data from
-// the given value, which must be a struct or pointer to struct with the
-// struct tags defined in this package.
-//
-// If the given struct type has fields tagged with "label" tags then they
-// will be used in order to annotate the created block with labels.
-//
-// This function has the same constraints as EncodeIntoBody and will panic
-// if they are violated.
-func EncodeAsBlock(val interface{}, blockType string) *hclwrite.Block {
- rv := reflect.ValueOf(val)
- ty := rv.Type()
- if ty.Kind() == reflect.Ptr {
- rv = rv.Elem()
- ty = rv.Type()
- }
- if ty.Kind() != reflect.Struct {
- panic(fmt.Sprintf("value is %s, not struct", ty.Kind()))
- }
-
- tags := getFieldTags(ty)
- labels := make([]string, len(tags.Labels))
- for i, lf := range tags.Labels {
- lv := rv.Field(lf.FieldIndex)
- // We just stringify whatever we find. It should always be a string
- // but if not then we'll still do something reasonable.
- labels[i] = fmt.Sprintf("%s", lv.Interface())
- }
-
- block := hclwrite.NewBlock(blockType, labels)
- populateBody(rv, ty, tags, block.Body())
- return block
-}
-
-func populateBody(rv reflect.Value, ty reflect.Type, tags *fieldTags, dst *hclwrite.Body) {
- nameIdxs := make(map[string]int, len(tags.Attributes)+len(tags.Blocks))
- namesOrder := make([]string, 0, len(tags.Attributes)+len(tags.Blocks))
- for n, i := range tags.Attributes {
- nameIdxs[n] = i
- namesOrder = append(namesOrder, n)
- }
- for n, i := range tags.Blocks {
- nameIdxs[n] = i
- namesOrder = append(namesOrder, n)
- }
- sort.SliceStable(namesOrder, func(i, j int) bool {
- ni, nj := namesOrder[i], namesOrder[j]
- return nameIdxs[ni] < nameIdxs[nj]
- })
-
- dst.Clear()
-
- prevWasBlock := false
- for _, name := range namesOrder {
- fieldIdx := nameIdxs[name]
- field := ty.Field(fieldIdx)
- fieldTy := field.Type
- fieldVal := rv.Field(fieldIdx)
-
- if fieldTy.Kind() == reflect.Ptr {
- fieldTy = fieldTy.Elem()
- fieldVal = fieldVal.Elem()
- }
-
- if _, isAttr := tags.Attributes[name]; isAttr {
-
- if exprType.AssignableTo(fieldTy) || attrType.AssignableTo(fieldTy) {
- continue // ignore undecoded fields
- }
- if !fieldVal.IsValid() {
- continue // ignore (field value is nil pointer)
- }
- if fieldTy.Kind() == reflect.Ptr && fieldVal.IsNil() {
- continue // ignore
- }
- if prevWasBlock {
- dst.AppendNewline()
- prevWasBlock = false
- }
-
- valTy, err := gocty.ImpliedType(fieldVal.Interface())
- if err != nil {
- panic(fmt.Sprintf("cannot encode %T as HCL expression: %s", fieldVal.Interface(), err))
- }
-
- val, err := gocty.ToCtyValue(fieldVal.Interface(), valTy)
- if err != nil {
- // This should never happen, since we should always be able
- // to decode into the implied type.
- panic(fmt.Sprintf("failed to encode %T as %#v: %s", fieldVal.Interface(), valTy, err))
- }
-
- dst.SetAttributeValue(name, val)
-
- } else { // must be a block, then
- elemTy := fieldTy
- isSeq := false
- if elemTy.Kind() == reflect.Slice || elemTy.Kind() == reflect.Array {
- isSeq = true
- elemTy = elemTy.Elem()
- }
-
- if bodyType.AssignableTo(elemTy) || attrsType.AssignableTo(elemTy) {
- continue // ignore undecoded fields
- }
- prevWasBlock = false
-
- if isSeq {
- l := fieldVal.Len()
- for i := 0; i < l; i++ {
- elemVal := fieldVal.Index(i)
- if !elemVal.IsValid() {
- continue // ignore (elem value is nil pointer)
- }
- if elemTy.Kind() == reflect.Ptr && elemVal.IsNil() {
- continue // ignore
- }
- block := EncodeAsBlock(elemVal.Interface(), name)
- if !prevWasBlock {
- dst.AppendNewline()
- prevWasBlock = true
- }
- dst.AppendBlock(block)
- }
- } else {
- if !fieldVal.IsValid() {
- continue // ignore (field value is nil pointer)
- }
- if elemTy.Kind() == reflect.Ptr && fieldVal.IsNil() {
- continue // ignore
- }
- block := EncodeAsBlock(fieldVal.Interface(), name)
- if !prevWasBlock {
- dst.AppendNewline()
- prevWasBlock = true
- }
- dst.AppendBlock(block)
- }
- }
- }
-}
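
A corresponding sketch of the encode direction, pairing EncodeIntoBody and EncodeAsBlock with hclwrite; the types and values are hypothetical:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/gohcl"
	"github.com/hashicorp/hcl2/hclwrite"
)

type Service struct {
	Name       string `hcl:"name,label"`
	ListenAddr string `hcl:"listen_addr"`
}

type Config struct {
	IOMode   string    `hcl:"io_mode"`
	Services []Service `hcl:"service,block"`
}

func main() {
	cfg := Config{
		IOMode:   "async",
		Services: []Service{{Name: "http", ListenAddr: "127.0.0.1:8080"}},
	}

	// Fill a whole body from the struct: attributes first, then blocks.
	f := hclwrite.NewEmptyFile()
	gohcl.EncodeIntoBody(&cfg, f.Body())
	fmt.Printf("%s", f.Bytes())

	// Or produce a single labelled block from one value.
	out := hclwrite.NewEmptyFile()
	out.Body().AppendBlock(gohcl.EncodeAsBlock(&cfg.Services[0], "service"))
	fmt.Printf("%s", out.Bytes())
}
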
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/schema.go b/vendor/github.com/hashicorp/hcl2/gohcl/schema.go
deleted file mode 100644
index 88164cb05..000000000
--- a/vendor/github.com/hashicorp/hcl2/gohcl/schema.go
+++ /dev/null
@@ -1,174 +0,0 @@
-package gohcl
-
-import (
- "fmt"
- "reflect"
- "sort"
- "strings"
-
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// ImpliedBodySchema produces a hcl.BodySchema derived from the type of the
-// given value, which must be a struct value or a pointer to one. If an
-// inappropriate value is passed, this function will panic.
-//
-// The second return argument indicates whether the given struct includes
-// a "remain" field, and thus the returned schema is non-exhaustive.
-//
-// This uses the tags on the fields of the struct to discover how each
-// field's value should be expressed within configuration. If an invalid
-// mapping is attempted, this function will panic.
-func ImpliedBodySchema(val interface{}) (schema *hcl.BodySchema, partial bool) {
- ty := reflect.TypeOf(val)
-
- if ty.Kind() == reflect.Ptr {
- ty = ty.Elem()
- }
-
- if ty.Kind() != reflect.Struct {
- panic(fmt.Sprintf("given value must be struct, not %T", val))
- }
-
- var attrSchemas []hcl.AttributeSchema
- var blockSchemas []hcl.BlockHeaderSchema
-
- tags := getFieldTags(ty)
-
- attrNames := make([]string, 0, len(tags.Attributes))
- for n := range tags.Attributes {
- attrNames = append(attrNames, n)
- }
- sort.Strings(attrNames)
- for _, n := range attrNames {
- idx := tags.Attributes[n]
- optional := tags.Optional[n]
- field := ty.Field(idx)
-
- var required bool
-
- switch {
- case field.Type.AssignableTo(exprType):
-			// If we're decoding to hcl.Expression then absence can be
- // indicated via a null value, so we don't specify that
- // the field is required during decoding.
- required = false
- case field.Type.Kind() != reflect.Ptr && !optional:
- required = true
- default:
- required = false
- }
-
- attrSchemas = append(attrSchemas, hcl.AttributeSchema{
- Name: n,
- Required: required,
- })
- }
-
- blockNames := make([]string, 0, len(tags.Blocks))
- for n := range tags.Blocks {
- blockNames = append(blockNames, n)
- }
- sort.Strings(blockNames)
- for _, n := range blockNames {
- idx := tags.Blocks[n]
- field := ty.Field(idx)
- fty := field.Type
- if fty.Kind() == reflect.Slice {
- fty = fty.Elem()
- }
- if fty.Kind() == reflect.Ptr {
- fty = fty.Elem()
- }
- if fty.Kind() != reflect.Struct {
- panic(fmt.Sprintf(
- "hcl 'block' tag kind cannot be applied to %s field %s: struct required", field.Type.String(), field.Name,
- ))
- }
- ftags := getFieldTags(fty)
- var labelNames []string
- if len(ftags.Labels) > 0 {
- labelNames = make([]string, len(ftags.Labels))
- for i, l := range ftags.Labels {
- labelNames[i] = l.Name
- }
- }
-
- blockSchemas = append(blockSchemas, hcl.BlockHeaderSchema{
- Type: n,
- LabelNames: labelNames,
- })
- }
-
- partial = tags.Remain != nil
- schema = &hcl.BodySchema{
- Attributes: attrSchemas,
- Blocks: blockSchemas,
- }
- return schema, partial
-}
-
-type fieldTags struct {
- Attributes map[string]int
- Blocks map[string]int
- Labels []labelField
- Remain *int
- Optional map[string]bool
-}
-
-type labelField struct {
- FieldIndex int
- Name string
-}
-
-func getFieldTags(ty reflect.Type) *fieldTags {
- ret := &fieldTags{
- Attributes: map[string]int{},
- Blocks: map[string]int{},
- Optional: map[string]bool{},
- }
-
- ct := ty.NumField()
- for i := 0; i < ct; i++ {
- field := ty.Field(i)
- tag := field.Tag.Get("hcl")
- if tag == "" {
- continue
- }
-
- comma := strings.Index(tag, ",")
- var name, kind string
- if comma != -1 {
- name = tag[:comma]
- kind = tag[comma+1:]
- } else {
- name = tag
- kind = "attr"
- }
-
- switch kind {
- case "attr":
- ret.Attributes[name] = i
- case "block":
- ret.Blocks[name] = i
- case "label":
- ret.Labels = append(ret.Labels, labelField{
- FieldIndex: i,
- Name: name,
- })
- case "remain":
- if ret.Remain != nil {
- panic("only one 'remain' tag is permitted")
- }
- idx := i // copy, because this loop will continue assigning to i
- ret.Remain = &idx
- case "optional":
- ret.Attributes[name] = i
- ret.Optional[name] = true
- default:
- panic(fmt.Sprintf("invalid hcl field tag kind %q on %s %q", kind, field.Type.String(), field.Name))
- }
- }
-
- return ret
-}
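
ImpliedBodySchema is exported, so callers can inspect the schema gohcl derives from struct tags before decoding; a small sketch with an invented type:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/gohcl"
	"github.com/hashicorp/hcl2/hcl"
)

type Listener struct {
	Protocol string `hcl:"protocol,label"`
	Address  string `hcl:"address"`
}

type Config struct {
	Name      string     `hcl:"name"`           // non-pointer attribute: required
	Tags      *[]string  `hcl:"tags"`           // pointer attribute: optional
	Listeners []Listener `hcl:"listener,block"` // block with one label
	Rest      hcl.Body   `hcl:",remain"`        // makes the schema partial
}

func main() {
	schema, partial := gohcl.ImpliedBodySchema(&Config{})
	fmt.Println("partial (has remain field):", partial)
	for _, a := range schema.Attributes {
		fmt.Printf("attribute %q required=%v\n", a.Name, a.Required)
	}
	for _, b := range schema.Blocks {
		fmt.Printf("block %q labels=%v\n", b.Type, b.LabelNames)
	}
}
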
diff --git a/vendor/github.com/hashicorp/hcl2/gohcl/types.go b/vendor/github.com/hashicorp/hcl2/gohcl/types.go
deleted file mode 100644
index a94f275ad..000000000
--- a/vendor/github.com/hashicorp/hcl2/gohcl/types.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package gohcl
-
-import (
- "reflect"
-
- "github.com/hashicorp/hcl2/hcl"
-)
-
-var victimExpr hcl.Expression
-var victimBody hcl.Body
-
-var exprType = reflect.TypeOf(&victimExpr).Elem()
-var bodyType = reflect.TypeOf(&victimBody).Elem()
-var blockType = reflect.TypeOf((*hcl.Block)(nil))
-var attrType = reflect.TypeOf((*hcl.Attribute)(nil))
-var attrsType = reflect.TypeOf(hcl.Attributes(nil))
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go
deleted file mode 100644
index c320961e1..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/diagnostic.go
+++ /dev/null
@@ -1,143 +0,0 @@
-package hcl
-
-import (
- "fmt"
-)
-
-// DiagnosticSeverity represents the severity of a diagnostic.
-type DiagnosticSeverity int
-
-const (
- // DiagInvalid is the invalid zero value of DiagnosticSeverity
- DiagInvalid DiagnosticSeverity = iota
-
- // DiagError indicates that the problem reported by a diagnostic prevents
- // further progress in parsing and/or evaluating the subject.
- DiagError
-
- // DiagWarning indicates that the problem reported by a diagnostic warrants
- // user attention but does not prevent further progress. It is most
- // commonly used for showing deprecation notices.
- DiagWarning
-)
-
-// Diagnostic represents information to be presented to a user about an
-// error or anomaly in parsing or evaluating configuration.
-type Diagnostic struct {
- Severity DiagnosticSeverity
-
- // Summary and Detail contain the English-language description of the
- // problem. Summary is a terse description of the general problem and
- // detail is a more elaborate, often-multi-sentence description of
-	// the problem and what might be done to solve it.
- Summary string
- Detail string
-
- // Subject and Context are both source ranges relating to the diagnostic.
- //
- // Subject is a tight range referring to exactly the construct that
- // is problematic, while Context is an optional broader range (which should
- // fully contain Subject) that ought to be shown around Subject when
- // generating isolated source-code snippets in diagnostic messages.
- // If Context is nil, the Subject is also the Context.
- //
- // Some diagnostics have no source ranges at all. If Context is set then
- // Subject should always also be set.
- Subject *Range
- Context *Range
-
- // For diagnostics that occur when evaluating an expression, Expression
- // may refer to that expression and EvalContext may point to the
- // EvalContext that was active when evaluating it. This may allow for the
- // inclusion of additional useful information when rendering a diagnostic
- // message to the user.
- //
- // It is not always possible to select a single EvalContext for a
- // diagnostic, and so in some cases this field may be nil even when an
- // expression causes a problem.
- //
- // EvalContexts form a tree, so the given EvalContext may refer to a parent
- // which in turn refers to another parent, etc. For a full picture of all
- // of the active variables and functions the caller must walk up this
- // chain, preferring definitions that are "closer" to the expression in
- // case of colliding names.
- Expression Expression
- EvalContext *EvalContext
-}
-
-// Diagnostics is a list of Diagnostic instances.
-type Diagnostics []*Diagnostic
-
-// error implementation, so that diagnostics can be returned via APIs
-// that normally deal in vanilla Go errors.
-//
-// This presents only minimal context about the error, for compatibility
-// with usual expectations about how errors will present as strings.
-func (d *Diagnostic) Error() string {
- return fmt.Sprintf("%s: %s; %s", d.Subject, d.Summary, d.Detail)
-}
-
-// error implementation, so that sets of diagnostics can be returned via
-// APIs that normally deal in vanilla Go errors.
-func (d Diagnostics) Error() string {
- count := len(d)
- switch {
- case count == 0:
- return "no diagnostics"
- case count == 1:
- return d[0].Error()
- default:
- return fmt.Sprintf("%s, and %d other diagnostic(s)", d[0].Error(), count-1)
- }
-}
-
-// Append appends a new error to a Diagnostics and returns the whole Diagnostics.
-//
-// This is provided as a convenience for returning from a function that
-// collects and then returns a set of diagnostics:
-//
-// return nil, diags.Append(&hcl.Diagnostic{ ... })
-//
-// Note that this modifies the array underlying the diagnostics slice, so
-// must be used carefully within a single codepath. It is incorrect (and rude)
-// to extend a diagnostics created by a different subsystem.
-func (d Diagnostics) Append(diag *Diagnostic) Diagnostics {
- return append(d, diag)
-}
-
-// Extend concatenates the given Diagnostics with the receiver and returns
-// the whole new Diagnostics.
-//
-// This is similar to Append but accepts multiple diagnostics to add. It has
-// all the same caveats and constraints.
-func (d Diagnostics) Extend(diags Diagnostics) Diagnostics {
- return append(d, diags...)
-}
-
-// HasErrors returns true if the receiver contains any diagnostics of
-// severity DiagError.
-func (d Diagnostics) HasErrors() bool {
- for _, diag := range d {
- if diag.Severity == DiagError {
- return true
- }
- }
- return false
-}
-
-func (d Diagnostics) Errs() []error {
- var errs []error
- for _, diag := range d {
- if diag.Severity == DiagError {
- errs = append(errs, diag)
- }
- }
-
- return errs
-}
-
-// A DiagnosticWriter emits diagnostics somehow.
-type DiagnosticWriter interface {
- WriteDiagnostic(*Diagnostic) error
- WriteDiagnostics(Diagnostics) error
-}
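
A brief sketch of producing and inspecting Diagnostics from application code; the range and messages are made up:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
)

func validatePort(port int, subject hcl.Range) hcl.Diagnostics {
	var diags hcl.Diagnostics
	if port < 1 || port > 65535 {
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Invalid port",
			Detail:   fmt.Sprintf("Port %d is outside the range 1-65535.", port),
			Subject:  &subject,
		})
	}
	return diags
}

func main() {
	rng := hcl.Range{
		Filename: "example.hcl",
		Start:    hcl.Pos{Line: 3, Column: 1},
		End:      hcl.Pos{Line: 3, Column: 10},
	}
	diags := validatePort(99999, rng)
	if diags.HasErrors() {
		fmt.Println(diags.Error()) // first error plus a count of the rest
	}
}
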
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go b/vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go
deleted file mode 100644
index 0b4a2629b..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/diagnostic_text.go
+++ /dev/null
@@ -1,311 +0,0 @@
-package hcl
-
-import (
- "bufio"
- "bytes"
- "errors"
- "fmt"
- "io"
- "sort"
-
- wordwrap "github.com/mitchellh/go-wordwrap"
- "github.com/zclconf/go-cty/cty"
-)
-
-type diagnosticTextWriter struct {
- files map[string]*File
- wr io.Writer
- width uint
- color bool
-}
-
-// NewDiagnosticTextWriter creates a DiagnosticWriter that writes diagnostics
-// to the given writer as formatted text.
-//
-// It is designed to produce text appropriate to print in a monospaced font
-// in a terminal of a particular width, or optionally with no width limit.
-//
-// The given width may be zero to disable word-wrapping of the detail text
-// and truncation of source code snippets.
-//
-// If color is set to true, the output will include VT100 escape sequences to
-// color-code the severity indicators. It is suggested to turn this off if
-// the target writer is not a terminal.
-func NewDiagnosticTextWriter(wr io.Writer, files map[string]*File, width uint, color bool) DiagnosticWriter {
- return &diagnosticTextWriter{
- files: files,
- wr: wr,
- width: width,
- color: color,
- }
-}
-
-func (w *diagnosticTextWriter) WriteDiagnostic(diag *Diagnostic) error {
- if diag == nil {
- return errors.New("nil diagnostic")
- }
-
- var colorCode, highlightCode, resetCode string
- if w.color {
- switch diag.Severity {
- case DiagError:
- colorCode = "\x1b[31m"
- case DiagWarning:
- colorCode = "\x1b[33m"
- }
- resetCode = "\x1b[0m"
- highlightCode = "\x1b[1;4m"
- }
-
- var severityStr string
- switch diag.Severity {
- case DiagError:
- severityStr = "Error"
- case DiagWarning:
- severityStr = "Warning"
- default:
- // should never happen
- severityStr = "???????"
- }
-
- fmt.Fprintf(w.wr, "%s%s%s: %s\n\n", colorCode, severityStr, resetCode, diag.Summary)
-
- if diag.Subject != nil {
- snipRange := *diag.Subject
- highlightRange := snipRange
- if diag.Context != nil {
- // Show enough of the source code to include both the subject
- // and context ranges, which overlap in all reasonable
- // situations.
- snipRange = RangeOver(snipRange, *diag.Context)
- }
- // We can't illustrate an empty range, so we'll turn such ranges into
- // single-character ranges, which might not be totally valid (may point
- // off the end of a line, or off the end of the file) but are good
- // enough for the bounds checks we do below.
- if snipRange.Empty() {
- snipRange.End.Byte++
- snipRange.End.Column++
- }
- if highlightRange.Empty() {
- highlightRange.End.Byte++
- highlightRange.End.Column++
- }
-
- file := w.files[diag.Subject.Filename]
- if file == nil || file.Bytes == nil {
- fmt.Fprintf(w.wr, " on %s line %d:\n (source code not available)\n\n", diag.Subject.Filename, diag.Subject.Start.Line)
- } else {
-
- var contextLine string
- if diag.Subject != nil {
- contextLine = contextString(file, diag.Subject.Start.Byte)
- if contextLine != "" {
- contextLine = ", in " + contextLine
- }
- }
-
- fmt.Fprintf(w.wr, " on %s line %d%s:\n", diag.Subject.Filename, diag.Subject.Start.Line, contextLine)
-
- src := file.Bytes
- sc := NewRangeScanner(src, diag.Subject.Filename, bufio.ScanLines)
-
- for sc.Scan() {
- lineRange := sc.Range()
- if !lineRange.Overlaps(snipRange) {
- continue
- }
-
- beforeRange, highlightedRange, afterRange := lineRange.PartitionAround(highlightRange)
- if highlightedRange.Empty() {
- fmt.Fprintf(w.wr, "%4d: %s\n", lineRange.Start.Line, sc.Bytes())
- } else {
- before := beforeRange.SliceBytes(src)
- highlighted := highlightedRange.SliceBytes(src)
- after := afterRange.SliceBytes(src)
- fmt.Fprintf(
- w.wr, "%4d: %s%s%s%s%s\n",
- lineRange.Start.Line,
- before,
- highlightCode, highlighted, resetCode,
- after,
- )
- }
-
- }
-
- w.wr.Write([]byte{'\n'})
- }
-
- if diag.Expression != nil && diag.EvalContext != nil {
- // We will attempt to render the values for any variables
- // referenced in the given expression as additional context, for
- // situations where the same expression is evaluated multiple
- // times in different scopes.
- expr := diag.Expression
- ctx := diag.EvalContext
-
- vars := expr.Variables()
- stmts := make([]string, 0, len(vars))
- seen := make(map[string]struct{}, len(vars))
- for _, traversal := range vars {
- val, diags := traversal.TraverseAbs(ctx)
- if diags.HasErrors() {
- // Skip anything that generates errors, since we probably
- // already have the same error in our diagnostics set
- // already.
- continue
- }
-
- traversalStr := w.traversalStr(traversal)
- if _, exists := seen[traversalStr]; exists {
- continue // don't show duplicates when the same variable is referenced multiple times
- }
- switch {
- case !val.IsKnown():
- // Can't say anything about this yet, then.
- continue
- case val.IsNull():
- stmts = append(stmts, fmt.Sprintf("%s set to null", traversalStr))
- default:
- stmts = append(stmts, fmt.Sprintf("%s as %s", traversalStr, w.valueStr(val)))
- }
- seen[traversalStr] = struct{}{}
- }
-
- sort.Strings(stmts) // FIXME: Should maybe use a traversal-aware sort that can sort numeric indexes properly?
- last := len(stmts) - 1
-
- for i, stmt := range stmts {
- switch i {
- case 0:
- w.wr.Write([]byte{'w', 'i', 't', 'h', ' '})
- default:
- w.wr.Write([]byte{' ', ' ', ' ', ' ', ' '})
- }
- w.wr.Write([]byte(stmt))
- switch i {
- case last:
- w.wr.Write([]byte{'.', '\n', '\n'})
- default:
- w.wr.Write([]byte{',', '\n'})
- }
- }
- }
- }
-
- if diag.Detail != "" {
- detail := diag.Detail
- if w.width != 0 {
- detail = wordwrap.WrapString(detail, w.width)
- }
- fmt.Fprintf(w.wr, "%s\n\n", detail)
- }
-
- return nil
-}
-
-func (w *diagnosticTextWriter) WriteDiagnostics(diags Diagnostics) error {
- for _, diag := range diags {
- err := w.WriteDiagnostic(diag)
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-func (w *diagnosticTextWriter) traversalStr(traversal Traversal) string {
- // This is a specialized subset of traversal rendering tailored to
- // producing helpful contextual messages in diagnostics. It is not
- // comprehensive nor intended to be used for other purposes.
-
- var buf bytes.Buffer
- for _, step := range traversal {
- switch tStep := step.(type) {
- case TraverseRoot:
- buf.WriteString(tStep.Name)
- case TraverseAttr:
- buf.WriteByte('.')
- buf.WriteString(tStep.Name)
- case TraverseIndex:
- buf.WriteByte('[')
- if keyTy := tStep.Key.Type(); keyTy.IsPrimitiveType() {
- buf.WriteString(w.valueStr(tStep.Key))
- } else {
- // We'll just use a placeholder for more complex values,
- // since otherwise our result could grow ridiculously long.
- buf.WriteString("...")
- }
- buf.WriteByte(']')
- }
- }
- return buf.String()
-}
-
-func (w *diagnosticTextWriter) valueStr(val cty.Value) string {
- // This is a specialized subset of value rendering tailored to producing
- // helpful but concise messages in diagnostics. It is not comprehensive
- // nor intended to be used for other purposes.
-
- ty := val.Type()
- switch {
- case val.IsNull():
- return "null"
- case !val.IsKnown():
- // Should never happen here because we should filter before we get
- // in here, but we'll do something reasonable rather than panic.
- return "(not yet known)"
- case ty == cty.Bool:
- if val.True() {
- return "true"
- }
- return "false"
- case ty == cty.Number:
- bf := val.AsBigFloat()
- return bf.Text('g', 10)
- case ty == cty.String:
- // Go string syntax is not exactly the same as HCL native string syntax,
- // but we'll accept the minor edge-cases where this is different here
- // for now, just to get something reasonable here.
- return fmt.Sprintf("%q", val.AsString())
- case ty.IsCollectionType() || ty.IsTupleType():
- l := val.LengthInt()
- switch l {
- case 0:
- return "empty " + ty.FriendlyName()
- case 1:
- return ty.FriendlyName() + " with 1 element"
- default:
- return fmt.Sprintf("%s with %d elements", ty.FriendlyName(), l)
- }
- case ty.IsObjectType():
- atys := ty.AttributeTypes()
- l := len(atys)
- switch l {
- case 0:
- return "object with no attributes"
- case 1:
- var name string
- for k := range atys {
- name = k
- }
- return fmt.Sprintf("object with 1 attribute %q", name)
- default:
- return fmt.Sprintf("object with %d attributes", l)
- }
- default:
- return ty.FriendlyName()
- }
-}
-
-func contextString(file *File, offset int) string {
- type contextStringer interface {
- ContextString(offset int) string
- }
-
- if cser, ok := file.Nav.(contextStringer); ok {
- return cser.ContextString(offset)
- }
- return ""
-}
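
A sketch of wiring the text writer above to a parser's file map so that errors print with source snippets; the file name and content are hypothetical:

package main

import (
	"os"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hclparse"
)

func main() {
	src := []byte(`io_mode = `) // deliberately invalid HCL
	parser := hclparse.NewParser()
	_, diags := parser.ParseHCL(src, "broken.hcl")

	// Wrap detail text at 78 columns; disable color since stdout may be piped.
	wr := hcl.NewDiagnosticTextWriter(os.Stdout, parser.Files(), 78, false)
	if diags.HasErrors() {
		wr.WriteDiagnostics(diags)
		os.Exit(1)
	}
}
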
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go b/vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go
deleted file mode 100644
index c12833440..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/didyoumean.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package hcl
-
-import (
- "github.com/agext/levenshtein"
-)
-
-// nameSuggestion tries to find a name from the given slice of suggested names
-// that is close to the given name and returns it if found. If no suggestion
-// is close enough, returns the empty string.
-//
-// The suggestions are tried in order, so earlier suggestions take precedence
-// if the given string is similar to two or more suggestions.
-//
-// This function is intended to be used with a relatively-small number of
-// suggestions. It's not optimized for hundreds or thousands of them.
-func nameSuggestion(given string, suggestions []string) string {
- for _, suggestion := range suggestions {
- dist := levenshtein.Distance(given, suggestion, nil)
- if dist < 3 { // threshold determined experimentally
- return suggestion
- }
- }
- return ""
-}
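
nameSuggestion is unexported, but the same "did you mean" check is easy to reproduce with the agext/levenshtein package it wraps; a standalone sketch using the same distance threshold:

package main

import (
	"fmt"

	"github.com/agext/levenshtein"
)

func suggest(given string, candidates []string) string {
	for _, c := range candidates {
		// A nil Params value selects the package's default costs.
		if levenshtein.Distance(given, c, nil) < 3 {
			return c
		}
	}
	return ""
}

func main() {
	fmt.Println(suggest("requried", []string{"required", "optional", "computed"})) // required
}
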
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/doc.go b/vendor/github.com/hashicorp/hcl2/hcl/doc.go
deleted file mode 100644
index 01318c96f..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/doc.go
+++ /dev/null
@@ -1 +0,0 @@
-package hcl
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/eval_context.go b/vendor/github.com/hashicorp/hcl2/hcl/eval_context.go
deleted file mode 100644
index 915910ad8..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/eval_context.go
+++ /dev/null
@@ -1,25 +0,0 @@
-package hcl
-
-import (
- "github.com/zclconf/go-cty/cty"
- "github.com/zclconf/go-cty/cty/function"
-)
-
-// An EvalContext provides the variables and functions that should be used
-// to evaluate an expression.
-type EvalContext struct {
- Variables map[string]cty.Value
- Functions map[string]function.Function
- parent *EvalContext
-}
-
-// NewChild returns a new EvalContext that is a child of the receiver.
-func (ctx *EvalContext) NewChild() *EvalContext {
- return &EvalContext{parent: ctx}
-}
-
-// Parent returns the parent of the receiver, or nil if the receiver has
-// no parent.
-func (ctx *EvalContext) Parent() *EvalContext {
- return ctx.parent
-}
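
A sketch of building an EvalContext, deriving a child scope, and evaluating an expression against it; the variable names and template are invented, and stdlib.UpperFunc comes from go-cty:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	parent := &hcl.EvalContext{
		Variables: map[string]cty.Value{"region": cty.StringVal("eu-west-2")},
		Functions: map[string]function.Function{"upper": stdlib.UpperFunc},
	}

	// Child scopes can add variables without mutating the parent.
	child := parent.NewChild()
	child.Variables = map[string]cty.Value{"name": cty.StringVal("api")}

	expr, diags := hclsyntax.ParseExpression(
		[]byte(`"${name}-${upper(region)}"`), "inline", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags)
	}

	val, diags := expr.Value(child)
	if diags.HasErrors() {
		panic(diags)
	}
	fmt.Println(val.AsString()) // api-EU-WEST-2
}
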
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_call.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_call.go
deleted file mode 100644
index 6963fbae3..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/expr_call.go
+++ /dev/null
@@ -1,46 +0,0 @@
-package hcl
-
-// ExprCall tests if the given expression is a function call and,
-// if so, extracts the function name and the expressions that represent
-// the arguments. If the given expression is not statically a function call,
-// error diagnostics are returned.
-//
-// A particular Expression implementation can support this function by
-// offering a method called ExprCall that takes no arguments and returns
-// *StaticCall. This method should return nil if a static call cannot
-// be extracted. Alternatively, an implementation can support
-// UnwrapExpression to delegate handling of this function to a wrapped
-// Expression object.
-func ExprCall(expr Expression) (*StaticCall, Diagnostics) {
- type exprCall interface {
- ExprCall() *StaticCall
- }
-
- physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
- _, supported := expr.(exprCall)
- return supported
- })
-
- if exC, supported := physExpr.(exprCall); supported {
- if call := exC.ExprCall(); call != nil {
- return call, nil
- }
- }
- return nil, Diagnostics{
- &Diagnostic{
- Severity: DiagError,
- Summary: "Invalid expression",
- Detail: "A static function call is required.",
- Subject: expr.StartRange().Ptr(),
- },
- }
-}
-
-// StaticCall represents a function call that was extracted statically from
-// an expression using ExprCall.
-type StaticCall struct {
- Name string
- NameRange Range
- Arguments []Expression
- ArgsRange Range
-}
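
A sketch of static analysis with ExprCall: pulling out the function name and argument expressions without evaluating anything; the input expression is made up:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	expr, diags := hclsyntax.ParseExpression(
		[]byte(`file("policy.json")`), "inline", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags)
	}

	call, diags := hcl.ExprCall(expr)
	if diags.HasErrors() {
		panic(diags)
	}
	fmt.Println(call.Name, len(call.Arguments)) // file 1
}
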
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_list.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_list.go
deleted file mode 100644
index d05cca0b9..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/expr_list.go
+++ /dev/null
@@ -1,37 +0,0 @@
-package hcl
-
-// ExprList tests if the given expression is a static list construct and,
-// if so, extracts the expressions that represent the list elements.
-// If the given expression is not a static list, error diagnostics are
-// returned.
-//
-// A particular Expression implementation can support this function by
-// offering a method called ExprList that takes no arguments and returns
-// []Expression. This method should return nil if a static list cannot
-// be extracted. Alternatively, an implementation can support
-// UnwrapExpression to delegate handling of this function to a wrapped
-// Expression object.
-func ExprList(expr Expression) ([]Expression, Diagnostics) {
- type exprList interface {
- ExprList() []Expression
- }
-
- physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
- _, supported := expr.(exprList)
- return supported
- })
-
- if exL, supported := physExpr.(exprList); supported {
- if list := exL.ExprList(); list != nil {
- return list, nil
- }
- }
- return nil, Diagnostics{
- &Diagnostic{
- Severity: DiagError,
- Summary: "Invalid expression",
- Detail: "A static list expression is required.",
- Subject: expr.StartRange().Ptr(),
- },
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_map.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_map.go
deleted file mode 100644
index 96d1ce4bf..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/expr_map.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package hcl
-
-// ExprMap tests if the given expression is a static map construct and,
-// if so, extracts the expressions that represent the map elements.
-// If the given expression is not a static map, error diagnostics are
-// returned.
-//
-// A particular Expression implementation can support this function by
-// offering a method called ExprMap that takes no arguments and returns
-// []KeyValuePair. This method should return nil if a static map cannot
-// be extracted. Alternatively, an implementation can support
-// UnwrapExpression to delegate handling of this function to a wrapped
-// Expression object.
-func ExprMap(expr Expression) ([]KeyValuePair, Diagnostics) {
- type exprMap interface {
- ExprMap() []KeyValuePair
- }
-
- physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
- _, supported := expr.(exprMap)
- return supported
- })
-
- if exM, supported := physExpr.(exprMap); supported {
- if pairs := exM.ExprMap(); pairs != nil {
- return pairs, nil
- }
- }
- return nil, Diagnostics{
- &Diagnostic{
- Severity: DiagError,
- Summary: "Invalid expression",
- Detail: "A static map expression is required.",
- Subject: expr.StartRange().Ptr(),
- },
- }
-}
-
-// KeyValuePair represents a pair of expressions that serve as a single item
-// within a map or object definition construct.
-type KeyValuePair struct {
- Key Expression
- Value Expression
-}
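
ExprList and ExprMap follow the same pattern as ExprCall; a combined sketch on made-up inputs:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func mustExpr(src string) hcl.Expression {
	expr, diags := hclsyntax.ParseExpression([]byte(src), "inline", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags)
	}
	return expr
}

func main() {
	items, diags := hcl.ExprList(mustExpr(`[1, 2, 3]`))
	if diags.HasErrors() {
		panic(diags)
	}
	fmt.Println("list elements:", len(items)) // 3

	pairs, diags := hcl.ExprMap(mustExpr(`{ name = "web", port = 80 }`))
	if diags.HasErrors() {
		panic(diags)
	}
	fmt.Println("map pairs:", len(pairs)) // 2
}
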
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go b/vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go
deleted file mode 100644
index 6d5d205c4..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/expr_unwrap.go
+++ /dev/null
@@ -1,68 +0,0 @@
-package hcl
-
-type unwrapExpression interface {
- UnwrapExpression() Expression
-}
-
-// UnwrapExpression removes any "wrapper" expressions from the given expression,
-// to recover the representation of the physical expression given in source
-// code.
-//
-// Sometimes wrapping expressions are used to modify expression behavior, e.g.
-// in extensions that need to make some local variables available to certain
-// sub-trees of the configuration. This can make it difficult to reliably
-// type-assert on the physical AST types used by the underlying syntax.
-//
-// Unwrapping an expression may modify its behavior by stripping away any
-// additional constraints or capabilities being applied to the Value and
-// Variables methods, so this function should generally only be used prior
-// to operations that concern themselves with the static syntax of the input
-// configuration, and not with the effective value of the expression.
-//
-// Wrapper expression types must support unwrapping by implementing a method
-// called UnwrapExpression that takes no arguments and returns the embedded
-// Expression. Implementations of this method should peel away only one level
-// of wrapping, if multiple are present. This method may return nil to
-// indicate _dynamically_ that no wrapped expression is available, for
-// expression types that might only behave as wrappers in certain cases.
-func UnwrapExpression(expr Expression) Expression {
- for {
- unwrap, wrapped := expr.(unwrapExpression)
- if !wrapped {
- return expr
- }
- innerExpr := unwrap.UnwrapExpression()
- if innerExpr == nil {
- return expr
- }
- expr = innerExpr
- }
-}
-
-// UnwrapExpressionUntil is similar to UnwrapExpression except it gives the
-// caller an opportunity to test each level of unwrapping to see whether a
-// particular expression is accepted.
-//
-// This could be used, for example, to unwrap until a particular other
-// interface is satisfied, regardless of which wrapping level it is satisfied
-// at.
-//
-// The given callback function must return false to continue wrapping, or
-// true to accept and return the proposed expression given. If the callback
-// function rejects even the final, physical expression then the result of
-// this function is nil.
-func UnwrapExpressionUntil(expr Expression, until func(Expression) bool) Expression {
- for {
- if until(expr) {
- return expr
- }
- unwrap, wrapped := expr.(unwrapExpression)
- if !wrapped {
- return nil
- }
- expr = unwrap.UnwrapExpression()
- if expr == nil {
- return nil
- }
- }
-}
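
A compact sketch of a wrapper expression cooperating with UnwrapExpression by implementing the method described above; the "annotated" wrapper is invented purely for illustration:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

// annotatedExpr wraps another expression, e.g. to attach extra metadata,
// while delegating evaluation to the embedded Expression.
type annotatedExpr struct {
	hcl.Expression
	note string
}

// UnwrapExpression peels away exactly one level of wrapping.
func (e *annotatedExpr) UnwrapExpression() hcl.Expression { return e.Expression }

func main() {
	inner, diags := hclsyntax.ParseExpression([]byte(`1 + 1`), "inline", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags)
	}
	wrapped := &annotatedExpr{Expression: inner, note: "from defaults"}

	phys := hcl.UnwrapExpression(wrapped)
	fmt.Printf("unwrapped to %T (note was %q)\n", phys, wrapped.note)

	val, _ := phys.Value(nil)
	fmt.Println(val.Equals(cty.NumberIntVal(2)).True()) // true
}
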
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/diagnostics.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/diagnostics.go
deleted file mode 100644
index 94eaf5892..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/diagnostics.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package hclsyntax
-
-import (
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// setDiagEvalContext is an internal helper that will impose a particular
-// EvalContext on a set of diagnostics in-place, for any diagnostic that
-// does not already have an EvalContext set.
-//
-// We generally expect diagnostics to be immutable, but this is safe to use
-// on any Diagnostics where none of the contained Diagnostic objects have yet
-// been seen by a caller. Its purpose is to apply additional context to a
-// set of diagnostics produced by a "deeper" component as the stack unwinds
-// during expression evaluation.
-func setDiagEvalContext(diags hcl.Diagnostics, expr hcl.Expression, ctx *hcl.EvalContext) {
- for _, diag := range diags {
- if diag.Expression == nil {
- diag.Expression = expr
- diag.EvalContext = ctx
- }
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go
deleted file mode 100644
index ccc1c0ae2..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/didyoumean.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package hclsyntax
-
-import (
- "github.com/agext/levenshtein"
-)
-
-// nameSuggestion tries to find a name from the given slice of suggested names
-// that is close to the given name and returns it if found. If no suggestion
-// is close enough, returns the empty string.
-//
-// The suggestions are tried in order, so earlier suggestions take precedence
-// if the given string is similar to two or more suggestions.
-//
-// This function is intended to be used with a relatively-small number of
-// suggestions. It's not optimized for hundreds or thousands of them.
-func nameSuggestion(given string, suggestions []string) string {
- for _, suggestion := range suggestions {
- dist := levenshtein.Distance(given, suggestion, nil)
- if dist < 3 { // threshold determined experimentally
- return suggestion
- }
- }
- return ""
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go
deleted file mode 100644
index 617bc29dc..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/doc.go
+++ /dev/null
@@ -1,7 +0,0 @@
-// Package hclsyntax contains the parser, AST, etc for HCL's native language,
-// as opposed to the JSON variant.
-//
-// In normal use applications should rarely depend on this package directly,
-// instead preferring the higher-level interface of the main hcl package and
-// its companion package hclparse.
-package hclsyntax
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go
deleted file mode 100644
index d3f7a74d3..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression.go
+++ /dev/null
@@ -1,1468 +0,0 @@
-package hclsyntax
-
-import (
- "fmt"
- "sync"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/zclconf/go-cty/cty"
- "github.com/zclconf/go-cty/cty/convert"
- "github.com/zclconf/go-cty/cty/function"
-)
-
-// Expression is the abstract type for nodes that behave as HCL expressions.
-type Expression interface {
- Node
-
- // The hcl.Expression methods are duplicated here, rather than simply
- // embedded, because both Node and hcl.Expression have a Range method
- // and so they conflict.
-
- Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics)
- Variables() []hcl.Traversal
- StartRange() hcl.Range
-}
-
-// Assert that Expression implements hcl.Expression
-var assertExprImplExpr hcl.Expression = Expression(nil)
-
-// LiteralValueExpr is an expression that just always returns a given value.
-type LiteralValueExpr struct {
- Val cty.Value
- SrcRange hcl.Range
-}
-
-func (e *LiteralValueExpr) walkChildNodes(w internalWalkFunc) {
- // Literal values have no child nodes
-}
-
-func (e *LiteralValueExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- return e.Val, nil
-}
-
-func (e *LiteralValueExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *LiteralValueExpr) StartRange() hcl.Range {
- return e.SrcRange
-}
-
-// Implementation for hcl.AbsTraversalForExpr.
-func (e *LiteralValueExpr) AsTraversal() hcl.Traversal {
- // This one's a little weird: the contract for AsTraversal is to interpret
- // an expression as if it were traversal syntax, and traversal syntax
- // doesn't have the special keywords "null", "true", and "false" so these
- // are expected to be treated like variables in that case.
- // Since our parser already turned them into LiteralValueExpr by the time
- // we get here, we need to undo this and infer the name that would've
- // originally led to our value.
- // We don't do anything for any other values, since they don't overlap
- // with traversal roots.
-
- if e.Val.IsNull() {
- // In practice the parser only generates null values of the dynamic
- // pseudo-type for literals, so we can safely assume that any null
-		// was originally the keyword "null".
- return hcl.Traversal{
- hcl.TraverseRoot{
- Name: "null",
- SrcRange: e.SrcRange,
- },
- }
- }
-
- switch e.Val {
- case cty.True:
- return hcl.Traversal{
- hcl.TraverseRoot{
- Name: "true",
- SrcRange: e.SrcRange,
- },
- }
- case cty.False:
- return hcl.Traversal{
- hcl.TraverseRoot{
- Name: "false",
- SrcRange: e.SrcRange,
- },
- }
- default:
- // No traversal is possible for any other value.
- return nil
- }
-}
-
-// ScopeTraversalExpr is an Expression that retrieves a value from the scope
-// using a traversal.
-type ScopeTraversalExpr struct {
- Traversal hcl.Traversal
- SrcRange hcl.Range
-}
-
-func (e *ScopeTraversalExpr) walkChildNodes(w internalWalkFunc) {
- // Scope traversals have no child nodes
-}
-
-func (e *ScopeTraversalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- val, diags := e.Traversal.TraverseAbs(ctx)
- setDiagEvalContext(diags, e, ctx)
- return val, diags
-}
-
-func (e *ScopeTraversalExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *ScopeTraversalExpr) StartRange() hcl.Range {
- return e.SrcRange
-}
-
-// Implementation for hcl.AbsTraversalForExpr.
-func (e *ScopeTraversalExpr) AsTraversal() hcl.Traversal {
- return e.Traversal
-}
-
-// RelativeTraversalExpr is an Expression that retrieves a value from another
-// value using a _relative_ traversal.
-type RelativeTraversalExpr struct {
- Source Expression
- Traversal hcl.Traversal
- SrcRange hcl.Range
-}
-
-func (e *RelativeTraversalExpr) walkChildNodes(w internalWalkFunc) {
- w(e.Source)
-}
-
-func (e *RelativeTraversalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- src, diags := e.Source.Value(ctx)
- ret, travDiags := e.Traversal.TraverseRel(src)
- setDiagEvalContext(travDiags, e, ctx)
- diags = append(diags, travDiags...)
- return ret, diags
-}
-
-func (e *RelativeTraversalExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *RelativeTraversalExpr) StartRange() hcl.Range {
- return e.SrcRange
-}
-
-// Implementation for hcl.AbsTraversalForExpr.
-func (e *RelativeTraversalExpr) AsTraversal() hcl.Traversal {
- // We can produce a traversal only if our source can.
- st, diags := hcl.AbsTraversalForExpr(e.Source)
- if diags.HasErrors() {
- return nil
- }
-
- ret := make(hcl.Traversal, len(st)+len(e.Traversal))
- copy(ret, st)
- copy(ret[len(st):], e.Traversal)
- return ret
-}
-
-// FunctionCallExpr is an Expression that calls a function from the EvalContext
-// and returns its result.
-type FunctionCallExpr struct {
- Name string
- Args []Expression
-
- // If true, the final argument should be a tuple, list or set which will
- // expand to be one argument per element.
- ExpandFinal bool
-
- NameRange hcl.Range
- OpenParenRange hcl.Range
- CloseParenRange hcl.Range
-}
-
-func (e *FunctionCallExpr) walkChildNodes(w internalWalkFunc) {
- for _, arg := range e.Args {
- w(arg)
- }
-}
-
-func (e *FunctionCallExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- var diags hcl.Diagnostics
-
- var f function.Function
- exists := false
- hasNonNilMap := false
- thisCtx := ctx
- for thisCtx != nil {
- if thisCtx.Functions == nil {
- thisCtx = thisCtx.Parent()
- continue
- }
- hasNonNilMap = true
- f, exists = thisCtx.Functions[e.Name]
- if exists {
- break
- }
- thisCtx = thisCtx.Parent()
- }
-
- if !exists {
- if !hasNonNilMap {
- return cty.DynamicVal, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Function calls not allowed",
- Detail: "Functions may not be called here.",
- Subject: e.Range().Ptr(),
- Expression: e,
- EvalContext: ctx,
- },
- }
- }
-
- avail := make([]string, 0, len(ctx.Functions))
- for name := range ctx.Functions {
- avail = append(avail, name)
- }
- suggestion := nameSuggestion(e.Name, avail)
- if suggestion != "" {
- suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
- }
-
- return cty.DynamicVal, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Call to unknown function",
- Detail: fmt.Sprintf("There is no function named %q.%s", e.Name, suggestion),
- Subject: &e.NameRange,
- Context: e.Range().Ptr(),
- Expression: e,
- EvalContext: ctx,
- },
- }
- }
-
- params := f.Params()
- varParam := f.VarParam()
-
- args := e.Args
- if e.ExpandFinal {
- if len(args) < 1 {
- // should never happen if the parser is behaving
- panic("ExpandFinal set on function call with no arguments")
- }
- expandExpr := args[len(args)-1]
- expandVal, expandDiags := expandExpr.Value(ctx)
- diags = append(diags, expandDiags...)
- if expandDiags.HasErrors() {
- return cty.DynamicVal, diags
- }
-
- switch {
- case expandVal.Type().IsTupleType() || expandVal.Type().IsListType() || expandVal.Type().IsSetType():
- if expandVal.IsNull() {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid expanding argument value",
- Detail: "The expanding argument (indicated by ...) must not be null.",
- Subject: expandExpr.Range().Ptr(),
- Context: e.Range().Ptr(),
- Expression: expandExpr,
- EvalContext: ctx,
- })
- return cty.DynamicVal, diags
- }
- if !expandVal.IsKnown() {
- return cty.DynamicVal, diags
- }
-
- newArgs := make([]Expression, 0, (len(args)-1)+expandVal.LengthInt())
- newArgs = append(newArgs, args[:len(args)-1]...)
- it := expandVal.ElementIterator()
- for it.Next() {
- _, val := it.Element()
- newArgs = append(newArgs, &LiteralValueExpr{
- Val: val,
- SrcRange: expandExpr.Range(),
- })
- }
- args = newArgs
- default:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid expanding argument value",
- Detail: "The expanding argument (indicated by ...) must be of a tuple, list, or set type.",
- Subject: expandExpr.Range().Ptr(),
- Context: e.Range().Ptr(),
- Expression: expandExpr,
- EvalContext: ctx,
- })
- return cty.DynamicVal, diags
- }
- }
-
- if len(args) < len(params) {
- missing := params[len(args)]
- qual := ""
- if varParam != nil {
- qual = " at least"
- }
- return cty.DynamicVal, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Not enough function arguments",
- Detail: fmt.Sprintf(
- "Function %q expects%s %d argument(s). Missing value for %q.",
- e.Name, qual, len(params), missing.Name,
- ),
- Subject: &e.CloseParenRange,
- Context: e.Range().Ptr(),
- Expression: e,
- EvalContext: ctx,
- },
- }
- }
-
- if varParam == nil && len(args) > len(params) {
- return cty.DynamicVal, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Too many function arguments",
- Detail: fmt.Sprintf(
- "Function %q expects only %d argument(s).",
- e.Name, len(params),
- ),
- Subject: args[len(params)].StartRange().Ptr(),
- Context: e.Range().Ptr(),
- Expression: e,
- EvalContext: ctx,
- },
- }
- }
-
- argVals := make([]cty.Value, len(args))
-
- for i, argExpr := range args {
- var param *function.Parameter
- if i < len(params) {
- param = ¶ms[i]
- } else {
- param = varParam
- }
-
- val, argDiags := argExpr.Value(ctx)
- if len(argDiags) > 0 {
- diags = append(diags, argDiags...)
- }
-
- // Try to convert our value to the parameter type
- val, err := convert.Convert(val, param.Type)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid function argument",
- Detail: fmt.Sprintf(
- "Invalid value for %q parameter: %s.",
- param.Name, err,
- ),
- Subject: argExpr.StartRange().Ptr(),
- Context: e.Range().Ptr(),
- Expression: argExpr,
- EvalContext: ctx,
- })
- }
-
- argVals[i] = val
- }
-
- if diags.HasErrors() {
- // Don't try to execute the function if we already have errors with
- // the arguments, because the result will probably be a confusing
- // error message.
- return cty.DynamicVal, diags
- }
-
- resultVal, err := f.Call(argVals)
- if err != nil {
- switch terr := err.(type) {
- case function.ArgError:
- i := terr.Index
- var param *function.Parameter
- if i < len(params) {
- param = ¶ms[i]
- } else {
- param = varParam
- }
- argExpr := e.Args[i]
-
- // TODO: we should also unpick a PathError here and show the
- // path to the deep value where the error was detected.
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid function argument",
- Detail: fmt.Sprintf(
- "Invalid value for %q parameter: %s.",
- param.Name, err,
- ),
- Subject: argExpr.StartRange().Ptr(),
- Context: e.Range().Ptr(),
- Expression: argExpr,
- EvalContext: ctx,
- })
-
- default:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Error in function call",
- Detail: fmt.Sprintf(
- "Call to function %q failed: %s.",
- e.Name, err,
- ),
- Subject: e.StartRange().Ptr(),
- Context: e.Range().Ptr(),
- Expression: e,
- EvalContext: ctx,
- })
- }
-
- return cty.DynamicVal, diags
- }
-
- return resultVal, diags
-}
-
-func (e *FunctionCallExpr) Range() hcl.Range {
- return hcl.RangeBetween(e.NameRange, e.CloseParenRange)
-}
-
-func (e *FunctionCallExpr) StartRange() hcl.Range {
- return hcl.RangeBetween(e.NameRange, e.OpenParenRange)
-}
-
-// Implementation for hcl.ExprCall.
-func (e *FunctionCallExpr) ExprCall() *hcl.StaticCall {
- ret := &hcl.StaticCall{
- Name: e.Name,
- NameRange: e.NameRange,
- Arguments: make([]hcl.Expression, len(e.Args)),
- ArgsRange: hcl.RangeBetween(e.OpenParenRange, e.CloseParenRange),
- }
- // Need to convert our own Expression objects into hcl.Expression.
- for i, arg := range e.Args {
- ret.Arguments[i] = arg
- }
- return ret
-}
-
-type ConditionalExpr struct {
- Condition Expression
- TrueResult Expression
- FalseResult Expression
-
- SrcRange hcl.Range
-}
-
-func (e *ConditionalExpr) walkChildNodes(w internalWalkFunc) {
- w(e.Condition)
- w(e.TrueResult)
- w(e.FalseResult)
-}
-
-func (e *ConditionalExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- trueResult, trueDiags := e.TrueResult.Value(ctx)
- falseResult, falseDiags := e.FalseResult.Value(ctx)
- var diags hcl.Diagnostics
-
- resultType := cty.DynamicPseudoType
- convs := make([]convert.Conversion, 2)
-
- switch {
- // If either case is a dynamic null value (which would result from a
- // literal null in the config), we know that it can convert to the expected
- // type of the opposite case, and we don't need to speculatively reduce the
- // final result type to DynamicPseudoType.
-
- // If we know that either Type is a DynamicPseudoType, we can be certain
- // that the other value can convert since it's a pass-through, and we don't
- // need to unify the types. If the final evaluation results in the dynamic
- // value being returned, there's no conversion we can do, so we return the
- // value directly.
- case trueResult.RawEquals(cty.NullVal(cty.DynamicPseudoType)):
- resultType = falseResult.Type()
- convs[0] = convert.GetConversionUnsafe(cty.DynamicPseudoType, resultType)
- case falseResult.RawEquals(cty.NullVal(cty.DynamicPseudoType)):
- resultType = trueResult.Type()
- convs[1] = convert.GetConversionUnsafe(cty.DynamicPseudoType, resultType)
- case trueResult.Type() == cty.DynamicPseudoType, falseResult.Type() == cty.DynamicPseudoType:
- // the final resultType type is still unknown
- // we don't need to get the conversion, because both are a noop.
-
- default:
- // Try to find a type that both results can be converted to.
- resultType, convs = convert.UnifyUnsafe([]cty.Type{trueResult.Type(), falseResult.Type()})
- }
-
- if resultType == cty.NilType {
- return cty.DynamicVal, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Inconsistent conditional result types",
- Detail: fmt.Sprintf(
- // FIXME: Need a helper function for showing natural-language type diffs,
- // since this will generate some useless messages in some cases, like
- // "These expressions are object and object respectively" if the
- // object types don't exactly match.
- "The true and false result expressions must have consistent types. The given expressions are %s and %s, respectively.",
- trueResult.Type().FriendlyName(), falseResult.Type().FriendlyName(),
- ),
- Subject: hcl.RangeBetween(e.TrueResult.Range(), e.FalseResult.Range()).Ptr(),
- Context: &e.SrcRange,
- Expression: e,
- EvalContext: ctx,
- },
- }
- }
-
- condResult, condDiags := e.Condition.Value(ctx)
- diags = append(diags, condDiags...)
- if condResult.IsNull() {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Null condition",
- Detail: "The condition value is null. Conditions must either be true or false.",
- Subject: e.Condition.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.Condition,
- EvalContext: ctx,
- })
- return cty.UnknownVal(resultType), diags
- }
- if !condResult.IsKnown() {
- return cty.UnknownVal(resultType), diags
- }
- condResult, err := convert.Convert(condResult, cty.Bool)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Incorrect condition type",
- Detail: fmt.Sprintf("The condition expression must be of type bool."),
- Subject: e.Condition.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.Condition,
- EvalContext: ctx,
- })
- return cty.UnknownVal(resultType), diags
- }
-
- if condResult.True() {
- diags = append(diags, trueDiags...)
- if convs[0] != nil {
- var err error
- trueResult, err = convs[0](trueResult)
- if err != nil {
- // Unsafe conversion failed with the concrete result value
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Inconsistent conditional result types",
- Detail: fmt.Sprintf(
- "The true result value has the wrong type: %s.",
- err.Error(),
- ),
- Subject: e.TrueResult.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.TrueResult,
- EvalContext: ctx,
- })
- trueResult = cty.UnknownVal(resultType)
- }
- }
- return trueResult, diags
- } else {
- diags = append(diags, falseDiags...)
- if convs[1] != nil {
- var err error
- falseResult, err = convs[1](falseResult)
- if err != nil {
- // Unsafe conversion failed with the concrete result value
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Inconsistent conditional result types",
- Detail: fmt.Sprintf(
- "The false result value has the wrong type: %s.",
- err.Error(),
- ),
- Subject: e.FalseResult.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.FalseResult,
- EvalContext: ctx,
- })
- falseResult = cty.UnknownVal(resultType)
- }
- }
- return falseResult, diags
- }
-}
-
-func (e *ConditionalExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *ConditionalExpr) StartRange() hcl.Range {
- return e.Condition.StartRange()
-}
-
-type IndexExpr struct {
- Collection Expression
- Key Expression
-
- SrcRange hcl.Range
- OpenRange hcl.Range
-}
-
-func (e *IndexExpr) walkChildNodes(w internalWalkFunc) {
- w(e.Collection)
- w(e.Key)
-}
-
-func (e *IndexExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- var diags hcl.Diagnostics
- coll, collDiags := e.Collection.Value(ctx)
- key, keyDiags := e.Key.Value(ctx)
- diags = append(diags, collDiags...)
- diags = append(diags, keyDiags...)
-
- val, indexDiags := hcl.Index(coll, key, &e.SrcRange)
- setDiagEvalContext(indexDiags, e, ctx)
- diags = append(diags, indexDiags...)
- return val, diags
-}
-
-func (e *IndexExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *IndexExpr) StartRange() hcl.Range {
- return e.OpenRange
-}
-
-type TupleConsExpr struct {
- Exprs []Expression
-
- SrcRange hcl.Range
- OpenRange hcl.Range
-}
-
-func (e *TupleConsExpr) walkChildNodes(w internalWalkFunc) {
- for _, expr := range e.Exprs {
- w(expr)
- }
-}
-
-func (e *TupleConsExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- var vals []cty.Value
- var diags hcl.Diagnostics
-
- vals = make([]cty.Value, len(e.Exprs))
- for i, expr := range e.Exprs {
- val, valDiags := expr.Value(ctx)
- vals[i] = val
- diags = append(diags, valDiags...)
- }
-
- return cty.TupleVal(vals), diags
-}
-
-func (e *TupleConsExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *TupleConsExpr) StartRange() hcl.Range {
- return e.OpenRange
-}
-
-// Implementation for hcl.ExprList
-func (e *TupleConsExpr) ExprList() []hcl.Expression {
- ret := make([]hcl.Expression, len(e.Exprs))
- for i, expr := range e.Exprs {
- ret[i] = expr
- }
- return ret
-}
-
-type ObjectConsExpr struct {
- Items []ObjectConsItem
-
- SrcRange hcl.Range
- OpenRange hcl.Range
-}
-
-type ObjectConsItem struct {
- KeyExpr Expression
- ValueExpr Expression
-}
-
-func (e *ObjectConsExpr) walkChildNodes(w internalWalkFunc) {
- for _, item := range e.Items {
- w(item.KeyExpr)
- w(item.ValueExpr)
- }
-}
-
-func (e *ObjectConsExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- var vals map[string]cty.Value
- var diags hcl.Diagnostics
-
- // This will get set to false if we fail to produce any of our keys,
- // either because they are actually unknown or if the evaluation produces
- // errors. In all of these cases we must return DynamicPseudoType because
- // we're unable to know the full set of keys our object has, and thus
- // we can't produce a complete value of the intended type.
- //
- // We still evaluate all of the item keys and values to make sure that we
- // get as complete as possible a set of diagnostics.
- known := true
-
- vals = make(map[string]cty.Value, len(e.Items))
- for _, item := range e.Items {
- key, keyDiags := item.KeyExpr.Value(ctx)
- diags = append(diags, keyDiags...)
-
- val, valDiags := item.ValueExpr.Value(ctx)
- diags = append(diags, valDiags...)
-
- if keyDiags.HasErrors() {
- known = false
- continue
- }
-
- if key.IsNull() {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Null value as key",
- Detail: "Can't use a null value as a key.",
- Subject: item.ValueExpr.Range().Ptr(),
- Expression: item.KeyExpr,
- EvalContext: ctx,
- })
- known = false
- continue
- }
-
- var err error
- key, err = convert.Convert(key, cty.String)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Incorrect key type",
- Detail: fmt.Sprintf("Can't use this value as a key: %s.", err.Error()),
- Subject: item.KeyExpr.Range().Ptr(),
- Expression: item.KeyExpr,
- EvalContext: ctx,
- })
- known = false
- continue
- }
-
- if !key.IsKnown() {
- known = false
- continue
- }
-
- keyStr := key.AsString()
-
- vals[keyStr] = val
- }
-
- if !known {
- return cty.DynamicVal, diags
- }
-
- return cty.ObjectVal(vals), diags
-}
-
-func (e *ObjectConsExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *ObjectConsExpr) StartRange() hcl.Range {
- return e.OpenRange
-}
-
-// Implementation for hcl.ExprMap
-func (e *ObjectConsExpr) ExprMap() []hcl.KeyValuePair {
- ret := make([]hcl.KeyValuePair, len(e.Items))
- for i, item := range e.Items {
- ret[i] = hcl.KeyValuePair{
- Key: item.KeyExpr,
- Value: item.ValueExpr,
- }
- }
- return ret
-}
-
-// ObjectConsKeyExpr is a special wrapper used only for ObjectConsExpr keys,
-// which deals with the special case that a naked identifier in that position
-// must be interpreted as a literal string rather than evaluated directly.
-type ObjectConsKeyExpr struct {
- Wrapped Expression
-}
-
-func (e *ObjectConsKeyExpr) literalName() string {
- // This is our logic for deciding whether to behave like a literal string.
- // We lean on our AbsTraversalForExpr implementation here, which already
- // deals with some awkward cases like the expression being the result
- // of the keywords "null", "true" and "false" which we'd want to interpret
- // as keys here too.
- return hcl.ExprAsKeyword(e.Wrapped)
-}
-
-func (e *ObjectConsKeyExpr) walkChildNodes(w internalWalkFunc) {
- // We only treat our wrapped expression as a real expression if we're
- // not going to interpret it as a literal.
- if e.literalName() == "" {
- w(e.Wrapped)
- }
-}
-
-func (e *ObjectConsKeyExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- // Because we accept a naked identifier as a literal key rather than a
- // reference, it's confusing to accept a traversal containing periods
- // here since we can't tell if the user intends to create a key with
- // periods or actually reference something. To avoid confusing downstream
- // errors we'll just prohibit a naked multi-step traversal here and
- // require the user to state their intent more clearly.
- // (This is handled at evaluation time rather than parse time because
- // an application using static analysis _can_ accept a naked multi-step
- // traversal here, if desired.)
- if travExpr, isTraversal := e.Wrapped.(*ScopeTraversalExpr); isTraversal && len(travExpr.Traversal) > 1 {
- var diags hcl.Diagnostics
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Ambiguous attribute key",
- Detail: "If this expression is intended to be a reference, wrap it in parentheses. If it's instead intended as a literal name containing periods, wrap it in quotes to create a string literal.",
- Subject: e.Range().Ptr(),
- })
- return cty.DynamicVal, diags
- }
-
- if ln := e.literalName(); ln != "" {
- return cty.StringVal(ln), nil
- }
- return e.Wrapped.Value(ctx)
-}
-
-func (e *ObjectConsKeyExpr) Range() hcl.Range {
- return e.Wrapped.Range()
-}
-
-func (e *ObjectConsKeyExpr) StartRange() hcl.Range {
- return e.Wrapped.StartRange()
-}
-
-// Implementation for hcl.AbsTraversalForExpr.
-func (e *ObjectConsKeyExpr) AsTraversal() hcl.Traversal {
- // We can produce a traversal only if our wrappee can.
- st, diags := hcl.AbsTraversalForExpr(e.Wrapped)
- if diags.HasErrors() {
- return nil
- }
-
- return st
-}
-
-func (e *ObjectConsKeyExpr) UnwrapExpression() Expression {
- return e.Wrapped
-}
-
-// ForExpr represents iteration constructs:
-//
-// tuple = [for i, v in list: upper(v) if i > 2]
-// object = {for k, v in map: k => upper(v)}
-// object_of_tuples = {for v in list: v.key => v...}
-type ForExpr struct {
- KeyVar string // empty if ignoring the key
- ValVar string
-
- CollExpr Expression
-
- KeyExpr Expression // nil when producing a tuple
- ValExpr Expression
- CondExpr Expression // null if no "if" clause is present
-
- Group bool // set if the ellipsis is used on the value in an object for
-
- SrcRange hcl.Range
- OpenRange hcl.Range
- CloseRange hcl.Range
-}
-
-func (e *ForExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- var diags hcl.Diagnostics
-
- collVal, collDiags := e.CollExpr.Value(ctx)
- diags = append(diags, collDiags...)
-
- if collVal.IsNull() {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Iteration over null value",
- Detail: "A null value cannot be used as the collection in a 'for' expression.",
- Subject: e.CollExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.CollExpr,
- EvalContext: ctx,
- })
- return cty.DynamicVal, diags
- }
- if collVal.Type() == cty.DynamicPseudoType {
- return cty.DynamicVal, diags
- }
- if !collVal.CanIterateElements() {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Iteration over non-iterable value",
- Detail: fmt.Sprintf(
- "A value of type %s cannot be used as the collection in a 'for' expression.",
- collVal.Type().FriendlyName(),
- ),
- Subject: e.CollExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.CollExpr,
- EvalContext: ctx,
- })
- return cty.DynamicVal, diags
- }
- if !collVal.IsKnown() {
- return cty.DynamicVal, diags
- }
-
- // Before we start we'll do an early check to see if any CondExpr we've
- // been given is of the wrong type. This isn't 100% reliable (it may
- // be DynamicVal until real values are given) but it should catch some
- // straightforward cases and prevent a barrage of repeated errors.
- if e.CondExpr != nil {
- childCtx := ctx.NewChild()
- childCtx.Variables = map[string]cty.Value{}
- if e.KeyVar != "" {
- childCtx.Variables[e.KeyVar] = cty.DynamicVal
- }
- childCtx.Variables[e.ValVar] = cty.DynamicVal
-
- result, condDiags := e.CondExpr.Value(childCtx)
- diags = append(diags, condDiags...)
- if result.IsNull() {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Condition is null",
- Detail: "The value of the 'if' clause must not be null.",
- Subject: e.CondExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.CondExpr,
- EvalContext: ctx,
- })
- return cty.DynamicVal, diags
- }
- _, err := convert.Convert(result, cty.Bool)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' condition",
- Detail: fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()),
- Subject: e.CondExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.CondExpr,
- EvalContext: ctx,
- })
- return cty.DynamicVal, diags
- }
- if condDiags.HasErrors() {
- return cty.DynamicVal, diags
- }
- }
-
- if e.KeyExpr != nil {
- // Producing an object
- var vals map[string]cty.Value
- var groupVals map[string][]cty.Value
- if e.Group {
- groupVals = map[string][]cty.Value{}
- } else {
- vals = map[string]cty.Value{}
- }
-
- it := collVal.ElementIterator()
-
- known := true
- for it.Next() {
- k, v := it.Element()
- childCtx := ctx.NewChild()
- childCtx.Variables = map[string]cty.Value{}
- if e.KeyVar != "" {
- childCtx.Variables[e.KeyVar] = k
- }
- childCtx.Variables[e.ValVar] = v
-
- if e.CondExpr != nil {
- includeRaw, condDiags := e.CondExpr.Value(childCtx)
- diags = append(diags, condDiags...)
- if includeRaw.IsNull() {
- if known {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' condition",
- Detail: "The value of the 'if' clause must not be null.",
- Subject: e.CondExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.CondExpr,
- EvalContext: childCtx,
- })
- }
- known = false
- continue
- }
- include, err := convert.Convert(includeRaw, cty.Bool)
- if err != nil {
- if known {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' condition",
- Detail: fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()),
- Subject: e.CondExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.CondExpr,
- EvalContext: childCtx,
- })
- }
- known = false
- continue
- }
- if !include.IsKnown() {
- known = false
- continue
- }
-
- if include.False() {
- // Skip this element
- continue
- }
- }
-
- keyRaw, keyDiags := e.KeyExpr.Value(childCtx)
- diags = append(diags, keyDiags...)
- if keyRaw.IsNull() {
- if known {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid object key",
- Detail: "Key expression in 'for' expression must not produce a null value.",
- Subject: e.KeyExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.KeyExpr,
- EvalContext: childCtx,
- })
- }
- known = false
- continue
- }
- if !keyRaw.IsKnown() {
- known = false
- continue
- }
-
- key, err := convert.Convert(keyRaw, cty.String)
- if err != nil {
- if known {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid object key",
- Detail: fmt.Sprintf("The key expression produced an invalid result: %s.", err.Error()),
- Subject: e.KeyExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.KeyExpr,
- EvalContext: childCtx,
- })
- }
- known = false
- continue
- }
-
- val, valDiags := e.ValExpr.Value(childCtx)
- diags = append(diags, valDiags...)
-
- if e.Group {
- k := key.AsString()
- groupVals[k] = append(groupVals[k], val)
- } else {
- k := key.AsString()
- if _, exists := vals[k]; exists {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Duplicate object key",
- Detail: fmt.Sprintf(
- "Two different items produced the key %q in this 'for' expression. If duplicates are expected, use the ellipsis (...) after the value expression to enable grouping by key.",
- k,
- ),
- Subject: e.KeyExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.KeyExpr,
- EvalContext: childCtx,
- })
- } else {
- vals[key.AsString()] = val
- }
- }
- }
-
- if !known {
- return cty.DynamicVal, diags
- }
-
- if e.Group {
- vals = map[string]cty.Value{}
- for k, gvs := range groupVals {
- vals[k] = cty.TupleVal(gvs)
- }
- }
-
- return cty.ObjectVal(vals), diags
-
- } else {
- // Producing a tuple
- vals := []cty.Value{}
-
- it := collVal.ElementIterator()
-
- known := true
- for it.Next() {
- k, v := it.Element()
- childCtx := ctx.NewChild()
- childCtx.Variables = map[string]cty.Value{}
- if e.KeyVar != "" {
- childCtx.Variables[e.KeyVar] = k
- }
- childCtx.Variables[e.ValVar] = v
-
- if e.CondExpr != nil {
- includeRaw, condDiags := e.CondExpr.Value(childCtx)
- diags = append(diags, condDiags...)
- if includeRaw.IsNull() {
- if known {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' condition",
- Detail: "The value of the 'if' clause must not be null.",
- Subject: e.CondExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.CondExpr,
- EvalContext: childCtx,
- })
- }
- known = false
- continue
- }
- if !includeRaw.IsKnown() {
- // We will eventually return DynamicVal, but we'll continue
- // iterating in case there are other diagnostics to gather
- // for later elements.
- known = false
- continue
- }
-
- include, err := convert.Convert(includeRaw, cty.Bool)
- if err != nil {
- if known {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' condition",
- Detail: fmt.Sprintf("The 'if' clause value is invalid: %s.", err.Error()),
- Subject: e.CondExpr.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.CondExpr,
- EvalContext: childCtx,
- })
- }
- known = false
- continue
- }
-
- if include.False() {
- // Skip this element
- continue
- }
- }
-
- val, valDiags := e.ValExpr.Value(childCtx)
- diags = append(diags, valDiags...)
- vals = append(vals, val)
- }
-
- if !known {
- return cty.DynamicVal, diags
- }
-
- return cty.TupleVal(vals), diags
- }
-}
-
-func (e *ForExpr) walkChildNodes(w internalWalkFunc) {
- w(e.CollExpr)
-
- scopeNames := map[string]struct{}{}
- if e.KeyVar != "" {
- scopeNames[e.KeyVar] = struct{}{}
- }
- if e.ValVar != "" {
- scopeNames[e.ValVar] = struct{}{}
- }
-
- if e.KeyExpr != nil {
- w(ChildScope{
- LocalNames: scopeNames,
- Expr: e.KeyExpr,
- })
- }
- w(ChildScope{
- LocalNames: scopeNames,
- Expr: e.ValExpr,
- })
- if e.CondExpr != nil {
- w(ChildScope{
- LocalNames: scopeNames,
- Expr: e.CondExpr,
- })
- }
-}
-
-func (e *ForExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *ForExpr) StartRange() hcl.Range {
- return e.OpenRange
-}
-
-type SplatExpr struct {
- Source Expression
- Each Expression
- Item *AnonSymbolExpr
-
- SrcRange hcl.Range
- MarkerRange hcl.Range
-}
-
-func (e *SplatExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- sourceVal, diags := e.Source.Value(ctx)
- if diags.HasErrors() {
- // We'll evaluate our "Each" expression here just to see if it
- // produces any more diagnostics we can report. Since we're not
- // assigning a value to our AnonSymbolExpr here it will return
- // DynamicVal, which should short-circuit any use of it.
- _, itemDiags := e.Item.Value(ctx)
- diags = append(diags, itemDiags...)
- return cty.DynamicVal, diags
- }
-
- sourceTy := sourceVal.Type()
- if sourceTy == cty.DynamicPseudoType {
- // If we don't even know the _type_ of our source value yet then
- // we'll need to defer all processing, since we can't decide our
- // result type either.
- return cty.DynamicVal, diags
- }
-
- // A "special power" of splat expressions is that they can be applied
- // both to tuples/lists and to other values, and in the latter case
- // the value will be treated as an implicit single-item tuple, or as
- // an empty tuple if the value is null.
- autoUpgrade := !(sourceTy.IsTupleType() || sourceTy.IsListType() || sourceTy.IsSetType())
-
- if sourceVal.IsNull() {
- if autoUpgrade {
- return cty.EmptyTupleVal, diags
- }
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Splat of null value",
- Detail: "Splat expressions (with the * symbol) cannot be applied to null sequences.",
- Subject: e.Source.Range().Ptr(),
- Context: hcl.RangeBetween(e.Source.Range(), e.MarkerRange).Ptr(),
- Expression: e.Source,
- EvalContext: ctx,
- })
- return cty.DynamicVal, diags
- }
-
- if autoUpgrade {
- sourceVal = cty.TupleVal([]cty.Value{sourceVal})
- sourceTy = sourceVal.Type()
- }
-
- // We'll compute our result type lazily if we need it. In the normal case
- // it's inferred automatically from the value we construct.
- resultTy := func() (cty.Type, hcl.Diagnostics) {
- chiCtx := ctx.NewChild()
- var diags hcl.Diagnostics
- switch {
- case sourceTy.IsListType() || sourceTy.IsSetType():
- ety := sourceTy.ElementType()
- e.Item.setValue(chiCtx, cty.UnknownVal(ety))
- val, itemDiags := e.Each.Value(chiCtx)
- diags = append(diags, itemDiags...)
- e.Item.clearValue(chiCtx) // clean up our temporary value
- return cty.List(val.Type()), diags
- case sourceTy.IsTupleType():
- etys := sourceTy.TupleElementTypes()
- resultTys := make([]cty.Type, 0, len(etys))
- for _, ety := range etys {
- e.Item.setValue(chiCtx, cty.UnknownVal(ety))
- val, itemDiags := e.Each.Value(chiCtx)
- diags = append(diags, itemDiags...)
- e.Item.clearValue(chiCtx) // clean up our temporary value
- resultTys = append(resultTys, val.Type())
- }
- return cty.Tuple(resultTys), diags
- default:
-   // Should never happen because of our promotion to tuple above.
- return cty.DynamicPseudoType, diags
- }
- }
-
- if !sourceVal.IsKnown() {
- // We can't produce a known result in this case, but we'll still
- // indicate what the result type would be, allowing any downstream type
- // checking to proceed.
- ty, tyDiags := resultTy()
- diags = append(diags, tyDiags...)
- return cty.UnknownVal(ty), diags
- }
-
- vals := make([]cty.Value, 0, sourceVal.LengthInt())
- it := sourceVal.ElementIterator()
- if ctx == nil {
- // we need a context to use our AnonSymbolExpr, so we'll just
- // make an empty one here to use as a placeholder.
- ctx = ctx.NewChild()
- }
- isKnown := true
- for it.Next() {
- _, sourceItem := it.Element()
- e.Item.setValue(ctx, sourceItem)
- newItem, itemDiags := e.Each.Value(ctx)
- diags = append(diags, itemDiags...)
- if itemDiags.HasErrors() {
- isKnown = false
- }
- vals = append(vals, newItem)
- }
- e.Item.clearValue(ctx) // clean up our temporary value
-
- if !isKnown {
-  // We'll ignore the resultTy diagnostics in this case since they
- // will just be the same errors we saw while iterating above.
- ty, _ := resultTy()
- return cty.UnknownVal(ty), diags
- }
-
- switch {
- case sourceTy.IsListType() || sourceTy.IsSetType():
- if len(vals) == 0 {
- ty, tyDiags := resultTy()
- diags = append(diags, tyDiags...)
- return cty.ListValEmpty(ty.ElementType()), diags
- }
- return cty.ListVal(vals), diags
- default:
- return cty.TupleVal(vals), diags
- }
-}
-
-func (e *SplatExpr) walkChildNodes(w internalWalkFunc) {
- w(e.Source)
- w(e.Each)
-}
-
-func (e *SplatExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *SplatExpr) StartRange() hcl.Range {
- return e.MarkerRange
-}
-
-// AnonSymbolExpr is used as a placeholder for a value in an expression that
-// can be applied dynamically to any value at runtime.
-//
-// This is a rather odd, synthetic expression. It is used as part of the
-// representation of splat expressions as a placeholder for the current item
-// being visited in the splat evaluation.
-//
-// AnonSymbolExpr cannot be evaluated in isolation. If its Value is called
-// directly then cty.DynamicVal will be returned. Instead, it is evaluated
-// in terms of another node (i.e. a splat expression) which temporarily
-// assigns it a value.
-type AnonSymbolExpr struct {
- SrcRange hcl.Range
-
- // values and its associated lock are used to isolate concurrent
- // evaluations of a symbol from one another. It is the calling application's
- // responsibility to ensure that the same splat expression is not evaluated
- // concurrently within the _same_ EvalContext, but it is fine and safe to
- // do concurrent evaluations with distinct EvalContexts.
- values map[*hcl.EvalContext]cty.Value
- valuesLock sync.RWMutex
-}
-
-func (e *AnonSymbolExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- if ctx == nil {
- return cty.DynamicVal, nil
- }
-
- e.valuesLock.RLock()
- defer e.valuesLock.RUnlock()
-
- val, exists := e.values[ctx]
- if !exists {
- return cty.DynamicVal, nil
- }
- return val, nil
-}
-
-// setValue sets a temporary local value for the expression when evaluated
-// in the given context, which must be non-nil.
-func (e *AnonSymbolExpr) setValue(ctx *hcl.EvalContext, val cty.Value) {
- e.valuesLock.Lock()
- defer e.valuesLock.Unlock()
-
- if e.values == nil {
- e.values = make(map[*hcl.EvalContext]cty.Value)
- }
- if ctx == nil {
- panic("can't setValue for a nil EvalContext")
- }
- e.values[ctx] = val
-}
-
-func (e *AnonSymbolExpr) clearValue(ctx *hcl.EvalContext) {
- e.valuesLock.Lock()
- defer e.valuesLock.Unlock()
-
- if e.values == nil {
- return
- }
- if ctx == nil {
- panic("can't clearValue for a nil EvalContext")
- }
- delete(e.values, ctx)
-}
-
-func (e *AnonSymbolExpr) walkChildNodes(w internalWalkFunc) {
- // AnonSymbolExpr is a leaf node in the tree
-}
-
-func (e *AnonSymbolExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *AnonSymbolExpr) StartRange() hcl.Range {
- return e.SrcRange
-}
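
For orientation, the node types deleted above are normally reached through the package's parser rather than constructed by hand. The following is a minimal sketch under that assumption, using the pre-removal vendor import paths (github.com/hashicorp/hcl2/...) and go-cty's stdlib; the source string and the `names` variable are made up for illustration.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	// Parses to a ForExpr whose value expression is a FunctionCallExpr and
	// whose "if" clause is a comparison; "names" is a made-up variable.
	src := `[for s in names: upper(s) if s != ""]`

	expr, diags := hclsyntax.ParseExpression([]byte(src), "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			"names": cty.TupleVal([]cty.Value{
				cty.StringVal("ami"), cty.StringVal(""), cty.StringVal("vpc"),
			}),
		},
		Functions: map[string]function.Function{
			"upper": stdlib.UpperFunc,
		},
	}

	// Value walks the AST along the code paths in the file above: the scope
	// traversal resolves "names", ForExpr iterates and filters, the function
	// call looks up "upper" in the EvalContext, and a cty tuple is produced.
	val, diags := expr.Value(ctx)
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println(val.GoString()) // a tuple containing "AMI" and "VPC"
}
```

Evaluation errors surface as hcl.Diagnostics rather than panics, which is why every node above threads a diagnostics slice through its Value method.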
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go
deleted file mode 100644
index 7f59f1a27..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_ops.go
+++ /dev/null
@@ -1,268 +0,0 @@
-package hclsyntax
-
-import (
- "fmt"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/zclconf/go-cty/cty"
- "github.com/zclconf/go-cty/cty/convert"
- "github.com/zclconf/go-cty/cty/function"
- "github.com/zclconf/go-cty/cty/function/stdlib"
-)
-
-type Operation struct {
- Impl function.Function
- Type cty.Type
-}
-
-var (
- OpLogicalOr = &Operation{
- Impl: stdlib.OrFunc,
- Type: cty.Bool,
- }
- OpLogicalAnd = &Operation{
- Impl: stdlib.AndFunc,
- Type: cty.Bool,
- }
- OpLogicalNot = &Operation{
- Impl: stdlib.NotFunc,
- Type: cty.Bool,
- }
-
- OpEqual = &Operation{
- Impl: stdlib.EqualFunc,
- Type: cty.Bool,
- }
- OpNotEqual = &Operation{
- Impl: stdlib.NotEqualFunc,
- Type: cty.Bool,
- }
-
- OpGreaterThan = &Operation{
- Impl: stdlib.GreaterThanFunc,
- Type: cty.Bool,
- }
- OpGreaterThanOrEqual = &Operation{
- Impl: stdlib.GreaterThanOrEqualToFunc,
- Type: cty.Bool,
- }
- OpLessThan = &Operation{
- Impl: stdlib.LessThanFunc,
- Type: cty.Bool,
- }
- OpLessThanOrEqual = &Operation{
- Impl: stdlib.LessThanOrEqualToFunc,
- Type: cty.Bool,
- }
-
- OpAdd = &Operation{
- Impl: stdlib.AddFunc,
- Type: cty.Number,
- }
- OpSubtract = &Operation{
- Impl: stdlib.SubtractFunc,
- Type: cty.Number,
- }
- OpMultiply = &Operation{
- Impl: stdlib.MultiplyFunc,
- Type: cty.Number,
- }
- OpDivide = &Operation{
- Impl: stdlib.DivideFunc,
- Type: cty.Number,
- }
- OpModulo = &Operation{
- Impl: stdlib.ModuloFunc,
- Type: cty.Number,
- }
- OpNegate = &Operation{
- Impl: stdlib.NegateFunc,
- Type: cty.Number,
- }
-)
-
-var binaryOps []map[TokenType]*Operation
-
-func init() {
- // This operation table maps from the operator's token type
- // to the AST operation type. All expressions produced from
- // binary operators are BinaryOp nodes.
- //
- // Binary operator groups are listed in order of precedence, with
- // the *lowest* precedence first. Operators within the same group
- // have left-to-right associativity.
- binaryOps = []map[TokenType]*Operation{
- {
- TokenOr: OpLogicalOr,
- },
- {
- TokenAnd: OpLogicalAnd,
- },
- {
- TokenEqualOp: OpEqual,
- TokenNotEqual: OpNotEqual,
- },
- {
- TokenGreaterThan: OpGreaterThan,
- TokenGreaterThanEq: OpGreaterThanOrEqual,
- TokenLessThan: OpLessThan,
- TokenLessThanEq: OpLessThanOrEqual,
- },
- {
- TokenPlus: OpAdd,
- TokenMinus: OpSubtract,
- },
- {
- TokenStar: OpMultiply,
- TokenSlash: OpDivide,
- TokenPercent: OpModulo,
- },
- }
-}
-
-type BinaryOpExpr struct {
- LHS Expression
- Op *Operation
- RHS Expression
-
- SrcRange hcl.Range
-}
-
-func (e *BinaryOpExpr) walkChildNodes(w internalWalkFunc) {
- w(e.LHS)
- w(e.RHS)
-}
-
-func (e *BinaryOpExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- impl := e.Op.Impl // assumed to be a function taking exactly two arguments
- params := impl.Params()
- lhsParam := params[0]
- rhsParam := params[1]
-
- var diags hcl.Diagnostics
-
- givenLHSVal, lhsDiags := e.LHS.Value(ctx)
- givenRHSVal, rhsDiags := e.RHS.Value(ctx)
- diags = append(diags, lhsDiags...)
- diags = append(diags, rhsDiags...)
-
- lhsVal, err := convert.Convert(givenLHSVal, lhsParam.Type)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid operand",
- Detail: fmt.Sprintf("Unsuitable value for left operand: %s.", err),
- Subject: e.LHS.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.LHS,
- EvalContext: ctx,
- })
- }
- rhsVal, err := convert.Convert(givenRHSVal, rhsParam.Type)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid operand",
- Detail: fmt.Sprintf("Unsuitable value for right operand: %s.", err),
- Subject: e.RHS.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.RHS,
- EvalContext: ctx,
- })
- }
-
- if diags.HasErrors() {
-  // Don't actually try the call if we have errors already, since
-  // this will probably just produce a confusing, duplicative diagnostic.
- return cty.UnknownVal(e.Op.Type), diags
- }
-
- args := []cty.Value{lhsVal, rhsVal}
- result, err := impl.Call(args)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- // FIXME: This diagnostic is useless.
- Severity: hcl.DiagError,
- Summary: "Operation failed",
- Detail: fmt.Sprintf("Error during operation: %s.", err),
- Subject: &e.SrcRange,
- Expression: e,
- EvalContext: ctx,
- })
- return cty.UnknownVal(e.Op.Type), diags
- }
-
- return result, diags
-}
-
-func (e *BinaryOpExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *BinaryOpExpr) StartRange() hcl.Range {
- return e.LHS.StartRange()
-}
-
-type UnaryOpExpr struct {
- Op *Operation
- Val Expression
-
- SrcRange hcl.Range
- SymbolRange hcl.Range
-}
-
-func (e *UnaryOpExpr) walkChildNodes(w internalWalkFunc) {
- w(e.Val)
-}
-
-func (e *UnaryOpExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- impl := e.Op.Impl // assumed to be a function taking exactly one argument
- params := impl.Params()
- param := params[0]
-
- givenVal, diags := e.Val.Value(ctx)
-
- val, err := convert.Convert(givenVal, param.Type)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid operand",
- Detail: fmt.Sprintf("Unsuitable value for unary operand: %s.", err),
- Subject: e.Val.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: e.Val,
- EvalContext: ctx,
- })
- }
-
- if diags.HasErrors() {
-  // Don't actually try the call if we have errors already, since
-  // this will probably just produce a confusing, duplicative diagnostic.
- return cty.UnknownVal(e.Op.Type), diags
- }
-
- args := []cty.Value{val}
- result, err := impl.Call(args)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- // FIXME: This diagnostic is useless.
- Severity: hcl.DiagError,
- Summary: "Operation failed",
- Detail: fmt.Sprintf("Error during operation: %s.", err),
- Subject: &e.SrcRange,
- Expression: e,
- EvalContext: ctx,
- })
- return cty.UnknownVal(e.Op.Type), diags
- }
-
- return result, diags
-}
-
-func (e *UnaryOpExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *UnaryOpExpr) StartRange() hcl.Range {
- return e.SymbolRange
-}
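
Each Operation above simply pairs a go-cty stdlib function with a result type, so evaluating a BinaryOpExpr reduces to converting both operands to the implementation's parameter types and calling it. A rough sketch of that flow (the operand values are arbitrary examples):

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	lhs := cty.NumberIntVal(1)
	rhs := cty.StringVal("2") // HCL converts this to cty.Number, as OpAdd requires

	// Convert each operand to the parameter type declared by the operation's
	// implementation, mirroring BinaryOpExpr.Value above.
	params := stdlib.AddFunc.Params()
	lhsVal, err := convert.Convert(lhs, params[0].Type)
	if err != nil {
		panic(err)
	}
	rhsVal, err := convert.Convert(rhs, params[1].Type)
	if err != nil {
		panic(err)
	}

	sum, err := stdlib.AddFunc.Call([]cty.Value{lhsVal, rhsVal})
	if err != nil {
		panic(err)
	}
	fmt.Println(sum.AsBigFloat()) // 3
}
```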
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go
deleted file mode 100644
index ca3dae189..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_template.go
+++ /dev/null
@@ -1,220 +0,0 @@
-package hclsyntax
-
-import (
- "bytes"
- "fmt"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/zclconf/go-cty/cty"
- "github.com/zclconf/go-cty/cty/convert"
-)
-
-type TemplateExpr struct {
- Parts []Expression
-
- SrcRange hcl.Range
-}
-
-func (e *TemplateExpr) walkChildNodes(w internalWalkFunc) {
- for _, part := range e.Parts {
- w(part)
- }
-}
-
-func (e *TemplateExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- buf := &bytes.Buffer{}
- var diags hcl.Diagnostics
- isKnown := true
-
- for _, part := range e.Parts {
- partVal, partDiags := part.Value(ctx)
- diags = append(diags, partDiags...)
-
- if partVal.IsNull() {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid template interpolation value",
- Detail: fmt.Sprintf(
- "The expression result is null. Cannot include a null value in a string template.",
- ),
- Subject: part.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: part,
- EvalContext: ctx,
- })
- continue
- }
-
- if !partVal.IsKnown() {
- // If any part is unknown then the result as a whole must be
- // unknown too. We'll keep on processing the rest of the parts
- // anyway, because we want to still emit any diagnostics resulting
- // from evaluating those.
- isKnown = false
- continue
- }
-
- strVal, err := convert.Convert(partVal, cty.String)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid template interpolation value",
- Detail: fmt.Sprintf(
- "Cannot include the given value in a string template: %s.",
- err.Error(),
- ),
- Subject: part.Range().Ptr(),
- Context: &e.SrcRange,
- Expression: part,
- EvalContext: ctx,
- })
- continue
- }
-
- buf.WriteString(strVal.AsString())
- }
-
- if !isKnown {
- return cty.UnknownVal(cty.String), diags
- }
-
- return cty.StringVal(buf.String()), diags
-}
-
-func (e *TemplateExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *TemplateExpr) StartRange() hcl.Range {
- return e.Parts[0].StartRange()
-}
-
-// IsStringLiteral returns true if and only if the template consists only of
-// single string literal, as would be created for a simple quoted string like
-// "foo".
-//
-// If this function returns true, then calling Value on the same expression
-// with a nil EvalContext will return the literal value.
-//
-// Note that "${"foo"}", "${1}", etc aren't considered literal values for the
-// purposes of this method, because the intent of this method is to identify
-// situations where the user seems to be explicitly intending literal string
-// interpretation, not situations that result in literals as a technicality
-// of the template expression unwrapping behavior.
-func (e *TemplateExpr) IsStringLiteral() bool {
- if len(e.Parts) != 1 {
- return false
- }
- _, ok := e.Parts[0].(*LiteralValueExpr)
- return ok
-}
-
-// TemplateJoinExpr is used to convert tuples of strings produced by template
-// constructs (i.e. for loops) into flat strings, by converting the values
-// to strings and joining them. This AST node is not used directly; it's
-// produced as part of the AST of a "for" loop in a template.
-type TemplateJoinExpr struct {
- Tuple Expression
-}
-
-func (e *TemplateJoinExpr) walkChildNodes(w internalWalkFunc) {
- w(e.Tuple)
-}
-
-func (e *TemplateJoinExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- tuple, diags := e.Tuple.Value(ctx)
-
- if tuple.IsNull() {
- // This indicates a bug in the code that constructed the AST.
- panic("TemplateJoinExpr got null tuple")
- }
- if tuple.Type() == cty.DynamicPseudoType {
- return cty.UnknownVal(cty.String), diags
- }
- if !tuple.Type().IsTupleType() {
- // This indicates a bug in the code that constructed the AST.
- panic("TemplateJoinExpr got non-tuple tuple")
- }
- if !tuple.IsKnown() {
- return cty.UnknownVal(cty.String), diags
- }
-
- buf := &bytes.Buffer{}
- it := tuple.ElementIterator()
- for it.Next() {
- _, val := it.Element()
-
- if val.IsNull() {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid template interpolation value",
- Detail: fmt.Sprintf(
- "An iteration result is null. Cannot include a null value in a string template.",
- ),
- Subject: e.Range().Ptr(),
- Expression: e,
- EvalContext: ctx,
- })
- continue
- }
- if val.Type() == cty.DynamicPseudoType {
- return cty.UnknownVal(cty.String), diags
- }
- strVal, err := convert.Convert(val, cty.String)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid template interpolation value",
- Detail: fmt.Sprintf(
- "Cannot include one of the interpolation results into the string template: %s.",
- err.Error(),
- ),
- Subject: e.Range().Ptr(),
- Expression: e,
- EvalContext: ctx,
- })
- continue
- }
- if !val.IsKnown() {
- return cty.UnknownVal(cty.String), diags
- }
-
- buf.WriteString(strVal.AsString())
- }
-
- return cty.StringVal(buf.String()), diags
-}
-
-func (e *TemplateJoinExpr) Range() hcl.Range {
- return e.Tuple.Range()
-}
-
-func (e *TemplateJoinExpr) StartRange() hcl.Range {
- return e.Tuple.StartRange()
-}
-
-// TemplateWrapExpr is used instead of a TemplateExpr when a template
-// consists _only_ of a single interpolation sequence. In that case, the
-// template's result is the single interpolation's result, verbatim with
-// no type conversions.
-type TemplateWrapExpr struct {
- Wrapped Expression
-
- SrcRange hcl.Range
-}
-
-func (e *TemplateWrapExpr) walkChildNodes(w internalWalkFunc) {
- w(e.Wrapped)
-}
-
-func (e *TemplateWrapExpr) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- return e.Wrapped.Value(ctx)
-}
-
-func (e *TemplateWrapExpr) Range() hcl.Range {
- return e.SrcRange
-}
-
-func (e *TemplateWrapExpr) StartRange() hcl.Range {
- return e.SrcRange
-}
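
A short sketch of how the template node types above behave, assuming the package's ParseTemplate entry point and the legacy hcl2 import paths; the template text and the variable are illustrative:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// "Hello, ${name}!" parses to a TemplateExpr with a literal part and an
	// interpolation part; a bare "${name}" would instead parse to a
	// TemplateWrapExpr and return the wrapped value without string conversion.
	expr, diags := hclsyntax.ParseTemplate([]byte("Hello, ${name}!"), "greeting.tmpl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	val, diags := expr.Value(&hcl.EvalContext{
		Variables: map[string]cty.Value{"name": cty.StringVal("world")}, // made-up variable
	})
	if diags.HasErrors() {
		panic(diags.Error())
	}
	fmt.Println(val.AsString()) // Hello, world!
}
```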
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go
deleted file mode 100644
index 9177092ce..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/expression_vars.go
+++ /dev/null
@@ -1,76 +0,0 @@
-package hclsyntax
-
-// Generated by expression_vars_gen.go. DO NOT EDIT.
-// Run 'go generate' on this package to update the set of functions here.
-
-import (
- "github.com/hashicorp/hcl2/hcl"
-)
-
-func (e *AnonSymbolExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *BinaryOpExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *ConditionalExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *ForExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *FunctionCallExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *IndexExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *LiteralValueExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *ObjectConsExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *ObjectConsKeyExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *RelativeTraversalExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *ScopeTraversalExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *SplatExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *TemplateExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *TemplateJoinExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *TemplateWrapExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *TupleConsExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
-
-func (e *UnaryOpExpr) Variables() []hcl.Traversal {
- return Variables(e)
-}
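
The generated methods above all delegate to the package-level variable walker, which is what lets callers discover references before supplying an EvalContext. A minimal sketch (the expression string is illustrative):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	expr, diags := hclsyntax.ParseExpression([]byte(`a.b + c[0]`), "vars.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}
	// Variables() reports one traversal per reference in the expression.
	for _, traversal := range expr.Variables() {
		fmt.Println(traversal.RootName()) // prints "a", then "c"
	}
}
```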
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go
deleted file mode 100644
index 490c02556..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/file.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package hclsyntax
-
-import (
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// File is the top-level object resulting from parsing a configuration file.
-type File struct {
- Body *Body
- Bytes []byte
-}
-
-func (f *File) AsHCLFile() *hcl.File {
- return &hcl.File{
- Body: f.Body,
- Bytes: f.Bytes,
-
- // TODO: The Nav object, once we have an implementation of it
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go
deleted file mode 100644
index 841656a6a..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/generate.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package hclsyntax
-
-//go:generate go run expression_vars_gen.go
-//go:generate ruby unicode2ragel.rb --url=http://www.unicode.org/Public/9.0.0/ucd/DerivedCoreProperties.txt -m UnicodeDerived -p ID_Start,ID_Continue -o unicode_derived.rl
-//go:generate ragel -Z scan_tokens.rl
-//go:generate gofmt -w scan_tokens.go
-//go:generate ragel -Z scan_string_lit.rl
-//go:generate gofmt -w scan_string_lit.go
-//go:generate stringer -type TokenType -output token_type_string.go
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go
deleted file mode 100644
index eef8b9626..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/keywords.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package hclsyntax
-
-import (
- "bytes"
-)
-
-type Keyword []byte
-
-var forKeyword = Keyword([]byte{'f', 'o', 'r'})
-var inKeyword = Keyword([]byte{'i', 'n'})
-var ifKeyword = Keyword([]byte{'i', 'f'})
-var elseKeyword = Keyword([]byte{'e', 'l', 's', 'e'})
-var endifKeyword = Keyword([]byte{'e', 'n', 'd', 'i', 'f'})
-var endforKeyword = Keyword([]byte{'e', 'n', 'd', 'f', 'o', 'r'})
-
-func (kw Keyword) TokenMatches(token Token) bool {
- if token.Type != TokenIdent {
- return false
- }
- return bytes.Equal([]byte(kw), token.Bytes)
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go
deleted file mode 100644
index c8c97f37c..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/navigation.go
+++ /dev/null
@@ -1,59 +0,0 @@
-package hclsyntax
-
-import (
- "bytes"
- "fmt"
-
- "github.com/hashicorp/hcl2/hcl"
-)
-
-type navigation struct {
- root *Body
-}
-
-// Implementation of hcled.ContextString
-func (n navigation) ContextString(offset int) string {
- // We will walk our top-level blocks until we find one that contains
- // the given offset, and then construct a representation of the header
- // of the block.
-
- var block *Block
- for _, candidate := range n.root.Blocks {
- if candidate.Range().ContainsOffset(offset) {
- block = candidate
- break
- }
- }
-
- if block == nil {
- return ""
- }
-
- if len(block.Labels) == 0 {
- // Easy case!
- return block.Type
- }
-
- buf := &bytes.Buffer{}
- buf.WriteString(block.Type)
- for _, label := range block.Labels {
- fmt.Fprintf(buf, " %q", label)
- }
- return buf.String()
-}
-
-func (n navigation) ContextDefRange(offset int) hcl.Range {
- var block *Block
- for _, candidate := range n.root.Blocks {
- if candidate.Range().ContainsOffset(offset) {
- block = candidate
- break
- }
- }
-
- if block == nil {
- return hcl.Range{}
- }
-
- return block.DefRange()
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go
deleted file mode 100644
index 75812e63d..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/node.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package hclsyntax
-
-import (
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// Node is the abstract type that every AST node implements.
-//
-// This is a closed interface, so it cannot be implemented from outside of
-// this package.
-type Node interface {
- // This is the mechanism by which the public-facing walk functions
- // are implemented. Implementations should call the given function
- // for each child node and then replace that node with its return value.
- // The return value might just be the same node, for non-transforming
- // walks.
- walkChildNodes(w internalWalkFunc)
-
- Range() hcl.Range
-}
-
-type internalWalkFunc func(Node)
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go
deleted file mode 100644
index 772ebae2b..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser.go
+++ /dev/null
@@ -1,2044 +0,0 @@
-package hclsyntax
-
-import (
- "bytes"
- "fmt"
- "strconv"
- "unicode/utf8"
-
- "github.com/apparentlymart/go-textseg/textseg"
- "github.com/hashicorp/hcl2/hcl"
- "github.com/zclconf/go-cty/cty"
-)
-
-type parser struct {
- *peeker
-
- // set to true if any recovery is attempted. The parser can use this
- // to attempt to reduce error noise by suppressing "bad token" errors
- // in recovery mode, assuming that the recovery heuristics have failed
- // in this case and left the peeker in a wrong place.
- recovery bool
-}
-
-func (p *parser) ParseBody(end TokenType) (*Body, hcl.Diagnostics) {
- attrs := Attributes{}
- blocks := Blocks{}
- var diags hcl.Diagnostics
-
- startRange := p.PrevRange()
- var endRange hcl.Range
-
-Token:
- for {
- next := p.Peek()
- if next.Type == end {
- endRange = p.NextRange()
- p.Read()
- break Token
- }
-
- switch next.Type {
- case TokenNewline:
- p.Read()
- continue
- case TokenIdent:
- item, itemDiags := p.ParseBodyItem()
- diags = append(diags, itemDiags...)
- switch titem := item.(type) {
- case *Block:
- blocks = append(blocks, titem)
- case *Attribute:
- if existing, exists := attrs[titem.Name]; exists {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Attribute redefined",
- Detail: fmt.Sprintf(
- "The argument %q was already set at %s. Each argument may be set only once.",
- titem.Name, existing.NameRange.String(),
- ),
- Subject: &titem.NameRange,
- })
- } else {
- attrs[titem.Name] = titem
- }
- default:
- // This should never happen for valid input, but may if a
- // syntax error was detected in ParseBodyItem that prevented
- // it from even producing a partially-broken item. In that
- // case, it would've left at least one error in the diagnostics
- // slice we already dealt with above.
- //
- // We'll assume ParseBodyItem attempted recovery to leave
- // us in a reasonable position to try parsing the next item.
- continue
- }
- default:
- bad := p.Read()
- if !p.recovery {
- if bad.Type == TokenOQuote {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid argument name",
- Detail: "Argument names must not be quoted.",
- Subject: &bad.Range,
- })
- } else {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Argument or block definition required",
- Detail: "An argument or block definition is required here.",
- Subject: &bad.Range,
- })
- }
- }
- endRange = p.PrevRange() // arbitrary, but somewhere inside the body means better diagnostics
-
- p.recover(end) // attempt to recover to the token after the end of this body
- break Token
- }
- }
-
- return &Body{
- Attributes: attrs,
- Blocks: blocks,
-
- SrcRange: hcl.RangeBetween(startRange, endRange),
- EndRange: hcl.Range{
- Filename: endRange.Filename,
- Start: endRange.End,
- End: endRange.End,
- },
- }, diags
-}
-
-func (p *parser) ParseBodyItem() (Node, hcl.Diagnostics) {
- ident := p.Read()
- if ident.Type != TokenIdent {
- p.recoverAfterBodyItem()
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Argument or block definition required",
- Detail: "An argument or block definition is required here.",
- Subject: &ident.Range,
- },
- }
- }
-
- next := p.Peek()
-
- switch next.Type {
- case TokenEqual:
- return p.finishParsingBodyAttribute(ident, false)
- case TokenOQuote, TokenOBrace, TokenIdent:
- return p.finishParsingBodyBlock(ident)
- default:
- p.recoverAfterBodyItem()
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Argument or block definition required",
- Detail: "An argument or block definition is required here. To set an argument, use the equals sign \"=\" to introduce the argument value.",
- Subject: &ident.Range,
- },
- }
- }
-
- return nil, nil
-}
-
-// parseSingleAttrBody is a weird variant of ParseBody that deals with the
-// body of a nested block containing only one attribute value all on a single
-// line, like foo { bar = baz } . It expects to find a single attribute item
-// immediately followed by the end token type with no intervening newlines.
-func (p *parser) parseSingleAttrBody(end TokenType) (*Body, hcl.Diagnostics) {
- ident := p.Read()
- if ident.Type != TokenIdent {
- p.recoverAfterBodyItem()
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Argument or block definition required",
- Detail: "An argument or block definition is required here.",
- Subject: &ident.Range,
- },
- }
- }
-
- var attr *Attribute
- var diags hcl.Diagnostics
-
- next := p.Peek()
-
- switch next.Type {
- case TokenEqual:
- node, attrDiags := p.finishParsingBodyAttribute(ident, true)
- diags = append(diags, attrDiags...)
- attr = node.(*Attribute)
- case TokenOQuote, TokenOBrace, TokenIdent:
- p.recoverAfterBodyItem()
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Argument definition required",
- Detail: fmt.Sprintf("A single-line block definition can contain only a single argument. If you meant to define argument %q, use an equals sign to assign it a value. To define a nested block, place it on a line of its own within its parent block.", ident.Bytes),
- Subject: hcl.RangeBetween(ident.Range, next.Range).Ptr(),
- },
- }
- default:
- p.recoverAfterBodyItem()
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Argument or block definition required",
- Detail: "An argument or block definition is required here. To set an argument, use the equals sign \"=\" to introduce the argument value.",
- Subject: &ident.Range,
- },
- }
- }
-
- return &Body{
- Attributes: Attributes{
- string(ident.Bytes): attr,
- },
-
- SrcRange: attr.SrcRange,
- EndRange: hcl.Range{
- Filename: attr.SrcRange.Filename,
- Start: attr.SrcRange.End,
- End: attr.SrcRange.End,
- },
- }, diags
-
-}
-
-func (p *parser) finishParsingBodyAttribute(ident Token, singleLine bool) (Node, hcl.Diagnostics) {
- eqTok := p.Read() // eat equals token
- if eqTok.Type != TokenEqual {
- // should never happen if caller behaves
- panic("finishParsingBodyAttribute called with next not equals")
- }
-
- var endRange hcl.Range
-
- expr, diags := p.ParseExpression()
- if p.recovery && diags.HasErrors() {
- // recovery within expressions tends to be tricky, so we've probably
- // landed somewhere weird. We'll try to reset to the start of a body
- // item so parsing can continue.
- endRange = p.PrevRange()
- p.recoverAfterBodyItem()
- } else {
- endRange = p.PrevRange()
- if !singleLine {
- end := p.Peek()
- if end.Type != TokenNewline && end.Type != TokenEOF {
- if !p.recovery {
- summary := "Missing newline after argument"
- detail := "An argument definition must end with a newline."
-
- if end.Type == TokenComma {
- summary = "Unexpected comma after argument"
- detail = "Argument definitions must be separated by newlines, not commas. " + detail
- }
-
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: summary,
- Detail: detail,
- Subject: &end.Range,
- Context: hcl.RangeBetween(ident.Range, end.Range).Ptr(),
- })
- }
- endRange = p.PrevRange()
- p.recoverAfterBodyItem()
- } else {
- endRange = p.PrevRange()
- p.Read() // eat newline
- }
- }
- }
-
- return &Attribute{
- Name: string(ident.Bytes),
- Expr: expr,
-
- SrcRange: hcl.RangeBetween(ident.Range, endRange),
- NameRange: ident.Range,
- EqualsRange: eqTok.Range,
- }, diags
-}
-
-func (p *parser) finishParsingBodyBlock(ident Token) (Node, hcl.Diagnostics) {
- var blockType = string(ident.Bytes)
- var diags hcl.Diagnostics
- var labels []string
- var labelRanges []hcl.Range
-
- var oBrace Token
-
-Token:
- for {
- tok := p.Peek()
-
- switch tok.Type {
-
- case TokenOBrace:
- oBrace = p.Read()
- break Token
-
- case TokenOQuote:
- label, labelRange, labelDiags := p.parseQuotedStringLiteral()
- diags = append(diags, labelDiags...)
- labels = append(labels, label)
- labelRanges = append(labelRanges, labelRange)
- // parseQuoteStringLiteral recovers up to the closing quote
- // if it encounters problems, so we can continue looking for
- // more labels and eventually the block body even.
-
- case TokenIdent:
- tok = p.Read() // eat token
- label, labelRange := string(tok.Bytes), tok.Range
- labels = append(labels, label)
- labelRanges = append(labelRanges, labelRange)
-
- default:
- switch tok.Type {
- case TokenEqual:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid block definition",
- Detail: "The equals sign \"=\" indicates an argument definition, and must not be used when defining a block.",
- Subject: &tok.Range,
- Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
- })
- case TokenNewline:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid block definition",
- Detail: "A block definition must have block content delimited by \"{\" and \"}\", starting on the same line as the block header.",
- Subject: &tok.Range,
- Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
- })
- default:
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid block definition",
- Detail: "Either a quoted string block label or an opening brace (\"{\") is expected here.",
- Subject: &tok.Range,
- Context: hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
- })
- }
- }
-
- p.recoverAfterBodyItem()
-
- return &Block{
- Type: blockType,
- Labels: labels,
- Body: &Body{
- SrcRange: ident.Range,
- EndRange: ident.Range,
- },
-
- TypeRange: ident.Range,
- LabelRanges: labelRanges,
- OpenBraceRange: ident.Range, // placeholder
- CloseBraceRange: ident.Range, // placeholder
- }, diags
- }
- }
-
- // Once we fall out here, the peeker is pointed just after our opening
- // brace, so we can begin our nested body parsing.
- var body *Body
- var bodyDiags hcl.Diagnostics
- switch p.Peek().Type {
- case TokenNewline, TokenEOF, TokenCBrace:
- body, bodyDiags = p.ParseBody(TokenCBrace)
- default:
- // Special one-line, single-attribute block parsing mode.
- body, bodyDiags = p.parseSingleAttrBody(TokenCBrace)
- switch p.Peek().Type {
- case TokenCBrace:
- p.Read() // the happy path - just consume the closing brace
- case TokenComma:
- // User seems to be trying to use the object-constructor
- // comma-separated style, which isn't permitted for blocks.
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid single-argument block definition",
- Detail: "Single-line block syntax can include only one argument definition. To define multiple arguments, use the multi-line block syntax with one argument definition per line.",
- Subject: p.Peek().Range.Ptr(),
- })
- p.recover(TokenCBrace)
- case TokenNewline:
- // We don't allow weird mixtures of single and multi-line syntax.
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid single-argument block definition",
- Detail: "An argument definition on the same line as its containing block creates a single-line block definition, which must also be closed on the same line. Place the block's closing brace immediately after the argument definition.",
- Subject: p.Peek().Range.Ptr(),
- })
- p.recover(TokenCBrace)
- default:
- // Some other weird thing is going on. Since we can't guess a likely
- // user intent for this one, we'll skip it if we're already in
- // recovery mode.
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid single-argument block definition",
- Detail: "A single-line block definition must end with a closing brace immediately after its single argument definition.",
- Subject: p.Peek().Range.Ptr(),
- })
- }
- p.recover(TokenCBrace)
- }
- }
- diags = append(diags, bodyDiags...)
- cBraceRange := p.PrevRange()
-
- eol := p.Peek()
- if eol.Type == TokenNewline || eol.Type == TokenEOF {
- p.Read() // eat newline
- } else {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing newline after block definition",
- Detail: "A block definition must end with a newline.",
- Subject: &eol.Range,
- Context: hcl.RangeBetween(ident.Range, eol.Range).Ptr(),
- })
- }
- p.recoverAfterBodyItem()
- }
-
- // We must never produce a nil body, since the caller may attempt to
- // do analysis of a partial result when there's an error, so we'll
- // insert a placeholder if we otherwise failed to produce a valid
- // body due to one of the syntax error paths above.
- if body == nil && diags.HasErrors() {
- body = &Body{
- SrcRange: hcl.RangeBetween(oBrace.Range, cBraceRange),
- EndRange: cBraceRange,
- }
- }
-
- return &Block{
- Type: blockType,
- Labels: labels,
- Body: body,
-
- TypeRange: ident.Range,
- LabelRanges: labelRanges,
- OpenBraceRange: oBrace.Range,
- CloseBraceRange: cBraceRange,
- }, diags
-}
-
-func (p *parser) ParseExpression() (Expression, hcl.Diagnostics) {
- return p.parseTernaryConditional()
-}
-
-func (p *parser) parseTernaryConditional() (Expression, hcl.Diagnostics) {
- // The ternary conditional operator (.. ? .. : ..) behaves somewhat
- // like a binary operator except that the "symbol" is itself
- // an expression enclosed in two punctuation characters.
- // The middle expression is parsed as if the ? and : symbols
- // were parentheses. The "rhs" (the "false expression") is then
- // treated right-associatively so it behaves similarly to the
- // middle in terms of precedence.
-
- startRange := p.NextRange()
- var condExpr, trueExpr, falseExpr Expression
- var diags hcl.Diagnostics
-
- condExpr, condDiags := p.parseBinaryOps(binaryOps)
- diags = append(diags, condDiags...)
- if p.recovery && condDiags.HasErrors() {
- return condExpr, diags
- }
-
- questionMark := p.Peek()
- if questionMark.Type != TokenQuestion {
- return condExpr, diags
- }
-
- p.Read() // eat question mark
-
- trueExpr, trueDiags := p.ParseExpression()
- diags = append(diags, trueDiags...)
- if p.recovery && trueDiags.HasErrors() {
- return condExpr, diags
- }
-
- colon := p.Peek()
- if colon.Type != TokenColon {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing false expression in conditional",
- Detail: "The conditional operator (...?...:...) requires a false expression, delimited by a colon.",
- Subject: &colon.Range,
- Context: hcl.RangeBetween(startRange, colon.Range).Ptr(),
- })
- return condExpr, diags
- }
-
- p.Read() // eat colon
-
- falseExpr, falseDiags := p.ParseExpression()
- diags = append(diags, falseDiags...)
- if p.recovery && falseDiags.HasErrors() {
- return condExpr, diags
- }
-
- return &ConditionalExpr{
- Condition: condExpr,
- TrueResult: trueExpr,
- FalseResult: falseExpr,
-
- SrcRange: hcl.RangeBetween(startRange, falseExpr.Range()),
- }, diags
-}
-
-// parseBinaryOps calls itself recursively to work through all of the
-// operator precedence groups, and then eventually calls parseExpressionTerm
-// for each operand.
-func (p *parser) parseBinaryOps(ops []map[TokenType]*Operation) (Expression, hcl.Diagnostics) {
- if len(ops) == 0 {
- // We've run out of operators, so now we'll just try to parse a term.
- return p.parseExpressionWithTraversals()
- }
-
- thisLevel := ops[0]
- remaining := ops[1:]
-
- var lhs, rhs Expression
- var operation *Operation
- var diags hcl.Diagnostics
-
- // Parse a term that might be the first operand of a binary
- // operation or it might just be a standalone term.
- // We won't know until we've parsed it and can look ahead
- // to see if there's an operator token for this level.
- lhs, lhsDiags := p.parseBinaryOps(remaining)
- diags = append(diags, lhsDiags...)
- if p.recovery && lhsDiags.HasErrors() {
- return lhs, diags
- }
-
- // We'll keep eating up operators until we run out, so that operators
- // with the same precedence will combine in a left-associative manner:
- // a+b+c => (a+b)+c, not a+(b+c)
- //
- // Should we later want to have right-associative operators, a way
- // to achieve that would be to call back up to ParseExpression here
- // instead of iteratively parsing only the remaining operators.
- for {
- next := p.Peek()
- var newOp *Operation
- var ok bool
- if newOp, ok = thisLevel[next.Type]; !ok {
- break
- }
-
- // Are we extending an expression started on the previous iteration?
- if operation != nil {
- lhs = &BinaryOpExpr{
- LHS: lhs,
- Op: operation,
- RHS: rhs,
-
- SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()),
- }
- }
-
- operation = newOp
- p.Read() // eat operator token
- var rhsDiags hcl.Diagnostics
- rhs, rhsDiags = p.parseBinaryOps(remaining)
- diags = append(diags, rhsDiags...)
- if p.recovery && rhsDiags.HasErrors() {
- return lhs, diags
- }
- }
-
- if operation == nil {
- return lhs, diags
- }
-
- return &BinaryOpExpr{
- LHS: lhs,
- Op: operation,
- RHS: rhs,
-
- SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()),
- }, diags
-}
-
-func (p *parser) parseExpressionWithTraversals() (Expression, hcl.Diagnostics) {
- term, diags := p.parseExpressionTerm()
- ret, moreDiags := p.parseExpressionTraversals(term)
- diags = append(diags, moreDiags...)
- return ret, diags
-}
-
-func (p *parser) parseExpressionTraversals(from Expression) (Expression, hcl.Diagnostics) {
- var diags hcl.Diagnostics
- ret := from
-
-Traversal:
- for {
- next := p.Peek()
-
- switch next.Type {
- case TokenDot:
- // Attribute access or splat
- dot := p.Read()
- attrTok := p.Peek()
-
- switch attrTok.Type {
- case TokenIdent:
- attrTok = p.Read() // eat token
- name := string(attrTok.Bytes)
- rng := hcl.RangeBetween(dot.Range, attrTok.Range)
- step := hcl.TraverseAttr{
- Name: name,
- SrcRange: rng,
- }
-
- ret = makeRelativeTraversal(ret, step, rng)
-
- case TokenNumberLit:
- // This is a weird form we inherited from HIL, allowing numbers
- // to be used as attributes as a weird way of writing [n].
- // This was never actually a first-class thing in HIL, but
- // HIL tolerated sequences like .0. in its variable names and
- // calling applications like Terraform exploited that to
- // introduce indexing syntax where none existed.
- numTok := p.Read() // eat token
- attrTok = numTok
-
- // This syntax is ambiguous if multiple indices are used in
- // succession, like foo.0.1.baz: that actually parses as
- // a fractional number 0.1. Since we're only supporting this
- // syntax for compatibility with legacy Terraform
- // configurations, and Terraform does not tend to have lists
- // of lists, we'll choose to reject that here with a helpful
- // error message, rather than failing later because the index
- // isn't a whole number.
- if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
- first := numTok.Bytes[:dotIdx]
- second := numTok.Bytes[dotIdx+1:]
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid legacy index syntax",
- Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax instead, like [%s][%s].", first, second),
- Subject: &attrTok.Range,
- })
- rng := hcl.RangeBetween(dot.Range, numTok.Range)
- step := hcl.TraverseIndex{
- Key: cty.DynamicVal,
- SrcRange: rng,
- }
- ret = makeRelativeTraversal(ret, step, rng)
- break
- }
-
- numVal, numDiags := p.numberLitValue(numTok)
- diags = append(diags, numDiags...)
-
- rng := hcl.RangeBetween(dot.Range, numTok.Range)
- step := hcl.TraverseIndex{
- Key: numVal,
- SrcRange: rng,
- }
-
- ret = makeRelativeTraversal(ret, step, rng)
-
- case TokenStar:
- // "Attribute-only" splat expression.
- // (This is a kinda weird construct inherited from HIL, which
- // behaves a bit like a [*] splat except that it is only able
- // to do attribute traversals into each of its elements,
- // whereas foo[*] can support _any_ traversal.
- marker := p.Read() // eat star
- trav := make(hcl.Traversal, 0, 1)
- var firstRange, lastRange hcl.Range
- firstRange = p.NextRange()
- for p.Peek().Type == TokenDot {
- dot := p.Read()
-
- if p.Peek().Type == TokenNumberLit {
- // Continuing the "weird stuff inherited from HIL"
- // theme, we also allow numbers as attribute names
- // inside splats and interpret them as indexing
- // into a list, for expressions like:
- // foo.bar.*.baz.0.foo
- numTok := p.Read()
-
- // Weird special case if the user writes something
- // like foo.bar.*.baz.0.0.foo, where 0.0 parses
- // as a number.
- if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
- first := numTok.Bytes[:dotIdx]
- second := numTok.Bytes[dotIdx+1:]
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid legacy index syntax",
- Detail: fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax with a full splat expression [*] instead, like [%s][%s].", first, second),
- Subject: &attrTok.Range,
- })
- trav = append(trav, hcl.TraverseIndex{
- Key: cty.DynamicVal,
- SrcRange: hcl.RangeBetween(dot.Range, numTok.Range),
- })
- lastRange = numTok.Range
- continue
- }
-
- numVal, numDiags := p.numberLitValue(numTok)
- diags = append(diags, numDiags...)
- trav = append(trav, hcl.TraverseIndex{
- Key: numVal,
- SrcRange: hcl.RangeBetween(dot.Range, numTok.Range),
- })
- lastRange = numTok.Range
- continue
- }
-
- if p.Peek().Type != TokenIdent {
- if !p.recovery {
- if p.Peek().Type == TokenStar {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Nested splat expression not allowed",
- Detail: "A splat expression (*) cannot be used inside another attribute-only splat expression.",
- Subject: p.Peek().Range.Ptr(),
- })
- } else {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid attribute name",
- Detail: "An attribute name is required after a dot.",
- Subject: &attrTok.Range,
- })
- }
- }
- p.setRecovery()
- continue Traversal
- }
-
- attrTok := p.Read()
- trav = append(trav, hcl.TraverseAttr{
- Name: string(attrTok.Bytes),
- SrcRange: hcl.RangeBetween(dot.Range, attrTok.Range),
- })
- lastRange = attrTok.Range
- }
-
- itemExpr := &AnonSymbolExpr{
- SrcRange: hcl.RangeBetween(dot.Range, marker.Range),
- }
- var travExpr Expression
- if len(trav) == 0 {
- travExpr = itemExpr
- } else {
- travExpr = &RelativeTraversalExpr{
- Source: itemExpr,
- Traversal: trav,
- SrcRange: hcl.RangeBetween(firstRange, lastRange),
- }
- }
-
- ret = &SplatExpr{
- Source: ret,
- Each: travExpr,
- Item: itemExpr,
-
- SrcRange: hcl.RangeBetween(dot.Range, lastRange),
- MarkerRange: hcl.RangeBetween(dot.Range, marker.Range),
- }
-
- default:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid attribute name",
- Detail: "An attribute name is required after a dot.",
- Subject: &attrTok.Range,
- })
- // This leaves the peeker in a bad place, so following items
- // will probably be misparsed until we hit something that
- // allows us to re-sync.
- //
- // We will probably need to do something better here eventually
- // in order to support autocomplete triggered by typing a
- // period.
- p.setRecovery()
- }
-
- case TokenOBrack:
- // Indexing of a collection.
- // This may or may not be a hcl.Traverser, depending on whether
- // the key value is something constant.
-
- open := p.Read()
- switch p.Peek().Type {
- case TokenStar:
- // This is a full splat expression, like foo[*], which consumes
- // the rest of the traversal steps after it using a recursive
- // call to this function.
- p.Read() // consume star
- close := p.Read()
- if close.Type != TokenCBrack && !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing close bracket on splat index",
- Detail: "The star for a full splat operator must be immediately followed by a closing bracket (\"]\").",
- Subject: &close.Range,
- })
- close = p.recover(TokenCBrack)
- }
- // Splat expressions use a special "anonymous symbol" as a
- // placeholder in an expression to be evaluated once for each
- // item in the source expression.
- itemExpr := &AnonSymbolExpr{
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- }
- // Now we'll recursively call this same function to eat any
- // remaining traversal steps against the anonymous symbol.
- travExpr, nestedDiags := p.parseExpressionTraversals(itemExpr)
- diags = append(diags, nestedDiags...)
-
- ret = &SplatExpr{
- Source: ret,
- Each: travExpr,
- Item: itemExpr,
-
- SrcRange: hcl.RangeBetween(open.Range, travExpr.Range()),
- MarkerRange: hcl.RangeBetween(open.Range, close.Range),
- }
-
- default:
-
- var close Token
- p.PushIncludeNewlines(false) // arbitrary newlines allowed in brackets
- keyExpr, keyDiags := p.ParseExpression()
- diags = append(diags, keyDiags...)
- if p.recovery && keyDiags.HasErrors() {
- close = p.recover(TokenCBrack)
- } else {
- close = p.Read()
- if close.Type != TokenCBrack && !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing close bracket on index",
- Detail: "The index operator must end with a closing bracket (\"]\").",
- Subject: &close.Range,
- })
- close = p.recover(TokenCBrack)
- }
- }
- p.PopIncludeNewlines()
-
- if lit, isLit := keyExpr.(*LiteralValueExpr); isLit {
- litKey, _ := lit.Value(nil)
- rng := hcl.RangeBetween(open.Range, close.Range)
- step := hcl.TraverseIndex{
- Key: litKey,
- SrcRange: rng,
- }
- ret = makeRelativeTraversal(ret, step, rng)
- } else if tmpl, isTmpl := keyExpr.(*TemplateExpr); isTmpl && tmpl.IsStringLiteral() {
- litKey, _ := tmpl.Value(nil)
- rng := hcl.RangeBetween(open.Range, close.Range)
- step := hcl.TraverseIndex{
- Key: litKey,
- SrcRange: rng,
- }
- ret = makeRelativeTraversal(ret, step, rng)
- } else {
- rng := hcl.RangeBetween(open.Range, close.Range)
- ret = &IndexExpr{
- Collection: ret,
- Key: keyExpr,
-
- SrcRange: rng,
- OpenRange: open.Range,
- }
- }
- }
-
- default:
- break Traversal
- }
- }
-
- return ret, diags
-}
-
-// makeRelativeTraversal takes an expression and a traverser and returns
-// a traversal expression that combines the two. If the given expression
-// is already a traversal, it is extended in place (mutating it) and
-// returned. If it isn't, a new RelativeTraversalExpr is created and returned.
-func makeRelativeTraversal(expr Expression, next hcl.Traverser, rng hcl.Range) Expression {
- switch texpr := expr.(type) {
- case *ScopeTraversalExpr:
- texpr.Traversal = append(texpr.Traversal, next)
- texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng)
- return texpr
- case *RelativeTraversalExpr:
- texpr.Traversal = append(texpr.Traversal, next)
- texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng)
- return texpr
- default:
- return &RelativeTraversalExpr{
- Source: expr,
- Traversal: hcl.Traversal{next},
- SrcRange: rng,
- }
- }
-}
-
-func (p *parser) parseExpressionTerm() (Expression, hcl.Diagnostics) {
- start := p.Peek()
-
- switch start.Type {
- case TokenOParen:
- p.Read() // eat open paren
-
- p.PushIncludeNewlines(false)
-
- expr, diags := p.ParseExpression()
- if diags.HasErrors() {
- // attempt to place the peeker after our closing paren
- // before we return, so that the next parser has some
- // chance of finding a valid expression.
- p.recover(TokenCParen)
- p.PopIncludeNewlines()
- return expr, diags
- }
-
- close := p.Peek()
- if close.Type != TokenCParen {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unbalanced parentheses",
- Detail: "Expected a closing parenthesis to terminate the expression.",
- Subject: &close.Range,
- Context: hcl.RangeBetween(start.Range, close.Range).Ptr(),
- })
- p.setRecovery()
- }
-
- p.Read() // eat closing paren
- p.PopIncludeNewlines()
-
- return expr, diags
-
- case TokenNumberLit:
- tok := p.Read() // eat number token
-
- numVal, diags := p.numberLitValue(tok)
- return &LiteralValueExpr{
- Val: numVal,
- SrcRange: tok.Range,
- }, diags
-
- case TokenIdent:
- tok := p.Read() // eat identifier token
-
- if p.Peek().Type == TokenOParen {
- return p.finishParsingFunctionCall(tok)
- }
-
- name := string(tok.Bytes)
- switch name {
- case "true":
- return &LiteralValueExpr{
- Val: cty.True,
- SrcRange: tok.Range,
- }, nil
- case "false":
- return &LiteralValueExpr{
- Val: cty.False,
- SrcRange: tok.Range,
- }, nil
- case "null":
- return &LiteralValueExpr{
- Val: cty.NullVal(cty.DynamicPseudoType),
- SrcRange: tok.Range,
- }, nil
- default:
- return &ScopeTraversalExpr{
- Traversal: hcl.Traversal{
- hcl.TraverseRoot{
- Name: name,
- SrcRange: tok.Range,
- },
- },
- SrcRange: tok.Range,
- }, nil
- }
-
- case TokenOQuote, TokenOHeredoc:
- open := p.Read() // eat opening marker
- closer := p.oppositeBracket(open.Type)
- exprs, passthru, _, diags := p.parseTemplateInner(closer, tokenOpensFlushHeredoc(open))
-
- closeRange := p.PrevRange()
-
- if passthru {
- if len(exprs) != 1 {
- panic("passthru set with len(exprs) != 1")
- }
- return &TemplateWrapExpr{
- Wrapped: exprs[0],
- SrcRange: hcl.RangeBetween(open.Range, closeRange),
- }, diags
- }
-
- return &TemplateExpr{
- Parts: exprs,
- SrcRange: hcl.RangeBetween(open.Range, closeRange),
- }, diags
-
- case TokenMinus:
- tok := p.Read() // eat minus token
-
- // Important to use parseExpressionWithTraversals rather than parseExpression
- // here, otherwise we can capture a following binary expression into
- // our negation.
- // e.g. -46+5 should parse as (-46)+5, not -(46+5)
- operand, diags := p.parseExpressionWithTraversals()
- return &UnaryOpExpr{
- Op: OpNegate,
- Val: operand,
-
- SrcRange: hcl.RangeBetween(tok.Range, operand.Range()),
- SymbolRange: tok.Range,
- }, diags
-
- case TokenBang:
- tok := p.Read() // eat bang token
-
- // Important to use parseExpressionWithTraversals rather than parseExpression
- // here, otherwise we can capture a following binary expression into
- // our negation.
- operand, diags := p.parseExpressionWithTraversals()
- return &UnaryOpExpr{
- Op: OpLogicalNot,
- Val: operand,
-
- SrcRange: hcl.RangeBetween(tok.Range, operand.Range()),
- SymbolRange: tok.Range,
- }, diags
-
- case TokenOBrack:
- return p.parseTupleCons()
-
- case TokenOBrace:
- return p.parseObjectCons()
-
- default:
- var diags hcl.Diagnostics
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid expression",
- Detail: "Expected the start of an expression, but found an invalid expression token.",
- Subject: &start.Range,
- })
- }
- p.setRecovery()
-
- // Return a placeholder so that the AST is still structurally sound
- // even in the presence of parse errors.
- return &LiteralValueExpr{
- Val: cty.DynamicVal,
- SrcRange: start.Range,
- }, diags
- }
-}
-
-func (p *parser) numberLitValue(tok Token) (cty.Value, hcl.Diagnostics) {
- // The cty.ParseNumberVal is always the same behavior as converting a
- // string to a number, ensuring we always interpret decimal numbers in
- // the same way.
- numVal, err := cty.ParseNumberVal(string(tok.Bytes))
- if err != nil {
- ret := cty.UnknownVal(cty.Number)
- return ret, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Invalid number literal",
- // FIXME: not a very good error message, but convert only
- // gives us "a number is required", so not much help either.
- Detail: "Failed to recognize the value of this number literal.",
- Subject: &tok.Range,
- },
- }
- }
- return numVal, nil
-}
-
-// finishParsingFunctionCall parses a function call assuming that the function
-// name was already read, and so the peeker should be pointing at the opening
-// parenthesis after the name.
-func (p *parser) finishParsingFunctionCall(name Token) (Expression, hcl.Diagnostics) {
- openTok := p.Read()
- if openTok.Type != TokenOParen {
- // should never happen if callers behave
- panic("finishParsingFunctionCall called with non-parenthesis as next token")
- }
-
- var args []Expression
- var diags hcl.Diagnostics
- var expandFinal bool
- var closeTok Token
-
- // Arbitrary newlines are allowed inside the function call parentheses.
- p.PushIncludeNewlines(false)
-
-Token:
- for {
- tok := p.Peek()
-
- if tok.Type == TokenCParen {
- closeTok = p.Read() // eat closing paren
- break Token
- }
-
- arg, argDiags := p.ParseExpression()
- args = append(args, arg)
- diags = append(diags, argDiags...)
- if p.recovery && argDiags.HasErrors() {
- // if there was a parse error in the argument then we've
- // probably been left in a weird place in the token stream,
- // so we'll bail out with a partial argument list.
- p.recover(TokenCParen)
- break Token
- }
-
- sep := p.Read()
- if sep.Type == TokenCParen {
- closeTok = sep
- break Token
- }
-
- if sep.Type == TokenEllipsis {
- expandFinal = true
-
- if p.Peek().Type != TokenCParen {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing closing parenthesis",
- Detail: "An expanded function argument (with ...) must be immediately followed by closing parentheses.",
- Subject: &sep.Range,
- Context: hcl.RangeBetween(name.Range, sep.Range).Ptr(),
- })
- }
- closeTok = p.recover(TokenCParen)
- } else {
- closeTok = p.Read() // eat closing paren
- }
- break Token
- }
-
- if sep.Type != TokenComma {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing argument separator",
- Detail: "A comma is required to separate each function argument from the next.",
- Subject: &sep.Range,
- Context: hcl.RangeBetween(name.Range, sep.Range).Ptr(),
- })
- closeTok = p.recover(TokenCParen)
- break Token
- }
-
- if p.Peek().Type == TokenCParen {
- // A trailing comma after the last argument gets us in here.
- closeTok = p.Read() // eat closing paren
- break Token
- }
-
- }
-
- p.PopIncludeNewlines()
-
- return &FunctionCallExpr{
- Name: string(name.Bytes),
- Args: args,
-
- ExpandFinal: expandFinal,
-
- NameRange: name.Range,
- OpenParenRange: openTok.Range,
- CloseParenRange: closeTok.Range,
- }, diags
-}
-
-func (p *parser) parseTupleCons() (Expression, hcl.Diagnostics) {
- open := p.Read()
- if open.Type != TokenOBrack {
- // Should never happen if callers are behaving
- panic("parseTupleCons called without peeker pointing to open bracket")
- }
-
- p.PushIncludeNewlines(false)
- defer p.PopIncludeNewlines()
-
- if forKeyword.TokenMatches(p.Peek()) {
- return p.finishParsingForExpr(open)
- }
-
- var close Token
-
- var diags hcl.Diagnostics
- var exprs []Expression
-
- for {
- next := p.Peek()
- if next.Type == TokenCBrack {
- close = p.Read() // eat closer
- break
- }
-
- expr, exprDiags := p.ParseExpression()
- exprs = append(exprs, expr)
- diags = append(diags, exprDiags...)
-
- if p.recovery && exprDiags.HasErrors() {
- // If expression parsing failed then we are probably in a strange
- // place in the token stream, so we'll bail out and try to reset
- // to after our closing bracket to allow parsing to continue.
- close = p.recover(TokenCBrack)
- break
- }
-
- next = p.Peek()
- if next.Type == TokenCBrack {
- close = p.Read() // eat closer
- break
- }
-
- if next.Type != TokenComma {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing item separator",
- Detail: "Expected a comma to mark the beginning of the next item.",
- Subject: &next.Range,
- Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
- })
- }
- close = p.recover(TokenCBrack)
- break
- }
-
- p.Read() // eat comma
-
- }
-
- return &TupleConsExpr{
- Exprs: exprs,
-
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- OpenRange: open.Range,
- }, diags
-}
-
-func (p *parser) parseObjectCons() (Expression, hcl.Diagnostics) {
- open := p.Read()
- if open.Type != TokenOBrace {
- // Should never happen if callers are behaving
- panic("parseObjectCons called without peeker pointing to open brace")
- }
-
- // We must temporarily stop looking at newlines here while we check for
- // a "for" keyword, since for expressions are _not_ newline-sensitive,
- // even though object constructors are.
- p.PushIncludeNewlines(false)
- isFor := forKeyword.TokenMatches(p.Peek())
- p.PopIncludeNewlines()
- if isFor {
- return p.finishParsingForExpr(open)
- }
-
- p.PushIncludeNewlines(true)
- defer p.PopIncludeNewlines()
-
- var close Token
-
- var diags hcl.Diagnostics
- var items []ObjectConsItem
-
- for {
- next := p.Peek()
- if next.Type == TokenNewline {
- p.Read() // eat newline
- continue
- }
-
- if next.Type == TokenCBrace {
- close = p.Read() // eat closer
- break
- }
-
- var key Expression
- var keyDiags hcl.Diagnostics
- key, keyDiags = p.ParseExpression()
- diags = append(diags, keyDiags...)
-
- if p.recovery && keyDiags.HasErrors() {
- // If expression parsing failed then we are probably in a strange
- // place in the token stream, so we'll bail out and try to reset
- // to after our closing brace to allow parsing to continue.
- close = p.recover(TokenCBrace)
- break
- }
-
- // We wrap up the key expression in a special wrapper that deals
- // with our special case that naked identifiers as object keys
- // are interpreted as literal strings.
- key = &ObjectConsKeyExpr{Wrapped: key}
-
- next = p.Peek()
- if next.Type != TokenEqual && next.Type != TokenColon {
- if !p.recovery {
- switch next.Type {
- case TokenNewline, TokenComma:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing attribute value",
- Detail: "Expected an attribute value, introduced by an equals sign (\"=\").",
- Subject: &next.Range,
- Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
- })
- case TokenIdent:
- // Although this might just be a plain old missing equals
- // sign before a reference, one way to get here is to try
- // to write an attribute name containing a period followed
- // by a digit, which was valid in HCL1, like this:
- // foo1.2_bar = "baz"
- // We can't know exactly what the user intended here, but
- // we'll augment our message with an extra hint in this case
- // in case it is helpful.
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing key/value separator",
- Detail: "Expected an equals sign (\"=\") to mark the beginning of the attribute value. If you intended to given an attribute name containing periods or spaces, write the name in quotes to create a string literal.",
- Subject: &next.Range,
- Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
- })
- default:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing key/value separator",
- Detail: "Expected an equals sign (\"=\") to mark the beginning of the attribute value.",
- Subject: &next.Range,
- Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
- })
- }
- }
- close = p.recover(TokenCBrace)
- break
- }
-
- p.Read() // eat equals sign or colon
-
- value, valueDiags := p.ParseExpression()
- diags = append(diags, valueDiags...)
-
- if p.recovery && valueDiags.HasErrors() {
- // If expression parsing failed then we are probably in a strange
- // place in the token stream, so we'll bail out and try to reset
- // to after our closing brace to allow parsing to continue.
- close = p.recover(TokenCBrace)
- break
- }
-
- items = append(items, ObjectConsItem{
- KeyExpr: key,
- ValueExpr: value,
- })
-
- next = p.Peek()
- if next.Type == TokenCBrace {
- close = p.Read() // eat closer
- break
- }
-
- if next.Type != TokenComma && next.Type != TokenNewline {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing attribute separator",
- Detail: "Expected a newline or comma to mark the beginning of the next attribute.",
- Subject: &next.Range,
- Context: hcl.RangeBetween(open.Range, next.Range).Ptr(),
- })
- }
- close = p.recover(TokenCBrace)
- break
- }
-
- p.Read() // eat comma or newline
-
- }
-
- return &ObjectConsExpr{
- Items: items,
-
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- OpenRange: open.Range,
- }, diags
-}
-
-func (p *parser) finishParsingForExpr(open Token) (Expression, hcl.Diagnostics) {
- p.PushIncludeNewlines(false)
- defer p.PopIncludeNewlines()
- introducer := p.Read()
- if !forKeyword.TokenMatches(introducer) {
- // Should never happen if callers are behaving
- panic("finishParsingForExpr called without peeker pointing to 'for' identifier")
- }
-
- var makeObj bool
- var closeType TokenType
- switch open.Type {
- case TokenOBrace:
- makeObj = true
- closeType = TokenCBrace
- case TokenOBrack:
- makeObj = false // making a tuple
- closeType = TokenCBrack
- default:
- // Should never happen if callers are behaving
- panic("finishParsingForExpr called with invalid open token")
- }
-
- var diags hcl.Diagnostics
- var keyName, valName string
-
- if p.Peek().Type != TokenIdent {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' expression",
- Detail: "For expression requires variable name after 'for'.",
- Subject: p.Peek().Range.Ptr(),
- Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
- })
- }
- close := p.recover(closeType)
- return &LiteralValueExpr{
- Val: cty.DynamicVal,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- }, diags
- }
-
- valName = string(p.Read().Bytes)
-
- if p.Peek().Type == TokenComma {
- // What we just read was actually the key, then.
- keyName = valName
- p.Read() // eat comma
-
- if p.Peek().Type != TokenIdent {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' expression",
- Detail: "For expression requires value variable name after comma.",
- Subject: p.Peek().Range.Ptr(),
- Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
- })
- }
- close := p.recover(closeType)
- return &LiteralValueExpr{
- Val: cty.DynamicVal,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- }, diags
- }
-
- valName = string(p.Read().Bytes)
- }
-
- if !inKeyword.TokenMatches(p.Peek()) {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' expression",
- Detail: "For expression requires the 'in' keyword after its name declarations.",
- Subject: p.Peek().Range.Ptr(),
- Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
- })
- }
- close := p.recover(closeType)
- return &LiteralValueExpr{
- Val: cty.DynamicVal,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- }, diags
- }
- p.Read() // eat 'in' keyword
-
- collExpr, collDiags := p.ParseExpression()
- diags = append(diags, collDiags...)
- if p.recovery && collDiags.HasErrors() {
- close := p.recover(closeType)
- return &LiteralValueExpr{
- Val: cty.DynamicVal,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- }, diags
- }
-
- if p.Peek().Type != TokenColon {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' expression",
- Detail: "For expression requires a colon after the collection expression.",
- Subject: p.Peek().Range.Ptr(),
- Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
- })
- }
- close := p.recover(closeType)
- return &LiteralValueExpr{
- Val: cty.DynamicVal,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- }, diags
- }
- p.Read() // eat colon
-
- var keyExpr, valExpr Expression
- var keyDiags, valDiags hcl.Diagnostics
- valExpr, valDiags = p.ParseExpression()
- if p.Peek().Type == TokenFatArrow {
- // What we just parsed was actually keyExpr
- p.Read() // eat the fat arrow
- keyExpr, keyDiags = valExpr, valDiags
-
- valExpr, valDiags = p.ParseExpression()
- }
- diags = append(diags, keyDiags...)
- diags = append(diags, valDiags...)
- if p.recovery && (keyDiags.HasErrors() || valDiags.HasErrors()) {
- close := p.recover(closeType)
- return &LiteralValueExpr{
- Val: cty.DynamicVal,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- }, diags
- }
-
- group := false
- var ellipsis Token
- if p.Peek().Type == TokenEllipsis {
- ellipsis = p.Read()
- group = true
- }
-
- var condExpr Expression
- var condDiags hcl.Diagnostics
- if ifKeyword.TokenMatches(p.Peek()) {
- p.Read() // eat "if"
- condExpr, condDiags = p.ParseExpression()
- diags = append(diags, condDiags...)
- if p.recovery && condDiags.HasErrors() {
- close := p.recover(p.oppositeBracket(open.Type))
- return &LiteralValueExpr{
- Val: cty.DynamicVal,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- }, diags
- }
- }
-
- var close Token
- if p.Peek().Type == closeType {
- close = p.Read()
- } else {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' expression",
- Detail: "Extra characters after the end of the 'for' expression.",
- Subject: p.Peek().Range.Ptr(),
- Context: hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
- })
- }
- close = p.recover(closeType)
- }
-
- if !makeObj {
- if keyExpr != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' expression",
- Detail: "Key expression is not valid when building a tuple.",
- Subject: keyExpr.Range().Ptr(),
- Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
- })
- }
-
- if group {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' expression",
- Detail: "Grouping ellipsis (...) cannot be used when building a tuple.",
- Subject: &ellipsis.Range,
- Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
- })
- }
- } else {
- if keyExpr == nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' expression",
- Detail: "Key expression is required when building an object.",
- Subject: valExpr.Range().Ptr(),
- Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
- })
- }
- }
-
- return &ForExpr{
- KeyVar: keyName,
- ValVar: valName,
- CollExpr: collExpr,
- KeyExpr: keyExpr,
- ValExpr: valExpr,
- CondExpr: condExpr,
- Group: group,
-
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- OpenRange: open.Range,
- CloseRange: close.Range,
- }, diags
-}
-
-// parseQuotedStringLiteral is a helper for parsing quoted strings that
-// aren't allowed to contain any interpolations, such as block labels.
-func (p *parser) parseQuotedStringLiteral() (string, hcl.Range, hcl.Diagnostics) {
- oQuote := p.Read()
- if oQuote.Type != TokenOQuote {
- return "", oQuote.Range, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Invalid string literal",
- Detail: "A quoted string is required here.",
- Subject: &oQuote.Range,
- },
- }
- }
-
- var diags hcl.Diagnostics
- ret := &bytes.Buffer{}
- var cQuote Token
-
-Token:
- for {
- tok := p.Read()
- switch tok.Type {
-
- case TokenCQuote:
- cQuote = tok
- break Token
-
- case TokenQuotedLit:
- s, sDiags := p.decodeStringLit(tok)
- diags = append(diags, sDiags...)
- ret.WriteString(s)
-
- case TokenTemplateControl, TokenTemplateInterp:
- which := "$"
- if tok.Type == TokenTemplateControl {
- which = "%"
- }
-
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid string literal",
- Detail: fmt.Sprintf(
- "Template sequences are not allowed in this string. To include a literal %q, double it (as \"%s%s\") to escape it.",
- which, which, which,
- ),
- Subject: &tok.Range,
- Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
- })
-
- // Now that we're returning an error callers won't attempt to use
- // the result for any real operations, but they might try to use
- // the partial AST for other analyses, so we'll leave a marker
- // to indicate that there was something invalid in the string to
- // help avoid misinterpretation of the partial result
- ret.WriteString(which)
- ret.WriteString("{ ... }")
-
- p.recover(TokenTemplateSeqEnd) // we'll try to keep parsing after the sequence ends
-
- case TokenEOF:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unterminated string literal",
- Detail: "Unable to find the closing quote mark before the end of the file.",
- Subject: &tok.Range,
- Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
- })
- break Token
-
- default:
- // Should never happen, as long as the scanner is behaving itself
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid string literal",
- Detail: "This item is not valid in a string literal.",
- Subject: &tok.Range,
- Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
- })
- p.recover(TokenCQuote)
- break Token
-
- }
-
- }
-
- return ret.String(), hcl.RangeBetween(oQuote.Range, cQuote.Range), diags
-}
-
-// decodeStringLit processes the given token, which must be either a
-// TokenQuotedLit or a TokenStringLit, returning the string resulting from
-// resolving any escape sequences.
-//
-// If any error diagnostics are returned, the returned string may be incomplete
-// or otherwise invalid.
-func (p *parser) decodeStringLit(tok Token) (string, hcl.Diagnostics) {
- var quoted bool
- switch tok.Type {
- case TokenQuotedLit:
- quoted = true
- case TokenStringLit:
- quoted = false
- default:
- panic("decodeQuotedLit can only be used with TokenStringLit and TokenQuotedLit tokens")
- }
- var diags hcl.Diagnostics
-
- ret := make([]byte, 0, len(tok.Bytes))
- slices := scanStringLit(tok.Bytes, quoted)
-
- // We will mutate rng constantly as we walk through our token slices below.
- // Any diagnostics must take a copy of this rng rather than simply pointing
- // to it, e.g. by using rng.Ptr() rather than &rng.
- rng := tok.Range
- rng.End = rng.Start
-
-Slices:
- for _, slice := range slices {
- if len(slice) == 0 {
- continue
- }
-
- // Advance the start of our range to where the previous token ended
- rng.Start = rng.End
-
- // Advance the end of our range to after our token.
- b := slice
- for len(b) > 0 {
- adv, ch, _ := textseg.ScanGraphemeClusters(b, true)
- rng.End.Byte += adv
- switch ch[0] {
- case '\r', '\n':
- rng.End.Line++
- rng.End.Column = 1
- default:
- rng.End.Column++
- }
- b = b[adv:]
- }
-
- TokenType:
- switch slice[0] {
- case '\\':
- if !quoted {
- // If we're not in quoted mode then just treat this token as
- // normal. (Slices can still start with backslash even if we're
- // not specifically looking for backslash sequences.)
- break TokenType
- }
- if len(slice) < 2 {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid escape sequence",
- Detail: "Backslash must be followed by an escape sequence selector character.",
- Subject: rng.Ptr(),
- })
- break TokenType
- }
-
- switch slice[1] {
-
- case 'n':
- ret = append(ret, '\n')
- continue Slices
- case 'r':
- ret = append(ret, '\r')
- continue Slices
- case 't':
- ret = append(ret, '\t')
- continue Slices
- case '"':
- ret = append(ret, '"')
- continue Slices
- case '\\':
- ret = append(ret, '\\')
- continue Slices
- case 'u', 'U':
- if slice[1] == 'u' && len(slice) != 6 {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid escape sequence",
- Detail: "The \\u escape sequence must be followed by four hexadecimal digits.",
- Subject: rng.Ptr(),
- })
- break TokenType
- } else if slice[1] == 'U' && len(slice) != 10 {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid escape sequence",
- Detail: "The \\U escape sequence must be followed by eight hexadecimal digits.",
- Subject: rng.Ptr(),
- })
- break TokenType
- }
-
- numHex := string(slice[2:])
- num, err := strconv.ParseUint(numHex, 16, 32)
- if err != nil {
- // Should never happen because the scanner won't match
- // a sequence of digits that isn't valid.
- panic(err)
- }
-
- r := rune(num)
- l := utf8.RuneLen(r)
- if l == -1 {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid escape sequence",
- Detail: fmt.Sprintf("Cannot encode character U+%04x in UTF-8.", num),
- Subject: rng.Ptr(),
- })
- break TokenType
- }
- for i := 0; i < l; i++ {
- ret = append(ret, 0)
- }
- rb := ret[len(ret)-l:]
- utf8.EncodeRune(rb, r)
-
- continue Slices
-
- default:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid escape sequence",
- Detail: fmt.Sprintf("The symbol %q is not a valid escape sequence selector.", slice[1:]),
- Subject: rng.Ptr(),
- })
- ret = append(ret, slice[1:]...)
- continue Slices
- }
-
- case '$', '%':
- if len(slice) != 3 {
- // Not long enough to be our escape sequence, so it's literal.
- break TokenType
- }
-
- if slice[1] == slice[0] && slice[2] == '{' {
- ret = append(ret, slice[0])
- ret = append(ret, '{')
- continue Slices
- }
-
- break TokenType
- }
-
- // If we fall out here or break out of here from the switch above
- // then this slice is just a literal.
- ret = append(ret, slice...)
- }
-
- return string(ret), diags
-}
-
-// setRecovery turns on recovery mode without actually doing any recovery.
-// This can be used when a parser knowingly leaves the peeker in a useless
-// place and wants to suppress errors that might result from that decision.
-func (p *parser) setRecovery() {
- p.recovery = true
-}
-
-// recover seeks forward in the token stream until it finds TokenType "end",
-// then returns with the peeker pointed at the following token.
-//
-// If the given token type is a bracketer, this function will additionally
-// count nested instances of the brackets to try to leave the peeker at
-// the end of the _current_ instance of that bracketer, skipping over any
-// nested instances. This is a best-effort operation and may have
-// unpredictable results on input with bad bracketer nesting.
-func (p *parser) recover(end TokenType) Token {
- start := p.oppositeBracket(end)
- p.recovery = true
-
- nest := 0
- for {
- tok := p.Read()
- ty := tok.Type
- if end == TokenTemplateSeqEnd && ty == TokenTemplateControl {
- // normalize so that our matching behavior can work, since
- // TokenTemplateControl/TokenTemplateInterp are asymmetrical
- // with TokenTemplateSeqEnd and thus we need to count both
- // openers if that's the closer we're looking for.
- ty = TokenTemplateInterp
- }
-
- switch ty {
- case start:
- nest++
- case end:
- if nest < 1 {
- return tok
- }
-
- nest--
- case TokenEOF:
- return tok
- }
- }
-}
-
-// recoverOver seeks forward in the token stream until it finds a block
-// starting with TokenType "start", then finds the corresponding end token,
-// leaving the peeker pointed at the token after that end token.
-//
-// The given token type _must_ be a bracketer. For example, if the given
-// start token is TokenOBrace then the parser will be left at the _end_ of
-// the next brace-delimited block encountered, or at EOF if no such block
-// is found or it is unclosed.
-func (p *parser) recoverOver(start TokenType) {
- end := p.oppositeBracket(start)
-
- // find the opening bracket first
-Token:
- for {
- tok := p.Read()
- switch tok.Type {
- case start, TokenEOF:
- break Token
- }
- }
-
- // Now use our existing recover function to locate the _end_ of the
- // container we've found.
- p.recover(end)
-}
-
-func (p *parser) recoverAfterBodyItem() {
- p.recovery = true
- var open []TokenType
-
-Token:
- for {
- tok := p.Read()
-
- switch tok.Type {
-
- case TokenNewline:
- if len(open) == 0 {
- break Token
- }
-
- case TokenEOF:
- break Token
-
- case TokenOBrace, TokenOBrack, TokenOParen, TokenOQuote, TokenOHeredoc, TokenTemplateInterp, TokenTemplateControl:
- open = append(open, tok.Type)
-
- case TokenCBrace, TokenCBrack, TokenCParen, TokenCQuote, TokenCHeredoc:
- opener := p.oppositeBracket(tok.Type)
- for len(open) > 0 && open[len(open)-1] != opener {
- open = open[:len(open)-1]
- }
- if len(open) > 0 {
- open = open[:len(open)-1]
- }
-
- case TokenTemplateSeqEnd:
- for len(open) > 0 && open[len(open)-1] != TokenTemplateInterp && open[len(open)-1] != TokenTemplateControl {
- open = open[:len(open)-1]
- }
- if len(open) > 0 {
- open = open[:len(open)-1]
- }
-
- }
- }
-}
-
-// oppositeBracket finds the bracket that opposes the given bracketer, or
-// NilToken if the given token isn't a bracketer.
-//
-// "Bracketer", for the sake of this function, is one end of a matching
-// open/close set of tokens that establish a bracketing context.
-func (p *parser) oppositeBracket(ty TokenType) TokenType {
- switch ty {
-
- case TokenOBrace:
- return TokenCBrace
- case TokenOBrack:
- return TokenCBrack
- case TokenOParen:
- return TokenCParen
- case TokenOQuote:
- return TokenCQuote
- case TokenOHeredoc:
- return TokenCHeredoc
-
- case TokenCBrace:
- return TokenOBrace
- case TokenCBrack:
- return TokenOBrack
- case TokenCParen:
- return TokenOParen
- case TokenCQuote:
- return TokenOQuote
- case TokenCHeredoc:
- return TokenOHeredoc
-
- case TokenTemplateControl:
- return TokenTemplateSeqEnd
- case TokenTemplateInterp:
- return TokenTemplateSeqEnd
- case TokenTemplateSeqEnd:
- // This is ambigous, but we return Interp here because that's
- // what's assumed by the "recover" method.
- return TokenTemplateInterp
-
- default:
- return TokenNil
- }
-}
-
-func errPlaceholderExpr(rng hcl.Range) Expression {
- return &LiteralValueExpr{
- Val: cty.DynamicVal,
- SrcRange: rng,
- }
-}
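parser.go, the largest of the deleted files, is a hand-written recursive-descent parser. Its binary-operator handling (parseBinaryOps above) is classic precedence climbing: the operator levels are held in a slice ordered loosest-binding first, each level parses its operands by recursing into the tighter levels, and a loop at each level makes equal-precedence operators associate to the left. The stripped-down sketch below follows the same shape over whitespace-separated tokens; unlike the original it evaluates directly instead of building BinaryOpExpr nodes, and all names here are illustrative rather than the vendored API.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

type parser struct {
	toks []string
	pos  int
}

func (p *parser) peek() string {
	if p.pos >= len(p.toks) {
		return ""
	}
	return p.toks[p.pos]
}

func (p *parser) read() string {
	tok := p.peek()
	p.pos++
	return tok
}

// levels[0] is the loosest-binding group; the last entry binds tightest.
var levels = []map[string]func(a, b int) int{
	{"+": func(a, b int) int { return a + b }, "-": func(a, b int) int { return a - b }},
	{"*": func(a, b int) int { return a * b }, "/": func(a, b int) int { return a / b }},
}

// parseBinary mirrors parseBinaryOps: with no levels left it reads a bare
// integer term; otherwise it parses a left operand at the tighter levels,
// then folds in operators of this level left-associatively.
func (p *parser) parseBinary(ops []map[string]func(a, b int) int) (int, error) {
	if len(ops) == 0 {
		return strconv.Atoi(p.read())
	}
	thisLevel, remaining := ops[0], ops[1:]

	lhs, err := p.parseBinary(remaining)
	if err != nil {
		return 0, err
	}
	for {
		apply, ok := thisLevel[p.peek()]
		if !ok {
			return lhs, nil // no more operators at this precedence level
		}
		p.read() // eat the operator token
		rhs, err := p.parseBinary(remaining)
		if err != nil {
			return 0, err
		}
		lhs = apply(lhs, rhs) // left-associative fold
	}
}

func main() {
	p := &parser{toks: strings.Fields("10 - 2 - 3 + 4 * 5")}
	v, err := p.parseBinary(levels)
	fmt.Println(v, err) // 25 <nil>
}

Running the sketch prints 25, i.e. ((10 - 2) - 3) + (4 * 5), confirming both the precedence ordering and the left associativity that the deleted parser documents for itself.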
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go
deleted file mode 100644
index a141626fe..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_template.go
+++ /dev/null
@@ -1,799 +0,0 @@
-package hclsyntax
-
-import (
- "fmt"
- "strings"
- "unicode"
-
- "github.com/apparentlymart/go-textseg/textseg"
- "github.com/hashicorp/hcl2/hcl"
- "github.com/zclconf/go-cty/cty"
-)
-
-func (p *parser) ParseTemplate() (Expression, hcl.Diagnostics) {
- return p.parseTemplate(TokenEOF, false)
-}
-
-func (p *parser) parseTemplate(end TokenType, flushHeredoc bool) (Expression, hcl.Diagnostics) {
- exprs, passthru, rng, diags := p.parseTemplateInner(end, flushHeredoc)
-
- if passthru {
- if len(exprs) != 1 {
- panic("passthru set with len(exprs) != 1")
- }
- return &TemplateWrapExpr{
- Wrapped: exprs[0],
- SrcRange: rng,
- }, diags
- }
-
- return &TemplateExpr{
- Parts: exprs,
- SrcRange: rng,
- }, diags
-}
-
-func (p *parser) parseTemplateInner(end TokenType, flushHeredoc bool) ([]Expression, bool, hcl.Range, hcl.Diagnostics) {
- parts, diags := p.parseTemplateParts(end)
- if flushHeredoc {
- flushHeredocTemplateParts(parts) // Trim off leading spaces on lines per the flush heredoc spec
- }
- tp := templateParser{
- Tokens: parts.Tokens,
- SrcRange: parts.SrcRange,
- }
- exprs, exprsDiags := tp.parseRoot()
- diags = append(diags, exprsDiags...)
-
- passthru := false
- if len(parts.Tokens) == 2 { // one real token and one synthetic "end" token
- if _, isInterp := parts.Tokens[0].(*templateInterpToken); isInterp {
- passthru = true
- }
- }
-
- return exprs, passthru, parts.SrcRange, diags
-}
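
The passthru case above is what lets a template consisting of exactly one interpolation keep the native type of its value (via TemplateWrapExpr), while any mix of literals and interpolations becomes a TemplateExpr and therefore a string. A hedged sketch through the public API; the count variable is invented for the example.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	ctx := &hcl.EvalContext{Variables: map[string]cty.Value{
		"count": cty.NumberIntVal(3),
	}}

	// Exactly one interpolation: the passthru path, so the type is preserved.
	wrap, _ := hclsyntax.ParseTemplate([]byte("${count}"), "t1.tmpl", hcl.Pos{Line: 1, Column: 1})
	v1, _ := wrap.Value(ctx)
	fmt.Println(v1.Type().FriendlyName()) // number

	// Literal plus interpolation: a regular TemplateExpr, always a string.
	mixed, _ := hclsyntax.ParseTemplate([]byte("n=${count}"), "t2.tmpl", hcl.Pos{Line: 1, Column: 1})
	v2, _ := mixed.Value(ctx)
	fmt.Println(v2.Type().FriendlyName()) // string
}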
-
-type templateParser struct {
- Tokens []templateToken
- SrcRange hcl.Range
-
- pos int
-}
-
-func (p *templateParser) parseRoot() ([]Expression, hcl.Diagnostics) {
- var exprs []Expression
- var diags hcl.Diagnostics
-
- for {
- next := p.Peek()
- if _, isEnd := next.(*templateEndToken); isEnd {
- break
- }
-
- expr, exprDiags := p.parseExpr()
- diags = append(diags, exprDiags...)
- exprs = append(exprs, expr)
- }
-
- return exprs, diags
-}
-
-func (p *templateParser) parseExpr() (Expression, hcl.Diagnostics) {
- next := p.Peek()
- switch tok := next.(type) {
-
- case *templateLiteralToken:
- p.Read() // eat literal
- return &LiteralValueExpr{
- Val: cty.StringVal(tok.Val),
- SrcRange: tok.SrcRange,
- }, nil
-
- case *templateInterpToken:
- p.Read() // eat interp
- return tok.Expr, nil
-
- case *templateIfToken:
- return p.parseIf()
-
- case *templateForToken:
- return p.parseFor()
-
- case *templateEndToken:
- p.Read() // eat erroneous token
- return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{
- {
- // This is a particularly unhelpful diagnostic, so callers
- // should attempt to pre-empt it and produce a more helpful
- // diagnostic that is context-aware.
- Severity: hcl.DiagError,
- Summary: "Unexpected end of template",
- Detail: "The control directives within this template are unbalanced.",
- Subject: &tok.SrcRange,
- },
- }
-
- case *templateEndCtrlToken:
- p.Read() // eat erroneous token
- return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Unexpected %s directive", tok.Name()),
- Detail: "The control directives within this template are unbalanced.",
- Subject: &tok.SrcRange,
- },
- }
-
- default:
- // should never happen, because above should be exhaustive
- panic(fmt.Sprintf("unhandled template token type %T", next))
- }
-}
-
-func (p *templateParser) parseIf() (Expression, hcl.Diagnostics) {
- open := p.Read()
- openIf, isIf := open.(*templateIfToken)
- if !isIf {
- // should never happen if caller is behaving
- panic("parseIf called with peeker not pointing at if token")
- }
-
- var ifExprs, elseExprs []Expression
- var diags hcl.Diagnostics
- var endifRange hcl.Range
-
- currentExprs := &ifExprs
-Token:
- for {
- next := p.Peek()
- if end, isEnd := next.(*templateEndToken); isEnd {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unexpected end of template",
- Detail: fmt.Sprintf(
- "The if directive at %s is missing its corresponding endif directive.",
- openIf.SrcRange,
- ),
- Subject: &end.SrcRange,
- })
- return errPlaceholderExpr(end.SrcRange), diags
- }
- if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
- p.Read() // eat end directive
-
- switch end.Type {
-
- case templateElse:
- if currentExprs == &ifExprs {
- currentExprs = &elseExprs
- continue Token
- }
-
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unexpected else directive",
- Detail: fmt.Sprintf(
- "Already in the else clause for the if started at %s.",
- openIf.SrcRange,
- ),
- Subject: &end.SrcRange,
- })
-
- case templateEndIf:
- endifRange = end.SrcRange
- break Token
-
- default:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Unexpected %s directive", end.Name()),
- Detail: fmt.Sprintf(
- "Expecting an endif directive for the if started at %s.",
- openIf.SrcRange,
- ),
- Subject: &end.SrcRange,
- })
- }
-
- return errPlaceholderExpr(end.SrcRange), diags
- }
-
- expr, exprDiags := p.parseExpr()
- diags = append(diags, exprDiags...)
- *currentExprs = append(*currentExprs, expr)
- }
-
- if len(ifExprs) == 0 {
- ifExprs = append(ifExprs, &LiteralValueExpr{
- Val: cty.StringVal(""),
- SrcRange: hcl.Range{
- Filename: openIf.SrcRange.Filename,
- Start: openIf.SrcRange.End,
- End: openIf.SrcRange.End,
- },
- })
- }
- if len(elseExprs) == 0 {
- elseExprs = append(elseExprs, &LiteralValueExpr{
- Val: cty.StringVal(""),
- SrcRange: hcl.Range{
- Filename: endifRange.Filename,
- Start: endifRange.Start,
- End: endifRange.Start,
- },
- })
- }
-
- trueExpr := &TemplateExpr{
- Parts: ifExprs,
- SrcRange: hcl.RangeBetween(ifExprs[0].Range(), ifExprs[len(ifExprs)-1].Range()),
- }
- falseExpr := &TemplateExpr{
- Parts: elseExprs,
- SrcRange: hcl.RangeBetween(elseExprs[0].Range(), elseExprs[len(elseExprs)-1].Range()),
- }
-
- return &ConditionalExpr{
- Condition: openIf.CondExpr,
- TrueResult: trueExpr,
- FalseResult: falseExpr,
-
- SrcRange: hcl.RangeBetween(openIf.SrcRange, endifRange),
- }, diags
-}
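
parseIf above desugars a %{ if }/%{ else }/%{ endif } directive into an ordinary ConditionalExpr whose two branches are themselves templates, padding a missing branch with an empty-string literal. A hedged usage sketch through the public hclsyntax API; the verbose variable is invented for the example.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	src := []byte("%{ if verbose }debug%{ else }quiet%{ endif }")
	expr, diags := hclsyntax.ParseTemplate(src, "mode.tmpl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{Variables: map[string]cty.Value{
		"verbose": cty.BoolVal(false),
	}}
	val, valDiags := expr.Value(ctx)
	if valDiags.HasErrors() {
		panic(valDiags.Error())
	}
	fmt.Println(val.AsString()) // quiet
}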
-
-func (p *templateParser) parseFor() (Expression, hcl.Diagnostics) {
- open := p.Read()
- openFor, isFor := open.(*templateForToken)
- if !isFor {
- // should never happen if caller is behaving
- panic("parseFor called with peeker not pointing at for token")
- }
-
- var contentExprs []Expression
- var diags hcl.Diagnostics
- var endforRange hcl.Range
-
-Token:
- for {
- next := p.Peek()
- if end, isEnd := next.(*templateEndToken); isEnd {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unexpected end of template",
- Detail: fmt.Sprintf(
- "The for directive at %s is missing its corresponding endfor directive.",
- openFor.SrcRange,
- ),
- Subject: &end.SrcRange,
- })
- return errPlaceholderExpr(end.SrcRange), diags
- }
- if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
- p.Read() // eat end directive
-
- switch end.Type {
-
- case templateElse:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unexpected else directive",
- Detail: "An else clause is not expected for a for directive.",
- Subject: &end.SrcRange,
- })
-
- case templateEndFor:
- endforRange = end.SrcRange
- break Token
-
- default:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Unexpected %s directive", end.Name()),
- Detail: fmt.Sprintf(
- "Expecting an endfor directive corresponding to the for directive at %s.",
- openFor.SrcRange,
- ),
- Subject: &end.SrcRange,
- })
- }
-
- return errPlaceholderExpr(end.SrcRange), diags
- }
-
- expr, exprDiags := p.parseExpr()
- diags = append(diags, exprDiags...)
- contentExprs = append(contentExprs, expr)
- }
-
- if len(contentExprs) == 0 {
- contentExprs = append(contentExprs, &LiteralValueExpr{
- Val: cty.StringVal(""),
- SrcRange: hcl.Range{
- Filename: openFor.SrcRange.Filename,
- Start: openFor.SrcRange.End,
- End: openFor.SrcRange.End,
- },
- })
- }
-
- contentExpr := &TemplateExpr{
- Parts: contentExprs,
- SrcRange: hcl.RangeBetween(contentExprs[0].Range(), contentExprs[len(contentExprs)-1].Range()),
- }
-
- forExpr := &ForExpr{
- KeyVar: openFor.KeyVar,
- ValVar: openFor.ValVar,
-
- CollExpr: openFor.CollExpr,
- ValExpr: contentExpr,
-
- SrcRange: hcl.RangeBetween(openFor.SrcRange, endforRange),
- OpenRange: openFor.SrcRange,
- CloseRange: endforRange,
- }
-
- return &TemplateJoinExpr{
- Tuple: forExpr,
- }, diags
-}
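
parseFor turns a %{ for }/%{ endfor } block into a ForExpr that renders one fragment per element, wrapped in a TemplateJoinExpr that concatenates the fragments back into a single string. A hedged sketch using the public API; the names list and the s iterator variable are invented for the example.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	src := []byte("%{ for s in names }<${s}>%{ endfor }")
	expr, diags := hclsyntax.ParseTemplate(src, "names.tmpl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{Variables: map[string]cty.Value{
		"names": cty.ListVal([]cty.Value{cty.StringVal("a"), cty.StringVal("b")}),
	}}
	val, valDiags := expr.Value(ctx)
	if valDiags.HasErrors() {
		panic(valDiags.Error())
	}
	fmt.Println(val.AsString()) // <a><b>
}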
-
-func (p *templateParser) Peek() templateToken {
- return p.Tokens[p.pos]
-}
-
-func (p *templateParser) Read() templateToken {
- ret := p.Peek()
- if _, end := ret.(*templateEndToken); !end {
- p.pos++
- }
- return ret
-}
-
-// parseTemplateParts produces a flat sequence of "template tokens", which are
-// either literal values (with any "trimming" already applied), interpolation
-// sequences, or control flow markers.
-//
-// A further pass is required on the result to turn it into an AST.
-func (p *parser) parseTemplateParts(end TokenType) (*templateParts, hcl.Diagnostics) {
- var parts []templateToken
- var diags hcl.Diagnostics
-
- startRange := p.NextRange()
- ltrimNext := false
- nextCanTrimPrev := false
- var endRange hcl.Range
-
-Token:
- for {
- next := p.Read()
- if next.Type == end {
- // all done!
- endRange = next.Range
- break
- }
-
- ltrim := ltrimNext
- ltrimNext = false
- canTrimPrev := nextCanTrimPrev
- nextCanTrimPrev = false
-
- switch next.Type {
- case TokenStringLit, TokenQuotedLit:
- str, strDiags := p.decodeStringLit(next)
- diags = append(diags, strDiags...)
-
- if ltrim {
- str = strings.TrimLeftFunc(str, unicode.IsSpace)
- }
-
- parts = append(parts, &templateLiteralToken{
- Val: str,
- SrcRange: next.Range,
- })
- nextCanTrimPrev = true
-
- case TokenTemplateInterp:
- // if the opener is ${~ then we want to eat any trailing whitespace
- // in the preceding literal token, assuming it is indeed a literal
- // token.
- if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
- prevExpr := parts[len(parts)-1]
- if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
- lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
- }
- }
-
- p.PushIncludeNewlines(false)
- expr, exprDiags := p.ParseExpression()
- diags = append(diags, exprDiags...)
- close := p.Peek()
- if close.Type != TokenTemplateSeqEnd {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Extra characters after interpolation expression",
- Detail: "Expected a closing brace to end the interpolation expression, but found extra characters.",
- Subject: &close.Range,
- Context: hcl.RangeBetween(startRange, close.Range).Ptr(),
- })
- }
- p.recover(TokenTemplateSeqEnd)
- } else {
- p.Read() // eat closing brace
-
- // If the closer is ~} then we want to eat any leading
- // whitespace on the next token, if it turns out to be a
- // literal token.
- if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
- ltrimNext = true
- }
- }
- p.PopIncludeNewlines()
- parts = append(parts, &templateInterpToken{
- Expr: expr,
- SrcRange: hcl.RangeBetween(next.Range, close.Range),
- })
-
- case TokenTemplateControl:
- // if the opener is %{~ then we want to eat any trailing whitespace
- // in the preceding literal token, assuming it is indeed a literal
- // token.
- if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
- prevExpr := parts[len(parts)-1]
- if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
- lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
- }
- }
- p.PushIncludeNewlines(false)
-
- kw := p.Peek()
- if kw.Type != TokenIdent {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid template directive",
- Detail: "A template directive keyword (\"if\", \"for\", etc) is expected at the beginning of a %{ sequence.",
- Subject: &kw.Range,
- Context: hcl.RangeBetween(next.Range, kw.Range).Ptr(),
- })
- }
- p.recover(TokenTemplateSeqEnd)
- p.PopIncludeNewlines()
- continue Token
- }
- p.Read() // eat keyword token
-
- switch {
-
- case ifKeyword.TokenMatches(kw):
- condExpr, exprDiags := p.ParseExpression()
- diags = append(diags, exprDiags...)
- parts = append(parts, &templateIfToken{
- CondExpr: condExpr,
- SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
- })
-
- case elseKeyword.TokenMatches(kw):
- parts = append(parts, &templateEndCtrlToken{
- Type: templateElse,
- SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
- })
-
- case endifKeyword.TokenMatches(kw):
- parts = append(parts, &templateEndCtrlToken{
- Type: templateEndIf,
- SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
- })
-
- case forKeyword.TokenMatches(kw):
- var keyName, valName string
- if p.Peek().Type != TokenIdent {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' directive",
- Detail: "For directive requires variable name after 'for'.",
- Subject: p.Peek().Range.Ptr(),
- })
- }
- p.recover(TokenTemplateSeqEnd)
- p.PopIncludeNewlines()
- continue Token
- }
-
- valName = string(p.Read().Bytes)
-
- if p.Peek().Type == TokenComma {
- // What we just read was actually the key, then.
- keyName = valName
- p.Read() // eat comma
-
- if p.Peek().Type != TokenIdent {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' directive",
- Detail: "For directive requires value variable name after comma.",
- Subject: p.Peek().Range.Ptr(),
- })
- }
- p.recover(TokenTemplateSeqEnd)
- p.PopIncludeNewlines()
- continue Token
- }
-
- valName = string(p.Read().Bytes)
- }
-
- if !inKeyword.TokenMatches(p.Peek()) {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid 'for' directive",
- Detail: "For directive requires 'in' keyword after names.",
- Subject: p.Peek().Range.Ptr(),
- })
- }
- p.recover(TokenTemplateSeqEnd)
- p.PopIncludeNewlines()
- continue Token
- }
- p.Read() // eat 'in' keyword
-
- collExpr, collDiags := p.ParseExpression()
- diags = append(diags, collDiags...)
- parts = append(parts, &templateForToken{
- KeyVar: keyName,
- ValVar: valName,
- CollExpr: collExpr,
-
- SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
- })
-
- case endforKeyword.TokenMatches(kw):
- parts = append(parts, &templateEndCtrlToken{
- Type: templateEndFor,
- SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
- })
-
- default:
- if !p.recovery {
- suggestions := []string{"if", "for", "else", "endif", "endfor"}
- given := string(kw.Bytes)
- suggestion := nameSuggestion(given, suggestions)
- if suggestion != "" {
- suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
- }
-
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid template control keyword",
- Detail: fmt.Sprintf("%q is not a valid template control keyword.%s", given, suggestion),
- Subject: &kw.Range,
- Context: hcl.RangeBetween(next.Range, kw.Range).Ptr(),
- })
- }
- p.recover(TokenTemplateSeqEnd)
- p.PopIncludeNewlines()
- continue Token
-
- }
-
- close := p.Peek()
- if close.Type != TokenTemplateSeqEnd {
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Extra characters in %s marker", kw.Bytes),
- Detail: "Expected a closing brace to end the sequence, but found extra characters.",
- Subject: &close.Range,
- Context: hcl.RangeBetween(startRange, close.Range).Ptr(),
- })
- }
- p.recover(TokenTemplateSeqEnd)
- } else {
- p.Read() // eat closing brace
-
- // If the closer is ~} then we want to eat any leading
- // whitespace on the next token, if it turns out to be a
- // literal token.
- if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
- ltrimNext = true
- }
- }
- p.PopIncludeNewlines()
-
- default:
- if !p.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unterminated template string",
- Detail: "No closing marker was found for the string.",
- Subject: &next.Range,
- Context: hcl.RangeBetween(startRange, next.Range).Ptr(),
- })
- }
- final := p.recover(end)
- endRange = final.Range
- break Token
- }
- }
-
- if len(parts) == 0 {
- // If a sequence has no content, we'll treat it as if it had an
- // empty string in it because that's what the user probably means
- // if they write "" in configuration.
- parts = append(parts, &templateLiteralToken{
- Val: "",
- SrcRange: hcl.Range{
- // Range is the zero-character span immediately after the
- // opening quote.
- Filename: startRange.Filename,
- Start: startRange.End,
- End: startRange.End,
- },
- })
- }
-
- // Always end with an end token, so the parser can produce diagnostics
- // about unclosed items with proper position information.
- parts = append(parts, &templateEndToken{
- SrcRange: endRange,
- })
-
- ret := &templateParts{
- Tokens: parts,
- SrcRange: hcl.RangeBetween(startRange, endRange),
- }
-
- return ret, diags
-}
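
The ltrim/canTrimPrev bookkeeping above is what implements the ~ trim markers: an opener written as ${~ eats trailing whitespace from the literal before it, and a closer written as ~} eats leading whitespace from the literal after it. A hedged example through the public API; the greeting text and the name variable are invented.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	src := []byte("Hello,   ${~ name ~}   !")
	expr, diags := hclsyntax.ParseTemplate(src, "greet.tmpl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	ctx := &hcl.EvalContext{Variables: map[string]cty.Value{
		"name": cty.StringVal("World"),
	}}
	val, _ := expr.Value(ctx)
	fmt.Println(val.AsString()) // Hello,World!
}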
-
-// flushHeredocTemplateParts modifies in-place the line-leading literal strings
-// to apply the flush heredoc processing rule: find the line with the smallest
-// number of whitespace characters as prefix and then trim that number of
-// characters from all of the lines.
-//
-// This rule is applied to static tokens rather than to the rendered result,
-// so interpolating a string with leading whitespace cannot affect the chosen
-// prefix length.
-func flushHeredocTemplateParts(parts *templateParts) {
- if len(parts.Tokens) == 0 {
- // Nothing to do
- return
- }
-
- const maxInt = int((^uint(0)) >> 1)
-
- minSpaces := maxInt
- newline := true
- var adjust []*templateLiteralToken
- for _, ttok := range parts.Tokens {
- if newline {
- newline = false
- var spaces int
- if lit, ok := ttok.(*templateLiteralToken); ok {
- orig := lit.Val
- trimmed := strings.TrimLeftFunc(orig, unicode.IsSpace)
- // If a token is entirely spaces and ends with a newline
- // then it's a "blank line" and thus not considered for
- // space-prefix-counting purposes.
- if len(trimmed) == 0 && strings.HasSuffix(orig, "\n") {
- spaces = maxInt
- } else {
- spaceBytes := len(lit.Val) - len(trimmed)
- spaces, _ = textseg.TokenCount([]byte(orig[:spaceBytes]), textseg.ScanGraphemeClusters)
- adjust = append(adjust, lit)
- }
- } else if _, ok := ttok.(*templateEndToken); ok {
- break // don't process the end token since it never has spaces before it
- }
- if spaces < minSpaces {
- minSpaces = spaces
- }
- }
- if lit, ok := ttok.(*templateLiteralToken); ok {
- if strings.HasSuffix(lit.Val, "\n") {
- newline = true // The following token, if any, begins a new line
- }
- }
- }
-
- for _, lit := range adjust {
- // Since we want to count space _characters_ rather than space _bytes_,
- // we can't just do a straightforward slice operation here and instead
- // need to hunt for the split point with a scanner.
- valBytes := []byte(lit.Val)
- spaceByteCount := 0
- for i := 0; i < minSpaces; i++ {
- adv, _, _ := textseg.ScanGraphemeClusters(valBytes, true)
- spaceByteCount += adv
- valBytes = valBytes[adv:]
- }
- lit.Val = lit.Val[spaceByteCount:]
- lit.SrcRange.Start.Column += minSpaces
- lit.SrcRange.Start.Byte += spaceByteCount
- }
-}
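
flushHeredocTemplateParts implements the <<- ("flush") heredoc rule: the smallest leading-whitespace prefix across the lines is measured in grapheme clusters and trimmed from every line. A hedged sketch of the observable effect via ParseConfig; the message attribute is invented for the example.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	src := []byte("message = <<-EOT\n    Hello\n      World\n  EOT\n")
	f, diags := hclsyntax.ParseConfig(src, "main.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	attrs, _ := f.Body.JustAttributes()
	val, _ := attrs["message"].Expr.Value(nil)
	// The common four-space prefix is trimmed from every line.
	fmt.Printf("%q\n", val.AsString()) // "Hello\n  World\n"
}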
-
-type templateParts struct {
- Tokens []templateToken
- SrcRange hcl.Range
-}
-
-// templateToken is a higher-level token that represents a single atom within
-// the template language. Our template parsing first raises the raw token
-// stream to a sequence of templateToken, and then transforms the result into
-// an expression tree.
-type templateToken interface {
- templateToken() templateToken
-}
-
-type templateLiteralToken struct {
- Val string
- SrcRange hcl.Range
- isTemplateToken
-}
-
-type templateInterpToken struct {
- Expr Expression
- SrcRange hcl.Range
- isTemplateToken
-}
-
-type templateIfToken struct {
- CondExpr Expression
- SrcRange hcl.Range
- isTemplateToken
-}
-
-type templateForToken struct {
- KeyVar string // empty if ignoring key
- ValVar string
- CollExpr Expression
- SrcRange hcl.Range
- isTemplateToken
-}
-
-type templateEndCtrlType int
-
-const (
- templateEndIf templateEndCtrlType = iota
- templateElse
- templateEndFor
-)
-
-type templateEndCtrlToken struct {
- Type templateEndCtrlType
- SrcRange hcl.Range
- isTemplateToken
-}
-
-func (t *templateEndCtrlToken) Name() string {
- switch t.Type {
- case templateEndIf:
- return "endif"
- case templateElse:
- return "else"
- case templateEndFor:
- return "endfor"
- default:
- // should never happen
- panic("invalid templateEndCtrlType")
- }
-}
-
-type templateEndToken struct {
- SrcRange hcl.Range
- isTemplateToken
-}
-
-type isTemplateToken [0]int
-
-func (t isTemplateToken) templateToken() templateToken {
- return t
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go
deleted file mode 100644
index 2ff3ed6c1..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/parser_traversal.go
+++ /dev/null
@@ -1,159 +0,0 @@
-package hclsyntax
-
-import (
- "github.com/hashicorp/hcl2/hcl"
- "github.com/zclconf/go-cty/cty"
-)
-
-// ParseTraversalAbs parses an absolute traversal that is assumed to consume
-// all of the remaining tokens in the peeker. The usual parser recovery
-// behavior is not supported here because traversals are not expected to
-// be parsed as part of a larger program.
-func (p *parser) ParseTraversalAbs() (hcl.Traversal, hcl.Diagnostics) {
- var ret hcl.Traversal
- var diags hcl.Diagnostics
-
- // Absolute traversal must always begin with a variable name
- varTok := p.Read()
- if varTok.Type != TokenIdent {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Variable name required",
- Detail: "Must begin with a variable name.",
- Subject: &varTok.Range,
- })
- return ret, diags
- }
-
- varName := string(varTok.Bytes)
- ret = append(ret, hcl.TraverseRoot{
- Name: varName,
- SrcRange: varTok.Range,
- })
-
- for {
- next := p.Peek()
-
- if next.Type == TokenEOF {
- return ret, diags
- }
-
- switch next.Type {
- case TokenDot:
- // Attribute access
- dot := p.Read() // eat dot
- nameTok := p.Read()
- if nameTok.Type != TokenIdent {
- if nameTok.Type == TokenStar {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Attribute name required",
- Detail: "Splat expressions (.*) may not be used here.",
- Subject: &nameTok.Range,
- Context: hcl.RangeBetween(varTok.Range, nameTok.Range).Ptr(),
- })
- } else {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Attribute name required",
- Detail: "Dot must be followed by attribute name.",
- Subject: &nameTok.Range,
- Context: hcl.RangeBetween(varTok.Range, nameTok.Range).Ptr(),
- })
- }
- return ret, diags
- }
-
- attrName := string(nameTok.Bytes)
- ret = append(ret, hcl.TraverseAttr{
- Name: attrName,
- SrcRange: hcl.RangeBetween(dot.Range, nameTok.Range),
- })
- case TokenOBrack:
- // Index
- open := p.Read() // eat open bracket
- next := p.Peek()
-
- switch next.Type {
- case TokenNumberLit:
- tok := p.Read() // eat number
- numVal, numDiags := p.numberLitValue(tok)
- diags = append(diags, numDiags...)
-
- close := p.Read()
- if close.Type != TokenCBrack {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unclosed index brackets",
- Detail: "Index key must be followed by a closing bracket.",
- Subject: &close.Range,
- Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
- })
- }
-
- ret = append(ret, hcl.TraverseIndex{
- Key: numVal,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- })
-
- if diags.HasErrors() {
- return ret, diags
- }
-
- case TokenOQuote:
- str, _, strDiags := p.parseQuotedStringLiteral()
- diags = append(diags, strDiags...)
-
- close := p.Read()
- if close.Type != TokenCBrack {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unclosed index brackets",
- Detail: "Index key must be followed by a closing bracket.",
- Subject: &close.Range,
- Context: hcl.RangeBetween(open.Range, close.Range).Ptr(),
- })
- }
-
- ret = append(ret, hcl.TraverseIndex{
- Key: cty.StringVal(str),
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- })
-
- if diags.HasErrors() {
- return ret, diags
- }
-
- default:
- if next.Type == TokenStar {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Attribute name required",
- Detail: "Splat expressions ([*]) may not be used here.",
- Subject: &next.Range,
- Context: hcl.RangeBetween(varTok.Range, next.Range).Ptr(),
- })
- } else {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Index value required",
- Detail: "Index brackets must contain either a literal number or a literal string.",
- Subject: &next.Range,
- Context: hcl.RangeBetween(varTok.Range, next.Range).Ptr(),
- })
- }
- return ret, diags
- }
-
- default:
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid character",
- Detail: "Expected an attribute access or an index operator.",
- Subject: &next.Range,
- Context: hcl.RangeBetween(varTok.Range, next.Range).Ptr(),
- })
- return ret, diags
- }
- }
-}
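
ParseTraversalAbs accepts only a variable name followed by attribute and index steps, which makes it handy for validating user-supplied references without evaluating them. A hedged usage sketch; the reference string is invented for the example.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	traversal, diags := hclsyntax.ParseTraversalAbs(
		[]byte(`module.network.subnet_ids[0]`), "ref", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}

	fmt.Println("root:", traversal.RootName()) // module
	for _, step := range traversal[1:] {
		switch s := step.(type) {
		case hcl.TraverseAttr:
			fmt.Println("attr: ", s.Name)
		case hcl.TraverseIndex:
			fmt.Println("index:", s.Key.GoString())
		}
	}
}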
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go
deleted file mode 100644
index 5a4b50e2f..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/peeker.go
+++ /dev/null
@@ -1,212 +0,0 @@
-package hclsyntax
-
-import (
- "bytes"
- "fmt"
- "path/filepath"
- "runtime"
- "strings"
-
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// This is set to true at init() time in tests, to enable more useful output
-// if a stack discipline error is detected. It should not be enabled in
-// normal mode since there is a performance penalty from accessing the
-// runtime stack to produce the traces, but could be temporarily set to
-// true for debugging if desired.
-var tracePeekerNewlinesStack = false
-
-type peeker struct {
- Tokens Tokens
- NextIndex int
-
- IncludeComments bool
- IncludeNewlinesStack []bool
-
- // used only when tracePeekerNewlinesStack is set
- newlineStackChanges []peekerNewlineStackChange
-}
-
-// for use in debugging the stack usage only
-type peekerNewlineStackChange struct {
- Pushing bool // if false, then popping
- Frame runtime.Frame
- Include bool
-}
-
-func newPeeker(tokens Tokens, includeComments bool) *peeker {
- return &peeker{
- Tokens: tokens,
- IncludeComments: includeComments,
-
- IncludeNewlinesStack: []bool{true},
- }
-}
-
-func (p *peeker) Peek() Token {
- ret, _ := p.nextToken()
- return ret
-}
-
-func (p *peeker) Read() Token {
- ret, nextIdx := p.nextToken()
- p.NextIndex = nextIdx
- return ret
-}
-
-func (p *peeker) NextRange() hcl.Range {
- return p.Peek().Range
-}
-
-func (p *peeker) PrevRange() hcl.Range {
- if p.NextIndex == 0 {
- return p.NextRange()
- }
-
- return p.Tokens[p.NextIndex-1].Range
-}
-
-func (p *peeker) nextToken() (Token, int) {
- for i := p.NextIndex; i < len(p.Tokens); i++ {
- tok := p.Tokens[i]
- switch tok.Type {
- case TokenComment:
- if !p.IncludeComments {
- // Single-line comment tokens, starting with # or //, absorb
- // the trailing newline that terminates them as part of their
- // bytes. When we're filtering out comments, we must as a
- // special case transform these to newline tokens in order
- // to properly parse newline-terminated block items.
-
- if p.includingNewlines() {
- if len(tok.Bytes) > 0 && tok.Bytes[len(tok.Bytes)-1] == '\n' {
- fakeNewline := Token{
- Type: TokenNewline,
- Bytes: tok.Bytes[len(tok.Bytes)-1 : len(tok.Bytes)],
-
- // We use the whole token range as the newline
- // range, even though that's a little... weird,
- // because otherwise we'd need to go count
- // characters again in order to figure out the
- // column of the newline, and that complexity
- // isn't justified when ranges of newlines are
- // so rarely printed anyway.
- Range: tok.Range,
- }
- return fakeNewline, i + 1
- }
- }
-
- continue
- }
- case TokenNewline:
- if !p.includingNewlines() {
- continue
- }
- }
-
- return tok, i + 1
- }
-
- // if we fall out here then we'll return the EOF token, and leave
- // our index pointed off the end of the array so we'll keep
- // returning EOF in future too.
- return p.Tokens[len(p.Tokens)-1], len(p.Tokens)
-}
-
-func (p *peeker) includingNewlines() bool {
- return p.IncludeNewlinesStack[len(p.IncludeNewlinesStack)-1]
-}
-
-func (p *peeker) PushIncludeNewlines(include bool) {
- if tracePeekerNewlinesStack {
- // Record who called us so that we can more easily track down any
- // mismanagement of the stack in the parser.
- callers := []uintptr{0}
- runtime.Callers(2, callers)
- frames := runtime.CallersFrames(callers)
- frame, _ := frames.Next()
- p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{
- true, frame, include,
- })
- }
-
- p.IncludeNewlinesStack = append(p.IncludeNewlinesStack, include)
-}
-
-func (p *peeker) PopIncludeNewlines() bool {
- stack := p.IncludeNewlinesStack
- remain, ret := stack[:len(stack)-1], stack[len(stack)-1]
- p.IncludeNewlinesStack = remain
-
- if tracePeekerNewlinesStack {
- // Record who called us so that we can more easily track down any
- // mismanagement of the stack in the parser.
- callers := []uintptr{0}
- runtime.Callers(2, callers)
- frames := runtime.CallersFrames(callers)
- frame, _ := frames.Next()
- p.newlineStackChanges = append(p.newlineStackChanges, peekerNewlineStackChange{
- false, frame, ret,
- })
- }
-
- return ret
-}
-
-// AssertEmptyIncludeNewlinesStack checks that the IncludeNewlinesStack contains
-// only its initial element, panicking if it does not. This can be used to catch
-// stack mismanagement that might otherwise just cause confusing downstream
-// errors.
-//
-// This function is a no-op if the stack is correctly balanced when called.
-//
-// If newlines stack tracing is enabled by setting the global variable
-// tracePeekerNewlinesStack at init time, a full log of all of the push/pop
-// calls will be produced to help identify which caller in the parser is
-// misbehaving.
-func (p *peeker) AssertEmptyIncludeNewlinesStack() {
- if len(p.IncludeNewlinesStack) != 1 {
- // Should never happen; indicates mismanagement of the stack inside
- // the parser.
-		if p.newlineStackChanges != nil { // only set if tracePeekerNewlinesStack is enabled above
- panic(fmt.Errorf(
- "non-empty IncludeNewlinesStack after parse with %d calls unaccounted for:\n%s",
- len(p.IncludeNewlinesStack)-1,
- formatPeekerNewlineStackChanges(p.newlineStackChanges),
- ))
- } else {
- panic(fmt.Errorf("non-empty IncludeNewlinesStack after parse: %#v", p.IncludeNewlinesStack))
- }
- }
-}
-
-func formatPeekerNewlineStackChanges(changes []peekerNewlineStackChange) string {
- indent := 0
- var buf bytes.Buffer
- for _, change := range changes {
- funcName := change.Frame.Function
- if idx := strings.LastIndexByte(funcName, '.'); idx != -1 {
- funcName = funcName[idx+1:]
- }
- filename := change.Frame.File
- if idx := strings.LastIndexByte(filename, filepath.Separator); idx != -1 {
- filename = filename[idx+1:]
- }
-
- switch change.Pushing {
-
- case true:
- buf.WriteString(strings.Repeat(" ", indent))
- fmt.Fprintf(&buf, "PUSH %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line)
- indent++
-
- case false:
- indent--
- buf.WriteString(strings.Repeat(" ", indent))
- fmt.Fprintf(&buf, "POP %#v (%s at %s:%d)\n", change.Include, funcName, filename, change.Frame.Line)
-
- }
- }
- return buf.String()
-}
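
The peeker's IncludeNewlinesStack is a simple push/pop discipline: constructs that may span newlines (parentheses, interpolations, bare expressions) push false so newline tokens are filtered out, and pop on exit; the assert catches unbalanced push/pop pairs. A stripped-down sketch of the same pattern, independent of the vendored types and names.

package main

import "fmt"

// newlineFilter is a minimal stand-in for the peeker's newline stack.
type newlineFilter struct {
	stack []bool
}

func newNewlineFilter() *newlineFilter {
	// The outermost context always treats newlines as significant.
	return &newlineFilter{stack: []bool{true}}
}

func (f *newlineFilter) Push(include bool) { f.stack = append(f.stack, include) }

func (f *newlineFilter) Pop() bool {
	top := f.stack[len(f.stack)-1]
	f.stack = f.stack[:len(f.stack)-1]
	return top
}

// Including reports whether newline tokens are currently significant.
func (f *newlineFilter) Including() bool { return f.stack[len(f.stack)-1] }

// AssertBalanced panics if pushes and pops were mismatched, in the spirit of
// AssertEmptyIncludeNewlinesStack above.
func (f *newlineFilter) AssertBalanced() {
	if len(f.stack) != 1 {
		panic(fmt.Sprintf("unbalanced newline stack: %v", f.stack))
	}
}

func main() {
	f := newNewlineFilter()
	fmt.Println(f.Including()) // true: at the top level, newlines end body items

	f.Push(false)              // e.g. entering a parenthesized expression
	fmt.Println(f.Including()) // false: newlines are ignored inside

	f.Pop()
	f.AssertBalanced()
}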
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go
deleted file mode 100644
index cf0ee2976..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/public.go
+++ /dev/null
@@ -1,171 +0,0 @@
-package hclsyntax
-
-import (
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// ParseConfig parses the given buffer as a whole HCL config file, returning
-// a *hcl.File representing its contents. If HasErrors called on the returned
-// diagnostics returns true, the returned body is likely to be incomplete
-// and should therefore be used with care.
-//
-// The body in the returned file has dynamic type *hclsyntax.Body, so callers
-// may freely type-assert this to get access to the full hclsyntax API in
-// situations where detailed access is required. However, most common use-cases
-// should be served using the hcl.Body interface to ensure compatibility with
-// other configuration syntaxes, such as JSON.
-func ParseConfig(src []byte, filename string, start hcl.Pos) (*hcl.File, hcl.Diagnostics) {
- tokens, diags := LexConfig(src, filename, start)
- peeker := newPeeker(tokens, false)
- parser := &parser{peeker: peeker}
- body, parseDiags := parser.ParseBody(TokenEOF)
- diags = append(diags, parseDiags...)
-
- // Panic if the parser uses incorrect stack discipline with the peeker's
- // newlines stack, since otherwise it will produce confusing downstream
- // errors.
- peeker.AssertEmptyIncludeNewlinesStack()
-
- return &hcl.File{
- Body: body,
- Bytes: src,
-
- Nav: navigation{
- root: body,
- },
- }, diags
-}
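
A hedged usage sketch for ParseConfig, checking diagnostics before trusting the body; the attribute names are invented, and real callers typically decode the body with hcldec or gohcl rather than JustAttributes.

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	src := []byte("region  = \"eu-west-2\"\nretries = 3\n")
	f, diags := hclsyntax.ParseConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		// Per the doc comment above, the body may be incomplete when there
		// are errors, so bail out rather than using it.
		log.Fatal(diags.Error())
	}

	attrs, moreDiags := f.Body.JustAttributes()
	if moreDiags.HasErrors() {
		log.Fatal(moreDiags.Error())
	}
	region, _ := attrs["region"].Expr.Value(nil)
	fmt.Println(region.AsString()) // eu-west-2
}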
-
-// ParseExpression parses the given buffer as a standalone HCL expression,
-// returning it as an instance of Expression.
-func ParseExpression(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) {
- tokens, diags := LexExpression(src, filename, start)
- peeker := newPeeker(tokens, false)
- parser := &parser{peeker: peeker}
-
- // Bare expressions are always parsed in "ignore newlines" mode, as if
- // they were wrapped in parentheses.
- parser.PushIncludeNewlines(false)
-
- expr, parseDiags := parser.ParseExpression()
- diags = append(diags, parseDiags...)
-
- next := parser.Peek()
- if next.Type != TokenEOF && !parser.recovery {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Extra characters after expression",
- Detail: "An expression was successfully parsed, but extra characters were found after it.",
- Subject: &next.Range,
- })
- }
-
- parser.PopIncludeNewlines()
-
- // Panic if the parser uses incorrect stack discipline with the peeker's
- // newlines stack, since otherwise it will produce confusing downstream
- // errors.
- peeker.AssertEmptyIncludeNewlinesStack()
-
- return expr, diags
-}
-
-// ParseTemplate parses the given buffer as a standalone HCL template,
-// returning it as an instance of Expression.
-func ParseTemplate(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) {
- tokens, diags := LexTemplate(src, filename, start)
- peeker := newPeeker(tokens, false)
- parser := &parser{peeker: peeker}
- expr, parseDiags := parser.ParseTemplate()
- diags = append(diags, parseDiags...)
-
- // Panic if the parser uses incorrect stack discipline with the peeker's
- // newlines stack, since otherwise it will produce confusing downstream
- // errors.
- peeker.AssertEmptyIncludeNewlinesStack()
-
- return expr, diags
-}
-
-// ParseTraversalAbs parses the given buffer as a standalone absolute traversal.
-//
-// Parsing as a traversal is more limited than parsing as an expression since
-// it allows only attribute and indexing operations on variables. Traversals
-// are useful as a syntax for referring to objects without necessarily
-// evaluating them.
-func ParseTraversalAbs(src []byte, filename string, start hcl.Pos) (hcl.Traversal, hcl.Diagnostics) {
- tokens, diags := LexExpression(src, filename, start)
- peeker := newPeeker(tokens, false)
- parser := &parser{peeker: peeker}
-
-	// Bare traversals are always parsed in "ignore newlines" mode, as if
- // they were wrapped in parentheses.
- parser.PushIncludeNewlines(false)
-
- expr, parseDiags := parser.ParseTraversalAbs()
- diags = append(diags, parseDiags...)
-
- parser.PopIncludeNewlines()
-
- // Panic if the parser uses incorrect stack discipline with the peeker's
- // newlines stack, since otherwise it will produce confusing downstream
- // errors.
- peeker.AssertEmptyIncludeNewlinesStack()
-
- return expr, diags
-}
-
-// LexConfig performs lexical analysis on the given buffer, treating it as a
-// whole HCL config file, and returns the resulting tokens.
-//
-// Only minimal validation is done during lexical analysis, so the returned
-// diagnostics may include errors about lexical issues such as bad character
-// encodings or unrecognized characters, but full parsing is required to
-// detect _all_ syntax errors.
-func LexConfig(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
- tokens := scanTokens(src, filename, start, scanNormal)
- diags := checkInvalidTokens(tokens)
- return tokens, diags
-}
-
-// LexExpression performs lexical analysis on the given buffer, treating it as
-// a standalone HCL expression, and returns the resulting tokens.
-//
-// Only minimal validation is done during lexical analysis, so the returned
-// diagnostics may include errors about lexical issues such as bad character
-// encodings or unrecognized characters, but full parsing is required to
-// detect _all_ syntax errors.
-func LexExpression(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
- // This is actually just the same thing as LexConfig, since configs
- // and expressions lex in the same way.
- tokens := scanTokens(src, filename, start, scanNormal)
- diags := checkInvalidTokens(tokens)
- return tokens, diags
-}
-
-// LexTemplate performs lexical analysis on the given buffer, treating it as a
-// standalone HCL template, and returns the resulting tokens.
-//
-// Only minimal validation is done during lexical analysis, so the returned
-// diagnostics may include errors about lexical issues such as bad character
-// encodings or unrecognized characters, but full parsing is required to
-// detect _all_ syntax errors.
-func LexTemplate(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
- tokens := scanTokens(src, filename, start, scanTemplate)
- diags := checkInvalidTokens(tokens)
- return tokens, diags
-}
-
-// ValidIdentifier tests if the given string could be a valid identifier in
-// a native syntax expression.
-//
-// This is useful when accepting names from the user that will be used as
-// variable or attribute names in the scope, to ensure that any name chosen
-// will be traversable using the variable or attribute traversal syntax.
-func ValidIdentifier(s string) bool {
- // This is a kinda-expensive way to do something pretty simple, but it
- // is easiest to do with our existing scanner-related infrastructure here
- // and nobody should be validating identifiers in a tight loop.
- tokens := scanTokens([]byte(s), "", hcl.Pos{}, scanIdentOnly)
- return len(tokens) == 2 && tokens[0].Type == TokenIdent && tokens[1].Type == TokenEOF
-}
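
The Lex* helpers and ValidIdentifier are useful on their own, for example to pre-validate user-supplied names before interpolating them into generated configuration. A hedged sketch; the identifier strings and the tiny config are invented.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	fmt.Println(hclsyntax.ValidIdentifier("subnet_id"))   // true
	fmt.Println(hclsyntax.ValidIdentifier("0-not-valid")) // false

	// LexConfig exposes the raw token stream without parsing it.
	tokens, diags := hclsyntax.LexConfig([]byte("a = 1\n"), "x.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}
	for _, tok := range tokens {
		fmt.Printf("%v %q\n", tok.Type, tok.Bytes)
	}
}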
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go
deleted file mode 100644
index 2895ade75..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.go
+++ /dev/null
@@ -1,301 +0,0 @@
-//line scan_string_lit.rl:1
-
-package hclsyntax
-
-// This file is generated from scan_string_lit.rl. DO NOT EDIT.
-
-//line scan_string_lit.go:9
-var _hclstrtok_actions []byte = []byte{
- 0, 1, 0, 1, 1, 2, 1, 0,
-}
-
-var _hclstrtok_key_offsets []byte = []byte{
- 0, 0, 2, 4, 6, 10, 14, 18,
- 22, 27, 31, 36, 41, 46, 51, 57,
- 62, 74, 85, 96, 107, 118, 129, 140,
- 151,
-}
-
-var _hclstrtok_trans_keys []byte = []byte{
- 128, 191, 128, 191, 128, 191, 10, 13,
- 36, 37, 10, 13, 36, 37, 10, 13,
- 36, 37, 10, 13, 36, 37, 10, 13,
- 36, 37, 123, 10, 13, 36, 37, 10,
- 13, 36, 37, 92, 10, 13, 36, 37,
- 92, 10, 13, 36, 37, 92, 10, 13,
- 36, 37, 92, 10, 13, 36, 37, 92,
- 123, 10, 13, 36, 37, 92, 85, 117,
- 128, 191, 192, 223, 224, 239, 240, 247,
- 248, 255, 10, 13, 36, 37, 92, 48,
- 57, 65, 70, 97, 102, 10, 13, 36,
- 37, 92, 48, 57, 65, 70, 97, 102,
- 10, 13, 36, 37, 92, 48, 57, 65,
- 70, 97, 102, 10, 13, 36, 37, 92,
- 48, 57, 65, 70, 97, 102, 10, 13,
- 36, 37, 92, 48, 57, 65, 70, 97,
- 102, 10, 13, 36, 37, 92, 48, 57,
- 65, 70, 97, 102, 10, 13, 36, 37,
- 92, 48, 57, 65, 70, 97, 102, 10,
- 13, 36, 37, 92, 48, 57, 65, 70,
- 97, 102,
-}
-
-var _hclstrtok_single_lengths []byte = []byte{
- 0, 0, 0, 0, 4, 4, 4, 4,
- 5, 4, 5, 5, 5, 5, 6, 5,
- 2, 5, 5, 5, 5, 5, 5, 5,
- 5,
-}
-
-var _hclstrtok_range_lengths []byte = []byte{
- 0, 1, 1, 1, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 5, 3, 3, 3, 3, 3, 3, 3,
- 3,
-}
-
-var _hclstrtok_index_offsets []byte = []byte{
- 0, 0, 2, 4, 6, 11, 16, 21,
- 26, 32, 37, 43, 49, 55, 61, 68,
- 74, 82, 91, 100, 109, 118, 127, 136,
- 145,
-}
-
-var _hclstrtok_indicies []byte = []byte{
- 0, 1, 2, 1, 3, 1, 5, 6,
- 7, 8, 4, 10, 11, 12, 13, 9,
- 14, 11, 12, 13, 9, 10, 11, 15,
- 13, 9, 10, 11, 12, 13, 14, 9,
- 10, 11, 12, 15, 9, 17, 18, 19,
- 20, 21, 16, 23, 24, 25, 26, 27,
- 22, 0, 24, 25, 26, 27, 22, 23,
- 24, 28, 26, 27, 22, 23, 24, 25,
- 26, 27, 0, 22, 23, 24, 25, 28,
- 27, 22, 29, 30, 22, 2, 3, 31,
- 22, 0, 23, 24, 25, 26, 27, 32,
- 32, 32, 22, 23, 24, 25, 26, 27,
- 33, 33, 33, 22, 23, 24, 25, 26,
- 27, 34, 34, 34, 22, 23, 24, 25,
- 26, 27, 30, 30, 30, 22, 23, 24,
- 25, 26, 27, 35, 35, 35, 22, 23,
- 24, 25, 26, 27, 36, 36, 36, 22,
- 23, 24, 25, 26, 27, 37, 37, 37,
- 22, 23, 24, 25, 26, 27, 0, 0,
- 0, 22,
-}
-
-var _hclstrtok_trans_targs []byte = []byte{
- 11, 0, 1, 2, 4, 5, 6, 7,
- 9, 4, 5, 6, 7, 9, 5, 8,
- 10, 11, 12, 13, 15, 16, 10, 11,
- 12, 13, 15, 16, 14, 17, 21, 3,
- 18, 19, 20, 22, 23, 24,
-}
-
-var _hclstrtok_trans_actions []byte = []byte{
- 0, 0, 0, 0, 0, 1, 1, 1,
- 1, 3, 5, 5, 5, 5, 0, 0,
- 0, 1, 1, 1, 1, 1, 3, 5,
- 5, 5, 5, 5, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
-}
-
-var _hclstrtok_eof_actions []byte = []byte{
- 0, 0, 0, 0, 0, 3, 3, 3,
- 3, 3, 0, 3, 3, 3, 3, 3,
- 3, 3, 3, 3, 3, 3, 3, 3,
- 3,
-}
-
-const hclstrtok_start int = 4
-const hclstrtok_first_final int = 4
-const hclstrtok_error int = 0
-
-const hclstrtok_en_quoted int = 10
-const hclstrtok_en_unquoted int = 4
-
-//line scan_string_lit.rl:10
-
-func scanStringLit(data []byte, quoted bool) [][]byte {
- var ret [][]byte
-
-//line scan_string_lit.rl:61
-
- // Ragel state
- p := 0 // "Pointer" into data
- pe := len(data) // End-of-data "pointer"
- ts := 0
- te := 0
- eof := pe
-
- var cs int // current state
- switch {
- case quoted:
- cs = hclstrtok_en_quoted
- default:
- cs = hclstrtok_en_unquoted
- }
-
- // Make Go compiler happy
- _ = ts
- _ = eof
-
- /*token := func () {
- ret = append(ret, data[ts:te])
- }*/
-
-//line scan_string_lit.go:154
- {
- }
-
-//line scan_string_lit.go:158
- {
- var _klen int
- var _trans int
- var _acts int
- var _nacts uint
- var _keys int
- if p == pe {
- goto _test_eof
- }
- if cs == 0 {
- goto _out
- }
- _resume:
- _keys = int(_hclstrtok_key_offsets[cs])
- _trans = int(_hclstrtok_index_offsets[cs])
-
- _klen = int(_hclstrtok_single_lengths[cs])
- if _klen > 0 {
- _lower := int(_keys)
- var _mid int
- _upper := int(_keys + _klen - 1)
- for {
- if _upper < _lower {
- break
- }
-
- _mid = _lower + ((_upper - _lower) >> 1)
- switch {
- case data[p] < _hclstrtok_trans_keys[_mid]:
- _upper = _mid - 1
- case data[p] > _hclstrtok_trans_keys[_mid]:
- _lower = _mid + 1
- default:
- _trans += int(_mid - int(_keys))
- goto _match
- }
- }
- _keys += _klen
- _trans += _klen
- }
-
- _klen = int(_hclstrtok_range_lengths[cs])
- if _klen > 0 {
- _lower := int(_keys)
- var _mid int
- _upper := int(_keys + (_klen << 1) - 2)
- for {
- if _upper < _lower {
- break
- }
-
- _mid = _lower + (((_upper - _lower) >> 1) & ^1)
- switch {
- case data[p] < _hclstrtok_trans_keys[_mid]:
- _upper = _mid - 2
- case data[p] > _hclstrtok_trans_keys[_mid+1]:
- _lower = _mid + 2
- default:
- _trans += int((_mid - int(_keys)) >> 1)
- goto _match
- }
- }
- _trans += _klen
- }
-
- _match:
- _trans = int(_hclstrtok_indicies[_trans])
- cs = int(_hclstrtok_trans_targs[_trans])
-
- if _hclstrtok_trans_actions[_trans] == 0 {
- goto _again
- }
-
- _acts = int(_hclstrtok_trans_actions[_trans])
- _nacts = uint(_hclstrtok_actions[_acts])
- _acts++
- for ; _nacts > 0; _nacts-- {
- _acts++
- switch _hclstrtok_actions[_acts-1] {
- case 0:
-//line scan_string_lit.rl:40
-
- // If te is behind p then we've skipped over some literal
- // characters which we must now return.
- if te < p {
- ret = append(ret, data[te:p])
- }
- ts = p
-
- case 1:
-//line scan_string_lit.rl:48
-
- te = p
- ret = append(ret, data[ts:te])
-
-//line scan_string_lit.go:253
- }
- }
-
- _again:
- if cs == 0 {
- goto _out
- }
- p++
- if p != pe {
- goto _resume
- }
- _test_eof:
- {
- }
- if p == eof {
- __acts := _hclstrtok_eof_actions[cs]
- __nacts := uint(_hclstrtok_actions[__acts])
- __acts++
- for ; __nacts > 0; __nacts-- {
- __acts++
- switch _hclstrtok_actions[__acts-1] {
- case 1:
-//line scan_string_lit.rl:48
-
- te = p
- ret = append(ret, data[ts:te])
-
-//line scan_string_lit.go:278
- }
- }
- }
-
- _out:
- {
- }
- }
-
-//line scan_string_lit.rl:89
-
- if te < p {
- // Collect any leftover literal characters at the end of the input
- ret = append(ret, data[te:p])
- }
-
- // If we fall out here without being in a final state then we've
- // encountered something that the scanner can't match, which should
- // be impossible (the scanner matches all bytes _somehow_) but we'll
- // tolerate it and let the caller deal with it.
- if cs < hclstrtok_first_final {
- ret = append(ret, data[p:len(data)])
- }
-
- return ret
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl
deleted file mode 100644
index f8ac11751..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_string_lit.rl
+++ /dev/null
@@ -1,105 +0,0 @@
-
-package hclsyntax
-
-// This file is generated from scan_string_lit.rl. DO NOT EDIT.
-%%{
- # (except you are actually in scan_string_lit.rl here, so edit away!)
-
- machine hclstrtok;
- write data;
-}%%
-
-func scanStringLit(data []byte, quoted bool) [][]byte {
- var ret [][]byte
-
- %%{
- include UnicodeDerived "unicode_derived.rl";
-
- UTF8Cont = 0x80 .. 0xBF;
- AnyUTF8 = (
- 0x00..0x7F |
- 0xC0..0xDF . UTF8Cont |
- 0xE0..0xEF . UTF8Cont . UTF8Cont |
- 0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
- );
- BadUTF8 = any - AnyUTF8;
-
- Hex = ('0'..'9' | 'a'..'f' | 'A'..'F');
-
-	# Our goal with these patterns is to capture user intent as best as
- # possible, even if the input is invalid. The caller will then verify
- # whether each token is valid and generate suitable error messages
- # if not.
- UnicodeEscapeShort = "\\u" . Hex{0,4};
- UnicodeEscapeLong = "\\U" . Hex{0,8};
- UnicodeEscape = (UnicodeEscapeShort | UnicodeEscapeLong);
- SimpleEscape = "\\" . (AnyUTF8 - ('U'|'u'))?;
- TemplateEscape = ("$" . ("$" . ("{"?))?) | ("%" . ("%" . ("{"?))?);
- Newline = ("\r\n" | "\r" | "\n");
-
- action Begin {
- // If te is behind p then we've skipped over some literal
- // characters which we must now return.
- if te < p {
- ret = append(ret, data[te:p])
- }
- ts = p;
- }
- action End {
- te = p;
- ret = append(ret, data[ts:te]);
- }
-
- QuotedToken = (UnicodeEscape | SimpleEscape | TemplateEscape | Newline) >Begin %End;
- UnquotedToken = (TemplateEscape | Newline) >Begin %End;
- QuotedLiteral = (any - ("\\" | "$" | "%" | "\r" | "\n"));
- UnquotedLiteral = (any - ("$" | "%" | "\r" | "\n"));
-
- quoted := (QuotedToken | QuotedLiteral)**;
- unquoted := (UnquotedToken | UnquotedLiteral)**;
-
- }%%
-
- // Ragel state
- p := 0 // "Pointer" into data
- pe := len(data) // End-of-data "pointer"
- ts := 0
- te := 0
- eof := pe
-
- var cs int // current state
- switch {
- case quoted:
- cs = hclstrtok_en_quoted
- default:
- cs = hclstrtok_en_unquoted
- }
-
- // Make Go compiler happy
- _ = ts
- _ = eof
-
- /*token := func () {
- ret = append(ret, data[ts:te])
- }*/
-
- %%{
- write init nocs;
- write exec;
- }%%
-
- if te < p {
- // Collect any leftover literal characters at the end of the input
- ret = append(ret, data[te:p])
- }
-
- // If we fall out here without being in a final state then we've
- // encountered something that the scanner can't match, which should
- // be impossible (the scanner matches all bytes _somehow_) but we'll
- // tolerate it and let the caller deal with it.
- if cs < hclstrtok_first_final {
- ret = append(ret, data[p:len(data)])
- }
-
- return ret
-}
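
The Ragel scanner above splits raw string-literal bytes at escape sequences, template introducers, and newlines so the parser can decode each piece separately. The low-level scanStringLit is unexported, but its effect is visible in the public token stream: a quoted string containing an interpolation lexes into separate literal and template tokens. A hedged sketch; the string content is invented.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	src := []byte(`greeting = "hi ${name}"` + "\n")
	tokens, diags := hclsyntax.LexConfig(src, "x.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		panic(diags.Error())
	}
	// Expect, roughly: TokenIdent, TokenEqual, TokenOQuote, TokenQuotedLit,
	// TokenTemplateInterp, TokenIdent, TokenTemplateSeqEnd, TokenCQuote,
	// TokenNewline, TokenEOF.
	for _, tok := range tokens {
		fmt.Printf("%v %q\n", tok.Type, tok.Bytes)
	}
}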
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go
deleted file mode 100644
index 581e35e00..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.go
+++ /dev/null
@@ -1,5265 +0,0 @@
-//line scan_tokens.rl:1
-
-package hclsyntax
-
-import (
- "bytes"
-
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// This file is generated from scan_tokens.rl. DO NOT EDIT.
-
-//line scan_tokens.go:15
-var _hcltok_actions []byte = []byte{
- 0, 1, 0, 1, 1, 1, 3, 1, 4,
- 1, 7, 1, 8, 1, 9, 1, 10,
- 1, 11, 1, 12, 1, 13, 1, 14,
- 1, 15, 1, 16, 1, 17, 1, 18,
- 1, 19, 1, 20, 1, 23, 1, 24,
- 1, 25, 1, 26, 1, 27, 1, 28,
- 1, 29, 1, 30, 1, 31, 1, 32,
- 1, 35, 1, 36, 1, 37, 1, 38,
- 1, 39, 1, 40, 1, 41, 1, 42,
- 1, 43, 1, 44, 1, 47, 1, 48,
- 1, 49, 1, 50, 1, 51, 1, 52,
- 1, 53, 1, 56, 1, 57, 1, 58,
- 1, 59, 1, 60, 1, 61, 1, 62,
- 1, 63, 1, 64, 1, 65, 1, 66,
- 1, 67, 1, 68, 1, 69, 1, 70,
- 1, 71, 1, 72, 1, 73, 1, 74,
- 1, 75, 1, 76, 1, 77, 1, 78,
- 1, 79, 1, 80, 1, 81, 1, 82,
- 1, 83, 1, 84, 1, 85, 2, 0,
- 14, 2, 0, 25, 2, 0, 29, 2,
- 0, 37, 2, 0, 41, 2, 1, 2,
- 2, 4, 5, 2, 4, 6, 2, 4,
- 21, 2, 4, 22, 2, 4, 33, 2,
- 4, 34, 2, 4, 45, 2, 4, 46,
- 2, 4, 54, 2, 4, 55,
-}
-
-var _hcltok_key_offsets []int16 = []int16{
- 0, 0, 1, 2, 4, 9, 13, 15,
- 57, 98, 144, 145, 149, 155, 155, 157,
- 159, 168, 174, 181, 182, 185, 186, 190,
- 195, 204, 208, 212, 220, 222, 224, 226,
- 229, 261, 263, 265, 269, 273, 276, 287,
- 300, 319, 332, 348, 360, 376, 391, 412,
- 422, 434, 445, 459, 474, 484, 496, 505,
- 517, 519, 523, 544, 553, 563, 569, 575,
- 576, 625, 627, 631, 633, 639, 646, 654,
- 661, 664, 670, 674, 678, 680, 684, 688,
- 692, 698, 706, 714, 720, 722, 726, 728,
- 734, 738, 742, 746, 750, 755, 762, 768,
- 770, 772, 776, 778, 784, 788, 792, 802,
- 807, 821, 836, 838, 846, 848, 853, 867,
- 872, 874, 878, 879, 883, 889, 895, 905,
- 915, 926, 934, 937, 940, 944, 948, 950,
- 953, 953, 956, 958, 988, 990, 992, 996,
- 1001, 1005, 1010, 1012, 1014, 1016, 1025, 1029,
- 1033, 1039, 1041, 1049, 1057, 1069, 1072, 1078,
- 1082, 1084, 1088, 1108, 1110, 1112, 1123, 1129,
- 1131, 1133, 1135, 1139, 1145, 1151, 1153, 1158,
- 1162, 1164, 1172, 1190, 1230, 1240, 1244, 1246,
- 1248, 1249, 1253, 1257, 1261, 1265, 1269, 1274,
- 1278, 1282, 1286, 1288, 1290, 1294, 1304, 1308,
- 1310, 1314, 1318, 1322, 1335, 1337, 1339, 1343,
- 1345, 1349, 1351, 1353, 1383, 1387, 1391, 1395,
- 1398, 1405, 1410, 1421, 1425, 1441, 1455, 1459,
- 1464, 1468, 1472, 1478, 1480, 1486, 1488, 1492,
- 1494, 1500, 1505, 1510, 1520, 1522, 1524, 1528,
- 1532, 1534, 1547, 1549, 1553, 1557, 1565, 1567,
- 1571, 1573, 1574, 1577, 1582, 1584, 1586, 1590,
- 1592, 1596, 1602, 1622, 1628, 1634, 1636, 1637,
- 1647, 1648, 1656, 1663, 1665, 1668, 1670, 1672,
- 1674, 1679, 1683, 1687, 1692, 1702, 1712, 1716,
- 1720, 1734, 1760, 1770, 1772, 1774, 1777, 1779,
- 1782, 1784, 1788, 1790, 1791, 1795, 1797, 1800,
- 1807, 1815, 1817, 1819, 1823, 1825, 1831, 1842,
- 1845, 1847, 1851, 1856, 1886, 1891, 1893, 1896,
- 1901, 1915, 1922, 1936, 1941, 1954, 1958, 1971,
- 1976, 1994, 1995, 2004, 2008, 2020, 2025, 2032,
- 2039, 2046, 2048, 2052, 2074, 2079, 2080, 2084,
- 2086, 2136, 2139, 2150, 2154, 2156, 2162, 2168,
- 2170, 2175, 2177, 2181, 2183, 2184, 2186, 2188,
- 2194, 2196, 2198, 2202, 2208, 2221, 2223, 2229,
- 2233, 2241, 2252, 2260, 2263, 2293, 2299, 2302,
- 2307, 2309, 2313, 2317, 2321, 2323, 2330, 2332,
- 2341, 2348, 2356, 2358, 2378, 2390, 2394, 2396,
- 2414, 2453, 2455, 2459, 2461, 2468, 2472, 2500,
- 2502, 2504, 2506, 2508, 2511, 2513, 2517, 2521,
- 2523, 2526, 2528, 2530, 2533, 2535, 2537, 2538,
- 2540, 2542, 2546, 2550, 2553, 2566, 2568, 2574,
- 2578, 2580, 2584, 2588, 2602, 2605, 2614, 2616,
- 2620, 2626, 2626, 2628, 2630, 2639, 2645, 2652,
- 2653, 2656, 2657, 2661, 2666, 2675, 2679, 2683,
- 2691, 2693, 2695, 2697, 2700, 2732, 2734, 2736,
- 2740, 2744, 2747, 2758, 2771, 2790, 2803, 2819,
- 2831, 2847, 2862, 2883, 2893, 2905, 2916, 2930,
- 2945, 2955, 2967, 2976, 2988, 2990, 2994, 3015,
- 3024, 3034, 3040, 3046, 3047, 3096, 3098, 3102,
- 3104, 3110, 3117, 3125, 3132, 3135, 3141, 3145,
- 3149, 3151, 3155, 3159, 3163, 3169, 3177, 3185,
- 3191, 3193, 3197, 3199, 3205, 3209, 3213, 3217,
- 3221, 3226, 3233, 3239, 3241, 3243, 3247, 3249,
- 3255, 3259, 3263, 3273, 3278, 3292, 3307, 3309,
- 3317, 3319, 3324, 3338, 3343, 3345, 3349, 3350,
- 3354, 3360, 3366, 3376, 3386, 3397, 3405, 3408,
- 3411, 3415, 3419, 3421, 3424, 3424, 3427, 3429,
- 3459, 3461, 3463, 3467, 3472, 3476, 3481, 3483,
- 3485, 3487, 3496, 3500, 3504, 3510, 3512, 3520,
- 3528, 3540, 3543, 3549, 3553, 3555, 3559, 3579,
- 3581, 3583, 3594, 3600, 3602, 3604, 3606, 3610,
- 3616, 3622, 3624, 3629, 3633, 3635, 3643, 3661,
- 3701, 3711, 3715, 3717, 3719, 3720, 3724, 3728,
- 3732, 3736, 3740, 3745, 3749, 3753, 3757, 3759,
- 3761, 3765, 3775, 3779, 3781, 3785, 3789, 3793,
- 3806, 3808, 3810, 3814, 3816, 3820, 3822, 3824,
- 3854, 3858, 3862, 3866, 3869, 3876, 3881, 3892,
- 3896, 3912, 3926, 3930, 3935, 3939, 3943, 3949,
- 3951, 3957, 3959, 3963, 3965, 3971, 3976, 3981,
- 3991, 3993, 3995, 3999, 4003, 4005, 4018, 4020,
- 4024, 4028, 4036, 4038, 4042, 4044, 4045, 4048,
- 4053, 4055, 4057, 4061, 4063, 4067, 4073, 4093,
- 4099, 4105, 4107, 4108, 4118, 4119, 4127, 4134,
- 4136, 4139, 4141, 4143, 4145, 4150, 4154, 4158,
- 4163, 4173, 4183, 4187, 4191, 4205, 4231, 4241,
- 4243, 4245, 4248, 4250, 4253, 4255, 4259, 4261,
- 4262, 4266, 4268, 4270, 4277, 4281, 4288, 4295,
- 4304, 4320, 4332, 4350, 4361, 4373, 4381, 4399,
- 4407, 4437, 4440, 4450, 4460, 4472, 4483, 4492,
- 4505, 4517, 4521, 4527, 4554, 4563, 4566, 4571,
- 4577, 4582, 4603, 4607, 4613, 4613, 4620, 4629,
- 4637, 4640, 4644, 4650, 4656, 4659, 4663, 4670,
- 4676, 4685, 4694, 4698, 4702, 4706, 4710, 4717,
- 4721, 4725, 4735, 4741, 4745, 4751, 4755, 4758,
- 4764, 4770, 4782, 4786, 4790, 4800, 4804, 4815,
- 4817, 4819, 4823, 4835, 4840, 4864, 4868, 4874,
- 4896, 4905, 4909, 4912, 4913, 4921, 4929, 4935,
- 4945, 4952, 4970, 4973, 4976, 4984, 4990, 4994,
- 4998, 5002, 5008, 5016, 5021, 5027, 5031, 5039,
- 5046, 5050, 5057, 5063, 5071, 5079, 5085, 5091,
- 5102, 5106, 5118, 5127, 5144, 5161, 5164, 5168,
- 5170, 5176, 5178, 5182, 5197, 5201, 5205, 5209,
- 5213, 5217, 5219, 5225, 5230, 5234, 5240, 5247,
- 5250, 5268, 5270, 5315, 5321, 5327, 5331, 5335,
- 5341, 5345, 5351, 5357, 5364, 5366, 5372, 5378,
- 5382, 5386, 5394, 5407, 5413, 5420, 5428, 5434,
- 5443, 5449, 5453, 5458, 5462, 5470, 5474, 5478,
- 5508, 5514, 5520, 5526, 5532, 5539, 5545, 5552,
- 5557, 5567, 5571, 5578, 5584, 5588, 5595, 5599,
- 5605, 5608, 5612, 5616, 5620, 5624, 5629, 5634,
- 5638, 5649, 5653, 5657, 5663, 5671, 5675, 5692,
- 5696, 5702, 5712, 5718, 5724, 5727, 5732, 5741,
- 5745, 5749, 5755, 5759, 5765, 5773, 5791, 5792,
- 5802, 5803, 5812, 5820, 5822, 5825, 5827, 5829,
- 5831, 5836, 5849, 5853, 5868, 5897, 5908, 5910,
- 5914, 5918, 5923, 5927, 5929, 5936, 5940, 5948,
- 5952, 5964, 5966, 5968, 5970, 5972, 5974, 5975,
- 5977, 5979, 5981, 5983, 5985, 5986, 5988, 5990,
- 5992, 5994, 5996, 6000, 6006, 6006, 6008, 6010,
- 6019, 6025, 6032, 6033, 6036, 6037, 6041, 6046,
- 6055, 6059, 6063, 6071, 6073, 6075, 6077, 6080,
- 6112, 6114, 6116, 6120, 6124, 6127, 6138, 6151,
- 6170, 6183, 6199, 6211, 6227, 6242, 6263, 6273,
- 6285, 6296, 6310, 6325, 6335, 6347, 6356, 6368,
- 6370, 6374, 6395, 6404, 6414, 6420, 6426, 6427,
- 6476, 6478, 6482, 6484, 6490, 6497, 6505, 6512,
- 6515, 6521, 6525, 6529, 6531, 6535, 6539, 6543,
- 6549, 6557, 6565, 6571, 6573, 6577, 6579, 6585,
- 6589, 6593, 6597, 6601, 6606, 6613, 6619, 6621,
- 6623, 6627, 6629, 6635, 6639, 6643, 6653, 6658,
- 6672, 6687, 6689, 6697, 6699, 6704, 6718, 6723,
- 6725, 6729, 6730, 6734, 6740, 6746, 6756, 6766,
- 6777, 6785, 6788, 6791, 6795, 6799, 6801, 6804,
- 6804, 6807, 6809, 6839, 6841, 6843, 6847, 6852,
- 6856, 6861, 6863, 6865, 6867, 6876, 6880, 6884,
- 6890, 6892, 6900, 6908, 6920, 6923, 6929, 6933,
- 6935, 6939, 6959, 6961, 6963, 6974, 6980, 6982,
- 6984, 6986, 6990, 6996, 7002, 7004, 7009, 7013,
- 7015, 7023, 7041, 7081, 7091, 7095, 7097, 7099,
- 7100, 7104, 7108, 7112, 7116, 7120, 7125, 7129,
- 7133, 7137, 7139, 7141, 7145, 7155, 7159, 7161,
- 7165, 7169, 7173, 7186, 7188, 7190, 7194, 7196,
- 7200, 7202, 7204, 7234, 7238, 7242, 7246, 7249,
- 7256, 7261, 7272, 7276, 7292, 7306, 7310, 7315,
- 7319, 7323, 7329, 7331, 7337, 7339, 7343, 7345,
- 7351, 7356, 7361, 7371, 7373, 7375, 7379, 7383,
- 7385, 7398, 7400, 7404, 7408, 7416, 7418, 7422,
- 7424, 7425, 7428, 7433, 7435, 7437, 7441, 7443,
- 7447, 7453, 7473, 7479, 7485, 7487, 7488, 7498,
- 7499, 7507, 7514, 7516, 7519, 7521, 7523, 7525,
- 7530, 7534, 7538, 7543, 7553, 7563, 7567, 7571,
- 7585, 7611, 7621, 7623, 7625, 7628, 7630, 7633,
- 7635, 7639, 7641, 7642, 7646, 7648, 7650, 7657,
- 7661, 7668, 7675, 7684, 7700, 7712, 7730, 7741,
- 7753, 7761, 7779, 7787, 7817, 7820, 7830, 7840,
- 7852, 7863, 7872, 7885, 7897, 7901, 7907, 7934,
- 7943, 7946, 7951, 7957, 7962, 7983, 7987, 7993,
- 7993, 8000, 8009, 8017, 8020, 8024, 8030, 8036,
- 8039, 8043, 8050, 8056, 8065, 8074, 8078, 8082,
- 8086, 8090, 8097, 8101, 8105, 8115, 8121, 8125,
- 8131, 8135, 8138, 8144, 8150, 8162, 8166, 8170,
- 8180, 8184, 8195, 8197, 8199, 8203, 8215, 8220,
- 8244, 8248, 8254, 8276, 8285, 8289, 8292, 8293,
- 8301, 8309, 8315, 8325, 8332, 8350, 8353, 8356,
- 8364, 8370, 8374, 8378, 8382, 8388, 8396, 8401,
- 8407, 8411, 8419, 8426, 8430, 8437, 8443, 8451,
- 8459, 8465, 8471, 8482, 8486, 8498, 8507, 8524,
- 8541, 8544, 8548, 8550, 8556, 8558, 8562, 8577,
- 8581, 8585, 8589, 8593, 8597, 8599, 8605, 8610,
- 8614, 8620, 8627, 8630, 8648, 8650, 8695, 8701,
- 8707, 8711, 8715, 8721, 8725, 8731, 8737, 8744,
- 8746, 8752, 8758, 8762, 8766, 8774, 8787, 8793,
- 8800, 8808, 8814, 8823, 8829, 8833, 8838, 8842,
- 8850, 8854, 8858, 8888, 8894, 8900, 8906, 8912,
- 8919, 8925, 8932, 8937, 8947, 8951, 8958, 8964,
- 8968, 8975, 8979, 8985, 8988, 8992, 8996, 9000,
- 9004, 9009, 9014, 9018, 9029, 9033, 9037, 9043,
- 9051, 9055, 9072, 9076, 9082, 9092, 9098, 9104,
- 9107, 9112, 9121, 9125, 9129, 9135, 9139, 9145,
- 9153, 9171, 9172, 9182, 9183, 9192, 9200, 9202,
- 9205, 9207, 9209, 9211, 9216, 9229, 9233, 9248,
- 9277, 9288, 9290, 9294, 9298, 9303, 9307, 9309,
- 9316, 9320, 9328, 9332, 9407, 9409, 9410, 9411,
- 9412, 9413, 9414, 9416, 9421, 9423, 9425, 9426,
- 9470, 9471, 9472, 9474, 9479, 9483, 9483, 9485,
- 9487, 9498, 9508, 9516, 9517, 9519, 9520, 9524,
- 9528, 9538, 9542, 9549, 9560, 9567, 9571, 9577,
- 9588, 9620, 9669, 9684, 9699, 9704, 9706, 9711,
- 9743, 9751, 9753, 9775, 9797, 9799, 9815, 9831,
- 9833, 9835, 9835, 9836, 9837, 9838, 9840, 9841,
- 9853, 9855, 9857, 9859, 9873, 9887, 9889, 9892,
- 9895, 9897, 9898, 9899, 9901, 9903, 9905, 9919,
- 9933, 9935, 9938, 9941, 9943, 9944, 9945, 9947,
- 9949, 9951, 10000, 10044, 10046, 10051, 10055, 10055,
- 10057, 10059, 10070, 10080, 10088, 10089, 10091, 10092,
- 10096, 10100, 10110, 10114, 10121, 10132, 10139, 10143,
- 10149, 10160, 10192, 10241, 10256, 10271, 10276, 10278,
- 10283, 10315, 10323, 10325, 10347, 10369,
-}
-
-var _hcltok_trans_keys []byte = []byte{
- 46, 42, 42, 47, 46, 69, 101, 48,
- 57, 43, 45, 48, 57, 48, 57, 45,
- 95, 194, 195, 198, 199, 203, 205, 206,
- 207, 210, 212, 213, 214, 215, 216, 217,
- 219, 220, 221, 222, 223, 224, 225, 226,
- 227, 228, 233, 234, 237, 239, 240, 65,
- 90, 97, 122, 196, 202, 208, 218, 229,
- 236, 95, 194, 195, 198, 199, 203, 205,
- 206, 207, 210, 212, 213, 214, 215, 216,
- 217, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 233, 234, 237, 239, 240,
- 65, 90, 97, 122, 196, 202, 208, 218,
- 229, 236, 10, 13, 45, 95, 194, 195,
- 198, 199, 203, 204, 205, 206, 207, 210,
- 212, 213, 214, 215, 216, 217, 219, 220,
- 221, 222, 223, 224, 225, 226, 227, 228,
- 233, 234, 237, 239, 240, 243, 48, 57,
- 65, 90, 97, 122, 196, 218, 229, 236,
- 10, 170, 181, 183, 186, 128, 150, 152,
- 182, 184, 255, 192, 255, 128, 255, 173,
- 130, 133, 146, 159, 165, 171, 175, 255,
- 181, 190, 184, 185, 192, 255, 140, 134,
- 138, 142, 161, 163, 255, 182, 130, 136,
- 137, 176, 151, 152, 154, 160, 190, 136,
- 144, 192, 255, 135, 129, 130, 132, 133,
- 144, 170, 176, 178, 144, 154, 160, 191,
- 128, 169, 174, 255, 148, 169, 157, 158,
- 189, 190, 192, 255, 144, 255, 139, 140,
- 178, 255, 186, 128, 181, 160, 161, 162,
- 163, 164, 165, 166, 167, 168, 169, 170,
- 171, 172, 173, 174, 175, 176, 177, 178,
- 179, 180, 181, 182, 183, 184, 185, 186,
- 187, 188, 189, 190, 191, 128, 173, 128,
- 155, 160, 180, 182, 189, 148, 161, 163,
- 255, 176, 164, 165, 132, 169, 177, 141,
- 142, 145, 146, 179, 181, 186, 187, 158,
- 133, 134, 137, 138, 143, 150, 152, 155,
- 164, 165, 178, 255, 188, 129, 131, 133,
- 138, 143, 144, 147, 168, 170, 176, 178,
- 179, 181, 182, 184, 185, 190, 255, 157,
- 131, 134, 137, 138, 142, 144, 146, 152,
- 159, 165, 182, 255, 129, 131, 133, 141,
- 143, 145, 147, 168, 170, 176, 178, 179,
- 181, 185, 188, 255, 134, 138, 142, 143,
- 145, 159, 164, 165, 176, 184, 186, 255,
- 129, 131, 133, 140, 143, 144, 147, 168,
- 170, 176, 178, 179, 181, 185, 188, 191,
- 177, 128, 132, 135, 136, 139, 141, 150,
- 151, 156, 157, 159, 163, 166, 175, 156,
- 130, 131, 133, 138, 142, 144, 146, 149,
- 153, 154, 158, 159, 163, 164, 168, 170,
- 174, 185, 190, 191, 144, 151, 128, 130,
- 134, 136, 138, 141, 166, 175, 128, 131,
- 133, 140, 142, 144, 146, 168, 170, 185,
- 189, 255, 133, 137, 151, 142, 148, 155,
- 159, 164, 165, 176, 255, 128, 131, 133,
- 140, 142, 144, 146, 168, 170, 179, 181,
- 185, 188, 191, 158, 128, 132, 134, 136,
- 138, 141, 149, 150, 160, 163, 166, 175,
- 177, 178, 129, 131, 133, 140, 142, 144,
- 146, 186, 189, 255, 133, 137, 143, 147,
- 152, 158, 164, 165, 176, 185, 192, 255,
- 189, 130, 131, 133, 150, 154, 177, 179,
- 187, 138, 150, 128, 134, 143, 148, 152,
- 159, 166, 175, 178, 179, 129, 186, 128,
- 142, 144, 153, 132, 138, 141, 165, 167,
- 129, 130, 135, 136, 148, 151, 153, 159,
- 161, 163, 170, 171, 173, 185, 187, 189,
- 134, 128, 132, 136, 141, 144, 153, 156,
- 159, 128, 181, 183, 185, 152, 153, 160,
- 169, 190, 191, 128, 135, 137, 172, 177,
- 191, 128, 132, 134, 151, 153, 188, 134,
- 128, 129, 130, 131, 137, 138, 139, 140,
- 141, 142, 143, 144, 153, 154, 155, 156,
- 157, 158, 159, 160, 161, 162, 163, 164,
- 165, 166, 167, 168, 169, 170, 173, 175,
- 176, 177, 178, 179, 181, 182, 183, 188,
- 189, 190, 191, 132, 152, 172, 184, 185,
- 187, 128, 191, 128, 137, 144, 255, 158,
- 159, 134, 187, 136, 140, 142, 143, 137,
- 151, 153, 142, 143, 158, 159, 137, 177,
- 142, 143, 182, 183, 191, 255, 128, 130,
- 133, 136, 150, 152, 255, 145, 150, 151,
- 155, 156, 160, 168, 178, 255, 128, 143,
- 160, 255, 182, 183, 190, 255, 129, 255,
- 173, 174, 192, 255, 129, 154, 160, 255,
- 171, 173, 185, 255, 128, 140, 142, 148,
- 160, 180, 128, 147, 160, 172, 174, 176,
- 178, 179, 148, 150, 152, 155, 158, 159,
- 170, 255, 139, 141, 144, 153, 160, 255,
- 184, 255, 128, 170, 176, 255, 182, 255,
- 128, 158, 160, 171, 176, 187, 134, 173,
- 176, 180, 128, 171, 176, 255, 138, 143,
- 155, 255, 128, 155, 160, 255, 159, 189,
- 190, 192, 255, 167, 128, 137, 144, 153,
- 176, 189, 140, 143, 154, 170, 180, 255,
- 180, 255, 128, 183, 128, 137, 141, 189,
- 128, 136, 144, 146, 148, 182, 184, 185,
- 128, 181, 187, 191, 150, 151, 158, 159,
- 152, 154, 156, 158, 134, 135, 142, 143,
- 190, 255, 190, 128, 180, 182, 188, 130,
- 132, 134, 140, 144, 147, 150, 155, 160,
- 172, 178, 180, 182, 188, 128, 129, 130,
- 131, 132, 133, 134, 176, 177, 178, 179,
- 180, 181, 182, 183, 191, 255, 129, 147,
- 149, 176, 178, 190, 192, 255, 144, 156,
- 161, 144, 156, 165, 176, 130, 135, 149,
- 164, 166, 168, 138, 147, 152, 157, 170,
- 185, 188, 191, 142, 133, 137, 160, 255,
- 137, 255, 128, 174, 176, 255, 159, 165,
- 170, 180, 255, 167, 173, 128, 165, 176,
- 255, 168, 174, 176, 190, 192, 255, 128,
- 150, 160, 166, 168, 174, 176, 182, 184,
- 190, 128, 134, 136, 142, 144, 150, 152,
- 158, 160, 191, 128, 129, 130, 131, 132,
- 133, 134, 135, 144, 145, 255, 133, 135,
- 161, 175, 177, 181, 184, 188, 160, 151,
- 152, 187, 192, 255, 133, 173, 177, 255,
- 143, 159, 187, 255, 176, 191, 182, 183,
- 184, 191, 192, 255, 150, 255, 128, 146,
- 147, 148, 152, 153, 154, 155, 156, 158,
- 159, 160, 161, 162, 163, 164, 165, 166,
- 167, 168, 169, 170, 171, 172, 173, 174,
- 175, 176, 129, 255, 141, 255, 144, 189,
- 141, 143, 172, 255, 191, 128, 175, 180,
- 189, 151, 159, 162, 255, 175, 137, 138,
- 184, 255, 183, 255, 168, 255, 128, 179,
- 188, 134, 143, 154, 159, 184, 186, 190,
- 255, 128, 173, 176, 255, 148, 159, 189,
- 255, 129, 142, 154, 159, 191, 255, 128,
- 182, 128, 141, 144, 153, 160, 182, 186,
- 255, 128, 130, 155, 157, 160, 175, 178,
- 182, 129, 134, 137, 142, 145, 150, 160,
- 166, 168, 174, 176, 255, 155, 166, 175,
- 128, 170, 172, 173, 176, 185, 158, 159,
- 160, 255, 164, 175, 135, 138, 188, 255,
- 164, 169, 171, 172, 173, 174, 175, 180,
- 181, 182, 183, 184, 185, 187, 188, 189,
- 190, 191, 165, 186, 174, 175, 154, 255,
- 190, 128, 134, 147, 151, 157, 168, 170,
- 182, 184, 188, 128, 129, 131, 132, 134,
- 255, 147, 255, 190, 255, 144, 145, 136,
- 175, 188, 255, 128, 143, 160, 175, 179,
- 180, 141, 143, 176, 180, 182, 255, 189,
- 255, 191, 144, 153, 161, 186, 129, 154,
- 166, 255, 191, 255, 130, 135, 138, 143,
- 146, 151, 154, 156, 144, 145, 146, 147,
- 148, 150, 151, 152, 155, 157, 158, 160,
- 170, 171, 172, 175, 161, 169, 128, 129,
- 130, 131, 133, 135, 138, 139, 140, 141,
- 142, 143, 144, 145, 146, 147, 148, 149,
- 152, 156, 157, 160, 161, 162, 163, 164,
- 166, 168, 169, 170, 171, 172, 173, 174,
- 176, 177, 153, 155, 178, 179, 128, 139,
- 141, 166, 168, 186, 188, 189, 191, 255,
- 142, 143, 158, 255, 187, 255, 128, 180,
- 189, 128, 156, 160, 255, 145, 159, 161,
- 255, 128, 159, 176, 255, 139, 143, 187,
- 255, 128, 157, 160, 255, 144, 132, 135,
- 150, 255, 158, 159, 170, 175, 148, 151,
- 188, 255, 128, 167, 176, 255, 164, 255,
- 183, 255, 128, 149, 160, 167, 136, 188,
- 128, 133, 138, 181, 183, 184, 191, 255,
- 150, 159, 183, 255, 128, 158, 160, 178,
- 180, 181, 128, 149, 160, 185, 128, 183,
- 190, 191, 191, 128, 131, 133, 134, 140,
- 147, 149, 151, 153, 179, 184, 186, 160,
- 188, 128, 156, 128, 135, 137, 166, 128,
- 181, 128, 149, 160, 178, 128, 145, 128,
- 178, 129, 130, 131, 132, 133, 135, 136,
- 138, 139, 140, 141, 144, 145, 146, 147,
- 150, 151, 152, 153, 154, 155, 156, 162,
- 163, 171, 176, 177, 178, 128, 134, 135,
- 165, 176, 190, 144, 168, 176, 185, 128,
- 180, 182, 191, 182, 144, 179, 155, 133,
- 137, 141, 143, 157, 255, 190, 128, 145,
- 147, 183, 136, 128, 134, 138, 141, 143,
- 157, 159, 168, 176, 255, 171, 175, 186,
- 255, 128, 131, 133, 140, 143, 144, 147,
- 168, 170, 176, 178, 179, 181, 185, 188,
- 191, 144, 151, 128, 132, 135, 136, 139,
- 141, 157, 163, 166, 172, 176, 180, 128,
- 138, 144, 153, 134, 136, 143, 154, 255,
- 128, 181, 184, 255, 129, 151, 158, 255,
- 129, 131, 133, 143, 154, 255, 128, 137,
- 128, 153, 157, 171, 176, 185, 160, 255,
- 170, 190, 192, 255, 128, 184, 128, 136,
- 138, 182, 184, 191, 128, 144, 153, 178,
- 255, 168, 144, 145, 183, 255, 128, 142,
- 145, 149, 129, 141, 144, 146, 147, 148,
- 175, 255, 132, 255, 128, 144, 129, 143,
- 144, 153, 145, 152, 135, 255, 160, 168,
- 169, 171, 172, 173, 174, 188, 189, 190,
- 191, 161, 167, 185, 255, 128, 158, 160,
- 169, 144, 173, 176, 180, 128, 131, 144,
- 153, 163, 183, 189, 255, 144, 255, 133,
- 143, 191, 255, 143, 159, 160, 128, 129,
- 255, 159, 160, 171, 172, 255, 173, 255,
- 179, 255, 128, 176, 177, 178, 128, 129,
- 171, 175, 189, 255, 128, 136, 144, 153,
- 157, 158, 133, 134, 137, 144, 145, 146,
- 147, 148, 149, 154, 155, 156, 157, 158,
- 159, 168, 169, 170, 150, 153, 165, 169,
- 173, 178, 187, 255, 131, 132, 140, 169,
- 174, 255, 130, 132, 149, 157, 173, 186,
- 188, 160, 161, 163, 164, 167, 168, 132,
- 134, 149, 157, 186, 139, 140, 191, 255,
- 134, 128, 132, 138, 144, 146, 255, 166,
- 167, 129, 155, 187, 149, 181, 143, 175,
- 137, 169, 131, 140, 141, 192, 255, 128,
- 182, 187, 255, 173, 180, 182, 255, 132,
- 155, 159, 161, 175, 128, 160, 163, 164,
- 165, 184, 185, 186, 161, 162, 128, 134,
- 136, 152, 155, 161, 163, 164, 166, 170,
- 133, 143, 151, 255, 139, 143, 154, 255,
- 164, 167, 185, 187, 128, 131, 133, 159,
- 161, 162, 169, 178, 180, 183, 130, 135,
- 137, 139, 148, 151, 153, 155, 157, 159,
- 164, 190, 141, 143, 145, 146, 161, 162,
- 167, 170, 172, 178, 180, 183, 185, 188,
- 128, 137, 139, 155, 161, 163, 165, 169,
- 171, 187, 155, 156, 151, 255, 156, 157,
- 160, 181, 255, 186, 187, 255, 162, 255,
- 160, 168, 161, 167, 158, 255, 160, 132,
- 135, 133, 134, 176, 255, 170, 181, 186,
- 191, 176, 180, 182, 183, 186, 189, 134,
- 140, 136, 138, 142, 161, 163, 255, 130,
- 137, 136, 255, 144, 170, 176, 178, 160,
- 191, 128, 138, 174, 175, 177, 255, 148,
- 150, 164, 167, 173, 176, 185, 189, 190,
- 192, 255, 144, 146, 175, 141, 255, 166,
- 176, 178, 255, 186, 138, 170, 180, 181,
- 160, 161, 162, 164, 165, 166, 167, 168,
- 169, 170, 171, 172, 173, 174, 175, 176,
- 177, 178, 179, 180, 181, 182, 184, 186,
- 187, 188, 189, 190, 183, 185, 154, 164,
- 168, 128, 149, 128, 152, 189, 132, 185,
- 144, 152, 161, 177, 255, 169, 177, 129,
- 132, 141, 142, 145, 146, 179, 181, 186,
- 188, 190, 255, 142, 156, 157, 159, 161,
- 176, 177, 133, 138, 143, 144, 147, 168,
- 170, 176, 178, 179, 181, 182, 184, 185,
- 158, 153, 156, 178, 180, 189, 133, 141,
- 143, 145, 147, 168, 170, 176, 178, 179,
- 181, 185, 144, 185, 160, 161, 189, 133,
- 140, 143, 144, 147, 168, 170, 176, 178,
- 179, 181, 185, 177, 156, 157, 159, 161,
- 131, 156, 133, 138, 142, 144, 146, 149,
- 153, 154, 158, 159, 163, 164, 168, 170,
- 174, 185, 144, 189, 133, 140, 142, 144,
- 146, 168, 170, 185, 152, 154, 160, 161,
- 128, 189, 133, 140, 142, 144, 146, 168,
- 170, 179, 181, 185, 158, 160, 161, 177,
- 178, 189, 133, 140, 142, 144, 146, 186,
- 142, 148, 150, 159, 161, 186, 191, 189,
- 133, 150, 154, 177, 179, 187, 128, 134,
- 129, 176, 178, 179, 132, 138, 141, 165,
- 167, 189, 129, 130, 135, 136, 148, 151,
- 153, 159, 161, 163, 170, 171, 173, 176,
- 178, 179, 134, 128, 132, 156, 159, 128,
- 128, 135, 137, 172, 136, 140, 128, 129,
- 130, 131, 137, 138, 139, 140, 141, 142,
- 143, 144, 153, 154, 155, 156, 157, 158,
- 159, 160, 161, 162, 163, 164, 165, 166,
- 167, 168, 169, 170, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 184,
- 188, 189, 190, 191, 132, 152, 185, 187,
- 191, 128, 170, 161, 144, 149, 154, 157,
- 165, 166, 174, 176, 181, 255, 130, 141,
- 143, 159, 155, 255, 128, 140, 142, 145,
- 160, 177, 128, 145, 160, 172, 174, 176,
- 151, 156, 170, 128, 168, 176, 255, 138,
- 255, 128, 150, 160, 255, 149, 255, 167,
- 133, 179, 133, 139, 131, 160, 174, 175,
- 186, 255, 166, 255, 128, 163, 141, 143,
- 154, 189, 169, 172, 174, 177, 181, 182,
- 129, 130, 132, 133, 134, 176, 177, 178,
- 179, 180, 181, 182, 183, 177, 191, 165,
- 170, 175, 177, 180, 255, 168, 174, 176,
- 255, 128, 134, 136, 142, 144, 150, 152,
- 158, 128, 129, 130, 131, 132, 133, 134,
- 135, 144, 145, 255, 133, 135, 161, 169,
- 177, 181, 184, 188, 160, 151, 154, 128,
- 146, 147, 148, 152, 153, 154, 155, 156,
- 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173,
- 174, 175, 176, 129, 255, 141, 143, 160,
- 169, 172, 255, 191, 128, 174, 130, 134,
- 139, 163, 255, 130, 179, 187, 189, 178,
- 183, 138, 165, 176, 255, 135, 159, 189,
- 255, 132, 178, 143, 160, 164, 166, 175,
- 186, 190, 128, 168, 186, 128, 130, 132,
- 139, 160, 182, 190, 255, 176, 178, 180,
- 183, 184, 190, 255, 128, 130, 155, 157,
- 160, 170, 178, 180, 128, 162, 164, 169,
- 171, 172, 173, 174, 175, 180, 181, 182,
- 183, 185, 186, 187, 188, 189, 190, 191,
- 165, 179, 157, 190, 128, 134, 147, 151,
- 159, 168, 170, 182, 184, 188, 176, 180,
- 182, 255, 161, 186, 144, 145, 146, 147,
- 148, 150, 151, 152, 155, 157, 158, 160,
- 170, 171, 172, 175, 161, 169, 128, 129,
- 130, 131, 133, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 152,
- 156, 157, 160, 161, 162, 163, 164, 166,
- 168, 169, 170, 171, 172, 173, 174, 176,
- 177, 153, 155, 178, 179, 145, 255, 139,
- 143, 182, 255, 158, 175, 128, 144, 147,
- 149, 151, 153, 179, 128, 135, 137, 164,
- 128, 130, 131, 132, 133, 134, 135, 136,
- 138, 139, 140, 141, 144, 145, 146, 147,
- 150, 151, 152, 153, 154, 156, 162, 163,
- 171, 176, 177, 178, 131, 183, 131, 175,
- 144, 168, 131, 166, 182, 144, 178, 131,
- 178, 154, 156, 129, 132, 128, 145, 147,
- 171, 159, 255, 144, 157, 161, 135, 138,
- 128, 175, 135, 132, 133, 128, 174, 152,
- 155, 132, 128, 170, 128, 153, 160, 190,
- 192, 255, 128, 136, 138, 174, 128, 178,
- 255, 160, 168, 169, 171, 172, 173, 174,
- 188, 189, 190, 191, 161, 167, 144, 173,
- 128, 131, 163, 183, 189, 255, 133, 143,
- 145, 255, 147, 159, 128, 176, 177, 178,
- 128, 136, 144, 153, 144, 145, 146, 147,
- 148, 149, 154, 155, 156, 157, 158, 159,
- 150, 153, 131, 140, 255, 160, 163, 164,
- 165, 184, 185, 186, 161, 162, 133, 255,
- 170, 181, 183, 186, 128, 150, 152, 182,
- 184, 255, 192, 255, 128, 255, 173, 130,
- 133, 146, 159, 165, 171, 175, 255, 181,
- 190, 184, 185, 192, 255, 140, 134, 138,
- 142, 161, 163, 255, 182, 130, 136, 137,
- 176, 151, 152, 154, 160, 190, 136, 144,
- 192, 255, 135, 129, 130, 132, 133, 144,
- 170, 176, 178, 144, 154, 160, 191, 128,
- 169, 174, 255, 148, 169, 157, 158, 189,
- 190, 192, 255, 144, 255, 139, 140, 178,
- 255, 186, 128, 181, 160, 161, 162, 163,
- 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 177, 178, 179,
- 180, 181, 182, 183, 184, 185, 186, 187,
- 188, 189, 190, 191, 128, 173, 128, 155,
- 160, 180, 182, 189, 148, 161, 163, 255,
- 176, 164, 165, 132, 169, 177, 141, 142,
- 145, 146, 179, 181, 186, 187, 158, 133,
- 134, 137, 138, 143, 150, 152, 155, 164,
- 165, 178, 255, 188, 129, 131, 133, 138,
- 143, 144, 147, 168, 170, 176, 178, 179,
- 181, 182, 184, 185, 190, 255, 157, 131,
- 134, 137, 138, 142, 144, 146, 152, 159,
- 165, 182, 255, 129, 131, 133, 141, 143,
- 145, 147, 168, 170, 176, 178, 179, 181,
- 185, 188, 255, 134, 138, 142, 143, 145,
- 159, 164, 165, 176, 184, 186, 255, 129,
- 131, 133, 140, 143, 144, 147, 168, 170,
- 176, 178, 179, 181, 185, 188, 191, 177,
- 128, 132, 135, 136, 139, 141, 150, 151,
- 156, 157, 159, 163, 166, 175, 156, 130,
- 131, 133, 138, 142, 144, 146, 149, 153,
- 154, 158, 159, 163, 164, 168, 170, 174,
- 185, 190, 191, 144, 151, 128, 130, 134,
- 136, 138, 141, 166, 175, 128, 131, 133,
- 140, 142, 144, 146, 168, 170, 185, 189,
- 255, 133, 137, 151, 142, 148, 155, 159,
- 164, 165, 176, 255, 128, 131, 133, 140,
- 142, 144, 146, 168, 170, 179, 181, 185,
- 188, 191, 158, 128, 132, 134, 136, 138,
- 141, 149, 150, 160, 163, 166, 175, 177,
- 178, 129, 131, 133, 140, 142, 144, 146,
- 186, 189, 255, 133, 137, 143, 147, 152,
- 158, 164, 165, 176, 185, 192, 255, 189,
- 130, 131, 133, 150, 154, 177, 179, 187,
- 138, 150, 128, 134, 143, 148, 152, 159,
- 166, 175, 178, 179, 129, 186, 128, 142,
- 144, 153, 132, 138, 141, 165, 167, 129,
- 130, 135, 136, 148, 151, 153, 159, 161,
- 163, 170, 171, 173, 185, 187, 189, 134,
- 128, 132, 136, 141, 144, 153, 156, 159,
- 128, 181, 183, 185, 152, 153, 160, 169,
- 190, 191, 128, 135, 137, 172, 177, 191,
- 128, 132, 134, 151, 153, 188, 134, 128,
- 129, 130, 131, 137, 138, 139, 140, 141,
- 142, 143, 144, 153, 154, 155, 156, 157,
- 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 173, 175, 176,
- 177, 178, 179, 181, 182, 183, 188, 189,
- 190, 191, 132, 152, 172, 184, 185, 187,
- 128, 191, 128, 137, 144, 255, 158, 159,
- 134, 187, 136, 140, 142, 143, 137, 151,
- 153, 142, 143, 158, 159, 137, 177, 142,
- 143, 182, 183, 191, 255, 128, 130, 133,
- 136, 150, 152, 255, 145, 150, 151, 155,
- 156, 160, 168, 178, 255, 128, 143, 160,
- 255, 182, 183, 190, 255, 129, 255, 173,
- 174, 192, 255, 129, 154, 160, 255, 171,
- 173, 185, 255, 128, 140, 142, 148, 160,
- 180, 128, 147, 160, 172, 174, 176, 178,
- 179, 148, 150, 152, 155, 158, 159, 170,
- 255, 139, 141, 144, 153, 160, 255, 184,
- 255, 128, 170, 176, 255, 182, 255, 128,
- 158, 160, 171, 176, 187, 134, 173, 176,
- 180, 128, 171, 176, 255, 138, 143, 155,
- 255, 128, 155, 160, 255, 159, 189, 190,
- 192, 255, 167, 128, 137, 144, 153, 176,
- 189, 140, 143, 154, 170, 180, 255, 180,
- 255, 128, 183, 128, 137, 141, 189, 128,
- 136, 144, 146, 148, 182, 184, 185, 128,
- 181, 187, 191, 150, 151, 158, 159, 152,
- 154, 156, 158, 134, 135, 142, 143, 190,
- 255, 190, 128, 180, 182, 188, 130, 132,
- 134, 140, 144, 147, 150, 155, 160, 172,
- 178, 180, 182, 188, 128, 129, 130, 131,
- 132, 133, 134, 176, 177, 178, 179, 180,
- 181, 182, 183, 191, 255, 129, 147, 149,
- 176, 178, 190, 192, 255, 144, 156, 161,
- 144, 156, 165, 176, 130, 135, 149, 164,
- 166, 168, 138, 147, 152, 157, 170, 185,
- 188, 191, 142, 133, 137, 160, 255, 137,
- 255, 128, 174, 176, 255, 159, 165, 170,
- 180, 255, 167, 173, 128, 165, 176, 255,
- 168, 174, 176, 190, 192, 255, 128, 150,
- 160, 166, 168, 174, 176, 182, 184, 190,
- 128, 134, 136, 142, 144, 150, 152, 158,
- 160, 191, 128, 129, 130, 131, 132, 133,
- 134, 135, 144, 145, 255, 133, 135, 161,
- 175, 177, 181, 184, 188, 160, 151, 152,
- 187, 192, 255, 133, 173, 177, 255, 143,
- 159, 187, 255, 176, 191, 182, 183, 184,
- 191, 192, 255, 150, 255, 128, 146, 147,
- 148, 152, 153, 154, 155, 156, 158, 159,
- 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 129, 255, 141, 255, 144, 189, 141,
- 143, 172, 255, 191, 128, 175, 180, 189,
- 151, 159, 162, 255, 175, 137, 138, 184,
- 255, 183, 255, 168, 255, 128, 179, 188,
- 134, 143, 154, 159, 184, 186, 190, 255,
- 128, 173, 176, 255, 148, 159, 189, 255,
- 129, 142, 154, 159, 191, 255, 128, 182,
- 128, 141, 144, 153, 160, 182, 186, 255,
- 128, 130, 155, 157, 160, 175, 178, 182,
- 129, 134, 137, 142, 145, 150, 160, 166,
- 168, 174, 176, 255, 155, 166, 175, 128,
- 170, 172, 173, 176, 185, 158, 159, 160,
- 255, 164, 175, 135, 138, 188, 255, 164,
- 169, 171, 172, 173, 174, 175, 180, 181,
- 182, 183, 184, 185, 187, 188, 189, 190,
- 191, 165, 186, 174, 175, 154, 255, 190,
- 128, 134, 147, 151, 157, 168, 170, 182,
- 184, 188, 128, 129, 131, 132, 134, 255,
- 147, 255, 190, 255, 144, 145, 136, 175,
- 188, 255, 128, 143, 160, 175, 179, 180,
- 141, 143, 176, 180, 182, 255, 189, 255,
- 191, 144, 153, 161, 186, 129, 154, 166,
- 255, 191, 255, 130, 135, 138, 143, 146,
- 151, 154, 156, 144, 145, 146, 147, 148,
- 150, 151, 152, 155, 157, 158, 160, 170,
- 171, 172, 175, 161, 169, 128, 129, 130,
- 131, 133, 135, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 152,
- 156, 157, 160, 161, 162, 163, 164, 166,
- 168, 169, 170, 171, 172, 173, 174, 176,
- 177, 153, 155, 178, 179, 128, 139, 141,
- 166, 168, 186, 188, 189, 191, 255, 142,
- 143, 158, 255, 187, 255, 128, 180, 189,
- 128, 156, 160, 255, 145, 159, 161, 255,
- 128, 159, 176, 255, 139, 143, 187, 255,
- 128, 157, 160, 255, 144, 132, 135, 150,
- 255, 158, 159, 170, 175, 148, 151, 188,
- 255, 128, 167, 176, 255, 164, 255, 183,
- 255, 128, 149, 160, 167, 136, 188, 128,
- 133, 138, 181, 183, 184, 191, 255, 150,
- 159, 183, 255, 128, 158, 160, 178, 180,
- 181, 128, 149, 160, 185, 128, 183, 190,
- 191, 191, 128, 131, 133, 134, 140, 147,
- 149, 151, 153, 179, 184, 186, 160, 188,
- 128, 156, 128, 135, 137, 166, 128, 181,
- 128, 149, 160, 178, 128, 145, 128, 178,
- 129, 130, 131, 132, 133, 135, 136, 138,
- 139, 140, 141, 144, 145, 146, 147, 150,
- 151, 152, 153, 154, 155, 156, 162, 163,
- 171, 176, 177, 178, 128, 134, 135, 165,
- 176, 190, 144, 168, 176, 185, 128, 180,
- 182, 191, 182, 144, 179, 155, 133, 137,
- 141, 143, 157, 255, 190, 128, 145, 147,
- 183, 136, 128, 134, 138, 141, 143, 157,
- 159, 168, 176, 255, 171, 175, 186, 255,
- 128, 131, 133, 140, 143, 144, 147, 168,
- 170, 176, 178, 179, 181, 185, 188, 191,
- 144, 151, 128, 132, 135, 136, 139, 141,
- 157, 163, 166, 172, 176, 180, 128, 138,
- 144, 153, 134, 136, 143, 154, 255, 128,
- 181, 184, 255, 129, 151, 158, 255, 129,
- 131, 133, 143, 154, 255, 128, 137, 128,
- 153, 157, 171, 176, 185, 160, 255, 170,
- 190, 192, 255, 128, 184, 128, 136, 138,
- 182, 184, 191, 128, 144, 153, 178, 255,
- 168, 144, 145, 183, 255, 128, 142, 145,
- 149, 129, 141, 144, 146, 147, 148, 175,
- 255, 132, 255, 128, 144, 129, 143, 144,
- 153, 145, 152, 135, 255, 160, 168, 169,
- 171, 172, 173, 174, 188, 189, 190, 191,
- 161, 167, 185, 255, 128, 158, 160, 169,
- 144, 173, 176, 180, 128, 131, 144, 153,
- 163, 183, 189, 255, 144, 255, 133, 143,
- 191, 255, 143, 159, 160, 128, 129, 255,
- 159, 160, 171, 172, 255, 173, 255, 179,
- 255, 128, 176, 177, 178, 128, 129, 171,
- 175, 189, 255, 128, 136, 144, 153, 157,
- 158, 133, 134, 137, 144, 145, 146, 147,
- 148, 149, 154, 155, 156, 157, 158, 159,
- 168, 169, 170, 150, 153, 165, 169, 173,
- 178, 187, 255, 131, 132, 140, 169, 174,
- 255, 130, 132, 149, 157, 173, 186, 188,
- 160, 161, 163, 164, 167, 168, 132, 134,
- 149, 157, 186, 139, 140, 191, 255, 134,
- 128, 132, 138, 144, 146, 255, 166, 167,
- 129, 155, 187, 149, 181, 143, 175, 137,
- 169, 131, 140, 141, 192, 255, 128, 182,
- 187, 255, 173, 180, 182, 255, 132, 155,
- 159, 161, 175, 128, 160, 163, 164, 165,
- 184, 185, 186, 161, 162, 128, 134, 136,
- 152, 155, 161, 163, 164, 166, 170, 133,
- 143, 151, 255, 139, 143, 154, 255, 164,
- 167, 185, 187, 128, 131, 133, 159, 161,
- 162, 169, 178, 180, 183, 130, 135, 137,
- 139, 148, 151, 153, 155, 157, 159, 164,
- 190, 141, 143, 145, 146, 161, 162, 167,
- 170, 172, 178, 180, 183, 185, 188, 128,
- 137, 139, 155, 161, 163, 165, 169, 171,
- 187, 155, 156, 151, 255, 156, 157, 160,
- 181, 255, 186, 187, 255, 162, 255, 160,
- 168, 161, 167, 158, 255, 160, 132, 135,
- 133, 134, 176, 255, 128, 191, 154, 164,
- 168, 128, 149, 150, 191, 128, 152, 153,
- 191, 181, 128, 159, 160, 189, 190, 191,
- 189, 128, 131, 132, 185, 186, 191, 144,
- 128, 151, 152, 161, 162, 176, 177, 255,
- 169, 177, 129, 132, 141, 142, 145, 146,
- 179, 181, 186, 188, 190, 191, 192, 255,
- 142, 158, 128, 155, 156, 161, 162, 175,
- 176, 177, 178, 191, 169, 177, 180, 183,
- 128, 132, 133, 138, 139, 142, 143, 144,
- 145, 146, 147, 185, 186, 191, 157, 128,
- 152, 153, 158, 159, 177, 178, 180, 181,
- 191, 142, 146, 169, 177, 180, 189, 128,
- 132, 133, 185, 186, 191, 144, 185, 128,
- 159, 160, 161, 162, 191, 169, 177, 180,
- 189, 128, 132, 133, 140, 141, 142, 143,
- 144, 145, 146, 147, 185, 186, 191, 158,
- 177, 128, 155, 156, 161, 162, 191, 131,
- 145, 155, 157, 128, 132, 133, 138, 139,
- 141, 142, 149, 150, 152, 153, 159, 160,
- 162, 163, 164, 165, 167, 168, 170, 171,
- 173, 174, 185, 186, 191, 144, 128, 191,
- 141, 145, 169, 189, 128, 132, 133, 185,
- 186, 191, 128, 151, 152, 154, 155, 159,
- 160, 161, 162, 191, 128, 141, 145, 169,
- 180, 189, 129, 132, 133, 185, 186, 191,
- 158, 128, 159, 160, 161, 162, 176, 177,
- 178, 179, 191, 141, 145, 189, 128, 132,
- 133, 186, 187, 191, 142, 128, 147, 148,
- 150, 151, 158, 159, 161, 162, 185, 186,
- 191, 178, 188, 128, 132, 133, 150, 151,
- 153, 154, 189, 190, 191, 128, 134, 135,
- 191, 128, 177, 129, 179, 180, 191, 128,
- 131, 137, 141, 152, 160, 164, 166, 172,
- 177, 189, 129, 132, 133, 134, 135, 138,
- 139, 147, 148, 167, 168, 169, 170, 179,
- 180, 191, 133, 128, 134, 135, 155, 156,
- 159, 160, 191, 128, 129, 191, 136, 128,
- 172, 173, 191, 128, 135, 136, 140, 141,
- 191, 191, 128, 170, 171, 190, 161, 128,
- 143, 144, 149, 150, 153, 154, 157, 158,
- 164, 165, 166, 167, 173, 174, 176, 177,
- 180, 181, 255, 130, 141, 143, 159, 134,
- 187, 136, 140, 142, 143, 137, 151, 153,
- 142, 143, 158, 159, 137, 177, 191, 142,
- 143, 182, 183, 192, 255, 129, 151, 128,
- 133, 134, 135, 136, 255, 145, 150, 151,
- 155, 191, 192, 255, 128, 143, 144, 159,
- 160, 255, 182, 183, 190, 191, 192, 255,
- 128, 129, 255, 173, 174, 192, 255, 128,
- 129, 154, 155, 159, 160, 255, 171, 173,
- 185, 191, 192, 255, 141, 128, 145, 146,
- 159, 160, 177, 178, 191, 173, 128, 145,
- 146, 159, 160, 176, 177, 191, 128, 179,
- 180, 191, 151, 156, 128, 191, 128, 159,
- 160, 255, 184, 191, 192, 255, 169, 128,
- 170, 171, 175, 176, 255, 182, 191, 192,
- 255, 128, 158, 159, 191, 128, 143, 144,
- 173, 174, 175, 176, 180, 181, 191, 128,
- 171, 172, 175, 176, 255, 138, 191, 192,
- 255, 128, 150, 151, 159, 160, 255, 149,
- 191, 192, 255, 167, 128, 191, 128, 132,
- 133, 179, 180, 191, 128, 132, 133, 139,
- 140, 191, 128, 130, 131, 160, 161, 173,
- 174, 175, 176, 185, 186, 255, 166, 191,
- 192, 255, 128, 163, 164, 191, 128, 140,
- 141, 143, 144, 153, 154, 189, 190, 191,
- 128, 136, 137, 191, 173, 128, 168, 169,
- 177, 178, 180, 181, 182, 183, 191, 0,
- 127, 192, 255, 150, 151, 158, 159, 152,
- 154, 156, 158, 134, 135, 142, 143, 190,
- 191, 192, 255, 181, 189, 191, 128, 190,
- 133, 181, 128, 129, 130, 140, 141, 143,
- 144, 147, 148, 149, 150, 155, 156, 159,
- 160, 172, 173, 177, 178, 188, 189, 191,
- 177, 191, 128, 190, 128, 143, 144, 156,
- 157, 191, 130, 135, 148, 164, 166, 168,
- 128, 137, 138, 149, 150, 151, 152, 157,
- 158, 169, 170, 185, 186, 187, 188, 191,
- 142, 128, 132, 133, 137, 138, 159, 160,
- 255, 137, 191, 192, 255, 175, 128, 255,
- 159, 165, 170, 175, 177, 180, 191, 192,
- 255, 166, 173, 128, 167, 168, 175, 176,
- 255, 168, 174, 176, 191, 192, 255, 167,
- 175, 183, 191, 128, 150, 151, 159, 160,
- 190, 135, 143, 151, 128, 158, 159, 191,
- 128, 132, 133, 135, 136, 160, 161, 169,
- 170, 176, 177, 181, 182, 183, 184, 188,
- 189, 191, 160, 151, 154, 187, 192, 255,
- 128, 132, 133, 173, 174, 176, 177, 255,
- 143, 159, 187, 191, 192, 255, 128, 175,
- 176, 191, 150, 191, 192, 255, 141, 191,
- 192, 255, 128, 143, 144, 189, 190, 191,
- 141, 143, 160, 169, 172, 191, 192, 255,
- 191, 128, 174, 175, 190, 128, 157, 158,
- 159, 160, 255, 176, 191, 192, 255, 128,
- 150, 151, 159, 160, 161, 162, 255, 175,
- 137, 138, 184, 191, 192, 255, 128, 182,
- 183, 255, 130, 134, 139, 163, 191, 192,
- 255, 128, 129, 130, 179, 180, 191, 187,
- 189, 128, 177, 178, 183, 184, 191, 128,
- 137, 138, 165, 166, 175, 176, 255, 135,
- 159, 189, 191, 192, 255, 128, 131, 132,
- 178, 179, 191, 143, 165, 191, 128, 159,
- 160, 175, 176, 185, 186, 190, 128, 168,
- 169, 191, 131, 186, 128, 139, 140, 159,
- 160, 182, 183, 189, 190, 255, 176, 178,
- 180, 183, 184, 190, 191, 192, 255, 129,
- 128, 130, 131, 154, 155, 157, 158, 159,
- 160, 170, 171, 177, 178, 180, 181, 191,
- 128, 167, 175, 129, 134, 135, 136, 137,
- 142, 143, 144, 145, 150, 151, 159, 160,
- 255, 155, 166, 175, 128, 162, 163, 191,
- 164, 175, 135, 138, 188, 191, 192, 255,
- 174, 175, 154, 191, 192, 255, 157, 169,
- 183, 189, 191, 128, 134, 135, 146, 147,
- 151, 152, 158, 159, 190, 130, 133, 128,
- 255, 178, 191, 192, 255, 128, 146, 147,
- 255, 190, 191, 192, 255, 128, 143, 144,
- 255, 144, 145, 136, 175, 188, 191, 192,
- 255, 181, 128, 175, 176, 255, 189, 191,
- 192, 255, 128, 160, 161, 186, 187, 191,
- 128, 129, 154, 155, 165, 166, 255, 191,
- 192, 255, 128, 129, 130, 135, 136, 137,
- 138, 143, 144, 145, 146, 151, 152, 153,
- 154, 156, 157, 191, 128, 191, 128, 129,
- 130, 131, 133, 138, 139, 140, 141, 142,
- 143, 144, 145, 146, 147, 148, 149, 152,
- 156, 157, 160, 161, 162, 163, 164, 166,
- 168, 169, 170, 171, 172, 173, 174, 176,
- 177, 132, 151, 153, 155, 158, 175, 178,
- 179, 180, 191, 140, 167, 187, 190, 128,
- 255, 142, 143, 158, 191, 192, 255, 187,
- 191, 192, 255, 128, 180, 181, 191, 128,
- 156, 157, 159, 160, 255, 145, 191, 192,
- 255, 128, 159, 160, 175, 176, 255, 139,
- 143, 182, 191, 192, 255, 144, 132, 135,
- 150, 191, 192, 255, 158, 175, 148, 151,
- 188, 191, 192, 255, 128, 167, 168, 175,
- 176, 255, 164, 191, 192, 255, 183, 191,
- 192, 255, 128, 149, 150, 159, 160, 167,
- 168, 191, 136, 182, 188, 128, 133, 134,
- 137, 138, 184, 185, 190, 191, 255, 150,
- 159, 183, 191, 192, 255, 179, 128, 159,
- 160, 181, 182, 191, 128, 149, 150, 159,
- 160, 185, 186, 191, 128, 183, 184, 189,
- 190, 191, 128, 148, 152, 129, 143, 144,
- 179, 180, 191, 128, 159, 160, 188, 189,
- 191, 128, 156, 157, 191, 136, 128, 164,
- 165, 191, 128, 181, 182, 191, 128, 149,
- 150, 159, 160, 178, 179, 191, 128, 145,
- 146, 191, 128, 178, 179, 191, 128, 130,
- 131, 132, 133, 134, 135, 136, 138, 139,
- 140, 141, 144, 145, 146, 147, 150, 151,
- 152, 153, 154, 156, 162, 163, 171, 176,
- 177, 178, 129, 191, 128, 130, 131, 183,
- 184, 191, 128, 130, 131, 175, 176, 191,
- 128, 143, 144, 168, 169, 191, 128, 130,
- 131, 166, 167, 191, 182, 128, 143, 144,
- 178, 179, 191, 128, 130, 131, 178, 179,
- 191, 128, 154, 156, 129, 132, 133, 191,
- 146, 128, 171, 172, 191, 135, 137, 142,
- 158, 128, 168, 169, 175, 176, 255, 159,
- 191, 192, 255, 144, 128, 156, 157, 161,
- 162, 191, 128, 134, 135, 138, 139, 191,
- 128, 175, 176, 191, 134, 128, 131, 132,
- 135, 136, 191, 128, 174, 175, 191, 128,
- 151, 152, 155, 156, 191, 132, 128, 191,
- 128, 170, 171, 191, 128, 153, 154, 191,
- 160, 190, 192, 255, 128, 184, 185, 191,
- 137, 128, 174, 175, 191, 128, 129, 177,
- 178, 255, 144, 191, 192, 255, 128, 142,
- 143, 144, 145, 146, 149, 129, 148, 150,
- 191, 175, 191, 192, 255, 132, 191, 192,
- 255, 128, 144, 129, 143, 145, 191, 144,
- 153, 128, 143, 145, 152, 154, 191, 135,
- 191, 192, 255, 160, 168, 169, 171, 172,
- 173, 174, 188, 189, 190, 191, 128, 159,
- 161, 167, 170, 187, 185, 191, 192, 255,
- 128, 143, 144, 173, 174, 191, 128, 131,
- 132, 162, 163, 183, 184, 188, 189, 255,
- 133, 143, 145, 191, 192, 255, 128, 146,
- 147, 159, 160, 191, 160, 128, 191, 128,
- 129, 191, 192, 255, 159, 160, 171, 128,
- 170, 172, 191, 192, 255, 173, 191, 192,
- 255, 179, 191, 192, 255, 128, 176, 177,
- 178, 129, 191, 128, 129, 130, 191, 171,
- 175, 189, 191, 192, 255, 128, 136, 137,
- 143, 144, 153, 154, 191, 144, 145, 146,
- 147, 148, 149, 154, 155, 156, 157, 158,
- 159, 128, 143, 150, 153, 160, 191, 149,
- 157, 173, 186, 188, 160, 161, 163, 164,
- 167, 168, 132, 134, 149, 157, 186, 191,
- 139, 140, 192, 255, 133, 145, 128, 134,
- 135, 137, 138, 255, 166, 167, 129, 155,
- 187, 149, 181, 143, 175, 137, 169, 131,
- 140, 191, 192, 255, 160, 163, 164, 165,
- 184, 185, 186, 128, 159, 161, 162, 166,
- 191, 133, 191, 192, 255, 132, 160, 163,
- 167, 179, 184, 186, 128, 164, 165, 168,
- 169, 187, 188, 191, 130, 135, 137, 139,
- 144, 147, 151, 153, 155, 157, 159, 163,
- 171, 179, 184, 189, 191, 128, 140, 141,
- 148, 149, 160, 161, 164, 165, 166, 167,
- 190, 138, 164, 170, 128, 155, 156, 160,
- 161, 187, 188, 191, 128, 191, 155, 156,
- 128, 191, 151, 191, 192, 255, 156, 157,
- 160, 128, 191, 181, 191, 192, 255, 158,
- 159, 186, 128, 185, 187, 191, 192, 255,
- 162, 191, 192, 255, 160, 168, 128, 159,
- 161, 167, 169, 191, 158, 191, 192, 255,
- 10, 13, 128, 191, 192, 223, 224, 239,
- 240, 247, 248, 255, 128, 191, 128, 191,
- 128, 191, 128, 191, 128, 191, 10, 128,
- 191, 128, 191, 128, 191, 36, 123, 37,
- 123, 10, 128, 191, 128, 191, 128, 191,
- 36, 123, 37, 123, 170, 181, 183, 186,
- 128, 150, 152, 182, 184, 255, 192, 255,
- 128, 255, 173, 130, 133, 146, 159, 165,
- 171, 175, 255, 181, 190, 184, 185, 192,
- 255, 140, 134, 138, 142, 161, 163, 255,
- 182, 130, 136, 137, 176, 151, 152, 154,
- 160, 190, 136, 144, 192, 255, 135, 129,
- 130, 132, 133, 144, 170, 176, 178, 144,
- 154, 160, 191, 128, 169, 174, 255, 148,
- 169, 157, 158, 189, 190, 192, 255, 144,
- 255, 139, 140, 178, 255, 186, 128, 181,
- 160, 161, 162, 163, 164, 165, 166, 167,
- 168, 169, 170, 171, 172, 173, 174, 175,
- 176, 177, 178, 179, 180, 181, 182, 183,
- 184, 185, 186, 187, 188, 189, 190, 191,
- 128, 173, 128, 155, 160, 180, 182, 189,
- 148, 161, 163, 255, 176, 164, 165, 132,
- 169, 177, 141, 142, 145, 146, 179, 181,
- 186, 187, 158, 133, 134, 137, 138, 143,
- 150, 152, 155, 164, 165, 178, 255, 188,
- 129, 131, 133, 138, 143, 144, 147, 168,
- 170, 176, 178, 179, 181, 182, 184, 185,
- 190, 255, 157, 131, 134, 137, 138, 142,
- 144, 146, 152, 159, 165, 182, 255, 129,
- 131, 133, 141, 143, 145, 147, 168, 170,
- 176, 178, 179, 181, 185, 188, 255, 134,
- 138, 142, 143, 145, 159, 164, 165, 176,
- 184, 186, 255, 129, 131, 133, 140, 143,
- 144, 147, 168, 170, 176, 178, 179, 181,
- 185, 188, 191, 177, 128, 132, 135, 136,
- 139, 141, 150, 151, 156, 157, 159, 163,
- 166, 175, 156, 130, 131, 133, 138, 142,
- 144, 146, 149, 153, 154, 158, 159, 163,
- 164, 168, 170, 174, 185, 190, 191, 144,
- 151, 128, 130, 134, 136, 138, 141, 166,
- 175, 128, 131, 133, 140, 142, 144, 146,
- 168, 170, 185, 189, 255, 133, 137, 151,
- 142, 148, 155, 159, 164, 165, 176, 255,
- 128, 131, 133, 140, 142, 144, 146, 168,
- 170, 179, 181, 185, 188, 191, 158, 128,
- 132, 134, 136, 138, 141, 149, 150, 160,
- 163, 166, 175, 177, 178, 129, 131, 133,
- 140, 142, 144, 146, 186, 189, 255, 133,
- 137, 143, 147, 152, 158, 164, 165, 176,
- 185, 192, 255, 189, 130, 131, 133, 150,
- 154, 177, 179, 187, 138, 150, 128, 134,
- 143, 148, 152, 159, 166, 175, 178, 179,
- 129, 186, 128, 142, 144, 153, 132, 138,
- 141, 165, 167, 129, 130, 135, 136, 148,
- 151, 153, 159, 161, 163, 170, 171, 173,
- 185, 187, 189, 134, 128, 132, 136, 141,
- 144, 153, 156, 159, 128, 181, 183, 185,
- 152, 153, 160, 169, 190, 191, 128, 135,
- 137, 172, 177, 191, 128, 132, 134, 151,
- 153, 188, 134, 128, 129, 130, 131, 137,
- 138, 139, 140, 141, 142, 143, 144, 153,
- 154, 155, 156, 157, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169,
- 170, 173, 175, 176, 177, 178, 179, 181,
- 182, 183, 188, 189, 190, 191, 132, 152,
- 172, 184, 185, 187, 128, 191, 128, 137,
- 144, 255, 158, 159, 134, 187, 136, 140,
- 142, 143, 137, 151, 153, 142, 143, 158,
- 159, 137, 177, 142, 143, 182, 183, 191,
- 255, 128, 130, 133, 136, 150, 152, 255,
- 145, 150, 151, 155, 156, 160, 168, 178,
- 255, 128, 143, 160, 255, 182, 183, 190,
- 255, 129, 255, 173, 174, 192, 255, 129,
- 154, 160, 255, 171, 173, 185, 255, 128,
- 140, 142, 148, 160, 180, 128, 147, 160,
- 172, 174, 176, 178, 179, 148, 150, 152,
- 155, 158, 159, 170, 255, 139, 141, 144,
- 153, 160, 255, 184, 255, 128, 170, 176,
- 255, 182, 255, 128, 158, 160, 171, 176,
- 187, 134, 173, 176, 180, 128, 171, 176,
- 255, 138, 143, 155, 255, 128, 155, 160,
- 255, 159, 189, 190, 192, 255, 167, 128,
- 137, 144, 153, 176, 189, 140, 143, 154,
- 170, 180, 255, 180, 255, 128, 183, 128,
- 137, 141, 189, 128, 136, 144, 146, 148,
- 182, 184, 185, 128, 181, 187, 191, 150,
- 151, 158, 159, 152, 154, 156, 158, 134,
- 135, 142, 143, 190, 255, 190, 128, 180,
- 182, 188, 130, 132, 134, 140, 144, 147,
- 150, 155, 160, 172, 178, 180, 182, 188,
- 128, 129, 130, 131, 132, 133, 134, 176,
- 177, 178, 179, 180, 181, 182, 183, 191,
- 255, 129, 147, 149, 176, 178, 190, 192,
- 255, 144, 156, 161, 144, 156, 165, 176,
- 130, 135, 149, 164, 166, 168, 138, 147,
- 152, 157, 170, 185, 188, 191, 142, 133,
- 137, 160, 255, 137, 255, 128, 174, 176,
- 255, 159, 165, 170, 180, 255, 167, 173,
- 128, 165, 176, 255, 168, 174, 176, 190,
- 192, 255, 128, 150, 160, 166, 168, 174,
- 176, 182, 184, 190, 128, 134, 136, 142,
- 144, 150, 152, 158, 160, 191, 128, 129,
- 130, 131, 132, 133, 134, 135, 144, 145,
- 255, 133, 135, 161, 175, 177, 181, 184,
- 188, 160, 151, 152, 187, 192, 255, 133,
- 173, 177, 255, 143, 159, 187, 255, 176,
- 191, 182, 183, 184, 191, 192, 255, 150,
- 255, 128, 146, 147, 148, 152, 153, 154,
- 155, 156, 158, 159, 160, 161, 162, 163,
- 164, 165, 166, 167, 168, 169, 170, 171,
- 172, 173, 174, 175, 176, 129, 255, 141,
- 255, 144, 189, 141, 143, 172, 255, 191,
- 128, 175, 180, 189, 151, 159, 162, 255,
- 175, 137, 138, 184, 255, 183, 255, 168,
- 255, 128, 179, 188, 134, 143, 154, 159,
- 184, 186, 190, 255, 128, 173, 176, 255,
- 148, 159, 189, 255, 129, 142, 154, 159,
- 191, 255, 128, 182, 128, 141, 144, 153,
- 160, 182, 186, 255, 128, 130, 155, 157,
- 160, 175, 178, 182, 129, 134, 137, 142,
- 145, 150, 160, 166, 168, 174, 176, 255,
- 155, 166, 175, 128, 170, 172, 173, 176,
- 185, 158, 159, 160, 255, 164, 175, 135,
- 138, 188, 255, 164, 169, 171, 172, 173,
- 174, 175, 180, 181, 182, 183, 184, 185,
- 187, 188, 189, 190, 191, 165, 186, 174,
- 175, 154, 255, 190, 128, 134, 147, 151,
- 157, 168, 170, 182, 184, 188, 128, 129,
- 131, 132, 134, 255, 147, 255, 190, 255,
- 144, 145, 136, 175, 188, 255, 128, 143,
- 160, 175, 179, 180, 141, 143, 176, 180,
- 182, 255, 189, 255, 191, 144, 153, 161,
- 186, 129, 154, 166, 255, 191, 255, 130,
- 135, 138, 143, 146, 151, 154, 156, 144,
- 145, 146, 147, 148, 150, 151, 152, 155,
- 157, 158, 160, 170, 171, 172, 175, 161,
- 169, 128, 129, 130, 131, 133, 135, 138,
- 139, 140, 141, 142, 143, 144, 145, 146,
- 147, 148, 149, 152, 156, 157, 160, 161,
- 162, 163, 164, 166, 168, 169, 170, 171,
- 172, 173, 174, 176, 177, 153, 155, 178,
- 179, 128, 139, 141, 166, 168, 186, 188,
- 189, 191, 255, 142, 143, 158, 255, 187,
- 255, 128, 180, 189, 128, 156, 160, 255,
- 145, 159, 161, 255, 128, 159, 176, 255,
- 139, 143, 187, 255, 128, 157, 160, 255,
- 144, 132, 135, 150, 255, 158, 159, 170,
- 175, 148, 151, 188, 255, 128, 167, 176,
- 255, 164, 255, 183, 255, 128, 149, 160,
- 167, 136, 188, 128, 133, 138, 181, 183,
- 184, 191, 255, 150, 159, 183, 255, 128,
- 158, 160, 178, 180, 181, 128, 149, 160,
- 185, 128, 183, 190, 191, 191, 128, 131,
- 133, 134, 140, 147, 149, 151, 153, 179,
- 184, 186, 160, 188, 128, 156, 128, 135,
- 137, 166, 128, 181, 128, 149, 160, 178,
- 128, 145, 128, 178, 129, 130, 131, 132,
- 133, 135, 136, 138, 139, 140, 141, 144,
- 145, 146, 147, 150, 151, 152, 153, 154,
- 155, 156, 162, 163, 171, 176, 177, 178,
- 128, 134, 135, 165, 176, 190, 144, 168,
- 176, 185, 128, 180, 182, 191, 182, 144,
- 179, 155, 133, 137, 141, 143, 157, 255,
- 190, 128, 145, 147, 183, 136, 128, 134,
- 138, 141, 143, 157, 159, 168, 176, 255,
- 171, 175, 186, 255, 128, 131, 133, 140,
- 143, 144, 147, 168, 170, 176, 178, 179,
- 181, 185, 188, 191, 144, 151, 128, 132,
- 135, 136, 139, 141, 157, 163, 166, 172,
- 176, 180, 128, 138, 144, 153, 134, 136,
- 143, 154, 255, 128, 181, 184, 255, 129,
- 151, 158, 255, 129, 131, 133, 143, 154,
- 255, 128, 137, 128, 153, 157, 171, 176,
- 185, 160, 255, 170, 190, 192, 255, 128,
- 184, 128, 136, 138, 182, 184, 191, 128,
- 144, 153, 178, 255, 168, 144, 145, 183,
- 255, 128, 142, 145, 149, 129, 141, 144,
- 146, 147, 148, 175, 255, 132, 255, 128,
- 144, 129, 143, 144, 153, 145, 152, 135,
- 255, 160, 168, 169, 171, 172, 173, 174,
- 188, 189, 190, 191, 161, 167, 185, 255,
- 128, 158, 160, 169, 144, 173, 176, 180,
- 128, 131, 144, 153, 163, 183, 189, 255,
- 144, 255, 133, 143, 191, 255, 143, 159,
- 160, 128, 129, 255, 159, 160, 171, 172,
- 255, 173, 255, 179, 255, 128, 176, 177,
- 178, 128, 129, 171, 175, 189, 255, 128,
- 136, 144, 153, 157, 158, 133, 134, 137,
- 144, 145, 146, 147, 148, 149, 154, 155,
- 156, 157, 158, 159, 168, 169, 170, 150,
- 153, 165, 169, 173, 178, 187, 255, 131,
- 132, 140, 169, 174, 255, 130, 132, 149,
- 157, 173, 186, 188, 160, 161, 163, 164,
- 167, 168, 132, 134, 149, 157, 186, 139,
- 140, 191, 255, 134, 128, 132, 138, 144,
- 146, 255, 166, 167, 129, 155, 187, 149,
- 181, 143, 175, 137, 169, 131, 140, 141,
- 192, 255, 128, 182, 187, 255, 173, 180,
- 182, 255, 132, 155, 159, 161, 175, 128,
- 160, 163, 164, 165, 184, 185, 186, 161,
- 162, 128, 134, 136, 152, 155, 161, 163,
- 164, 166, 170, 133, 143, 151, 255, 139,
- 143, 154, 255, 164, 167, 185, 187, 128,
- 131, 133, 159, 161, 162, 169, 178, 180,
- 183, 130, 135, 137, 139, 148, 151, 153,
- 155, 157, 159, 164, 190, 141, 143, 145,
- 146, 161, 162, 167, 170, 172, 178, 180,
- 183, 185, 188, 128, 137, 139, 155, 161,
- 163, 165, 169, 171, 187, 155, 156, 151,
- 255, 156, 157, 160, 181, 255, 186, 187,
- 255, 162, 255, 160, 168, 161, 167, 158,
- 255, 160, 132, 135, 133, 134, 176, 255,
- 128, 191, 154, 164, 168, 128, 149, 150,
- 191, 128, 152, 153, 191, 181, 128, 159,
- 160, 189, 190, 191, 189, 128, 131, 132,
- 185, 186, 191, 144, 128, 151, 152, 161,
- 162, 176, 177, 255, 169, 177, 129, 132,
- 141, 142, 145, 146, 179, 181, 186, 188,
- 190, 191, 192, 255, 142, 158, 128, 155,
- 156, 161, 162, 175, 176, 177, 178, 191,
- 169, 177, 180, 183, 128, 132, 133, 138,
- 139, 142, 143, 144, 145, 146, 147, 185,
- 186, 191, 157, 128, 152, 153, 158, 159,
- 177, 178, 180, 181, 191, 142, 146, 169,
- 177, 180, 189, 128, 132, 133, 185, 186,
- 191, 144, 185, 128, 159, 160, 161, 162,
- 191, 169, 177, 180, 189, 128, 132, 133,
- 140, 141, 142, 143, 144, 145, 146, 147,
- 185, 186, 191, 158, 177, 128, 155, 156,
- 161, 162, 191, 131, 145, 155, 157, 128,
- 132, 133, 138, 139, 141, 142, 149, 150,
- 152, 153, 159, 160, 162, 163, 164, 165,
- 167, 168, 170, 171, 173, 174, 185, 186,
- 191, 144, 128, 191, 141, 145, 169, 189,
- 128, 132, 133, 185, 186, 191, 128, 151,
- 152, 154, 155, 159, 160, 161, 162, 191,
- 128, 141, 145, 169, 180, 189, 129, 132,
- 133, 185, 186, 191, 158, 128, 159, 160,
- 161, 162, 176, 177, 178, 179, 191, 141,
- 145, 189, 128, 132, 133, 186, 187, 191,
- 142, 128, 147, 148, 150, 151, 158, 159,
- 161, 162, 185, 186, 191, 178, 188, 128,
- 132, 133, 150, 151, 153, 154, 189, 190,
- 191, 128, 134, 135, 191, 128, 177, 129,
- 179, 180, 191, 128, 131, 137, 141, 152,
- 160, 164, 166, 172, 177, 189, 129, 132,
- 133, 134, 135, 138, 139, 147, 148, 167,
- 168, 169, 170, 179, 180, 191, 133, 128,
- 134, 135, 155, 156, 159, 160, 191, 128,
- 129, 191, 136, 128, 172, 173, 191, 128,
- 135, 136, 140, 141, 191, 191, 128, 170,
- 171, 190, 161, 128, 143, 144, 149, 150,
- 153, 154, 157, 158, 164, 165, 166, 167,
- 173, 174, 176, 177, 180, 181, 255, 130,
- 141, 143, 159, 134, 187, 136, 140, 142,
- 143, 137, 151, 153, 142, 143, 158, 159,
- 137, 177, 191, 142, 143, 182, 183, 192,
- 255, 129, 151, 128, 133, 134, 135, 136,
- 255, 145, 150, 151, 155, 191, 192, 255,
- 128, 143, 144, 159, 160, 255, 182, 183,
- 190, 191, 192, 255, 128, 129, 255, 173,
- 174, 192, 255, 128, 129, 154, 155, 159,
- 160, 255, 171, 173, 185, 191, 192, 255,
- 141, 128, 145, 146, 159, 160, 177, 178,
- 191, 173, 128, 145, 146, 159, 160, 176,
- 177, 191, 128, 179, 180, 191, 151, 156,
- 128, 191, 128, 159, 160, 255, 184, 191,
- 192, 255, 169, 128, 170, 171, 175, 176,
- 255, 182, 191, 192, 255, 128, 158, 159,
- 191, 128, 143, 144, 173, 174, 175, 176,
- 180, 181, 191, 128, 171, 172, 175, 176,
- 255, 138, 191, 192, 255, 128, 150, 151,
- 159, 160, 255, 149, 191, 192, 255, 167,
- 128, 191, 128, 132, 133, 179, 180, 191,
- 128, 132, 133, 139, 140, 191, 128, 130,
- 131, 160, 161, 173, 174, 175, 176, 185,
- 186, 255, 166, 191, 192, 255, 128, 163,
- 164, 191, 128, 140, 141, 143, 144, 153,
- 154, 189, 190, 191, 128, 136, 137, 191,
- 173, 128, 168, 169, 177, 178, 180, 181,
- 182, 183, 191, 0, 127, 192, 255, 150,
- 151, 158, 159, 152, 154, 156, 158, 134,
- 135, 142, 143, 190, 191, 192, 255, 181,
- 189, 191, 128, 190, 133, 181, 128, 129,
- 130, 140, 141, 143, 144, 147, 148, 149,
- 150, 155, 156, 159, 160, 172, 173, 177,
- 178, 188, 189, 191, 177, 191, 128, 190,
- 128, 143, 144, 156, 157, 191, 130, 135,
- 148, 164, 166, 168, 128, 137, 138, 149,
- 150, 151, 152, 157, 158, 169, 170, 185,
- 186, 187, 188, 191, 142, 128, 132, 133,
- 137, 138, 159, 160, 255, 137, 191, 192,
- 255, 175, 128, 255, 159, 165, 170, 175,
- 177, 180, 191, 192, 255, 166, 173, 128,
- 167, 168, 175, 176, 255, 168, 174, 176,
- 191, 192, 255, 167, 175, 183, 191, 128,
- 150, 151, 159, 160, 190, 135, 143, 151,
- 128, 158, 159, 191, 128, 132, 133, 135,
- 136, 160, 161, 169, 170, 176, 177, 181,
- 182, 183, 184, 188, 189, 191, 160, 151,
- 154, 187, 192, 255, 128, 132, 133, 173,
- 174, 176, 177, 255, 143, 159, 187, 191,
- 192, 255, 128, 175, 176, 191, 150, 191,
- 192, 255, 141, 191, 192, 255, 128, 143,
- 144, 189, 190, 191, 141, 143, 160, 169,
- 172, 191, 192, 255, 191, 128, 174, 175,
- 190, 128, 157, 158, 159, 160, 255, 176,
- 191, 192, 255, 128, 150, 151, 159, 160,
- 161, 162, 255, 175, 137, 138, 184, 191,
- 192, 255, 128, 182, 183, 255, 130, 134,
- 139, 163, 191, 192, 255, 128, 129, 130,
- 179, 180, 191, 187, 189, 128, 177, 178,
- 183, 184, 191, 128, 137, 138, 165, 166,
- 175, 176, 255, 135, 159, 189, 191, 192,
- 255, 128, 131, 132, 178, 179, 191, 143,
- 165, 191, 128, 159, 160, 175, 176, 185,
- 186, 190, 128, 168, 169, 191, 131, 186,
- 128, 139, 140, 159, 160, 182, 183, 189,
- 190, 255, 176, 178, 180, 183, 184, 190,
- 191, 192, 255, 129, 128, 130, 131, 154,
- 155, 157, 158, 159, 160, 170, 171, 177,
- 178, 180, 181, 191, 128, 167, 175, 129,
- 134, 135, 136, 137, 142, 143, 144, 145,
- 150, 151, 159, 160, 255, 155, 166, 175,
- 128, 162, 163, 191, 164, 175, 135, 138,
- 188, 191, 192, 255, 174, 175, 154, 191,
- 192, 255, 157, 169, 183, 189, 191, 128,
- 134, 135, 146, 147, 151, 152, 158, 159,
- 190, 130, 133, 128, 255, 178, 191, 192,
- 255, 128, 146, 147, 255, 190, 191, 192,
- 255, 128, 143, 144, 255, 144, 145, 136,
- 175, 188, 191, 192, 255, 181, 128, 175,
- 176, 255, 189, 191, 192, 255, 128, 160,
- 161, 186, 187, 191, 128, 129, 154, 155,
- 165, 166, 255, 191, 192, 255, 128, 129,
- 130, 135, 136, 137, 138, 143, 144, 145,
- 146, 151, 152, 153, 154, 156, 157, 191,
- 128, 191, 128, 129, 130, 131, 133, 138,
- 139, 140, 141, 142, 143, 144, 145, 146,
- 147, 148, 149, 152, 156, 157, 160, 161,
- 162, 163, 164, 166, 168, 169, 170, 171,
- 172, 173, 174, 176, 177, 132, 151, 153,
- 155, 158, 175, 178, 179, 180, 191, 140,
- 167, 187, 190, 128, 255, 142, 143, 158,
- 191, 192, 255, 187, 191, 192, 255, 128,
- 180, 181, 191, 128, 156, 157, 159, 160,
- 255, 145, 191, 192, 255, 128, 159, 160,
- 175, 176, 255, 139, 143, 182, 191, 192,
- 255, 144, 132, 135, 150, 191, 192, 255,
- 158, 175, 148, 151, 188, 191, 192, 255,
- 128, 167, 168, 175, 176, 255, 164, 191,
- 192, 255, 183, 191, 192, 255, 128, 149,
- 150, 159, 160, 167, 168, 191, 136, 182,
- 188, 128, 133, 134, 137, 138, 184, 185,
- 190, 191, 255, 150, 159, 183, 191, 192,
- 255, 179, 128, 159, 160, 181, 182, 191,
- 128, 149, 150, 159, 160, 185, 186, 191,
- 128, 183, 184, 189, 190, 191, 128, 148,
- 152, 129, 143, 144, 179, 180, 191, 128,
- 159, 160, 188, 189, 191, 128, 156, 157,
- 191, 136, 128, 164, 165, 191, 128, 181,
- 182, 191, 128, 149, 150, 159, 160, 178,
- 179, 191, 128, 145, 146, 191, 128, 178,
- 179, 191, 128, 130, 131, 132, 133, 134,
- 135, 136, 138, 139, 140, 141, 144, 145,
- 146, 147, 150, 151, 152, 153, 154, 156,
- 162, 163, 171, 176, 177, 178, 129, 191,
- 128, 130, 131, 183, 184, 191, 128, 130,
- 131, 175, 176, 191, 128, 143, 144, 168,
- 169, 191, 128, 130, 131, 166, 167, 191,
- 182, 128, 143, 144, 178, 179, 191, 128,
- 130, 131, 178, 179, 191, 128, 154, 156,
- 129, 132, 133, 191, 146, 128, 171, 172,
- 191, 135, 137, 142, 158, 128, 168, 169,
- 175, 176, 255, 159, 191, 192, 255, 144,
- 128, 156, 157, 161, 162, 191, 128, 134,
- 135, 138, 139, 191, 128, 175, 176, 191,
- 134, 128, 131, 132, 135, 136, 191, 128,
- 174, 175, 191, 128, 151, 152, 155, 156,
- 191, 132, 128, 191, 128, 170, 171, 191,
- 128, 153, 154, 191, 160, 190, 192, 255,
- 128, 184, 185, 191, 137, 128, 174, 175,
- 191, 128, 129, 177, 178, 255, 144, 191,
- 192, 255, 128, 142, 143, 144, 145, 146,
- 149, 129, 148, 150, 191, 175, 191, 192,
- 255, 132, 191, 192, 255, 128, 144, 129,
- 143, 145, 191, 144, 153, 128, 143, 145,
- 152, 154, 191, 135, 191, 192, 255, 160,
- 168, 169, 171, 172, 173, 174, 188, 189,
- 190, 191, 128, 159, 161, 167, 170, 187,
- 185, 191, 192, 255, 128, 143, 144, 173,
- 174, 191, 128, 131, 132, 162, 163, 183,
- 184, 188, 189, 255, 133, 143, 145, 191,
- 192, 255, 128, 146, 147, 159, 160, 191,
- 160, 128, 191, 128, 129, 191, 192, 255,
- 159, 160, 171, 128, 170, 172, 191, 192,
- 255, 173, 191, 192, 255, 179, 191, 192,
- 255, 128, 176, 177, 178, 129, 191, 128,
- 129, 130, 191, 171, 175, 189, 191, 192,
- 255, 128, 136, 137, 143, 144, 153, 154,
- 191, 144, 145, 146, 147, 148, 149, 154,
- 155, 156, 157, 158, 159, 128, 143, 150,
- 153, 160, 191, 149, 157, 173, 186, 188,
- 160, 161, 163, 164, 167, 168, 132, 134,
- 149, 157, 186, 191, 139, 140, 192, 255,
- 133, 145, 128, 134, 135, 137, 138, 255,
- 166, 167, 129, 155, 187, 149, 181, 143,
- 175, 137, 169, 131, 140, 191, 192, 255,
- 160, 163, 164, 165, 184, 185, 186, 128,
- 159, 161, 162, 166, 191, 133, 191, 192,
- 255, 132, 160, 163, 167, 179, 184, 186,
- 128, 164, 165, 168, 169, 187, 188, 191,
- 130, 135, 137, 139, 144, 147, 151, 153,
- 155, 157, 159, 163, 171, 179, 184, 189,
- 191, 128, 140, 141, 148, 149, 160, 161,
- 164, 165, 166, 167, 190, 138, 164, 170,
- 128, 155, 156, 160, 161, 187, 188, 191,
- 128, 191, 155, 156, 128, 191, 151, 191,
- 192, 255, 156, 157, 160, 128, 191, 181,
- 191, 192, 255, 158, 159, 186, 128, 185,
- 187, 191, 192, 255, 162, 191, 192, 255,
- 160, 168, 128, 159, 161, 167, 169, 191,
- 158, 191, 192, 255, 9, 10, 13, 32,
- 33, 34, 35, 38, 46, 47, 60, 61,
- 62, 64, 92, 95, 123, 124, 125, 126,
- 127, 194, 195, 198, 199, 203, 204, 205,
- 206, 207, 210, 212, 213, 214, 215, 216,
- 217, 219, 220, 221, 222, 223, 224, 225,
- 226, 227, 228, 233, 234, 237, 238, 239,
- 240, 0, 36, 37, 45, 48, 57, 58,
- 63, 65, 90, 91, 96, 97, 122, 192,
- 193, 196, 218, 229, 236, 241, 247, 9,
- 32, 10, 61, 10, 38, 46, 42, 47,
- 46, 69, 101, 48, 57, 60, 61, 61,
- 62, 61, 45, 95, 194, 195, 198, 199,
- 203, 204, 205, 206, 207, 210, 212, 213,
- 214, 215, 216, 217, 219, 220, 221, 222,
- 223, 224, 225, 226, 227, 228, 233, 234,
- 237, 239, 240, 243, 48, 57, 65, 90,
- 97, 122, 196, 218, 229, 236, 124, 125,
- 128, 191, 170, 181, 186, 128, 191, 151,
- 183, 128, 255, 192, 255, 0, 127, 173,
- 130, 133, 146, 159, 165, 171, 175, 191,
- 192, 255, 181, 190, 128, 175, 176, 183,
- 184, 185, 186, 191, 134, 139, 141, 162,
- 128, 135, 136, 255, 182, 130, 137, 176,
- 151, 152, 154, 160, 136, 191, 192, 255,
- 128, 143, 144, 170, 171, 175, 176, 178,
- 179, 191, 128, 159, 160, 191, 176, 128,
- 138, 139, 173, 174, 255, 148, 150, 164,
- 167, 173, 176, 185, 189, 190, 192, 255,
- 144, 128, 145, 146, 175, 176, 191, 128,
- 140, 141, 255, 166, 176, 178, 191, 192,
- 255, 186, 128, 137, 138, 170, 171, 179,
- 180, 181, 182, 191, 160, 161, 162, 164,
- 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180,
- 181, 182, 183, 184, 185, 186, 187, 188,
- 189, 190, 128, 191, 128, 129, 130, 131,
- 137, 138, 139, 140, 141, 142, 143, 144,
- 153, 154, 155, 156, 157, 158, 159, 160,
- 161, 162, 163, 164, 165, 166, 167, 168,
- 169, 170, 171, 172, 173, 174, 175, 176,
- 177, 178, 179, 180, 182, 183, 184, 188,
- 189, 190, 191, 132, 187, 129, 130, 132,
- 133, 134, 176, 177, 178, 179, 180, 181,
- 182, 183, 128, 191, 128, 129, 130, 131,
- 132, 133, 134, 135, 144, 136, 143, 145,
- 191, 192, 255, 182, 183, 184, 128, 191,
- 128, 191, 191, 128, 190, 192, 255, 128,
- 146, 147, 148, 152, 153, 154, 155, 156,
- 158, 159, 160, 161, 162, 163, 164, 165,
- 166, 167, 168, 169, 170, 171, 172, 173,
- 174, 175, 176, 129, 191, 192, 255, 158,
- 159, 128, 157, 160, 191, 192, 255, 128,
- 191, 164, 169, 171, 172, 173, 174, 175,
- 180, 181, 182, 183, 184, 185, 187, 188,
- 189, 190, 191, 128, 163, 165, 186, 144,
- 145, 146, 147, 148, 150, 151, 152, 155,
- 157, 158, 160, 170, 171, 172, 175, 128,
- 159, 161, 169, 173, 191, 128, 191, 10,
- 13, 34, 36, 37, 92, 128, 191, 192,
- 223, 224, 239, 240, 247, 248, 255, 10,
- 13, 34, 92, 36, 37, 128, 191, 192,
- 223, 224, 239, 240, 247, 248, 255, 10,
- 13, 36, 123, 123, 126, 126, 37, 123,
- 126, 10, 13, 128, 191, 192, 223, 224,
- 239, 240, 247, 248, 255, 128, 191, 128,
- 191, 128, 191, 10, 13, 36, 37, 128,
- 191, 192, 223, 224, 239, 240, 247, 248,
- 255, 10, 13, 36, 37, 128, 191, 192,
- 223, 224, 239, 240, 247, 248, 255, 10,
- 13, 10, 13, 123, 10, 13, 126, 10,
- 13, 126, 126, 128, 191, 128, 191, 128,
- 191, 10, 13, 36, 37, 128, 191, 192,
- 223, 224, 239, 240, 247, 248, 255, 10,
- 13, 36, 37, 128, 191, 192, 223, 224,
- 239, 240, 247, 248, 255, 10, 13, 10,
- 13, 123, 10, 13, 126, 10, 13, 126,
- 126, 128, 191, 128, 191, 128, 191, 95,
- 194, 195, 198, 199, 203, 204, 205, 206,
- 207, 210, 212, 213, 214, 215, 216, 217,
- 219, 220, 221, 222, 223, 224, 225, 226,
- 227, 228, 233, 234, 237, 238, 239, 240,
- 65, 90, 97, 122, 128, 191, 192, 193,
- 196, 218, 229, 236, 241, 247, 248, 255,
- 45, 95, 194, 195, 198, 199, 203, 204,
- 205, 206, 207, 210, 212, 213, 214, 215,
- 216, 217, 219, 220, 221, 222, 223, 224,
- 225, 226, 227, 228, 233, 234, 237, 239,
- 240, 243, 48, 57, 65, 90, 97, 122,
- 196, 218, 229, 236, 128, 191, 170, 181,
- 186, 128, 191, 151, 183, 128, 255, 192,
- 255, 0, 127, 173, 130, 133, 146, 159,
- 165, 171, 175, 191, 192, 255, 181, 190,
- 128, 175, 176, 183, 184, 185, 186, 191,
- 134, 139, 141, 162, 128, 135, 136, 255,
- 182, 130, 137, 176, 151, 152, 154, 160,
- 136, 191, 192, 255, 128, 143, 144, 170,
- 171, 175, 176, 178, 179, 191, 128, 159,
- 160, 191, 176, 128, 138, 139, 173, 174,
- 255, 148, 150, 164, 167, 173, 176, 185,
- 189, 190, 192, 255, 144, 128, 145, 146,
- 175, 176, 191, 128, 140, 141, 255, 166,
- 176, 178, 191, 192, 255, 186, 128, 137,
- 138, 170, 171, 179, 180, 181, 182, 191,
- 160, 161, 162, 164, 165, 166, 167, 168,
- 169, 170, 171, 172, 173, 174, 175, 176,
- 177, 178, 179, 180, 181, 182, 183, 184,
- 185, 186, 187, 188, 189, 190, 128, 191,
- 128, 129, 130, 131, 137, 138, 139, 140,
- 141, 142, 143, 144, 153, 154, 155, 156,
- 157, 158, 159, 160, 161, 162, 163, 164,
- 165, 166, 167, 168, 169, 170, 171, 172,
- 173, 174, 175, 176, 177, 178, 179, 180,
- 182, 183, 184, 188, 189, 190, 191, 132,
- 187, 129, 130, 132, 133, 134, 176, 177,
- 178, 179, 180, 181, 182, 183, 128, 191,
- 128, 129, 130, 131, 132, 133, 134, 135,
- 144, 136, 143, 145, 191, 192, 255, 182,
- 183, 184, 128, 191, 128, 191, 191, 128,
- 190, 192, 255, 128, 146, 147, 148, 152,
- 153, 154, 155, 156, 158, 159, 160, 161,
- 162, 163, 164, 165, 166, 167, 168, 169,
- 170, 171, 172, 173, 174, 175, 176, 129,
- 191, 192, 255, 158, 159, 128, 157, 160,
- 191, 192, 255, 128, 191, 164, 169, 171,
- 172, 173, 174, 175, 180, 181, 182, 183,
- 184, 185, 187, 188, 189, 190, 191, 128,
- 163, 165, 186, 144, 145, 146, 147, 148,
- 150, 151, 152, 155, 157, 158, 160, 170,
- 171, 172, 175, 128, 159, 161, 169, 173,
- 191, 128, 191,
-}
-
-var _hcltok_single_lengths []byte = []byte{
- 0, 1, 1, 2, 3, 2, 0, 32,
- 31, 36, 1, 4, 0, 0, 0, 0,
- 1, 2, 1, 1, 1, 1, 0, 1,
- 1, 0, 0, 2, 0, 0, 0, 1,
- 32, 0, 0, 0, 0, 1, 3, 1,
- 1, 1, 0, 2, 0, 1, 1, 2,
- 0, 3, 0, 1, 0, 2, 1, 2,
- 0, 0, 5, 1, 4, 0, 0, 1,
- 43, 0, 0, 0, 2, 3, 2, 1,
- 1, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 1, 1, 0, 0,
- 0, 0, 0, 0, 0, 0, 4, 1,
- 0, 15, 0, 0, 0, 1, 6, 1,
- 0, 0, 1, 0, 2, 0, 0, 0,
- 9, 0, 1, 1, 0, 0, 0, 3,
- 0, 1, 0, 28, 0, 0, 0, 1,
- 0, 1, 0, 0, 0, 1, 0, 0,
- 0, 0, 0, 0, 0, 1, 0, 2,
- 0, 0, 18, 0, 0, 1, 0, 0,
- 0, 0, 0, 0, 0, 0, 1, 0,
- 0, 0, 16, 36, 0, 0, 0, 0,
- 1, 0, 0, 0, 0, 0, 1, 0,
- 0, 0, 0, 0, 0, 2, 0, 0,
- 0, 0, 0, 1, 0, 0, 0, 0,
- 0, 0, 0, 28, 0, 0, 0, 1,
- 1, 1, 1, 0, 0, 2, 0, 1,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 1, 1, 4, 0, 0, 2, 2,
- 0, 11, 0, 0, 0, 0, 0, 0,
- 0, 1, 1, 3, 0, 0, 4, 0,
- 0, 0, 18, 0, 0, 0, 1, 4,
- 1, 4, 1, 0, 3, 2, 2, 2,
- 1, 0, 0, 1, 8, 0, 0, 0,
- 4, 12, 0, 2, 0, 3, 0, 1,
- 0, 2, 0, 1, 2, 0, 3, 1,
- 2, 0, 0, 0, 0, 0, 1, 1,
- 0, 0, 1, 28, 3, 0, 1, 1,
- 2, 1, 0, 1, 1, 2, 1, 1,
- 2, 1, 1, 0, 2, 1, 1, 1,
- 1, 0, 0, 6, 1, 1, 0, 0,
- 46, 1, 1, 0, 0, 0, 0, 2,
- 1, 0, 0, 0, 1, 0, 0, 0,
- 0, 0, 0, 0, 13, 2, 0, 0,
- 0, 9, 0, 1, 28, 0, 1, 3,
- 0, 2, 0, 0, 0, 1, 0, 1,
- 1, 2, 0, 18, 2, 0, 0, 16,
- 35, 0, 0, 0, 1, 0, 28, 0,
- 0, 0, 0, 1, 0, 2, 0, 0,
- 1, 0, 0, 1, 0, 0, 1, 0,
- 0, 0, 0, 1, 11, 0, 0, 0,
- 0, 4, 0, 12, 1, 7, 0, 4,
- 0, 0, 0, 0, 1, 2, 1, 1,
- 1, 1, 0, 1, 1, 0, 0, 2,
- 0, 0, 0, 1, 32, 0, 0, 0,
- 0, 1, 3, 1, 1, 1, 0, 2,
- 0, 1, 1, 2, 0, 3, 0, 1,
- 0, 2, 1, 2, 0, 0, 5, 1,
- 4, 0, 0, 1, 43, 0, 0, 0,
- 2, 3, 2, 1, 1, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 1, 0, 0, 0, 0, 0, 0,
- 0, 0, 4, 1, 0, 15, 0, 0,
- 0, 1, 6, 1, 0, 0, 1, 0,
- 2, 0, 0, 0, 9, 0, 1, 1,
- 0, 0, 0, 3, 0, 1, 0, 28,
- 0, 0, 0, 1, 0, 1, 0, 0,
- 0, 1, 0, 0, 0, 0, 0, 0,
- 0, 1, 0, 2, 0, 0, 18, 0,
- 0, 1, 0, 0, 0, 0, 0, 0,
- 0, 0, 1, 0, 0, 0, 16, 36,
- 0, 0, 0, 0, 1, 0, 0, 0,
- 0, 0, 1, 0, 0, 0, 0, 0,
- 0, 2, 0, 0, 0, 0, 0, 1,
- 0, 0, 0, 0, 0, 0, 0, 28,
- 0, 0, 0, 1, 1, 1, 1, 0,
- 0, 2, 0, 1, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 1, 1, 4,
- 0, 0, 2, 2, 0, 11, 0, 0,
- 0, 0, 0, 0, 0, 1, 1, 3,
- 0, 0, 4, 0, 0, 0, 18, 0,
- 0, 0, 1, 4, 1, 4, 1, 0,
- 3, 2, 2, 2, 1, 0, 0, 1,
- 8, 0, 0, 0, 4, 12, 0, 2,
- 0, 3, 0, 1, 0, 2, 0, 1,
- 2, 0, 0, 3, 0, 1, 1, 1,
- 2, 2, 4, 1, 6, 2, 4, 2,
- 4, 1, 4, 0, 6, 1, 3, 1,
- 2, 0, 2, 11, 1, 1, 1, 0,
- 1, 1, 0, 2, 0, 3, 3, 2,
- 1, 0, 0, 0, 1, 0, 1, 0,
- 1, 1, 0, 2, 0, 0, 1, 0,
- 0, 0, 0, 0, 0, 0, 1, 0,
- 0, 0, 0, 0, 0, 0, 1, 0,
- 0, 0, 4, 3, 2, 2, 0, 6,
- 1, 0, 1, 1, 0, 2, 0, 4,
- 3, 0, 1, 1, 0, 0, 0, 0,
- 0, 0, 0, 1, 0, 0, 0, 1,
- 0, 3, 0, 2, 0, 0, 0, 3,
- 0, 2, 1, 1, 3, 1, 0, 0,
- 0, 0, 0, 5, 2, 0, 0, 0,
- 0, 0, 0, 1, 0, 0, 1, 1,
- 0, 0, 35, 4, 0, 0, 0, 0,
- 0, 0, 0, 1, 0, 0, 0, 0,
- 0, 0, 3, 0, 1, 0, 0, 3,
- 0, 0, 1, 0, 0, 0, 0, 28,
- 0, 0, 0, 0, 1, 0, 3, 1,
- 4, 0, 1, 0, 0, 1, 0, 0,
- 1, 0, 0, 0, 0, 1, 1, 0,
- 7, 0, 0, 2, 2, 0, 11, 0,
- 0, 0, 0, 0, 1, 1, 3, 0,
- 0, 4, 0, 0, 0, 12, 1, 4,
- 1, 5, 2, 0, 3, 2, 2, 2,
- 1, 7, 0, 7, 17, 3, 0, 2,
- 0, 3, 0, 0, 1, 0, 2, 0,
- 2, 0, 0, 0, 0, 0, 1, 0,
- 0, 0, 2, 2, 1, 0, 0, 0,
- 2, 2, 4, 0, 0, 0, 0, 1,
- 2, 1, 1, 1, 1, 0, 1, 1,
- 0, 0, 2, 0, 0, 0, 1, 32,
- 0, 0, 0, 0, 1, 3, 1, 1,
- 1, 0, 2, 0, 1, 1, 2, 0,
- 3, 0, 1, 0, 2, 1, 2, 0,
- 0, 5, 1, 4, 0, 0, 1, 43,
- 0, 0, 0, 2, 3, 2, 1, 1,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 1, 1, 0, 0, 0,
- 0, 0, 0, 0, 0, 4, 1, 0,
- 15, 0, 0, 0, 1, 6, 1, 0,
- 0, 1, 0, 2, 0, 0, 0, 9,
- 0, 1, 1, 0, 0, 0, 3, 0,
- 1, 0, 28, 0, 0, 0, 1, 0,
- 1, 0, 0, 0, 1, 0, 0, 0,
- 0, 0, 0, 0, 1, 0, 2, 0,
- 0, 18, 0, 0, 1, 0, 0, 0,
- 0, 0, 0, 0, 0, 1, 0, 0,
- 0, 16, 36, 0, 0, 0, 0, 1,
- 0, 0, 0, 0, 0, 1, 0, 0,
- 0, 0, 0, 0, 2, 0, 0, 0,
- 0, 0, 1, 0, 0, 0, 0, 0,
- 0, 0, 28, 0, 0, 0, 1, 1,
- 1, 1, 0, 0, 2, 0, 1, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 1, 1, 4, 0, 0, 2, 2, 0,
- 11, 0, 0, 0, 0, 0, 0, 0,
- 1, 1, 3, 0, 0, 4, 0, 0,
- 0, 18, 0, 0, 0, 1, 4, 1,
- 4, 1, 0, 3, 2, 2, 2, 1,
- 0, 0, 1, 8, 0, 0, 0, 4,
- 12, 0, 2, 0, 3, 0, 1, 0,
- 2, 0, 1, 2, 0, 0, 3, 0,
- 1, 1, 1, 2, 2, 4, 1, 6,
- 2, 4, 2, 4, 1, 4, 0, 6,
- 1, 3, 1, 2, 0, 2, 11, 1,
- 1, 1, 0, 1, 1, 0, 2, 0,
- 3, 3, 2, 1, 0, 0, 0, 1,
- 0, 1, 0, 1, 1, 0, 2, 0,
- 0, 1, 0, 0, 0, 0, 0, 0,
- 0, 1, 0, 0, 0, 0, 0, 0,
- 0, 1, 0, 0, 0, 4, 3, 2,
- 2, 0, 6, 1, 0, 1, 1, 0,
- 2, 0, 4, 3, 0, 1, 1, 0,
- 0, 0, 0, 0, 0, 0, 1, 0,
- 0, 0, 1, 0, 3, 0, 2, 0,
- 0, 0, 3, 0, 2, 1, 1, 3,
- 1, 0, 0, 0, 0, 0, 5, 2,
- 0, 0, 0, 0, 0, 0, 1, 0,
- 0, 1, 1, 0, 0, 35, 4, 0,
- 0, 0, 0, 0, 0, 0, 1, 0,
- 0, 0, 0, 0, 0, 3, 0, 1,
- 0, 0, 3, 0, 0, 1, 0, 0,
- 0, 0, 28, 0, 0, 0, 0, 1,
- 0, 3, 1, 4, 0, 1, 0, 0,
- 1, 0, 0, 1, 0, 0, 0, 0,
- 1, 1, 0, 7, 0, 0, 2, 2,
- 0, 11, 0, 0, 0, 0, 0, 1,
- 1, 3, 0, 0, 4, 0, 0, 0,
- 12, 1, 4, 1, 5, 2, 0, 3,
- 2, 2, 2, 1, 7, 0, 7, 17,
- 3, 0, 2, 0, 3, 0, 0, 1,
- 0, 2, 0, 53, 2, 1, 1, 1,
- 1, 1, 2, 3, 2, 2, 1, 34,
- 1, 1, 0, 3, 2, 0, 0, 0,
- 1, 2, 4, 1, 0, 1, 0, 0,
- 0, 0, 1, 1, 1, 0, 0, 1,
- 30, 47, 13, 9, 3, 0, 1, 28,
- 2, 0, 18, 16, 0, 6, 4, 2,
- 2, 0, 1, 1, 1, 2, 1, 2,
- 0, 0, 0, 4, 2, 2, 3, 3,
- 2, 1, 1, 0, 0, 0, 4, 2,
- 2, 3, 3, 2, 1, 1, 0, 0,
- 0, 33, 34, 0, 3, 2, 0, 0,
- 0, 1, 2, 4, 1, 0, 1, 0,
- 0, 0, 0, 1, 1, 1, 0, 0,
- 1, 30, 47, 13, 9, 3, 0, 1,
- 28, 2, 0, 18, 16, 0,
-}
-
-var _hcltok_range_lengths []byte = []byte{
- 0, 0, 0, 0, 1, 1, 1, 5,
- 5, 5, 0, 0, 3, 0, 1, 1,
- 4, 2, 3, 0, 1, 0, 2, 2,
- 4, 2, 2, 3, 1, 1, 1, 1,
- 0, 1, 1, 2, 2, 1, 4, 6,
- 9, 6, 8, 5, 8, 7, 10, 4,
- 6, 4, 7, 7, 5, 5, 4, 5,
- 1, 2, 8, 4, 3, 3, 3, 0,
- 3, 1, 2, 1, 2, 2, 3, 3,
- 1, 3, 2, 2, 1, 2, 2, 2,
- 3, 4, 4, 3, 1, 2, 1, 3,
- 2, 2, 2, 2, 2, 3, 3, 1,
- 1, 2, 1, 3, 2, 2, 3, 2,
- 7, 0, 1, 4, 1, 2, 4, 2,
- 1, 2, 0, 2, 2, 3, 5, 5,
- 1, 4, 1, 1, 2, 2, 1, 0,
- 0, 1, 1, 1, 1, 1, 2, 2,
- 2, 2, 1, 1, 1, 4, 2, 2,
- 3, 1, 4, 4, 6, 1, 3, 1,
- 1, 2, 1, 1, 1, 5, 3, 1,
- 1, 1, 2, 3, 3, 1, 2, 2,
- 1, 4, 1, 2, 5, 2, 1, 1,
- 0, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 1, 1, 2, 4, 2, 1,
- 2, 2, 2, 6, 1, 1, 2, 1,
- 2, 1, 1, 1, 2, 2, 2, 1,
- 3, 2, 5, 2, 8, 6, 2, 2,
- 2, 2, 3, 1, 3, 1, 2, 1,
- 3, 2, 2, 3, 1, 1, 1, 1,
- 1, 1, 1, 2, 2, 4, 1, 2,
- 1, 0, 1, 1, 1, 1, 0, 1,
- 2, 3, 1, 3, 3, 1, 0, 3,
- 0, 2, 3, 1, 0, 0, 0, 0,
- 2, 2, 2, 2, 1, 5, 2, 2,
- 5, 7, 5, 0, 1, 0, 1, 1,
- 1, 1, 1, 0, 1, 1, 0, 3,
- 3, 1, 1, 2, 1, 3, 5, 1,
- 1, 2, 2, 1, 1, 1, 1, 2,
- 6, 3, 7, 2, 6, 1, 6, 2,
- 8, 0, 4, 2, 5, 2, 3, 3,
- 3, 1, 2, 8, 2, 0, 2, 1,
- 2, 1, 5, 2, 1, 3, 3, 0,
- 2, 1, 2, 1, 0, 1, 1, 3,
- 1, 1, 2, 3, 0, 0, 3, 2,
- 4, 1, 4, 1, 1, 3, 1, 1,
- 1, 1, 2, 2, 1, 3, 1, 4,
- 3, 3, 1, 1, 5, 2, 1, 1,
- 2, 1, 2, 1, 3, 2, 0, 1,
- 1, 1, 1, 1, 1, 1, 2, 1,
- 1, 1, 1, 1, 1, 1, 0, 1,
- 1, 2, 2, 1, 1, 1, 3, 2,
- 1, 0, 2, 1, 1, 1, 1, 0,
- 3, 0, 1, 1, 4, 2, 3, 0,
- 1, 0, 2, 2, 4, 2, 2, 3,
- 1, 1, 1, 1, 0, 1, 1, 2,
- 2, 1, 4, 6, 9, 6, 8, 5,
- 8, 7, 10, 4, 6, 4, 7, 7,
- 5, 5, 4, 5, 1, 2, 8, 4,
- 3, 3, 3, 0, 3, 1, 2, 1,
- 2, 2, 3, 3, 1, 3, 2, 2,
- 1, 2, 2, 2, 3, 4, 4, 3,
- 1, 2, 1, 3, 2, 2, 2, 2,
- 2, 3, 3, 1, 1, 2, 1, 3,
- 2, 2, 3, 2, 7, 0, 1, 4,
- 1, 2, 4, 2, 1, 2, 0, 2,
- 2, 3, 5, 5, 1, 4, 1, 1,
- 2, 2, 1, 0, 0, 1, 1, 1,
- 1, 1, 2, 2, 2, 2, 1, 1,
- 1, 4, 2, 2, 3, 1, 4, 4,
- 6, 1, 3, 1, 1, 2, 1, 1,
- 1, 5, 3, 1, 1, 1, 2, 3,
- 3, 1, 2, 2, 1, 4, 1, 2,
- 5, 2, 1, 1, 0, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 1, 1,
- 2, 4, 2, 1, 2, 2, 2, 6,
- 1, 1, 2, 1, 2, 1, 1, 1,
- 2, 2, 2, 1, 3, 2, 5, 2,
- 8, 6, 2, 2, 2, 2, 3, 1,
- 3, 1, 2, 1, 3, 2, 2, 3,
- 1, 1, 1, 1, 1, 1, 1, 2,
- 2, 4, 1, 2, 1, 0, 1, 1,
- 1, 1, 0, 1, 2, 3, 1, 3,
- 3, 1, 0, 3, 0, 2, 3, 1,
- 0, 0, 0, 0, 2, 2, 2, 2,
- 1, 5, 2, 2, 5, 7, 5, 0,
- 1, 0, 1, 1, 1, 1, 1, 0,
- 1, 1, 1, 2, 2, 3, 3, 4,
- 7, 5, 7, 5, 3, 3, 7, 3,
- 13, 1, 3, 5, 3, 5, 3, 6,
- 5, 2, 2, 8, 4, 1, 2, 3,
- 2, 10, 2, 2, 0, 2, 3, 3,
- 1, 2, 3, 3, 1, 2, 3, 3,
- 4, 4, 2, 1, 2, 2, 3, 2,
- 2, 5, 3, 2, 3, 2, 1, 3,
- 3, 6, 2, 2, 5, 2, 5, 1,
- 1, 2, 4, 1, 11, 1, 3, 8,
- 4, 2, 1, 0, 4, 3, 3, 3,
- 2, 9, 1, 1, 4, 3, 2, 2,
- 2, 3, 4, 2, 3, 2, 4, 3,
- 2, 2, 3, 3, 4, 3, 3, 4,
- 2, 5, 4, 8, 7, 1, 2, 1,
- 3, 1, 2, 5, 1, 2, 2, 2,
- 2, 1, 3, 2, 2, 3, 3, 1,
- 9, 1, 5, 1, 3, 2, 2, 3,
- 2, 3, 3, 3, 1, 3, 3, 2,
- 2, 4, 5, 3, 3, 4, 3, 3,
- 3, 2, 2, 2, 4, 2, 2, 1,
- 3, 3, 3, 3, 3, 3, 2, 2,
- 3, 2, 3, 3, 2, 3, 2, 3,
- 1, 2, 2, 2, 2, 2, 2, 2,
- 2, 2, 2, 2, 3, 2, 3, 2,
- 3, 5, 3, 3, 1, 2, 3, 2,
- 2, 1, 2, 3, 4, 3, 0, 3,
- 0, 2, 3, 1, 0, 0, 0, 0,
- 2, 3, 2, 4, 6, 4, 1, 1,
- 2, 1, 2, 1, 3, 2, 3, 2,
- 5, 1, 1, 1, 1, 1, 0, 1,
- 1, 1, 0, 0, 0, 1, 1, 1,
- 0, 0, 0, 3, 0, 1, 1, 4,
- 2, 3, 0, 1, 0, 2, 2, 4,
- 2, 2, 3, 1, 1, 1, 1, 0,
- 1, 1, 2, 2, 1, 4, 6, 9,
- 6, 8, 5, 8, 7, 10, 4, 6,
- 4, 7, 7, 5, 5, 4, 5, 1,
- 2, 8, 4, 3, 3, 3, 0, 3,
- 1, 2, 1, 2, 2, 3, 3, 1,
- 3, 2, 2, 1, 2, 2, 2, 3,
- 4, 4, 3, 1, 2, 1, 3, 2,
- 2, 2, 2, 2, 3, 3, 1, 1,
- 2, 1, 3, 2, 2, 3, 2, 7,
- 0, 1, 4, 1, 2, 4, 2, 1,
- 2, 0, 2, 2, 3, 5, 5, 1,
- 4, 1, 1, 2, 2, 1, 0, 0,
- 1, 1, 1, 1, 1, 2, 2, 2,
- 2, 1, 1, 1, 4, 2, 2, 3,
- 1, 4, 4, 6, 1, 3, 1, 1,
- 2, 1, 1, 1, 5, 3, 1, 1,
- 1, 2, 3, 3, 1, 2, 2, 1,
- 4, 1, 2, 5, 2, 1, 1, 0,
- 2, 2, 2, 2, 2, 2, 2, 2,
- 2, 1, 1, 2, 4, 2, 1, 2,
- 2, 2, 6, 1, 1, 2, 1, 2,
- 1, 1, 1, 2, 2, 2, 1, 3,
- 2, 5, 2, 8, 6, 2, 2, 2,
- 2, 3, 1, 3, 1, 2, 1, 3,
- 2, 2, 3, 1, 1, 1, 1, 1,
- 1, 1, 2, 2, 4, 1, 2, 1,
- 0, 1, 1, 1, 1, 0, 1, 2,
- 3, 1, 3, 3, 1, 0, 3, 0,
- 2, 3, 1, 0, 0, 0, 0, 2,
- 2, 2, 2, 1, 5, 2, 2, 5,
- 7, 5, 0, 1, 0, 1, 1, 1,
- 1, 1, 0, 1, 1, 1, 2, 2,
- 3, 3, 4, 7, 5, 7, 5, 3,
- 3, 7, 3, 13, 1, 3, 5, 3,
- 5, 3, 6, 5, 2, 2, 8, 4,
- 1, 2, 3, 2, 10, 2, 2, 0,
- 2, 3, 3, 1, 2, 3, 3, 1,
- 2, 3, 3, 4, 4, 2, 1, 2,
- 2, 3, 2, 2, 5, 3, 2, 3,
- 2, 1, 3, 3, 6, 2, 2, 5,
- 2, 5, 1, 1, 2, 4, 1, 11,
- 1, 3, 8, 4, 2, 1, 0, 4,
- 3, 3, 3, 2, 9, 1, 1, 4,
- 3, 2, 2, 2, 3, 4, 2, 3,
- 2, 4, 3, 2, 2, 3, 3, 4,
- 3, 3, 4, 2, 5, 4, 8, 7,
- 1, 2, 1, 3, 1, 2, 5, 1,
- 2, 2, 2, 2, 1, 3, 2, 2,
- 3, 3, 1, 9, 1, 5, 1, 3,
- 2, 2, 3, 2, 3, 3, 3, 1,
- 3, 3, 2, 2, 4, 5, 3, 3,
- 4, 3, 3, 3, 2, 2, 2, 4,
- 2, 2, 1, 3, 3, 3, 3, 3,
- 3, 2, 2, 3, 2, 3, 3, 2,
- 3, 2, 3, 1, 2, 2, 2, 2,
- 2, 2, 2, 2, 2, 2, 2, 3,
- 2, 3, 2, 3, 5, 3, 3, 1,
- 2, 3, 2, 2, 1, 2, 3, 4,
- 3, 0, 3, 0, 2, 3, 1, 0,
- 0, 0, 0, 2, 3, 2, 4, 6,
- 4, 1, 1, 2, 1, 2, 1, 3,
- 2, 3, 2, 11, 0, 0, 0, 0,
- 0, 0, 0, 1, 0, 0, 0, 5,
- 0, 0, 1, 1, 1, 0, 1, 1,
- 5, 4, 2, 0, 1, 0, 2, 2,
- 5, 2, 3, 5, 3, 2, 3, 5,
- 1, 1, 1, 3, 1, 1, 2, 2,
- 3, 1, 2, 3, 1, 5, 6, 0,
- 0, 0, 0, 0, 0, 0, 0, 5,
- 1, 1, 1, 5, 6, 0, 0, 0,
- 0, 0, 0, 1, 1, 1, 5, 6,
- 0, 0, 0, 0, 0, 0, 1, 1,
- 1, 8, 5, 1, 1, 1, 0, 1,
- 1, 5, 4, 2, 0, 1, 0, 2,
- 2, 5, 2, 3, 5, 3, 2, 3,
- 5, 1, 1, 1, 3, 1, 1, 2,
- 2, 3, 1, 2, 3, 1,
-}
-
-var _hcltok_index_offsets []int16 = []int16{
- 0, 0, 2, 4, 7, 12, 16, 18,
- 56, 93, 135, 137, 142, 146, 147, 149,
- 151, 157, 162, 167, 169, 172, 174, 177,
- 181, 187, 190, 193, 199, 201, 203, 205,
- 208, 241, 243, 245, 248, 251, 254, 262,
- 270, 281, 289, 298, 306, 315, 324, 336,
- 343, 350, 358, 366, 375, 381, 389, 395,
- 403, 405, 408, 422, 428, 436, 440, 444,
- 446, 493, 495, 498, 500, 505, 511, 517,
- 522, 525, 529, 532, 535, 537, 540, 543,
- 546, 550, 555, 560, 564, 566, 569, 571,
- 575, 578, 581, 584, 587, 591, 596, 600,
- 602, 604, 607, 609, 613, 616, 619, 627,
- 631, 639, 655, 657, 662, 664, 668, 679,
- 683, 685, 688, 690, 693, 698, 702, 708,
- 714, 725, 730, 733, 736, 739, 742, 744,
- 748, 749, 752, 754, 784, 786, 788, 791,
- 795, 798, 802, 804, 806, 808, 814, 817,
- 820, 824, 826, 831, 836, 843, 846, 850,
- 854, 856, 859, 879, 881, 883, 890, 894,
- 896, 898, 900, 903, 907, 911, 913, 917,
- 920, 922, 927, 945, 984, 990, 993, 995,
- 997, 999, 1002, 1005, 1008, 1011, 1014, 1018,
- 1021, 1024, 1027, 1029, 1031, 1034, 1041, 1044,
- 1046, 1049, 1052, 1055, 1063, 1065, 1067, 1070,
- 1072, 1075, 1077, 1079, 1109, 1112, 1115, 1118,
- 1121, 1126, 1130, 1137, 1140, 1149, 1158, 1161,
- 1165, 1168, 1171, 1175, 1177, 1181, 1183, 1186,
- 1188, 1192, 1196, 1200, 1208, 1210, 1212, 1216,
- 1220, 1222, 1235, 1237, 1240, 1243, 1248, 1250,
- 1253, 1255, 1257, 1260, 1265, 1267, 1269, 1274,
- 1276, 1279, 1283, 1303, 1307, 1311, 1313, 1315,
- 1323, 1325, 1332, 1337, 1339, 1343, 1346, 1349,
- 1352, 1356, 1359, 1362, 1366, 1376, 1382, 1385,
- 1388, 1398, 1418, 1424, 1427, 1429, 1433, 1435,
- 1438, 1440, 1444, 1446, 1448, 1452, 1454, 1458,
- 1463, 1469, 1471, 1473, 1476, 1478, 1482, 1489,
- 1492, 1494, 1497, 1501, 1531, 1536, 1538, 1541,
- 1545, 1554, 1559, 1567, 1571, 1579, 1583, 1591,
- 1595, 1606, 1608, 1614, 1617, 1625, 1629, 1634,
- 1639, 1644, 1646, 1649, 1664, 1668, 1670, 1673,
- 1675, 1724, 1727, 1734, 1737, 1739, 1743, 1747,
- 1750, 1754, 1756, 1759, 1761, 1763, 1765, 1767,
- 1771, 1773, 1775, 1778, 1782, 1796, 1799, 1803,
- 1806, 1811, 1822, 1827, 1830, 1860, 1864, 1867,
- 1872, 1874, 1878, 1881, 1884, 1886, 1891, 1893,
- 1899, 1904, 1910, 1912, 1932, 1940, 1943, 1945,
- 1963, 2001, 2003, 2006, 2008, 2013, 2016, 2045,
- 2047, 2049, 2051, 2053, 2056, 2058, 2062, 2065,
- 2067, 2070, 2072, 2074, 2077, 2079, 2081, 2083,
- 2085, 2087, 2090, 2093, 2096, 2109, 2111, 2115,
- 2118, 2120, 2125, 2128, 2142, 2145, 2154, 2156,
- 2161, 2165, 2166, 2168, 2170, 2176, 2181, 2186,
- 2188, 2191, 2193, 2196, 2200, 2206, 2209, 2212,
- 2218, 2220, 2222, 2224, 2227, 2260, 2262, 2264,
- 2267, 2270, 2273, 2281, 2289, 2300, 2308, 2317,
- 2325, 2334, 2343, 2355, 2362, 2369, 2377, 2385,
- 2394, 2400, 2408, 2414, 2422, 2424, 2427, 2441,
- 2447, 2455, 2459, 2463, 2465, 2512, 2514, 2517,
- 2519, 2524, 2530, 2536, 2541, 2544, 2548, 2551,
- 2554, 2556, 2559, 2562, 2565, 2569, 2574, 2579,
- 2583, 2585, 2588, 2590, 2594, 2597, 2600, 2603,
- 2606, 2610, 2615, 2619, 2621, 2623, 2626, 2628,
- 2632, 2635, 2638, 2646, 2650, 2658, 2674, 2676,
- 2681, 2683, 2687, 2698, 2702, 2704, 2707, 2709,
- 2712, 2717, 2721, 2727, 2733, 2744, 2749, 2752,
- 2755, 2758, 2761, 2763, 2767, 2768, 2771, 2773,
- 2803, 2805, 2807, 2810, 2814, 2817, 2821, 2823,
- 2825, 2827, 2833, 2836, 2839, 2843, 2845, 2850,
- 2855, 2862, 2865, 2869, 2873, 2875, 2878, 2898,
- 2900, 2902, 2909, 2913, 2915, 2917, 2919, 2922,
- 2926, 2930, 2932, 2936, 2939, 2941, 2946, 2964,
- 3003, 3009, 3012, 3014, 3016, 3018, 3021, 3024,
- 3027, 3030, 3033, 3037, 3040, 3043, 3046, 3048,
- 3050, 3053, 3060, 3063, 3065, 3068, 3071, 3074,
- 3082, 3084, 3086, 3089, 3091, 3094, 3096, 3098,
- 3128, 3131, 3134, 3137, 3140, 3145, 3149, 3156,
- 3159, 3168, 3177, 3180, 3184, 3187, 3190, 3194,
- 3196, 3200, 3202, 3205, 3207, 3211, 3215, 3219,
- 3227, 3229, 3231, 3235, 3239, 3241, 3254, 3256,
- 3259, 3262, 3267, 3269, 3272, 3274, 3276, 3279,
- 3284, 3286, 3288, 3293, 3295, 3298, 3302, 3322,
- 3326, 3330, 3332, 3334, 3342, 3344, 3351, 3356,
- 3358, 3362, 3365, 3368, 3371, 3375, 3378, 3381,
- 3385, 3395, 3401, 3404, 3407, 3417, 3437, 3443,
- 3446, 3448, 3452, 3454, 3457, 3459, 3463, 3465,
- 3467, 3471, 3473, 3475, 3481, 3484, 3489, 3494,
- 3500, 3510, 3518, 3530, 3537, 3547, 3553, 3565,
- 3571, 3589, 3592, 3600, 3606, 3616, 3623, 3630,
- 3638, 3646, 3649, 3654, 3674, 3680, 3683, 3687,
- 3691, 3695, 3707, 3710, 3715, 3716, 3722, 3729,
- 3735, 3738, 3741, 3745, 3749, 3752, 3755, 3760,
- 3764, 3770, 3776, 3779, 3783, 3786, 3789, 3794,
- 3797, 3800, 3806, 3810, 3813, 3817, 3820, 3823,
- 3827, 3831, 3838, 3841, 3844, 3850, 3853, 3860,
- 3862, 3864, 3867, 3876, 3881, 3895, 3899, 3903,
- 3918, 3924, 3927, 3930, 3932, 3937, 3943, 3947,
- 3955, 3961, 3971, 3974, 3977, 3982, 3986, 3989,
- 3992, 3995, 3999, 4004, 4008, 4012, 4015, 4020,
- 4025, 4028, 4034, 4038, 4044, 4049, 4053, 4057,
- 4065, 4068, 4076, 4082, 4092, 4103, 4106, 4109,
- 4111, 4115, 4117, 4120, 4131, 4135, 4138, 4141,
- 4144, 4147, 4149, 4153, 4157, 4160, 4164, 4169,
- 4172, 4182, 4184, 4225, 4231, 4235, 4238, 4241,
- 4245, 4248, 4252, 4256, 4261, 4263, 4267, 4271,
- 4274, 4277, 4282, 4291, 4295, 4300, 4305, 4309,
- 4316, 4320, 4323, 4327, 4330, 4335, 4338, 4341,
- 4371, 4375, 4379, 4383, 4387, 4392, 4396, 4402,
- 4406, 4414, 4417, 4422, 4426, 4429, 4434, 4437,
- 4441, 4444, 4447, 4450, 4453, 4456, 4460, 4464,
- 4467, 4477, 4480, 4483, 4488, 4494, 4497, 4512,
- 4515, 4519, 4525, 4529, 4533, 4536, 4540, 4547,
- 4550, 4553, 4559, 4562, 4566, 4571, 4587, 4589,
- 4597, 4599, 4607, 4613, 4615, 4619, 4622, 4625,
- 4628, 4632, 4643, 4646, 4658, 4682, 4690, 4692,
- 4696, 4699, 4704, 4707, 4709, 4714, 4717, 4723,
- 4726, 4734, 4736, 4738, 4740, 4742, 4744, 4746,
- 4748, 4750, 4752, 4755, 4758, 4760, 4762, 4764,
- 4766, 4769, 4772, 4777, 4781, 4782, 4784, 4786,
- 4792, 4797, 4802, 4804, 4807, 4809, 4812, 4816,
- 4822, 4825, 4828, 4834, 4836, 4838, 4840, 4843,
- 4876, 4878, 4880, 4883, 4886, 4889, 4897, 4905,
- 4916, 4924, 4933, 4941, 4950, 4959, 4971, 4978,
- 4985, 4993, 5001, 5010, 5016, 5024, 5030, 5038,
- 5040, 5043, 5057, 5063, 5071, 5075, 5079, 5081,
- 5128, 5130, 5133, 5135, 5140, 5146, 5152, 5157,
- 5160, 5164, 5167, 5170, 5172, 5175, 5178, 5181,
- 5185, 5190, 5195, 5199, 5201, 5204, 5206, 5210,
- 5213, 5216, 5219, 5222, 5226, 5231, 5235, 5237,
- 5239, 5242, 5244, 5248, 5251, 5254, 5262, 5266,
- 5274, 5290, 5292, 5297, 5299, 5303, 5314, 5318,
- 5320, 5323, 5325, 5328, 5333, 5337, 5343, 5349,
- 5360, 5365, 5368, 5371, 5374, 5377, 5379, 5383,
- 5384, 5387, 5389, 5419, 5421, 5423, 5426, 5430,
- 5433, 5437, 5439, 5441, 5443, 5449, 5452, 5455,
- 5459, 5461, 5466, 5471, 5478, 5481, 5485, 5489,
- 5491, 5494, 5514, 5516, 5518, 5525, 5529, 5531,
- 5533, 5535, 5538, 5542, 5546, 5548, 5552, 5555,
- 5557, 5562, 5580, 5619, 5625, 5628, 5630, 5632,
- 5634, 5637, 5640, 5643, 5646, 5649, 5653, 5656,
- 5659, 5662, 5664, 5666, 5669, 5676, 5679, 5681,
- 5684, 5687, 5690, 5698, 5700, 5702, 5705, 5707,
- 5710, 5712, 5714, 5744, 5747, 5750, 5753, 5756,
- 5761, 5765, 5772, 5775, 5784, 5793, 5796, 5800,
- 5803, 5806, 5810, 5812, 5816, 5818, 5821, 5823,
- 5827, 5831, 5835, 5843, 5845, 5847, 5851, 5855,
- 5857, 5870, 5872, 5875, 5878, 5883, 5885, 5888,
- 5890, 5892, 5895, 5900, 5902, 5904, 5909, 5911,
- 5914, 5918, 5938, 5942, 5946, 5948, 5950, 5958,
- 5960, 5967, 5972, 5974, 5978, 5981, 5984, 5987,
- 5991, 5994, 5997, 6001, 6011, 6017, 6020, 6023,
- 6033, 6053, 6059, 6062, 6064, 6068, 6070, 6073,
- 6075, 6079, 6081, 6083, 6087, 6089, 6091, 6097,
- 6100, 6105, 6110, 6116, 6126, 6134, 6146, 6153,
- 6163, 6169, 6181, 6187, 6205, 6208, 6216, 6222,
- 6232, 6239, 6246, 6254, 6262, 6265, 6270, 6290,
- 6296, 6299, 6303, 6307, 6311, 6323, 6326, 6331,
- 6332, 6338, 6345, 6351, 6354, 6357, 6361, 6365,
- 6368, 6371, 6376, 6380, 6386, 6392, 6395, 6399,
- 6402, 6405, 6410, 6413, 6416, 6422, 6426, 6429,
- 6433, 6436, 6439, 6443, 6447, 6454, 6457, 6460,
- 6466, 6469, 6476, 6478, 6480, 6483, 6492, 6497,
- 6511, 6515, 6519, 6534, 6540, 6543, 6546, 6548,
- 6553, 6559, 6563, 6571, 6577, 6587, 6590, 6593,
- 6598, 6602, 6605, 6608, 6611, 6615, 6620, 6624,
- 6628, 6631, 6636, 6641, 6644, 6650, 6654, 6660,
- 6665, 6669, 6673, 6681, 6684, 6692, 6698, 6708,
- 6719, 6722, 6725, 6727, 6731, 6733, 6736, 6747,
- 6751, 6754, 6757, 6760, 6763, 6765, 6769, 6773,
- 6776, 6780, 6785, 6788, 6798, 6800, 6841, 6847,
- 6851, 6854, 6857, 6861, 6864, 6868, 6872, 6877,
- 6879, 6883, 6887, 6890, 6893, 6898, 6907, 6911,
- 6916, 6921, 6925, 6932, 6936, 6939, 6943, 6946,
- 6951, 6954, 6957, 6987, 6991, 6995, 6999, 7003,
- 7008, 7012, 7018, 7022, 7030, 7033, 7038, 7042,
- 7045, 7050, 7053, 7057, 7060, 7063, 7066, 7069,
- 7072, 7076, 7080, 7083, 7093, 7096, 7099, 7104,
- 7110, 7113, 7128, 7131, 7135, 7141, 7145, 7149,
- 7152, 7156, 7163, 7166, 7169, 7175, 7178, 7182,
- 7187, 7203, 7205, 7213, 7215, 7223, 7229, 7231,
- 7235, 7238, 7241, 7244, 7248, 7259, 7262, 7274,
- 7298, 7306, 7308, 7312, 7315, 7320, 7323, 7325,
- 7330, 7333, 7339, 7342, 7407, 7410, 7412, 7414,
- 7416, 7418, 7420, 7423, 7428, 7431, 7434, 7436,
- 7476, 7478, 7480, 7482, 7487, 7491, 7492, 7494,
- 7496, 7503, 7510, 7517, 7519, 7521, 7523, 7526,
- 7529, 7535, 7538, 7543, 7550, 7555, 7558, 7562,
- 7569, 7601, 7650, 7665, 7678, 7683, 7685, 7689,
- 7720, 7726, 7728, 7749, 7769, 7771, 7783, 7794,
- 7797, 7800, 7801, 7803, 7805, 7807, 7810, 7812,
- 7820, 7822, 7824, 7826, 7836, 7845, 7848, 7852,
- 7856, 7859, 7861, 7863, 7865, 7867, 7869, 7879,
- 7888, 7891, 7895, 7899, 7902, 7904, 7906, 7908,
- 7910, 7912, 7954, 7994, 7996, 8001, 8005, 8006,
- 8008, 8010, 8017, 8024, 8031, 8033, 8035, 8037,
- 8040, 8043, 8049, 8052, 8057, 8064, 8069, 8072,
- 8076, 8083, 8115, 8164, 8179, 8192, 8197, 8199,
- 8203, 8234, 8240, 8242, 8263, 8283,
-}
-
-var _hcltok_indicies []int16 = []int16{
- 1, 0, 3, 2, 3, 4, 2, 6,
- 8, 8, 7, 5, 9, 9, 7, 5,
- 7, 5, 10, 11, 12, 13, 15, 16,
- 17, 18, 19, 20, 21, 22, 23, 24,
- 25, 26, 27, 28, 29, 30, 31, 32,
- 33, 34, 35, 36, 37, 39, 40, 41,
- 42, 43, 11, 11, 14, 14, 38, 0,
- 11, 12, 13, 15, 16, 17, 18, 19,
- 20, 21, 22, 23, 24, 25, 26, 27,
- 28, 29, 30, 31, 32, 33, 34, 35,
- 36, 37, 39, 40, 41, 42, 43, 11,
- 11, 14, 14, 38, 0, 44, 45, 11,
- 11, 46, 13, 15, 16, 17, 16, 47,
- 48, 20, 49, 22, 23, 50, 51, 52,
- 53, 54, 55, 56, 57, 58, 59, 60,
- 61, 62, 37, 39, 63, 41, 64, 65,
- 66, 11, 11, 11, 14, 38, 0, 44,
- 0, 11, 11, 11, 11, 0, 11, 11,
- 11, 0, 11, 0, 11, 11, 0, 0,
- 0, 0, 0, 0, 11, 0, 0, 0,
- 0, 11, 11, 11, 11, 11, 0, 0,
- 11, 0, 0, 11, 0, 11, 0, 0,
- 11, 0, 0, 0, 11, 11, 11, 11,
- 11, 11, 0, 11, 11, 0, 11, 11,
- 0, 0, 0, 0, 0, 0, 11, 11,
- 0, 0, 11, 0, 11, 11, 11, 0,
- 67, 68, 69, 70, 14, 71, 72, 73,
- 74, 75, 76, 77, 78, 79, 80, 81,
- 82, 83, 84, 85, 86, 87, 88, 89,
- 90, 91, 92, 93, 94, 95, 96, 97,
- 0, 11, 0, 11, 0, 11, 11, 0,
- 11, 11, 0, 0, 0, 11, 0, 0,
- 0, 0, 0, 0, 0, 11, 0, 0,
- 0, 0, 0, 0, 0, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 0, 0, 0, 0, 0, 0, 0,
- 0, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 0, 11, 11, 11, 11, 11,
- 11, 11, 11, 0, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 0,
- 11, 11, 11, 11, 11, 11, 0, 11,
- 11, 11, 11, 11, 11, 0, 0, 0,
- 0, 0, 0, 0, 0, 11, 11, 11,
- 11, 11, 11, 11, 11, 0, 11, 11,
- 11, 11, 11, 11, 11, 11, 0, 11,
- 11, 11, 11, 11, 0, 0, 0, 0,
- 0, 0, 0, 0, 11, 11, 11, 11,
- 11, 11, 0, 11, 11, 11, 11, 11,
- 11, 11, 0, 11, 0, 11, 11, 0,
- 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 0, 11, 11,
- 11, 11, 11, 0, 11, 11, 11, 11,
- 11, 11, 11, 0, 11, 11, 11, 0,
- 11, 11, 11, 0, 11, 0, 98, 99,
- 100, 101, 102, 103, 104, 105, 106, 107,
- 108, 109, 110, 111, 112, 113, 114, 16,
- 115, 116, 117, 118, 119, 120, 121, 122,
- 123, 124, 125, 126, 127, 128, 129, 130,
- 131, 132, 14, 15, 133, 134, 135, 136,
- 137, 14, 16, 14, 0, 11, 0, 11,
- 11, 0, 0, 11, 0, 0, 0, 0,
- 11, 0, 0, 0, 0, 0, 11, 0,
- 0, 0, 0, 0, 11, 11, 11, 11,
- 11, 0, 0, 0, 11, 0, 0, 0,
- 11, 11, 11, 0, 0, 0, 11, 11,
- 0, 0, 0, 11, 11, 11, 0, 0,
- 0, 11, 11, 11, 11, 0, 11, 11,
- 11, 11, 0, 0, 0, 0, 0, 11,
- 11, 11, 11, 0, 0, 11, 11, 11,
- 0, 0, 11, 11, 11, 11, 0, 11,
- 11, 0, 11, 11, 0, 0, 0, 11,
- 11, 11, 0, 0, 0, 0, 11, 11,
- 11, 11, 11, 0, 0, 0, 0, 11,
- 0, 11, 11, 0, 11, 11, 0, 11,
- 0, 11, 11, 11, 0, 11, 11, 0,
- 0, 0, 11, 0, 0, 0, 0, 0,
- 0, 0, 11, 11, 11, 11, 0, 11,
- 11, 11, 11, 11, 11, 11, 0, 138,
- 139, 140, 141, 142, 143, 144, 145, 146,
- 14, 147, 148, 149, 150, 151, 0, 11,
- 0, 0, 0, 0, 0, 11, 11, 0,
- 11, 11, 11, 0, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 0, 11,
- 11, 11, 0, 0, 11, 11, 11, 0,
- 0, 11, 0, 0, 11, 11, 11, 11,
- 11, 0, 0, 0, 0, 11, 11, 11,
- 11, 11, 11, 0, 11, 11, 11, 11,
- 11, 0, 152, 109, 153, 154, 155, 14,
- 156, 157, 16, 14, 0, 11, 11, 11,
- 11, 0, 0, 0, 11, 0, 0, 11,
- 11, 11, 0, 0, 0, 11, 11, 0,
- 119, 0, 16, 14, 14, 158, 0, 14,
- 0, 11, 16, 159, 160, 16, 161, 162,
- 16, 57, 163, 164, 165, 166, 167, 16,
- 168, 169, 170, 16, 171, 172, 173, 15,
- 174, 175, 176, 15, 177, 16, 14, 0,
- 0, 11, 11, 0, 0, 0, 11, 11,
- 11, 11, 0, 11, 11, 0, 0, 0,
- 0, 11, 11, 0, 0, 11, 11, 0,
- 0, 0, 0, 0, 0, 11, 11, 11,
- 0, 0, 0, 11, 0, 0, 0, 11,
- 11, 0, 11, 11, 11, 11, 0, 11,
- 11, 11, 11, 0, 11, 11, 11, 11,
- 11, 11, 0, 0, 0, 11, 11, 11,
- 11, 0, 178, 179, 0, 14, 0, 11,
- 0, 0, 11, 16, 180, 181, 182, 183,
- 57, 184, 185, 55, 186, 187, 188, 189,
- 190, 191, 192, 193, 194, 14, 0, 0,
- 11, 0, 11, 11, 11, 11, 11, 11,
- 11, 0, 11, 11, 11, 0, 11, 0,
- 0, 11, 0, 11, 0, 0, 11, 11,
- 11, 11, 0, 11, 11, 11, 0, 0,
- 11, 11, 11, 11, 0, 11, 11, 0,
- 0, 11, 11, 11, 11, 11, 0, 195,
- 196, 197, 198, 199, 200, 201, 202, 203,
- 204, 205, 201, 206, 207, 208, 209, 38,
- 0, 210, 211, 16, 212, 213, 214, 215,
- 216, 217, 218, 219, 220, 16, 14, 221,
- 222, 223, 224, 16, 225, 226, 227, 228,
- 229, 230, 231, 232, 233, 234, 235, 236,
- 237, 238, 239, 16, 144, 14, 240, 0,
- 11, 11, 11, 11, 11, 0, 0, 0,
- 11, 0, 11, 11, 0, 11, 0, 11,
- 11, 0, 0, 0, 11, 11, 11, 0,
- 0, 0, 11, 11, 11, 0, 0, 0,
- 0, 11, 0, 0, 11, 0, 0, 11,
- 11, 11, 0, 0, 11, 0, 11, 11,
- 11, 0, 11, 11, 11, 11, 11, 11,
- 0, 0, 0, 11, 11, 0, 11, 11,
- 0, 11, 11, 0, 11, 11, 0, 11,
- 11, 11, 11, 11, 11, 11, 0, 11,
- 0, 11, 0, 11, 11, 0, 11, 0,
- 11, 11, 0, 11, 0, 11, 0, 241,
- 212, 242, 243, 244, 245, 246, 247, 248,
- 249, 250, 98, 251, 16, 252, 253, 254,
- 16, 255, 129, 256, 257, 258, 259, 260,
- 261, 262, 263, 16, 0, 0, 0, 11,
- 11, 11, 0, 11, 11, 0, 11, 11,
- 0, 0, 0, 0, 0, 11, 11, 11,
- 11, 0, 11, 11, 11, 11, 11, 11,
- 0, 0, 0, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 0, 11, 11, 11,
- 11, 11, 11, 11, 11, 0, 11, 11,
- 0, 0, 0, 0, 11, 11, 11, 0,
- 0, 0, 11, 0, 0, 0, 11, 11,
- 0, 11, 11, 11, 0, 11, 0, 0,
- 0, 11, 11, 0, 11, 11, 11, 0,
- 11, 11, 11, 0, 0, 0, 0, 11,
- 16, 181, 264, 265, 14, 16, 14, 0,
- 0, 11, 0, 11, 16, 264, 14, 0,
- 16, 266, 14, 0, 0, 11, 16, 267,
- 268, 269, 172, 270, 271, 16, 272, 273,
- 274, 14, 0, 0, 11, 11, 11, 0,
- 11, 11, 0, 11, 11, 11, 11, 0,
- 0, 11, 0, 0, 11, 11, 0, 11,
- 0, 16, 14, 0, 275, 16, 276, 0,
- 14, 0, 11, 0, 11, 277, 16, 278,
- 279, 0, 11, 0, 0, 0, 11, 11,
- 11, 11, 0, 280, 281, 282, 16, 283,
- 284, 285, 286, 287, 288, 289, 290, 291,
- 292, 293, 294, 295, 296, 14, 0, 11,
- 11, 11, 0, 0, 0, 0, 11, 11,
- 0, 0, 11, 0, 0, 0, 0, 0,
- 0, 0, 11, 0, 11, 0, 0, 0,
- 0, 0, 0, 11, 11, 11, 11, 11,
- 0, 0, 11, 0, 0, 0, 11, 0,
- 0, 11, 0, 0, 11, 0, 0, 11,
- 0, 0, 0, 11, 11, 11, 0, 0,
- 0, 11, 11, 11, 11, 0, 297, 16,
- 298, 16, 299, 300, 301, 302, 14, 0,
- 11, 11, 11, 11, 11, 0, 0, 0,
- 11, 0, 0, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 0, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 11,
- 11, 0, 11, 11, 11, 11, 11, 0,
- 303, 16, 14, 0, 11, 304, 16, 100,
- 14, 0, 11, 305, 0, 14, 0, 11,
- 16, 306, 14, 0, 0, 11, 307, 0,
- 16, 308, 14, 0, 0, 11, 11, 11,
- 11, 0, 11, 11, 11, 11, 0, 11,
- 11, 11, 11, 11, 0, 0, 11, 0,
- 11, 11, 11, 0, 11, 0, 11, 11,
- 11, 0, 0, 0, 0, 0, 0, 0,
- 11, 11, 11, 0, 11, 0, 0, 0,
- 11, 11, 11, 11, 0, 309, 310, 69,
- 311, 312, 313, 314, 315, 316, 317, 318,
- 319, 320, 321, 322, 323, 324, 325, 326,
- 327, 328, 329, 331, 332, 333, 334, 335,
- 336, 330, 0, 11, 11, 11, 11, 0,
- 11, 0, 11, 11, 0, 11, 11, 11,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 11, 11, 11, 11, 11, 0, 11,
- 11, 11, 11, 11, 11, 11, 0, 11,
- 11, 11, 0, 11, 11, 11, 11, 11,
- 11, 11, 0, 11, 11, 11, 0, 11,
- 11, 11, 11, 11, 11, 11, 0, 11,
- 11, 11, 0, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 0, 11, 0,
- 11, 11, 11, 11, 11, 0, 11, 11,
- 0, 11, 11, 11, 11, 11, 11, 11,
- 0, 11, 11, 11, 0, 11, 11, 11,
- 11, 0, 11, 11, 11, 11, 0, 11,
- 11, 11, 11, 0, 11, 0, 11, 11,
- 0, 11, 11, 11, 11, 11, 11, 11,
- 11, 11, 11, 11, 11, 11, 11, 0,
- 11, 11, 11, 0, 11, 0, 11, 11,
- 0, 11, 0, 337, 338, 339, 101, 102,
- 103, 104, 105, 340, 107, 108, 109, 110,
- 111, 112, 341, 342, 167, 343, 258, 117,
- 344, 119, 229, 269, 122, 345, 346, 347,
- 348, 349, 350, 351, 352, 353, 354, 131,
- 355, 16, 14, 15, 16, 134, 135, 136,
- 137, 14, 14, 0, 11, 11, 0, 11,
- 11, 11, 11, 11, 11, 0, 0, 0,
- 11, 0, 11, 11, 11, 11, 0, 11,
- 11, 11, 0, 11, 11, 0, 11, 11,
- 11, 0, 0, 11, 11, 11, 0, 0,
- 11, 11, 0, 11, 0, 11, 0, 11,
- 11, 11, 0, 0, 11, 11, 0, 11,
- 11, 0, 11, 11, 11, 0, 356, 140,
- 142, 143, 144, 145, 146, 14, 357, 148,
- 358, 150, 359, 0, 11, 11, 0, 0,
- 0, 0, 11, 0, 0, 11, 11, 11,
- 11, 11, 0, 360, 109, 361, 154, 155,
- 14, 156, 157, 16, 14, 0, 11, 11,
- 11, 11, 0, 0, 0, 11, 16, 159,
- 160, 16, 362, 363, 219, 308, 163, 164,
- 165, 364, 167, 365, 366, 367, 368, 369,
- 370, 371, 372, 373, 374, 175, 176, 15,
- 375, 16, 14, 0, 0, 0, 0, 11,
- 11, 11, 0, 0, 0, 0, 0, 11,
- 11, 0, 11, 11, 11, 0, 11, 11,
- 0, 0, 0, 11, 11, 0, 11, 11,
- 11, 11, 0, 11, 0, 11, 11, 11,
- 11, 11, 0, 0, 0, 0, 0, 11,
- 11, 11, 11, 11, 11, 0, 11, 0,
- 16, 180, 181, 376, 183, 57, 184, 185,
- 55, 186, 187, 377, 14, 190, 378, 192,
- 193, 194, 14, 0, 11, 11, 11, 11,
- 11, 11, 11, 0, 11, 11, 0, 11,
- 0, 379, 380, 197, 198, 199, 381, 201,
- 202, 382, 383, 384, 201, 206, 207, 208,
- 209, 38, 0, 210, 211, 16, 212, 213,
- 215, 385, 217, 386, 219, 220, 16, 14,
- 387, 222, 223, 224, 16, 225, 226, 227,
- 228, 229, 230, 231, 232, 388, 234, 235,
- 389, 237, 238, 239, 16, 144, 14, 240,
- 0, 0, 11, 0, 0, 11, 0, 11,
- 11, 11, 11, 11, 0, 11, 11, 0,
- 390, 391, 392, 393, 394, 395, 396, 397,
- 247, 398, 319, 399, 213, 400, 401, 402,
- 403, 404, 401, 405, 406, 407, 258, 408,
- 260, 409, 410, 271, 0, 11, 0, 11,
- 0, 11, 0, 11, 0, 11, 11, 0,
- 11, 0, 11, 11, 11, 0, 11, 11,
- 0, 0, 11, 11, 11, 0, 11, 0,
- 11, 0, 11, 11, 0, 11, 0, 11,
- 0, 11, 0, 11, 0, 11, 0, 0,
- 0, 11, 11, 11, 0, 11, 11, 0,
- 16, 267, 229, 411, 401, 412, 271, 16,
- 413, 414, 274, 14, 0, 11, 0, 11,
- 11, 11, 0, 0, 0, 11, 11, 0,
- 277, 16, 278, 415, 0, 11, 11, 0,
- 16, 283, 284, 285, 286, 287, 288, 289,
- 290, 291, 292, 416, 14, 0, 0, 0,
- 11, 16, 417, 16, 265, 300, 301, 302,
- 14, 0, 0, 11, 419, 419, 419, 419,
- 418, 419, 419, 419, 418, 419, 418, 419,
- 419, 418, 418, 418, 418, 418, 418, 419,
- 418, 418, 418, 418, 419, 419, 419, 419,
- 419, 418, 418, 419, 418, 418, 419, 418,
- 419, 418, 418, 419, 418, 418, 418, 419,
- 419, 419, 419, 419, 419, 418, 419, 419,
- 418, 419, 419, 418, 418, 418, 418, 418,
- 418, 419, 419, 418, 418, 419, 418, 419,
- 419, 419, 418, 421, 422, 423, 424, 425,
- 426, 427, 428, 429, 430, 431, 432, 433,
- 434, 435, 436, 437, 438, 439, 440, 441,
- 442, 443, 444, 445, 446, 447, 448, 449,
- 450, 451, 452, 418, 419, 418, 419, 418,
- 419, 419, 418, 419, 419, 418, 418, 418,
- 419, 418, 418, 418, 418, 418, 418, 418,
- 419, 418, 418, 418, 418, 418, 418, 418,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 418, 418, 418, 418, 418,
- 418, 418, 418, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 418, 418, 418, 418,
- 418, 418, 418, 418, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 418, 419, 419,
- 419, 419, 419, 419, 419, 419, 418, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 418, 419, 419, 419, 419, 419,
- 419, 418, 419, 419, 419, 419, 419, 419,
- 418, 418, 418, 418, 418, 418, 418, 418,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 418, 419, 419, 419, 419, 419, 419, 419,
- 419, 418, 419, 419, 419, 419, 419, 418,
- 418, 418, 418, 418, 418, 418, 418, 419,
- 419, 419, 419, 419, 419, 418, 419, 419,
- 419, 419, 419, 419, 419, 418, 419, 418,
- 419, 419, 418, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 418, 419, 419, 419, 419, 419, 418, 419,
- 419, 419, 419, 419, 419, 419, 418, 419,
- 419, 419, 418, 419, 419, 419, 418, 419,
- 418, 453, 454, 455, 456, 457, 458, 459,
- 460, 461, 462, 463, 464, 465, 466, 467,
- 468, 469, 470, 471, 472, 473, 474, 475,
- 476, 477, 478, 479, 480, 481, 482, 483,
- 484, 485, 486, 487, 488, 425, 489, 490,
- 491, 492, 493, 494, 425, 470, 425, 418,
- 419, 418, 419, 419, 418, 418, 419, 418,
- 418, 418, 418, 419, 418, 418, 418, 418,
- 418, 419, 418, 418, 418, 418, 418, 419,
- 419, 419, 419, 419, 418, 418, 418, 419,
- 418, 418, 418, 419, 419, 419, 418, 418,
- 418, 419, 419, 418, 418, 418, 419, 419,
- 419, 418, 418, 418, 419, 419, 419, 419,
- 418, 419, 419, 419, 419, 418, 418, 418,
- 418, 418, 419, 419, 419, 419, 418, 418,
- 419, 419, 419, 418, 418, 419, 419, 419,
- 419, 418, 419, 419, 418, 419, 419, 418,
- 418, 418, 419, 419, 419, 418, 418, 418,
- 418, 419, 419, 419, 419, 419, 418, 418,
- 418, 418, 419, 418, 419, 419, 418, 419,
- 419, 418, 419, 418, 419, 419, 419, 418,
- 419, 419, 418, 418, 418, 419, 418, 418,
- 418, 418, 418, 418, 418, 419, 419, 419,
- 419, 418, 419, 419, 419, 419, 419, 419,
- 419, 418, 495, 496, 497, 498, 499, 500,
- 501, 502, 503, 425, 504, 505, 506, 507,
- 508, 418, 419, 418, 418, 418, 418, 418,
- 419, 419, 418, 419, 419, 419, 418, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 418, 419, 419, 419, 418, 418, 419,
- 419, 419, 418, 418, 419, 418, 418, 419,
- 419, 419, 419, 419, 418, 418, 418, 418,
- 419, 419, 419, 419, 419, 419, 418, 419,
- 419, 419, 419, 419, 418, 509, 464, 510,
- 511, 512, 425, 513, 514, 470, 425, 418,
- 419, 419, 419, 419, 418, 418, 418, 419,
- 418, 418, 419, 419, 419, 418, 418, 418,
- 419, 419, 418, 475, 418, 470, 425, 425,
- 515, 418, 425, 418, 419, 470, 516, 517,
- 470, 518, 519, 470, 520, 521, 522, 523,
- 524, 525, 470, 526, 527, 528, 470, 529,
- 530, 531, 489, 532, 533, 534, 489, 535,
- 470, 425, 418, 418, 419, 419, 418, 418,
- 418, 419, 419, 419, 419, 418, 419, 419,
- 418, 418, 418, 418, 419, 419, 418, 418,
- 419, 419, 418, 418, 418, 418, 418, 418,
- 419, 419, 419, 418, 418, 418, 419, 418,
- 418, 418, 419, 419, 418, 419, 419, 419,
- 419, 418, 419, 419, 419, 419, 418, 419,
- 419, 419, 419, 419, 419, 418, 418, 418,
- 419, 419, 419, 419, 418, 536, 537, 418,
- 425, 418, 419, 418, 418, 419, 470, 538,
- 539, 540, 541, 520, 542, 543, 544, 545,
- 546, 547, 548, 549, 550, 551, 552, 553,
- 425, 418, 418, 419, 418, 419, 419, 419,
- 419, 419, 419, 419, 418, 419, 419, 419,
- 418, 419, 418, 418, 419, 418, 419, 418,
- 418, 419, 419, 419, 419, 418, 419, 419,
- 419, 418, 418, 419, 419, 419, 419, 418,
- 419, 419, 418, 418, 419, 419, 419, 419,
- 419, 418, 554, 555, 556, 557, 558, 559,
- 560, 561, 562, 563, 564, 560, 566, 567,
- 568, 569, 565, 418, 570, 571, 470, 572,
- 573, 574, 575, 576, 577, 578, 579, 580,
- 470, 425, 581, 582, 583, 584, 470, 585,
- 586, 587, 588, 589, 590, 591, 592, 593,
- 594, 595, 596, 597, 598, 599, 470, 501,
- 425, 600, 418, 419, 419, 419, 419, 419,
- 418, 418, 418, 419, 418, 419, 419, 418,
- 419, 418, 419, 419, 418, 418, 418, 419,
- 419, 419, 418, 418, 418, 419, 419, 419,
- 418, 418, 418, 418, 419, 418, 418, 419,
- 418, 418, 419, 419, 419, 418, 418, 419,
- 418, 419, 419, 419, 418, 419, 419, 419,
- 419, 419, 419, 418, 418, 418, 419, 419,
- 418, 419, 419, 418, 419, 419, 418, 419,
- 419, 418, 419, 419, 419, 419, 419, 419,
- 419, 418, 419, 418, 419, 418, 419, 419,
- 418, 419, 418, 419, 419, 418, 419, 418,
- 419, 418, 601, 572, 602, 603, 604, 605,
- 606, 607, 608, 609, 610, 453, 611, 470,
- 612, 613, 614, 470, 615, 485, 616, 617,
- 618, 619, 620, 621, 622, 623, 470, 418,
- 418, 418, 419, 419, 419, 418, 419, 419,
- 418, 419, 419, 418, 418, 418, 418, 418,
- 419, 419, 419, 419, 418, 419, 419, 419,
- 419, 419, 419, 418, 418, 418, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 418,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 418, 419, 419, 418, 418, 418, 418, 419,
- 419, 419, 418, 418, 418, 419, 418, 418,
- 418, 419, 419, 418, 419, 419, 419, 418,
- 419, 418, 418, 418, 419, 419, 418, 419,
- 419, 419, 418, 419, 419, 419, 418, 418,
- 418, 418, 419, 470, 539, 624, 625, 425,
- 470, 425, 418, 418, 419, 418, 419, 470,
- 624, 425, 418, 470, 626, 425, 418, 418,
- 419, 470, 627, 628, 629, 530, 630, 631,
- 470, 632, 633, 634, 425, 418, 418, 419,
- 419, 419, 418, 419, 419, 418, 419, 419,
- 419, 419, 418, 418, 419, 418, 418, 419,
- 419, 418, 419, 418, 470, 425, 418, 635,
- 470, 636, 418, 425, 418, 419, 418, 419,
- 637, 470, 638, 639, 418, 419, 418, 418,
- 418, 419, 419, 419, 419, 418, 640, 641,
- 642, 470, 643, 644, 645, 646, 647, 648,
- 649, 650, 651, 652, 653, 654, 655, 656,
- 425, 418, 419, 419, 419, 418, 418, 418,
- 418, 419, 419, 418, 418, 419, 418, 418,
- 418, 418, 418, 418, 418, 419, 418, 419,
- 418, 418, 418, 418, 418, 418, 419, 419,
- 419, 419, 419, 418, 418, 419, 418, 418,
- 418, 419, 418, 418, 419, 418, 418, 419,
- 418, 418, 419, 418, 418, 418, 419, 419,
- 419, 418, 418, 418, 419, 419, 419, 419,
- 418, 657, 470, 658, 470, 659, 660, 661,
- 662, 425, 418, 419, 419, 419, 419, 419,
- 418, 418, 418, 419, 418, 418, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 418, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 418, 419, 419, 419,
- 419, 419, 418, 663, 470, 425, 418, 419,
- 664, 470, 455, 425, 418, 419, 665, 418,
- 425, 418, 419, 470, 666, 425, 418, 418,
- 419, 667, 418, 470, 668, 425, 418, 418,
- 419, 670, 669, 419, 419, 419, 419, 670,
- 669, 419, 670, 669, 670, 670, 419, 670,
- 669, 419, 670, 419, 670, 669, 419, 670,
- 419, 670, 419, 669, 670, 670, 670, 670,
- 670, 670, 670, 670, 669, 419, 419, 670,
- 670, 419, 670, 419, 670, 669, 670, 670,
- 670, 670, 670, 419, 670, 419, 670, 419,
- 670, 669, 670, 670, 419, 670, 419, 670,
- 669, 670, 670, 670, 670, 670, 419, 670,
- 419, 670, 669, 419, 419, 670, 419, 670,
- 669, 670, 670, 670, 419, 670, 419, 670,
- 419, 670, 419, 670, 669, 670, 419, 670,
- 419, 670, 669, 419, 670, 670, 670, 670,
- 419, 670, 419, 670, 419, 670, 419, 670,
- 419, 670, 419, 670, 669, 419, 670, 669,
- 670, 670, 670, 419, 670, 419, 670, 669,
- 670, 419, 670, 419, 670, 669, 419, 670,
- 670, 670, 670, 419, 670, 419, 670, 669,
- 419, 670, 419, 670, 419, 670, 669, 670,
- 670, 419, 670, 419, 670, 669, 419, 670,
- 419, 670, 419, 670, 419, 669, 670, 670,
- 670, 419, 670, 419, 670, 669, 419, 670,
- 669, 670, 670, 419, 670, 669, 670, 670,
- 670, 419, 670, 670, 670, 670, 670, 670,
- 419, 419, 670, 419, 670, 419, 670, 419,
- 670, 669, 670, 419, 670, 419, 670, 669,
- 419, 670, 669, 670, 419, 670, 669, 670,
- 419, 670, 669, 419, 419, 670, 669, 419,
- 670, 419, 670, 419, 670, 419, 670, 419,
- 670, 419, 669, 670, 670, 419, 670, 670,
- 670, 670, 419, 419, 670, 670, 670, 670,
- 670, 419, 670, 670, 670, 670, 670, 669,
- 419, 670, 670, 419, 670, 419, 669, 670,
- 670, 419, 670, 669, 419, 419, 670, 419,
- 669, 670, 670, 669, 419, 670, 419, 669,
- 670, 669, 419, 670, 419, 670, 419, 669,
- 670, 670, 669, 419, 670, 419, 670, 419,
- 670, 669, 670, 419, 670, 419, 670, 669,
- 419, 670, 669, 419, 419, 670, 669, 670,
- 419, 669, 670, 669, 419, 670, 419, 670,
- 419, 669, 670, 669, 419, 419, 670, 669,
- 670, 419, 670, 419, 670, 669, 419, 670,
- 419, 669, 670, 669, 419, 419, 670, 419,
- 669, 670, 669, 419, 419, 670, 669, 670,
- 419, 670, 669, 670, 419, 670, 669, 670,
- 419, 670, 419, 670, 419, 669, 670, 669,
- 419, 419, 670, 669, 670, 419, 670, 419,
- 670, 669, 419, 670, 669, 670, 670, 419,
- 670, 419, 670, 669, 669, 419, 669, 419,
- 670, 670, 419, 670, 670, 670, 670, 670,
- 670, 670, 669, 419, 670, 670, 670, 419,
- 669, 670, 670, 670, 419, 670, 419, 670,
- 419, 670, 419, 670, 419, 670, 669, 419,
- 419, 670, 669, 670, 419, 670, 669, 419,
- 419, 670, 419, 419, 419, 670, 419, 670,
- 419, 670, 419, 670, 419, 669, 419, 670,
- 419, 670, 419, 669, 670, 669, 419, 670,
- 419, 669, 670, 419, 670, 670, 670, 669,
- 419, 670, 419, 419, 670, 419, 669, 670,
- 670, 669, 419, 670, 670, 670, 670, 419,
- 670, 419, 669, 670, 670, 670, 419, 670,
- 669, 670, 419, 670, 419, 670, 419, 670,
- 419, 670, 669, 670, 670, 419, 670, 669,
- 419, 670, 419, 670, 419, 669, 670, 670,
- 669, 419, 670, 419, 669, 670, 669, 419,
- 670, 669, 419, 670, 419, 670, 669, 670,
- 670, 670, 669, 419, 419, 419, 670, 669,
- 419, 670, 419, 669, 670, 669, 419, 670,
- 419, 670, 419, 669, 670, 670, 670, 669,
- 419, 670, 419, 669, 670, 670, 670, 670,
- 669, 419, 670, 419, 670, 669, 419, 419,
- 670, 419, 670, 669, 670, 419, 670, 419,
- 669, 670, 670, 669, 419, 670, 419, 670,
- 669, 419, 670, 670, 670, 419, 670, 419,
- 669, 419, 670, 669, 670, 419, 419, 670,
- 419, 670, 419, 669, 670, 670, 670, 670,
- 669, 419, 670, 419, 670, 419, 670, 419,
- 670, 419, 670, 669, 670, 670, 670, 419,
- 670, 419, 670, 419, 670, 419, 669, 670,
- 670, 419, 419, 670, 669, 670, 419, 670,
- 670, 669, 419, 670, 419, 670, 669, 419,
- 419, 670, 670, 670, 670, 419, 670, 419,
- 670, 419, 669, 670, 670, 419, 669, 670,
- 669, 419, 670, 419, 669, 670, 669, 419,
- 670, 419, 669, 670, 419, 670, 670, 669,
- 419, 670, 670, 419, 669, 670, 669, 419,
- 670, 419, 670, 669, 670, 419, 670, 419,
- 669, 670, 669, 419, 670, 419, 670, 419,
- 670, 419, 670, 419, 670, 669, 671, 669,
- 672, 673, 674, 675, 676, 677, 678, 679,
- 680, 681, 682, 674, 683, 684, 685, 686,
- 687, 674, 688, 689, 690, 691, 692, 693,
- 694, 695, 696, 697, 698, 699, 700, 701,
- 702, 674, 703, 671, 683, 671, 704, 671,
- 669, 670, 670, 670, 670, 419, 669, 670,
- 670, 669, 419, 670, 669, 419, 419, 670,
- 669, 419, 670, 419, 669, 670, 669, 419,
- 419, 670, 419, 669, 670, 670, 669, 419,
- 670, 670, 670, 669, 419, 670, 419, 670,
- 670, 669, 419, 419, 670, 419, 669, 670,
- 669, 419, 670, 669, 419, 419, 670, 419,
- 670, 669, 419, 670, 419, 419, 670, 419,
- 670, 419, 669, 670, 670, 669, 419, 670,
- 670, 419, 670, 669, 419, 670, 419, 670,
- 669, 419, 670, 419, 669, 419, 670, 670,
- 670, 419, 670, 669, 670, 419, 670, 669,
- 419, 670, 669, 670, 419, 670, 669, 419,
- 670, 669, 419, 670, 419, 670, 669, 419,
- 670, 669, 419, 670, 669, 705, 706, 707,
- 708, 709, 710, 711, 712, 713, 714, 715,
- 716, 676, 717, 718, 719, 720, 721, 718,
- 722, 723, 724, 725, 726, 727, 728, 729,
- 730, 671, 669, 670, 419, 670, 669, 670,
- 419, 670, 669, 670, 419, 670, 669, 670,
- 419, 670, 669, 419, 670, 419, 670, 669,
- 670, 419, 670, 669, 670, 419, 419, 419,
- 670, 669, 670, 419, 670, 669, 670, 670,
- 670, 670, 419, 670, 419, 669, 670, 669,
- 419, 419, 670, 419, 670, 669, 670, 419,
- 670, 669, 419, 670, 669, 670, 670, 419,
- 670, 669, 419, 670, 669, 670, 419, 670,
- 669, 419, 670, 669, 419, 670, 669, 419,
- 670, 669, 670, 669, 419, 419, 670, 669,
- 670, 419, 670, 669, 419, 670, 419, 669,
- 670, 669, 419, 674, 731, 671, 674, 732,
- 674, 733, 683, 671, 669, 670, 669, 419,
- 670, 669, 419, 674, 732, 683, 671, 669,
- 674, 734, 671, 683, 671, 669, 670, 669,
- 419, 674, 735, 692, 736, 718, 737, 730,
- 674, 738, 739, 740, 671, 683, 671, 669,
- 670, 669, 419, 670, 419, 670, 669, 419,
- 670, 419, 670, 419, 669, 670, 670, 669,
- 419, 670, 419, 670, 669, 419, 670, 669,
- 674, 683, 425, 669, 741, 674, 742, 683,
- 671, 669, 425, 670, 669, 419, 670, 669,
- 419, 743, 674, 744, 745, 671, 669, 419,
- 670, 669, 670, 670, 669, 419, 419, 670,
- 419, 670, 669, 674, 746, 747, 748, 749,
- 750, 751, 752, 753, 754, 755, 756, 671,
- 683, 671, 669, 670, 419, 670, 670, 670,
- 670, 670, 670, 670, 419, 670, 419, 670,
- 670, 670, 670, 670, 670, 669, 419, 670,
- 670, 419, 670, 419, 669, 670, 419, 670,
- 670, 670, 419, 670, 670, 419, 670, 670,
- 419, 670, 670, 419, 670, 670, 669, 419,
- 674, 757, 674, 733, 758, 759, 760, 671,
- 683, 671, 669, 670, 669, 419, 670, 670,
- 670, 419, 670, 670, 670, 419, 670, 419,
- 670, 669, 419, 419, 419, 419, 670, 670,
- 419, 419, 419, 419, 419, 670, 670, 670,
- 670, 670, 670, 670, 419, 670, 419, 670,
- 419, 669, 670, 670, 670, 419, 670, 419,
- 670, 669, 683, 425, 761, 674, 683, 425,
- 670, 669, 419, 762, 674, 763, 683, 425,
- 670, 669, 419, 670, 419, 764, 683, 671,
- 669, 425, 670, 669, 419, 674, 765, 671,
- 683, 671, 669, 670, 669, 419, 766, 766,
- 766, 768, 769, 770, 766, 767, 767, 771,
- 768, 771, 769, 771, 767, 772, 773, 772,
- 775, 774, 776, 774, 777, 774, 779, 778,
- 781, 782, 780, 781, 783, 780, 785, 784,
- 786, 784, 787, 784, 789, 788, 791, 792,
- 790, 791, 793, 790, 795, 795, 795, 795,
- 794, 795, 795, 795, 794, 795, 794, 795,
- 795, 794, 794, 794, 794, 794, 794, 795,
- 794, 794, 794, 794, 795, 795, 795, 795,
- 795, 794, 794, 795, 794, 794, 795, 794,
- 795, 794, 794, 795, 794, 794, 794, 795,
- 795, 795, 795, 795, 795, 794, 795, 795,
- 794, 795, 795, 794, 794, 794, 794, 794,
- 794, 795, 795, 794, 794, 795, 794, 795,
- 795, 795, 794, 797, 798, 799, 800, 801,
- 802, 803, 804, 805, 806, 807, 808, 809,
- 810, 811, 812, 813, 814, 815, 816, 817,
- 818, 819, 820, 821, 822, 823, 824, 825,
- 826, 827, 828, 794, 795, 794, 795, 794,
- 795, 795, 794, 795, 795, 794, 794, 794,
- 795, 794, 794, 794, 794, 794, 794, 794,
- 795, 794, 794, 794, 794, 794, 794, 794,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 794, 794, 794, 794, 794,
- 794, 794, 794, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 794, 794, 794, 794,
- 794, 794, 794, 794, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 794, 795, 795,
- 795, 795, 795, 795, 795, 795, 794, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 794, 795, 795, 795, 795, 795,
- 795, 794, 795, 795, 795, 795, 795, 795,
- 794, 794, 794, 794, 794, 794, 794, 794,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 794, 795, 795, 795, 795, 795, 795, 795,
- 795, 794, 795, 795, 795, 795, 795, 794,
- 794, 794, 794, 794, 794, 794, 794, 795,
- 795, 795, 795, 795, 795, 794, 795, 795,
- 795, 795, 795, 795, 795, 794, 795, 794,
- 795, 795, 794, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 794, 795, 795, 795, 795, 795, 794, 795,
- 795, 795, 795, 795, 795, 795, 794, 795,
- 795, 795, 794, 795, 795, 795, 794, 795,
- 794, 829, 830, 831, 832, 833, 834, 835,
- 836, 837, 838, 839, 840, 841, 842, 843,
- 844, 845, 846, 847, 848, 849, 850, 851,
- 852, 853, 854, 855, 856, 857, 858, 859,
- 860, 861, 862, 863, 864, 801, 865, 866,
- 867, 868, 869, 870, 801, 846, 801, 794,
- 795, 794, 795, 795, 794, 794, 795, 794,
- 794, 794, 794, 795, 794, 794, 794, 794,
- 794, 795, 794, 794, 794, 794, 794, 795,
- 795, 795, 795, 795, 794, 794, 794, 795,
- 794, 794, 794, 795, 795, 795, 794, 794,
- 794, 795, 795, 794, 794, 794, 795, 795,
- 795, 794, 794, 794, 795, 795, 795, 795,
- 794, 795, 795, 795, 795, 794, 794, 794,
- 794, 794, 795, 795, 795, 795, 794, 794,
- 795, 795, 795, 794, 794, 795, 795, 795,
- 795, 794, 795, 795, 794, 795, 795, 794,
- 794, 794, 795, 795, 795, 794, 794, 794,
- 794, 795, 795, 795, 795, 795, 794, 794,
- 794, 794, 795, 794, 795, 795, 794, 795,
- 795, 794, 795, 794, 795, 795, 795, 794,
- 795, 795, 794, 794, 794, 795, 794, 794,
- 794, 794, 794, 794, 794, 795, 795, 795,
- 795, 794, 795, 795, 795, 795, 795, 795,
- 795, 794, 871, 872, 873, 874, 875, 876,
- 877, 878, 879, 801, 880, 881, 882, 883,
- 884, 794, 795, 794, 794, 794, 794, 794,
- 795, 795, 794, 795, 795, 795, 794, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 794, 795, 795, 795, 794, 794, 795,
- 795, 795, 794, 794, 795, 794, 794, 795,
- 795, 795, 795, 795, 794, 794, 794, 794,
- 795, 795, 795, 795, 795, 795, 794, 795,
- 795, 795, 795, 795, 794, 885, 840, 886,
- 887, 888, 801, 889, 890, 846, 801, 794,
- 795, 795, 795, 795, 794, 794, 794, 795,
- 794, 794, 795, 795, 795, 794, 794, 794,
- 795, 795, 794, 851, 794, 846, 801, 801,
- 891, 794, 801, 794, 795, 846, 892, 893,
- 846, 894, 895, 846, 896, 897, 898, 899,
- 900, 901, 846, 902, 903, 904, 846, 905,
- 906, 907, 865, 908, 909, 910, 865, 911,
- 846, 801, 794, 794, 795, 795, 794, 794,
- 794, 795, 795, 795, 795, 794, 795, 795,
- 794, 794, 794, 794, 795, 795, 794, 794,
- 795, 795, 794, 794, 794, 794, 794, 794,
- 795, 795, 795, 794, 794, 794, 795, 794,
- 794, 794, 795, 795, 794, 795, 795, 795,
- 795, 794, 795, 795, 795, 795, 794, 795,
- 795, 795, 795, 795, 795, 794, 794, 794,
- 795, 795, 795, 795, 794, 912, 913, 794,
- 801, 794, 795, 794, 794, 795, 846, 914,
- 915, 916, 917, 896, 918, 919, 920, 921,
- 922, 923, 924, 925, 926, 927, 928, 929,
- 801, 794, 794, 795, 794, 795, 795, 795,
- 795, 795, 795, 795, 794, 795, 795, 795,
- 794, 795, 794, 794, 795, 794, 795, 794,
- 794, 795, 795, 795, 795, 794, 795, 795,
- 795, 794, 794, 795, 795, 795, 795, 794,
- 795, 795, 794, 794, 795, 795, 795, 795,
- 795, 794, 930, 931, 932, 933, 934, 935,
- 936, 937, 938, 939, 940, 936, 942, 943,
- 944, 945, 941, 794, 946, 947, 846, 948,
- 949, 950, 951, 952, 953, 954, 955, 956,
- 846, 801, 957, 958, 959, 960, 846, 961,
- 962, 963, 964, 965, 966, 967, 968, 969,
- 970, 971, 972, 973, 974, 975, 846, 877,
- 801, 976, 794, 795, 795, 795, 795, 795,
- 794, 794, 794, 795, 794, 795, 795, 794,
- 795, 794, 795, 795, 794, 794, 794, 795,
- 795, 795, 794, 794, 794, 795, 795, 795,
- 794, 794, 794, 794, 795, 794, 794, 795,
- 794, 794, 795, 795, 795, 794, 794, 795,
- 794, 795, 795, 795, 794, 795, 795, 795,
- 795, 795, 795, 794, 794, 794, 795, 795,
- 794, 795, 795, 794, 795, 795, 794, 795,
- 795, 794, 795, 795, 795, 795, 795, 795,
- 795, 794, 795, 794, 795, 794, 795, 795,
- 794, 795, 794, 795, 795, 794, 795, 794,
- 795, 794, 977, 948, 978, 979, 980, 981,
- 982, 983, 984, 985, 986, 829, 987, 846,
- 988, 989, 990, 846, 991, 861, 992, 993,
- 994, 995, 996, 997, 998, 999, 846, 794,
- 794, 794, 795, 795, 795, 794, 795, 795,
- 794, 795, 795, 794, 794, 794, 794, 794,
- 795, 795, 795, 795, 794, 795, 795, 795,
- 795, 795, 795, 794, 794, 794, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 794,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 794, 795, 795, 794, 794, 794, 794, 795,
- 795, 795, 794, 794, 794, 795, 794, 794,
- 794, 795, 795, 794, 795, 795, 795, 794,
- 795, 794, 794, 794, 795, 795, 794, 795,
- 795, 795, 794, 795, 795, 795, 794, 794,
- 794, 794, 795, 846, 915, 1000, 1001, 801,
- 846, 801, 794, 794, 795, 794, 795, 846,
- 1000, 801, 794, 846, 1002, 801, 794, 794,
- 795, 846, 1003, 1004, 1005, 906, 1006, 1007,
- 846, 1008, 1009, 1010, 801, 794, 794, 795,
- 795, 795, 794, 795, 795, 794, 795, 795,
- 795, 795, 794, 794, 795, 794, 794, 795,
- 795, 794, 795, 794, 846, 801, 794, 1011,
- 846, 1012, 794, 801, 794, 795, 794, 795,
- 1013, 846, 1014, 1015, 794, 795, 794, 794,
- 794, 795, 795, 795, 795, 794, 1016, 1017,
- 1018, 846, 1019, 1020, 1021, 1022, 1023, 1024,
- 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1032,
- 801, 794, 795, 795, 795, 794, 794, 794,
- 794, 795, 795, 794, 794, 795, 794, 794,
- 794, 794, 794, 794, 794, 795, 794, 795,
- 794, 794, 794, 794, 794, 794, 795, 795,
- 795, 795, 795, 794, 794, 795, 794, 794,
- 794, 795, 794, 794, 795, 794, 794, 795,
- 794, 794, 795, 794, 794, 794, 795, 795,
- 795, 794, 794, 794, 795, 795, 795, 795,
- 794, 1033, 846, 1034, 846, 1035, 1036, 1037,
- 1038, 801, 794, 795, 795, 795, 795, 795,
- 794, 794, 794, 795, 794, 794, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 794, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 794, 795, 795, 795,
- 795, 795, 794, 1039, 846, 801, 794, 795,
- 1040, 846, 831, 801, 794, 795, 1041, 794,
- 801, 794, 795, 846, 1042, 801, 794, 794,
- 795, 1043, 794, 846, 1044, 801, 794, 794,
- 795, 1046, 1045, 795, 795, 795, 795, 1046,
- 1045, 795, 1046, 1045, 1046, 1046, 795, 1046,
- 1045, 795, 1046, 795, 1046, 1045, 795, 1046,
- 795, 1046, 795, 1045, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1045, 795, 795, 1046,
- 1046, 795, 1046, 795, 1046, 1045, 1046, 1046,
- 1046, 1046, 1046, 795, 1046, 795, 1046, 795,
- 1046, 1045, 1046, 1046, 795, 1046, 795, 1046,
- 1045, 1046, 1046, 1046, 1046, 1046, 795, 1046,
- 795, 1046, 1045, 795, 795, 1046, 795, 1046,
- 1045, 1046, 1046, 1046, 795, 1046, 795, 1046,
- 795, 1046, 795, 1046, 1045, 1046, 795, 1046,
- 795, 1046, 1045, 795, 1046, 1046, 1046, 1046,
- 795, 1046, 795, 1046, 795, 1046, 795, 1046,
- 795, 1046, 795, 1046, 1045, 795, 1046, 1045,
- 1046, 1046, 1046, 795, 1046, 795, 1046, 1045,
- 1046, 795, 1046, 795, 1046, 1045, 795, 1046,
- 1046, 1046, 1046, 795, 1046, 795, 1046, 1045,
- 795, 1046, 795, 1046, 795, 1046, 1045, 1046,
- 1046, 795, 1046, 795, 1046, 1045, 795, 1046,
- 795, 1046, 795, 1046, 795, 1045, 1046, 1046,
- 1046, 795, 1046, 795, 1046, 1045, 795, 1046,
- 1045, 1046, 1046, 795, 1046, 1045, 1046, 1046,
- 1046, 795, 1046, 1046, 1046, 1046, 1046, 1046,
- 795, 795, 1046, 795, 1046, 795, 1046, 795,
- 1046, 1045, 1046, 795, 1046, 795, 1046, 1045,
- 795, 1046, 1045, 1046, 795, 1046, 1045, 1046,
- 795, 1046, 1045, 795, 795, 1046, 1045, 795,
- 1046, 795, 1046, 795, 1046, 795, 1046, 795,
- 1046, 795, 1045, 1046, 1046, 795, 1046, 1046,
- 1046, 1046, 795, 795, 1046, 1046, 1046, 1046,
- 1046, 795, 1046, 1046, 1046, 1046, 1046, 1045,
- 795, 1046, 1046, 795, 1046, 795, 1045, 1046,
- 1046, 795, 1046, 1045, 795, 795, 1046, 795,
- 1045, 1046, 1046, 1045, 795, 1046, 795, 1045,
- 1046, 1045, 795, 1046, 795, 1046, 795, 1045,
- 1046, 1046, 1045, 795, 1046, 795, 1046, 795,
- 1046, 1045, 1046, 795, 1046, 795, 1046, 1045,
- 795, 1046, 1045, 795, 795, 1046, 1045, 1046,
- 795, 1045, 1046, 1045, 795, 1046, 795, 1046,
- 795, 1045, 1046, 1045, 795, 795, 1046, 1045,
- 1046, 795, 1046, 795, 1046, 1045, 795, 1046,
- 795, 1045, 1046, 1045, 795, 795, 1046, 795,
- 1045, 1046, 1045, 795, 795, 1046, 1045, 1046,
- 795, 1046, 1045, 1046, 795, 1046, 1045, 1046,
- 795, 1046, 795, 1046, 795, 1045, 1046, 1045,
- 795, 795, 1046, 1045, 1046, 795, 1046, 795,
- 1046, 1045, 795, 1046, 1045, 1046, 1046, 795,
- 1046, 795, 1046, 1045, 1045, 795, 1045, 795,
- 1046, 1046, 795, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1045, 795, 1046, 1046, 1046, 795,
- 1045, 1046, 1046, 1046, 795, 1046, 795, 1046,
- 795, 1046, 795, 1046, 795, 1046, 1045, 795,
- 795, 1046, 1045, 1046, 795, 1046, 1045, 795,
- 795, 1046, 795, 795, 795, 1046, 795, 1046,
- 795, 1046, 795, 1046, 795, 1045, 795, 1046,
- 795, 1046, 795, 1045, 1046, 1045, 795, 1046,
- 795, 1045, 1046, 795, 1046, 1046, 1046, 1045,
- 795, 1046, 795, 795, 1046, 795, 1045, 1046,
- 1046, 1045, 795, 1046, 1046, 1046, 1046, 795,
- 1046, 795, 1045, 1046, 1046, 1046, 795, 1046,
- 1045, 1046, 795, 1046, 795, 1046, 795, 1046,
- 795, 1046, 1045, 1046, 1046, 795, 1046, 1045,
- 795, 1046, 795, 1046, 795, 1045, 1046, 1046,
- 1045, 795, 1046, 795, 1045, 1046, 1045, 795,
- 1046, 1045, 795, 1046, 795, 1046, 1045, 1046,
- 1046, 1046, 1045, 795, 795, 795, 1046, 1045,
- 795, 1046, 795, 1045, 1046, 1045, 795, 1046,
- 795, 1046, 795, 1045, 1046, 1046, 1046, 1045,
- 795, 1046, 795, 1045, 1046, 1046, 1046, 1046,
- 1045, 795, 1046, 795, 1046, 1045, 795, 795,
- 1046, 795, 1046, 1045, 1046, 795, 1046, 795,
- 1045, 1046, 1046, 1045, 795, 1046, 795, 1046,
- 1045, 795, 1046, 1046, 1046, 795, 1046, 795,
- 1045, 795, 1046, 1045, 1046, 795, 795, 1046,
- 795, 1046, 795, 1045, 1046, 1046, 1046, 1046,
- 1045, 795, 1046, 795, 1046, 795, 1046, 795,
- 1046, 795, 1046, 1045, 1046, 1046, 1046, 795,
- 1046, 795, 1046, 795, 1046, 795, 1045, 1046,
- 1046, 795, 795, 1046, 1045, 1046, 795, 1046,
- 1046, 1045, 795, 1046, 795, 1046, 1045, 795,
- 795, 1046, 1046, 1046, 1046, 795, 1046, 795,
- 1046, 795, 1045, 1046, 1046, 795, 1045, 1046,
- 1045, 795, 1046, 795, 1045, 1046, 1045, 795,
- 1046, 795, 1045, 1046, 795, 1046, 1046, 1045,
- 795, 1046, 1046, 795, 1045, 1046, 1045, 795,
- 1046, 795, 1046, 1045, 1046, 795, 1046, 795,
- 1045, 1046, 1045, 795, 1046, 795, 1046, 795,
- 1046, 795, 1046, 795, 1046, 1045, 1047, 1045,
- 1048, 1049, 1050, 1051, 1052, 1053, 1054, 1055,
- 1056, 1057, 1058, 1050, 1059, 1060, 1061, 1062,
- 1063, 1050, 1064, 1065, 1066, 1067, 1068, 1069,
- 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077,
- 1078, 1050, 1079, 1047, 1059, 1047, 1080, 1047,
- 1045, 1046, 1046, 1046, 1046, 795, 1045, 1046,
- 1046, 1045, 795, 1046, 1045, 795, 795, 1046,
- 1045, 795, 1046, 795, 1045, 1046, 1045, 795,
- 795, 1046, 795, 1045, 1046, 1046, 1045, 795,
- 1046, 1046, 1046, 1045, 795, 1046, 795, 1046,
- 1046, 1045, 795, 795, 1046, 795, 1045, 1046,
- 1045, 795, 1046, 1045, 795, 795, 1046, 795,
- 1046, 1045, 795, 1046, 795, 795, 1046, 795,
- 1046, 795, 1045, 1046, 1046, 1045, 795, 1046,
- 1046, 795, 1046, 1045, 795, 1046, 795, 1046,
- 1045, 795, 1046, 795, 1045, 795, 1046, 1046,
- 1046, 795, 1046, 1045, 1046, 795, 1046, 1045,
- 795, 1046, 1045, 1046, 795, 1046, 1045, 795,
- 1046, 1045, 795, 1046, 795, 1046, 1045, 795,
- 1046, 1045, 795, 1046, 1045, 1081, 1082, 1083,
- 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091,
- 1092, 1052, 1093, 1094, 1095, 1096, 1097, 1094,
- 1098, 1099, 1100, 1101, 1102, 1103, 1104, 1105,
- 1106, 1047, 1045, 1046, 795, 1046, 1045, 1046,
- 795, 1046, 1045, 1046, 795, 1046, 1045, 1046,
- 795, 1046, 1045, 795, 1046, 795, 1046, 1045,
- 1046, 795, 1046, 1045, 1046, 795, 795, 795,
- 1046, 1045, 1046, 795, 1046, 1045, 1046, 1046,
- 1046, 1046, 795, 1046, 795, 1045, 1046, 1045,
- 795, 795, 1046, 795, 1046, 1045, 1046, 795,
- 1046, 1045, 795, 1046, 1045, 1046, 1046, 795,
- 1046, 1045, 795, 1046, 1045, 1046, 795, 1046,
- 1045, 795, 1046, 1045, 795, 1046, 1045, 795,
- 1046, 1045, 1046, 1045, 795, 795, 1046, 1045,
- 1046, 795, 1046, 1045, 795, 1046, 795, 1045,
- 1046, 1045, 795, 1050, 1107, 1047, 1050, 1108,
- 1050, 1109, 1059, 1047, 1045, 1046, 1045, 795,
- 1046, 1045, 795, 1050, 1108, 1059, 1047, 1045,
- 1050, 1110, 1047, 1059, 1047, 1045, 1046, 1045,
- 795, 1050, 1111, 1068, 1112, 1094, 1113, 1106,
- 1050, 1114, 1115, 1116, 1047, 1059, 1047, 1045,
- 1046, 1045, 795, 1046, 795, 1046, 1045, 795,
- 1046, 795, 1046, 795, 1045, 1046, 1046, 1045,
- 795, 1046, 795, 1046, 1045, 795, 1046, 1045,
- 1050, 1059, 801, 1045, 1117, 1050, 1118, 1059,
- 1047, 1045, 801, 1046, 1045, 795, 1046, 1045,
- 795, 1119, 1050, 1120, 1121, 1047, 1045, 795,
- 1046, 1045, 1046, 1046, 1045, 795, 795, 1046,
- 795, 1046, 1045, 1050, 1122, 1123, 1124, 1125,
- 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1047,
- 1059, 1047, 1045, 1046, 795, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 795, 1046, 795, 1046,
- 1046, 1046, 1046, 1046, 1046, 1045, 795, 1046,
- 1046, 795, 1046, 795, 1045, 1046, 795, 1046,
- 1046, 1046, 795, 1046, 1046, 795, 1046, 1046,
- 795, 1046, 1046, 795, 1046, 1046, 1045, 795,
- 1050, 1133, 1050, 1109, 1134, 1135, 1136, 1047,
- 1059, 1047, 1045, 1046, 1045, 795, 1046, 1046,
- 1046, 795, 1046, 1046, 1046, 795, 1046, 795,
- 1046, 1045, 795, 795, 795, 795, 1046, 1046,
- 795, 795, 795, 795, 795, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 795, 1046, 795, 1046,
- 795, 1045, 1046, 1046, 1046, 795, 1046, 795,
- 1046, 1045, 1059, 801, 1137, 1050, 1059, 801,
- 1046, 1045, 795, 1138, 1050, 1139, 1059, 801,
- 1046, 1045, 795, 1046, 795, 1140, 1059, 1047,
- 1045, 801, 1046, 1045, 795, 1050, 1141, 1047,
- 1059, 1047, 1045, 1046, 1045, 795, 1142, 1143,
- 1144, 1142, 1145, 1146, 1147, 1149, 1150, 1151,
- 1152, 1153, 1154, 670, 670, 419, 1155, 1156,
- 1157, 1158, 670, 1161, 1162, 1164, 1165, 1166,
- 1160, 1167, 1168, 1169, 1170, 1171, 1172, 1173,
- 1174, 1175, 1176, 1177, 1178, 1179, 1180, 1181,
- 1182, 1183, 1184, 1185, 1186, 1188, 1189, 1190,
- 1191, 1192, 1193, 670, 1148, 7, 1148, 419,
- 1148, 419, 1160, 1163, 1187, 1194, 1159, 1142,
- 1142, 1195, 1143, 1196, 1198, 1197, 4, 1147,
- 1200, 1197, 1201, 1197, 2, 1147, 1197, 6,
- 8, 8, 7, 1202, 1203, 1204, 1197, 1205,
- 1206, 1197, 1207, 1197, 419, 419, 1209, 1210,
- 489, 470, 1211, 470, 1212, 1213, 1214, 1215,
- 1216, 1217, 1218, 1219, 1220, 1221, 1222, 544,
- 1223, 520, 1224, 1225, 1226, 1227, 1228, 1229,
- 1230, 1231, 1232, 1233, 1234, 1235, 419, 419,
- 419, 425, 565, 1208, 1236, 1197, 1237, 1197,
- 670, 1238, 419, 419, 419, 670, 1238, 670,
- 670, 419, 1238, 419, 1238, 419, 1238, 419,
- 670, 670, 670, 670, 670, 1238, 419, 670,
- 670, 670, 419, 670, 419, 1238, 419, 670,
- 670, 670, 670, 419, 1238, 670, 419, 670,
- 419, 670, 419, 670, 670, 419, 670, 1238,
- 419, 670, 419, 670, 419, 670, 1238, 670,
- 419, 1238, 670, 419, 670, 419, 1238, 670,
- 670, 670, 670, 670, 1238, 419, 419, 670,
- 419, 670, 1238, 670, 419, 1238, 670, 670,
- 1238, 419, 419, 670, 419, 670, 419, 670,
- 1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245,
- 1246, 1247, 1248, 1249, 715, 1250, 1251, 1252,
- 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260,
- 1261, 1260, 1262, 1263, 1264, 1265, 1266, 671,
- 1238, 1267, 1268, 1269, 1270, 1271, 1272, 1273,
- 1274, 1275, 1276, 1277, 1278, 1279, 1280, 1281,
- 1282, 1283, 1284, 1285, 725, 1286, 1287, 1288,
- 692, 1289, 1290, 1291, 1292, 1293, 1294, 671,
- 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302,
- 674, 1303, 671, 674, 1304, 1305, 1306, 1307,
- 683, 1238, 1308, 1309, 1310, 1311, 703, 1312,
- 1313, 683, 1314, 1315, 1316, 1317, 1318, 671,
- 1238, 1319, 1278, 1320, 1321, 1322, 683, 1323,
- 1324, 674, 671, 683, 425, 1238, 1288, 671,
- 674, 683, 425, 683, 425, 1325, 683, 1238,
- 425, 674, 1326, 1327, 674, 1328, 1329, 681,
- 1330, 1331, 1332, 1333, 1334, 1284, 1335, 1336,
- 1337, 1338, 1339, 1340, 1341, 1342, 1343, 1344,
- 1345, 1346, 1303, 1347, 674, 683, 425, 1238,
- 1348, 1349, 683, 671, 1238, 425, 671, 1238,
- 674, 1350, 731, 1351, 1352, 1353, 1354, 1355,
- 1356, 1357, 1358, 671, 1359, 1360, 1361, 1362,
- 1363, 1364, 671, 683, 1238, 1366, 1367, 1368,
- 1369, 1370, 1371, 1372, 1373, 1374, 1375, 1376,
- 1372, 1378, 1379, 1380, 1381, 1365, 1377, 1365,
- 1238, 1365, 1238, 1382, 1382, 1383, 1384, 1385,
- 1386, 1387, 1388, 1389, 1390, 1387, 767, 1391,
- 1391, 1391, 1392, 1391, 1391, 768, 769, 770,
- 1391, 767, 1382, 1382, 1393, 1396, 1397, 1395,
- 1398, 1399, 1398, 1400, 1391, 1402, 1401, 1396,
- 1403, 1395, 1405, 1404, 1394, 1394, 1394, 768,
- 769, 770, 1394, 767, 767, 1406, 773, 1406,
- 1407, 1406, 775, 1408, 1409, 1410, 1411, 1412,
- 1413, 1414, 1411, 776, 775, 1408, 1415, 1415,
- 777, 779, 1416, 1415, 776, 1418, 1419, 1417,
- 1418, 1419, 1420, 1417, 775, 1408, 1421, 1415,
- 775, 1408, 1415, 1423, 1422, 1425, 1424, 776,
- 1426, 777, 1426, 779, 1426, 785, 1427, 1428,
- 1429, 1430, 1431, 1432, 1433, 1430, 786, 785,
- 1427, 1434, 1434, 787, 789, 1435, 1434, 786,
- 1437, 1438, 1436, 1437, 1438, 1439, 1436, 785,
- 1427, 1440, 1434, 785, 1427, 1434, 1442, 1441,
- 1444, 1443, 786, 1445, 787, 1445, 789, 1445,
- 795, 1448, 1449, 1451, 1452, 1453, 1447, 1454,
- 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462,
- 1463, 1464, 1465, 1466, 1467, 1468, 1469, 1470,
- 1471, 1472, 1473, 1475, 1476, 1477, 1478, 1479,
- 1480, 795, 795, 1446, 1447, 1450, 1474, 1481,
- 1446, 1046, 795, 795, 1483, 1484, 865, 846,
- 1485, 846, 1486, 1487, 1488, 1489, 1490, 1491,
- 1492, 1493, 1494, 1495, 1496, 920, 1497, 896,
- 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505,
- 1506, 1507, 1508, 1509, 795, 795, 795, 801,
- 941, 1482, 1046, 1510, 795, 795, 795, 1046,
- 1510, 1046, 1046, 795, 1510, 795, 1510, 795,
- 1510, 795, 1046, 1046, 1046, 1046, 1046, 1510,
- 795, 1046, 1046, 1046, 795, 1046, 795, 1510,
- 795, 1046, 1046, 1046, 1046, 795, 1510, 1046,
- 795, 1046, 795, 1046, 795, 1046, 1046, 795,
- 1046, 1510, 795, 1046, 795, 1046, 795, 1046,
- 1510, 1046, 795, 1510, 1046, 795, 1046, 795,
- 1510, 1046, 1046, 1046, 1046, 1046, 1510, 795,
- 795, 1046, 795, 1046, 1510, 1046, 795, 1510,
- 1046, 1046, 1510, 795, 795, 1046, 795, 1046,
- 795, 1046, 1510, 1511, 1512, 1513, 1514, 1515,
- 1516, 1517, 1518, 1519, 1520, 1521, 1091, 1522,
- 1523, 1524, 1525, 1526, 1527, 1528, 1529, 1530,
- 1531, 1532, 1533, 1532, 1534, 1535, 1536, 1537,
- 1538, 1047, 1510, 1539, 1540, 1541, 1542, 1543,
- 1544, 1545, 1546, 1547, 1548, 1549, 1550, 1551,
- 1552, 1553, 1554, 1555, 1556, 1557, 1101, 1558,
- 1559, 1560, 1068, 1561, 1562, 1563, 1564, 1565,
- 1566, 1047, 1567, 1568, 1569, 1570, 1571, 1572,
- 1573, 1574, 1050, 1575, 1047, 1050, 1576, 1577,
- 1578, 1579, 1059, 1510, 1580, 1581, 1582, 1583,
- 1079, 1584, 1585, 1059, 1586, 1587, 1588, 1589,
- 1590, 1047, 1510, 1591, 1550, 1592, 1593, 1594,
- 1059, 1595, 1596, 1050, 1047, 1059, 801, 1510,
- 1560, 1047, 1050, 1059, 801, 1059, 801, 1597,
- 1059, 1510, 801, 1050, 1598, 1599, 1050, 1600,
- 1601, 1057, 1602, 1603, 1604, 1605, 1606, 1556,
- 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614,
- 1615, 1616, 1617, 1618, 1575, 1619, 1050, 1059,
- 801, 1510, 1620, 1621, 1059, 1047, 1510, 801,
- 1047, 1510, 1050, 1622, 1107, 1623, 1624, 1625,
- 1626, 1627, 1628, 1629, 1630, 1047, 1631, 1632,
- 1633, 1634, 1635, 1636, 1047, 1059, 1510, 1638,
- 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646,
- 1647, 1648, 1644, 1650, 1651, 1652, 1653, 1637,
- 1649, 1637, 1510, 1637, 1510,
-}
-
-var _hcltok_trans_targs []int16 = []int16{
- 1459, 1459, 2, 3, 1459, 1459, 4, 1467,
- 5, 6, 8, 9, 286, 12, 13, 14,
- 15, 16, 287, 288, 19, 289, 21, 22,
- 290, 291, 292, 293, 294, 295, 296, 297,
- 298, 299, 328, 348, 353, 127, 128, 129,
- 356, 151, 371, 375, 1459, 10, 11, 17,
- 18, 20, 23, 24, 25, 26, 27, 28,
- 29, 30, 31, 32, 64, 105, 120, 131,
- 154, 170, 283, 33, 34, 35, 36, 37,
- 38, 39, 40, 41, 42, 43, 44, 45,
- 46, 47, 48, 49, 50, 51, 52, 53,
- 54, 55, 56, 57, 58, 59, 60, 61,
- 62, 63, 65, 66, 67, 68, 69, 70,
- 71, 72, 73, 74, 75, 76, 77, 78,
- 79, 80, 81, 82, 83, 84, 85, 86,
- 87, 88, 89, 90, 91, 92, 93, 94,
- 95, 96, 97, 98, 99, 100, 101, 102,
- 103, 104, 106, 107, 108, 109, 110, 111,
- 112, 113, 114, 115, 116, 117, 118, 119,
- 121, 122, 123, 124, 125, 126, 130, 132,
- 133, 134, 135, 136, 137, 138, 139, 140,
- 141, 142, 143, 144, 145, 146, 147, 148,
- 149, 150, 152, 153, 155, 156, 157, 158,
- 159, 160, 161, 162, 163, 164, 165, 166,
- 167, 168, 169, 171, 203, 227, 230, 231,
- 233, 242, 243, 246, 250, 268, 275, 277,
- 279, 281, 172, 173, 174, 175, 176, 177,
- 178, 179, 180, 181, 182, 183, 184, 185,
- 186, 187, 188, 189, 190, 191, 192, 193,
- 194, 195, 196, 197, 198, 199, 200, 201,
- 202, 204, 205, 206, 207, 208, 209, 210,
- 211, 212, 213, 214, 215, 216, 217, 218,
- 219, 220, 221, 222, 223, 224, 225, 226,
- 228, 229, 232, 234, 235, 236, 237, 238,
- 239, 240, 241, 244, 245, 247, 248, 249,
- 251, 252, 253, 254, 255, 256, 257, 258,
- 259, 260, 261, 262, 263, 264, 265, 266,
- 267, 269, 270, 271, 272, 273, 274, 276,
- 278, 280, 282, 284, 285, 300, 301, 302,
- 303, 304, 305, 306, 307, 308, 309, 310,
- 311, 312, 313, 314, 315, 316, 317, 318,
- 319, 320, 321, 322, 323, 324, 325, 326,
- 327, 329, 330, 331, 332, 333, 334, 335,
- 336, 337, 338, 339, 340, 341, 342, 343,
- 344, 345, 346, 347, 349, 350, 351, 352,
- 354, 355, 357, 358, 359, 360, 361, 362,
- 363, 364, 365, 366, 367, 368, 369, 370,
- 372, 373, 374, 376, 382, 404, 409, 411,
- 413, 377, 378, 379, 380, 381, 383, 384,
- 385, 386, 387, 388, 389, 390, 391, 392,
- 393, 394, 395, 396, 397, 398, 399, 400,
- 401, 402, 403, 405, 406, 407, 408, 410,
- 412, 414, 1459, 1471, 1459, 437, 438, 439,
- 440, 417, 441, 442, 443, 444, 445, 446,
- 447, 448, 449, 450, 451, 452, 453, 454,
- 455, 456, 457, 458, 459, 460, 461, 462,
- 463, 464, 465, 466, 467, 469, 470, 471,
- 472, 473, 474, 475, 476, 477, 478, 479,
- 480, 481, 482, 483, 484, 485, 419, 486,
- 487, 488, 489, 490, 491, 492, 493, 494,
- 495, 496, 497, 498, 499, 500, 501, 502,
- 503, 418, 504, 505, 506, 507, 508, 510,
- 511, 512, 513, 514, 515, 516, 517, 518,
- 519, 520, 521, 522, 523, 525, 526, 527,
- 528, 529, 530, 534, 536, 537, 538, 539,
- 434, 540, 541, 542, 543, 544, 545, 546,
- 547, 548, 549, 550, 551, 552, 553, 554,
- 556, 557, 559, 560, 561, 562, 563, 564,
- 432, 565, 566, 567, 568, 569, 570, 571,
- 572, 573, 575, 607, 631, 634, 635, 637,
- 646, 647, 650, 654, 672, 532, 679, 681,
- 683, 685, 576, 577, 578, 579, 580, 581,
- 582, 583, 584, 585, 586, 587, 588, 589,
- 590, 591, 592, 593, 594, 595, 596, 597,
- 598, 599, 600, 601, 602, 603, 604, 605,
- 606, 608, 609, 610, 611, 612, 613, 614,
- 615, 616, 617, 618, 619, 620, 621, 622,
- 623, 624, 625, 626, 627, 628, 629, 630,
- 632, 633, 636, 638, 639, 640, 641, 642,
- 643, 644, 645, 648, 649, 651, 652, 653,
- 655, 656, 657, 658, 659, 660, 661, 662,
- 663, 664, 665, 666, 667, 668, 669, 670,
- 671, 673, 674, 675, 676, 677, 678, 680,
- 682, 684, 686, 688, 689, 1459, 1459, 690,
- 827, 828, 759, 829, 830, 831, 832, 833,
- 834, 788, 835, 724, 836, 837, 838, 839,
- 840, 841, 842, 843, 744, 844, 845, 846,
- 847, 848, 849, 850, 851, 852, 853, 769,
- 854, 856, 857, 858, 859, 860, 861, 862,
- 863, 864, 865, 702, 866, 867, 868, 869,
- 870, 871, 872, 873, 874, 740, 875, 876,
- 877, 878, 879, 810, 881, 882, 885, 887,
- 888, 889, 890, 891, 892, 895, 896, 898,
- 899, 900, 902, 903, 904, 905, 906, 907,
- 908, 909, 910, 911, 912, 914, 915, 916,
- 917, 920, 922, 923, 925, 927, 1509, 1510,
- 929, 930, 931, 1509, 1509, 932, 1523, 1523,
- 1524, 935, 1523, 936, 1525, 1526, 1529, 1530,
- 1534, 1534, 1535, 941, 1534, 942, 1536, 1537,
- 1540, 1541, 1545, 1546, 1545, 968, 969, 970,
- 971, 948, 972, 973, 974, 975, 976, 977,
- 978, 979, 980, 981, 982, 983, 984, 985,
- 986, 987, 988, 989, 990, 991, 992, 993,
- 994, 995, 996, 997, 998, 1000, 1001, 1002,
- 1003, 1004, 1005, 1006, 1007, 1008, 1009, 1010,
- 1011, 1012, 1013, 1014, 1015, 1016, 950, 1017,
- 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025,
- 1026, 1027, 1028, 1029, 1030, 1031, 1032, 1033,
- 1034, 949, 1035, 1036, 1037, 1038, 1039, 1041,
- 1042, 1043, 1044, 1045, 1046, 1047, 1048, 1049,
- 1050, 1051, 1052, 1053, 1054, 1056, 1057, 1058,
- 1059, 1060, 1061, 1065, 1067, 1068, 1069, 1070,
- 965, 1071, 1072, 1073, 1074, 1075, 1076, 1077,
- 1078, 1079, 1080, 1081, 1082, 1083, 1084, 1085,
- 1087, 1088, 1090, 1091, 1092, 1093, 1094, 1095,
- 963, 1096, 1097, 1098, 1099, 1100, 1101, 1102,
- 1103, 1104, 1106, 1138, 1162, 1165, 1166, 1168,
- 1177, 1178, 1181, 1185, 1203, 1063, 1210, 1212,
- 1214, 1216, 1107, 1108, 1109, 1110, 1111, 1112,
- 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1120,
- 1121, 1122, 1123, 1124, 1125, 1126, 1127, 1128,
- 1129, 1130, 1131, 1132, 1133, 1134, 1135, 1136,
- 1137, 1139, 1140, 1141, 1142, 1143, 1144, 1145,
- 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153,
- 1154, 1155, 1156, 1157, 1158, 1159, 1160, 1161,
- 1163, 1164, 1167, 1169, 1170, 1171, 1172, 1173,
- 1174, 1175, 1176, 1179, 1180, 1182, 1183, 1184,
- 1186, 1187, 1188, 1189, 1190, 1191, 1192, 1193,
- 1194, 1195, 1196, 1197, 1198, 1199, 1200, 1201,
- 1202, 1204, 1205, 1206, 1207, 1208, 1209, 1211,
- 1213, 1215, 1217, 1219, 1220, 1545, 1545, 1221,
- 1358, 1359, 1290, 1360, 1361, 1362, 1363, 1364,
- 1365, 1319, 1366, 1255, 1367, 1368, 1369, 1370,
- 1371, 1372, 1373, 1374, 1275, 1375, 1376, 1377,
- 1378, 1379, 1380, 1381, 1382, 1383, 1384, 1300,
- 1385, 1387, 1388, 1389, 1390, 1391, 1392, 1393,
- 1394, 1395, 1396, 1233, 1397, 1398, 1399, 1400,
- 1401, 1402, 1403, 1404, 1405, 1271, 1406, 1407,
- 1408, 1409, 1410, 1341, 1412, 1413, 1416, 1418,
- 1419, 1420, 1421, 1422, 1423, 1426, 1427, 1429,
- 1430, 1431, 1433, 1434, 1435, 1436, 1437, 1438,
- 1439, 1440, 1441, 1442, 1443, 1445, 1446, 1447,
- 1448, 1451, 1453, 1454, 1456, 1458, 1460, 1459,
- 1461, 1462, 1459, 1463, 1459, 1464, 1465, 1466,
- 1468, 1469, 1470, 1459, 1472, 1459, 1473, 1459,
- 1474, 1475, 1476, 1477, 1478, 1479, 1480, 1481,
- 1482, 1483, 1484, 1485, 1486, 1487, 1488, 1489,
- 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497,
- 1498, 1499, 1500, 1501, 1502, 1503, 1504, 1505,
- 1506, 1507, 1508, 1459, 1459, 1459, 1459, 1459,
- 1459, 1, 1459, 7, 1459, 1459, 1459, 1459,
- 1459, 415, 416, 420, 421, 422, 423, 424,
- 425, 426, 427, 428, 429, 430, 431, 433,
- 435, 436, 468, 509, 524, 531, 533, 535,
- 555, 558, 574, 687, 1459, 1459, 1459, 691,
- 692, 693, 694, 695, 696, 697, 698, 699,
- 700, 701, 703, 704, 705, 706, 707, 708,
- 709, 710, 711, 712, 713, 714, 715, 716,
- 717, 718, 719, 720, 721, 722, 723, 725,
- 726, 727, 728, 729, 730, 731, 732, 733,
- 734, 735, 736, 737, 738, 739, 741, 742,
- 743, 745, 746, 747, 748, 749, 750, 751,
- 752, 753, 754, 755, 756, 757, 758, 760,
- 761, 762, 763, 764, 765, 766, 767, 768,
- 770, 771, 772, 773, 774, 775, 776, 777,
- 778, 779, 780, 781, 782, 783, 784, 785,
- 786, 787, 789, 790, 791, 792, 793, 794,
- 795, 796, 797, 798, 799, 800, 801, 802,
- 803, 804, 805, 806, 807, 808, 809, 811,
- 812, 813, 814, 815, 816, 817, 818, 819,
- 820, 821, 822, 823, 824, 825, 826, 855,
- 880, 883, 884, 886, 893, 894, 897, 901,
- 913, 918, 919, 921, 924, 926, 1511, 1509,
- 1512, 1517, 1519, 1509, 1520, 1521, 1522, 1509,
- 928, 1509, 1509, 1513, 1514, 1516, 1509, 1515,
- 1509, 1509, 1509, 1518, 1509, 1509, 1509, 933,
- 934, 938, 939, 1523, 1531, 1532, 1533, 1523,
- 937, 1523, 1523, 934, 1527, 1528, 1523, 1523,
- 1523, 1523, 1523, 940, 944, 945, 1534, 1542,
- 1543, 1544, 1534, 943, 1534, 1534, 940, 1538,
- 1539, 1534, 1534, 1534, 1534, 1534, 1545, 1547,
- 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555,
- 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563,
- 1564, 1565, 1566, 1567, 1568, 1569, 1570, 1571,
- 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579,
- 1580, 1581, 1545, 946, 947, 951, 952, 953,
- 954, 955, 956, 957, 958, 959, 960, 961,
- 962, 964, 966, 967, 999, 1040, 1055, 1062,
- 1064, 1066, 1086, 1089, 1105, 1218, 1545, 1222,
- 1223, 1224, 1225, 1226, 1227, 1228, 1229, 1230,
- 1231, 1232, 1234, 1235, 1236, 1237, 1238, 1239,
- 1240, 1241, 1242, 1243, 1244, 1245, 1246, 1247,
- 1248, 1249, 1250, 1251, 1252, 1253, 1254, 1256,
- 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1264,
- 1265, 1266, 1267, 1268, 1269, 1270, 1272, 1273,
- 1274, 1276, 1277, 1278, 1279, 1280, 1281, 1282,
- 1283, 1284, 1285, 1286, 1287, 1288, 1289, 1291,
- 1292, 1293, 1294, 1295, 1296, 1297, 1298, 1299,
- 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308,
- 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316,
- 1317, 1318, 1320, 1321, 1322, 1323, 1324, 1325,
- 1326, 1327, 1328, 1329, 1330, 1331, 1332, 1333,
- 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1342,
- 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350,
- 1351, 1352, 1353, 1354, 1355, 1356, 1357, 1386,
- 1411, 1414, 1415, 1417, 1424, 1425, 1428, 1432,
- 1444, 1449, 1450, 1452, 1455, 1457,
-}
-
-var _hcltok_trans_actions []byte = []byte{
- 145, 107, 0, 0, 91, 141, 0, 7,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 121, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 143, 193, 149, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 147, 125, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 31, 169,
- 0, 0, 0, 35, 33, 0, 55, 41,
- 175, 0, 53, 0, 175, 175, 0, 0,
- 75, 61, 181, 0, 73, 0, 181, 181,
- 0, 0, 85, 187, 89, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 87, 79, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 93,
- 0, 0, 119, 0, 111, 0, 7, 7,
- 7, 0, 0, 113, 0, 115, 0, 123,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 7, 7,
- 7, 196, 196, 196, 196, 196, 196, 7,
- 7, 196, 7, 127, 139, 135, 97, 133,
- 103, 0, 129, 0, 101, 95, 109, 99,
- 131, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 105, 117, 137, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 13,
- 0, 0, 172, 17, 0, 7, 7, 23,
- 0, 25, 27, 0, 0, 0, 151, 0,
- 15, 19, 9, 0, 21, 11, 29, 0,
- 0, 0, 0, 43, 0, 178, 178, 49,
- 0, 157, 154, 1, 175, 175, 45, 37,
- 47, 39, 51, 0, 0, 0, 63, 0,
- 184, 184, 69, 0, 163, 160, 1, 181,
- 181, 65, 57, 67, 59, 71, 77, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 7, 7, 7,
- 190, 190, 190, 190, 190, 190, 7, 7,
- 190, 7, 81, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 83, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
-}
-
-var _hcltok_to_state_actions []byte = []byte{
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 3, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 3, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 166, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 166, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 3, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
-}
-
-var _hcltok_from_state_actions []byte = []byte{
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 5, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 5, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 5, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 5, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 5, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0, 0, 0,
- 0, 0, 0, 0, 0, 0,
-}
-
-var _hcltok_eof_trans []int16 = []int16{
- 0, 1, 1, 1, 6, 6, 6, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 1,
- 1, 1, 1, 1, 1, 1, 1, 419,
- 419, 421, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 419, 419, 419, 419, 419, 419,
- 419, 419, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 670, 670, 670, 670, 670, 670, 670, 670,
- 767, 772, 772, 772, 773, 773, 775, 775,
- 775, 779, 0, 0, 785, 785, 785, 789,
- 0, 0, 795, 795, 797, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 795, 795, 795,
- 795, 795, 795, 795, 795, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 1046, 1046, 1046, 1046, 1046,
- 1046, 1046, 1046, 0, 1196, 1197, 1198, 1200,
- 1198, 1198, 1198, 1203, 1198, 1198, 1198, 1209,
- 1198, 1198, 1239, 1239, 1239, 1239, 1239, 1239,
- 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
- 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
- 1239, 1239, 1239, 1239, 1239, 1239, 1239, 1239,
- 1239, 1239, 1239, 1239, 1239, 0, 1392, 1394,
- 1395, 1399, 1399, 1392, 1402, 1395, 1405, 1395,
- 1407, 1407, 1407, 0, 1416, 1418, 1418, 1416,
- 1416, 1423, 1425, 1427, 1427, 1427, 0, 1435,
- 1437, 1437, 1435, 1435, 1442, 1444, 1446, 1446,
- 1446, 0, 1483, 1511, 1511, 1511, 1511, 1511,
- 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511,
- 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511,
- 1511, 1511, 1511, 1511, 1511, 1511, 1511, 1511,
- 1511, 1511, 1511, 1511, 1511, 1511,
-}
-
-const hcltok_start int = 1459
-const hcltok_first_final int = 1459
-const hcltok_error int = 0
-
-const hcltok_en_stringTemplate int = 1509
-const hcltok_en_heredocTemplate int = 1523
-const hcltok_en_bareTemplate int = 1534
-const hcltok_en_identOnly int = 1545
-const hcltok_en_main int = 1459
-
-//line scan_tokens.rl:16
-
-func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []Token {
- stripData := stripUTF8BOM(data)
- start.Byte += len(data) - len(stripData)
- data = stripData
-
- f := &tokenAccum{
- Filename: filename,
- Bytes: data,
- Pos: start,
- StartByte: start.Byte,
- }
-
-//line scan_tokens.rl:305
-
- // Ragel state
- p := 0 // "Pointer" into data
- pe := len(data) // End-of-data "pointer"
- ts := 0
- te := 0
- act := 0
- eof := pe
- var stack []int
- var top int
-
- var cs int // current state
- switch mode {
- case scanNormal:
- cs = hcltok_en_main
- case scanTemplate:
- cs = hcltok_en_bareTemplate
- case scanIdentOnly:
- cs = hcltok_en_identOnly
- default:
- panic("invalid scanMode")
- }
-
- braces := 0
- var retBraces []int // stack of brace levels that cause us to use fret
- var heredocs []heredocInProgress // stack of heredocs we're currently processing
-
-//line scan_tokens.rl:340
-
- // Make Go compiler happy
- _ = ts
- _ = te
- _ = act
- _ = eof
-
- token := func(ty TokenType) {
- f.emitToken(ty, ts, te)
- }
- selfToken := func() {
- b := data[ts:te]
- if len(b) != 1 {
- // should never happen
- panic("selfToken only works for single-character tokens")
- }
- f.emitToken(TokenType(b[0]), ts, te)
- }
-
-//line scan_tokens.go:4289
- {
- top = 0
- ts = 0
- te = 0
- act = 0
- }
-
-//line scan_tokens.go:4297
- {
- var _klen int
- var _trans int
- var _acts int
- var _nacts uint
- var _keys int
- if p == pe {
- goto _test_eof
- }
- if cs == 0 {
- goto _out
- }
- _resume:
- _acts = int(_hcltok_from_state_actions[cs])
- _nacts = uint(_hcltok_actions[_acts])
- _acts++
- for ; _nacts > 0; _nacts-- {
- _acts++
- switch _hcltok_actions[_acts-1] {
- case 3:
-//line NONE:1
- ts = p
-
-//line scan_tokens.go:4320
- }
- }
-
- _keys = int(_hcltok_key_offsets[cs])
- _trans = int(_hcltok_index_offsets[cs])
-
- _klen = int(_hcltok_single_lengths[cs])
- if _klen > 0 {
- _lower := int(_keys)
- var _mid int
- _upper := int(_keys + _klen - 1)
- for {
- if _upper < _lower {
- break
- }
-
- _mid = _lower + ((_upper - _lower) >> 1)
- switch {
- case data[p] < _hcltok_trans_keys[_mid]:
- _upper = _mid - 1
- case data[p] > _hcltok_trans_keys[_mid]:
- _lower = _mid + 1
- default:
- _trans += int(_mid - int(_keys))
- goto _match
- }
- }
- _keys += _klen
- _trans += _klen
- }
-
- _klen = int(_hcltok_range_lengths[cs])
- if _klen > 0 {
- _lower := int(_keys)
- var _mid int
- _upper := int(_keys + (_klen << 1) - 2)
- for {
- if _upper < _lower {
- break
- }
-
- _mid = _lower + (((_upper - _lower) >> 1) & ^1)
- switch {
- case data[p] < _hcltok_trans_keys[_mid]:
- _upper = _mid - 2
- case data[p] > _hcltok_trans_keys[_mid+1]:
- _lower = _mid + 2
- default:
- _trans += int((_mid - int(_keys)) >> 1)
- goto _match
- }
- }
- _trans += _klen
- }
-
- _match:
- _trans = int(_hcltok_indicies[_trans])
- _eof_trans:
- cs = int(_hcltok_trans_targs[_trans])
-
- if _hcltok_trans_actions[_trans] == 0 {
- goto _again
- }
-
- _acts = int(_hcltok_trans_actions[_trans])
- _nacts = uint(_hcltok_actions[_acts])
- _acts++
- for ; _nacts > 0; _nacts-- {
- _acts++
- switch _hcltok_actions[_acts-1] {
- case 0:
-//line scan_tokens.rl:224
- p--
-
- case 4:
-//line NONE:1
- te = p + 1
-
- case 5:
-//line scan_tokens.rl:248
- act = 4
- case 6:
-//line scan_tokens.rl:250
- act = 6
- case 7:
-//line scan_tokens.rl:160
- te = p + 1
- {
- token(TokenTemplateInterp)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 8:
-//line scan_tokens.rl:170
- te = p + 1
- {
- token(TokenTemplateControl)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 9:
-//line scan_tokens.rl:84
- te = p + 1
- {
- token(TokenCQuote)
- top--
- cs = stack[top]
- {
- stack = stack[:len(stack)-1]
- }
- goto _again
-
- }
- case 10:
-//line scan_tokens.rl:248
- te = p + 1
- {
- token(TokenQuotedLit)
- }
- case 11:
-//line scan_tokens.rl:251
- te = p + 1
- {
- token(TokenBadUTF8)
- }
- case 12:
-//line scan_tokens.rl:160
- te = p
- p--
- {
- token(TokenTemplateInterp)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 13:
-//line scan_tokens.rl:170
- te = p
- p--
- {
- token(TokenTemplateControl)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 14:
-//line scan_tokens.rl:248
- te = p
- p--
- {
- token(TokenQuotedLit)
- }
- case 15:
-//line scan_tokens.rl:249
- te = p
- p--
- {
- token(TokenQuotedNewline)
- }
- case 16:
-//line scan_tokens.rl:250
- te = p
- p--
- {
- token(TokenInvalid)
- }
- case 17:
-//line scan_tokens.rl:251
- te = p
- p--
- {
- token(TokenBadUTF8)
- }
- case 18:
-//line scan_tokens.rl:248
- p = (te) - 1
- {
- token(TokenQuotedLit)
- }
- case 19:
-//line scan_tokens.rl:251
- p = (te) - 1
- {
- token(TokenBadUTF8)
- }
- case 20:
-//line NONE:1
- switch act {
- case 4:
- {
- p = (te) - 1
- token(TokenQuotedLit)
- }
- case 6:
- {
- p = (te) - 1
- token(TokenInvalid)
- }
- }
-
- case 21:
-//line scan_tokens.rl:148
- act = 11
- case 22:
-//line scan_tokens.rl:259
- act = 12
- case 23:
-//line scan_tokens.rl:160
- te = p + 1
- {
- token(TokenTemplateInterp)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 24:
-//line scan_tokens.rl:170
- te = p + 1
- {
- token(TokenTemplateControl)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 25:
-//line scan_tokens.rl:111
- te = p + 1
- {
- // This action is called specificially when a heredoc literal
- // ends with a newline character.
-
- // This might actually be our end marker.
- topdoc := &heredocs[len(heredocs)-1]
- if topdoc.StartOfLine {
- maybeMarker := bytes.TrimSpace(data[ts:te])
- if bytes.Equal(maybeMarker, topdoc.Marker) {
- // We actually emit two tokens here: the end-of-heredoc
- // marker first, and then separately the newline that
- // follows it. This then avoids issues with the closing
- // marker consuming a newline that would normally be used
- // to mark the end of an attribute definition.
- // We might have either a \n sequence or an \r\n sequence
- // here, so we must handle both.
- nls := te - 1
- nle := te
- te--
- if data[te-1] == '\r' {
- // back up one more byte
- nls--
- te--
- }
- token(TokenCHeredoc)
- ts = nls
- te = nle
- token(TokenNewline)
- heredocs = heredocs[:len(heredocs)-1]
- top--
- cs = stack[top]
- {
- stack = stack[:len(stack)-1]
- }
- goto _again
-
- }
- }
-
- topdoc.StartOfLine = true
- token(TokenStringLit)
- }
- case 26:
-//line scan_tokens.rl:259
- te = p + 1
- {
- token(TokenBadUTF8)
- }
- case 27:
-//line scan_tokens.rl:160
- te = p
- p--
- {
- token(TokenTemplateInterp)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 28:
-//line scan_tokens.rl:170
- te = p
- p--
- {
- token(TokenTemplateControl)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 29:
-//line scan_tokens.rl:148
- te = p
- p--
- {
- // This action is called when a heredoc literal _doesn't_ end
- // with a newline character, e.g. because we're about to enter
- // an interpolation sequence.
- heredocs[len(heredocs)-1].StartOfLine = false
- token(TokenStringLit)
- }
- case 30:
-//line scan_tokens.rl:259
- te = p
- p--
- {
- token(TokenBadUTF8)
- }
- case 31:
-//line scan_tokens.rl:148
- p = (te) - 1
- {
- // This action is called when a heredoc literal _doesn't_ end
- // with a newline character, e.g. because we're about to enter
- // an interpolation sequence.
- heredocs[len(heredocs)-1].StartOfLine = false
- token(TokenStringLit)
- }
- case 32:
-//line NONE:1
- switch act {
- case 0:
- {
- cs = 0
- goto _again
- }
- case 11:
- {
- p = (te) - 1
-
- // This action is called when a heredoc literal _doesn't_ end
- // with a newline character, e.g. because we're about to enter
- // an interpolation sequence.
- heredocs[len(heredocs)-1].StartOfLine = false
- token(TokenStringLit)
- }
- case 12:
- {
- p = (te) - 1
- token(TokenBadUTF8)
- }
- }
-
- case 33:
-//line scan_tokens.rl:156
- act = 15
- case 34:
-//line scan_tokens.rl:266
- act = 16
- case 35:
-//line scan_tokens.rl:160
- te = p + 1
- {
- token(TokenTemplateInterp)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 36:
-//line scan_tokens.rl:170
- te = p + 1
- {
- token(TokenTemplateControl)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 37:
-//line scan_tokens.rl:156
- te = p + 1
- {
- token(TokenStringLit)
- }
- case 38:
-//line scan_tokens.rl:266
- te = p + 1
- {
- token(TokenBadUTF8)
- }
- case 39:
-//line scan_tokens.rl:160
- te = p
- p--
- {
- token(TokenTemplateInterp)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 40:
-//line scan_tokens.rl:170
- te = p
- p--
- {
- token(TokenTemplateControl)
- braces++
- retBraces = append(retBraces, braces)
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false
- }
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1459
- goto _again
- }
- }
- case 41:
-//line scan_tokens.rl:156
- te = p
- p--
- {
- token(TokenStringLit)
- }
- case 42:
-//line scan_tokens.rl:266
- te = p
- p--
- {
- token(TokenBadUTF8)
- }
- case 43:
-//line scan_tokens.rl:156
- p = (te) - 1
- {
- token(TokenStringLit)
- }
- case 44:
-//line NONE:1
- switch act {
- case 0:
- {
- cs = 0
- goto _again
- }
- case 15:
- {
- p = (te) - 1
-
- token(TokenStringLit)
- }
- case 16:
- {
- p = (te) - 1
- token(TokenBadUTF8)
- }
- }
-
- case 45:
-//line scan_tokens.rl:270
- act = 17
- case 46:
-//line scan_tokens.rl:271
- act = 18
- case 47:
-//line scan_tokens.rl:271
- te = p + 1
- {
- token(TokenBadUTF8)
- }
- case 48:
-//line scan_tokens.rl:272
- te = p + 1
- {
- token(TokenInvalid)
- }
- case 49:
-//line scan_tokens.rl:270
- te = p
- p--
- {
- token(TokenIdent)
- }
- case 50:
-//line scan_tokens.rl:271
- te = p
- p--
- {
- token(TokenBadUTF8)
- }
- case 51:
-//line scan_tokens.rl:270
- p = (te) - 1
- {
- token(TokenIdent)
- }
- case 52:
-//line scan_tokens.rl:271
- p = (te) - 1
- {
- token(TokenBadUTF8)
- }
- case 53:
-//line NONE:1
- switch act {
- case 17:
- {
- p = (te) - 1
- token(TokenIdent)
- }
- case 18:
- {
- p = (te) - 1
- token(TokenBadUTF8)
- }
- }
-
- case 54:
-//line scan_tokens.rl:278
- act = 22
- case 55:
-//line scan_tokens.rl:301
- act = 39
- case 56:
-//line scan_tokens.rl:280
- te = p + 1
- {
- token(TokenComment)
- }
- case 57:
-//line scan_tokens.rl:281
- te = p + 1
- {
- token(TokenNewline)
- }
- case 58:
-//line scan_tokens.rl:283
- te = p + 1
- {
- token(TokenEqualOp)
- }
- case 59:
-//line scan_tokens.rl:284
- te = p + 1
- {
- token(TokenNotEqual)
- }
- case 60:
-//line scan_tokens.rl:285
- te = p + 1
- {
- token(TokenGreaterThanEq)
- }
- case 61:
-//line scan_tokens.rl:286
- te = p + 1
- {
- token(TokenLessThanEq)
- }
- case 62:
-//line scan_tokens.rl:287
- te = p + 1
- {
- token(TokenAnd)
- }
- case 63:
-//line scan_tokens.rl:288
- te = p + 1
- {
- token(TokenOr)
- }
- case 64:
-//line scan_tokens.rl:289
- te = p + 1
- {
- token(TokenEllipsis)
- }
- case 65:
-//line scan_tokens.rl:290
- te = p + 1
- {
- token(TokenFatArrow)
- }
- case 66:
-//line scan_tokens.rl:291
- te = p + 1
- {
- selfToken()
- }
- case 67:
-//line scan_tokens.rl:180
- te = p + 1
- {
- token(TokenOBrace)
- braces++
- }
- case 68:
-//line scan_tokens.rl:185
- te = p + 1
- {
- if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces {
- token(TokenTemplateSeqEnd)
- braces--
- retBraces = retBraces[0 : len(retBraces)-1]
- top--
- cs = stack[top]
- {
- stack = stack[:len(stack)-1]
- }
- goto _again
-
- } else {
- token(TokenCBrace)
- braces--
- }
- }
- case 69:
-//line scan_tokens.rl:197
- te = p + 1
- {
- // Only consume from the retBraces stack and return if we are at
- // a suitable brace nesting level, otherwise things will get
- // confused. (Not entering this branch indicates a syntax error,
- // which we will catch in the parser.)
- if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces {
- token(TokenTemplateSeqEnd)
- braces--
- retBraces = retBraces[0 : len(retBraces)-1]
- top--
- cs = stack[top]
- {
- stack = stack[:len(stack)-1]
- }
- goto _again
-
- } else {
- // We intentionally generate a TokenTemplateSeqEnd here,
- // even though the user apparently wanted a brace, because
- // we want to allow the parser to catch the incorrect use
- // of a ~} to balance a generic opening brace, rather than
- // a template sequence.
- token(TokenTemplateSeqEnd)
- braces--
- }
- }
- case 70:
-//line scan_tokens.rl:79
- te = p + 1
- {
- token(TokenOQuote)
- {
- stack = append(stack, 0)
- stack[top] = cs
- top++
- cs = 1509
- goto _again
- }
- }
- case 71:
-//line scan_tokens.rl:89
- te = p + 1
- {
- token(TokenOHeredoc)
- // the token is currently the whole heredoc introducer, like
- // <<EOT or <<-EOT, followed by a newline. We want to extract
- for ; _nacts > 0; _nacts-- {
- _acts++
- switch _hcltok_actions[_acts-1] {
- case 1:
-//line NONE:1
- ts = 0
-
- case 2:
-//line NONE:1
- act = 0
-
-//line scan_tokens.go:5073
- }
- }
-
- if cs == 0 {
- goto _out
- }
- p++
- if p != pe {
- goto _resume
- }
- _test_eof:
- {
- }
- if p == eof {
- if _hcltok_eof_trans[cs] > 0 {
- _trans = int(_hcltok_eof_trans[cs] - 1)
- goto _eof_trans
- }
- }
-
- _out:
- {
- }
- }
-
-//line scan_tokens.rl:363
-
- // If we fall out here without being in a final state then we've
- // encountered something that the scanner can't match, which we'll
- // deal with as an invalid.
- if cs < hcltok_first_final {
- if mode == scanTemplate && len(stack) == 0 {
- // If we're scanning a bare template then any straggling
- // top-level stuff is actually literal string, rather than
- // invalid. This handles the case where the template ends
- // with a single "$" or "%", which trips us up because we
- // want to see another character to decide if it's a sequence
- // or an escape.
- f.emitToken(TokenStringLit, ts, len(data))
- } else {
- f.emitToken(TokenInvalid, ts, len(data))
- }
- }
-
- // We always emit a synthetic EOF token at the end, since it gives the
- // parser position information for an "unexpected EOF" diagnostic.
- f.emitToken(TokenEOF, len(data), len(data))
-
- return f.Tokens
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl
deleted file mode 100644
index 4443dc480..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/scan_tokens.rl
+++ /dev/null
@@ -1,395 +0,0 @@
-
-package hclsyntax
-
-import (
- "bytes"
-
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// This file is generated from scan_tokens.rl. DO NOT EDIT.
-%%{
- # (except when you are actually in scan_tokens.rl here, so edit away!)
-
- machine hcltok;
- write data;
-}%%
-
-func scanTokens(data []byte, filename string, start hcl.Pos, mode scanMode) []Token {
- stripData := stripUTF8BOM(data)
- start.Byte += len(data) - len(stripData)
- data = stripData
-
- f := &tokenAccum{
- Filename: filename,
- Bytes: data,
- Pos: start,
- StartByte: start.Byte,
- }
-
- %%{
- include UnicodeDerived "unicode_derived.rl";
-
- UTF8Cont = 0x80 .. 0xBF;
- AnyUTF8 = (
- 0x00..0x7F |
- 0xC0..0xDF . UTF8Cont |
- 0xE0..0xEF . UTF8Cont . UTF8Cont |
- 0xF0..0xF7 . UTF8Cont . UTF8Cont . UTF8Cont
- );
- BrokenUTF8 = any - AnyUTF8;
-
- NumberLitContinue = (digit|'.'|('e'|'E') ('+'|'-')? digit);
- NumberLit = digit ("" | (NumberLitContinue - '.') | (NumberLitContinue* (NumberLitContinue - '.')));
- Ident = (ID_Start | '_') (ID_Continue | '-')*;
-
- # Symbols that just represent themselves are handled as a single rule.
- SelfToken = "[" | "]" | "(" | ")" | "." | "," | "*" | "/" | "%" | "+" | "-" | "=" | "<" | ">" | "!" | "?" | ":" | "\n" | "&" | "|" | "~" | "^" | ";" | "`" | "'";
-
- EqualOp = "==";
- NotEqual = "!=";
- GreaterThanEqual = ">=";
- LessThanEqual = "<=";
- LogicalAnd = "&&";
- LogicalOr = "||";
-
- Ellipsis = "...";
- FatArrow = "=>";
-
- Newline = '\r' ? '\n';
- EndOfLine = Newline;
-
- BeginStringTmpl = '"';
- BeginHeredocTmpl = '<<' ('-')? Ident Newline;
-
- Comment = (
- # The :>> operator in these is a "finish-guarded concatenation",
- # which terminates the sequence on its left when it completes
- # the sequence on its right.
- # In the single-line comment cases this is allowing us to make
- # the trailing EndOfLine optional while still having the overall
- # pattern terminate. In the multi-line case it ensures that
- # the first comment in the file ends at the first */, rather than
- # gobbling up all of the "any*" until the _final_ */ in the file.
- ("#" (any - EndOfLine)* :>> EndOfLine?) |
- ("//" (any - EndOfLine)* :>> EndOfLine?) |
- ("/*" any* :>> "*/")
- );
-
- # Note: hclwrite assumes that only ASCII spaces appear between tokens,
- # and uses this assumption to recreate the spaces between tokens by
- # looking at byte offset differences. This means it will produce
- # incorrect results in the presence of tabs, but that's acceptable
- # because the canonical style (which hclwrite itself can impose
-    # automatically) is to never use tabs.
- Spaces = (' ' | 0x09)+;
-
- action beginStringTemplate {
- token(TokenOQuote);
- fcall stringTemplate;
- }
-
- action endStringTemplate {
- token(TokenCQuote);
- fret;
- }
-
- action beginHeredocTemplate {
- token(TokenOHeredoc);
- // the token is currently the whole heredoc introducer, like
-    // <<EOT or <<-EOT, followed by a newline. We want to extract
-    // just the "EOT" portion that we'll use as the closing marker.
-    marker := data[ts+2:te-1]
-    if marker[0] == '-' {
-      marker = marker[1:]
-    }
-    if marker[len(marker)-1] == '\r' {
-      marker = marker[:len(marker)-1]
-    }
-    heredocs = append(heredocs, heredocInProgress{
-      Marker:      marker,
-      StartOfLine: true,
-    })
-    fcall heredocTemplate;
-  }
-
-  action beginTemplateInterp {
-    token(TokenTemplateInterp);
-    braces++;
-    retBraces = append(retBraces, braces);
-    if len(heredocs) > 0 {
-      heredocs[len(heredocs)-1].StartOfLine = false;
-    }
-    fcall main;
-  }
-
- action beginTemplateControl {
- token(TokenTemplateControl);
- braces++;
- retBraces = append(retBraces, braces);
- if len(heredocs) > 0 {
- heredocs[len(heredocs)-1].StartOfLine = false;
- }
- fcall main;
- }
-
- action openBrace {
- token(TokenOBrace);
- braces++;
- }
-
- action closeBrace {
- if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces {
- token(TokenTemplateSeqEnd);
- braces--;
- retBraces = retBraces[0:len(retBraces)-1]
- fret;
- } else {
- token(TokenCBrace);
- braces--;
- }
- }
-
- action closeTemplateSeqEatWhitespace {
- // Only consume from the retBraces stack and return if we are at
- // a suitable brace nesting level, otherwise things will get
- // confused. (Not entering this branch indicates a syntax error,
- // which we will catch in the parser.)
- if len(retBraces) > 0 && retBraces[len(retBraces)-1] == braces {
- token(TokenTemplateSeqEnd);
- braces--;
- retBraces = retBraces[0:len(retBraces)-1]
- fret;
- } else {
- // We intentionally generate a TokenTemplateSeqEnd here,
- // even though the user apparently wanted a brace, because
- // we want to allow the parser to catch the incorrect use
- // of a ~} to balance a generic opening brace, rather than
- // a template sequence.
- token(TokenTemplateSeqEnd);
- braces--;
- }
- }
-
- TemplateInterp = "${" ("~")?;
- TemplateControl = "%{" ("~")?;
- EndStringTmpl = '"';
- NewlineChars = ("\r"|"\n");
- NewlineCharsSeq = NewlineChars+;
- StringLiteralChars = (AnyUTF8 - NewlineChars);
- TemplateIgnoredNonBrace = (^'{' %{ fhold; });
- TemplateNotInterp = '$' (TemplateIgnoredNonBrace | TemplateInterp);
- TemplateNotControl = '%' (TemplateIgnoredNonBrace | TemplateControl);
- QuotedStringLiteralWithEsc = ('\\' StringLiteralChars) | (StringLiteralChars - ("$" | '%' | '"' | "\\"));
- TemplateStringLiteral = (
- (TemplateNotInterp) |
- (TemplateNotControl) |
- (QuotedStringLiteralWithEsc)+
- );
- HeredocStringLiteral = (
- (TemplateNotInterp) |
- (TemplateNotControl) |
- (StringLiteralChars - ("$" | '%'))*
- );
- BareStringLiteral = (
- (TemplateNotInterp) |
- (TemplateNotControl) |
- (StringLiteralChars - ("$" | '%'))*
- ) Newline?;
-
- stringTemplate := |*
- TemplateInterp => beginTemplateInterp;
- TemplateControl => beginTemplateControl;
- EndStringTmpl => endStringTemplate;
- TemplateStringLiteral => { token(TokenQuotedLit); };
- NewlineCharsSeq => { token(TokenQuotedNewline); };
- AnyUTF8 => { token(TokenInvalid); };
- BrokenUTF8 => { token(TokenBadUTF8); };
- *|;
-
- heredocTemplate := |*
- TemplateInterp => beginTemplateInterp;
- TemplateControl => beginTemplateControl;
- HeredocStringLiteral EndOfLine => heredocLiteralEOL;
- HeredocStringLiteral => heredocLiteralMidline;
- BrokenUTF8 => { token(TokenBadUTF8); };
- *|;
-
- bareTemplate := |*
- TemplateInterp => beginTemplateInterp;
- TemplateControl => beginTemplateControl;
- BareStringLiteral => bareTemplateLiteral;
- BrokenUTF8 => { token(TokenBadUTF8); };
- *|;
-
- identOnly := |*
- Ident => { token(TokenIdent) };
- BrokenUTF8 => { token(TokenBadUTF8) };
- AnyUTF8 => { token(TokenInvalid) };
- *|;
-
- main := |*
- Spaces => {};
- NumberLit => { token(TokenNumberLit) };
- Ident => { token(TokenIdent) };
-
- Comment => { token(TokenComment) };
- Newline => { token(TokenNewline) };
-
- EqualOp => { token(TokenEqualOp); };
- NotEqual => { token(TokenNotEqual); };
- GreaterThanEqual => { token(TokenGreaterThanEq); };
- LessThanEqual => { token(TokenLessThanEq); };
- LogicalAnd => { token(TokenAnd); };
- LogicalOr => { token(TokenOr); };
- Ellipsis => { token(TokenEllipsis); };
- FatArrow => { token(TokenFatArrow); };
- SelfToken => { selfToken() };
-
- "{" => openBrace;
- "}" => closeBrace;
-
- "~}" => closeTemplateSeqEatWhitespace;
-
- BeginStringTmpl => beginStringTemplate;
- BeginHeredocTmpl => beginHeredocTemplate;
-
- BrokenUTF8 => { token(TokenBadUTF8) };
- AnyUTF8 => { token(TokenInvalid) };
- *|;
-
- }%%
-
- // Ragel state
- p := 0 // "Pointer" into data
- pe := len(data) // End-of-data "pointer"
- ts := 0
- te := 0
- act := 0
- eof := pe
- var stack []int
- var top int
-
- var cs int // current state
- switch mode {
- case scanNormal:
- cs = hcltok_en_main
- case scanTemplate:
- cs = hcltok_en_bareTemplate
- case scanIdentOnly:
- cs = hcltok_en_identOnly
- default:
- panic("invalid scanMode")
- }
-
- braces := 0
- var retBraces []int // stack of brace levels that cause us to use fret
- var heredocs []heredocInProgress // stack of heredocs we're currently processing
-
- %%{
- prepush {
- stack = append(stack, 0);
- }
- postpop {
- stack = stack[:len(stack)-1];
- }
- }%%
-
- // Make Go compiler happy
- _ = ts
- _ = te
- _ = act
- _ = eof
-
- token := func (ty TokenType) {
- f.emitToken(ty, ts, te)
- }
- selfToken := func () {
- b := data[ts:te]
- if len(b) != 1 {
- // should never happen
- panic("selfToken only works for single-character tokens")
- }
- f.emitToken(TokenType(b[0]), ts, te)
- }
-
- %%{
- write init nocs;
- write exec;
- }%%
-
- // If we fall out here without being in a final state then we've
- // encountered something that the scanner can't match, which we'll
- // deal with as an invalid.
- if cs < hcltok_first_final {
- if mode == scanTemplate && len(stack) == 0 {
- // If we're scanning a bare template then any straggling
- // top-level stuff is actually literal string, rather than
- // invalid. This handles the case where the template ends
- // with a single "$" or "%", which trips us up because we
- // want to see another character to decide if it's a sequence
- // or an escape.
- f.emitToken(TokenStringLit, ts, len(data))
- } else {
- f.emitToken(TokenInvalid, ts, len(data))
- }
- }
-
- // We always emit a synthetic EOF token at the end, since it gives the
- // parser position information for an "unexpected EOF" diagnostic.
- f.emitToken(TokenEOF, len(data), len(data))
-
- return f.Tokens
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md
deleted file mode 100644
index d7faeedce..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/spec.md
+++ /dev/null
@@ -1,926 +0,0 @@
-# HCL Native Syntax Specification
-
-This is the specification of the syntax and semantics of the native syntax
-for HCL. HCL is a system for defining configuration languages for applications.
-The HCL information model is designed to support multiple concrete syntaxes
-for configuration, but this native syntax is considered the primary format
-and is optimized for human authoring and maintenance, as opposed to machine
-generation of configuration.
-
-The language consists of three integrated sub-languages:
-
-- The _structural_ language defines the overall hierarchical configuration
- structure, and is a serialization of HCL bodies, blocks and attributes.
-
-- The _expression_ language is used to express attribute values, either as
- literals or as derivations of other values.
-
-- The _template_ language is used to compose values together into strings,
- as one of several types of expression in the expression language.
-
-In normal use these three sub-languages are used together within configuration
-files to describe an overall configuration, with the structural language
-being used at the top level. The expression and template languages can also
-be used in isolation, to implement features such as REPLs, debuggers, and
-integration into more limited HCL syntaxes such as the JSON profile.
-
-## Syntax Notation
-
-Within this specification a semi-formal notation is used to illustrate the
-details of syntax. This notation is intended for human consumption rather
-than machine consumption, with the following conventions:
-
-- A naked name starting with an uppercase letter is a global production,
- common to all of the syntax specifications in this document.
-- A naked name starting with a lowercase letter is a local production,
- meaningful only within the specification where it is defined.
-- Double and single quotes (`"` and `'`) are used to mark literal character
- sequences, which may be either punctuation markers or keywords.
-- The default operator for combining items, which has no punctuation,
- is concatenation.
-- The symbol `|` indicates that any one of its left and right operands may
- be present.
-- The `*` symbol indicates zero or more repetitions of the item to its left.
-- The `?` symbol indicates zero or one of the item to its left.
-- Parentheses (`(` and `)`) are used to group items together to apply
- the `|`, `*` and `?` operators to them collectively.
-
-The grammar notation does not fully describe the language. The prose may
-augment or conflict with the illustrated grammar. In case of conflict, prose
-has priority.
-
-## Source Code Representation
-
-Source code is unicode text expressed in the UTF-8 encoding. The language
-itself does not perform unicode normalization, so syntax features such as
-identifiers are sequences of unicode code points and so e.g. a precombined
-accented character is distinct from a letter associated with a combining
-accent. (String literals have some special handling with regard to Unicode
-normalization which will be covered later in the relevant section.)
-
-UTF-8 encoded Unicode byte order marks are not permitted. Invalid or
-non-normalized UTF-8 encoding is always a parse error.
-
-## Lexical Elements
-
-### Comments and Whitespace
-
-Comments and Whitespace are recognized as lexical elements but are ignored
-except as described below.
-
-Whitespace is defined as a sequence of zero or more space characters
-(U+0020). Newline sequences (either U+000A or U+000D followed by U+000A)
-are _not_ considered whitespace but are ignored as such in certain contexts.
-
-Horizontal tab characters (U+0009) are not considered to be whitespace and
-are not valid within HCL native syntax.
-
-Comments serve as program documentation and come in two forms:
-
-- _Line comments_ start with either the `//` or `#` sequences and end with
-  the next newline sequence. A line comment is considered equivalent to a
- newline sequence.
-
-- _Inline comments_ start with the `/*` sequence and end with the `*/`
- sequence, and may have any characters within except the ending sequence.
-  An inline comment is considered equivalent to a whitespace sequence.
-
-Comments and whitespace cannot begin within other comments, or within
-template literals except inside an interpolation sequence or template directive.
-
-### Identifiers
-
-Identifiers name entities such as blocks, attributes and expression variables.
-Identifiers are interpreted as per [UAX #31][uax31] Section 2. Specifically,
-their syntax is defined in terms of the `ID_Start` and `ID_Continue`
-character properties as follows:
-
-```ebnf
-Identifier = ID_Start (ID_Continue | '-')*;
-```
-
-The Unicode specification provides the normative requirements for identifier
-parsing. Non-normatively, the spirit of this specification is that `ID_Start`
-consists of Unicode letters and certain unambiguous punctuation tokens, while
-`ID_Continue` augments that set with Unicode digits, combining marks, etc.
-
-The dash character `-` is additionally allowed in identifiers, even though
-that is not part of the unicode `ID_Continue` definition. This is to allow
-attribute names and block type names to contain dashes, although underscores
-as word separators are considered the idiomatic usage.
-
-[uax31]: http://unicode.org/reports/tr31/ "Unicode Identifier and Pattern Syntax"
-
-### Keywords
-
-There are no globally-reserved words, but in some contexts certain identifiers
-are reserved to function as keywords. These are discussed further in the
-relevant documentation sections that follow. In such situations, the
-identifier's role as a keyword supersedes any other valid interpretation that
-may be possible. Outside of these specific situations, the keywords have no
-special meaning and are interpreted as regular identifiers.
-
-### Operators and Delimiters
-
-The following character sequences represent operators, delimiters, and other
-special tokens:
-
-```
-+ && == < : { [ ( ${
-- || != > ? } ] ) %{
-* ! <= = .
-/ >= => ,
-% ...
-```
-
-### Numeric Literals
-
-A numeric literal is a decimal representation of a
-real number. It has an integer part, a fractional part,
-and an exponent part.
-
-```ebnf
-NumericLit = decimal+ ("." decimal+)? (expmark decimal+)?;
-decimal = '0' .. '9';
-expmark = ('e' | 'E') ("+" | "-")?;
-```
-
-## Structural Elements
-
-The structural language consists of syntax representing the following
-constructs:
-
-- _Attributes_, which assign a value to a specified name.
-- _Blocks_, which create a child body annotated by a type and optional labels.
-- _Body Content_, which consists of a collection of attributes and blocks.
-
-These constructs correspond to the similarly-named concepts in the
-language-agnostic HCL information model.
-
-```ebnf
-ConfigFile = Body;
-Body = (Attribute | Block | OneLineBlock)*;
-Attribute = Identifier "=" Expression Newline;
-Block = Identifier (StringLit|Identifier)* "{" Newline Body "}" Newline;
-OneLineBlock = Identifier (StringLit|Identifier)* "{" (Identifier "=" Expression)? "}" Newline;
-```
-
-### Configuration Files
-
-A _configuration file_ is a sequence of characters whose top-level is
-interpreted as a Body.
-
-### Bodies
-
-A _body_ is a collection of associated attributes and blocks. The meaning of
-this association is defined by the calling application.
-
-### Attribute Definitions
-
-An _attribute definition_ assigns a value to a particular attribute name within
-a body. Each distinct attribute name may be defined no more than once within a
-single body.
-
-The attribute value is given as an expression, which is retained literally
-for later evaluation by the calling application.
-
-### Blocks
-
-A _block_ creates a child body that is annotated with a block _type_ and
-zero or more block _labels_. Blocks create a structural hierarchy which can be
-interpreted by the calling application.
-
-Block labels can either be quoted literal strings or naked identifiers.
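As a concrete illustration of the structural sub-language, the sketch below parses a small body with the vendored `hclsyntax` package (`ParseConfig` is its exported entry point) and walks the resulting attributes and blocks. The attribute and block names are invented for the example.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	src := []byte(`
region = "eu-west-2"

network "main" {
  cidr_block = "10.0.0.0/16"
}
`)

	file, diags := hclsyntax.ParseConfig(src, "demo.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		fmt.Println(diags)
		return
	}

	// The native-syntax parser always produces a *hclsyntax.Body as the root.
	body := file.Body.(*hclsyntax.Body)
	for name := range body.Attributes {
		fmt.Println("attribute:", name) // attribute: region
	}
	for _, block := range body.Blocks {
		fmt.Println("block:", block.Type, block.Labels) // block: network [main]
	}
}
```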
-
-## Expressions
-
-The expression sub-language is used within attribute definitions to specify
-values.
-
-```ebnf
-Expression = (
- ExprTerm |
- Operation |
- Conditional
-);
-```
-
-### Types
-
-The value types used within the expression language are those defined by the
-syntax-agnostic HCL information model. An expression may return any valid
-type, but only a subset of the available types have first-class syntax.
-A calling application may make other types available via _variables_ and
-_functions_.
-
-### Expression Terms
-
-Expression _terms_ are the operands for unary and binary expressions, as well
-as acting as expressions in their own right.
-
-```ebnf
-ExprTerm = (
- LiteralValue |
- CollectionValue |
- TemplateExpr |
- VariableExpr |
- FunctionCall |
- ForExpr |
- ExprTerm Index |
- ExprTerm GetAttr |
- ExprTerm Splat |
- "(" Expression ")"
-);
-```
-
-The productions for these different term types are given in their corresponding
-sections.
-
-Between the `(` and `)` characters denoting a sub-expression, newline
-characters are ignored as whitespace.
-
-### Literal Values
-
-A _literal value_ immediately represents a particular value of a primitive
-type.
-
-```ebnf
-LiteralValue = (
- NumericLit |
- "true" |
- "false" |
- "null"
-);
-```
-
-- Numeric literals represent values of type _number_.
-- The `true` and `false` keywords represent values of type _bool_.
-- The `null` keyword represents a null value of the dynamic pseudo-type.
-
-String literals are not directly available in the expression sub-language, but
-are available via the template sub-language, which can in turn be incorporated
-via _template expressions_.
-
-### Collection Values
-
-A _collection value_ combines zero or more other expressions to produce a
-collection value.
-
-```ebnf
-CollectionValue = tuple | object;
-tuple = "[" (
- (Expression ("," Expression)* ","?)?
-) "]";
-object = "{" (
- (objectelem ("," objectelem)* ","?)?
-) "}";
-objectelem = (Identifier | Expression) "=" Expression;
-```
-
-Only tuple and object values can be directly constructed via native syntax.
-Tuple and object values can in turn be converted to list, set and map values
-with other operations, which behave as defined by the syntax-agnostic HCL
-information model.
-
-When specifying an object element, an identifier is interpreted as a literal
-attribute name as opposed to a variable reference. To populate an item key
-from a variable, use parentheses to disambiguate:
-
-- `{foo = "baz"}` is interpreted as an attribute literally named `foo`.
-- `{(foo) = "baz"}` is interpreted as an attribute whose name is taken
- from the variable named `foo`.
-
-Between the open and closing delimiters of these sequences, newline sequences
-are ignored as whitespace.
-
-There is a syntax ambiguity between _for expressions_ and collection values
-whose first element is a reference to a variable named `for`. The
-_for expression_ interpretation has priority, so to produce a tuple whose
-first element is the value of a variable named `for`, or an object with a
-key named `for`, use parentheses to disambiguate:
-
-- `[for, foo, baz]` is a syntax error.
-- `[(for), foo, baz]` is a tuple whose first element is the value of variable
- `for`.
-- `{for: 1, baz: 2}` is a syntax error.
-- `{(for): 1, baz: 2}` is an object with an attribute literally named `for`.
-- `{baz: 2, for: 1}` is equivalent to the previous example, and resolves the
- ambiguity by reordering.
-
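The parenthesization rules above can be exercised directly with the expression parser. The sketch below evaluates an object constructor with and without a parenthesized key, using an evaluation context that defines a variable named `foo`; the variable name and its value are illustrative.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			"foo": cty.StringVal("dynamic_name"),
		},
	}

	// Without parentheses the key is the literal attribute name "foo".
	lit, _ := hclsyntax.ParseExpression([]byte(`{foo = "baz"}`), "lit.hcl", hcl.Pos{Line: 1, Column: 1})
	litVal, _ := lit.Value(ctx)
	fmt.Println(litVal.GoString())

	// With parentheses the key is taken from the variable's value.
	ref, _ := hclsyntax.ParseExpression([]byte(`{(foo) = "baz"}`), "ref.hcl", hcl.Pos{Line: 1, Column: 1})
	refVal, _ := ref.Value(ctx)
	fmt.Println(refVal.GoString())
}
```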
-### Template Expressions
-
-A _template expression_ embeds a program written in the template sub-language
-as an expression. Template expressions come in two forms:
-
-- A _quoted_ template expression is delimited by quote characters (`"`) and
- defines a template as a single-line expression with escape characters.
-- A _heredoc_ template expression is introduced by a `<<` sequence and
- defines a template via a multi-line sequence terminated by a user-chosen
- delimiter.
-
-In both cases the template interpolation and directive syntax is available for
-use within the delimiters, and any text outside of these special sequences is
-interpreted as a literal string.
-
-In _quoted_ template expressions any literal string sequences within the
-template behave in a special way: literal newline sequences are not permitted
-and instead _escape sequences_ can be included, starting with the
-backslash `\`:
-
-```
- \n Unicode newline control character
- \r Unicode carriage return control character
- \t Unicode tab control character
- \" Literal quote mark, used to prevent interpretation as end of string
- \\ Literal backslash, used to prevent interpretation as escape sequence
- \uNNNN Unicode character from Basic Multilingual Plane (NNNN is four hexadecimal digits)
- \UNNNNNNNN Unicode character from supplementary planes (NNNNNNNN is eight hexadecimal digits)
-```
-
-The _heredoc_ template expression type is introduced by either `<<` or `<<-`,
-followed by an identifier. The template expression ends when the given
-identifier subsequently appears again on a line of its own.
-
-If a heredoc template is introduced with the `<<-` symbol, any literal string
-at the start of each line is analyzed to find the minimum number of leading
-spaces, and then that number of prefix spaces is removed from all line-leading
-literal strings. The final closing marker may also have an arbitrary number
-of spaces preceding it on its line.
-
-```ebnf
-TemplateExpr = quotedTemplate | heredocTemplate;
-quotedTemplate = (as defined in prose above);
-heredocTemplate = (
- ("<<" | "<<-") Identifier Newline
- (content as defined in prose above)
- Identifier Newline
-);
-```
-
-A quoted template expression containing only a single literal string serves
-as a syntax for defining literal string _expressions_. In certain contexts
-the template syntax is restricted in this manner:
-
-```ebnf
-StringLit = '"' (quoted literals as defined in prose above) '"';
-```
-
-The `StringLit` production permits the escape sequences discussed for quoted
-template expressions as above, but does _not_ permit template interpolation
-or directive sequences.
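To make the two template expression forms concrete, the following sketch parses a configuration containing a quoted template and an indented (`<<-`) heredoc and evaluates both. The attribute names are invented for the example, and the indentation handling follows the rule described above.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	src := []byte(`greeting = "hello\nworld"
body = <<-EOT
    Hello,
      World!
  EOT
`)

	file, diags := hclsyntax.ParseConfig(src, "demo.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		fmt.Println(diags)
		return
	}

	attrs, _ := file.Body.JustAttributes()

	greeting, _ := attrs["greeting"].Expr.Value(nil)
	fmt.Printf("%q\n", greeting.AsString()) // escape sequences are decoded

	body, _ := attrs["body"].Expr.Value(nil)
	// The common leading indentation is trimmed because the heredoc was
	// introduced with <<- rather than <<.
	fmt.Print(body.AsString())
}
```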
-
-### Variables and Variable Expressions
-
-A _variable_ is a value that has been assigned a symbolic name. Variables are
-made available for use in expressions by the calling application, by populating
-the _global scope_ used for expression evaluation.
-
-Variables can also be created by expressions themselves, which always creates
-a _child scope_ that incorporates the variables from its parent scope but
-(re-)defines zero or more names with new values.
-
-The value of a variable is accessed using a _variable expression_, which is
-a standalone `Identifier` whose name corresponds to a defined variable:
-
-```ebnf
-VariableExpr = Identifier;
-```
-
-Variables in a particular scope are immutable, but child scopes may _hide_
-a variable from an ancestor scope by defining a new variable of the same name.
-When looking up variables, the most locally-defined variable of the given name
-is used, and ancestor-scoped variables of the same name cannot be accessed.
-
-No direct syntax is provided for declaring or assigning variables, but other
-expression constructs implicitly create child scopes and define variables as
-part of their evaluation.
-
-### Functions and Function Calls
-
-A _function_ is an operation that has been assigned a symbolic name. Functions
-are made available for use in expressions by the calling application, by
-populating the _function table_ used for expression evaluation.
-
-The namespace of functions is distinct from the namespace of variables. A
-function and a variable may share the same name with no implication that they
-are in any way related.
-
-A function can be executed via a _function call_ expression:
-
-```ebnf
-FunctionCall = Identifier "(" Arguments ")";
-Arguments = (
-  () |
-  (Expression ("," Expression)* ("," | "...")?)
-);
-```
-
-The definition of functions and the semantics of calling them are defined by
-the language-agnostic HCL information model. The given arguments are mapped
-onto the function's _parameters_ and the result of a function call expression
-is the return value of the named function when given those arguments.
-
-If the final argument expression is followed by the ellipsis symbol (`...`),
-the final argument expression must evaluate to either a list or tuple value.
-The elements of the value are each mapped to a single parameter of the
-named function, beginning at the first parameter remaining after all other
-argument expressions have been mapped.
-
-Within the parentheses that delimit the function arguments, newline sequences
-are ignored as whitespace.
-
-### For Expressions
-
-A _for expression_ is a construct for constructing a collection by projecting
-the items from another collection.
-
-```ebnf
-ForExpr = forTupleExpr | forObjectExpr;
-forTupleExpr = "[" forIntro Expression forCond? "]";
-forObjectExpr = "{" forIntro Expression "=>" Expression "..."? forCond? "}";
-forIntro = "for" Identifier ("," Identifier)? "in" Expression ":";
-forCond = "if" Expression;
-```
-
-The punctuation used to delimit a for expression decides whether it will produce
-a tuple value (`[` and `]`) or an object value (`{` and `}`).
-
-The "introduction" is equivalent in both cases: the keyword `for` followed by
-either one or two identifiers separated by a comma which define the temporary
-variable names used for iteration, followed by the keyword `in` and then
-an expression that must evaluate to a value that can be iterated. The
-introduction is then terminated by the colon (`:`) symbol.
-
-If only one identifier is provided, it is the name of a variable that will
-be temporarily assigned the value of each element during iteration. If both
-are provided, the first is the key and the second is the value.
-
-Tuple, object, list, map, and set types are iterable. The type of collection
-used defines how the key and value variables are populated:
-
-- For tuple and list types, the _key_ is the zero-based index into the
- sequence for each element, and the _value_ is the element value. The
- elements are visited in index order.
-- For object and map types, the _key_ is the string attribute name or element
- key, and the _value_ is the attribute or element value. The elements are
- visited in the order defined by a lexicographic sort of the attribute names
- or keys.
-- For set types, the _key_ and _value_ are both the element value. The elements
- are visited in an undefined but consistent order.
-
-The expression after the colon and (in the case of object `for`) the expression
-after the `=>` are both evaluated once for each element of the source
-collection, in a local scope that defines the key and value variable names
-specified.
-
-The results of evaluating these expressions for each input element are used
-to populate an element in the new collection. In the case of tuple `for`, the
-single expression becomes an element, appending values to the tuple in visit
-order. In the case of object `for`, the pair of expressions is used as an
-attribute name and value respectively, creating an element in the resulting
-object.
-
-In the case of object `for`, it is an error if two input elements produce
-the same result from the attribute name expression, since duplicate
-attributes are not possible. If the ellipsis symbol (`...`) appears
-immediately after the value expression, this activates the grouping mode in
-which each value in the resulting object is a _tuple_ of all of the values
-that were produced against each distinct key.
-
-- `[for v in ["a", "b"]: v]` returns `["a", "b"]`.
-- `[for i, v in ["a", "b"]: i]` returns `[0, 1]`.
-- `{for i, v in ["a", "b"]: v => i}` returns `{a = 0, b = 1}`.
-- `{for i, v in ["a", "a", "b"]: v => i}` produces an error, because attribute
- `a` is defined twice.
-- `{for i, v in ["a", "a", "b"]: v => i...}` returns `{a = [0, 1], b = [2]}`.
-
-If the `if` keyword is used after the element expression(s), it applies an
-additional predicate that can be used to conditionally filter elements from
-the source collection from consideration. The expression following `if` is
-evaluated once for each source element, in the same scope used for the
-element expression(s). It must evaluate to a boolean value; if `true`, the
-element will be evaluated as normal, while if `false` the element will be
-skipped.
-
-- `[for i, v in ["a", "b", "c"]: v if i < 2]` returns `["a", "b"]`.
-
-If the collection value, element expression(s) or condition expression return
-unknown values that are otherwise type-valid, the result is a value of the
-dynamic pseudo-type.
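A short sketch of the filtering and grouping behaviors described above, evaluated with the `hclsyntax` expression parser. The source strings mirror the bullet examples; the file name is arbitrary.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func eval(src string) {
	expr, diags := hclsyntax.ParseExpression([]byte(src), "for.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		fmt.Println(diags)
		return
	}
	val, diags := expr.Value(&hcl.EvalContext{})
	if diags.HasErrors() {
		fmt.Println(diags)
		return
	}
	fmt.Println(val.GoString())
}

func main() {
	// Filtering with the optional "if" clause: keeps "a" and "b".
	eval(`[for i, v in ["a", "b", "c"]: v if i < 2]`)

	// Grouping mode: duplicate keys collect their values into tuples.
	eval(`{for i, v in ["a", "a", "b"]: v => i...}`)
}
```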
-
-### Index Operator
-
-The _index_ operator returns the value of a single element of a collection
-value. It is a postfix operator and can be applied to any value that has
-a tuple, object, map, or list type.
-
-```ebnf
-Index = "[" Expression "]";
-```
-
-The expression delimited by the brackets is the _key_ by which an element
-will be looked up.
-
-If the index operator is applied to a value of tuple or list type, the
-key expression must be a non-negative integer number representing the
-zero-based element index to access. If applied to a value of object or map
-type, the key expression must be a string representing the attribute name
-or element key. If the given key value is not of the appropriate type, a
-conversion is attempted using the conversion rules from the HCL
-syntax-agnostic information model.
-
-An error is produced if the given key expression does not correspond to
-an element in the collection, either because it is of an unconvertable type,
-because it is outside the range of elements for a tuple or list, or because
-the given attribute or key does not exist.
-
-If either the collection or the key are an unknown value of an
-otherwise-suitable type, the return value is an unknown value whose type
-matches what type would be returned given known values, or a value of the
-dynamic pseudo-type if type information alone cannot determine a suitable
-return type.
-
-Within the brackets that delimit the index key, newline sequences are ignored
-as whitespace.
-
-### Attribute Access Operator
-
-The _attribute access_ operator returns the value of a single attribute in
-an object value. It is a postfix operator and can be applied to any value
-that has an object type.
-
-```ebnf
-GetAttr = "." Identifier;
-```
-
-The given identifier is interpreted as the name of the attribute to access.
-An error is produced if the object to which the operator is applied does not
-have an attribute with the given name.
-
-If the object is an unknown value of a type that has the attribute named, the
-result is an unknown value of the attribute's type.
-
-### Splat Operators
-
-The _splat operators_ allow convenient access to attributes or elements of
-elements in a tuple, list, or set value.
-
-There are two kinds of "splat" operator:
-
-- The _attribute-only_ splat operator supports only attribute lookups into
- the elements from a list, but supports an arbitrary number of them.
-
-- The _full_ splat operator additionally supports indexing into the elements
- from a list, and allows any combination of attribute access and index
- operations.
-
-```ebnf
-Splat = attrSplat | fullSplat;
-attrSplat = "." "*" GetAttr*;
-fullSplat = "[" "*" "]" (GetAttr | Index)*;
-```
-
-The splat operators can be thought of as shorthands for common operations that
-could otherwise be performed using _for expressions_:
-
-- `tuple.*.foo.bar[0]` is approximately equivalent to
- `[for v in tuple: v.foo.bar][0]`.
-- `tuple[*].foo.bar[0]` is approximately equivalent to
- `[for v in tuple: v.foo.bar[0]]`
-
-Note the difference in how the trailing index operator is interpreted in
-each case. This different interpretation is the key difference between the
-_attribute-only_ and _full_ splat operators.
-
-Splat operators have one additional behavior compared to the equivalent
-_for expressions_ shown above: if a splat operator is applied to a value that
-is _not_ of tuple, list, or set type, the value is coerced automatically into
-a single-value list of the value type:
-
-- `any_object.*.id` is equivalent to `[any_object.id]`, assuming that `any_object`
- is a single object.
-- `any_number.*` is equivalent to `[any_number]`, assuming that `any_number`
- is a single number.
-
-If applied to a null value that is not tuple, list, or set, the result is always
-an empty tuple, which allows conveniently converting a possibly-null scalar
-value into a tuple of zero or one elements. It is illegal to apply a splat
-operator to a null value of tuple, list, or set type.
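The following sketch exercises both splat forms, including the single-value coercion just described. The variable names and values are invented for the example.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	ctx := &hcl.EvalContext{
		Variables: map[string]cty.Value{
			"tuple": cty.TupleVal([]cty.Value{
				cty.ObjectVal(map[string]cty.Value{"id": cty.StringVal("a")}),
				cty.ObjectVal(map[string]cty.Value{"id": cty.StringVal("b")}),
			}),
			"any_object": cty.ObjectVal(map[string]cty.Value{"id": cty.StringVal("solo")}),
		},
	}

	// Full splat over a tuple of objects: yields ["a", "b"].
	expr, _ := hclsyntax.ParseExpression([]byte(`tuple[*].id`), "splat.hcl", hcl.Pos{Line: 1, Column: 1})
	val, _ := expr.Value(ctx)
	fmt.Println(val.GoString())

	// Applied to a single object, the value is wrapped first: yields ["solo"].
	expr, _ = hclsyntax.ParseExpression([]byte(`any_object.*.id`), "splat.hcl", hcl.Pos{Line: 1, Column: 1})
	val, _ = expr.Value(ctx)
	fmt.Println(val.GoString())
}
```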
-
-### Operations
-
-Operations apply a particular operator to either one or two expression terms.
-
-```ebnf
-Operation = unaryOp | binaryOp;
-unaryOp = ("-" | "!") ExprTerm;
-binaryOp = ExprTerm binaryOperator ExprTerm;
-binaryOperator = compareOperator | arithmeticOperator | logicOperator;
-compareOperator = "==" | "!=" | "<" | ">" | "<=" | ">=";
-arithmeticOperator = "+" | "-" | "*" | "/" | "%";
-logicOperator = "&&" | "||" | "!";
-```
-
-The unary operators have the highest precedence.
-
-The binary operators are grouped into the following precedence levels:
-
-```
-Level Operators
- 6 * / %
- 5 + -
- 4 > >= < <=
- 3 == !=
- 2 &&
- 1 ||
-```
-
-Higher values of "level" bind tighter. Operators within the same precedence
-level have left-to-right associativity. For example, `x / y * z` is equivalent
-to `(x / y) * z`.
-
-### Comparison Operators
-
-Comparison operators always produce boolean values, as a result of testing
-the relationship between two values.
-
-The two equality operators apply to values of any type:
-
-```
-a == b equal
-a != b not equal
-```
-
-Two values are equal if they are of identical types and their values are
-equal as defined in the HCL syntax-agnostic information model. The equality
-operators are commutative and opposite, such that `(a == b) == !(a != b)`
-and `(a == b) == (b == a)` for all values `a` and `b`.
-
-The four numeric comparison operators apply only to numbers:
-
-```
-a < b less than
-a <= b less than or equal to
-a > b greater than
-a >= b greater than or equal to
-```
-
-If either operand of a comparison operator is a correctly-typed unknown value
-or a value of the dynamic pseudo-type, the result is an unknown boolean.
-
-### Arithmetic Operators
-
-Arithmetic operators apply only to number values and always produce number
-values as results.
-
-```
-a + b sum (addition)
-a - b difference (subtraction)
-a * b product (multiplication)
-a / b quotient (division)
-a % b remainder (modulo)
--a negation
-```
-
-Arithmetic operations are considered to be performed in an arbitrary-precision
-number space.
-
-If either operand of an arithmetic operator is an unknown number or a value
-of the dynamic pseudo-type, the result is an unknown number.
-
-### Logic Operators
-
-Logic operators apply only to boolean values and always produce boolean values
-as results.
-
-```
-a && b logical AND
-a || b logical OR
-!a logical NOT
-```
-
-If either operand of a logic operator is an unknown bool value or a value
-of the dynamic pseudo-type, the result is an unknown bool value.
-
-### Conditional Operator
-
-The conditional operator allows selecting from one of two expressions based on
-the outcome of a boolean expression.
-
-```ebnf
-Conditional = Expression "?" Expression ":" Expression;
-```
-
-The first expression is the _predicate_, which is evaluated and must produce
-a boolean result. If the predicate value is `true`, the result of the second
-expression is the result of the conditional. If the predicate value is
-`false`, the result of the third expression is the result of the conditional.
-
-The second and third expressions must be of the same type or must be able to
-unify into a common type using the type unification rules defined in the
-HCL syntax-agnostic information model. This unified type is the result type
-of the conditional, with both expressions converted as necessary to the
-unified type.
-
-If the predicate is an unknown boolean value or a value of the dynamic
-pseudo-type then the result is an unknown value of the unified type of the
-other two expressions.
-
-If either the second or third expressions produce errors when evaluated,
-these errors are passed through only if the erroneous expression is selected.
-This allows for expressions such as
-`length(some_list) > 0 ? some_list[0] : default` (given some suitable `length`
-function) without producing an error when the predicate is `false`.
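The sketch below evaluates one conditional twice, once with a known predicate variable and once with an unknown one, illustrating both the selection rule and the unknown-predicate rule above. The variable name and candidate strings are invented for the example.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	expr, _ := hclsyntax.ParseExpression(
		[]byte(`env == "prod" ? "m5.large" : "t3.micro"`),
		"cond.hcl", hcl.Pos{Line: 1, Column: 1},
	)

	// Known predicate: the second expression is selected.
	known := &hcl.EvalContext{Variables: map[string]cty.Value{"env": cty.StringVal("prod")}}
	v1, _ := expr.Value(known)
	fmt.Println(v1.AsString()) // m5.large

	// Unknown predicate: the result is an unknown value of the unified type.
	unknown := &hcl.EvalContext{Variables: map[string]cty.Value{"env": cty.UnknownVal(cty.String)}}
	v2, _ := expr.Value(unknown)
	fmt.Println(v2.IsKnown()) // false
}
```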
-
-## Templates
-
-The template sub-language is used within template expressions to concisely
-combine strings and other values to produce other strings. It can also be
-used in isolation as a standalone template language.
-
-```ebnf
-Template = (
- TemplateLiteral |
- TemplateInterpolation |
- TemplateDirective
-)*
-TemplateDirective = TemplateIf | TemplateFor;
-```
-
-A template behaves like an expression that always returns a string value.
-The different elements of the template are evaluated and combined into a
-single string to return. If any of the elements produce an unknown string
-or a value of the dynamic pseudo-type, the result is an unknown string.
-
-An important use-case for standalone templates is to enable the use of
-expressions in alternative HCL syntaxes where a native expression grammar is
-not available. For example, the HCL JSON profile treats the values of JSON
-strings as standalone templates when attributes are evaluated in expression
-mode.
-
-### Template Literals
-
-A template literal is a literal sequence of characters to include in the
-resulting string. When the template sub-language is used standalone, a
-template literal can contain any unicode character, with the exception
-of the sequences that introduce interpolations and directives, and for the
-sequences that escape those introductions.
-
-The interpolation and directive introductions are escaped by doubling their
-leading characters. The `${` sequence is escaped as `$${` and the `%{`
-sequence is escaped as `%%{`.
-
-When the template sub-language is embedded in the expression language via
-_template expressions_, additional constraints and transforms are applied to
-template literals as described in the definition of template expressions.
-
-The value of a template literal can be modified by _strip markers_ in any
-interpolations or directives that are adjacent to it. A strip marker is
-a tilde (`~`) placed immediately after the opening `{` or before the closing
-`}` of a template sequence:
-
-- `hello ${~ "world" }` produces `"helloworld"`.
-- `%{ if true ~} hello %{~ endif }` produces `"hello"`.
-
-When a strip marker is present, any spaces adjacent to it in the corresponding
-string literal (if any) are removed before producing the final value. Space
-characters are interpreted as per Unicode's definition.
-
-Stripping is done at syntax level rather than value level. Values returned
-by interpolations or directives are not subject to stripping:
-
-- `${"hello" ~}${" world"}` produces `"hello world"`, and not `"helloworld"`,
- because the space is not in a template literal directly adjacent to the
- strip marker.
-
-### Template Interpolations
-
-An _interpolation sequence_ evaluates an expression (written in the
-expression sub-language), converts the result to a string value, and
-replaces itself with the resulting string.
-
-```ebnf
-TemplateInterpolation = ("${" | "${~") Expression ("}" | "~}");
-```
-
-If the expression result cannot be converted to a string, an error is
-produced.
-
-### Template If Directive
-
-The template `if` directive is the template equivalent of the
-_conditional expression_, allowing selection of one of two sub-templates based
-on the value of a predicate expression.
-
-```ebnf
-TemplateIf = (
- ("%{" | "%{~") "if" Expression ("}" | "~}")
- Template
- (
- ("%{" | "%{~") "else" ("}" | "~}")
- Template
- )?
- ("%{" | "%{~") "endif" ("}" | "~}")
-);
-```
-
-The evaluation of the `if` directive is equivalent to the conditional
-expression, with the following exceptions:
-
-- The two sub-templates always produce strings, and thus the result value is
- also always a string.
-- The `else` clause may be omitted, in which case the conditional's third
- expression result is implied to be the empty string.
-
-### Template For Directive
-
-The template `for` directive is the template equivalent of the _for expression_,
-producing zero or more copies of its sub-template based on the elements of
-a collection.
-
-```ebnf
-TemplateFor = (
-  ("%{" | "%{~") "for" Identifier ("," Identifier)? "in" Expression ("}" | "~}")
- Template
- ("%{" | "%{~") "endfor" ("}" | "~}")
-);
-```
-
-The evaluation of the `for` directive is equivalent to the _for expression_
-when producing a tuple, with the following exceptions:
-
-- The sub-template always produces a string.
-- There is no equivalent of the "if" clause on the for expression.
-- The elements of the resulting tuple are all converted to strings and
- concatenated to produce a flat string result.
-
-### Template Interpolation Unwrapping
-
-As a special case, a template that consists only of a single interpolation,
-with no surrounding literals, directives or other interpolations, is
-"unwrapped". In this case, the result of the interpolation expression is
-returned verbatim, without conversion to string.
-
-This special case exists primarily to enable the native template language
-to be used inside strings in alternative HCL syntaxes that lack a first-class
-template or expression syntax. Unwrapping allows arbitrary expressions to be
-used to populate attributes when strings in such languages are interpreted
-as templates.
-
-- `${true}` produces the boolean value `true`
-- `${"${true}"}` produces the boolean value `true`, because both the inner
- and outer interpolations are subject to unwrapping.
-- `hello ${true}` produces the string `"hello true"`
-- `${""}${true}` produces the string `"true"` because there are two
- interpolation sequences, even though one produces an empty result.
-- `%{ for v in [true] }${v}%{ endif }` produces the string `true` because
- the presence of the `for` directive circumvents the unwrapping even though
- the final result is a single value.
-
-In some contexts this unwrapping behavior may be circumvented by the calling
-application, by converting the final template result to string. This is
-necessary, for example, if a standalone template is being used to produce
-the direct contents of a file, since the result in that case must always be a
-string.
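The unwrapping rule can be observed directly through the standalone template parser, as in the sketch below; the template sources match the bullet examples and the file names are arbitrary.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// A template that is exactly one interpolation is unwrapped, so the
	// boolean passes through without conversion to string.
	lone, _ := hclsyntax.ParseTemplate([]byte(`${true}`), "t1.tmpl", hcl.Pos{Line: 1, Column: 1})
	v1, _ := lone.Value(nil)
	fmt.Println(v1.Type().FriendlyName()) // bool

	// Any surrounding literal text forces the usual string result.
	mixed, _ := hclsyntax.ParseTemplate([]byte(`hello ${true}`), "t2.tmpl", hcl.Pos{Line: 1, Column: 1})
	v2, _ := mixed.Value(nil)
	fmt.Println(v2.AsString()) // hello true
}
```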
-
-## Static Analysis
-
-The HCL static analysis operations are implemented for some expression types
-in the native syntax, as described in the following sections.
-
-A goal for static analysis of the native syntax is for the interpretation to
-be as consistent as possible with the dynamic evaluation interpretation of
-the given expression, though some deviations are intentionally made in order
-to maximize the potential for analysis.
-
-### Static List
-
-The tuple construction syntax can be interpreted as a static list. All of
-the expression elements given are returned as the static list elements,
-with no further interpretation.
-
-### Static Map
-
-The object construction syntax can be interpreted as a static map. All of the
-key/value pairs given are returned as the static pairs, with no further
-interpretation.
-
-The usual requirement that an attribute name be interpretable as a string
-does not apply to this static analysis, allowing callers to provide map-like
-constructs with different key types by building on the map syntax.
-
-### Static Call
-
-The function call syntax can be interpreted as a static call. The called
-function name is returned verbatim and the given argument expressions are
-returned as the static arguments, with no further interpretation.
-
-### Static Traversal
-
-A variable expression and any attached attribute access operations and
-constant index operations can be interpreted as a static traversal.
-
-The keywords `true`, `false` and `null` can also be interpreted as
-static traversals, behaving as if they were references to variables of those
-names, to allow callers to redefine the meaning of those keywords in certain
-contexts.
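As a sketch of how these static analyses are reached from application code, the example below uses the companion `hcl` package's `ExprCall` and `AbsTraversalForExpr` helpers on parsed native-syntax expressions; the function name and traversal shown are invented for the example.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// Static call: the function name and argument expressions are
	// recovered without evaluating anything.
	callExpr, _ := hclsyntax.ParseExpression([]byte(`min(1, 2, 3)`), "call.hcl", hcl.Pos{Line: 1, Column: 1})
	if call, diags := hcl.ExprCall(callExpr); !diags.HasErrors() {
		fmt.Println(call.Name, len(call.Arguments)) // min 3
	}

	// Static traversal: a variable reference with attribute accesses.
	refExpr, _ := hclsyntax.ParseExpression([]byte(`aws_instance.web.id`), "ref.hcl", hcl.Pos{Line: 1, Column: 1})
	if trav, diags := hcl.AbsTraversalForExpr(refExpr); !diags.HasErrors() {
		fmt.Println(trav.RootName(), len(trav)) // aws_instance 3
	}
}
```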
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go
deleted file mode 100644
index 476025d1b..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure.go
+++ /dev/null
@@ -1,394 +0,0 @@
-package hclsyntax
-
-import (
- "fmt"
- "strings"
-
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// AsHCLBlock returns the block data expressed as a *hcl.Block.
-func (b *Block) AsHCLBlock() *hcl.Block {
- if b == nil {
- return nil
- }
-
- lastHeaderRange := b.TypeRange
- if len(b.LabelRanges) > 0 {
- lastHeaderRange = b.LabelRanges[len(b.LabelRanges)-1]
- }
-
- return &hcl.Block{
- Type: b.Type,
- Labels: b.Labels,
- Body: b.Body,
-
- DefRange: hcl.RangeBetween(b.TypeRange, lastHeaderRange),
- TypeRange: b.TypeRange,
- LabelRanges: b.LabelRanges,
- }
-}
-
-// Body is the implementation of hcl.Body for the HCL native syntax.
-type Body struct {
- Attributes Attributes
- Blocks Blocks
-
- // These are used with PartialContent to produce a "remaining items"
- // body to return. They are nil on all bodies fresh out of the parser.
- hiddenAttrs map[string]struct{}
- hiddenBlocks map[string]struct{}
-
- SrcRange hcl.Range
- EndRange hcl.Range // Final token of the body, for reporting missing items
-}
-
-// Assert that *Body implements hcl.Body
-var assertBodyImplBody hcl.Body = &Body{}
-
-func (b *Body) walkChildNodes(w internalWalkFunc) {
- w(b.Attributes)
- w(b.Blocks)
-}
-
-func (b *Body) Range() hcl.Range {
- return b.SrcRange
-}
-
-func (b *Body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
- content, remainHCL, diags := b.PartialContent(schema)
-
-	// Now we'll see if anything actually remains, to produce errors about
- // extraneous items.
- remain := remainHCL.(*Body)
-
- for name, attr := range b.Attributes {
- if _, hidden := remain.hiddenAttrs[name]; !hidden {
- var suggestions []string
- for _, attrS := range schema.Attributes {
- if _, defined := content.Attributes[attrS.Name]; defined {
- continue
- }
- suggestions = append(suggestions, attrS.Name)
- }
- suggestion := nameSuggestion(name, suggestions)
- if suggestion != "" {
- suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
- } else {
- // Is there a block of the same name?
- for _, blockS := range schema.Blocks {
- if blockS.Type == name {
- suggestion = fmt.Sprintf(" Did you mean to define a block of type %q?", name)
- break
- }
- }
- }
-
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unsupported argument",
- Detail: fmt.Sprintf("An argument named %q is not expected here.%s", name, suggestion),
- Subject: &attr.NameRange,
- })
- }
- }
-
- for _, block := range b.Blocks {
- blockTy := block.Type
- if _, hidden := remain.hiddenBlocks[blockTy]; !hidden {
- var suggestions []string
- for _, blockS := range schema.Blocks {
- suggestions = append(suggestions, blockS.Type)
- }
- suggestion := nameSuggestion(blockTy, suggestions)
- if suggestion != "" {
- suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
- } else {
- // Is there an attribute of the same name?
- for _, attrS := range schema.Attributes {
- if attrS.Name == blockTy {
- suggestion = fmt.Sprintf(" Did you mean to define argument %q? If so, use the equals sign to assign it a value.", blockTy)
- break
- }
- }
- }
-
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unsupported block type",
- Detail: fmt.Sprintf("Blocks of type %q are not expected here.%s", blockTy, suggestion),
- Subject: &block.TypeRange,
- })
- }
- }
-
- return content, diags
-}
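A minimal sketch of schema-driven decoding through the Content method above, including the "Did you mean" suggestion path. The schema, attribute names, and the deliberate typo are all invented for the example.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/hclsyntax"
)

func main() {
	// "regoin" is a deliberate typo so the suggestion logic above kicks in.
	src := []byte(`
regoin = "eu-west-2"

network "main" {
}
`)

	file, diags := hclsyntax.ParseConfig(src, "demo.hcl", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		fmt.Println(diags)
		return
	}

	schema := &hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{
			{Name: "region", Required: true},
		},
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "network", LabelNames: []string{"name"}},
		},
	}

	content, diags := file.Body.Content(schema)
	for _, diag := range diags {
		// e.g. an "Unsupported argument" error suggesting "region", plus a
		// "Missing required argument" error for the absent region attribute.
		fmt.Println(diag.Error())
	}
	fmt.Println("network blocks:", len(content.Blocks))
}
```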
-
-func (b *Body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
- attrs := make(hcl.Attributes)
- var blocks hcl.Blocks
- var diags hcl.Diagnostics
- hiddenAttrs := make(map[string]struct{})
- hiddenBlocks := make(map[string]struct{})
-
- if b.hiddenAttrs != nil {
- for k, v := range b.hiddenAttrs {
- hiddenAttrs[k] = v
- }
- }
- if b.hiddenBlocks != nil {
- for k, v := range b.hiddenBlocks {
- hiddenBlocks[k] = v
- }
- }
-
- for _, attrS := range schema.Attributes {
- name := attrS.Name
- attr, exists := b.Attributes[name]
- _, hidden := hiddenAttrs[name]
- if hidden || !exists {
- if attrS.Required {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing required argument",
- Detail: fmt.Sprintf("The argument %q is required, but no definition was found.", attrS.Name),
- Subject: b.MissingItemRange().Ptr(),
- })
- }
- continue
- }
-
- hiddenAttrs[name] = struct{}{}
- attrs[name] = attr.AsHCLAttribute()
- }
-
- blocksWanted := make(map[string]hcl.BlockHeaderSchema)
- for _, blockS := range schema.Blocks {
- blocksWanted[blockS.Type] = blockS
- }
-
- for _, block := range b.Blocks {
- if _, hidden := hiddenBlocks[block.Type]; hidden {
- continue
- }
- blockS, wanted := blocksWanted[block.Type]
- if !wanted {
- continue
- }
-
- if len(block.Labels) > len(blockS.LabelNames) {
- name := block.Type
- if len(blockS.LabelNames) == 0 {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Extraneous label for %s", name),
- Detail: fmt.Sprintf(
- "No labels are expected for %s blocks.", name,
- ),
- Subject: block.LabelRanges[0].Ptr(),
- Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(),
- })
- } else {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Extraneous label for %s", name),
- Detail: fmt.Sprintf(
- "Only %d labels (%s) are expected for %s blocks.",
- len(blockS.LabelNames), strings.Join(blockS.LabelNames, ", "), name,
- ),
- Subject: block.LabelRanges[len(blockS.LabelNames)].Ptr(),
- Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(),
- })
- }
- continue
- }
-
- if len(block.Labels) < len(blockS.LabelNames) {
- name := block.Type
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Missing %s for %s", blockS.LabelNames[len(block.Labels)], name),
- Detail: fmt.Sprintf(
- "All %s blocks must have %d labels (%s).",
- name, len(blockS.LabelNames), strings.Join(blockS.LabelNames, ", "),
- ),
- Subject: &block.OpenBraceRange,
- Context: hcl.RangeBetween(block.TypeRange, block.OpenBraceRange).Ptr(),
- })
- continue
- }
-
- blocks = append(blocks, block.AsHCLBlock())
- }
-
- // We hide blocks only after we've processed all of them, since otherwise
- // we can't process more than one of the same type.
- for _, blockS := range schema.Blocks {
- hiddenBlocks[blockS.Type] = struct{}{}
- }
-
- remain := &Body{
- Attributes: b.Attributes,
- Blocks: b.Blocks,
-
- hiddenAttrs: hiddenAttrs,
- hiddenBlocks: hiddenBlocks,
-
- SrcRange: b.SrcRange,
- EndRange: b.EndRange,
- }
-
- return &hcl.BodyContent{
- Attributes: attrs,
- Blocks: blocks,
-
- MissingItemRange: b.MissingItemRange(),
- }, remain, diags
-}
-
-func (b *Body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
- attrs := make(hcl.Attributes)
- var diags hcl.Diagnostics
-
- if len(b.Blocks) > 0 {
- example := b.Blocks[0]
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: fmt.Sprintf("Unexpected %q block", example.Type),
- Detail: "Blocks are not allowed here.",
- Subject: &example.TypeRange,
- })
- // we will continue processing anyway, and return the attributes
- // we are able to find so that certain analyses can still be done
- // in the face of errors.
- }
-
- if b.Attributes == nil {
- return attrs, diags
- }
-
- for name, attr := range b.Attributes {
- if _, hidden := b.hiddenAttrs[name]; hidden {
- continue
- }
- attrs[name] = attr.AsHCLAttribute()
- }
-
- return attrs, diags
-}
-
-func (b *Body) MissingItemRange() hcl.Range {
- return hcl.Range{
- Filename: b.SrcRange.Filename,
- Start: b.SrcRange.Start,
- End: b.SrcRange.Start,
- }
-}
-
-// Attributes is the collection of attribute definitions within a body.
-type Attributes map[string]*Attribute
-
-func (a Attributes) walkChildNodes(w internalWalkFunc) {
- for _, attr := range a {
- w(attr)
- }
-}
-
-// Range returns the range of some arbitrary point within the set of
-// attributes, or an invalid range if there are no attributes.
-//
-// This is provided only to complete the Node interface, but has no practical
-// use.
-func (a Attributes) Range() hcl.Range {
-	// An Attributes collection doesn't really have a useful range to report, since
- // it's just a grouping construct. So we'll arbitrarily take the
- // range of one of the attributes, or produce an invalid range if we have
- // none. In practice, there's little reason to ask for the range of
- // an Attributes.
- for _, attr := range a {
- return attr.Range()
- }
- return hcl.Range{
- Filename: "",
- }
-}
-
-// Attribute represents a single attribute definition within a body.
-type Attribute struct {
- Name string
- Expr Expression
-
- SrcRange hcl.Range
- NameRange hcl.Range
- EqualsRange hcl.Range
-}
-
-func (a *Attribute) walkChildNodes(w internalWalkFunc) {
- w(a.Expr)
-}
-
-func (a *Attribute) Range() hcl.Range {
- return a.SrcRange
-}
-
-// AsHCLAttribute returns the block data expressed as a *hcl.Attribute.
-func (a *Attribute) AsHCLAttribute() *hcl.Attribute {
- if a == nil {
- return nil
- }
- return &hcl.Attribute{
- Name: a.Name,
- Expr: a.Expr,
-
- Range: a.SrcRange,
- NameRange: a.NameRange,
- }
-}
-
-// Blocks is the list of nested blocks within a body.
-type Blocks []*Block
-
-func (bs Blocks) walkChildNodes(w internalWalkFunc) {
- for _, block := range bs {
- w(block)
- }
-}
-
-// Range returns the range of some arbitrary point within the list of
-// blocks, or an invalid range if there are no blocks.
-//
-// This is provided only to complete the Node interface, but has no practical
-// use.
-func (bs Blocks) Range() hcl.Range {
- if len(bs) > 0 {
- return bs[0].Range()
- }
- return hcl.Range{
- Filename: "",
- }
-}
-
-// Block represents a nested block structure
-type Block struct {
- Type string
- Labels []string
- Body *Body
-
- TypeRange hcl.Range
- LabelRanges []hcl.Range
- OpenBraceRange hcl.Range
- CloseBraceRange hcl.Range
-}
-
-func (b *Block) walkChildNodes(w internalWalkFunc) {
- w(b.Body)
-}
-
-func (b *Block) Range() hcl.Range {
- return hcl.RangeBetween(b.TypeRange, b.CloseBraceRange)
-}
-
-func (b *Block) DefRange() hcl.Range {
- return hcl.RangeBetween(b.TypeRange, b.OpenBraceRange)
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure_at_pos.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure_at_pos.go
deleted file mode 100644
index d8f023ba0..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/structure_at_pos.go
+++ /dev/null
@@ -1,118 +0,0 @@
-package hclsyntax
-
-import (
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// -----------------------------------------------------------------------------
-// The methods in this file are all optional extension methods that serve to
-// implement the methods of the same name on *hcl.File when its root body
-// is provided by this package.
-// -----------------------------------------------------------------------------
-
-// BlocksAtPos implements the method of the same name for an *hcl.File that
-// is backed by a *Body.
-func (b *Body) BlocksAtPos(pos hcl.Pos) []*hcl.Block {
- list, _ := b.blocksAtPos(pos, true)
- return list
-}
-
-// InnermostBlockAtPos implements the method of the same name for an *hcl.File
-// that is backed by a *Body.
-func (b *Body) InnermostBlockAtPos(pos hcl.Pos) *hcl.Block {
- _, innermost := b.blocksAtPos(pos, false)
- return innermost.AsHCLBlock()
-}
-
-// OutermostBlockAtPos implements the method of the same name for an *hcl.File
-// that is backed by a *Body.
-func (b *Body) OutermostBlockAtPos(pos hcl.Pos) *hcl.Block {
- return b.outermostBlockAtPos(pos).AsHCLBlock()
-}
-
-// blocksAtPos is the internal engine of both BlocksAtPos and
-// InnermostBlockAtPos, which both need to do the same logic but return a
-// differently-shaped result.
-//
-// list is nil if makeList is false, avoiding an allocation. Innermost is
-// always set, and if the returned list is non-nil it will always match the
-// final element from that list.
-func (b *Body) blocksAtPos(pos hcl.Pos, makeList bool) (list []*hcl.Block, innermost *Block) {
- current := b
-
-Blocks:
- for current != nil {
- for _, block := range current.Blocks {
- wholeRange := hcl.RangeBetween(block.TypeRange, block.CloseBraceRange)
- if wholeRange.ContainsPos(pos) {
- innermost = block
- if makeList {
- list = append(list, innermost.AsHCLBlock())
- }
- current = block.Body
- continue Blocks
- }
- }
-
- // If we fall out here then none of the current body's nested blocks
- // contain the position we are looking for, and so we're done.
- break
- }
-
- return
-}
-
-// outermostBlockAtPos is the internal version of OutermostBlockAtPos that
-// returns a hclsyntax.Block rather than an hcl.Block, allowing for further
-// analysis if necessary.
-func (b *Body) outermostBlockAtPos(pos hcl.Pos) *Block {
- // This is similar to blocksAtPos, but simpler because we know it only
- // ever needs to search the first level of nested blocks.
-
- for _, block := range b.Blocks {
- wholeRange := hcl.RangeBetween(block.TypeRange, block.CloseBraceRange)
- if wholeRange.ContainsPos(pos) {
- return block
- }
- }
-
- return nil
-}
-
-// AttributeAtPos implements the method of the same name for an *hcl.File
-// that is backed by a *Body.
-func (b *Body) AttributeAtPos(pos hcl.Pos) *hcl.Attribute {
- return b.attributeAtPos(pos).AsHCLAttribute()
-}
-
-// attributeAtPos is the internal version of AttributeAtPos that returns a
-// hclsyntax.Attribute rather than an hcl.Attribute, allowing for further analysis if
-// necessary.
-func (b *Body) attributeAtPos(pos hcl.Pos) *Attribute {
- searchBody := b
- _, block := b.blocksAtPos(pos, false)
- if block != nil {
- searchBody = block.Body
- }
-
- for _, attr := range searchBody.Attributes {
- if attr.SrcRange.ContainsPos(pos) {
- return attr
- }
- }
-
- return nil
-}
-
-// OutermostExprAtPos implements the method of the same name for an *hcl.File
-// that is backed by a *Body.
-func (b *Body) OutermostExprAtPos(pos hcl.Pos) hcl.Expression {
- attr := b.attributeAtPos(pos)
- if attr == nil {
- return nil
- }
- if !attr.Expr.Range().ContainsPos(pos) {
- return nil
- }
- return attr.Expr
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go b/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go
deleted file mode 100644
index 3d898fd73..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/hclsyntax/token.go
+++ /dev/null
@@ -1,320 +0,0 @@
-package hclsyntax
-
-import (
- "bytes"
- "fmt"
-
- "github.com/apparentlymart/go-textseg/textseg"
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// Token represents a sequence of bytes from some HCL code that has been
-// tagged with a type and its range within the source file.
-type Token struct {
- Type TokenType
- Bytes []byte
- Range hcl.Range
-}
-
-// Tokens is a slice of Token.
-type Tokens []Token
-
-// TokenType is an enumeration used for the Type field on Token.
-type TokenType rune
-
-const (
- // Single-character tokens are represented by their own character, for
- // convenience in producing these within the scanner. However, the values
- // are otherwise arbitrary and just intended to be mnemonic for humans
- // who might see them in debug output.
-
- TokenOBrace TokenType = '{'
- TokenCBrace TokenType = '}'
- TokenOBrack TokenType = '['
- TokenCBrack TokenType = ']'
- TokenOParen TokenType = '('
- TokenCParen TokenType = ')'
- TokenOQuote TokenType = '«'
- TokenCQuote TokenType = '»'
- TokenOHeredoc TokenType = 'H'
- TokenCHeredoc TokenType = 'h'
-
- TokenStar TokenType = '*'
- TokenSlash TokenType = '/'
- TokenPlus TokenType = '+'
- TokenMinus TokenType = '-'
- TokenPercent TokenType = '%'
-
- TokenEqual TokenType = '='
- TokenEqualOp TokenType = '≔'
- TokenNotEqual TokenType = '≠'
- TokenLessThan TokenType = '<'
- TokenLessThanEq TokenType = '≤'
- TokenGreaterThan TokenType = '>'
- TokenGreaterThanEq TokenType = '≥'
-
- TokenAnd TokenType = '∧'
- TokenOr TokenType = '∨'
- TokenBang TokenType = '!'
-
- TokenDot TokenType = '.'
- TokenComma TokenType = ','
-
- TokenEllipsis TokenType = '…'
- TokenFatArrow TokenType = '⇒'
-
- TokenQuestion TokenType = '?'
- TokenColon TokenType = ':'
-
- TokenTemplateInterp TokenType = '∫'
- TokenTemplateControl TokenType = 'λ'
- TokenTemplateSeqEnd TokenType = '∎'
-
- TokenQuotedLit TokenType = 'Q' // might contain backslash escapes
- TokenStringLit TokenType = 'S' // cannot contain backslash escapes
- TokenNumberLit TokenType = 'N'
- TokenIdent TokenType = 'I'
-
- TokenComment TokenType = 'C'
-
- TokenNewline TokenType = '\n'
- TokenEOF TokenType = '␄'
-
- // The rest are not used in the language but recognized by the scanner so
- // we can generate good diagnostics in the parser when users try to write
- // things that might work in other languages they are familiar with, or
- // simply make incorrect assumptions about the HCL language.
-
- TokenBitwiseAnd TokenType = '&'
- TokenBitwiseOr TokenType = '|'
- TokenBitwiseNot TokenType = '~'
- TokenBitwiseXor TokenType = '^'
- TokenStarStar TokenType = '➚'
- TokenApostrophe TokenType = '\''
- TokenBacktick TokenType = '`'
- TokenSemicolon TokenType = ';'
- TokenTabs TokenType = '␉'
- TokenInvalid TokenType = '�'
- TokenBadUTF8 TokenType = '💩'
- TokenQuotedNewline TokenType = '␤'
-
- // TokenNil is a placeholder for when a token is required but none is
- // available, e.g. when reporting errors. The scanner will never produce
- // this as part of a token stream.
- TokenNil TokenType = '\x00'
-)
-
-func (t TokenType) GoString() string {
- return fmt.Sprintf("hclsyntax.%s", t.String())
-}
-
-type scanMode int
-
-const (
- scanNormal scanMode = iota
- scanTemplate
- scanIdentOnly
-)
-
-type tokenAccum struct {
- Filename string
- Bytes []byte
- Pos hcl.Pos
- Tokens []Token
- StartByte int
-}
-
-func (f *tokenAccum) emitToken(ty TokenType, startOfs, endOfs int) {
- // Walk through our buffer to figure out how much we need to adjust
- // the start pos to get our end pos.
-
- start := f.Pos
- start.Column += startOfs + f.StartByte - f.Pos.Byte // Safe because only ASCII spaces can be in the offset
- start.Byte = startOfs + f.StartByte
-
- end := start
- end.Byte = endOfs + f.StartByte
- b := f.Bytes[startOfs:endOfs]
- for len(b) > 0 {
- advance, seq, _ := textseg.ScanGraphemeClusters(b, true)
- if (len(seq) == 1 && seq[0] == '\n') || (len(seq) == 2 && seq[0] == '\r' && seq[1] == '\n') {
- end.Line++
- end.Column = 1
- } else {
- end.Column++
- }
- b = b[advance:]
- }
-
- f.Pos = end
-
- f.Tokens = append(f.Tokens, Token{
- Type: ty,
- Bytes: f.Bytes[startOfs:endOfs],
- Range: hcl.Range{
- Filename: f.Filename,
- Start: start,
- End: end,
- },
- })
-}
-
-type heredocInProgress struct {
- Marker []byte
- StartOfLine bool
-}
-
-func tokenOpensFlushHeredoc(tok Token) bool {
- if tok.Type != TokenOHeredoc {
- return false
- }
- return bytes.HasPrefix(tok.Bytes, []byte{'<', '<', '-'})
-}
-
-// checkInvalidTokens does a simple pass across the given tokens and generates
-// diagnostics for tokens that should _never_ appear in HCL source. This
-// is intended to avoid the need for the parser to have special support
-// for them all over.
-//
-// Returns diagnostics with no errors if everything seems acceptable.
-// Otherwise, returns one or more error diagnostics, though it tries to limit
-// repetition of the same information.
-func checkInvalidTokens(tokens Tokens) hcl.Diagnostics {
- var diags hcl.Diagnostics
-
- toldBitwise := 0
- toldExponent := 0
- toldBacktick := 0
- toldApostrophe := 0
- toldSemicolon := 0
- toldTabs := 0
- toldBadUTF8 := 0
-
- for _, tok := range tokens {
- // copy token so it's safe to point to it
- tok := tok
-
- switch tok.Type {
- case TokenBitwiseAnd, TokenBitwiseOr, TokenBitwiseXor, TokenBitwiseNot:
- if toldBitwise < 4 {
- var suggestion string
- switch tok.Type {
- case TokenBitwiseAnd:
- suggestion = " Did you mean boolean AND (\"&&\")?"
- case TokenBitwiseOr:
- suggestion = " Did you mean boolean OR (\"&&\")?"
- case TokenBitwiseNot:
- suggestion = " Did you mean boolean NOT (\"!\")?"
- }
-
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unsupported operator",
- Detail: fmt.Sprintf("Bitwise operators are not supported.%s", suggestion),
- Subject: &tok.Range,
- })
- toldBitwise++
- }
- case TokenStarStar:
- if toldExponent < 1 {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unsupported operator",
- Detail: "\"**\" is not a supported operator. Exponentiation is not supported as an operator.",
- Subject: &tok.Range,
- })
-
- toldExponent++
- }
- case TokenBacktick:
- // Only report for alternating (even) backticks, so we won't report both start and ends of the same
- // backtick-quoted string.
- if (toldBacktick % 2) == 0 {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid character",
- Detail: "The \"`\" character is not valid. To create a multi-line string, use the \"heredoc\" syntax, like \"<
-#
-# This script uses the unicode spec to generate a Ragel state machine
-# that recognizes unicode alphanumeric characters. It generates 5
-# character classes: uupper, ulower, ualpha, udigit, and ualnum.
-# Currently supported encodings are UTF-8 [default] and UCS-4.
-#
-# Usage: unicode2ragel.rb [options]
-# -e, --encoding [ucs4 | utf8] Data encoding
-# -h, --help Show this message
-#
-# This script was originally written as part of the Ferret search
-# engine library.
-#
-# Author: Rakan El-Khalil
-
-require 'optparse'
-require 'open-uri'
-
-ENCODINGS = [ :utf8, :ucs4 ]
-ALPHTYPES = { :utf8 => "byte", :ucs4 => "rune" }
-DEFAULT_CHART_URL = "http://www.unicode.org/Public/5.1.0/ucd/DerivedCoreProperties.txt"
-DEFAULT_MACHINE_NAME= "WChar"
-
-###
-# Display vars & default option
-
-TOTAL_WIDTH = 80
-RANGE_WIDTH = 23
-@encoding = :utf8
-@chart_url = DEFAULT_CHART_URL
-machine_name = DEFAULT_MACHINE_NAME
-properties = []
-@output = $stdout
-
-###
-# Option parsing
-
-cli_opts = OptionParser.new do |opts|
- opts.on("-e", "--encoding [ucs4 | utf8]", "Data encoding") do |o|
- @encoding = o.downcase.to_sym
- end
- opts.on("-h", "--help", "Show this message") do
- puts opts
- exit
- end
- opts.on("-u", "--url URL", "URL to process") do |o|
- @chart_url = o
- end
- opts.on("-m", "--machine MACHINE_NAME", "Machine name") do |o|
- machine_name = o
- end
- opts.on("-p", "--properties x,y,z", Array, "Properties to add to machine") do |o|
- properties = o
- end
- opts.on("-o", "--output FILE", "output file") do |o|
- @output = File.new(o, "w+")
- end
-end
-
-cli_opts.parse(ARGV)
-unless ENCODINGS.member? @encoding
- puts "Invalid encoding: #{@encoding}"
- puts cli_opts
- exit
-end
-
-##
-# Downloads the document at url and yields every alpha line's hex
-# range and description.
-
-def each_alpha( url, property )
- open( url ) do |file|
- file.each_line do |line|
- next if line =~ /^#/;
- next if line !~ /; #{property} #/;
-
- range, description = line.split(/;/)
- range.strip!
- description.gsub!(/.*#/, '').strip!
-
- if range =~ /\.\./
- start, stop = range.split '..'
- else start = stop = range
- end
-
- yield start.hex .. stop.hex, description
- end
- end
-end
-
-###
-# Formats to hex at minimum width
-
-def to_hex( n )
- r = "%0X" % n
- r = "0#{r}" unless (r.length % 2).zero?
- r
-end
-
-###
-# UCS4 is just a straight hex conversion of the unicode codepoint.
-
-def to_ucs4( range )
- rangestr = "0x" + to_hex(range.begin)
- rangestr << "..0x" + to_hex(range.end) if range.begin != range.end
- [ rangestr ]
-end
-
-##
-# 0x00 - 0x7f -> 0zzzzzzz[7]
-# 0x80 - 0x7ff -> 110yyyyy[5] 10zzzzzz[6]
-# 0x800 - 0xffff -> 1110xxxx[4] 10yyyyyy[6] 10zzzzzz[6]
-# 0x010000 - 0x10ffff -> 11110www[3] 10xxxxxx[6] 10yyyyyy[6] 10zzzzzz[6]
-
-UTF8_BOUNDARIES = [0x7f, 0x7ff, 0xffff, 0x10ffff]
-
-def to_utf8_enc( n )
- r = 0
- if n <= 0x7f
- r = n
- elsif n <= 0x7ff
- y = 0xc0 | (n >> 6)
- z = 0x80 | (n & 0x3f)
- r = y << 8 | z
- elsif n <= 0xffff
- x = 0xe0 | (n >> 12)
- y = 0x80 | (n >> 6) & 0x3f
- z = 0x80 | n & 0x3f
- r = x << 16 | y << 8 | z
- elsif n <= 0x10ffff
- w = 0xf0 | (n >> 18)
- x = 0x80 | (n >> 12) & 0x3f
- y = 0x80 | (n >> 6) & 0x3f
- z = 0x80 | n & 0x3f
- r = w << 24 | x << 16 | y << 8 | z
- end
-
- to_hex(r)
-end
-
-def from_utf8_enc( n )
- n = n.hex
- r = 0
- if n <= 0x7f
- r = n
- elsif n <= 0xdfff
- y = (n >> 8) & 0x1f
- z = n & 0x3f
- r = y << 6 | z
- elsif n <= 0xefffff
- x = (n >> 16) & 0x0f
- y = (n >> 8) & 0x3f
- z = n & 0x3f
- r = x << 10 | y << 6 | z
- elsif n <= 0xf7ffffff
- w = (n >> 24) & 0x07
- x = (n >> 16) & 0x3f
- y = (n >> 8) & 0x3f
- z = n & 0x3f
- r = w << 18 | x << 12 | y << 6 | z
- end
- r
-end
-
-###
-# Given a range, splits it up into ranges that can be continuously
-# encoded into utf8. Eg: 0x00 .. 0xff => [0x00..0x7f, 0x80..0xff]
-# This is not strictly needed since the current [5.1] unicode standard
-# doesn't have ranges that straddle utf8 boundaries. This is included
-# for completeness as there is no telling if that will ever change.
-
-def utf8_ranges( range )
- ranges = []
- UTF8_BOUNDARIES.each do |max|
- if range.begin <= max
- if range.end <= max
- ranges << range
- return ranges
- end
-
- ranges << (range.begin .. max)
- range = (max + 1) .. range.end
- end
- end
- ranges
-end
-
-def build_range( start, stop )
- size = start.size/2
- left = size - 1
- return [""] if size < 1
-
- a = start[0..1]
- b = stop[0..1]
-
- ###
- # Shared prefix
-
- if a == b
- return build_range(start[2..-1], stop[2..-1]).map do |elt|
- "0x#{a} " + elt
- end
- end
-
- ###
- # Unshared prefix, end of run
-
- return ["0x#{a}..0x#{b} "] if left.zero?
-
- ###
- # Unshared prefix, not end of run
- # Range can be 0x123456..0x56789A
- # Which is equivalent to:
- # 0x123456 .. 0x12FFFF
- # 0x130000 .. 0x55FFFF
- # 0x560000 .. 0x56789A
-
- ret = []
- ret << build_range(start, a + "FF" * left)
-
- ###
- # Only generate middle range if need be.
-
- if a.hex+1 != b.hex
- max = to_hex(b.hex - 1)
- max = "FF" if b == "FF"
- ret << "0x#{to_hex(a.hex+1)}..0x#{max} " + "0x00..0xFF " * left
- end
-
- ###
- # Don't generate last range if it is covered by first range
-
- ret << build_range(b + "00" * left, stop) unless b == "FF"
- ret.flatten!
-end
-
-def to_utf8( range )
- utf8_ranges( range ).map do |r|
- begin_enc = to_utf8_enc(r.begin)
- end_enc = to_utf8_enc(r.end)
- build_range begin_enc, end_enc
- end.flatten!
-end
-
-##
-# Perform a 3-way comparison of the number of codepoints advertised by
-# the unicode spec for the given range, the originally parsed range,
-# and the resulting utf8 encoded range.
-
-def count_codepoints( code )
- code.split(' ').inject(1) do |acc, elt|
- if elt =~ /0x(.+)\.\.0x(.+)/
- if @encoding == :utf8
- acc * (from_utf8_enc($2) - from_utf8_enc($1) + 1)
- else
- acc * ($2.hex - $1.hex + 1)
- end
- else
- acc
- end
- end
-end
-
-def is_valid?( range, desc, codes )
- spec_count = 1
- spec_count = $1.to_i if desc =~ /\[(\d+)\]/
- range_count = range.end - range.begin + 1
-
- sum = codes.inject(0) { |acc, elt| acc + count_codepoints(elt) }
- sum == spec_count and sum == range_count
-end
-
-##
-# Generate the state machine to stdout
-
-def generate_machine( name, property )
- pipe = " "
- @output.puts " #{name} = "
- each_alpha( @chart_url, property ) do |range, desc|
-
- codes = (@encoding == :ucs4) ? to_ucs4(range) : to_utf8(range)
-
- #raise "Invalid encoding of range #{range}: #{codes.inspect}" unless
- # is_valid? range, desc, codes
-
- range_width = codes.map { |a| a.size }.max
- range_width = RANGE_WIDTH if range_width < RANGE_WIDTH
-
- desc_width = TOTAL_WIDTH - RANGE_WIDTH - 11
- desc_width -= (range_width - RANGE_WIDTH) if range_width > RANGE_WIDTH
-
- if desc.size > desc_width
- desc = desc[0..desc_width - 4] + "..."
- end
-
- codes.each_with_index do |r, idx|
- desc = "" unless idx.zero?
- code = "%-#{range_width}s" % r
- @output.puts " #{pipe} #{code} ##{desc}"
- pipe = "|"
- end
- end
- @output.puts " ;"
- @output.puts ""
-end
-
-@output.puts <<EOF
- if len(ret) > 0 && ret[0] == '.' {
- ret = ret[1:]
- }
- return ret
-}
-
-func navigationStepsRev(v node, offset int) []string {
- switch tv := v.(type) {
- case *objectVal:
- // Do any of our properties have an object that contains the target
- // offset?
- for _, attr := range tv.Attrs {
- k := attr.Name
- av := attr.Value
-
- switch av.(type) {
- case *objectVal, *arrayVal:
- // okay
- default:
- continue
- }
-
- if av.Range().ContainsOffset(offset) {
- return append(navigationStepsRev(av, offset), "."+k)
- }
- }
- case *arrayVal:
- // Do any of our elements contain the target offset?
- for i, elem := range tv.Values {
-
- switch elem.(type) {
- case *objectVal, *arrayVal:
- // okay
- default:
- continue
- }
-
- if elem.Range().ContainsOffset(offset) {
- return append(navigationStepsRev(elem, offset), fmt.Sprintf("[%d]", i))
- }
- }
- }
-
- return nil
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go b/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go
deleted file mode 100644
index d368ea8fc..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/json/parser.go
+++ /dev/null
@@ -1,496 +0,0 @@
-package json
-
-import (
- "encoding/json"
- "fmt"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/zclconf/go-cty/cty"
-)
-
-func parseFileContent(buf []byte, filename string) (node, hcl.Diagnostics) {
- tokens := scan(buf, pos{
- Filename: filename,
- Pos: hcl.Pos{
- Byte: 0,
- Line: 1,
- Column: 1,
- },
- })
- p := newPeeker(tokens)
- node, diags := parseValue(p)
- if len(diags) == 0 && p.Peek().Type != tokenEOF {
- diags = diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Extraneous data after value",
- Detail: "Extra characters appear after the JSON value.",
- Subject: p.Peek().Range.Ptr(),
- })
- }
- return node, diags
-}
-
-func parseValue(p *peeker) (node, hcl.Diagnostics) {
- tok := p.Peek()
-
- wrapInvalid := func(n node, diags hcl.Diagnostics) (node, hcl.Diagnostics) {
- if n != nil {
- return n, diags
- }
- return invalidVal{tok.Range}, diags
- }
-
- switch tok.Type {
- case tokenBraceO:
- return wrapInvalid(parseObject(p))
- case tokenBrackO:
- return wrapInvalid(parseArray(p))
- case tokenNumber:
- return wrapInvalid(parseNumber(p))
- case tokenString:
- return wrapInvalid(parseString(p))
- case tokenKeyword:
- return wrapInvalid(parseKeyword(p))
- case tokenBraceC:
- return wrapInvalid(nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Missing JSON value",
- Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.",
- Subject: &tok.Range,
- },
- })
- case tokenBrackC:
- return wrapInvalid(nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Missing array element value",
- Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.",
- Subject: &tok.Range,
- },
- })
- case tokenEOF:
- return wrapInvalid(nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Missing value",
- Detail: "The JSON data ends prematurely.",
- Subject: &tok.Range,
- },
- })
- default:
- return wrapInvalid(nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Invalid start of value",
- Detail: "A JSON value must start with a brace, a bracket, a number, a string, or a keyword.",
- Subject: &tok.Range,
- },
- })
- }
-}
-
-func tokenCanStartValue(tok token) bool {
- switch tok.Type {
- case tokenBraceO, tokenBrackO, tokenNumber, tokenString, tokenKeyword:
- return true
- default:
- return false
- }
-}
-
-func parseObject(p *peeker) (node, hcl.Diagnostics) {
- var diags hcl.Diagnostics
-
- open := p.Read()
- attrs := []*objectAttr{}
-
- // recover is used to shift the peeker to what seems to be the end of
- // our object, so that when we encounter an error we leave the peeker
- // at a reasonable point in the token stream to continue parsing.
- recover := func(tok token) {
- open := 1
- for {
- switch tok.Type {
- case tokenBraceO:
- open++
- case tokenBraceC:
- open--
- if open <= 1 {
- return
- }
- case tokenEOF:
- // Ran out of source before we were able to recover,
- // so we'll bail here and let the caller deal with it.
- return
- }
- tok = p.Read()
- }
- }
-
-Token:
- for {
- if p.Peek().Type == tokenBraceC {
- break Token
- }
-
- keyNode, keyDiags := parseValue(p)
- diags = diags.Extend(keyDiags)
- if keyNode == nil {
- return nil, diags
- }
-
- keyStrNode, ok := keyNode.(*stringVal)
- if !ok {
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid object property name",
- Detail: "A JSON object property name must be a string",
- Subject: keyNode.StartRange().Ptr(),
- })
- }
-
- key := keyStrNode.Value
-
- colon := p.Read()
- if colon.Type != tokenColon {
- recover(colon)
-
- if colon.Type == tokenBraceC || colon.Type == tokenComma {
- // Catch common mistake of using braces instead of brackets
- // for an object.
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing object value",
- Detail: "A JSON object attribute must have a value, introduced by a colon.",
- Subject: &colon.Range,
- })
- }
-
- if colon.Type == tokenEquals {
- // Possible confusion with native HCL syntax.
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing property value colon",
- Detail: "JSON uses a colon as its name/value delimiter, not an equals sign.",
- Subject: &colon.Range,
- })
- }
-
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing property value colon",
- Detail: "A colon must appear between an object property's name and its value.",
- Subject: &colon.Range,
- })
- }
-
- valNode, valDiags := parseValue(p)
- diags = diags.Extend(valDiags)
- if valNode == nil {
- return nil, diags
- }
-
- attrs = append(attrs, &objectAttr{
- Name: key,
- Value: valNode,
- NameRange: keyStrNode.SrcRange,
- })
-
- switch p.Peek().Type {
- case tokenComma:
- comma := p.Read()
- if p.Peek().Type == tokenBraceC {
- // Special error message for this common mistake
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Trailing comma in object",
- Detail: "JSON does not permit a trailing comma after the final property in an object.",
- Subject: &comma.Range,
- })
- }
- continue Token
- case tokenEOF:
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unclosed object",
- Detail: "No closing brace was found for this JSON object.",
- Subject: &open.Range,
- })
- case tokenBrackC:
- // Consume the bracket anyway, so that we don't return with the peeker
- // at a strange place.
- p.Read()
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Mismatched braces",
- Detail: "A JSON object must be closed with a brace, not a bracket.",
- Subject: p.Peek().Range.Ptr(),
- })
- case tokenBraceC:
- break Token
- default:
- recover(p.Read())
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing attribute seperator comma",
- Detail: "A comma must appear between each property definition in an object.",
- Subject: p.Peek().Range.Ptr(),
- })
- }
-
- }
-
- close := p.Read()
- return &objectVal{
- Attrs: attrs,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- OpenRange: open.Range,
- CloseRange: close.Range,
- }, diags
-}
-
-func parseArray(p *peeker) (node, hcl.Diagnostics) {
- var diags hcl.Diagnostics
-
- open := p.Read()
- vals := []node{}
-
- // recover is used to shift the peeker to what seems to be the end of
- // our array, so that when we encounter an error we leave the peeker
- // at a reasonable point in the token stream to continue parsing.
- recover := func(tok token) {
- open := 1
- for {
- switch tok.Type {
- case tokenBrackO:
- open++
- case tokenBrackC:
- open--
- if open <= 1 {
- return
- }
- case tokenEOF:
- // Ran out of source before we were able to recover,
- // so we'll bail here and let the caller deal with it.
- return
- }
- tok = p.Read()
- }
- }
-
-Token:
- for {
- if p.Peek().Type == tokenBrackC {
- break Token
- }
-
- valNode, valDiags := parseValue(p)
- diags = diags.Extend(valDiags)
- if valNode == nil {
- return nil, diags
- }
-
- vals = append(vals, valNode)
-
- switch p.Peek().Type {
- case tokenComma:
- comma := p.Read()
- if p.Peek().Type == tokenBrackC {
- // Special error message for this common mistake
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Trailing comma in array",
- Detail: "JSON does not permit a trailing comma after the final value in an array.",
- Subject: &comma.Range,
- })
- }
- continue Token
- case tokenColon:
- recover(p.Read())
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid array value",
- Detail: "A colon is not used to introduce values in a JSON array.",
- Subject: p.Peek().Range.Ptr(),
- })
- case tokenEOF:
- recover(p.Read())
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Unclosed object",
- Detail: "No closing bracket was found for this JSON array.",
- Subject: &open.Range,
- })
- case tokenBraceC:
- recover(p.Read())
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Mismatched brackets",
- Detail: "A JSON array must be closed with a bracket, not a brace.",
- Subject: p.Peek().Range.Ptr(),
- })
- case tokenBrackC:
- break Token
- default:
- recover(p.Read())
- return nil, diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing attribute seperator comma",
- Detail: "A comma must appear between each value in an array.",
- Subject: p.Peek().Range.Ptr(),
- })
- }
-
- }
-
- close := p.Read()
- return &arrayVal{
- Values: vals,
- SrcRange: hcl.RangeBetween(open.Range, close.Range),
- OpenRange: open.Range,
- }, diags
-}
-
-func parseNumber(p *peeker) (node, hcl.Diagnostics) {
- tok := p.Read()
-
- // Use encoding/json to validate the number syntax.
- // TODO: Do this more directly to produce better diagnostics.
- var num json.Number
- err := json.Unmarshal(tok.Bytes, &num)
- if err != nil {
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Invalid JSON number",
- Detail: fmt.Sprintf("There is a syntax error in the given JSON number."),
- Subject: &tok.Range,
- },
- }
- }
-
- // We want to guarantee that we parse numbers the same way as cty (and thus
- // native syntax HCL) would here, so we'll use the cty parser even though
- // in most other cases we don't actually introduce cty concepts until
- // decoding time. We'll unwrap the parsed float immediately afterwards, so
- // the cty value is just a temporary helper.
- nv, err := cty.ParseNumberVal(string(num))
- if err != nil {
- // Should never happen if above passed, since JSON numbers are a subset
- // of what cty can parse...
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Invalid JSON number",
- Detail: fmt.Sprintf("There is a syntax error in the given JSON number."),
- Subject: &tok.Range,
- },
- }
- }
-
- return &numberVal{
- Value: nv.AsBigFloat(),
- SrcRange: tok.Range,
- }, nil
-}
-
-func parseString(p *peeker) (node, hcl.Diagnostics) {
- tok := p.Read()
- var str string
- err := json.Unmarshal(tok.Bytes, &str)
-
- if err != nil {
- var errRange hcl.Range
- if serr, ok := err.(*json.SyntaxError); ok {
- errOfs := serr.Offset
- errPos := tok.Range.Start
- errPos.Byte += int(errOfs)
-
- // TODO: Use the byte offset to properly count unicode
- // characters for the column, and mark the whole of the
- // character that was wrong as part of our range.
- errPos.Column += int(errOfs)
-
- errEndPos := errPos
- errEndPos.Byte++
- errEndPos.Column++
-
- errRange = hcl.Range{
- Filename: tok.Range.Filename,
- Start: errPos,
- End: errEndPos,
- }
- } else {
- errRange = tok.Range
- }
-
- var contextRange *hcl.Range
- if errRange != tok.Range {
- contextRange = &tok.Range
- }
-
- // FIXME: Eventually we should parse strings directly here so
- // we can produce a more useful error message in the face of things
- // such as invalid escapes, etc.
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Invalid JSON string",
- Detail: fmt.Sprintf("There is a syntax error in the given JSON string."),
- Subject: &errRange,
- Context: contextRange,
- },
- }
- }
-
- return &stringVal{
- Value: str,
- SrcRange: tok.Range,
- }, nil
-}
-
-func parseKeyword(p *peeker) (node, hcl.Diagnostics) {
- tok := p.Read()
- s := string(tok.Bytes)
-
- switch s {
- case "true":
- return &booleanVal{
- Value: true,
- SrcRange: tok.Range,
- }, nil
- case "false":
- return &booleanVal{
- Value: false,
- SrcRange: tok.Range,
- }, nil
- case "null":
- return &nullVal{
- SrcRange: tok.Range,
- }, nil
- case "undefined", "NaN", "Infinity":
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Invalid JSON keyword",
- Detail: fmt.Sprintf("The JavaScript identifier %q cannot be used in JSON.", s),
- Subject: &tok.Range,
- },
- }
- default:
- var dym string
- if suggest := keywordSuggestion(s); suggest != "" {
- dym = fmt.Sprintf(" Did you mean %q?", suggest)
- }
-
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Invalid JSON keyword",
- Detail: fmt.Sprintf("%q is not a valid JSON keyword.%s", s, dym),
- Subject: &tok.Range,
- },
- }
- }
-}
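For a sense of how these parse errors surface in practice, here is a minimal sketch that feeds an invalid keyword through the package's exported Parse entry point (the parser itself is unexported) and prints the resulting diagnostics; the input and file name are illustrative only.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl/json"
)

func main() {
	// "True" is not a valid JSON keyword; the parser rejects it and, via
	// keywordSuggestion, can hint at the lowercase spelling.
	src := []byte(`{"enabled": True}`)

	_, diags := json.Parse(src, "broken.tf.json")
	for _, diag := range diags {
		fmt.Println(diag.Error())
	}
}
```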
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go b/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go
deleted file mode 100644
index fc7bbf582..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/json/peeker.go
+++ /dev/null
@@ -1,25 +0,0 @@
-package json
-
-type peeker struct {
- tokens []token
- pos int
-}
-
-func newPeeker(tokens []token) *peeker {
- return &peeker{
- tokens: tokens,
- pos: 0,
- }
-}
-
-func (p *peeker) Peek() token {
- return p.tokens[p.pos]
-}
-
-func (p *peeker) Read() token {
- ret := p.tokens[p.pos]
- if ret.Type != tokenEOF {
- p.pos++
- }
- return ret
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/public.go b/vendor/github.com/hashicorp/hcl2/hcl/json/public.go
deleted file mode 100644
index 2728aa130..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/json/public.go
+++ /dev/null
@@ -1,94 +0,0 @@
-package json
-
-import (
- "fmt"
- "io/ioutil"
- "os"
-
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// Parse attempts to parse the given buffer as JSON and, if successful, returns
-// a hcl.File for the HCL configuration represented by it.
-//
-// This is not a generic JSON parser. Instead, it deals only with the profile
-// of JSON used to express HCL configuration.
-//
-// The returned file is valid only if the returned diagnostics returns false
-// from its HasErrors method. If HasErrors returns true, the file represents
-// the subset of data that was able to be parsed, which may be none.
-func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
- rootNode, diags := parseFileContent(src, filename)
-
- switch rootNode.(type) {
- case *objectVal, *arrayVal:
- // okay
- default:
- diags = diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Root value must be object",
- Detail: "The root value in a JSON-based configuration must be either a JSON object or a JSON array of objects.",
- Subject: rootNode.StartRange().Ptr(),
- })
-
- // Since we've already produced an error message for this being
- // invalid, we'll return an empty placeholder here so that trying to
- // extract content from our root body won't produce a redundant
- // error saying the same thing again in more general terms.
- fakePos := hcl.Pos{
- Byte: 0,
- Line: 1,
- Column: 1,
- }
- fakeRange := hcl.Range{
- Filename: filename,
- Start: fakePos,
- End: fakePos,
- }
- rootNode = &objectVal{
- Attrs: []*objectAttr{},
- SrcRange: fakeRange,
- OpenRange: fakeRange,
- }
- }
-
- file := &hcl.File{
- Body: &body{
- val: rootNode,
- },
- Bytes: src,
- Nav: navigation{rootNode},
- }
- return file, diags
-}
-
-// ParseFile is a convenience wrapper around Parse that first attempts to load
-// data from the given filename, passing the result to Parse if successful.
-//
-// If the file cannot be read, an error diagnostic with nil context is returned.
-func ParseFile(filename string) (*hcl.File, hcl.Diagnostics) {
- f, err := os.Open(filename)
- if err != nil {
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Failed to open file",
- Detail: fmt.Sprintf("The file %q could not be opened.", filename),
- },
- }
- }
- defer f.Close()
-
- src, err := ioutil.ReadAll(f)
- if err != nil {
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Failed to read file",
- Detail: fmt.Sprintf("The file %q was opened, but an error occured while reading it.", filename),
- },
- }
- }
-
- return Parse(src, filename)
-}
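As a rough usage sketch for the Parse and JustAttributes path documented above, assuming the pre-Go-modules import path github.com/hashicorp/hcl2/hcl/json that this vendor tree uses, a caller might do something like the following (the file name and property names are illustrative):

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/hcl/json"
)

func main() {
	src := []byte(`{"greeting": "hello", "count": 3}`)

	f, diags := json.Parse(src, "example.tf.json")
	if diags.HasErrors() {
		log.Fatal(diags.Error())
	}

	// JustAttributes treats every property of the root object as an argument.
	attrs, moreDiags := f.Body.JustAttributes()
	if moreDiags.HasErrors() {
		log.Fatal(moreDiags.Error())
	}

	for name, attr := range attrs {
		val, _ := attr.Expr.Value(nil)
		fmt.Printf("%s = %#v\n", name, val)
	}
}
```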
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go b/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go
deleted file mode 100644
index da7288423..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/json/scanner.go
+++ /dev/null
@@ -1,297 +0,0 @@
-package json
-
-import (
- "fmt"
-
- "github.com/apparentlymart/go-textseg/textseg"
- "github.com/hashicorp/hcl2/hcl"
-)
-
-//go:generate stringer -type tokenType scanner.go
-type tokenType rune
-
-const (
- tokenBraceO tokenType = '{'
- tokenBraceC tokenType = '}'
- tokenBrackO tokenType = '['
- tokenBrackC tokenType = ']'
- tokenComma tokenType = ','
- tokenColon tokenType = ':'
- tokenKeyword tokenType = 'K'
- tokenString tokenType = 'S'
- tokenNumber tokenType = 'N'
- tokenEOF tokenType = '␄'
- tokenInvalid tokenType = 0
- tokenEquals tokenType = '=' // used only for reminding the user of JSON syntax
-)
-
-type token struct {
- Type tokenType
- Bytes []byte
- Range hcl.Range
-}
-
-// scan returns the primary tokens for the given JSON buffer in sequence.
-//
-// The responsibility of this pass is to just mark the slices of the buffer
-// as being of various types. It is lax in how it interprets the multi-byte
-// token types keyword, string and number, preferring to capture erroneous
-// extra bytes that we presume the user intended to be part of the token
-// so that we can generate more helpful diagnostics in the parser.
-func scan(buf []byte, start pos) []token {
- var tokens []token
- p := start
- for {
- if len(buf) == 0 {
- tokens = append(tokens, token{
- Type: tokenEOF,
- Bytes: nil,
- Range: posRange(p, p),
- })
- return tokens
- }
-
- buf, p = skipWhitespace(buf, p)
-
- if len(buf) == 0 {
- tokens = append(tokens, token{
- Type: tokenEOF,
- Bytes: nil,
- Range: posRange(p, p),
- })
- return tokens
- }
-
- start = p
-
- first := buf[0]
- switch {
- case first == '{' || first == '}' || first == '[' || first == ']' || first == ',' || first == ':' || first == '=':
- p.Pos.Column++
- p.Pos.Byte++
- tokens = append(tokens, token{
- Type: tokenType(first),
- Bytes: buf[0:1],
- Range: posRange(start, p),
- })
- buf = buf[1:]
- case first == '"':
- var tokBuf []byte
- tokBuf, buf, p = scanString(buf, p)
- tokens = append(tokens, token{
- Type: tokenString,
- Bytes: tokBuf,
- Range: posRange(start, p),
- })
- case byteCanStartNumber(first):
- var tokBuf []byte
- tokBuf, buf, p = scanNumber(buf, p)
- tokens = append(tokens, token{
- Type: tokenNumber,
- Bytes: tokBuf,
- Range: posRange(start, p),
- })
- case byteCanStartKeyword(first):
- var tokBuf []byte
- tokBuf, buf, p = scanKeyword(buf, p)
- tokens = append(tokens, token{
- Type: tokenKeyword,
- Bytes: tokBuf,
- Range: posRange(start, p),
- })
- default:
- tokens = append(tokens, token{
- Type: tokenInvalid,
- Bytes: buf[:1],
- Range: start.Range(1, 1),
- })
- // If we've encountered an invalid then we might as well stop
- // scanning since the parser won't proceed beyond this point.
- return tokens
- }
- }
-}
-
-func byteCanStartNumber(b byte) bool {
- switch b {
- // We are slightly more tolerant than JSON requires here since we
- // expect the parser will make a stricter interpretation of the
- // number bytes, but we specifically don't allow 'e' or 'E' here
- // since we want the scanner to treat that as the start of an
- // invalid keyword instead, to produce more intelligible error messages.
- case '-', '+', '.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
- return true
- default:
- return false
- }
-}
-
-func scanNumber(buf []byte, start pos) ([]byte, []byte, pos) {
- // The scanner doesn't check that the sequence of digit-ish bytes is
- // in a valid order. The parser must do this when decoding a number
- // token.
- var i int
- p := start
-Byte:
- for i = 0; i < len(buf); i++ {
- switch buf[i] {
- case '-', '+', '.', 'e', 'E', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
- p.Pos.Byte++
- p.Pos.Column++
- default:
- break Byte
- }
- }
- return buf[:i], buf[i:], p
-}
-
-func byteCanStartKeyword(b byte) bool {
- switch {
- // We allow any sequence of alphabetical characters here, even though
- // JSON is more constrained, so that we can collect what we presume
- // the user intended to be a single keyword and then check its validity
- // in the parser, where we can generate better diagnostics.
- // So e.g. we want to be able to say:
- // unrecognized keyword "True". Did you mean "true"?
- case isAlphabetical(b):
- return true
- default:
- return false
- }
-}
-
-func scanKeyword(buf []byte, start pos) ([]byte, []byte, pos) {
- var i int
- p := start
-Byte:
- for i = 0; i < len(buf); i++ {
- b := buf[i]
- switch {
- case isAlphabetical(b) || b == '_':
- p.Pos.Byte++
- p.Pos.Column++
- default:
- break Byte
- }
- }
- return buf[:i], buf[i:], p
-}
-
-func scanString(buf []byte, start pos) ([]byte, []byte, pos) {
- // The scanner doesn't validate correct use of escapes, etc. It pays
- // attention to escapes only for the purpose of identifying the closing
- // quote character. It's the parser's responsibility to do proper
- // validation.
- //
- // The scanner also doesn't specifically detect unterminated string
- // literals, though they can be identified in the parser by checking if
- // the final byte in a string token is the double-quote character.
-
- // Skip the opening quote symbol
- i := 1
- p := start
- p.Pos.Byte++
- p.Pos.Column++
- escaping := false
-Byte:
- for i < len(buf) {
- b := buf[i]
-
- switch {
- case b == '\\':
- escaping = !escaping
- p.Pos.Byte++
- p.Pos.Column++
- i++
- case b == '"':
- p.Pos.Byte++
- p.Pos.Column++
- i++
- if !escaping {
- break Byte
- }
- escaping = false
- case b < 32:
- break Byte
- default:
- // Advance by one grapheme cluster, so that we consider each
- // grapheme to be a "column".
- // Ignoring error because this scanner cannot produce errors.
- advance, _, _ := textseg.ScanGraphemeClusters(buf[i:], true)
-
- p.Pos.Byte += advance
- p.Pos.Column++
- i += advance
-
- escaping = false
- }
- }
- return buf[:i], buf[i:], p
-}
-
-func skipWhitespace(buf []byte, start pos) ([]byte, pos) {
- var i int
- p := start
-Byte:
- for i = 0; i < len(buf); i++ {
- switch buf[i] {
- case ' ':
- p.Pos.Byte++
- p.Pos.Column++
- case '\n':
- p.Pos.Byte++
- p.Pos.Column = 1
- p.Pos.Line++
- case '\r':
- // For the purpose of line/column counting we consider a
- // carriage return to take up no space, assuming that it will
- // be paired up with a newline (on Windows, for example) that
- // will account for both of them.
- p.Pos.Byte++
- case '\t':
- // We arbitrarily count a tab as if it were two spaces, because
- // we need to choose _some_ number here. This means any system
- // that renders code on-screen with markers must itself treat
- // tabs as a pair of spaces for rendering purposes, or instead
- // use the byte offset and back into its own column position.
- p.Pos.Byte++
- p.Pos.Column += 2
- default:
- break Byte
- }
- }
- return buf[i:], p
-}
-
-type pos struct {
- Filename string
- Pos hcl.Pos
-}
-
-func (p *pos) Range(byteLen, charLen int) hcl.Range {
- start := p.Pos
- end := p.Pos
- end.Byte += byteLen
- end.Column += charLen
- return hcl.Range{
- Filename: p.Filename,
- Start: start,
- End: end,
- }
-}
-
-func posRange(start, end pos) hcl.Range {
- return hcl.Range{
- Filename: start.Filename,
- Start: start.Pos,
- End: end.Pos,
- }
-}
-
-func (t token) GoString() string {
- return fmt.Sprintf("json.token{json.%s, []byte(%q), %#v}", t.Type, t.Bytes, t.Range)
-}
-
-func isAlphabetical(b byte) bool {
- return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z')
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md
deleted file mode 100644
index dac5729d4..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/json/spec.md
+++ /dev/null
@@ -1,405 +0,0 @@
-# HCL JSON Syntax Specification
-
-This is the specification for the JSON serialization for hcl. HCL is a system
-for defining configuration languages for applications. The HCL information
-model is designed to support multiple concrete syntaxes for configuration,
-and this JSON-based format complements [the native syntax](../hclsyntax/spec.md)
-by being easy to machine-generate, whereas the native syntax is oriented
-towards human authoring and maintenance.
-
-This syntax is defined in terms of JSON as defined in
-[RFC7159](https://tools.ietf.org/html/rfc7159). As such it inherits the JSON
-grammar as-is, and merely defines a specific methodology for interpreting
-JSON constructs into HCL structural elements and expressions.
-
-This mapping is defined such that valid JSON-serialized HCL input can be
-_produced_ using standard JSON implementations in various programming languages.
-_Parsing_ such JSON has some additional constraints beyond what is normally
-supported by JSON parsers, so a specialized parser may be required that
-is able to:
-
-- Preserve the relative ordering of properties defined in an object.
-- Preserve multiple definitions of the same property name.
-- Preserve numeric values to the precision required by the number type
- in [the HCL syntax-agnostic information model](../spec.md).
-- Retain source location information for parsed tokens/constructs in order
- to produce good error messages.
-
-## Structural Elements
-
-[The HCL syntax-agnostic information model](../spec.md) defines a _body_ as an
-abstract container for attribute definitions and child blocks. A body is
-represented in JSON as either a single JSON object or a JSON array of objects.
-
-Body processing is in terms of JSON object properties, visited in the order
-they appear in the input. Where a body is represented by a single JSON object,
-the properties of that object are visited in order. Where a body is
-represented by a JSON array, each of its elements are visited in order and
-each element has its properties visited in order. If any element of the array
-is not a JSON object then the input is erroneous.
-
-When a body is being processed in the _dynamic attributes_ mode, the allowance
-of a JSON array in the previous paragraph does not apply and instead a single
-JSON object is always required.
-
-As defined in the language-agnostic model, body processing is in terms
-of a schema which provides context for interpreting the body's content. For
-JSON bodies, the schema is crucial to allow differentiation of attribute
-definitions and block definitions, both of which are represented via object
-properties.
-
-The special property name `"//"`, when used in an object representing a HCL
-body, is parsed and ignored. A property with this name can be used to
-include human-readable comments. (This special property name is _not_
-processed in this way for any _other_ HCL constructs that are represented as
-JSON objects.)
-
-### Attributes
-
-Where the given schema describes an attribute with a given name, the object
-property with the matching name — if present — serves as the attribute's
-definition.
-
-When a body is being processed in the _dynamic attributes_ mode, each object
-property serves as an attribute definition for the attribute whose name
-matches the property name.
-
-The value of an attribute definition property is interpreted as an _expression_,
-as described in a later section.
-
-Given a schema that calls for an attribute named "foo", a JSON object like
-the following provides a definition for that attribute:
-
-```json
-{
- "foo": "bar baz"
-}
-```
-
-### Blocks
-
-Where the given schema describes a block with a given type name, each object
-property with the matching name serves as a definition of zero or more blocks
-of that type.
-
-Processing of child blocks is in terms of nested JSON objects and arrays.
-If the schema defines one or more _labels_ for the block type, a nested JSON
-object or JSON array of objects is required for each labelling level. These
-are flattened to a single ordered sequence of object properties using the
-same algorithm as for body content as defined above. Each object property
-serves as a label value at the corresponding level.
-
-After any labelling levels, the next nested value is either a JSON object
-representing a single block body, or a JSON array of JSON objects that each
-represent a single block body. Use of an array accommodates the definition
-of multiple blocks that have identical type and labels.
-
-Given a schema that calls for a block type named "foo" with no labels, the
-following JSON objects are all valid definitions of zero or more blocks of this
-type:
-
-```json
-{
- "foo": {
- "child_attr": "baz"
- }
-}
-```
-
-```json
-{
- "foo": [
- {
- "child_attr": "baz"
- },
- {
- "child_attr": "boz"
- }
- ]
-}
-```
-
-```json
-{
- "foo": []
-}
-```
-
-The first of these defines a single child block of type "foo". The second
-defines _two_ such blocks. The final example shows a degenerate definition
-of zero blocks, though generators should prefer to omit the property entirely
-in this scenario.
-
-Given a schema that calls for a block type named "foo" with _two_ labels, the
-extra label levels must be represented as objects or arrays of objects as in
-the following examples:
-
-```json
-{
- "foo": {
- "bar": {
- "baz": {
- "child_attr": "baz"
- },
- "boz": {
- "child_attr": "baz"
- }
- },
- "boz": {
- "baz": {
- "child_attr": "baz"
- }
- }
- }
-}
-```
-
-```json
-{
- "foo": {
- "bar": {
- "baz": {
- "child_attr": "baz"
- },
- "boz": {
- "child_attr": "baz"
- }
- },
- "boz": {
- "baz": [
- {
- "child_attr": "baz"
- },
- {
- "child_attr": "boz"
- }
- ]
- }
- }
-}
-```
-
-```json
-{
- "foo": [
- {
- "bar": {
- "baz": {
- "child_attr": "baz"
- },
- "boz": {
- "child_attr": "baz"
- }
- }
- },
- {
- "bar": {
- "baz": [
- {
- "child_attr": "baz"
- },
- {
- "child_attr": "boz"
- }
- ]
- }
- }
- ]
-}
-```
-
-```json
-{
- "foo": {
- "bar": {
- "baz": {
- "child_attr": "baz"
- },
- "boz": {
- "child_attr": "baz"
- }
- },
- "bar": {
- "baz": [
- {
- "child_attr": "baz"
- },
- {
- "child_attr": "boz"
- }
- ]
- }
- }
-}
-```
-
-Arrays can be introduced at either the label definition or block body
-definition levels to define multiple definitions of the same block type
-or labels while preserving order.
-
-A JSON HCL parser _must_ support duplicate definitions of the same property
-name within a single object, preserving all of them and the relative ordering
-between them. The array-based forms are also required so that JSON HCL
-configurations can be produced with JSON producing libraries that are not
-able to preserve property definition order and multiple definitions of
-the same property.
-
-## Expressions
-
-JSON lacks a native expression syntax, so the HCL JSON syntax instead defines
-a mapping for each of the JSON value types, including a special mapping for
-strings that allows optional use of arbitrary expressions.
-
-### Objects
-
-When interpreted as an expression, a JSON object represents a value of a HCL
-object type.
-
-Each property of the JSON object represents an attribute of the HCL object type.
-The property name string given in the JSON input is interpreted as a string
-expression as described below, and its result is converted to string as defined
-by the syntax-agnostic information model. If such a conversion is not possible,
-an error is produced and evaluation fails.
-
-An instance of the constructed object type is then created, whose values
-are interpreted by again recursively applying the mapping rules defined in
-this section to each of the property values.
-
-If any evaluated property name strings produce null values, an error is
-produced and evaluation fails. If any produce _unknown_ values, the _entire
-object's_ result is an unknown value of the dynamic pseudo-type, signalling
-that the type of the object cannot be determined.
-
-It is an error to define the same property name multiple times within a single
-JSON object interpreted as an expression. In full expression mode, this
-constraint applies to the name expression results after conversion to string,
-rather than the raw string that may contain interpolation expressions.
-
-### Arrays
-
-When interpreted as an expression, a JSON array represents a value of a HCL
-tuple type.
-
-Each element of the JSON array represents an element of the HCL tuple type.
-The tuple type is constructed by enumerating the JSON array elements, creating
-for each an element whose type is the result of recursively applying the
-expression mapping rules. Correspondence is preserved between the array element
-indices and the tuple element indices.
-
-An instance of the constructed tuple type is then created, whose values are
-interpreted by again recursively applying the mapping rules defined in this
-section.
-
-### Numbers
-
-When interpreted as an expression, a JSON number represents a HCL number value.
-
-HCL numbers are arbitrary-precision decimal values, so a JSON HCL parser must
-be able to translate exactly the value given to a number of corresponding
-precision, within the constraints set by the HCL syntax-agnostic information
-model.
-
-In practice, off-the-shelf JSON serializers often do not support customizing the
-processing of numbers, and instead force processing as 32-bit or 64-bit
-floating point values.
-
-A _producer_ of JSON HCL that uses such a serializer can provide numeric values
-as JSON strings where they have precision too great for representation in the
-serializer's chosen numeric type in situations where the result will be
-converted to number (using the standard conversion rules) by a calling
-application.
-
-Alternatively, for expressions that are evaluated in full expression mode an
-embedded template interpolation can be used to faithfully represent a number,
-such as `"${1e150}"`, which will then be evaluated by the underlying HCL native
-syntax expression evaluator.
-
-### Boolean Values
-
-The JSON boolean values `true` and `false`, when interpreted as expressions,
-represent the corresponding HCL boolean values.
-
-### The Null Value
-
-The JSON value `null`, when interpreted as an expression, represents a
-HCL null value of the dynamic pseudo-type.
-
-### Strings
-
-When interpreted as an expression, a JSON string may be interpreted in one of
-two ways depending on the evaluation mode.
-
-If evaluating in literal-only mode (as defined by the syntax-agnostic
-information model) the literal string is interpreted directly as a HCL string
-value, using the exact sequence of unicode characters represented.
-Template interpolations and directives MUST NOT be processed in this mode,
-allowing any characters that appear as introduction sequences to pass through
-literally:
-
-```json
-"Hello world! Template sequences like ${ are not intepreted here."
-```
-
-When evaluating in full expression mode (again, as defined by the syntax-
-agnostic information model) the literal string is instead interpreted as a
-_standalone template_ in the HCL Native Syntax. The expression evaluation
-result is then the direct result of evaluating that template with the current
-variable scope and function table.
-
-```json
-"Hello, ${name}! Template sequences are interpreted in full expression mode."
-```
-
-In particular the _Template Interpolation Unwrapping_ requirement from the
-HCL native syntax specification must be implemented, allowing the use of
-single-interpolation templates to represent expressions that would not
-otherwise be representable in JSON, such as the following example where
-the result must be a number, rather than a string representation of a number:
-
-```json
-"${ a + b }"
-```
-
-## Static Analysis
-
-The HCL static analysis operations are implemented for JSON values that
-represent expressions, as described in the following sections.
-
-Due to the limited expressive power of the JSON syntax alone, use of these
-static analysis functions rather than normal expression evaluation serves
-as additional context for how a JSON value is to be interpreted, which means
-that static analyses can result in a different interpretation of a given
-expression than normal evaluation would produce.
-
-### Static List
-
-An expression interpreted as a static list must be a JSON array. Each of the
-values in the array is interpreted as an expression and returned.
-
-### Static Map
-
-An expression interpreted as a static map must be a JSON object. Each of the
-key/value pairs in the object is presented as a pair of expressions. Since
-object property names are always strings, evaluating the key expression with
-a non-`nil` evaluation context will evaluate any template sequences given
-in the property name.
-
-### Static Call
-
-An expression interpreted as a static call must be a string. The content of
-the string is interpreted as a native syntax expression (not a _template_,
-unlike normal evaluation) and then the static call analysis is delegated to
-that expression.
-
-If the original expression is not a string or its contents cannot be parsed
-as a native syntax expression then static call analysis is not supported.
-
-### Static Traversal
-
-An expression interpreted as a static traversal must be a string. The content
-of the string is interpreted as a native syntax expression (not a _template_,
-unlike normal evaluation) and then static traversal analysis is delegated
-to that expression.
-
-If the original expression is not a string or its contents cannot be parsed
-as a native syntax expression then static traversal analysis is not supported.
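To make the attribute-versus-block distinction above concrete, here is a hedged Go sketch (names and file path are illustrative) that parses a small JSON body and extracts its content through an hcl.BodySchema, which is what tells the JSON body whether a property is an argument or a block type:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hcl/json"
)

func main() {
	// One attribute ("foo") and one labelled block type ("service"),
	// expressed in the JSON syntax described above.
	src := []byte(`{
	  "foo": "bar baz",
	  "service": {
	    "web": {
	      "listen_addr": "127.0.0.1:8080"
	    }
	  }
	}`)

	f, diags := json.Parse(src, "config.tf.json")
	if diags.HasErrors() {
		log.Fatal(diags.Error())
	}

	// The schema distinguishes attribute definitions from block definitions,
	// since both are plain object properties in the JSON syntax.
	schema := &hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{
			{Name: "foo", Required: true},
		},
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "service", LabelNames: []string{"name"}},
		},
	}

	content, moreDiags := f.Body.Content(schema)
	if moreDiags.HasErrors() {
		log.Fatal(moreDiags.Error())
	}

	fooVal, _ := content.Attributes["foo"].Expr.Value(nil)
	fmt.Println("foo =", fooVal.AsString())

	for _, block := range content.Blocks {
		fmt.Printf("block %q with labels %v\n", block.Type, block.Labels)
	}
}
```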
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go
deleted file mode 100644
index 74847c79a..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go
+++ /dev/null
@@ -1,637 +0,0 @@
-package json
-
-import (
- "fmt"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
- "github.com/zclconf/go-cty/cty"
- "github.com/zclconf/go-cty/cty/convert"
-)
-
-// body is the implementation of "Body" used for files processed with the JSON
-// parser.
-type body struct {
- val node
-
- // If non-nil, the keys of this map cause the corresponding attributes to
- // be treated as non-existing. This is used when Body.PartialContent is
- // called, to produce the "remaining content" Body.
- hiddenAttrs map[string]struct{}
-}
-
-// expression is the implementation of "Expression" used for files processed
-// with the JSON parser.
-type expression struct {
- src node
-}
-
-func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
- content, newBody, diags := b.PartialContent(schema)
-
- hiddenAttrs := newBody.(*body).hiddenAttrs
-
- var nameSuggestions []string
- for _, attrS := range schema.Attributes {
- if _, ok := hiddenAttrs[attrS.Name]; !ok {
- // Only suggest an attribute name if we didn't use it already.
- nameSuggestions = append(nameSuggestions, attrS.Name)
- }
- }
- for _, blockS := range schema.Blocks {
- // Blocks can appear multiple times, so we'll suggest their type
- // names regardless of whether they've already been used.
- nameSuggestions = append(nameSuggestions, blockS.Type)
- }
-
- jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
- diags = append(diags, attrDiags...)
-
- for _, attr := range jsonAttrs {
- k := attr.Name
- if k == "//" {
- // Ignore "//" keys in objects representing bodies, to allow
- // their use as comments.
- continue
- }
-
- if _, ok := hiddenAttrs[k]; !ok {
- suggestion := nameSuggestion(k, nameSuggestions)
- if suggestion != "" {
- suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
- }
-
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Extraneous JSON object property",
- Detail: fmt.Sprintf("No argument or block type is named %q.%s", k, suggestion),
- Subject: &attr.NameRange,
- Context: attr.Range().Ptr(),
- })
- }
- }
-
- return content, diags
-}
-
-func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
- var diags hcl.Diagnostics
-
- jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
- diags = append(diags, attrDiags...)
-
- usedNames := map[string]struct{}{}
- if b.hiddenAttrs != nil {
- for k := range b.hiddenAttrs {
- usedNames[k] = struct{}{}
- }
- }
-
- content := &hcl.BodyContent{
- Attributes: map[string]*hcl.Attribute{},
- Blocks: nil,
-
- MissingItemRange: b.MissingItemRange(),
- }
-
- // Create some more convenient data structures for our work below.
- attrSchemas := map[string]hcl.AttributeSchema{}
- blockSchemas := map[string]hcl.BlockHeaderSchema{}
- for _, attrS := range schema.Attributes {
- attrSchemas[attrS.Name] = attrS
- }
- for _, blockS := range schema.Blocks {
- blockSchemas[blockS.Type] = blockS
- }
-
- for _, jsonAttr := range jsonAttrs {
- attrName := jsonAttr.Name
- if _, used := b.hiddenAttrs[attrName]; used {
- continue
- }
-
- if attrS, defined := attrSchemas[attrName]; defined {
- if existing, exists := content.Attributes[attrName]; exists {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Duplicate argument",
- Detail: fmt.Sprintf("The argument %q was already set at %s.", attrName, existing.Range),
- Subject: &jsonAttr.NameRange,
- Context: jsonAttr.Range().Ptr(),
- })
- continue
- }
-
- content.Attributes[attrS.Name] = &hcl.Attribute{
- Name: attrS.Name,
- Expr: &expression{src: jsonAttr.Value},
- Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
- NameRange: jsonAttr.NameRange,
- }
- usedNames[attrName] = struct{}{}
-
- } else if blockS, defined := blockSchemas[attrName]; defined {
- bv := jsonAttr.Value
- blockDiags := b.unpackBlock(bv, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)
- diags = append(diags, blockDiags...)
- usedNames[attrName] = struct{}{}
- }
-
- // We ignore anything that isn't defined because that's the
- // PartialContent contract. The Content method will catch leftovers.
- }
-
- // Make sure we got all the required attributes.
- for _, attrS := range schema.Attributes {
- if !attrS.Required {
- continue
- }
- if _, defined := content.Attributes[attrS.Name]; !defined {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing required argument",
- Detail: fmt.Sprintf("The argument %q is required, but no definition was found.", attrS.Name),
- Subject: b.MissingItemRange().Ptr(),
- })
- }
- }
-
- unusedBody := &body{
- val: b.val,
- hiddenAttrs: usedNames,
- }
-
- return content, unusedBody, diags
-}
-
-// JustAttributes for JSON bodies interprets all properties of the wrapped
-// JSON object as attributes and returns them.
-func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
- var diags hcl.Diagnostics
- attrs := make(map[string]*hcl.Attribute)
-
- obj, ok := b.val.(*objectVal)
- if !ok {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Incorrect JSON value type",
- Detail: "A JSON object is required here, setting the arguments for this block.",
- Subject: b.val.StartRange().Ptr(),
- })
- return attrs, diags
- }
-
- for _, jsonAttr := range obj.Attrs {
- name := jsonAttr.Name
- if name == "//" {
- // Ignore "//" keys in objects representing bodies, to allow
- // their use as comments.
- continue
- }
-
- if _, hidden := b.hiddenAttrs[name]; hidden {
- continue
- }
-
- if existing, exists := attrs[name]; exists {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Duplicate attribute definition",
- Detail: fmt.Sprintf("The argument %q was already set at %s.", name, existing.Range),
- Subject: &jsonAttr.NameRange,
- })
- continue
- }
-
- attrs[name] = &hcl.Attribute{
- Name: name,
- Expr: &expression{src: jsonAttr.Value},
- Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
- NameRange: jsonAttr.NameRange,
- }
- }
-
- // No diagnostics possible here, since the parser already took care of
- // finding duplicates and every JSON value can be a valid attribute value.
- return attrs, diags
-}
-
-func (b *body) MissingItemRange() hcl.Range {
- switch tv := b.val.(type) {
- case *objectVal:
- return tv.CloseRange
- case *arrayVal:
- return tv.OpenRange
- default:
- // Should not happen in correct operation, but might show up if the
- // input is invalid and we are producing partial results.
- return tv.StartRange()
- }
-}
-
-func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labelsLeft []string, labelsUsed []string, labelRanges []hcl.Range, blocks *hcl.Blocks) (diags hcl.Diagnostics) {
- if len(labelsLeft) > 0 {
- labelName := labelsLeft[0]
- jsonAttrs, attrDiags := b.collectDeepAttrs(v, &labelName)
- diags = append(diags, attrDiags...)
-
- if len(jsonAttrs) == 0 {
- diags = diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Missing block label",
- Detail: fmt.Sprintf("At least one object property is required, whose name represents the %s block's %s.", typeName, labelName),
- Subject: v.StartRange().Ptr(),
- })
- return
- }
- labelsUsed := append(labelsUsed, "")
- labelRanges := append(labelRanges, hcl.Range{})
- for _, p := range jsonAttrs {
- pk := p.Name
- labelsUsed[len(labelsUsed)-1] = pk
- labelRanges[len(labelRanges)-1] = p.NameRange
- diags = append(diags, b.unpackBlock(p.Value, typeName, typeRange, labelsLeft[1:], labelsUsed, labelRanges, blocks)...)
- }
- return
- }
-
- // By the time we get here, we've peeled off all the labels and we're ready
- // to deal with the block's actual content.
-
- // need to copy the label slices because their underlying arrays will
- // continue to be mutated after we return.
- labels := make([]string, len(labelsUsed))
- copy(labels, labelsUsed)
- labelR := make([]hcl.Range, len(labelRanges))
- copy(labelR, labelRanges)
-
- switch tv := v.(type) {
- case *nullVal:
-		// There is no block content, e.g. the value is null.
- return
- case *objectVal:
- // Single instance of the block
- *blocks = append(*blocks, &hcl.Block{
- Type: typeName,
- Labels: labels,
- Body: &body{
- val: tv,
- },
-
- DefRange: tv.OpenRange,
- TypeRange: *typeRange,
- LabelRanges: labelR,
- })
- case *arrayVal:
- // Multiple instances of the block
- for _, av := range tv.Values {
- *blocks = append(*blocks, &hcl.Block{
- Type: typeName,
- Labels: labels,
- Body: &body{
- val: av, // might be mistyped; we'll find out when content is requested for this body
- },
-
- DefRange: tv.OpenRange,
- TypeRange: *typeRange,
- LabelRanges: labelR,
- })
- }
- default:
- diags = diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Incorrect JSON value type",
- Detail: fmt.Sprintf("Either a JSON object or a JSON array is required, representing the contents of one or more %q blocks.", typeName),
- Subject: v.StartRange().Ptr(),
- })
- }
- return
-}
-
-// collectDeepAttrs takes either a single object or an array of objects and
-// flattens it into a list of object attributes, collecting attributes from
-// all of the objects in a given array.
-//
-// Ordering is preserved, so a list of objects that each have one property
-// will result in those properties being returned in the same order as the
-// objects appeared in the array.
-//
-// This is appropriate for use only for objects representing bodies or labels
-// within a block.
-//
-// The labelName argument, if non-null, is used to tailor returned error
-// messages to refer to block labels rather than attributes and child blocks.
-// It has no other effect.
-func (b *body) collectDeepAttrs(v node, labelName *string) ([]*objectAttr, hcl.Diagnostics) {
- var diags hcl.Diagnostics
- var attrs []*objectAttr
-
- switch tv := v.(type) {
- case *nullVal:
-		// If the value is null, we return neither attributes nor an error.
-
- case *objectVal:
- attrs = append(attrs, tv.Attrs...)
-
- case *arrayVal:
- for _, ev := range tv.Values {
- switch tev := ev.(type) {
- case *objectVal:
- attrs = append(attrs, tev.Attrs...)
- default:
- if labelName != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Incorrect JSON value type",
- Detail: fmt.Sprintf("A JSON object is required here, to specify %s labels for this block.", *labelName),
- Subject: ev.StartRange().Ptr(),
- })
- } else {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Incorrect JSON value type",
- Detail: "A JSON object is required here, to define arguments and child blocks.",
- Subject: ev.StartRange().Ptr(),
- })
- }
- }
- }
-
- default:
- if labelName != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Incorrect JSON value type",
- Detail: fmt.Sprintf("Either a JSON object or JSON array of objects is required here, to specify %s labels for this block.", *labelName),
- Subject: v.StartRange().Ptr(),
- })
- } else {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Incorrect JSON value type",
- Detail: "Either a JSON object or JSON array of objects is required here, to define arguments and child blocks.",
- Subject: v.StartRange().Ptr(),
- })
- }
- }
-
- return attrs, diags
-}
-
-func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
- switch v := e.src.(type) {
- case *stringVal:
- if ctx != nil {
-			// Parse string contents as an HCL native language expression.
- // We only do this if we have a context, so passing a nil context
- // is how the caller specifies that interpolations are not allowed
- // and that the string should just be returned verbatim.
- templateSrc := v.Value
- expr, diags := hclsyntax.ParseTemplate(
- []byte(templateSrc),
- v.SrcRange.Filename,
-
- // This won't produce _exactly_ the right result, since
- // the hclsyntax parser can't "see" any escapes we removed
- // while parsing JSON, but it's better than nothing.
- hcl.Pos{
- Line: v.SrcRange.Start.Line,
-
- // skip over the opening quote mark
- Byte: v.SrcRange.Start.Byte + 1,
- Column: v.SrcRange.Start.Column + 1,
- },
- )
- if diags.HasErrors() {
- return cty.DynamicVal, diags
- }
- val, evalDiags := expr.Value(ctx)
- diags = append(diags, evalDiags...)
- return val, diags
- }
-
- return cty.StringVal(v.Value), nil
- case *numberVal:
- return cty.NumberVal(v.Value), nil
- case *booleanVal:
- return cty.BoolVal(v.Value), nil
- case *arrayVal:
- var diags hcl.Diagnostics
- vals := []cty.Value{}
- for _, jsonVal := range v.Values {
- val, valDiags := (&expression{src: jsonVal}).Value(ctx)
- vals = append(vals, val)
- diags = append(diags, valDiags...)
- }
- return cty.TupleVal(vals), diags
- case *objectVal:
- var diags hcl.Diagnostics
- attrs := map[string]cty.Value{}
- attrRanges := map[string]hcl.Range{}
- known := true
- for _, jsonAttr := range v.Attrs {
- // In this one context we allow keys to contain interpolation
- // expressions too, assuming we're evaluating in interpolation
- // mode. This achieves parity with the native syntax where
- // object expressions can have dynamic keys, while block contents
- // may not.
- name, nameDiags := (&expression{src: &stringVal{
- Value: jsonAttr.Name,
- SrcRange: jsonAttr.NameRange,
- }}).Value(ctx)
- valExpr := &expression{src: jsonAttr.Value}
- val, valDiags := valExpr.Value(ctx)
- diags = append(diags, nameDiags...)
- diags = append(diags, valDiags...)
-
- var err error
- name, err = convert.Convert(name, cty.String)
- if err != nil {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid object key expression",
- Detail: fmt.Sprintf("Cannot use this expression as an object key: %s.", err),
- Subject: &jsonAttr.NameRange,
- Expression: valExpr,
- EvalContext: ctx,
- })
- continue
- }
- if name.IsNull() {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid object key expression",
- Detail: "Cannot use null value as an object key.",
- Subject: &jsonAttr.NameRange,
- Expression: valExpr,
- EvalContext: ctx,
- })
- continue
- }
- if !name.IsKnown() {
- // This is a bit of a weird case, since our usual rules require
- // us to tolerate unknowns and just represent the result as
- // best we can but if we don't know the key then we can't
- // know the type of our object at all, and thus we must turn
- // the whole thing into cty.DynamicVal. This is consistent with
- // how this situation is handled in the native syntax.
- // We'll keep iterating so we can collect other errors in
- // subsequent attributes.
- known = false
- continue
- }
- nameStr := name.AsString()
- if _, defined := attrs[nameStr]; defined {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Duplicate object attribute",
- Detail: fmt.Sprintf("An attribute named %q was already defined at %s.", nameStr, attrRanges[nameStr]),
- Subject: &jsonAttr.NameRange,
- Expression: e,
- EvalContext: ctx,
- })
- continue
- }
- attrs[nameStr] = val
- attrRanges[nameStr] = jsonAttr.NameRange
- }
- if !known {
- // We encountered an unknown key somewhere along the way, so
- // we can't know what our type will eventually be.
- return cty.DynamicVal, diags
- }
- return cty.ObjectVal(attrs), diags
- case *nullVal:
- return cty.NullVal(cty.DynamicPseudoType), nil
- default:
- // Default to DynamicVal so that ASTs containing invalid nodes can
- // still be partially-evaluated.
- return cty.DynamicVal, nil
- }
-}
-
-func (e *expression) Variables() []hcl.Traversal {
- var vars []hcl.Traversal
-
- switch v := e.src.(type) {
- case *stringVal:
- templateSrc := v.Value
- expr, diags := hclsyntax.ParseTemplate(
- []byte(templateSrc),
- v.SrcRange.Filename,
-
- // This won't produce _exactly_ the right result, since
- // the hclsyntax parser can't "see" any escapes we removed
- // while parsing JSON, but it's better than nothing.
- hcl.Pos{
- Line: v.SrcRange.Start.Line,
-
- // skip over the opening quote mark
- Byte: v.SrcRange.Start.Byte + 1,
- Column: v.SrcRange.Start.Column + 1,
- },
- )
- if diags.HasErrors() {
- return vars
- }
- return expr.Variables()
-
- case *arrayVal:
- for _, jsonVal := range v.Values {
- vars = append(vars, (&expression{src: jsonVal}).Variables()...)
- }
- case *objectVal:
- for _, jsonAttr := range v.Attrs {
- keyExpr := &stringVal{ // we're going to treat key as an expression in this context
- Value: jsonAttr.Name,
- SrcRange: jsonAttr.NameRange,
- }
- vars = append(vars, (&expression{src: keyExpr}).Variables()...)
- vars = append(vars, (&expression{src: jsonAttr.Value}).Variables()...)
- }
- }
-
- return vars
-}
-
-func (e *expression) Range() hcl.Range {
- return e.src.Range()
-}
-
-func (e *expression) StartRange() hcl.Range {
- return e.src.StartRange()
-}
-
-// Implementation for hcl.AbsTraversalForExpr.
-func (e *expression) AsTraversal() hcl.Traversal {
- // In JSON-based syntax a traversal is given as a string containing
- // traversal syntax as defined by hclsyntax.ParseTraversalAbs.
-
- switch v := e.src.(type) {
- case *stringVal:
- traversal, diags := hclsyntax.ParseTraversalAbs([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start)
- if diags.HasErrors() {
- return nil
- }
- return traversal
- default:
- return nil
- }
-}
-
-// Implementation for hcl.ExprCall.
-func (e *expression) ExprCall() *hcl.StaticCall {
- // In JSON-based syntax a static call is given as a string containing
- // an expression in the native syntax that also supports ExprCall.
-
- switch v := e.src.(type) {
- case *stringVal:
- expr, diags := hclsyntax.ParseExpression([]byte(v.Value), v.SrcRange.Filename, v.SrcRange.Start)
- if diags.HasErrors() {
- return nil
- }
-
- call, diags := hcl.ExprCall(expr)
- if diags.HasErrors() {
- return nil
- }
-
- return call
- default:
- return nil
- }
-}
-
-// Implementation for hcl.ExprList.
-func (e *expression) ExprList() []hcl.Expression {
- switch v := e.src.(type) {
- case *arrayVal:
- ret := make([]hcl.Expression, len(v.Values))
- for i, node := range v.Values {
- ret[i] = &expression{src: node}
- }
- return ret
- default:
- return nil
- }
-}
-
-// Implementation for hcl.ExprMap.
-func (e *expression) ExprMap() []hcl.KeyValuePair {
- switch v := e.src.(type) {
- case *objectVal:
- ret := make([]hcl.KeyValuePair, len(v.Attrs))
- for i, jsonAttr := range v.Attrs {
- ret[i] = hcl.KeyValuePair{
- Key: &expression{src: &stringVal{
- Value: jsonAttr.Name,
- SrcRange: jsonAttr.NameRange,
- }},
- Value: &expression{src: jsonAttr.Value},
- }
- }
- return ret
- default:
- return nil
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go b/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go
deleted file mode 100644
index bbcce5b30..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/json/tokentype_string.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Code generated by "stringer -type tokenType scanner.go"; DO NOT EDIT.
-
-package json
-
-import "strconv"
-
-const _tokenType_name = "tokenInvalidtokenCommatokenColontokenEqualstokenKeywordtokenNumbertokenStringtokenBrackOtokenBrackCtokenBraceOtokenBraceCtokenEOF"
-
-var _tokenType_map = map[tokenType]string{
- 0: _tokenType_name[0:12],
- 44: _tokenType_name[12:22],
- 58: _tokenType_name[22:32],
- 61: _tokenType_name[32:43],
- 75: _tokenType_name[43:55],
- 78: _tokenType_name[55:66],
- 83: _tokenType_name[66:77],
- 91: _tokenType_name[77:88],
- 93: _tokenType_name[88:99],
- 123: _tokenType_name[99:110],
- 125: _tokenType_name[110:121],
- 9220: _tokenType_name[121:129],
-}
-
-func (i tokenType) String() string {
- if str, ok := _tokenType_map[i]; ok {
- return str
- }
- return "tokenType(" + strconv.FormatInt(int64(i), 10) + ")"
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/merged.go b/vendor/github.com/hashicorp/hcl2/hcl/merged.go
deleted file mode 100644
index 96e62a58d..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/merged.go
+++ /dev/null
@@ -1,226 +0,0 @@
-package hcl
-
-import (
- "fmt"
-)
-
-// MergeFiles combines the given files to produce a single body that contains
-// configuration from all of the given files.
-//
-// The ordering of the given files decides the order in which contained
-// elements will be returned. If any top-level attributes are defined with
-// the same name across multiple files, a diagnostic will be produced from
-// the Content and PartialContent methods describing this error in a
-// user-friendly way.
-func MergeFiles(files []*File) Body {
- var bodies []Body
- for _, file := range files {
- bodies = append(bodies, file.Body)
- }
- return MergeBodies(bodies)
-}
-
-// MergeBodies is like MergeFiles except it deals directly with bodies, rather
-// than with entire files.
-func MergeBodies(bodies []Body) Body {
- if len(bodies) == 0 {
- // Swap out for our singleton empty body, to reduce the number of
- // empty slices we have hanging around.
- return emptyBody
- }
-
- // If any of the given bodies are already merged bodies, we'll unpack
- // to flatten to a single mergedBodies, since that's conceptually simpler.
- // This also, as a side-effect, eliminates any empty bodies, since
- // empties are merged bodies with no inner bodies.
- var newLen int
- var flatten bool
- for _, body := range bodies {
- if children, merged := body.(mergedBodies); merged {
- newLen += len(children)
- flatten = true
- } else {
- newLen++
- }
- }
-
- if !flatten { // not just newLen == len, because we might have mergedBodies with single bodies inside
- return mergedBodies(bodies)
- }
-
- if newLen == 0 {
- // Don't allocate a new empty when we already have one
- return emptyBody
- }
-
- new := make([]Body, 0, newLen)
- for _, body := range bodies {
- if children, merged := body.(mergedBodies); merged {
- new = append(new, children...)
- } else {
- new = append(new, body)
- }
- }
- return mergedBodies(new)
-}
-
-var emptyBody = mergedBodies([]Body{})
-
-// EmptyBody returns a body with no content. This body can be used as a
-// placeholder when a body is required but no body content is available.
-func EmptyBody() Body {
- return emptyBody
-}
-
-type mergedBodies []Body
-
-// Content returns the content produced by applying the given schema to all
-// of the merged bodies and merging the result.
-//
-// Although required attributes _are_ supported, they should be used sparingly
-// with merged bodies since in this case there is no contextual information
-// with which to return good diagnostics. Applications working with merged
-// bodies may wish to mark all attributes as optional and then check for
-// required attributes afterwards, to produce better diagnostics.
-func (mb mergedBodies) Content(schema *BodySchema) (*BodyContent, Diagnostics) {
- // the returned body will always be empty in this case, because mergedContent
- // will only ever call Content on the child bodies.
- content, _, diags := mb.mergedContent(schema, false)
- return content, diags
-}
-
-func (mb mergedBodies) PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics) {
- return mb.mergedContent(schema, true)
-}
-
-func (mb mergedBodies) JustAttributes() (Attributes, Diagnostics) {
- attrs := make(map[string]*Attribute)
- var diags Diagnostics
-
- for _, body := range mb {
- thisAttrs, thisDiags := body.JustAttributes()
-
- if len(thisDiags) != 0 {
- diags = append(diags, thisDiags...)
- }
-
- if thisAttrs != nil {
- for name, attr := range thisAttrs {
- if existing := attrs[name]; existing != nil {
- diags = diags.Append(&Diagnostic{
- Severity: DiagError,
- Summary: "Duplicate argument",
- Detail: fmt.Sprintf(
- "Argument %q was already set at %s",
- name, existing.NameRange.String(),
- ),
- Subject: &attr.NameRange,
- })
- continue
- }
-
- attrs[name] = attr
- }
- }
- }
-
- return attrs, diags
-}
-
-func (mb mergedBodies) MissingItemRange() Range {
- if len(mb) == 0 {
- // Nothing useful to return here, so we'll return some garbage.
- return Range{
- Filename: "",
- }
- }
-
- // arbitrarily use the first body's missing item range
- return mb[0].MissingItemRange()
-}
-
-func (mb mergedBodies) mergedContent(schema *BodySchema, partial bool) (*BodyContent, Body, Diagnostics) {
- // We need to produce a new schema with none of the attributes marked as
- // required, since _any one_ of our bodies can contribute an attribute value.
- // We'll separately check that all required attributes are present at
- // the end.
- mergedSchema := &BodySchema{
- Blocks: schema.Blocks,
- }
- for _, attrS := range schema.Attributes {
- mergedAttrS := attrS
- mergedAttrS.Required = false
- mergedSchema.Attributes = append(mergedSchema.Attributes, mergedAttrS)
- }
-
- var mergedLeftovers []Body
- content := &BodyContent{
- Attributes: map[string]*Attribute{},
- }
-
- var diags Diagnostics
- for _, body := range mb {
- var thisContent *BodyContent
- var thisLeftovers Body
- var thisDiags Diagnostics
-
- if partial {
- thisContent, thisLeftovers, thisDiags = body.PartialContent(mergedSchema)
- } else {
- thisContent, thisDiags = body.Content(mergedSchema)
- }
-
- if thisLeftovers != nil {
- mergedLeftovers = append(mergedLeftovers, thisLeftovers)
- }
- if len(thisDiags) != 0 {
- diags = append(diags, thisDiags...)
- }
-
- if thisContent.Attributes != nil {
- for name, attr := range thisContent.Attributes {
- if existing := content.Attributes[name]; existing != nil {
- diags = diags.Append(&Diagnostic{
- Severity: DiagError,
- Summary: "Duplicate argument",
- Detail: fmt.Sprintf(
- "Argument %q was already set at %s",
- name, existing.NameRange.String(),
- ),
- Subject: &attr.NameRange,
- })
- continue
- }
- content.Attributes[name] = attr
- }
- }
-
- if len(thisContent.Blocks) != 0 {
- content.Blocks = append(content.Blocks, thisContent.Blocks...)
- }
- }
-
- // Finally, we check for required attributes.
- for _, attrS := range schema.Attributes {
- if !attrS.Required {
- continue
- }
-
- if content.Attributes[attrS.Name] == nil {
- // We don't have any context here to produce a good diagnostic,
- // which is why we warn in the Content docstring to minimize the
- // use of required attributes on merged bodies.
- diags = diags.Append(&Diagnostic{
- Severity: DiagError,
- Summary: "Missing required argument",
- Detail: fmt.Sprintf(
- "The argument %q is required, but was not set.",
- attrS.Name,
- ),
- })
- }
- }
-
- leftoverBody := MergeBodies(mergedLeftovers)
- return content, leftoverBody, diags
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/ops.go b/vendor/github.com/hashicorp/hcl2/hcl/ops.go
deleted file mode 100644
index 5d2910c13..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/ops.go
+++ /dev/null
@@ -1,288 +0,0 @@
-package hcl
-
-import (
- "fmt"
- "math/big"
-
- "github.com/zclconf/go-cty/cty"
- "github.com/zclconf/go-cty/cty/convert"
-)
-
-// Index is a helper function that performs the same operation as the index
-// operator in the HCL expression language. That is, the result is the
-// same as it would be for collection[key] in a configuration expression.
-//
-// This is exported so that applications can perform indexing in a manner
-// consistent with how the language does it, including handling of null and
-// unknown values, etc.
-//
-// Diagnostics are produced if the given combination of values is not valid.
-// Therefore a pointer to a source range must be provided to use in diagnostics,
-// though nil can be provided if the calling application is going to
-// ignore the subject of the returned diagnostics anyway.
-func Index(collection, key cty.Value, srcRange *Range) (cty.Value, Diagnostics) {
- if collection.IsNull() {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Attempt to index null value",
- Detail: "This value is null, so it does not have any indices.",
- Subject: srcRange,
- },
- }
- }
- if key.IsNull() {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Invalid index",
- Detail: "Can't use a null value as an indexing key.",
- Subject: srcRange,
- },
- }
- }
- ty := collection.Type()
- kty := key.Type()
- if kty == cty.DynamicPseudoType || ty == cty.DynamicPseudoType {
- return cty.DynamicVal, nil
- }
-
- switch {
-
- case ty.IsListType() || ty.IsTupleType() || ty.IsMapType():
- var wantType cty.Type
- switch {
- case ty.IsListType() || ty.IsTupleType():
- wantType = cty.Number
- case ty.IsMapType():
- wantType = cty.String
- default:
- // should never happen
- panic("don't know what key type we want")
- }
-
- key, keyErr := convert.Convert(key, wantType)
- if keyErr != nil {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Invalid index",
- Detail: fmt.Sprintf(
- "The given key does not identify an element in this collection value: %s.",
- keyErr.Error(),
- ),
- Subject: srcRange,
- },
- }
- }
-
- has := collection.HasIndex(key)
- if !has.IsKnown() {
- if ty.IsTupleType() {
- return cty.DynamicVal, nil
- } else {
- return cty.UnknownVal(ty.ElementType()), nil
- }
- }
- if has.False() {
- // We have a more specialized error message for the situation of
- // using a fractional number to index into a sequence, because
- // that will tend to happen if the user is trying to use division
- // to calculate an index and not realizing that HCL does float
- // division rather than integer division.
- if (ty.IsListType() || ty.IsTupleType()) && key.Type().Equals(cty.Number) {
- if key.IsKnown() && !key.IsNull() {
- bf := key.AsBigFloat()
- if _, acc := bf.Int(nil); acc != big.Exact {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Invalid index",
- Detail: fmt.Sprintf("The given key does not identify an element in this collection value: indexing a sequence requires a whole number, but the given index (%g) has a fractional part.", bf),
- Subject: srcRange,
- },
- }
- }
- }
- }
-
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Invalid index",
- Detail: "The given key does not identify an element in this collection value.",
- Subject: srcRange,
- },
- }
- }
-
- return collection.Index(key), nil
-
- case ty.IsObjectType():
- key, keyErr := convert.Convert(key, cty.String)
- if keyErr != nil {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Invalid index",
- Detail: fmt.Sprintf(
- "The given key does not identify an element in this collection value: %s.",
- keyErr.Error(),
- ),
- Subject: srcRange,
- },
- }
- }
- if !collection.IsKnown() {
- return cty.DynamicVal, nil
- }
- if !key.IsKnown() {
- return cty.DynamicVal, nil
- }
-
- attrName := key.AsString()
-
- if !ty.HasAttribute(attrName) {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Invalid index",
- Detail: "The given key does not identify an element in this collection value.",
- Subject: srcRange,
- },
- }
- }
-
- return collection.GetAttr(attrName), nil
-
- default:
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Invalid index",
- Detail: "This value does not have any indices.",
- Subject: srcRange,
- },
- }
- }
-
-}
-
-// GetAttr is a helper function that performs the same operation as the
-// attribute access in the HCL expression language. That is, the result is the
-// same as it would be for obj.attr in a configuration expression.
-//
-// This is exported so that applications can access attributes in a manner
-// consistent with how the language does it, including handling of null and
-// unknown values, etc.
-//
-// Diagnostics are produced if the given combination of values is not valid.
-// Therefore a pointer to a source range must be provided to use in diagnostics,
-// though nil can be provided if the calling application is going to
-// ignore the subject of the returned diagnostics anyway.
-func GetAttr(obj cty.Value, attrName string, srcRange *Range) (cty.Value, Diagnostics) {
- if obj.IsNull() {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Attempt to get attribute from null value",
- Detail: "This value is null, so it does not have any attributes.",
- Subject: srcRange,
- },
- }
- }
-
- ty := obj.Type()
- switch {
- case ty.IsObjectType():
- if !ty.HasAttribute(attrName) {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Unsupported attribute",
- Detail: fmt.Sprintf("This object does not have an attribute named %q.", attrName),
- Subject: srcRange,
- },
- }
- }
-
- if !obj.IsKnown() {
- return cty.UnknownVal(ty.AttributeType(attrName)), nil
- }
-
- return obj.GetAttr(attrName), nil
- case ty.IsMapType():
- if !obj.IsKnown() {
- return cty.UnknownVal(ty.ElementType()), nil
- }
-
- idx := cty.StringVal(attrName)
- if obj.HasIndex(idx).False() {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Missing map element",
- Detail: fmt.Sprintf("This map does not have an element with the key %q.", attrName),
- Subject: srcRange,
- },
- }
- }
-
- return obj.Index(idx), nil
- case ty == cty.DynamicPseudoType:
- return cty.DynamicVal, nil
- default:
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Unsupported attribute",
- Detail: "This value does not have any attributes.",
- Subject: srcRange,
- },
- }
- }
-
-}
-
-// ApplyPath is a helper function that applies a cty.Path to a value using the
-// indexing and attribute access operations from HCL.
-//
-// This is similar to calling the path's own Apply method, but ApplyPath uses
-// the more relaxed typing rules that apply to these operations in HCL, rather
-// than cty's relatively-strict rules. ApplyPath is implemented in terms of
-// Index and GetAttr, and so it has the same behavior for individual steps
-// but will stop and return any errors returned by intermediate steps.
-//
-// Diagnostics are produced if the given path cannot be applied to the given
-// value. Therefore a pointer to a source range must be provided to use in
-// diagnostics, though nil can be provided if the calling application is going
-// to ignore the subject of the returned diagnostics anyway.
-func ApplyPath(val cty.Value, path cty.Path, srcRange *Range) (cty.Value, Diagnostics) {
- var diags Diagnostics
-
- for _, step := range path {
- var stepDiags Diagnostics
- switch ts := step.(type) {
- case cty.IndexStep:
- val, stepDiags = Index(val, ts.Key, srcRange)
- case cty.GetAttrStep:
- val, stepDiags = GetAttr(val, ts.Name, srcRange)
- default:
- // Should never happen because the above are all of the step types.
- diags = diags.Append(&Diagnostic{
- Severity: DiagError,
- Summary: "Invalid path step",
- Detail: fmt.Sprintf("Go type %T is not a valid path step. This is a bug in this program.", step),
- Subject: srcRange,
- })
- return cty.DynamicVal, diags
- }
-
- diags = append(diags, stepDiags...)
- if stepDiags.HasErrors() {
- return cty.DynamicVal, diags
- }
- }
-
- return val, diags
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/pos.go b/vendor/github.com/hashicorp/hcl2/hcl/pos.go
deleted file mode 100644
index 06db8bfbd..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/pos.go
+++ /dev/null
@@ -1,275 +0,0 @@
-package hcl
-
-import "fmt"
-
-// Pos represents a single position in a source file, by addressing the
-// start byte of a unicode character encoded in UTF-8.
-//
-// Pos is generally used only in the context of a Range, which then defines
-// which source file the position is within.
-type Pos struct {
- // Line is the source code line where this position points. Lines are
- // counted starting at 1 and incremented for each newline character
- // encountered.
- Line int
-
- // Column is the source code column where this position points, in
- // unicode characters, with counting starting at 1.
- //
- // Column counts characters as they appear visually, so for example a
- // latin letter with a combining diacritic mark counts as one character.
- // This is intended for rendering visual markers against source code in
- // contexts where these diacritics would be rendered in a single character
- // cell. Technically speaking, Column is counting grapheme clusters as
- // used in unicode normalization.
- Column int
-
- // Byte is the byte offset into the file where the indicated character
- // begins. This is a zero-based offset to the first byte of the first
- // UTF-8 codepoint sequence in the character, and thus gives a position
- // that can be resolved _without_ awareness of Unicode characters.
- Byte int
-}
-
-// InitialPos is a suitable position to use to mark the start of a file.
-var InitialPos = Pos{Byte: 0, Line: 1, Column: 1}
-
-// Range represents a span of characters between two positions in a source
-// file.
-//
-// This struct is usually used by value in types that represent AST nodes,
-// but by pointer in types that refer to the positions of other objects,
-// such as in diagnostics.
-type Range struct {
- // Filename is the name of the file into which this range's positions
- // point.
- Filename string
-
- // Start and End represent the bounds of this range. Start is inclusive
- // and End is exclusive.
- Start, End Pos
-}
-
-// RangeBetween returns a new range that spans from the beginning of the
-// start range to the end of the end range.
-//
-// The result is meaningless if the two ranges do not belong to the same
-// source file or if the end range appears before the start range.
-func RangeBetween(start, end Range) Range {
- return Range{
- Filename: start.Filename,
- Start: start.Start,
- End: end.End,
- }
-}
-
-// RangeOver returns a new range that covers both of the given ranges and
-// possibly additional content between them if the two ranges do not overlap.
-//
-// If either range is empty then it is ignored. The result is empty if both
-// given ranges are empty.
-//
-// The result is meaningless if the two ranges do not belong to the same
-// source file.
-func RangeOver(a, b Range) Range {
- if a.Empty() {
- return b
- }
- if b.Empty() {
- return a
- }
-
- var start, end Pos
- if a.Start.Byte < b.Start.Byte {
- start = a.Start
- } else {
- start = b.Start
- }
- if a.End.Byte > b.End.Byte {
- end = a.End
- } else {
- end = b.End
- }
- return Range{
- Filename: a.Filename,
- Start: start,
- End: end,
- }
-}
-
-// ContainsPos returns true if and only if the given position is contained within
-// the receiving range.
-//
-// In the unlikely case that the line/column information disagrees with the byte
-// offset information in the given position or receiving range, the byte
-// offsets are given priority.
-func (r Range) ContainsPos(pos Pos) bool {
- return r.ContainsOffset(pos.Byte)
-}
-
-// ContainsOffset returns true if and only if the given byte offset is within
-// the receiving Range.
-func (r Range) ContainsOffset(offset int) bool {
- return offset >= r.Start.Byte && offset < r.End.Byte
-}
-
-// Ptr returns a pointer to a copy of the receiver. This is a convenience when
-// ranges in places where pointers are required, such as in Diagnostic, but
-// the range in question is returned from a method. Go would otherwise not
-// allow one to take the address of a function call.
-func (r Range) Ptr() *Range {
- return &r
-}
-
-// String returns a compact string representation of the receiver.
-// Callers should generally prefer to present a range more visually,
-// e.g. via markers directly on the relevant portion of source code.
-func (r Range) String() string {
- if r.Start.Line == r.End.Line {
- return fmt.Sprintf(
- "%s:%d,%d-%d",
- r.Filename,
- r.Start.Line, r.Start.Column,
- r.End.Column,
- )
- } else {
- return fmt.Sprintf(
- "%s:%d,%d-%d,%d",
- r.Filename,
- r.Start.Line, r.Start.Column,
- r.End.Line, r.End.Column,
- )
- }
-}
-
-func (r Range) Empty() bool {
- return r.Start.Byte == r.End.Byte
-}
-
-// CanSliceBytes returns true if SliceBytes could return an accurate
-// sub-slice of the given slice.
-//
-// This effectively tests whether the start and end offsets of the range
-// are within the bounds of the slice, and thus whether SliceBytes can be
-// trusted to produce an accurate start and end position within that slice.
-func (r Range) CanSliceBytes(b []byte) bool {
- switch {
- case r.Start.Byte < 0 || r.Start.Byte > len(b):
- return false
- case r.End.Byte < 0 || r.End.Byte > len(b):
- return false
- case r.End.Byte < r.Start.Byte:
- return false
- default:
- return true
- }
-}
-
-// SliceBytes returns a sub-slice of the given slice that is covered by the
-// receiving range, assuming that the given slice is the source code of the
-// file indicated by r.Filename.
-//
-// If the receiver refers to any byte offsets that are outside of the slice
-// then the result is constrained to the overlapping portion only, to avoid
-// a panic. Use CanSliceBytes to determine if the result is guaranteed to
-// be an accurate span of the requested range.
-func (r Range) SliceBytes(b []byte) []byte {
- start := r.Start.Byte
- end := r.End.Byte
- if start < 0 {
- start = 0
- } else if start > len(b) {
- start = len(b)
- }
- if end < 0 {
- end = 0
- } else if end > len(b) {
- end = len(b)
- }
- if end < start {
- end = start
- }
- return b[start:end]
-}
-
-// Overlaps returns true if the receiver and the other given range share any
-// characters in common.
-func (r Range) Overlaps(other Range) bool {
- switch {
- case r.Filename != other.Filename:
- // If the ranges are in different files then they can't possibly overlap
- return false
- case r.Empty() || other.Empty():
- // Empty ranges can never overlap
- return false
- case r.ContainsOffset(other.Start.Byte) || r.ContainsOffset(other.End.Byte):
- return true
- case other.ContainsOffset(r.Start.Byte) || other.ContainsOffset(r.End.Byte):
- return true
- default:
- return false
- }
-}
-
-// Overlap finds a range that is either identical to or a sub-range of both
-// the receiver and the other given range. It returns an empty range
-// within the receiver if there is no overlap between the two ranges.
-//
-// A non-empty result is either identical to or a subset of the receiver.
-func (r Range) Overlap(other Range) Range {
- if !r.Overlaps(other) {
- // Start == End indicates an empty range
- return Range{
- Filename: r.Filename,
- Start: r.Start,
- End: r.Start,
- }
- }
-
- var start, end Pos
- if r.Start.Byte > other.Start.Byte {
- start = r.Start
- } else {
- start = other.Start
- }
- if r.End.Byte < other.End.Byte {
- end = r.End
- } else {
- end = other.End
- }
-
- return Range{
- Filename: r.Filename,
- Start: start,
- End: end,
- }
-}
-
-// PartitionAround finds the portion of the given range that overlaps with
-// the receiver and returns three ranges: the portion of the receiver that
-// precedes the overlap, the overlap itself, and then the portion of the
-// receiver that comes after the overlap.
-//
-// If the two ranges do not overlap then all three returned ranges are empty.
-//
-// If the given range aligns with or extends beyond either extent of the
-// receiver then the corresponding outer range will be empty.
-func (r Range) PartitionAround(other Range) (before, overlap, after Range) {
- overlap = r.Overlap(other)
- if overlap.Empty() {
- return overlap, overlap, overlap
- }
-
- before = Range{
- Filename: r.Filename,
- Start: r.Start,
- End: overlap.Start,
- }
- after = Range{
- Filename: r.Filename,
- Start: overlap.End,
- End: r.End,
- }
-
- return before, overlap, after
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go b/vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go
deleted file mode 100644
index 17c0d7c6b..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/pos_scanner.go
+++ /dev/null
@@ -1,152 +0,0 @@
-package hcl
-
-import (
- "bufio"
- "bytes"
-
- "github.com/apparentlymart/go-textseg/textseg"
-)
-
-// RangeScanner is a helper that will scan over a buffer using a bufio.SplitFunc
-// and visit a source range for each token matched.
-//
-// For example, this can be used with bufio.ScanLines to find the source range
-// for each line in the file, skipping over the actual newline characters, which
-// may be useful when printing source code snippets as part of diagnostic
-// messages.
-//
-// The line and column information in the returned ranges is produced by
-// counting newline characters and grapheme clusters respectively, which
-// mimics the behavior we expect from a parser when producing ranges.
-type RangeScanner struct {
- filename string
- b []byte
- cb bufio.SplitFunc
-
- pos Pos // position of next byte to process in b
- cur Range // latest range
- tok []byte // slice of b that is covered by cur
- err error // error from last scan, if any
-}
-
-// NewRangeScanner creates a new RangeScanner for the given buffer, producing
-// ranges for the given filename.
-//
-// Since ranges have grapheme-cluster granularity rather than byte granularity,
-// the scanner will produce incorrect results if the given SplitFunc creates
-// tokens between grapheme cluster boundaries. In particular, it is incorrect
-// to use RangeScanner with bufio.ScanRunes because it will produce tokens
-// around individual UTF-8 sequences, which will split any multi-sequence
-// grapheme clusters.
-func NewRangeScanner(b []byte, filename string, cb bufio.SplitFunc) *RangeScanner {
- return NewRangeScannerFragment(b, filename, InitialPos, cb)
-}
-
-// NewRangeScannerFragment is like NewRangeScanner but the ranges it produces
-// will be offset by the given starting position, which is appropriate for
-// sub-slices of a file, whereas NewRangeScanner assumes it is scanning an
-// entire file.
-func NewRangeScannerFragment(b []byte, filename string, start Pos, cb bufio.SplitFunc) *RangeScanner {
- return &RangeScanner{
- filename: filename,
- b: b,
- cb: cb,
- pos: start,
- }
-}
-
-func (sc *RangeScanner) Scan() bool {
- if sc.pos.Byte >= len(sc.b) || sc.err != nil {
- // All done
- return false
- }
-
- // Since we're operating on an in-memory buffer, we always pass the whole
- // remainder of the buffer to our SplitFunc and set isEOF to let it know
- // that it has the whole thing.
- advance, token, err := sc.cb(sc.b[sc.pos.Byte:], true)
-
- // Since we are setting isEOF to true this should never happen, but
- // if it does we will just abort and assume the SplitFunc is misbehaving.
- if advance == 0 && token == nil && err == nil {
- return false
- }
-
- if err != nil {
- sc.err = err
- sc.cur = Range{
- Filename: sc.filename,
- Start: sc.pos,
- End: sc.pos,
- }
- sc.tok = nil
- return false
- }
-
- sc.tok = token
- start := sc.pos
- end := sc.pos
- new := sc.pos
-
-	// adv is similar to token but it also covers any additional bytes that
-	// the SplitFunc asked us to skip over via its advance return value.
- adv := sc.b[sc.pos.Byte : sc.pos.Byte+advance]
-
- // We now need to scan over our token to count the grapheme clusters
- // so we can correctly advance Column, and count the newlines so we
- // can correctly advance Line.
- advR := bytes.NewReader(adv)
- gsc := bufio.NewScanner(advR)
- advanced := 0
- gsc.Split(textseg.ScanGraphemeClusters)
- for gsc.Scan() {
- gr := gsc.Bytes()
- new.Byte += len(gr)
- new.Column++
-
- // We rely here on the fact that \r\n is considered a grapheme cluster
- // and so we don't need to worry about miscounting additional lines
- // on files with Windows-style line endings.
- if len(gr) != 0 && (gr[0] == '\r' || gr[0] == '\n') {
- new.Column = 1
- new.Line++
- }
-
- if advanced < len(token) {
- // If we've not yet found the end of our token then we'll
- // also push our "end" marker along.
- // (if advance > len(token) then we'll stop moving "end" early
- // so that the caller only sees the range covered by token.)
- end = new
- }
- advanced += len(gr)
- }
-
- sc.cur = Range{
- Filename: sc.filename,
- Start: start,
- End: end,
- }
- sc.pos = new
- return true
-}
-
-// Range returns a range that covers the latest token obtained after a call
-// to Scan returns true.
-func (sc *RangeScanner) Range() Range {
- return sc.cur
-}
-
-// Bytes returns the slice of the input buffer that is covered by the range
-// that would be returned by Range.
-func (sc *RangeScanner) Bytes() []byte {
- return sc.tok
-}
-
-// Err can be called after Scan returns false to determine if the latest read
-// resulted in an error, and obtain that error if so.
-func (sc *RangeScanner) Err() error {
- return sc.err
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/schema.go b/vendor/github.com/hashicorp/hcl2/hcl/schema.go
deleted file mode 100644
index 891257acb..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/schema.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package hcl
-
-// BlockHeaderSchema represents the shape of a block header, and is
-// used for matching blocks within bodies.
-type BlockHeaderSchema struct {
- Type string
- LabelNames []string
-}
-
-// AttributeSchema represents the requirements for an attribute, and is used
-// for matching attributes within bodies.
-type AttributeSchema struct {
- Name string
- Required bool
-}
-
-// BodySchema represents the desired shallow structure of a body.
-type BodySchema struct {
- Attributes []AttributeSchema
- Blocks []BlockHeaderSchema
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/spec.md b/vendor/github.com/hashicorp/hcl2/hcl/spec.md
deleted file mode 100644
index 97ef61318..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/spec.md
+++ /dev/null
@@ -1,691 +0,0 @@
-# HCL Syntax-Agnostic Information Model
-
-This is the specification for the general information model (abstract types and
-semantics) for HCL. HCL is a system for defining configuration languages for
-applications. The HCL information model is designed to support multiple
-concrete syntaxes for configuration, each with a mapping to the model defined
-in this specification.
-
-The two primary syntaxes intended for use in conjunction with this model are
-[the HCL native syntax](./hclsyntax/spec.md) and [the JSON syntax](./json/spec.md).
-In principle other syntaxes are possible as long as either their language model
-is sufficiently rich to express the concepts described in this specification
-or the language targets a well-defined subset of the specification.
-
-## Structural Elements
-
-The primary structural element is the _body_, which is a container representing
-a set of zero or more _attributes_ and a set of zero or more _blocks_.
-
-A _configuration file_ is the top-level object, and will usually be produced
-by reading a file from disk and parsing it as a particular syntax. A
-configuration file has its own _body_, representing the top-level attributes
-and blocks.
-
-An _attribute_ is a name and value pair associated with a body. Attribute names
-are unique within a given body. Attribute values are provided as _expressions_,
-which are discussed in detail in a later section.
-
-A _block_ is a nested structure that has a _type name_, zero or more string
-_labels_ (e.g. identifiers), and a nested body.
-
-Together the structural elements create a hierarchical data structure, with
-attributes intended to represent the direct properties of a particular object
-in the calling application, and blocks intended to represent child objects
-of a particular object.
-
-## Body Content
-
-To support the expression of the HCL concepts in languages whose information
-model is a subset of HCL's, such as JSON, a _body_ is an opaque container
-whose content can only be accessed by providing information on the expected
-structure of the content.
-
-The specification for each syntax must describe how its physical constructs
-are mapped on to body content given a schema. For syntaxes that have
-first-class syntax distinguishing attributes and bodies this can be relatively
-straightforward, while more detailed mapping rules may be required in syntaxes
-where the representation of attributes vs. blocks is ambiguous.
-
-### Schema-driven Processing
-
-Schema-driven processing is the primary way to access body content.
-A _body schema_ is a description of what is expected within a particular body,
-which can then be used to extract the _body content_, which then provides
-access to the specific attributes and blocks requested.
-
-A _body schema_ consists of a list of _attribute schemata_ and
-_block header schemata_:
-
-- An _attribute schema_ provides the name of an attribute and whether its
- presence is required.
-
-- A _block header schema_ provides a block type name and the semantic names
- assigned to each of the labels of that block type, if any.
-
-Within a schema, it is an error to request the same attribute name twice or
-to request a block type whose name is also an attribute name. While this can
-in principle be supported in some syntaxes, in other syntaxes the attribute
-and block namespaces are combined and so an attribute cannot coexist with
-a block whose type name is identical to the attribute name.
-
-The result of applying a body schema to a body is _body content_, which
-consists of an _attribute map_ and a _block sequence_:
-
-- The _attribute map_ is a map data structure whose keys are attribute names
- and whose values are _expressions_ that represent the corresponding attribute
- values.
-
-- The _block sequence_ is an ordered sequence of blocks, with each specifying
- a block _type name_, the sequence of _labels_ specified for the block,
- and the body object (not body _content_) representing the block's own body.
-
-After obtaining _body content_, the calling application may continue processing
-by evaluating attribute expressions and/or recursively applying further
-schema-driven processing to the child block bodies.
-
-**Note:** The _body schema_ is intentionally minimal, to reduce the set of
-mapping rules that must be defined for each syntax. Higher-level utility
-libraries may be provided to assist in the construction of a schema and
-perform additional processing, such as automatically evaluating attribute
-expressions and assigning their result values into a data structure, or
-recursively applying a schema to child blocks. Such utilities are not part of
-this core specification and will vary depending on the capabilities and idiom
-of the implementation language.
-
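-As a concrete, non-normative illustration, schema-driven processing in the Go
-implementation looks roughly like the sketch below, using the `hcl` package
-together with its companion `hclparse` parser package. The configuration
-contents, the file name, and the `main` wrapper are illustrative assumptions,
-not part of this specification.
-
-```go
-package main
-
-import (
-    "fmt"
-
-    "github.com/hashicorp/hcl2/hcl"
-    "github.com/hashicorp/hcl2/hclparse"
-)
-
-func main() {
-    // Parse a small native-syntax configuration (contents are illustrative).
-    parser := hclparse.NewParser()
-    f, diags := parser.ParseHCL([]byte(`
-io_mode = "async"
-
-service "http" "web_proxy" {
-  listen_addr = "127.0.0.1:8080"
-}
-`), "example.hcl")
-    if diags.HasErrors() {
-        panic(diags.Error())
-    }
-
-    // A body schema: one attribute and one block type with two labels.
-    schema := &hcl.BodySchema{
-        Attributes: []hcl.AttributeSchema{
-            {Name: "io_mode"},
-        },
-        Blocks: []hcl.BlockHeaderSchema{
-            {Type: "service", LabelNames: []string{"type", "name"}},
-        },
-    }
-
-    // Exhaustive processing: anything not matched by the schema is an error.
-    content, diags := f.Body.Content(schema)
-    if diags.HasErrors() {
-        panic(diags.Error())
-    }
-    fmt.Printf("%d attributes, %d blocks\n", len(content.Attributes), len(content.Blocks))
-}
-```
-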
-### _Dynamic Attributes_ Processing
-
-The _schema-driven_ processing model is useful when the expected structure
-of a body is known a priori by the calling application. Some blocks are
-instead more free-form, such as a user-provided set of arbitrary key/value
-pairs.
-
-The alternative _dynamic attributes_ processing mode allows for this more
-ad-hoc approach. Processing in this mode behaves as if a schema had been
-constructed without any _block header schemata_ and with an attribute
-schema for each distinct key provided within the physical representation
-of the body.
-
-The means by which _distinct keys_ are identified is dependent on the
-physical syntax; this processing mode assumes that the syntax has a way
-to enumerate keys provided by the author and identify expressions that
-correspond with those keys, but does not define the means by which this is
-done.
-
-The result of _dynamic attributes_ processing is an _attribute map_ as
-defined in the previous section. No _block sequence_ is produced in this
-processing mode.
-
-### Partial Processing of Body Content
-
-Under _schema-driven processing_, by default the given schema is assumed
-to be exhaustive, such that any attribute or block not matched by schema
-elements is considered an error. This allows feedback about unsupported
-attributes and blocks (such as typos) to be provided.
-
-An alternative is _partial processing_, where any additional elements within
-the body are not considered an error.
-
-Under partial processing, the result is both body content as described
-above _and_ a new body that represents any body elements that remain after
-the schema has been processed.
-
-Specifically:
-
-- Any attribute whose name is specified in the schema is returned in body
- content and elided from the new body.
-
-- Any block whose type is specified in the schema is returned in body content
- and elided from the new body.
-
-- Any attribute or block _not_ meeting the above conditions is placed into
- the new body, unmodified.
-
-The new body can then be recursively processed using any of the body
-processing models. This facility allows different subsets of body content
-to be processed by different parts of the calling application.
-
-Processing a body in two steps — first partial processing of a source body,
-then exhaustive processing of the returned body — is equivalent to single-step
-processing with a schema that is the union of the schemata used
-across the two steps.
-
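-The two-step flow described above can be sketched in Go against the same
-package; the function name and the schema contents below are illustrative
-assumptions.
-
-```go
-package example
-
-import "github.com/hashicorp/hcl2/hcl"
-
-// processInTwoSteps sketches partial processing followed by exhaustive
-// processing of the leftover body.
-func processInTwoSteps(body hcl.Body) hcl.Diagnostics {
-    var diags hcl.Diagnostics
-
-    // Step 1: pull out io_mode only; everything else stays in remain.
-    _, remain, moreDiags := body.PartialContent(&hcl.BodySchema{
-        Attributes: []hcl.AttributeSchema{{Name: "io_mode"}},
-    })
-    diags = append(diags, moreDiags...)
-
-    // Step 2: exhaustive processing of the leftovers. Anything still
-    // unmatched is now reported, so the two steps together behave like a
-    // single pass with the union of both schemata.
-    _, moreDiags = remain.Content(&hcl.BodySchema{
-        Blocks: []hcl.BlockHeaderSchema{
-            {Type: "service", LabelNames: []string{"type", "name"}},
-        },
-    })
-    diags = append(diags, moreDiags...)
-
-    return diags
-}
-```
-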
-## Expressions
-
-Attribute values are represented by _expressions_. Depending on the concrete
-syntax in use, an expression may just be a literal value or it may describe
-a computation in terms of literal values, variables, and functions.
-
-Each syntax defines its own representation of expressions. For syntaxes based
-in languages that do not have any non-literal expression syntax, it is
-recommended to embed the template language from
-[the native syntax](./hclsyntax/spec.md) e.g. as a post-processing step on
-string literals.
-
-### Expression Evaluation
-
-In order to obtain a concrete value, each expression must be _evaluated_.
-Evaluation is performed in terms of an evaluation context, which
-consists of the following:
-
-- An _evaluation mode_, which is defined below.
-- A _variable scope_, which provides a set of named variables for use in
- expressions.
-- A _function table_, which provides a set of named functions for use in
- expressions.
-
-The _evaluation mode_ allows for two different interpretations of an
-expression:
-
-- In _literal-only mode_, variables and functions are not available and it
- is assumed that the calling application's intent is to treat the attribute
- value as a literal.
-
-- In _full expression mode_, variables and functions are defined and it is
- assumed that the calling application wishes to provide a full expression
- language for definition of the attribute value.
-
-The actual behavior of these two modes depends on the syntax in use. For
-languages with first-class expression syntax, these two modes may be considered
-equivalent, with _literal-only mode_ simply not defining any variables or
-functions. For languages that embed arbitrary expressions via string templates,
-_literal-only mode_ may disable such processing, allowing literal strings to
-pass through without interpretation as templates.
-
-Since literal-only mode does not support variables and functions, it is an
-error for the calling application to enable this mode and yet provide a
-variable scope and/or function table.
-
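-In the Go implementation, the evaluation context corresponds to
-`*hcl.EvalContext`; in the JSON syntax, passing a nil context causes string
-values to be returned verbatim rather than parsed as templates. The sketch
-below is illustrative only, and the variable names and values are
-assumptions.
-
-```go
-package example
-
-import (
-    "github.com/hashicorp/hcl2/hcl"
-    "github.com/zclconf/go-cty/cty"
-)
-
-// evalAttr evaluates an attribute expression in full expression mode by
-// supplying a variable scope; a function table could be added via the
-// Functions field of the context in the same way.
-func evalAttr(attr *hcl.Attribute) (cty.Value, hcl.Diagnostics) {
-    ctx := &hcl.EvalContext{
-        Variables: map[string]cty.Value{
-            "host": cty.StringVal("127.0.0.1"),
-            "port": cty.NumberIntVal(8080),
-        },
-    }
-    return attr.Expr.Value(ctx)
-}
-```
-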
-## Values and Value Types
-
-The result of expression evaluation is a _value_. Each value has a _type_,
-which is dynamically determined during evaluation. The _variable scope_ in
-the evaluation context is a map from variable name to value, using the same
-definition of value.
-
-The type system for HCL values is intended to be at a level of abstraction
-suitable for configuration of various applications. A well-defined,
-implementation-language-agnostic type system is defined to allow for
-consistent processing of configuration across many implementation languages.
-Concrete implementations may provide additional functionality to lower
-HCL values and types to corresponding native language types, which may then
-impose additional constraints on the values outside of the scope of this
-specification.
-
-Two values are _equal_ if and only if they have identical types and their
-values are equal according to the rules of their shared type.
-
-### Primitive Types
-
-The primitive types are _string_, _bool_, and _number_.
-
-A _string_ is a sequence of unicode characters. Two strings are equal if
-NFC normalization ([UAX#15](http://unicode.org/reports/tr15/))
-of each string produces two identical sequences of characters.
-NFC normalization ensures that, for example, a precomposed combination of a
-latin letter and a diacritic compares equal with the letter followed by
-a combining diacritic.
-
-The _bool_ type has only two non-null values: _true_ and _false_. Two bool
-values are equal if and only if they are either both true or both false.
-
-A _number_ is an arbitrary-precision floating point value. An implementation
-_must_ make the full-precision values available to the calling application
-for interpretation into any suitable number representation. An implementation
-may in practice implement numbers with limited precision so long as the
-following constraints are met:
-
-- Integers are represented with at least 256 bits.
-- Non-integer numbers are represented as floating point values with a
- mantissa of at least 256 bits and a signed binary exponent of at least
- 16 bits.
-- An error is produced if an integer value given in source cannot be
- represented precisely.
-- An error is produced if a non-integer value cannot be represented due to
- overflow.
-- A non-integer number is rounded to the nearest possible value when a
- value is of too high a precision to be represented.
-
-The _number_ type also requires representation of both positive and negative
-infinity. A "not a number" (NaN) value is _not_ provided nor used.
-
-Two number values are equal if they are numerically equal to the precision
-associated with the number. Positive infinity and negative infinity are
-equal to themselves but not to each other. Positive infinity is greater than
-any other number value, and negative infinity is less than any other number
-value.
-
-Some syntaxes may be unable to represent numeric literals of arbitrary
-precision. This must be defined in the syntax specification as part of its
-description of mapping numeric literals to HCL values.
-
-### Structural Types
-
-_Structural types_ are types that are constructed by combining other types.
-Each distinct combination of other types is itself a distinct type. There
-are two structural type _kinds_:
-
-- _Object types_ are constructed of a set of named attributes, each of which
- has a type. Attribute names are always strings. (_Object_ attributes are a
- distinct idea from _body_ attributes, though calling applications
- may choose to blur the distinction by use of common naming schemes.)
-- _Tuple types_ are constructed of a sequence of elements, each of which
- has a type.
-
-Values of structural types are compared for equality in terms of their
-attributes or elements. A structural type value is equal to another if and
-only if all of the corresponding attributes or elements are equal.
-
-Two structural types are identical if they are of the same kind and
-have attributes or elements with identical types.
-
-### Collection Types
-
-_Collection types_ are types that combine together an arbitrary number of
-values of some other single type. There are three collection type _kinds_:
-
-- _List types_ represent ordered sequences of values of their element type.
-- _Map types_ represent values of their element type accessed via string keys.
-- _Set types_ represent unordered sets of distinct values of their element type.
-
-For each of these kinds and each distinct element type there is a distinct
-collection type. For example, "list of string" is a distinct type from
-"set of string", and "list of number" is a distinct type from "list of string".
-
-Values of collection types are compared for equality in terms of their
-elements. A collection type value is equal to another if and only if both
-have the same number of elements and their corresponding elements are equal.
-
-Two collection types are identical if they are of the same kind and have
-the same element type.
-
-### Null values
-
-Each type has a null value. The null value of a type represents the absence
-of a value, but with type information retained to allow for type checking.
-
-Null values are used primarily to represent the conditional absence of a
-body attribute. In a syntax with a conditional operator, one of the result
-values of that conditional may be null to indicate that the attribute should be
-considered not present in that case.
-
-Calling applications _should_ consider an attribute with a null value as
-equivalent to the value not being present at all.
-
-A null value of a particular type is equal to itself.
-
-### Unknown Values and the Dynamic Pseudo-type
-
-An _unknown value_ is a placeholder for a value that is not yet known.
-Operations on unknown values themselves return unknown values that have a
-type appropriate to the operation. For example, adding together two unknown
-numbers yields an unknown number, while comparing two unknown values of any
-type for equality yields an unknown bool.
-
-Each type has a distinct unknown value. For example, an unknown _number_ is
-a distinct value from an unknown _string_.
-
-_The dynamic pseudo-type_ is a placeholder for a type that is not yet known.
-The only values of this type are its null value and its unknown value. It is
-referred to as a _pseudo-type_ because it should not be considered a type in
-its own right, but rather as a placeholder for a type yet to be established.
-The unknown value of the dynamic pseudo-type is referred to as _the dynamic
-value_.
-
-Operations on values of the dynamic pseudo-type behave as if it is a value
-of the expected type, optimistically assuming that once the value and type
-are known they will be valid for the operation. For example, adding together
-a number and the dynamic value produces an unknown number.
-
-Unknown values and the dynamic pseudo-type can be used as a mechanism for
-partial type checking and semantic checking: by evaluating an expression with
-all variables set to an unknown value, the expression can be evaluated to
-produce an unknown value of a given type, or produce an error if any operation
-is provably invalid with only type information.
-
-Unknown values and the dynamic pseudo-type must never be returned from
-operations unless at least one operand is unknown or dynamic. Calling
-applications are guaranteed that unless the global scope includes unknown
-values, or the function table includes functions that return unknown values,
-no expression will evaluate to an unknown value. The calling application is
-thus in total control over the use and meaning of unknown values.
-
-The dynamic pseudo-type is identical only to itself.
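-
-As a non-normative illustration, the Go `cty` value system used by the HCL
-implementation behaves as described above; the particular values below are
-illustrative only.
-
-```go
-package main
-
-import (
-	"fmt"
-
-	"github.com/zclconf/go-cty/cty"
-)
-
-func main() {
-	a := cty.UnknownVal(cty.Number) // an unknown number
-	b := cty.NumberIntVal(2)
-
-	sum := a.Add(b)
-	fmt.Println(sum.IsKnown())                 // false: the result is unknown...
-	fmt.Println(sum.Type().Equals(cty.Number)) // true: ...but its type is known
-
-	// The dynamic value stands in for a value whose type is not yet known.
-	// Comparing it with anything yields an unknown bool.
-	eq := cty.DynamicVal.Equals(b)
-	fmt.Println(eq.Type().Equals(cty.Bool), eq.IsKnown()) // true false
-}
-```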
-
-### Capsule Types
-
-A _capsule type_ is a custom type defined by the calling application. A value
-of a capsule type is considered opaque to HCL, but may be accepted
-by functions provided by the calling application.
-
-A particular capsule type is identical only to itself. The equality of two
-values of the same capsule type is defined by the calling application. No
-other operations are supported for values of capsule types.
-
-Support for capsule types in a HCL implementation is optional. Capsule types
-are intended to allow calling applications to pass through values that are
-not part of the standard type system. For example, an application that
-deals with raw binary data may define a capsule type representing a byte
-array, and provide functions that produce or operate on byte arrays.
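-
-For example (non-normative), an application using the Go `cty` package could
-define such a byte-array capsule type as sketched below; the type name
-"bytes" is illustrative only.
-
-```go
-package main
-
-import (
-	"fmt"
-	"reflect"
-
-	"github.com/zclconf/go-cty/cty"
-)
-
-func main() {
-	// A capsule type wrapping the application's native []byte representation.
-	bytesType := cty.Capsule("bytes", reflect.TypeOf([]byte(nil)))
-
-	raw := []byte{0xde, 0xad, 0xbe, 0xef}
-	v := cty.CapsuleVal(bytesType, &raw)
-
-	// HCL treats the value as opaque; the application can unwrap it again,
-	// typically inside one of its own functions.
-	unwrapped := v.EncapsulatedValue().(*[]byte)
-	fmt.Printf("%x\n", *unwrapped) // deadbeef
-}
-```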
-
-### Type Specifications
-
-In certain situations it is necessary to define expectations about the
-type of a value. Whereas two _types_ have a commutative _identity_ relationship,
-a type has a non-commutative _matches_ relationship with a _type specification_.
-A type specification is, in practice, just a different interpretation of a
-type such that:
-
-- Any type _matches_ any type that it is identical to.
-
-- Any type _matches_ the dynamic pseudo-type.
-
-For example, given a type specification "list of dynamic pseudo-type", the
-concrete types "list of string" and "list of map" match, but the
-type "set of string" does not.
-
-## Functions and Function Calls
-
-The evaluation context used to evaluate an expression includes a function
-table, which represents an application-defined set of named functions
-available for use in expressions.
-
-Each syntax defines whether function calls are supported and how they are
-physically represented in source code, but the semantics of function calls are
-defined here to ensure consistent results across syntaxes and to allow
-applications to provide functions that are interoperable with all syntaxes.
-
-A _function_ is defined from the following elements:
-
-- Zero or more _positional parameters_, each with a name used for documentation,
- a type specification for expected argument values, and a flag for whether
- each of null values, unknown values, and values of the dynamic pseudo-type
- are accepted.
-
-- Zero or one _variadic parameter_, with the same structure as the _positional_
- parameters, which if present collects any additional arguments provided at
- the function call site.
-
-- A _result type definition_, which specifies the value type returned for each
- valid sequence of argument values.
-
-- A _result value definition_, which specifies the value returned for each
- valid sequence of argument values.
-
-A _function call_, regardless of source syntax, consists of a sequence of
-argument values. The argument values are each mapped to a corresponding
-parameter as follows:
-
-- For each of the function's positional parameters in sequence, take the next
- argument. If there are no more arguments, the call is erroneous.
-
-- If the function has a variadic parameter, take all remaining arguments that
- were not yet assigned to a positional parameter and collect them into
- a sequence of variadic arguments that each correspond to the variadic
- parameter.
-
-- If the function has _no_ variadic parameter, it is an error if any arguments
- remain after taking one argument for each positional parameter.
-
-After mapping each argument to a parameter, semantic checking proceeds
-for each argument:
-
-- If the argument value corresponding to a parameter does not match the
- parameter's type specification, the call is erroneous.
-
-- If the argument value corresponding to a parameter is null and the parameter
- is not specified as accepting nulls, the call is erroneous.
-
-- If the argument value corresponding to a parameter is the dynamic value
- and the parameter is not specified as accepting values of the dynamic
- pseudo-type, the call is valid but its _result type_ is forced to be the
- dynamic pseudo-type.
-
-- If neither of the above conditions holds for any argument, the call is
- valid and the function's result type definition is used to determine the
- call's _result type_. A function _may_ vary its result type depending on
- the argument _values_ as well as the argument _types_; for example, a
- function that decodes a JSON value will return a different result type
- depending on the data structure described by the given JSON source code.
-
-If semantic checking succeeds without error, the call is _executed_:
-
-- For each argument, if its value is unknown and its corresponding parameter
- is not specified as accepting unknowns, the _result value_ is forced to be an
- unknown value of the result type.
-
-- If the previous condition does not apply, the function's result value
- definition is used to determine the call's _result value_.
-
-The result of a function call expression is either an error, if one of the
-erroneous conditions above applies, or the _result value_.
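-
-The following non-normative sketch shows a function matching this model,
-defined with the Go `cty` function package that the HCL implementation builds
-on; the function name and its behavior are illustrative only.
-
-```go
-package main
-
-import (
-	"fmt"
-	"strings"
-
-	"github.com/zclconf/go-cty/cty"
-	"github.com/zclconf/go-cty/cty/function"
-)
-
-// upperFunc has one positional parameter of type string, a static result
-// type definition of string, and a result value definition that upcases
-// its argument.
-var upperFunc = function.New(&function.Spec{
-	Params: []function.Parameter{
-		{Name: "str", Type: cty.String},
-	},
-	Type: function.StaticReturnType(cty.String),
-	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
-		return cty.StringVal(strings.ToUpper(args[0].AsString())), nil
-	},
-})
-
-func main() {
-	result, err := upperFunc.Call([]cty.Value{cty.StringVal("hello")})
-	fmt.Println(result.AsString(), err) // HELLO <nil>
-
-	// Too few arguments: the call is erroneous before the result value
-	// definition is ever consulted.
-	_, err = upperFunc.Call(nil)
-	fmt.Println(err != nil) // true
-}
-```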
-
-## Type Conversions and Unification
-
-Values given in configuration may not always match the expectations of the
-operations applied to them or to the calling application. In such situations,
-automatic type conversion is attempted as a convenience to the user.
-
-Along with conversions to a _specified_ type, it is sometimes necessary to
-ensure that a selection of values are all of the _same_ type, without any
-constraint on which type that is. This is the process of _type unification_,
-which attempts to find the most general type that all of the given types can
-be converted to.
-
-Both type conversions and unification are defined in the syntax-agnostic
-model to ensure consistency of behavior between syntaxes.
-
-Type conversions are broadly characterized into two categories: _safe_ and
-_unsafe_. A conversion is "safe" if any distinct value of the source type
-has a corresponding distinct value in the target type. A conversion is
-"unsafe" if either the target type values are _not_ distinct (information
-may be lost in conversion) or if some values of the source type do not have
-any corresponding value in the target type. An unsafe conversion may result
-in an error.
-
-A given type can always be converted to itself, which is a no-op.
-
-### Conversion of Null Values
-
-All null values are safely convertible to a null value of any other type,
-regardless of other type-specific rules specified in the sections below.
-
-### Conversion to and from the Dynamic Pseudo-type
-
-Conversion _from_ the dynamic pseudo-type _to_ any other type always succeeds,
-producing an unknown value of the target type.
-
-Conversion of any value _to_ the dynamic pseudo-type is a no-op. The result
-is the input value, verbatim. This is the only situation where the conversion
-result value is not of the given target type.
-
-### Primitive Type Conversions
-
-Bidirectional conversions are available between the string and number types,
-and between the string and boolean types.
-
-The bool value true corresponds to the string containing the characters "true",
-while the bool value false corresponds to the string containing the characters
-"false". Conversion from bool to string is safe, while the converse is
-unsafe. The strings "1" and "0" are alternative string representations
-of true and false respectively. It is an error to convert a string other than
-the four in this paragraph to type bool.
-
-A number value is converted to string by translating its integer portion
-into a sequence of decimal digits (`0` through `9`), and then if it has a
-non-zero fractional part, a period `.` followed by a sequence of decimal
-digits representing its fractional part. No exponent portion is included.
-The number is converted at its full precision. Conversion from number to
-string is safe.
-
-A string is converted to a number value by reversing the above mapping.
-No exponent portion is allowed. Conversion from string to number is unsafe.
-It is an error to convert a string that does not comply with the expected
-syntax to type number.
-
-No direct conversion is available between the bool and number types.
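-
-A non-normative illustration of these rules using the Go `cty` conversion
-package; the literal values are illustrative only.
-
-```go
-package main
-
-import (
-	"fmt"
-
-	"github.com/zclconf/go-cty/cty"
-	"github.com/zclconf/go-cty/cty/convert"
-)
-
-func main() {
-	// Safe: bool to string.
-	s, _ := convert.Convert(cty.True, cty.String)
-	fmt.Println(s.AsString()) // true
-
-	// Unsafe, but successful for a well-formed numeric string.
-	n, _ := convert.Convert(cty.StringVal("12.5"), cty.Number)
-	fmt.Println(n.AsBigFloat().String()) // 12.5
-
-	// Unsafe and failing: the string does not use the expected syntax.
-	_, err := convert.Convert(cty.StringVal("hello"), cty.Number)
-	fmt.Println(err != nil) // true
-}
-```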
-
-### Collection and Structural Type Conversions
-
-Conversion from set types to list types is _safe_, as long as their
-element types are safely convertible. If the element types are _unsafely_
-convertible, then the collection conversion is also unsafe. Each set element
-becomes a corresponding list element, in an undefined order. Although no
-particular ordering is required, implementations _should_ produce list
-elements in a consistent order for a given input set, as a convenience
-to calling applications.
-
-Conversion from list types to set types is permitted as long as their element
-types are convertible, but is always _unsafe_. Each distinct list item
-becomes a distinct set item.
-If two list items are equal, one of the two is lost in the conversion.
-
-Conversion from tuple types to list types is permitted if all of the
-tuple element types are convertible to the target list element type.
-The safety of the conversion depends on the safety of each of the element
-conversions. Each element in turn is converted to the list element type,
-producing a list of identical length.
-
-Conversion from tuple types to set types is permitted, behaving as if the
-tuple type was first converted to a list of the same element type and then
-that list converted to the target set type.
-
-Conversion from object types to map types is permitted if all of the object
-attribute types are convertible to the target map element type. The safety
-of the conversion depends on the safety of each of the attribute conversions.
-Each attribute in turn is converted to the map element type, and map element
-keys are set to the name of each corresponding object attribute.
-
-Conversion from list and set types to tuple types is permitted, following
-the opposite steps as the converse conversions. Such conversions are _unsafe_.
-It is an error to convert a list or set to a tuple type whose number of
-elements does not match the list or set length.
-
-Conversion from map types to object types is permitted if each map key
-corresponds to an attribute in the target object type. It is an error to
-convert from a map value whose set of keys does not exactly match the target
-type's attributes. The conversion takes the opposite steps of the converse
-conversion.
-
-Conversion from one object type to another is permitted as long as the
-common attribute names have convertible types. Any attribute present in the
-target type but not in the source type is populated with a null value of
-the appropriate type.
-
-Conversion from one tuple type to another is permitted as long as the
-tuples have the same length and the elements have convertible types.
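-
-For example (non-normative), the Go `cty` conversion package converts a tuple
-of (string, number) to a list of string by converting each element in turn;
-the values below are illustrative only.
-
-```go
-package main
-
-import (
-	"fmt"
-
-	"github.com/zclconf/go-cty/cty"
-	"github.com/zclconf/go-cty/cty/convert"
-)
-
-func main() {
-	tup := cty.TupleVal([]cty.Value{cty.StringVal("a"), cty.NumberIntVal(1)})
-
-	l, err := convert.Convert(tup, cty.List(cty.String))
-	if err != nil {
-		panic(err)
-	}
-	for it := l.ElementIterator(); it.Next(); {
-		_, v := it.Element()
-		fmt.Println(v.AsString()) // "a", then "1"
-	}
-}
-```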
-
-### Type Unification
-
-Type unification is an operation that takes a list of types and attempts
-to find a single type to which they can all be converted. Since some
-type pairs have bidirectional conversions, preference is given to _safe_
-conversions. In technical terms, all possible types are arranged into
-a lattice, from which a most general supertype is selected where possible.
-
-The type resulting from type unification may be one of the input types, or
-it may be an entirely new type produced by combination of two or more
-input types.
-
-The following rules do not guarantee a valid result. In addition to these
-rules, unification fails if any of the given types are not convertible
-(per the above rules) to the selected result type.
-
-The following unification rules apply transitively. That is, if a rule is
-defined from A to B, and one from B to C, then A can unify to C.
-
-Number and bool types both unify with string by preferring string.
-
-Two collection types of the same kind unify according to the unification
-of their element types.
-
-List and set types unify by preferring the list type.
-
-Map and object types unify by preferring the object type.
-
-List, set and tuple types unify by preferring the tuple type.
-
-The dynamic pseudo-type unifies with any other type by selecting that other
-type. The dynamic pseudo-type is the result type only if _all_ input types
-are the dynamic pseudo-type.
-
-Two object types unify by constructing a new type whose attributes are
-the union of those of the two input types. Any common attributes themselves
-have their types unified.
-
-Two tuple types of the same length unify by constructing a new type of the
-same length whose elements are the unification of the corresponding elements
-in the two input types.
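-
-A non-normative illustration using the Go `cty` conversion package, which the
-HCL implementation uses for unification; the type combinations below are
-illustrative only.
-
-```go
-package main
-
-import (
-	"fmt"
-
-	"github.com/zclconf/go-cty/cty"
-	"github.com/zclconf/go-cty/cty/convert"
-)
-
-func main() {
-	// string and number unify by preferring string.
-	ty, convs := convert.Unify([]cty.Type{cty.String, cty.Number})
-	fmt.Println(ty.FriendlyName()) // string
-	fmt.Println(len(convs))        // 2: one (possibly no-op) conversion per input
-
-	// Types with no common type fail to unify.
-	ty, _ = convert.Unify([]cty.Type{cty.Bool, cty.List(cty.String)})
-	fmt.Println(ty == cty.NilType) // true
-}
-```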
-
-## Static Analysis
-
-In most applications, full expression evaluation is sufficient for understanding
-the provided configuration. However, some specialized applications require more
-direct access to the physical structures in the expressions, which can for
-example allow the construction of new language constructs in terms of the
-existing syntax elements.
-
-Since static analysis analyzes the physical structure of configuration, the
-details will vary depending on syntax. Each syntax must decide which of its
-physical structures corresponds to the following analyses, producing error
-diagnostics if they are applied to inappropriate expressions.
-
-The following are the required static analysis functions:
-
-- **Static List**: Require list/tuple construction syntax to be used and
- return a list of expressions for each of the elements given.
-
-- **Static Map**: Require map/object construction syntax to be used and
- return a list of key/value pairs -- both expressions -- for each of
- the elements given. The usual constraint that a map key must be a string
- must not apply to this analysis, thus allowing applications to interpret
- arbitrary keys as they see fit.
-
-- **Static Call**: Require function call syntax to be used and return an
- object describing the called function name and a list of expressions
- representing each of the call arguments.
-
-- **Static Traversal**: Require a reference to a symbol in the variable
- scope and return a description of the path from the root scope to the
- accessed attribute or index.
-
-The intent of a calling application using these features is to require a more
-rigid interpretation of the configuration than in expression evaluation.
-Syntax implementations should make use of the extra contextual information
-provided in order to make an intuitive mapping onto the constructs of the
-underlying syntax, possibly interpreting the expression slightly differently
-than it would be interpreted in normal evaluation.
-
-Each syntax must define which of its expression elements each of the analyses
-above applies to, and how those analyses behave given those expression elements.
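-
-As a non-normative illustration, the Go `hcl` package that accompanies this
-specification exposes static traversal and keyword analyses as
-`AbsTraversalForExpr` and `ExprAsKeyword`; the expression texts below are
-illustrative only.
-
-```go
-package main
-
-import (
-	"fmt"
-
-	"github.com/hashicorp/hcl2/hcl"
-	"github.com/hashicorp/hcl2/hcl/hclsyntax"
-)
-
-func main() {
-	// Static traversal analysis: require a variable reference rather than
-	// evaluating the expression.
-	expr, diags := hclsyntax.ParseExpression(
-		[]byte(`aws_instance.example.id`), "x.hcl", hcl.Pos{Line: 1, Column: 1},
-	)
-	if diags.HasErrors() {
-		panic(diags.Error())
-	}
-	traversal, trDiags := hcl.AbsTraversalForExpr(expr)
-	if !trDiags.HasErrors() {
-		fmt.Println(traversal.RootName(), len(traversal)) // aws_instance 3
-	}
-
-	// Keyword analysis: a single bare identifier.
-	kwExpr, _ := hclsyntax.ParseExpression([]byte(`allow`), "x.hcl", hcl.Pos{Line: 1, Column: 1})
-	fmt.Println(hcl.ExprAsKeyword(kwExpr)) // allow
-}
-```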
-
-## Implementation Considerations
-
-Implementations of this specification are free to adopt any strategy that
-produces behavior consistent with the specification. This non-normative
-section describes some possible implementation strategies that are consistent
-with the goals of this specification.
-
-### Language-agnosticism
-
-The language-agnosticism of this specification assumes that certain behaviors
-are implemented separately for each syntax:
-
-- Matching of a body schema with the physical elements of a body in the
- source language, to determine correspondence between physical constructs
- and schema elements.
-
-- Implementing the _dynamic attributes_ body processing mode by either
- interpreting all physical constructs as attributes or producing an error
- if non-attribute constructs are present.
-
-- Providing an evaluation function for all possible expressions that produces
- a value given an evaluation context.
-
-- Providing the static analysis functionality described above in a manner that
- makes sense within the convention of the syntax.
-
-The suggested implementation strategy is to use an implementation language's
-closest concept to an _abstract type_, _virtual type_ or _interface type_
-to represent both Body and Expression. Each language-specific implementation
-can then provide an implementation of each of these types wrapping AST nodes
-or other physical constructs from the language parser.
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/static_expr.go b/vendor/github.com/hashicorp/hcl2/hcl/static_expr.go
deleted file mode 100644
index 98ada87b6..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/static_expr.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package hcl
-
-import (
- "github.com/zclconf/go-cty/cty"
-)
-
-type staticExpr struct {
- val cty.Value
- rng Range
-}
-
-// StaticExpr returns an Expression that always evaluates to the given value.
-//
-// This is useful to substitute default values for expressions that are
-// not explicitly given in configuration and thus would otherwise have no
-// Expression to return.
-//
-// Since expressions are expected to have a source range, the caller must
-// provide one. Ideally this should be a real source range, but it can
-// be a synthetic one (with an empty-string filename) if no suitable range
-// is available.
-func StaticExpr(val cty.Value, rng Range) Expression {
- return staticExpr{val, rng}
-}
-
-func (e staticExpr) Value(ctx *EvalContext) (cty.Value, Diagnostics) {
- return e.val, nil
-}
-
-func (e staticExpr) Variables() []Traversal {
- return nil
-}
-
-func (e staticExpr) Range() Range {
- return e.rng
-}
-
-func (e staticExpr) StartRange() Range {
- return e.rng
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/structure.go b/vendor/github.com/hashicorp/hcl2/hcl/structure.go
deleted file mode 100644
index aab09457d..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/structure.go
+++ /dev/null
@@ -1,151 +0,0 @@
-package hcl
-
-import (
- "github.com/zclconf/go-cty/cty"
-)
-
-// File is the top-level node that results from parsing a HCL file.
-type File struct {
- Body Body
- Bytes []byte
-
- // Nav is used to integrate with the "hcled" editor integration package,
- // and with diagnostic information formatters. It is not for direct use
- // by a calling application.
- Nav interface{}
-}
-
-// Block represents a nested block within a Body.
-type Block struct {
- Type string
- Labels []string
- Body Body
-
- DefRange Range // Range that can be considered the "definition" for seeking in an editor
- TypeRange Range // Range for the block type declaration specifically.
- LabelRanges []Range // Ranges for the label values specifically.
-}
-
-// Blocks is a sequence of Block.
-type Blocks []*Block
-
-// Attributes is a set of attributes keyed by their names.
-type Attributes map[string]*Attribute
-
-// Body is a container for attributes and blocks. It serves as the primary
-// unit of hierarchical structure within configuration.
-//
-// The content of a body cannot be meaningfully interpreted without a schema,
-// so Body represents the raw body content and has methods that allow the
-// content to be extracted in terms of a given schema.
-type Body interface {
- // Content verifies that the entire body content conforms to the given
- // schema and then returns it, and/or returns diagnostics. The returned
- // body content is valid if non-nil, regardless of whether Diagnostics
- // are provided, but diagnostics should still be eventually shown to
- // the user.
- Content(schema *BodySchema) (*BodyContent, Diagnostics)
-
- // PartialContent is like Content except that it permits the configuration
- // to contain additional blocks or attributes not specified in the
- // schema. If any are present, the returned Body is non-nil and contains
- // the remaining items from the body that were not selected by the schema.
- PartialContent(schema *BodySchema) (*BodyContent, Body, Diagnostics)
-
- // JustAttributes attempts to interpret all of the contents of the body
- // as attributes, allowing for the contents to be accessed without a priori
- // knowledge of the structure.
- //
- // The behavior of this method depends on the body's source language.
- // Some languages, like JSON, can't distinguish between attributes and
- // blocks without schema hints, but for languages that _can_ error
- // diagnostics will be generated if any blocks are present in the body.
- //
- // Diagnostics may be produced for other reasons too, such as duplicate
- // declarations of the same attribute.
- JustAttributes() (Attributes, Diagnostics)
-
- // MissingItemRange returns a range that represents where a missing item
- // might hypothetically be inserted. This is used when producing
- // diagnostics about missing required attributes or blocks. Not all bodies
- // will have an obvious single insertion point, so the result here may
- // be rather arbitrary.
- MissingItemRange() Range
-}
-
-// BodyContent is the result of applying a BodySchema to a Body.
-type BodyContent struct {
- Attributes Attributes
- Blocks Blocks
-
- MissingItemRange Range
-}
-
-// Attribute represents an attribute from within a body.
-type Attribute struct {
- Name string
- Expr Expression
-
- Range Range
- NameRange Range
-}
-
-// Expression is a literal value or an expression provided in the
-// configuration, which can be evaluated within a scope to produce a value.
-type Expression interface {
- // Value returns the value resulting from evaluating the expression
- // in the given evaluation context.
- //
- // The context may be nil, in which case the expression may contain
- // only constants and diagnostics will be produced for any non-constant
- // sub-expressions. (The exact definition of this depends on the source
- // language.)
- //
- // The context may instead be set but have either its Variables or
- // Functions maps set to nil, in which case only use of these features
- // will return diagnostics.
- //
- // Different diagnostics are provided depending on whether the given
- // context maps are nil or empty. In the former case, the message
- // tells the user that variables/functions are not permitted at all,
- // while in the latter case usage will produce a "not found" error for
- // the specific symbol in question.
- Value(ctx *EvalContext) (cty.Value, Diagnostics)
-
- // Variables returns a list of variables referenced in the receiving
- // expression. These are expressed as absolute Traversals, so may include
- // additional information about how the variable is used, such as
- // attribute lookups, which the calling application can potentially use
- // to only selectively populate the scope.
- Variables() []Traversal
-
- Range() Range
- StartRange() Range
-}
-
-// OfType filters the receiving block sequence by block type name,
-// returning a new block sequence including only the blocks of the
-// requested type.
-func (els Blocks) OfType(typeName string) Blocks {
- ret := make(Blocks, 0)
- for _, el := range els {
- if el.Type == typeName {
- ret = append(ret, el)
- }
- }
- return ret
-}
-
-// ByType transforms the receiving block sequence into a map from type
-// name to block sequences of only that type.
-func (els Blocks) ByType() map[string]Blocks {
- ret := make(map[string]Blocks)
- for _, el := range els {
- ty := el.Type
- if ret[ty] == nil {
- ret[ty] = make(Blocks, 0, 1)
- }
- ret[ty] = append(ret[ty], el)
- }
- return ret
-}
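-
-// Example (sketch): decoding a body against a schema. The schema contents
-// and the "file" variable are hypothetical.
-//
-//	schema := &BodySchema{
-//		Attributes: []AttributeSchema{{Name: "region", Required: true}},
-//		Blocks:     []BlockHeaderSchema{{Type: "resource", LabelNames: []string{"type", "name"}}},
-//	}
-//	content, diags := file.Body.Content(schema)
-//	if diags.HasErrors() {
-//		// show diagnostics to the user
-//	}
-//	for _, block := range content.Blocks.OfType("resource") {
-//		// decode each block's Body in turn
-//	}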
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/structure_at_pos.go b/vendor/github.com/hashicorp/hcl2/hcl/structure_at_pos.go
deleted file mode 100644
index 8521814e5..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/structure_at_pos.go
+++ /dev/null
@@ -1,117 +0,0 @@
-package hcl
-
-// -----------------------------------------------------------------------------
-// The methods in this file all have the general pattern of making a best-effort
-// to find one or more constructs that contain a given source position.
-//
-// These all operate by delegating to an optional method of the same name and
-// signature on the file's root body, allowing each syntax to potentially
-// provide its own implementations of these. For syntaxes that don't implement
-// them, the result is always nil.
-// -----------------------------------------------------------------------------
-
-// BlocksAtPos attempts to find all of the blocks that contain the given
-// position, ordered so that the outermost block is first and the innermost
-// block is last. This is a best-effort method that may not be able to produce
-// a complete result for all positions or for all HCL syntaxes.
-//
-// If the returned slice is non-empty, the first element is guaranteed to
-// represent the same block as would be the result of OutermostBlockAtPos and
-// the last element the result of InnermostBlockAtPos. However, the
-// implementation may return two different objects describing the same block,
-// so comparison by pointer identity is not possible.
-//
-// The result is nil if no blocks at all contain the given position.
-func (f *File) BlocksAtPos(pos Pos) []*Block {
- // The root body of the file must implement this interface in order
- // to support BlocksAtPos.
- type Interface interface {
- BlocksAtPos(pos Pos) []*Block
- }
-
- impl, ok := f.Body.(Interface)
- if !ok {
- return nil
- }
- return impl.BlocksAtPos(pos)
-}
-
-// OutermostBlockAtPos attempts to find a top-level block in the receiving file
-// that contains the given position. This is a best-effort method that may not
-// be able to produce a result for all positions or for all HCL syntaxes.
-//
-// The result is nil if no single block could be selected for any reason.
-func (f *File) OutermostBlockAtPos(pos Pos) *Block {
- // The root body of the file must implement this interface in order
- // to support OutermostBlockAtPos.
- type Interface interface {
- OutermostBlockAtPos(pos Pos) *Block
- }
-
- impl, ok := f.Body.(Interface)
- if !ok {
- return nil
- }
- return impl.OutermostBlockAtPos(pos)
-}
-
-// InnermostBlockAtPos attempts to find the most deeply-nested block in the
-// receiving file that contains the given position. This is a best-effort
-// method that may not be able to produce a result for all positions or for
-// all HCL syntaxes.
-//
-// The result is nil if no single block could be selected for any reason.
-func (f *File) InnermostBlockAtPos(pos Pos) *Block {
- // The root body of the file must implement this interface in order
- // to support InnermostBlockAtPos.
- type Interface interface {
- InnermostBlockAtPos(pos Pos) *Block
- }
-
- impl, ok := f.Body.(Interface)
- if !ok {
- return nil
- }
- return impl.InnermostBlockAtPos(pos)
-}
-
-// OutermostExprAtPos attempts to find an expression in the receiving file
-// that contains the given position. This is a best-effort method that may not
-// be able to produce a result for all positions or for all HCL syntaxes.
-//
-// Since expressions are often nested inside one another, this method returns
-// the outermost "root" expression that is not contained by any other.
-//
-// The result is nil if no single expression could be selected for any reason.
-func (f *File) OutermostExprAtPos(pos Pos) Expression {
- // The root body of the file must implement this interface in order
- // to support OutermostExprAtPos.
- type Interface interface {
- OutermostExprAtPos(pos Pos) Expression
- }
-
- impl, ok := f.Body.(Interface)
- if !ok {
- return nil
- }
- return impl.OutermostExprAtPos(pos)
-}
-
-// AttributeAtPos attempts to find an attribute definition in the receiving
-// file that contains the given position. This is a best-effort method that may
-// not be able to produce a result for all positions or for all HCL syntaxes.
-//
-// The result is nil if no single attribute could be selected for any reason.
-func (f *File) AttributeAtPos(pos Pos) *Attribute {
- // The root body of the file must implement this interface in order
- // to support OutermostExprAtPos.
- type Interface interface {
- AttributeAtPos(pos Pos) *Attribute
- }
-
- impl, ok := f.Body.(Interface)
- if !ok {
- return nil
- }
- return impl.AttributeAtPos(pos)
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/traversal.go b/vendor/github.com/hashicorp/hcl2/hcl/traversal.go
deleted file mode 100644
index d71019700..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/traversal.go
+++ /dev/null
@@ -1,293 +0,0 @@
-package hcl
-
-import (
- "fmt"
-
- "github.com/zclconf/go-cty/cty"
-)
-
-// A Traversal is a description of traversing through a value through a
-// series of operations such as attribute lookup, index lookup, etc.
-//
-// It is used to look up values in scopes, for example.
-//
-// The traversal operations are implementations of interface Traverser.
-// This is a closed set of implementations, so the interface cannot be
-// implemented from outside this package.
-//
-// A traversal can be absolute (its first value is a symbol name) or relative
-// (starts from an existing value).
-type Traversal []Traverser
-
-// TraversalJoin appends a relative traversal to an absolute traversal to
-// produce a new absolute traversal.
-func TraversalJoin(abs Traversal, rel Traversal) Traversal {
- if abs.IsRelative() {
- panic("first argument to TraversalJoin must be absolute")
- }
- if !rel.IsRelative() {
- panic("second argument to TraversalJoin must be relative")
- }
-
- ret := make(Traversal, len(abs)+len(rel))
- copy(ret, abs)
- copy(ret[len(abs):], rel)
- return ret
-}
-
-// TraverseRel applies the receiving traversal to the given value, returning
-// the resulting value. This is supported only for relative traversals,
-// and will panic if applied to an absolute traversal.
-func (t Traversal) TraverseRel(val cty.Value) (cty.Value, Diagnostics) {
- if !t.IsRelative() {
- panic("can't use TraverseRel on an absolute traversal")
- }
-
- current := val
- var diags Diagnostics
- for _, tr := range t {
- var newDiags Diagnostics
- current, newDiags = tr.TraversalStep(current)
- diags = append(diags, newDiags...)
- if newDiags.HasErrors() {
- return cty.DynamicVal, diags
- }
- }
- return current, diags
-}
-
-// TraverseAbs applies the receiving traversal to the given eval context,
-// returning the resulting value. This is supported only for absolute
-// traversals, and will panic if applied to a relative traversal.
-func (t Traversal) TraverseAbs(ctx *EvalContext) (cty.Value, Diagnostics) {
- if t.IsRelative() {
- panic("can't use TraverseAbs on a relative traversal")
- }
-
- split := t.SimpleSplit()
- root := split.Abs[0].(TraverseRoot)
- name := root.Name
-
- thisCtx := ctx
- hasNonNil := false
- for thisCtx != nil {
- if thisCtx.Variables == nil {
- thisCtx = thisCtx.parent
- continue
- }
- hasNonNil = true
- val, exists := thisCtx.Variables[name]
- if exists {
- return split.Rel.TraverseRel(val)
- }
- thisCtx = thisCtx.parent
- }
-
- if !hasNonNil {
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Variables not allowed",
- Detail: "Variables may not be used here.",
- Subject: &root.SrcRange,
- },
- }
- }
-
- suggestions := make([]string, 0, len(ctx.Variables))
- thisCtx = ctx
- for thisCtx != nil {
- for k := range thisCtx.Variables {
- suggestions = append(suggestions, k)
- }
- thisCtx = thisCtx.parent
- }
- suggestion := nameSuggestion(name, suggestions)
- if suggestion != "" {
- suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
- }
-
- return cty.DynamicVal, Diagnostics{
- {
- Severity: DiagError,
- Summary: "Unknown variable",
- Detail: fmt.Sprintf("There is no variable named %q.%s", name, suggestion),
- Subject: &root.SrcRange,
- },
- }
-}
-
-// IsRelative returns true if the receiver is a relative traversal, or false
-// otherwise.
-func (t Traversal) IsRelative() bool {
- if len(t) == 0 {
- return true
- }
- if _, firstIsRoot := t[0].(TraverseRoot); firstIsRoot {
- return false
- }
- return true
-}
-
-// SimpleSplit returns a TraversalSplit where the name lookup is the absolute
-// part and the remainder is the relative part. Supported only for
-// absolute traversals, and will panic if applied to a relative traversal.
-//
-// This can be used by applications that have a relatively-simple variable
-// namespace where only the top-level is directly populated in the scope, with
-// everything else handled by relative lookups from those initial values.
-func (t Traversal) SimpleSplit() TraversalSplit {
- if t.IsRelative() {
- panic("can't use SimpleSplit on a relative traversal")
- }
- return TraversalSplit{
- Abs: t[0:1],
- Rel: t[1:],
- }
-}
-
-// RootName returns the root name for a absolute traversal. Will panic if
-// called on a relative traversal.
-func (t Traversal) RootName() string {
- if t.IsRelative() {
- panic("can't use RootName on a relative traversal")
-
- }
- return t[0].(TraverseRoot).Name
-}
-
-// SourceRange returns the source range for the traversal.
-func (t Traversal) SourceRange() Range {
- if len(t) == 0 {
- // Nothing useful to return here, but we'll return something
- // that's correctly-typed at least.
- return Range{}
- }
-
- return RangeBetween(t[0].SourceRange(), t[len(t)-1].SourceRange())
-}
-
-// TraversalSplit represents a pair of traversals, the first of which is
-// an absolute traversal and the second of which is relative to the first.
-//
-// This is used by calling applications that only populate prefixes of the
-// traversals in the scope, with Abs representing the part coming from the
-// scope and Rel representing the remaining steps once that part is
-// retrieved.
-type TraversalSplit struct {
- Abs Traversal
- Rel Traversal
-}
-
-// TraverseAbs traverses from a scope to the value resulting from the
-// absolute traversal.
-func (t TraversalSplit) TraverseAbs(ctx *EvalContext) (cty.Value, Diagnostics) {
- return t.Abs.TraverseAbs(ctx)
-}
-
-// TraverseRel traverses from a given value, assumed to be the result of
-// TraverseAbs on some scope, to a final result for the entire split traversal.
-func (t TraversalSplit) TraverseRel(val cty.Value) (cty.Value, Diagnostics) {
- return t.Rel.TraverseRel(val)
-}
-
-// Traverse is a convenience function to apply TraverseAbs followed by
-// TraverseRel.
-func (t TraversalSplit) Traverse(ctx *EvalContext) (cty.Value, Diagnostics) {
- v1, diags := t.TraverseAbs(ctx)
- if diags.HasErrors() {
- return cty.DynamicVal, diags
- }
- v2, newDiags := t.TraverseRel(v1)
- diags = append(diags, newDiags...)
- return v2, diags
-}
-
-// Join concatenates together the Abs and Rel parts to produce a single
-// absolute traversal.
-func (t TraversalSplit) Join() Traversal {
- return TraversalJoin(t.Abs, t.Rel)
-}
-
-// RootName returns the root name for the absolute part of the split.
-func (t TraversalSplit) RootName() string {
- return t.Abs.RootName()
-}
-
-// A Traverser is a step within a Traversal.
-type Traverser interface {
- TraversalStep(cty.Value) (cty.Value, Diagnostics)
- SourceRange() Range
- isTraverserSigil() isTraverser
-}
-
-// Embed this in a struct to declare it as a Traverser
-type isTraverser struct {
-}
-
-func (tr isTraverser) isTraverserSigil() isTraverser {
- return isTraverser{}
-}
-
-// TraverseRoot looks up a root name in a scope. It is used as the first step
-// of an absolute Traversal, and cannot itself be traversed directly.
-type TraverseRoot struct {
- isTraverser
- Name string
- SrcRange Range
-}
-
-// TraversalStep on a TraverseRoot immediately panics, because absolute
-// traversals cannot be directly traversed.
-func (tn TraverseRoot) TraversalStep(cty.Value) (cty.Value, Diagnostics) {
- panic("Cannot traverse an absolute traversal")
-}
-
-func (tn TraverseRoot) SourceRange() Range {
- return tn.SrcRange
-}
-
-// TraverseAttr looks up an attribute in its initial value.
-type TraverseAttr struct {
- isTraverser
- Name string
- SrcRange Range
-}
-
-func (tn TraverseAttr) TraversalStep(val cty.Value) (cty.Value, Diagnostics) {
- return GetAttr(val, tn.Name, &tn.SrcRange)
-}
-
-func (tn TraverseAttr) SourceRange() Range {
- return tn.SrcRange
-}
-
-// TraverseIndex applies the index operation to its initial value.
-type TraverseIndex struct {
- isTraverser
- Key cty.Value
- SrcRange Range
-}
-
-func (tn TraverseIndex) TraversalStep(val cty.Value) (cty.Value, Diagnostics) {
- return Index(val, tn.Key, &tn.SrcRange)
-}
-
-func (tn TraverseIndex) SourceRange() Range {
- return tn.SrcRange
-}
-
-// TraverseSplat applies the splat operation to its initial value.
-type TraverseSplat struct {
- isTraverser
- Each Traversal
- SrcRange Range
-}
-
-func (tn TraverseSplat) TraversalStep(val cty.Value) (cty.Value, Diagnostics) {
- panic("TraverseSplat not yet implemented")
-}
-
-func (tn TraverseSplat) SourceRange() Range {
- return tn.SrcRange
-}
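-
-// Example (sketch): resolving a value through a scope. The variable name
-// "instance" and the scope contents are hypothetical.
-//
-//	scope := &EvalContext{
-//		Variables: map[string]cty.Value{
-//			"instance": cty.ObjectVal(map[string]cty.Value{"id": cty.StringVal("i-123")}),
-//		},
-//	}
-//	traversal := Traversal{
-//		TraverseRoot{Name: "instance"},
-//		TraverseAttr{Name: "id"},
-//	}
-//	val, diags := traversal.TraverseAbs(scope) // cty.StringVal("i-123")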
diff --git a/vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go b/vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go
deleted file mode 100644
index f69d5fe9b..000000000
--- a/vendor/github.com/hashicorp/hcl2/hcl/traversal_for_expr.go
+++ /dev/null
@@ -1,124 +0,0 @@
-package hcl
-
-// AbsTraversalForExpr attempts to interpret the given expression as
-// an absolute traversal, or returns error diagnostic(s) if that is
-// not possible for the given expression.
-//
-// A particular Expression implementation can support this function by
-// offering a method called AsTraversal that takes no arguments and
-// returns either a valid absolute traversal or nil to indicate that
-// no traversal is possible. Alternatively, an implementation can support
-// UnwrapExpression to delegate handling of this function to a wrapped
-// Expression object.
-//
-// In most cases the calling application is interested in the value
-// that results from an expression, but in rarer cases the application
-// needs to see the name of the variable and subsequent
-// attributes/indexes itself, for example to allow users to give references
-// to the variables themselves rather than to their values. An implementer
-// of this function should at least support attribute and index steps.
-func AbsTraversalForExpr(expr Expression) (Traversal, Diagnostics) {
- type asTraversal interface {
- AsTraversal() Traversal
- }
-
- physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
- _, supported := expr.(asTraversal)
- return supported
- })
-
- if asT, supported := physExpr.(asTraversal); supported {
- if traversal := asT.AsTraversal(); traversal != nil {
- return traversal, nil
- }
- }
- return nil, Diagnostics{
- &Diagnostic{
- Severity: DiagError,
- Summary: "Invalid expression",
- Detail: "A single static variable reference is required: only attribute access and indexing with constant keys. No calculations, function calls, template expressions, etc are allowed here.",
- Subject: expr.Range().Ptr(),
- },
- }
-}
-
-// RelTraversalForExpr is similar to AbsTraversalForExpr but it returns
-// a relative traversal instead. Due to the nature of HCL expressions, the
-// first element of the returned traversal is always a TraverseAttr, and
-// then it will be followed by zero or more further traversal steps.
-//
-// Any expression accepted by AbsTraversalForExpr is also accepted by
-// RelTraversalForExpr.
-func RelTraversalForExpr(expr Expression) (Traversal, Diagnostics) {
- traversal, diags := AbsTraversalForExpr(expr)
- if len(traversal) > 0 {
- ret := make(Traversal, len(traversal))
- copy(ret, traversal)
- root := traversal[0].(TraverseRoot)
- ret[0] = TraverseAttr{
- Name: root.Name,
- SrcRange: root.SrcRange,
- }
- return ret, diags
- }
- return traversal, diags
-}
-
-// ExprAsKeyword attempts to interpret the given expression as a static keyword,
-// returning the keyword string if possible, and the empty string if not.
-//
-// A static keyword, for the sake of this function, is a single identifier.
-// For example, the following attribute has an expression that would produce
-// the keyword "foo":
-//
-// example = foo
-//
-// This function is a variant of AbsTraversalForExpr, which uses the same
-// interface on the given expression. This helper constrains the result
-// further by requiring only a single root identifier.
-//
-// This function is intended to be used with the following idiom, to recognize
-// situations where one of a fixed set of keywords is required and arbitrary
-// expressions are not allowed:
-//
-// switch hcl.ExprAsKeyword(expr) {
-// case "allow":
-// // (take suitable action for keyword "allow")
-// case "deny":
-// // (take suitable action for keyword "deny")
-// default:
-// diags = append(diags, &hcl.Diagnostic{
-// // ... "invalid keyword" diagnostic message ...
-// })
-// }
-//
-// The above approach will generate the same message for both the use of an
-// unrecognized keyword and for not using a keyword at all, which is usually
-// reasonable if the message specifies that the given value must be a keyword
-// from that fixed list.
-//
-// Note that in the native syntax the keywords "true", "false", and "null" are
-// recognized as literal values during parsing and so these reserved words
-// cannot be accepted as keywords by this function.
-//
-// Since interpreting an expression as a keyword bypasses usual expression
-// evaluation, it should be used sparingly for situations where e.g. one of
-// a fixed set of keywords is used in a structural way in a special attribute
-// to affect the further processing of a block.
-func ExprAsKeyword(expr Expression) string {
- type asTraversal interface {
- AsTraversal() Traversal
- }
-
- physExpr := UnwrapExpressionUntil(expr, func(expr Expression) bool {
- _, supported := expr.(asTraversal)
- return supported
- })
-
- if asT, supported := physExpr.(asTraversal); supported {
- if traversal := asT.AsTraversal(); len(traversal) == 1 {
- return traversal.RootName()
- }
- }
- return ""
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hclparse/parser.go b/vendor/github.com/hashicorp/hcl2/hclparse/parser.go
deleted file mode 100644
index 6d47f1268..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclparse/parser.go
+++ /dev/null
@@ -1,123 +0,0 @@
-package hclparse
-
-import (
- "fmt"
- "io/ioutil"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
- "github.com/hashicorp/hcl2/hcl/json"
-)
-
-// NOTE: This is the public interface for parsing. The actual parsers are
-// in other packages alongside this one, with this package just wrapping them
-// to provide a unified interface for the caller across all supported formats.
-
-// Parser is the main interface for parsing configuration files. As well as
-// parsing files, a parser also retains a registry of all of the files it
-// has parsed so that multiple attempts to parse the same file will return
-// the same object and so the collected files can be used when printing
-// diagnostics.
-//
-// Any diagnostics for parsing a file are only returned once on the first
-// call to parse that file. Callers are expected to collect up diagnostics
-// and present them together, so returning diagnostics for the same file
-// multiple times would create a confusing result.
-type Parser struct {
- files map[string]*hcl.File
-}
-
-// NewParser creates a new parser, ready to parse configuration files.
-func NewParser() *Parser {
- return &Parser{
- files: map[string]*hcl.File{},
- }
-}
-
-// ParseHCL parses the given buffer (which is assumed to have been loaded from
-// the given filename) as a native-syntax configuration file and returns the
-// hcl.File object representing it.
-func (p *Parser) ParseHCL(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
- if existing := p.files[filename]; existing != nil {
- return existing, nil
- }
-
- file, diags := hclsyntax.ParseConfig(src, filename, hcl.Pos{Byte: 0, Line: 1, Column: 1})
- p.files[filename] = file
- return file, diags
-}
-
-// ParseHCLFile reads the given filename and parses it as a native-syntax HCL
-// configuration file. An error diagnostic is returned if the given file
-// cannot be read.
-func (p *Parser) ParseHCLFile(filename string) (*hcl.File, hcl.Diagnostics) {
- if existing := p.files[filename]; existing != nil {
- return existing, nil
- }
-
- src, err := ioutil.ReadFile(filename)
- if err != nil {
- return nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Failed to read file",
- Detail: fmt.Sprintf("The configuration file %q could not be read.", filename),
- },
- }
- }
-
- return p.ParseHCL(src, filename)
-}
-
-// ParseJSON parses the given JSON buffer (which is assumed to have been loaded
-// from the given filename) and returns the hcl.File object representing it.
-func (p *Parser) ParseJSON(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
- if existing := p.files[filename]; existing != nil {
- return existing, nil
- }
-
- file, diags := json.Parse(src, filename)
- p.files[filename] = file
- return file, diags
-}
-
-// ParseJSONFile reads the given filename and parses it as JSON, similarly to
-// ParseJSON. An error diagnostic is returned if the given file cannot be read.
-func (p *Parser) ParseJSONFile(filename string) (*hcl.File, hcl.Diagnostics) {
- if existing := p.files[filename]; existing != nil {
- return existing, nil
- }
-
- file, diags := json.ParseFile(filename)
- p.files[filename] = file
- return file, diags
-}
-
-// AddFile allows a caller to record in a parser a file that was parsed some
-// other way, thus allowing it to be included in the registry of sources.
-func (p *Parser) AddFile(filename string, file *hcl.File) {
- p.files[filename] = file
-}
-
-// Sources returns a map from filenames to the raw source code that was
-// read from them. This is intended to be used, for example, to print
-// diagnostics with contextual information.
-//
-// The arrays underlying the returned slices should not be modified.
-func (p *Parser) Sources() map[string][]byte {
- ret := make(map[string][]byte)
- for fn, f := range p.files {
- ret[fn] = f.Bytes
- }
- return ret
-}
-
-// Files returns a map from filenames to the File objects produced from them.
-// This is intended to be used, for example, to print diagnostics with
-// contextual information.
-//
-// The returned map and all of the objects it refers to directly or indirectly
-// must not be modified.
-func (p *Parser) Files() map[string]*hcl.File {
- return p.files
-}
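-
-// Example (sketch): parsing a file and keeping its source for diagnostics.
-// The filename "config.hcl" is hypothetical.
-//
-//	p := NewParser()
-//	f, diags := p.ParseHCLFile("config.hcl")
-//	if diags.HasErrors() {
-//		// render diags using p.Sources() or p.Files() for context
-//	}
-//	_ = f // f.Body can then be decoded against a schema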
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/ast.go b/vendor/github.com/hashicorp/hcl2/hclwrite/ast.go
deleted file mode 100644
index 090416528..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/ast.go
+++ /dev/null
@@ -1,121 +0,0 @@
-package hclwrite
-
-import (
- "bytes"
- "io"
-)
-
-type File struct {
- inTree
-
- srcBytes []byte
- body *node
-}
-
-// NewEmptyFile constructs a new file with no content, ready to be mutated
-// by other calls that append to its body.
-func NewEmptyFile() *File {
- f := &File{
- inTree: newInTree(),
- }
- body := newBody()
- f.body = f.children.Append(body)
- return f
-}
-
-// Body returns the root body of the file, which contains the top-level
-// attributes and blocks.
-func (f *File) Body() *Body {
- return f.body.content.(*Body)
-}
-
-// WriteTo writes the tokens underlying the receiving file to the given writer.
-//
-// The tokens first have a simple formatting pass applied that adjusts only
-// the spaces between them.
-func (f *File) WriteTo(wr io.Writer) (int64, error) {
- tokens := f.inTree.children.BuildTokens(nil)
- format(tokens)
- return tokens.WriteTo(wr)
-}
-
-// Bytes returns a buffer containing the source code resulting from the
-// tokens underlying the receiving file. If any updates have been made via
-// the AST API, these will be reflected in the result.
-func (f *File) Bytes() []byte {
- buf := &bytes.Buffer{}
- f.WriteTo(buf)
- return buf.Bytes()
-}
-
-type comments struct {
- leafNode
-
- parent *node
- tokens Tokens
-}
-
-func newComments(tokens Tokens) *comments {
- return &comments{
- tokens: tokens,
- }
-}
-
-func (c *comments) BuildTokens(to Tokens) Tokens {
- return c.tokens.BuildTokens(to)
-}
-
-type identifier struct {
- leafNode
-
- parent *node
- token *Token
-}
-
-func newIdentifier(token *Token) *identifier {
- return &identifier{
- token: token,
- }
-}
-
-func (i *identifier) BuildTokens(to Tokens) Tokens {
- return append(to, i.token)
-}
-
-func (i *identifier) hasName(name string) bool {
- return name == string(i.token.Bytes)
-}
-
-type number struct {
- leafNode
-
- parent *node
- token *Token
-}
-
-func newNumber(token *Token) *number {
- return &number{
- token: token,
- }
-}
-
-func (n *number) BuildTokens(to Tokens) Tokens {
- return append(to, n.token)
-}
-
-type quoted struct {
- leafNode
-
- parent *node
- tokens Tokens
-}
-
-func newQuoted(tokens Tokens) *quoted {
-	return &quoted{
- tokens: tokens,
- }
-}
-
-func (q *quoted) BuildTokens(to Tokens) Tokens {
- return q.tokens.BuildTokens(to)
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/ast_attribute.go b/vendor/github.com/hashicorp/hcl2/hclwrite/ast_attribute.go
deleted file mode 100644
index 975fa7428..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/ast_attribute.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package hclwrite
-
-import (
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
-)
-
-type Attribute struct {
- inTree
-
- leadComments *node
- name *node
- expr *node
- lineComments *node
-}
-
-func newAttribute() *Attribute {
- return &Attribute{
- inTree: newInTree(),
- }
-}
-
-func (a *Attribute) init(name string, expr *Expression) {
- expr.assertUnattached()
-
- nameTok := newIdentToken(name)
- nameObj := newIdentifier(nameTok)
- a.leadComments = a.children.Append(newComments(nil))
- a.name = a.children.Append(nameObj)
- a.children.AppendUnstructuredTokens(Tokens{
- {
- Type: hclsyntax.TokenEqual,
- Bytes: []byte{'='},
- },
- })
- a.expr = a.children.Append(expr)
- a.expr.list = a.children
- a.lineComments = a.children.Append(newComments(nil))
- a.children.AppendUnstructuredTokens(Tokens{
- {
- Type: hclsyntax.TokenNewline,
- Bytes: []byte{'\n'},
- },
- })
-}
-
-func (a *Attribute) Expr() *Expression {
- return a.expr.content.(*Expression)
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/ast_block.go b/vendor/github.com/hashicorp/hcl2/hclwrite/ast_block.go
deleted file mode 100644
index d5fd32bd5..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/ast_block.go
+++ /dev/null
@@ -1,74 +0,0 @@
-package hclwrite
-
-import (
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
- "github.com/zclconf/go-cty/cty"
-)
-
-type Block struct {
- inTree
-
- leadComments *node
- typeName *node
- labels nodeSet
- open *node
- body *node
- close *node
-}
-
-func newBlock() *Block {
- return &Block{
- inTree: newInTree(),
- labels: newNodeSet(),
- }
-}
-
-// NewBlock constructs a new, empty block with the given type name and labels.
-func NewBlock(typeName string, labels []string) *Block {
- block := newBlock()
- block.init(typeName, labels)
- return block
-}
-
-func (b *Block) init(typeName string, labels []string) {
- nameTok := newIdentToken(typeName)
- nameObj := newIdentifier(nameTok)
- b.leadComments = b.children.Append(newComments(nil))
- b.typeName = b.children.Append(nameObj)
- for _, label := range labels {
- labelToks := TokensForValue(cty.StringVal(label))
- labelObj := newQuoted(labelToks)
- labelNode := b.children.Append(labelObj)
- b.labels.Add(labelNode)
- }
- b.open = b.children.AppendUnstructuredTokens(Tokens{
- {
- Type: hclsyntax.TokenOBrace,
- Bytes: []byte{'{'},
- },
- {
- Type: hclsyntax.TokenNewline,
- Bytes: []byte{'\n'},
- },
- })
- body := newBody() // initially totally empty; caller can append to it subsequently
- b.body = b.children.Append(body)
- b.close = b.children.AppendUnstructuredTokens(Tokens{
- {
- Type: hclsyntax.TokenCBrace,
- Bytes: []byte{'}'},
- },
- {
- Type: hclsyntax.TokenNewline,
- Bytes: []byte{'\n'},
- },
- })
-}
-
-// Body returns the body that represents the content of the receiving block.
-//
-// Appending to or otherwise modifying this body will make changes to the
-// tokens that are generated between the block's open and close braces.
-func (b *Block) Body() *Body {
- return b.body.content.(*Body)
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/ast_body.go b/vendor/github.com/hashicorp/hcl2/hclwrite/ast_body.go
deleted file mode 100644
index cf69fee21..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/ast_body.go
+++ /dev/null
@@ -1,153 +0,0 @@
-package hclwrite
-
-import (
- "github.com/hashicorp/hcl2/hcl"
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
- "github.com/zclconf/go-cty/cty"
-)
-
-type Body struct {
- inTree
-
- items nodeSet
-}
-
-func newBody() *Body {
- return &Body{
- inTree: newInTree(),
- items: newNodeSet(),
- }
-}
-
-func (b *Body) appendItem(c nodeContent) *node {
- nn := b.children.Append(c)
- b.items.Add(nn)
- return nn
-}
-
-func (b *Body) appendItemNode(nn *node) *node {
- nn.assertUnattached()
- b.children.AppendNode(nn)
- b.items.Add(nn)
- return nn
-}
-
-// Clear removes all of the items from the body, making it empty.
-func (b *Body) Clear() {
- b.children.Clear()
-}
-
-func (b *Body) AppendUnstructuredTokens(ts Tokens) {
- b.inTree.children.Append(ts)
-}
-
-// Attributes returns a new map of all of the attributes in the body, with
-// the attribute names as the keys.
-func (b *Body) Attributes() map[string]*Attribute {
- ret := make(map[string]*Attribute)
- for n := range b.items {
- if attr, isAttr := n.content.(*Attribute); isAttr {
- nameObj := attr.name.content.(*identifier)
- name := string(nameObj.token.Bytes)
- ret[name] = attr
- }
- }
- return ret
-}
-
-// Blocks returns a new slice of all the blocks in the body.
-func (b *Body) Blocks() []*Block {
- ret := make([]*Block, 0, len(b.items))
- for n := range b.items {
- if block, isBlock := n.content.(*Block); isBlock {
- ret = append(ret, block)
- }
- }
- return ret
-}
-
-// GetAttribute returns the attribute from the body that has the given name,
-// or returns nil if there is currently no matching attribute.
-func (b *Body) GetAttribute(name string) *Attribute {
- for n := range b.items {
- if attr, isAttr := n.content.(*Attribute); isAttr {
- nameObj := attr.name.content.(*identifier)
- if nameObj.hasName(name) {
- // We've found it!
- return attr
- }
- }
- }
-
- return nil
-}
-
-// SetAttributeValue either replaces the expression of an existing attribute
-// of the given name or adds a new attribute definition to the end of the block.
-//
-// The value is given as a cty.Value, and must therefore be a literal. To set
-// a variable reference or other traversal, use SetAttributeTraversal.
-//
-// The return value is the attribute that was either modified in-place or
-// created.
-func (b *Body) SetAttributeValue(name string, val cty.Value) *Attribute {
- attr := b.GetAttribute(name)
- expr := NewExpressionLiteral(val)
- if attr != nil {
- attr.expr = attr.expr.ReplaceWith(expr)
- } else {
-		attr = newAttribute()
- attr.init(name, expr)
- b.appendItem(attr)
- }
- return attr
-}
-
-// SetAttributeTraversal either replaces the expression of an existing attribute
-// of the given name or adds a new attribute definition to the end of the body.
-//
-// The new expression is given as a hcl.Traversal, which must be an absolute
-// traversal. To set a literal value, use SetAttributeValue.
-//
-// The return value is the attribute that was either modified in-place or
-// created.
-func (b *Body) SetAttributeTraversal(name string, traversal hcl.Traversal) *Attribute {
- attr := b.GetAttribute(name)
- expr := NewExpressionAbsTraversal(traversal)
- if attr != nil {
- attr.expr = attr.expr.ReplaceWith(expr)
- } else {
-		attr = newAttribute()
- attr.init(name, expr)
- b.appendItem(attr)
- }
- return attr
-}
-
-// AppendBlock appends an existing block (which must not be already attached
-// to a body) to the end of the receiving body.
-func (b *Body) AppendBlock(block *Block) *Block {
- b.appendItem(block)
- return block
-}
-
-// AppendNewBlock appends a new nested block to the end of the receiving body
-// with the given type name and labels.
-func (b *Body) AppendNewBlock(typeName string, labels []string) *Block {
- block := newBlock()
- block.init(typeName, labels)
- b.appendItem(block)
- return block
-}
-
-// AppendNewline appends a newline token to the end of the receiving body,
-// which generally serves as a separator between different sets of body
-// contents.
-func (b *Body) AppendNewline() {
- b.AppendUnstructuredTokens(Tokens{
- {
- Type: hclsyntax.TokenNewline,
- Bytes: []byte{'\n'},
- },
- })
-}
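(Illustrative note, not part of the patch: the Body and Block helpers deleted above are the core of hclwrite's generation API. The following is a minimal sketch of building a block in memory using only the exported identifiers shown in these files — NewBlock, Body, SetAttributeValue, SetAttributeTraversal, AppendNewBlock, and the promoted BuildTokens — against the same vendored import path; the raw token replay at the end is illustrative only, since canonical spacing is normally applied by the package's own writer.)

package main

import (
	"bytes"
	"fmt"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hclwrite"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	// Build a `service "web" { ... }` block entirely in memory.
	block := hclwrite.NewBlock("service", []string{"web"})
	body := block.Body()
	body.SetAttributeValue("replicas", cty.NumberIntVal(2))
	body.SetAttributeTraversal("image", hcl.Traversal{
		hcl.TraverseRoot{Name: "var"},
		hcl.TraverseAttr{Name: "image"},
	})

	// Nested blocks hang off the body, mirroring AppendNewBlock above.
	hc := body.AppendNewBlock("healthcheck", nil).Body()
	hc.SetAttributeValue("path", cty.StringVal("/healthz"))

	// Dump the writer tokens. Each token only records how many spaces
	// precede it; canonical formatting is normally applied by the package's
	// own writer, so this raw replay is intentionally unpolished.
	var buf bytes.Buffer
	for _, tok := range block.BuildTokens(nil) {
		buf.Write(bytes.Repeat([]byte(" "), tok.SpacesBefore))
		buf.Write(tok.Bytes)
	}
	fmt.Println(buf.String())
}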
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/ast_expression.go b/vendor/github.com/hashicorp/hcl2/hclwrite/ast_expression.go
deleted file mode 100644
index 62d89fbef..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/ast_expression.go
+++ /dev/null
@@ -1,201 +0,0 @@
-package hclwrite
-
-import (
- "fmt"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
- "github.com/zclconf/go-cty/cty"
-)
-
-type Expression struct {
- inTree
-
- absTraversals nodeSet
-}
-
-func newExpression() *Expression {
- return &Expression{
- inTree: newInTree(),
- absTraversals: newNodeSet(),
- }
-}
-
-// NewExpressionLiteral constructs an expression that represents the given
-// literal value.
-//
-// Since an unknown value cannot be represented in source code, this function
-// will panic if the given value is unknown or contains a nested unknown value.
-// Use val.IsWhollyKnown before calling to be sure.
-//
-// HCL native syntax does not directly represent lists, maps, and sets, and
-// instead relies on the automatic conversions to those collection types from
-// either list or tuple constructor syntax. Therefore converting collection
-// values to source code and re-reading them will lose type information, and
-// the reader must provide a suitable type at decode time to recover the
-// original value.
-func NewExpressionLiteral(val cty.Value) *Expression {
- toks := TokensForValue(val)
- expr := newExpression()
- expr.children.AppendUnstructuredTokens(toks)
- return expr
-}
-
-// NewExpressionAbsTraversal constructs an expression that represents the
-// given traversal, which must be absolute or this function will panic.
-func NewExpressionAbsTraversal(traversal hcl.Traversal) *Expression {
- if traversal.IsRelative() {
- panic("can't construct expression from relative traversal")
- }
-
- physT := newTraversal()
- rootName := traversal.RootName()
- steps := traversal[1:]
-
- {
- tn := newTraverseName()
- tn.name = tn.children.Append(newIdentifier(&Token{
- Type: hclsyntax.TokenIdent,
- Bytes: []byte(rootName),
- }))
- physT.steps.Add(physT.children.Append(tn))
- }
-
- for _, step := range steps {
- switch ts := step.(type) {
- case hcl.TraverseAttr:
- tn := newTraverseName()
- tn.children.AppendUnstructuredTokens(Tokens{
- {
- Type: hclsyntax.TokenDot,
- Bytes: []byte{'.'},
- },
- })
- tn.name = tn.children.Append(newIdentifier(&Token{
- Type: hclsyntax.TokenIdent,
- Bytes: []byte(ts.Name),
- }))
- physT.steps.Add(physT.children.Append(tn))
- case hcl.TraverseIndex:
- ti := newTraverseIndex()
- ti.children.AppendUnstructuredTokens(Tokens{
- {
- Type: hclsyntax.TokenOBrack,
- Bytes: []byte{'['},
- },
- })
- indexExpr := NewExpressionLiteral(ts.Key)
- ti.key = ti.children.Append(indexExpr)
- ti.children.AppendUnstructuredTokens(Tokens{
- {
- Type: hclsyntax.TokenCBrack,
- Bytes: []byte{']'},
- },
- })
- physT.steps.Add(physT.children.Append(ti))
- }
- }
-
- expr := newExpression()
- expr.absTraversals.Add(expr.children.Append(physT))
- return expr
-}
-
-// Variables returns the absolute traversals that exist within the receiving
-// expression.
-func (e *Expression) Variables() []*Traversal {
- nodes := e.absTraversals.List()
- ret := make([]*Traversal, len(nodes))
- for i, node := range nodes {
- ret[i] = node.content.(*Traversal)
- }
- return ret
-}
-
-// RenameVariablePrefix examines each of the absolute traversals in the
-// receiving expression to see if they have the given sequence of names as
-// a prefix. If so, they are updated in place to have the given
-// replacement names instead of that prefix.
-//
-// This can be used to implement symbol renaming. The calling application can
-// visit all relevant expressions in its input and apply the same renaming
-// to implement a global symbol rename.
-//
-// The search and replacement traversals must be the same length, or this
-// method will panic. Only attribute access operations can be matched and
-// replaced. Index steps never match the prefix.
-func (e *Expression) RenameVariablePrefix(search, replacement []string) {
- if len(search) != len(replacement) {
- panic(fmt.Sprintf("search and replacement length mismatch (%d and %d)", len(search), len(replacement)))
- }
-Traversals:
- for node := range e.absTraversals {
- traversal := node.content.(*Traversal)
- if len(traversal.steps) < len(search) {
- // If it's shorter then it can't have our prefix
- continue
- }
-
- stepNodes := traversal.steps.List()
- for i, name := range search {
- step, isName := stepNodes[i].content.(*TraverseName)
- if !isName {
- continue Traversals // only name nodes can match
- }
- foundNameBytes := step.name.content.(*identifier).token.Bytes
- if len(foundNameBytes) != len(name) {
- continue Traversals
- }
- if string(foundNameBytes) != name {
- continue Traversals
- }
- }
-
- // If we get here then the prefix matched, so now we'll swap in
- // the replacement strings.
- for i, name := range replacement {
- step := stepNodes[i].content.(*TraverseName)
- token := step.name.content.(*identifier).token
- token.Bytes = []byte(name)
- }
- }
-}
-
-// Traversal represents a sequence of variable, attribute, and/or index
-// operations.
-type Traversal struct {
- inTree
-
- steps nodeSet
-}
-
-func newTraversal() *Traversal {
- return &Traversal{
- inTree: newInTree(),
- steps: newNodeSet(),
- }
-}
-
-type TraverseName struct {
- inTree
-
- name *node
-}
-
-func newTraverseName() *TraverseName {
- return &TraverseName{
- inTree: newInTree(),
- }
-}
-
-type TraverseIndex struct {
- inTree
-
- key *node
-}
-
-func newTraverseIndex() *TraverseIndex {
- return &TraverseIndex{
- inTree: newInTree(),
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/doc.go b/vendor/github.com/hashicorp/hcl2/hclwrite/doc.go
deleted file mode 100644
index 56d5b7752..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/doc.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Package hclwrite deals with the problem of generating HCL configuration
-// and of making specific surgical changes to existing HCL configurations.
-//
-// It operates at a different level of abstraction than the main HCL parser
-// and AST, since details such as the placement of comments and newlines
-// are preserved when unchanged.
-//
-// The hclwrite API follows a similar principle to XML/HTML DOM, allowing nodes
-// to be read out, created and inserted, etc. Nodes represent syntax constructs
-// rather than semantic concepts.
-package hclwrite
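(Illustrative sketch of the DOM-like workflow the package comment describes: parse, surgically edit, re-serialize. ParseConfig, Blocks, Body and SetAttributeValue appear verbatim in the files in this diff; the Body() and Bytes() accessors on *File are assumed from the upstream package and are not shown here.)

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hclwrite"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	src := []byte("service \"web\" {\n  replicas = 2\n}\n")

	f, diags := hclwrite.ParseConfig(src, "example.hcl", hcl.Pos{Byte: 0, Line: 1, Column: 1})
	if diags.HasErrors() {
		log.Fatal(diags)
	}

	// Surgical change: bump one attribute, leave all other bytes untouched.
	// Body() and Bytes() on *File are assumed accessors not shown in this diff.
	for _, blk := range f.Body().Blocks() {
		blk.Body().SetAttributeValue("replicas", cty.NumberIntVal(3))
	}
	fmt.Printf("%s", f.Bytes())
}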
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/format.go b/vendor/github.com/hashicorp/hcl2/hclwrite/format.go
deleted file mode 100644
index 7111ebde2..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/format.go
+++ /dev/null
@@ -1,463 +0,0 @@
-package hclwrite
-
-import (
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
-)
-
-var inKeyword = hclsyntax.Keyword([]byte{'i', 'n'})
-
-// placeholder token used when we don't have a token but we don't want
-// to pass a real "nil" and complicate things with nil pointer checks
-var nilToken = &Token{
- Type: hclsyntax.TokenNil,
- Bytes: []byte{},
- SpacesBefore: 0,
-}
-
-// format rewrites tokens within the given sequence, in-place, to adjust the
-// whitespace around their content to achieve canonical formatting.
-func format(tokens Tokens) {
- // Formatting is a multi-pass process. More details on the passes below,
- // but this is the overview:
- // - adjust the leading space on each line to create appropriate
- // indentation
- // - adjust spaces between tokens in a single cell using a set of rules
- // - adjust the leading space in the "assign" and "comment" cells on each
- // line to vertically align with neighboring lines.
- // All of these steps operate in-place on the given tokens, so a caller
- // may collect a flat sequence of all of the tokens underlying an AST
- // and pass it here and we will then indirectly modify the AST itself.
- // Formatting must change only whitespace. Specifically, that means
- // changing the SpacesBefore attribute on a token while leaving the
- // other token attributes unchanged.
-
- lines := linesForFormat(tokens)
- formatIndent(lines)
- formatSpaces(lines)
- formatCells(lines)
-}
-
-func formatIndent(lines []formatLine) {
- // Our methodology for indents is to take the input one line at a time
- // and count the bracketing delimiters on each line. If a line has a net
- // increase in open brackets, we increase the indent level by one and
- // remember how many new openers we had. If the line has a net _decrease_,
- // we'll compare it to the most recent number of openers and decrease the
- // dedent level by one each time we pass an indent level remembered
- // earlier.
-	// The "indent stack" used here allows us to recognize degenerate
-	// input where brackets are not symmetrical within lines, and to avoid
-	// pushing things too far left or right, which would create confusion.
-
- // We'll start our indent stack at a reasonable capacity to minimize the
- // chance of us needing to grow it; 10 here means 10 levels of indent,
- // which should be more than enough for reasonable HCL uses.
- indents := make([]int, 0, 10)
-
- for i := range lines {
- line := &lines[i]
- if len(line.lead) == 0 {
- continue
- }
-
- if line.lead[0].Type == hclsyntax.TokenNewline {
- // Never place spaces before a newline
- line.lead[0].SpacesBefore = 0
- continue
- }
-
- netBrackets := 0
- for _, token := range line.lead {
- netBrackets += tokenBracketChange(token)
- if token.Type == hclsyntax.TokenOHeredoc {
- break
- }
- }
-
- for _, token := range line.assign {
- netBrackets += tokenBracketChange(token)
- }
-
- switch {
- case netBrackets > 0:
- line.lead[0].SpacesBefore = 2 * len(indents)
- indents = append(indents, netBrackets)
- case netBrackets < 0:
- closed := -netBrackets
- for closed > 0 && len(indents) > 0 {
- switch {
-
- case closed > indents[len(indents)-1]:
- closed -= indents[len(indents)-1]
- indents = indents[:len(indents)-1]
-
- case closed < indents[len(indents)-1]:
- indents[len(indents)-1] -= closed
- closed = 0
-
- default:
- indents = indents[:len(indents)-1]
- closed = 0
- }
- }
- line.lead[0].SpacesBefore = 2 * len(indents)
- default:
- line.lead[0].SpacesBefore = 2 * len(indents)
- }
- }
-}
-
-func formatSpaces(lines []formatLine) {
- for _, line := range lines {
- for i, token := range line.lead {
- var before, after *Token
- if i > 0 {
- before = line.lead[i-1]
- } else {
- before = nilToken
- }
- if i < (len(line.lead) - 1) {
- after = line.lead[i+1]
- } else {
- after = nilToken
- }
- if spaceAfterToken(token, before, after) {
- after.SpacesBefore = 1
- } else {
- after.SpacesBefore = 0
- }
- }
- for i, token := range line.assign {
- if i == 0 {
- // first token in "assign" always has one space before to
- // separate the equals sign from what it's assigning.
- token.SpacesBefore = 1
- }
-
- var before, after *Token
- if i > 0 {
- before = line.assign[i-1]
- } else {
- before = nilToken
- }
- if i < (len(line.assign) - 1) {
- after = line.assign[i+1]
- } else {
- after = nilToken
- }
- if spaceAfterToken(token, before, after) {
- after.SpacesBefore = 1
- } else {
- after.SpacesBefore = 0
- }
- }
-
- }
-}
-
-func formatCells(lines []formatLine) {
-
- chainStart := -1
- maxColumns := 0
-
- // We'll deal with the "assign" cell first, since moving that will
- // also impact the "comment" cell.
- closeAssignChain := func(i int) {
- for _, chainLine := range lines[chainStart:i] {
- columns := chainLine.lead.Columns()
- spaces := (maxColumns - columns) + 1
- chainLine.assign[0].SpacesBefore = spaces
- }
- chainStart = -1
- maxColumns = 0
- }
- for i, line := range lines {
- if line.assign == nil {
- if chainStart != -1 {
- closeAssignChain(i)
- }
- } else {
- if chainStart == -1 {
- chainStart = i
- }
- columns := line.lead.Columns()
- if columns > maxColumns {
- maxColumns = columns
- }
- }
- }
- if chainStart != -1 {
- closeAssignChain(len(lines))
- }
-
- // Now we'll deal with the comments
- closeCommentChain := func(i int) {
- for _, chainLine := range lines[chainStart:i] {
- columns := chainLine.lead.Columns() + chainLine.assign.Columns()
- spaces := (maxColumns - columns) + 1
- chainLine.comment[0].SpacesBefore = spaces
- }
- chainStart = -1
- maxColumns = 0
- }
- for i, line := range lines {
- if line.comment == nil {
- if chainStart != -1 {
- closeCommentChain(i)
- }
- } else {
- if chainStart == -1 {
- chainStart = i
- }
- columns := line.lead.Columns() + line.assign.Columns()
- if columns > maxColumns {
- maxColumns = columns
- }
- }
- }
- if chainStart != -1 {
- closeCommentChain(len(lines))
- }
-
-}
-
-// spaceAfterToken decides whether a particular subject token should have a
-// space after it when surrounded by the given before and after tokens.
-// "before" can be TokenNil, if the subject token is at the start of a sequence.
-func spaceAfterToken(subject, before, after *Token) bool {
- switch {
-
- case after.Type == hclsyntax.TokenNewline || after.Type == hclsyntax.TokenNil:
- // Never add spaces before a newline
- return false
-
- case subject.Type == hclsyntax.TokenIdent && after.Type == hclsyntax.TokenOParen:
- // Don't split a function name from open paren in a call
- return false
-
- case subject.Type == hclsyntax.TokenDot || after.Type == hclsyntax.TokenDot:
- // Don't use spaces around attribute access dots
- return false
-
- case after.Type == hclsyntax.TokenComma || after.Type == hclsyntax.TokenEllipsis:
- // No space right before a comma or ... in an argument list
- return false
-
- case subject.Type == hclsyntax.TokenComma:
- // Always a space after a comma
- return true
-
- case subject.Type == hclsyntax.TokenQuotedLit || subject.Type == hclsyntax.TokenStringLit || subject.Type == hclsyntax.TokenOQuote || subject.Type == hclsyntax.TokenOHeredoc || after.Type == hclsyntax.TokenQuotedLit || after.Type == hclsyntax.TokenStringLit || after.Type == hclsyntax.TokenCQuote || after.Type == hclsyntax.TokenCHeredoc:
- // No extra spaces within templates
- return false
-
- case inKeyword.TokenMatches(subject.asHCLSyntax()) && before.Type == hclsyntax.TokenIdent:
- // This is a special case for inside for expressions where a user
- // might want to use a literal tuple constructor:
- // [for x in [foo]: x]
- // ... in that case, we would normally produce in[foo] thinking that
- // in is a reference, but we'll recognize it as a keyword here instead
- // to make the result less confusing.
- return true
-
- case after.Type == hclsyntax.TokenOBrack && (subject.Type == hclsyntax.TokenIdent || subject.Type == hclsyntax.TokenNumberLit || tokenBracketChange(subject) < 0):
- return false
-
- case subject.Type == hclsyntax.TokenMinus:
- // Since a minus can either be subtraction or negation, and the latter
- // should _not_ have a space after it, we need to use some heuristics
- // to decide which case this is.
- // We guess that we have a negation if the token before doesn't look
- // like it could be the end of an expression.
-
- switch before.Type {
-
- case hclsyntax.TokenNil:
- // Minus at the start of input must be a negation
- return false
-
- case hclsyntax.TokenOParen, hclsyntax.TokenOBrace, hclsyntax.TokenOBrack, hclsyntax.TokenEqual, hclsyntax.TokenColon, hclsyntax.TokenComma, hclsyntax.TokenQuestion:
- // Minus immediately after an opening bracket or separator must be a negation.
- return false
-
- case hclsyntax.TokenPlus, hclsyntax.TokenStar, hclsyntax.TokenSlash, hclsyntax.TokenPercent, hclsyntax.TokenMinus:
- // Minus immediately after another arithmetic operator must be negation.
- return false
-
- case hclsyntax.TokenEqualOp, hclsyntax.TokenNotEqual, hclsyntax.TokenGreaterThan, hclsyntax.TokenGreaterThanEq, hclsyntax.TokenLessThan, hclsyntax.TokenLessThanEq:
- // Minus immediately after another comparison operator must be negation.
- return false
-
- case hclsyntax.TokenAnd, hclsyntax.TokenOr, hclsyntax.TokenBang:
- // Minus immediately after logical operator doesn't make sense but probably intended as negation.
- return false
-
- default:
- return true
- }
-
- case subject.Type == hclsyntax.TokenOBrace || after.Type == hclsyntax.TokenCBrace:
- // Unlike other bracket types, braces have spaces on both sides of them,
- // both in single-line nested blocks foo { bar = baz } and in object
- // constructor expressions foo = { bar = baz }.
- if subject.Type == hclsyntax.TokenOBrace && after.Type == hclsyntax.TokenCBrace {
- // An open brace followed by a close brace is an exception, however.
- // e.g. foo {} rather than foo { }
- return false
- }
- return true
-
- // In the unlikely event that an interpolation expression is just
- // a single object constructor, we'll put a space between the ${ and
- // the following { to make this more obvious, and then the same
- // thing for the two braces at the end.
- case (subject.Type == hclsyntax.TokenTemplateInterp || subject.Type == hclsyntax.TokenTemplateControl) && after.Type == hclsyntax.TokenOBrace:
- return true
- case subject.Type == hclsyntax.TokenCBrace && after.Type == hclsyntax.TokenTemplateSeqEnd:
- return true
-
- // Don't add spaces between interpolated items
- case subject.Type == hclsyntax.TokenTemplateSeqEnd && (after.Type == hclsyntax.TokenTemplateInterp || after.Type == hclsyntax.TokenTemplateControl):
- return false
-
- case tokenBracketChange(subject) > 0:
- // No spaces after open brackets
- return false
-
- case tokenBracketChange(after) < 0:
- // No spaces before close brackets
- return false
-
- default:
- // Most tokens are space-separated
- return true
-
- }
-}
-
-func linesForFormat(tokens Tokens) []formatLine {
- if len(tokens) == 0 {
- return make([]formatLine, 0)
- }
-
- // first we'll count our lines, so we can allocate the array for them in
- // a single block. (We want to minimize memory pressure in this codepath,
- // so it can be run somewhat-frequently by editor integrations.)
- lineCount := 1 // if there are zero newlines then there is one line
- for _, tok := range tokens {
- if tokenIsNewline(tok) {
- lineCount++
- }
- }
-
- // To start, we'll just put everything in the "lead" cell on each line,
- // and then do another pass over the lines afterwards to adjust.
- lines := make([]formatLine, lineCount)
- li := 0
- lineStart := 0
- for i, tok := range tokens {
- if tok.Type == hclsyntax.TokenEOF {
- // The EOF token doesn't belong to any line, and terminates the
- // token sequence.
- lines[li].lead = tokens[lineStart:i]
- break
- }
-
- if tokenIsNewline(tok) {
- lines[li].lead = tokens[lineStart : i+1]
- lineStart = i + 1
- li++
- }
- }
-
- // If a set of tokens doesn't end in TokenEOF (e.g. because it's a
- // fragment of tokens from the middle of a file) then we might fall
- // out here with a line still pending.
- if lineStart < len(tokens) {
- lines[li].lead = tokens[lineStart:]
- if lines[li].lead[len(lines[li].lead)-1].Type == hclsyntax.TokenEOF {
- lines[li].lead = lines[li].lead[:len(lines[li].lead)-1]
- }
- }
-
- // Now we'll pick off any trailing comments and attribute assignments
- // to shuffle off into the "comment" and "assign" cells.
- for i := range lines {
- line := &lines[i]
-
- if len(line.lead) == 0 {
- // if the line is empty then there's nothing for us to do
- // (this should happen only for the final line, because all other
- // lines would have a newline token of some kind)
- continue
- }
-
- if len(line.lead) > 1 && line.lead[len(line.lead)-1].Type == hclsyntax.TokenComment {
- line.comment = line.lead[len(line.lead)-1:]
- line.lead = line.lead[:len(line.lead)-1]
- }
-
- for i, tok := range line.lead {
- if i > 0 && tok.Type == hclsyntax.TokenEqual {
- // We only move the tokens into "assign" if the RHS seems to
- // be a whole expression, which we determine by counting
- // brackets. If there's a net positive number of brackets
- // then that suggests we're introducing a multi-line expression.
- netBrackets := 0
- for _, token := range line.lead[i:] {
- netBrackets += tokenBracketChange(token)
- }
-
- if netBrackets == 0 {
- line.assign = line.lead[i:]
- line.lead = line.lead[:i]
- }
- break
- }
- }
- }
-
- return lines
-}
-
-func tokenIsNewline(tok *Token) bool {
- if tok.Type == hclsyntax.TokenNewline {
- return true
- } else if tok.Type == hclsyntax.TokenComment {
- // Single line tokens (# and //) consume their terminating newline,
- // so we need to treat them as newline tokens as well.
- if len(tok.Bytes) > 0 && tok.Bytes[len(tok.Bytes)-1] == '\n' {
- return true
- }
- }
- return false
-}
-
-func tokenBracketChange(tok *Token) int {
- switch tok.Type {
- case hclsyntax.TokenOBrace, hclsyntax.TokenOBrack, hclsyntax.TokenOParen, hclsyntax.TokenTemplateControl, hclsyntax.TokenTemplateInterp:
- return 1
- case hclsyntax.TokenCBrace, hclsyntax.TokenCBrack, hclsyntax.TokenCParen, hclsyntax.TokenTemplateSeqEnd:
- return -1
- default:
- return 0
- }
-}
-
-// formatLine represents a single line of source code for formatting purposes,
-// splitting its tokens into up to three "cells":
-//
-// lead: always present, representing everything up to one of the others
-// assign: if line contains an attribute assignment, represents the tokens
-// starting at (and including) the equals symbol
-// comment: if line contains any non-comment tokens and ends with a
-// single-line comment token, represents the comment.
-//
-// When formatting, the leading spaces of the first token in each of these
-// cells are adjusted to vertically align their occurrences on consecutive
-// rows.
-type formatLine struct {
- lead Tokens
- assign Tokens
- comment Tokens
-}
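(For illustration of the cell model above: the exported Format helper — whose doc comment is truncated at the end of this diff, and which is assumed here to have the signature Format(src []byte) []byte — pads each line's "assign" cell so that consecutive '=' signs line up.)

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hclwrite"
)

func main() {
	src := []byte("ami=\"abc\"\ninstance_type   = \"t2.micro\"\n")

	// Assumed signature: Format(src []byte) []byte. The "assign" cells of
	// consecutive attribute lines are padded so the '=' signs align:
	//
	//   ami           = "abc"
	//   instance_type = "t2.micro"
	fmt.Printf("%s", hclwrite.Format(src))
}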
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/generate.go b/vendor/github.com/hashicorp/hcl2/hclwrite/generate.go
deleted file mode 100644
index d249cfdf9..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/generate.go
+++ /dev/null
@@ -1,250 +0,0 @@
-package hclwrite
-
-import (
- "fmt"
- "unicode"
- "unicode/utf8"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
- "github.com/zclconf/go-cty/cty"
-)
-
-// TokensForValue returns a sequence of tokens that represents the given
-// constant value.
-//
-// This function only supports types that are used by HCL. In particular, it
-// does not support capsule types and will panic if given one.
-//
-// It is not possible to express an unknown value in source code, so this
-// function will panic if the given value is unknown or contains any unknown
-// values. A caller can call the value's IsWhollyKnown method to verify that
-// no unknown values are present before calling TokensForValue.
-func TokensForValue(val cty.Value) Tokens {
- toks := appendTokensForValue(val, nil)
- format(toks) // fiddle with the SpacesBefore field to get canonical spacing
- return toks
-}
-
-// TokensForTraversal returns a sequence of tokens that represents the given
-// traversal.
-//
-// If the traversal is absolute then the result is a self-contained, valid
-// reference expression. If the traversal is relative then the returned tokens
-// could be appended to some other expression tokens to traverse into the
-// represented expression.
-func TokensForTraversal(traversal hcl.Traversal) Tokens {
- toks := appendTokensForTraversal(traversal, nil)
- format(toks) // fiddle with the SpacesBefore field to get canonical spacing
- return toks
-}
-
-func appendTokensForValue(val cty.Value, toks Tokens) Tokens {
- switch {
-
- case !val.IsKnown():
- panic("cannot produce tokens for unknown value")
-
- case val.IsNull():
- toks = append(toks, &Token{
- Type: hclsyntax.TokenIdent,
- Bytes: []byte(`null`),
- })
-
- case val.Type() == cty.Bool:
- var src []byte
- if val.True() {
- src = []byte(`true`)
- } else {
- src = []byte(`false`)
- }
- toks = append(toks, &Token{
- Type: hclsyntax.TokenIdent,
- Bytes: src,
- })
-
- case val.Type() == cty.Number:
- bf := val.AsBigFloat()
- srcStr := bf.Text('f', -1)
- toks = append(toks, &Token{
- Type: hclsyntax.TokenNumberLit,
- Bytes: []byte(srcStr),
- })
-
- case val.Type() == cty.String:
- // TODO: If it's a multi-line string ending in a newline, format
- // it as a HEREDOC instead.
- src := escapeQuotedStringLit(val.AsString())
- toks = append(toks, &Token{
- Type: hclsyntax.TokenOQuote,
- Bytes: []byte{'"'},
- })
- if len(src) > 0 {
- toks = append(toks, &Token{
- Type: hclsyntax.TokenQuotedLit,
- Bytes: src,
- })
- }
- toks = append(toks, &Token{
- Type: hclsyntax.TokenCQuote,
- Bytes: []byte{'"'},
- })
-
- case val.Type().IsListType() || val.Type().IsSetType() || val.Type().IsTupleType():
- toks = append(toks, &Token{
- Type: hclsyntax.TokenOBrack,
- Bytes: []byte{'['},
- })
-
- i := 0
- for it := val.ElementIterator(); it.Next(); {
- if i > 0 {
- toks = append(toks, &Token{
- Type: hclsyntax.TokenComma,
- Bytes: []byte{','},
- })
- }
- _, eVal := it.Element()
- toks = appendTokensForValue(eVal, toks)
- i++
- }
-
- toks = append(toks, &Token{
- Type: hclsyntax.TokenCBrack,
- Bytes: []byte{']'},
- })
-
- case val.Type().IsMapType() || val.Type().IsObjectType():
- toks = append(toks, &Token{
- Type: hclsyntax.TokenOBrace,
- Bytes: []byte{'{'},
- })
-
- i := 0
- for it := val.ElementIterator(); it.Next(); {
- if i > 0 {
- toks = append(toks, &Token{
- Type: hclsyntax.TokenComma,
- Bytes: []byte{','},
- })
- }
- eKey, eVal := it.Element()
- if hclsyntax.ValidIdentifier(eKey.AsString()) {
- toks = append(toks, &Token{
- Type: hclsyntax.TokenIdent,
- Bytes: []byte(eKey.AsString()),
- })
- } else {
- toks = appendTokensForValue(eKey, toks)
- }
- toks = append(toks, &Token{
- Type: hclsyntax.TokenEqual,
- Bytes: []byte{'='},
- })
- toks = appendTokensForValue(eVal, toks)
- i++
- }
-
- toks = append(toks, &Token{
- Type: hclsyntax.TokenCBrace,
- Bytes: []byte{'}'},
- })
-
- default:
- panic(fmt.Sprintf("cannot produce tokens for %#v", val))
- }
-
- return toks
-}
-
-func appendTokensForTraversal(traversal hcl.Traversal, toks Tokens) Tokens {
- for _, step := range traversal {
-		toks = appendTokensForTraversalStep(step, toks)
- }
- return toks
-}
-
-func appendTokensForTraversalStep(step hcl.Traverser, toks Tokens) Tokens {
- switch ts := step.(type) {
- case hcl.TraverseRoot:
- toks = append(toks, &Token{
- Type: hclsyntax.TokenIdent,
- Bytes: []byte(ts.Name),
- })
- case hcl.TraverseAttr:
- toks = append(
- toks,
- &Token{
- Type: hclsyntax.TokenDot,
- Bytes: []byte{'.'},
- },
- &Token{
- Type: hclsyntax.TokenIdent,
- Bytes: []byte(ts.Name),
- },
- )
- case hcl.TraverseIndex:
- toks = append(toks, &Token{
- Type: hclsyntax.TokenOBrack,
- Bytes: []byte{'['},
- })
-		toks = appendTokensForValue(ts.Key, toks)
- toks = append(toks, &Token{
- Type: hclsyntax.TokenCBrack,
- Bytes: []byte{']'},
- })
- default:
- panic(fmt.Sprintf("unsupported traversal step type %T", step))
-	}
-	return toks
-}
-
-func escapeQuotedStringLit(s string) []byte {
- if len(s) == 0 {
- return nil
- }
- buf := make([]byte, 0, len(s))
- for i, r := range s {
- switch r {
- case '\n':
- buf = append(buf, '\\', 'n')
- case '\r':
- buf = append(buf, '\\', 'r')
- case '\t':
- buf = append(buf, '\\', 't')
- case '"':
- buf = append(buf, '\\', '"')
- case '\\':
- buf = append(buf, '\\', '\\')
- case '$', '%':
- buf = appendRune(buf, r)
- remain := s[i+1:]
- if len(remain) > 0 && remain[0] == '{' {
- // Double up our template introducer symbol to escape it.
- buf = appendRune(buf, r)
- }
- default:
- if !unicode.IsPrint(r) {
- var fmted string
- if r < 65536 {
- fmted = fmt.Sprintf("\\u%04x", r)
- } else {
- fmted = fmt.Sprintf("\\U%08x", r)
- }
- buf = append(buf, fmted...)
- } else {
- buf = appendRune(buf, r)
- }
- }
- }
- return buf
-}
-
-func appendRune(b []byte, r rune) []byte {
- l := utf8.RuneLen(r)
- for i := 0; i < l; i++ {
- b = append(b, 0) // make room at the end of our buffer
- }
- ch := b[len(b)-l:]
- utf8.EncodeRune(ch, r)
- return b
-}
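(Illustrative sketch of TokensForValue, whose signature appears verbatim above. The loop replays each token's SpacesBefore count, already populated by format(), to recover canonical source text.)

package main

import (
	"bytes"
	"fmt"

	"github.com/hashicorp/hcl2/hclwrite"
	"github.com/zclconf/go-cty/cty"
)

func main() {
	val := cty.ObjectVal(map[string]cty.Value{
		"name":  cty.StringVal("web"),
		"count": cty.NumberIntVal(3),
		"zones": cty.ListVal([]cty.Value{cty.StringVal("eu-west-1a")}),
	})

	toks := hclwrite.TokensForValue(val)

	// Replay the recorded spacing to get source text, roughly:
	//   { count = 3, name = "web", zones = ["eu-west-1a"] }
	var buf bytes.Buffer
	for _, tok := range toks {
		buf.Write(bytes.Repeat([]byte(" "), tok.SpacesBefore))
		buf.Write(tok.Bytes)
	}
	fmt.Println(buf.String())
}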
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/native_node_sorter.go b/vendor/github.com/hashicorp/hcl2/hclwrite/native_node_sorter.go
deleted file mode 100644
index a13c0ec41..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/native_node_sorter.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package hclwrite
-
-import (
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
-)
-
-type nativeNodeSorter struct {
- Nodes []hclsyntax.Node
-}
-
-func (s nativeNodeSorter) Len() int {
- return len(s.Nodes)
-}
-
-func (s nativeNodeSorter) Less(i, j int) bool {
- rangeI := s.Nodes[i].Range()
- rangeJ := s.Nodes[j].Range()
- return rangeI.Start.Byte < rangeJ.Start.Byte
-}
-
-func (s nativeNodeSorter) Swap(i, j int) {
- s.Nodes[i], s.Nodes[j] = s.Nodes[j], s.Nodes[i]
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/node.go b/vendor/github.com/hashicorp/hcl2/hclwrite/node.go
deleted file mode 100644
index 71fd00faf..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/node.go
+++ /dev/null
@@ -1,236 +0,0 @@
-package hclwrite
-
-import (
- "fmt"
-
- "github.com/google/go-cmp/cmp"
-)
-
-// node represents a node in the AST.
-type node struct {
- content nodeContent
-
- list *nodes
- before, after *node
-}
-
-func newNode(c nodeContent) *node {
- return &node{
- content: c,
- }
-}
-
-func (n *node) Equal(other *node) bool {
- return cmp.Equal(n.content, other.content)
-}
-
-func (n *node) BuildTokens(to Tokens) Tokens {
- return n.content.BuildTokens(to)
-}
-
-// Detach removes the receiver from the list it currently belongs to. If the
-// node is not currently in a list, this is a no-op.
-func (n *node) Detach() {
- if n.list == nil {
- return
- }
- if n.before != nil {
- n.before.after = n.after
- }
- if n.after != nil {
- n.after.before = n.before
- }
- if n.list.first == n {
- n.list.first = n.after
- }
- if n.list.last == n {
- n.list.last = n.before
- }
- n.list = nil
- n.before = nil
- n.after = nil
-}
-
-// ReplaceWith removes the receiver from the list it currently belongs to and
-// inserts a new node with the given content in its place. If the node is not
-// currently in a list, this function will panic.
-//
-// The return value is the newly-constructed node, containing the given content.
-// After this function returns, the receiver is no longer attached to a list.
-func (n *node) ReplaceWith(c nodeContent) *node {
- if n.list == nil {
- panic("can't replace node that is not in a list")
- }
-
- before := n.before
- after := n.after
- list := n.list
- n.before, n.after, n.list = nil, nil, nil
-
- nn := newNode(c)
- nn.before = before
- nn.after = after
- nn.list = list
- if before != nil {
- before.after = nn
- }
- if after != nil {
- after.before = nn
- }
- return nn
-}
-
-func (n *node) assertUnattached() {
- if n.list != nil {
- panic(fmt.Sprintf("attempt to attach already-attached node %#v", n))
- }
-}
-
-// nodeContent is the interface type implemented by all AST content types.
-type nodeContent interface {
- walkChildNodes(w internalWalkFunc)
- BuildTokens(to Tokens) Tokens
-}
-
-// nodes is a list of nodes.
-type nodes struct {
- first, last *node
-}
-
-func (ns *nodes) BuildTokens(to Tokens) Tokens {
- for n := ns.first; n != nil; n = n.after {
- to = n.BuildTokens(to)
- }
- return to
-}
-
-func (ns *nodes) Clear() {
- ns.first = nil
- ns.last = nil
-}
-
-func (ns *nodes) Append(c nodeContent) *node {
- n := &node{
- content: c,
- }
- ns.AppendNode(n)
- n.list = ns
- return n
-}
-
-func (ns *nodes) AppendNode(n *node) {
- if ns.last != nil {
- n.before = ns.last
- ns.last.after = n
- }
- n.list = ns
- ns.last = n
- if ns.first == nil {
- ns.first = n
- }
-}
-
-func (ns *nodes) AppendUnstructuredTokens(tokens Tokens) *node {
- if len(tokens) == 0 {
- return nil
- }
- n := newNode(tokens)
- ns.AppendNode(n)
- n.list = ns
- return n
-}
-
-// nodeSet is an unordered set of nodes. It is used to describe a set of nodes
-// that all belong to the same list that have some role or characteristic
-// in common.
-type nodeSet map[*node]struct{}
-
-func newNodeSet() nodeSet {
- return make(nodeSet)
-}
-
-func (ns nodeSet) Has(n *node) bool {
- if ns == nil {
- return false
- }
- _, exists := ns[n]
- return exists
-}
-
-func (ns nodeSet) Add(n *node) {
- ns[n] = struct{}{}
-}
-
-func (ns nodeSet) Remove(n *node) {
- delete(ns, n)
-}
-
-func (ns nodeSet) List() []*node {
- if len(ns) == 0 {
- return nil
- }
-
- ret := make([]*node, 0, len(ns))
-
- // Determine which list we are working with. We assume here that all of
- // the nodes belong to the same list, since that is part of the contract
- // for nodeSet.
- var list *nodes
- for n := range ns {
- list = n.list
- break
- }
-
- // We recover the order by iterating over the whole list. This is not
- // the most efficient way to do it, but our node lists should always be
- // small so not worth making things more complex.
- for n := list.first; n != nil; n = n.after {
- if ns.Has(n) {
- ret = append(ret, n)
- }
- }
- return ret
-}
-
-type internalWalkFunc func(*node)
-
-// inTree can be embedded into a content struct that has child nodes to get
-// a standard implementation of the NodeContent interface and a record of
-// a potential parent node.
-type inTree struct {
- parent *node
- children *nodes
-}
-
-func newInTree() inTree {
- return inTree{
- children: &nodes{},
- }
-}
-
-func (it *inTree) assertUnattached() {
- if it.parent != nil {
- panic(fmt.Sprintf("node is already attached to %T", it.parent.content))
- }
-}
-
-func (it *inTree) walkChildNodes(w internalWalkFunc) {
- for n := it.children.first; n != nil; n = n.after {
- w(n)
- }
-}
-
-func (it *inTree) BuildTokens(to Tokens) Tokens {
- for n := it.children.first; n != nil; n = n.after {
- to = n.BuildTokens(to)
- }
- return to
-}
-
-// leafNode can be embedded into a content struct to give it a do-nothing
-// implementation of walkChildNodes
-type leafNode struct {
-}
-
-func (n *leafNode) walkChildNodes(w internalWalkFunc) {
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/parser.go b/vendor/github.com/hashicorp/hcl2/hclwrite/parser.go
deleted file mode 100644
index 1876818fd..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/parser.go
+++ /dev/null
@@ -1,594 +0,0 @@
-package hclwrite
-
-import (
- "fmt"
- "sort"
-
- "github.com/hashicorp/hcl2/hcl"
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
- "github.com/zclconf/go-cty/cty"
-)
-
-// Our "parser" here is actually not doing any parsing of its own. Instead,
-// it leans on the native parser in hclsyntax, and then uses the source ranges
-// from the AST to partition the raw token sequence to match the raw tokens
-// up to AST nodes.
-//
-// This strategy feels somewhat counter-intuitive, since most of the work the
-// parser does is thrown away here, but this strategy is chosen because the
-// normal parsing work done by hclsyntax is considered to be the "main case",
-// while modifying and re-printing source is more of an edge case, used only
-// in ancillary tools, and so it's good to keep all the main parsing logic
-// with the main case but keep all of the extra complexity of token wrangling
-// out of the main parser, which is already rather complex just serving the
-// use-cases it already serves.
-//
-// If the parsing step produces any errors, the returned File is nil because
-// we can't reliably extract tokens from the partial AST produced by an
-// erroneous parse.
-func parse(src []byte, filename string, start hcl.Pos) (*File, hcl.Diagnostics) {
- file, diags := hclsyntax.ParseConfig(src, filename, start)
- if diags.HasErrors() {
- return nil, diags
- }
-
- // To do our work here, we use the "native" tokens (those from hclsyntax)
- // to match against source ranges in the AST, but ultimately produce
- // slices from our sequence of "writer" tokens, which contain only
- // *relative* position information that is more appropriate for
- // transformation/writing use-cases.
- nativeTokens, diags := hclsyntax.LexConfig(src, filename, start)
- if diags.HasErrors() {
- // should never happen, since we would've caught these diags in
- // the first call above.
- return nil, diags
- }
- writerTokens := writerTokens(nativeTokens)
-
- from := inputTokens{
- nativeTokens: nativeTokens,
- writerTokens: writerTokens,
- }
-
- before, root, after := parseBody(file.Body.(*hclsyntax.Body), from)
- ret := &File{
- inTree: newInTree(),
-
- srcBytes: src,
- body: root,
- }
-
- nodes := ret.inTree.children
- nodes.Append(before.Tokens())
- nodes.AppendNode(root)
- nodes.Append(after.Tokens())
-
- return ret, diags
-}
-
-type inputTokens struct {
- nativeTokens hclsyntax.Tokens
- writerTokens Tokens
-}
-
-func (it inputTokens) Partition(rng hcl.Range) (before, within, after inputTokens) {
- start, end := partitionTokens(it.nativeTokens, rng)
- before = it.Slice(0, start)
- within = it.Slice(start, end)
- after = it.Slice(end, len(it.nativeTokens))
- return
-}
-
-func (it inputTokens) PartitionType(ty hclsyntax.TokenType) (before, within, after inputTokens) {
- for i, t := range it.writerTokens {
- if t.Type == ty {
- return it.Slice(0, i), it.Slice(i, i+1), it.Slice(i+1, len(it.nativeTokens))
- }
- }
- panic(fmt.Sprintf("didn't find any token of type %s", ty))
-}
-
-func (it inputTokens) PartitionTypeSingle(ty hclsyntax.TokenType) (before inputTokens, found *Token, after inputTokens) {
- before, within, after := it.PartitionType(ty)
- if within.Len() != 1 {
- panic("PartitionType found more than one token")
- }
- return before, within.Tokens()[0], after
-}
-
-// PartitionIncludingComments is like Partition except the returned "within"
-// range includes any lead and line comments associated with the range.
-func (it inputTokens) PartitionIncludingComments(rng hcl.Range) (before, within, after inputTokens) {
- start, end := partitionTokens(it.nativeTokens, rng)
- start = partitionLeadCommentTokens(it.nativeTokens[:start])
- _, afterNewline := partitionLineEndTokens(it.nativeTokens[end:])
- end += afterNewline
-
- before = it.Slice(0, start)
- within = it.Slice(start, end)
- after = it.Slice(end, len(it.nativeTokens))
- return
-
-}
-
-// PartitionBlockItem is similar to PartitionIncludeComments but it returns
-// the comments as separate token sequences so that they can be captured into
-// AST attributes. It makes assumptions that apply only to block items, so
-// should not be used for other constructs.
-func (it inputTokens) PartitionBlockItem(rng hcl.Range) (before, leadComments, within, lineComments, newline, after inputTokens) {
- before, within, after = it.Partition(rng)
- before, leadComments = before.PartitionLeadComments()
- lineComments, newline, after = after.PartitionLineEndTokens()
- return
-}
-
-func (it inputTokens) PartitionLeadComments() (before, within inputTokens) {
- start := partitionLeadCommentTokens(it.nativeTokens)
- before = it.Slice(0, start)
- within = it.Slice(start, len(it.nativeTokens))
- return
-}
-
-func (it inputTokens) PartitionLineEndTokens() (comments, newline, after inputTokens) {
- afterComments, afterNewline := partitionLineEndTokens(it.nativeTokens)
- comments = it.Slice(0, afterComments)
- newline = it.Slice(afterComments, afterNewline)
- after = it.Slice(afterNewline, len(it.nativeTokens))
- return
-}
-
-func (it inputTokens) Slice(start, end int) inputTokens {
- // When we slice, we create a new slice with no additional capacity because
- // we expect that these slices will be mutated in order to insert
- // new code into the AST, and we want to ensure that a new underlying
- // array gets allocated in that case, rather than writing into some
- // following slice and corrupting it.
- return inputTokens{
- nativeTokens: it.nativeTokens[start:end:end],
- writerTokens: it.writerTokens[start:end:end],
- }
-}
-
-func (it inputTokens) Len() int {
- return len(it.nativeTokens)
-}
-
-func (it inputTokens) Tokens() Tokens {
- return it.writerTokens
-}
-
-func (it inputTokens) Types() []hclsyntax.TokenType {
- ret := make([]hclsyntax.TokenType, len(it.nativeTokens))
- for i, tok := range it.nativeTokens {
- ret[i] = tok.Type
- }
- return ret
-}
-
-// parseBody locates the given body within the given input tokens and returns
-// the resulting *Body object as well as the tokens that appeared before and
-// after it.
-func parseBody(nativeBody *hclsyntax.Body, from inputTokens) (inputTokens, *node, inputTokens) {
- before, within, after := from.PartitionIncludingComments(nativeBody.SrcRange)
-
- // The main AST doesn't retain the original source ordering of the
- // body items, so we need to reconstruct that ordering by inspecting
- // their source ranges.
- nativeItems := make([]hclsyntax.Node, 0, len(nativeBody.Attributes)+len(nativeBody.Blocks))
- for _, nativeAttr := range nativeBody.Attributes {
- nativeItems = append(nativeItems, nativeAttr)
- }
- for _, nativeBlock := range nativeBody.Blocks {
- nativeItems = append(nativeItems, nativeBlock)
- }
- sort.Sort(nativeNodeSorter{nativeItems})
-
- body := &Body{
- inTree: newInTree(),
- items: newNodeSet(),
- }
-
- remain := within
- for _, nativeItem := range nativeItems {
- beforeItem, item, afterItem := parseBodyItem(nativeItem, remain)
-
- if beforeItem.Len() > 0 {
- body.AppendUnstructuredTokens(beforeItem.Tokens())
- }
- body.appendItemNode(item)
-
- remain = afterItem
- }
-
- if remain.Len() > 0 {
- body.AppendUnstructuredTokens(remain.Tokens())
- }
-
- return before, newNode(body), after
-}
-
-func parseBodyItem(nativeItem hclsyntax.Node, from inputTokens) (inputTokens, *node, inputTokens) {
- before, leadComments, within, lineComments, newline, after := from.PartitionBlockItem(nativeItem.Range())
-
- var item *node
-
- switch tItem := nativeItem.(type) {
- case *hclsyntax.Attribute:
- item = parseAttribute(tItem, within, leadComments, lineComments, newline)
- case *hclsyntax.Block:
- item = parseBlock(tItem, within, leadComments, lineComments, newline)
- default:
- // should never happen if caller is behaving
- panic("unsupported native item type")
- }
-
- return before, item, after
-}
-
-func parseAttribute(nativeAttr *hclsyntax.Attribute, from, leadComments, lineComments, newline inputTokens) *node {
- attr := &Attribute{
- inTree: newInTree(),
- }
- children := attr.inTree.children
-
- {
- cn := newNode(newComments(leadComments.Tokens()))
- attr.leadComments = cn
- children.AppendNode(cn)
- }
-
- before, nameTokens, from := from.Partition(nativeAttr.NameRange)
- {
- children.AppendUnstructuredTokens(before.Tokens())
- if nameTokens.Len() != 1 {
- // Should never happen with valid input
- panic("attribute name is not exactly one token")
- }
- token := nameTokens.Tokens()[0]
- in := newNode(newIdentifier(token))
- attr.name = in
- children.AppendNode(in)
- }
-
- before, equalsTokens, from := from.Partition(nativeAttr.EqualsRange)
- children.AppendUnstructuredTokens(before.Tokens())
- children.AppendUnstructuredTokens(equalsTokens.Tokens())
-
- before, exprTokens, from := from.Partition(nativeAttr.Expr.Range())
- {
- children.AppendUnstructuredTokens(before.Tokens())
- exprNode := parseExpression(nativeAttr.Expr, exprTokens)
- attr.expr = exprNode
- children.AppendNode(exprNode)
- }
-
- {
- cn := newNode(newComments(lineComments.Tokens()))
- attr.lineComments = cn
- children.AppendNode(cn)
- }
-
- children.AppendUnstructuredTokens(newline.Tokens())
-
- // Collect any stragglers, though there shouldn't be any
- children.AppendUnstructuredTokens(from.Tokens())
-
- return newNode(attr)
-}
-
-func parseBlock(nativeBlock *hclsyntax.Block, from, leadComments, lineComments, newline inputTokens) *node {
- block := &Block{
- inTree: newInTree(),
- labels: newNodeSet(),
- }
- children := block.inTree.children
-
- {
- cn := newNode(newComments(leadComments.Tokens()))
- block.leadComments = cn
- children.AppendNode(cn)
- }
-
- before, typeTokens, from := from.Partition(nativeBlock.TypeRange)
- {
- children.AppendUnstructuredTokens(before.Tokens())
- if typeTokens.Len() != 1 {
- // Should never happen with valid input
- panic("block type name is not exactly one token")
- }
- token := typeTokens.Tokens()[0]
- in := newNode(newIdentifier(token))
- block.typeName = in
- children.AppendNode(in)
- }
-
- for _, rng := range nativeBlock.LabelRanges {
- var labelTokens inputTokens
- before, labelTokens, from = from.Partition(rng)
- children.AppendUnstructuredTokens(before.Tokens())
- tokens := labelTokens.Tokens()
- ln := newNode(newQuoted(tokens))
- block.labels.Add(ln)
- children.AppendNode(ln)
- }
-
- before, oBrace, from := from.Partition(nativeBlock.OpenBraceRange)
- children.AppendUnstructuredTokens(before.Tokens())
- children.AppendUnstructuredTokens(oBrace.Tokens())
-
- // We go a bit out of order here: we go hunting for the closing brace
- // so that we have a delimited body, but then we'll deal with the body
- // before we actually append the closing brace and any straggling tokens
- // that appear after it.
- bodyTokens, cBrace, from := from.Partition(nativeBlock.CloseBraceRange)
- before, body, after := parseBody(nativeBlock.Body, bodyTokens)
- children.AppendUnstructuredTokens(before.Tokens())
- block.body = body
- children.AppendNode(body)
- children.AppendUnstructuredTokens(after.Tokens())
-
- children.AppendUnstructuredTokens(cBrace.Tokens())
-
- // stragglers
- children.AppendUnstructuredTokens(from.Tokens())
- if lineComments.Len() > 0 {
- // blocks don't actually have line comments, so we'll just treat
- // them as extra stragglers
- children.AppendUnstructuredTokens(lineComments.Tokens())
- }
- children.AppendUnstructuredTokens(newline.Tokens())
-
- return newNode(block)
-}
-
-func parseExpression(nativeExpr hclsyntax.Expression, from inputTokens) *node {
- expr := newExpression()
- children := expr.inTree.children
-
- nativeVars := nativeExpr.Variables()
-
- for _, nativeTraversal := range nativeVars {
- before, traversal, after := parseTraversal(nativeTraversal, from)
- children.AppendUnstructuredTokens(before.Tokens())
- children.AppendNode(traversal)
- expr.absTraversals.Add(traversal)
- from = after
- }
- // Attach any stragglers that don't belong to a traversal to the expression
- // itself. In an expression with no traversals at all, this is just the
- // entirety of "from".
- children.AppendUnstructuredTokens(from.Tokens())
-
- return newNode(expr)
-}
-
-func parseTraversal(nativeTraversal hcl.Traversal, from inputTokens) (before inputTokens, n *node, after inputTokens) {
- traversal := newTraversal()
- children := traversal.inTree.children
- before, from, after = from.Partition(nativeTraversal.SourceRange())
-
- stepAfter := from
- for _, nativeStep := range nativeTraversal {
- before, step, after := parseTraversalStep(nativeStep, stepAfter)
- children.AppendUnstructuredTokens(before.Tokens())
- children.AppendNode(step)
- traversal.steps.Add(step)
- stepAfter = after
- }
-
- return before, newNode(traversal), after
-}
-
-func parseTraversalStep(nativeStep hcl.Traverser, from inputTokens) (before inputTokens, n *node, after inputTokens) {
- var children *nodes
- switch tNativeStep := nativeStep.(type) {
-
- case hcl.TraverseRoot, hcl.TraverseAttr:
- step := newTraverseName()
- children = step.inTree.children
- before, from, after = from.Partition(nativeStep.SourceRange())
- inBefore, token, inAfter := from.PartitionTypeSingle(hclsyntax.TokenIdent)
- name := newIdentifier(token)
- children.AppendUnstructuredTokens(inBefore.Tokens())
- step.name = children.Append(name)
- children.AppendUnstructuredTokens(inAfter.Tokens())
- return before, newNode(step), after
-
- case hcl.TraverseIndex:
- step := newTraverseIndex()
- children = step.inTree.children
- before, from, after = from.Partition(nativeStep.SourceRange())
-
- var inBefore, oBrack, keyTokens, cBrack inputTokens
- inBefore, oBrack, from = from.PartitionType(hclsyntax.TokenOBrack)
- children.AppendUnstructuredTokens(inBefore.Tokens())
- children.AppendUnstructuredTokens(oBrack.Tokens())
- keyTokens, cBrack, from = from.PartitionType(hclsyntax.TokenCBrack)
-
- keyVal := tNativeStep.Key
- switch keyVal.Type() {
- case cty.String:
- key := newQuoted(keyTokens.Tokens())
- step.key = children.Append(key)
- case cty.Number:
- valBefore, valToken, valAfter := keyTokens.PartitionTypeSingle(hclsyntax.TokenNumberLit)
- children.AppendUnstructuredTokens(valBefore.Tokens())
- key := newNumber(valToken)
- step.key = children.Append(key)
- children.AppendUnstructuredTokens(valAfter.Tokens())
- }
-
- children.AppendUnstructuredTokens(cBrack.Tokens())
- children.AppendUnstructuredTokens(from.Tokens())
-
- return before, newNode(step), after
- default:
- panic(fmt.Sprintf("unsupported traversal step type %T", nativeStep))
- }
-
-}
-
-// writerTokens takes a sequence of tokens as produced by the main hclsyntax
-// package and transforms it into an equivalent sequence of tokens using
-// this package's own token model.
-//
-// The resulting list contains the same number of tokens and uses the same
-// indices as the input, allowing the two sets of tokens to be correlated
-// by index.
-func writerTokens(nativeTokens hclsyntax.Tokens) Tokens {
- // Ultimately we want a slice of token _pointers_, but since we can
- // predict how much memory we're going to devote to tokens we'll allocate
- // it all as a single flat buffer and thus give the GC less work to do.
- tokBuf := make([]Token, len(nativeTokens))
- var lastByteOffset int
- for i, mainToken := range nativeTokens {
- // Create a copy of the bytes so that we can mutate without
- // corrupting the original token stream.
- bytes := make([]byte, len(mainToken.Bytes))
- copy(bytes, mainToken.Bytes)
-
- tokBuf[i] = Token{
- Type: mainToken.Type,
- Bytes: bytes,
-
- // We assume here that spaces are always ASCII spaces, since
- // that's what the scanner also assumes, and thus the number
- // of bytes skipped is also the number of space characters.
- SpacesBefore: mainToken.Range.Start.Byte - lastByteOffset,
- }
-
- lastByteOffset = mainToken.Range.End.Byte
- }
-
- // Now make a slice of pointers into the previous slice.
- ret := make(Tokens, len(tokBuf))
- for i := range ret {
- ret[i] = &tokBuf[i]
- }
-
- return ret
-}
-
-// partitionTokens takes a sequence of tokens and a hcl.Range and returns
-// two indices within the token sequence that correspond with the range
-// boundaries, such that the slice operator could be used to produce
-// three token sequences for before, within, and after respectively:
-//
-// start, end := partitionTokens(toks, rng)
-// before := toks[:start]
-// within := toks[start:end]
-// after := toks[end:]
-//
-// This works best when the range is aligned with token boundaries (e.g.
-// because it was produced in terms of the scanner's result) but if that isn't
-// true then it will make a best effort that may produce strange results at
-// the boundaries.
-//
-// Native hclsyntax tokens are used here, because they contain the necessary
-// absolute position information. However, since writerTokens produces a
-// correlatable sequence of writer tokens, the resulting indices can be
-// used also to index into its result, allowing the partitioning of writer
-// tokens to be driven by the partitioning of native tokens.
-//
-// The tokens are assumed to be in source order and non-overlapping, which
-// will be true if the token sequence from the scanner is used directly.
-func partitionTokens(toks hclsyntax.Tokens, rng hcl.Range) (start, end int) {
-	// We use a linear search here because we assume that in most cases our
-	// target range is close to the beginning of the sequence, and the sequences
-	// are generally small for most reasonable files anyway.
- for i := 0; ; i++ {
- if i >= len(toks) {
- // No tokens for the given range at all!
- return len(toks), len(toks)
- }
-
- if toks[i].Range.Start.Byte >= rng.Start.Byte {
- start = i
- break
- }
- }
-
- for i := start; ; i++ {
- if i >= len(toks) {
- // The range "hangs off" the end of the token sequence
- return start, len(toks)
- }
-
- if toks[i].Range.Start.Byte >= rng.End.Byte {
- end = i // end marker is exclusive
- break
- }
- }
-
- return start, end
-}
-
-// partitionLeadCommentTokens takes a sequence of tokens that is assumed
-// to immediately precede a construct that can have lead comment tokens,
-// and returns the index into that sequence where the lead comments begin.
-//
-// Lead comments are defined as whole lines containing only comment tokens
-// with no blank lines between. If no such lines are found, the returned
-// index will be len(toks).
-func partitionLeadCommentTokens(toks hclsyntax.Tokens) int {
- // single-line comments (which is what we're interested in here)
- // consume their trailing newline, so we can just walk backwards
- // until we stop seeing comment tokens.
- for i := len(toks) - 1; i >= 0; i-- {
- if toks[i].Type != hclsyntax.TokenComment {
- return i + 1
- }
- }
- return 0
-}
-
-// partitionLineEndTokens takes a sequence of tokens that is assumed
-// to immediately follow a construct that can have a line comment, and
-// returns first the index where any line comments end and then second
-// the index immediately after the trailing newline.
-//
-// Line comments are defined as comments that appear immediately after
-// a construct on the same line where its significant tokens ended.
-//
-// Since single-line comment tokens (# and //) include the newline that
-// terminates them, in the presence of these the two returned indices
-// will be the same since the comment itself serves as the line end.
-func partitionLineEndTokens(toks hclsyntax.Tokens) (afterComment, afterNewline int) {
- for i := 0; i < len(toks); i++ {
- tok := toks[i]
- if tok.Type != hclsyntax.TokenComment {
- switch tok.Type {
- case hclsyntax.TokenNewline:
- return i, i + 1
- case hclsyntax.TokenEOF:
- // Although this is valid, we mustn't include the EOF
- // itself as our "newline" or else strange things will
- // happen when we try to append new items.
- return i, i
- default:
- // If we have well-formed input here then nothing else should be
- // possible. This path should never happen, because we only try
- // to extract tokens from the sequence if the parser succeeded,
- // and it should catch this problem itself.
- panic("malformed line trailers: expected only comments and newlines")
- }
- }
-
- if len(tok.Bytes) > 0 && tok.Bytes[len(tok.Bytes)-1] == '\n' {
- // Newline at the end of a single-line comment serves both as
- // the end of comments *and* the end of the line.
- return i + 1, i + 1
- }
- }
- return len(toks), len(toks)
-}
-
-// lexConfig uses the hclsyntax scanner to get a token stream and then
-// rewrites it into this package's token model.
-//
-// Any errors produced during scanning are ignored, so the results of this
-// function should be used with care.
-func lexConfig(src []byte) Tokens {
- mainTokens, _ := hclsyntax.LexConfig(src, "", hcl.Pos{Byte: 0, Line: 1, Column: 1})
- return writerTokens(mainTokens)
-}
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/public.go b/vendor/github.com/hashicorp/hcl2/hclwrite/public.go
deleted file mode 100644
index 4d5ce2a6e..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/public.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package hclwrite
-
-import (
- "bytes"
-
- "github.com/hashicorp/hcl2/hcl"
-)
-
-// NewFile creates a new file object that is empty and ready to have constructs
-// added to it.
-func NewFile() *File {
- body := &Body{
- inTree: newInTree(),
- items: newNodeSet(),
- }
- file := &File{
- inTree: newInTree(),
- }
- file.body = file.inTree.children.Append(body)
- return file
-}
-
-// ParseConfig interprets the given source bytes into a *hclwrite.File. The
-// resulting AST can be used to perform surgical edits on the source code
-// before turning it back into bytes again.
-func ParseConfig(src []byte, filename string, start hcl.Pos) (*File, hcl.Diagnostics) {
- return parse(src, filename, start)
-}
-
-// Format takes source code and performs simple whitespace changes to transform
-// it to a canonical layout style.
-//
-// Format skips constructing an AST and works directly with tokens, so it
-// is less expensive than formatting via the AST for situations where no other
-// changes will be made. It also ignores syntax errors and can thus be applied
-// to partial source code, although the result in that case may not be
-// desirable.
-func Format(src []byte) []byte {
- tokens := lexConfig(src)
- format(tokens)
- buf := &bytes.Buffer{}
- tokens.WriteTo(buf)
- return buf.Bytes()
-}
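A short usage sketch for the two public entry points above. The file name main.tf, the resource block, and the AMI value are illustrative; the diagnostics handling assumes the standard hcl.Diagnostics API.

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl2/hcl"
	"github.com/hashicorp/hcl2/hclwrite"
)

func main() {
	src := []byte("resource \"outscale_vm\" \"web\" {\n  image_id=\"ami-12345678\"\n}\n")

	// Format only adjusts whitespace between tokens; it ignores syntax errors,
	// so it never fails, but its output on invalid input needs care.
	fmt.Printf("%s", hclwrite.Format(src))

	// ParseConfig builds the writable AST and reports syntax problems.
	_, diags := hclwrite.ParseConfig(src, "main.tf", hcl.Pos{Line: 1, Column: 1})
	if diags.HasErrors() {
		log.Fatal(diags)
	}
}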
diff --git a/vendor/github.com/hashicorp/hcl2/hclwrite/tokens.go b/vendor/github.com/hashicorp/hcl2/hclwrite/tokens.go
deleted file mode 100644
index d87f81853..000000000
--- a/vendor/github.com/hashicorp/hcl2/hclwrite/tokens.go
+++ /dev/null
@@ -1,122 +0,0 @@
-package hclwrite
-
-import (
- "bytes"
- "io"
-
- "github.com/apparentlymart/go-textseg/textseg"
- "github.com/hashicorp/hcl2/hcl"
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
-)
-
-// Token is a single sequence of bytes annotated with a type. It is similar
-// in purpose to hclsyntax.Token, but discards the source position information
-// since that is not useful in code generation.
-type Token struct {
- Type hclsyntax.TokenType
- Bytes []byte
-
- // We record the number of spaces before each token so that we can
- // reproduce the exact layout of the original file when we're making
- // surgical changes in-place. When _new_ code is created it will always
- // be in the canonical style, but we preserve layout of existing code.
- SpacesBefore int
-}
-
-// asHCLSyntax returns the receiver expressed as an incomplete hclsyntax.Token.
-// A complete token is not possible since we don't have source location
-// information here, and so this method is unexported so we can be sure it will
-// only be used for internal purposes where we know the range isn't important.
-//
-// This is primarily intended to allow us to re-use certain functionality from
-// hclsyntax rather than re-implementing it against our own token type here.
-func (t *Token) asHCLSyntax() hclsyntax.Token {
- return hclsyntax.Token{
- Type: t.Type,
- Bytes: t.Bytes,
- Range: hcl.Range{
- Filename: "",
- },
- }
-}
-
-// Tokens is a flat list of tokens.
-type Tokens []*Token
-
-func (ts Tokens) Bytes() []byte {
- buf := &bytes.Buffer{}
- ts.WriteTo(buf)
- return buf.Bytes()
-}
-
-func (ts Tokens) testValue() string {
- return string(ts.Bytes())
-}
-
-// Columns returns the number of columns (grapheme clusters) the token sequence
-// occupies. The result is not meaningful if there are newline or single-line
-// comment tokens in the sequence.
-func (ts Tokens) Columns() int {
- ret := 0
- for _, token := range ts {
- ret += token.SpacesBefore // spaces are always worth one column each
- ct, _ := textseg.TokenCount(token.Bytes, textseg.ScanGraphemeClusters)
- ret += ct
- }
- return ret
-}
-
-// WriteTo takes an io.Writer and writes the bytes for each token to it,
-// along with the spacing that separates each token. In other words, this
-// allows serializing the tokens to a file or other such byte stream.
-func (ts Tokens) WriteTo(wr io.Writer) (int64, error) {
- // We know we're going to be writing a lot of small chunks of repeated
- // space characters, so we'll prepare a buffer of these that we can
- // easily pass to wr.Write without any further allocation.
- spaces := make([]byte, 40)
- for i := range spaces {
- spaces[i] = ' '
- }
-
- var n int64
- var err error
- for _, token := range ts {
- if err != nil {
- return n, err
- }
-
- for spacesBefore := token.SpacesBefore; spacesBefore > 0; spacesBefore -= len(spaces) {
- thisChunk := spacesBefore
- if thisChunk > len(spaces) {
- thisChunk = len(spaces)
- }
- var thisN int
- thisN, err = wr.Write(spaces[:thisChunk])
- n += int64(thisN)
- if err != nil {
- return n, err
- }
- }
-
- var thisN int
- thisN, err = wr.Write(token.Bytes)
- n += int64(thisN)
- }
-
- return n, err
-}
-
-func (ts Tokens) walkChildNodes(w internalWalkFunc) {
- // Unstructured tokens have no child nodes
-}
-
-func (ts Tokens) BuildTokens(to Tokens) Tokens {
- return append(to, ts...)
-}
-
-func newIdentToken(name string) *Token {
- return &Token{
- Type: hclsyntax.TokenIdent,
- Bytes: []byte(name),
- }
-}
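The serialization rule implemented by WriteTo is purely mechanical: each token is written as SpacesBefore space characters followed by its raw bytes. A small hand-built example, with illustrative token contents:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl2/hcl/hclsyntax"
	"github.com/hashicorp/hcl2/hclwrite"
)

func main() {
	// Each token contributes SpacesBefore spaces, then its Bytes, in order.
	toks := hclwrite.Tokens{
		{Type: hclsyntax.TokenIdent, Bytes: []byte("count")},
		{Type: hclsyntax.TokenEqual, Bytes: []byte("="), SpacesBefore: 1},
		{Type: hclsyntax.TokenNumberLit, Bytes: []byte("3"), SpacesBefore: 1},
		{Type: hclsyntax.TokenNewline, Bytes: []byte("\n")},
	}
	fmt.Print(string(toks.Bytes())) // prints: count = 3
}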
diff --git a/vendor/github.com/hashicorp/hil/.gitignore b/vendor/github.com/hashicorp/hil/.gitignore
deleted file mode 100644
index 9d6e5df38..000000000
--- a/vendor/github.com/hashicorp/hil/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-.DS_Store
-.idea
-*.iml
diff --git a/vendor/github.com/hashicorp/hil/.travis.yml b/vendor/github.com/hashicorp/hil/.travis.yml
deleted file mode 100644
index a78544422..000000000
--- a/vendor/github.com/hashicorp/hil/.travis.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-sudo: false
-language: go
-go: 1.7
diff --git a/vendor/github.com/hashicorp/hil/LICENSE b/vendor/github.com/hashicorp/hil/LICENSE
deleted file mode 100644
index 82b4de97c..000000000
--- a/vendor/github.com/hashicorp/hil/LICENSE
+++ /dev/null
@@ -1,353 +0,0 @@
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. “Contributor”
-
- means each individual or legal entity that creates, contributes to the
- creation of, or owns Covered Software.
-
-1.2. “Contributor Version”
-
- means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor’s Contribution.
-
-1.3. “Contribution”
-
- means Covered Software of a particular Contributor.
-
-1.4. “Covered Software”
-
- means Source Code Form to which the initial Contributor has attached the
- notice in Exhibit A, the Executable Form of such Source Code Form, and
- Modifications of such Source Code Form, in each case including portions
- thereof.
-
-1.5. “Incompatible With Secondary Licenses”
- means
-
- a. that the initial Contributor has attached the notice described in
- Exhibit B to the Covered Software; or
-
- b. that the Covered Software was made available under the terms of version
- 1.1 or earlier of the License, but not also under the terms of a
- Secondary License.
-
-1.6. “Executable Form”
-
- means any form of the work other than Source Code Form.
-
-1.7. “Larger Work”
-
- means a work that combines Covered Software with other material, in a separate
- file or files, that is not Covered Software.
-
-1.8. “License”
-
- means this document.
-
-1.9. “Licensable”
-
- means having the right to grant, to the maximum extent possible, whether at the
- time of the initial grant or subsequently, any and all of the rights conveyed by
- this License.
-
-1.10. “Modifications”
-
- means any of the following:
-
- a. any file in Source Code Form that results from an addition to, deletion
- from, or modification of the contents of Covered Software; or
-
- b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. “Patent Claims” of a Contributor
-
- means any patent claim(s), including without limitation, method, process,
- and apparatus claims, in any patent Licensable by such Contributor that
- would be infringed, but for the grant of the License, by the making,
- using, selling, offering for sale, having made, import, or transfer of
- either its Contributions or its Contributor Version.
-
-1.12. “Secondary License”
-
- means either the GNU General Public License, Version 2.0, the GNU Lesser
- General Public License, Version 2.1, the GNU Affero General Public
- License, Version 3.0, or any later versions of those licenses.
-
-1.13. “Source Code Form”
-
- means the form of the work preferred for making modifications.
-
-1.14. “You” (or “Your”)
-
- means an individual or a legal entity exercising rights under this
- License. For legal entities, “You” includes any entity that controls, is
- controlled by, or is under common control with You. For purposes of this
- definition, “control” means (a) the power, direct or indirect, to cause
- the direction or management of such entity, whether by contract or
- otherwise, or (b) ownership of more than fifty percent (50%) of the
- outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
- Each Contributor hereby grants You a world-wide, royalty-free,
- non-exclusive license:
-
- a. under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or as
- part of a Larger Work; and
-
- b. under Patent Claims of such Contributor to make, use, sell, offer for
- sale, have made, import, and otherwise transfer either its Contributions
- or its Contributor Version.
-
-2.2. Effective Date
-
- The licenses granted in Section 2.1 with respect to any Contribution become
- effective for each Contribution on the date the Contributor first distributes
- such Contribution.
-
-2.3. Limitations on Grant Scope
-
- The licenses granted in this Section 2 are the only rights granted under this
- License. No additional rights or licenses will be implied from the distribution
- or licensing of Covered Software under this License. Notwithstanding Section
- 2.1(b) above, no patent license is granted by a Contributor:
-
- a. for any code that a Contributor has removed from Covered Software; or
-
- b. for infringements caused by: (i) Your and any other third party’s
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
- c. under Patent Claims infringed by Covered Software in the absence of its
- Contributions.
-
- This License does not grant any rights in the trademarks, service marks, or
- logos of any Contributor (except as may be necessary to comply with the
- notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
- No Contributor makes additional grants as a result of Your choice to
- distribute the Covered Software under a subsequent version of this License
- (see Section 10.2) or under the terms of a Secondary License (if permitted
- under the terms of Section 3.3).
-
-2.5. Representation
-
- Each Contributor represents that the Contributor believes its Contributions
- are its original creation(s) or it has sufficient rights to grant the
- rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
- This License is not intended to limit any rights You have under applicable
- copyright doctrines of fair use, fair dealing, or other equivalents.
-
-2.7. Conditions
-
- Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
- Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
- All distribution of Covered Software in Source Code Form, including any
- Modifications that You create or to which You contribute, must be under the
- terms of this License. You must inform recipients that the Source Code Form
- of the Covered Software is governed by the terms of this License, and how
- they can obtain a copy of this License. You may not attempt to alter or
- restrict the recipients’ rights in the Source Code Form.
-
-3.2. Distribution of Executable Form
-
- If You distribute Covered Software in Executable Form then:
-
- a. such Covered Software must also be made available in Source Code Form,
- as described in Section 3.1, and You must inform recipients of the
- Executable Form how they can obtain a copy of such Source Code Form by
- reasonable means in a timely manner, at a charge no more than the cost
- of distribution to the recipient; and
-
- b. You may distribute such Executable Form under the terms of this License,
- or sublicense it under different terms, provided that the license for
- the Executable Form does not attempt to limit or alter the recipients’
- rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
- You may create and distribute a Larger Work under terms of Your choice,
- provided that You also comply with the requirements of this License for the
- Covered Software. If the Larger Work is a combination of Covered Software
- with a work governed by one or more Secondary Licenses, and the Covered
- Software is not Incompatible With Secondary Licenses, this License permits
- You to additionally distribute such Covered Software under the terms of
- such Secondary License(s), so that the recipient of the Larger Work may, at
- their option, further distribute the Covered Software under the terms of
- either this License or such Secondary License(s).
-
-3.4. Notices
-
- You may not remove or alter the substance of any license notices (including
- copyright notices, patent notices, disclaimers of warranty, or limitations
- of liability) contained within the Source Code Form of the Covered
- Software, except that You may alter any license notices to the extent
- required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
- You may choose to offer, and to charge a fee for, warranty, support,
- indemnity or liability obligations to one or more recipients of Covered
- Software. However, You may do so only on Your own behalf, and not on behalf
- of any Contributor. You must make it absolutely clear that any such
- warranty, support, indemnity, or liability obligation is offered by You
- alone, and You hereby agree to indemnify every Contributor for any
- liability incurred by such Contributor as a result of warranty, support,
- indemnity or liability terms You offer. You may include additional
- disclaimers of warranty and limitations of liability specific to any
- jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
- If it is impossible for You to comply with any of the terms of this License
- with respect to some or all of the Covered Software due to statute, judicial
- order, or regulation then You must: (a) comply with the terms of this License
- to the maximum extent possible; and (b) describe the limitations and the code
- they affect. Such description must be placed in a text file included with all
- distributions of the Covered Software under this License. Except to the
- extent prohibited by statute or regulation, such description must be
- sufficiently detailed for a recipient of ordinary skill to be able to
- understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
- fail to comply with any of its terms. However, if You become compliant,
- then the rights granted under this License from a particular Contributor
- are reinstated (a) provisionally, unless and until such Contributor
- explicitly and finally terminates Your grants, and (b) on an ongoing basis,
- if such Contributor fails to notify You of the non-compliance by some
- reasonable means prior to 60 days after You have come back into compliance.
- Moreover, Your grants from a particular Contributor are reinstated on an
- ongoing basis if such Contributor notifies You of the non-compliance by
- some reasonable means, this is the first time You have received notice of
- non-compliance with this License from such Contributor, and You become
- compliant prior to 30 days after Your receipt of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
- infringement claim (excluding declaratory judgment actions, counter-claims,
- and cross-claims) alleging that a Contributor Version directly or
- indirectly infringes any patent, then the rights granted to You by any and
- all Contributors for the Covered Software under Section 2.1 of this License
- shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
- license agreements (excluding distributors and resellers) which have been
- validly granted by You or Your distributors under this License prior to
- termination shall survive termination.
-
-6. Disclaimer of Warranty
-
- Covered Software is provided under this License on an “as is” basis, without
- warranty of any kind, either expressed, implied, or statutory, including,
- without limitation, warranties that the Covered Software is free of defects,
- merchantable, fit for a particular purpose or non-infringing. The entire
- risk as to the quality and performance of the Covered Software is with You.
- Should any Covered Software prove defective in any respect, You (not any
- Contributor) assume the cost of any necessary servicing, repair, or
- correction. This disclaimer of warranty constitutes an essential part of this
- License. No use of any Covered Software is authorized under this License
- except under this disclaimer.
-
-7. Limitation of Liability
-
- Under no circumstances and under no legal theory, whether tort (including
- negligence), contract, or otherwise, shall any Contributor, or anyone who
- distributes Covered Software as permitted above, be liable to You for any
- direct, indirect, special, incidental, or consequential damages of any
- character including, without limitation, damages for lost profits, loss of
- goodwill, work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses, even if such party shall have been
- informed of the possibility of such damages. This limitation of liability
- shall not apply to liability for death or personal injury resulting from such
- party’s negligence to the extent applicable law prohibits such limitation.
- Some jurisdictions do not allow the exclusion or limitation of incidental or
- consequential damages, so this exclusion and limitation may not apply to You.
-
-8. Litigation
-
- Any litigation relating to this License may be brought only in the courts of
- a jurisdiction where the defendant maintains its principal place of business
- and such litigation shall be governed by laws of that jurisdiction, without
- reference to its conflict-of-law provisions. Nothing in this Section shall
- prevent a party’s ability to bring cross-claims or counter-claims.
-
-9. Miscellaneous
-
- This License represents the complete agreement concerning the subject matter
- hereof. If any provision of this License is held to be unenforceable, such
- provision shall be reformed only to the extent necessary to make it
- enforceable. Any law or regulation which provides that the language of a
- contract shall be construed against the drafter shall not be used to construe
- this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
- Mozilla Foundation is the license steward. Except as provided in Section
- 10.3, no one other than the license steward has the right to modify or
- publish new versions of this License. Each version will be given a
- distinguishing version number.
-
-10.2. Effect of New Versions
-
- You may distribute the Covered Software under the terms of the version of
- the License under which You originally received the Covered Software, or
- under the terms of any subsequent version published by the license
- steward.
-
-10.3. Modified Versions
-
- If you create software not governed by this License, and you want to
- create a new license for such software, you may create and use a modified
- version of this License if you rename the license and remove any
- references to the name of the license steward (except to note that such
- modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
- If You choose to distribute Source Code Form that is Incompatible With
- Secondary Licenses under the terms of this version of the License, the
- notice described in Exhibit B of this License must be attached.
-
-Exhibit A - Source Code Form License Notice
-
- This Source Code Form is subject to the
- terms of the Mozilla Public License, v.
- 2.0. If a copy of the MPL was not
- distributed with this file, You can
- obtain one at
- http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file, then
-You may include the notice in a location (such as a LICENSE file in a relevant
-directory) where a recipient would be likely to look for such a notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - “Incompatible With Secondary Licenses” Notice
-
- This Source Code Form is “Incompatible
- With Secondary Licenses”, as defined by
- the Mozilla Public License, v. 2.0.
diff --git a/vendor/github.com/hashicorp/hil/README.md b/vendor/github.com/hashicorp/hil/README.md
deleted file mode 100644
index 186ed2518..000000000
--- a/vendor/github.com/hashicorp/hil/README.md
+++ /dev/null
@@ -1,102 +0,0 @@
-# HIL
-
-[![GoDoc](https://godoc.org/github.com/hashicorp/hil?status.png)](https://godoc.org/github.com/hashicorp/hil) [![Build Status](https://travis-ci.org/hashicorp/hil.svg?branch=master)](https://travis-ci.org/hashicorp/hil)
-
-HIL (HashiCorp Interpolation Language) is a lightweight embedded language used
-primarily for configuration interpolation. The goal of HIL is to make a simple
-language for interpolations in the various configurations of HashiCorp tools.
-
-HIL is built to interpolate any string, but is in use by HashiCorp primarily
-with [HCL](https://github.com/hashicorp/hcl). HCL is _not required_ in any
-way for use with HIL.
-
-HIL isn't meant to be a general purpose language. It was built for basic
-configuration interpolations. Therefore, you can't currently write functions,
-have conditionals, set intermediary variables, etc. within HIL itself. It is
-possible some of these may be added later but the right use case must exist.
-
-## Why?
-
-Many of our tools have support for something similar to templates, but
-within the configuration itself. The most prominent requirement was in
-[Terraform](https://github.com/hashicorp/terraform) where we wanted the
-configuration to be able to reference values from elsewhere in the
-configuration. Example:
-
- foo = "hi ${var.world}"
-
-We originally used a full templating language for this, but found it
-was too heavyweight. Additionally, many full languages required bindings
-to C (and thus the usage of cgo), which we try to avoid to make cross-compilation
-easier. We then moved to very basic regular-expression-based string
-replacement, but the need for basic arithmetic and function calls
-resulted in overly complex regular expressions.
-
-Ultimately, we wrote our own mini-language within Terraform itself. As
-we built other projects such as [Nomad](https://nomadproject.io) and
-[Otto](https://ottoproject.io), the need for basic interpolations arose
-again.
-
-Thus HIL was born. It is extracted from Terraform, cleaned up, and
-better tested for general purpose use.
-
-## Syntax
-
-For a complete grammar, please see the parser itself. A high-level overview
-of the syntax and grammar is listed here.
-
-Code begins within `${` and `}`. Outside of this, text is treated
-literally. For example, `foo` is a valid HIL program that is just the
-string "foo", but `foo ${bar}` is an HIL program that is the string "foo "
-concatenated with the value of `bar`. For the remainder of the syntax
-docs, we'll assume you're within `${}`.
-
- * Identifiers are any text in the format of `[a-zA-Z0-9-.]`. Example
- identifiers: `foo`, `var.foo`, `foo-bar`.
-
- * Strings are double quoted and can contain any UTF-8 characters.
- Example: `"Hello, World"`
-
- * Numbers are assumed to be base 10. If you prefix a number with 0x,
- it is treated as a hexadecimal. If it is prefixed with 0, it is
- treated as an octal. Numbers can be in scientific notation: "1e10".
-
- * Unary `-` can be used for negative numbers. Example: `-10` or `-0.2`
-
- * Boolean values: `true`, `false`
-
- * The following arithmetic operations are allowed: +, -, *, /, %.
-
- * Function calls are in the form of `name(arg1, arg2, ...)`. Example:
- `add(1, 5)`. Arguments can be any valid HIL expression, example:
- `add(1, var.foo)` or even nested function calls:
- `add(1, get("some value"))`.
-
- * Within strings, further interpolations can be opened with `${}`.
- Example: `"Hello ${nested}"`. A full example including the
-   original `${}` (remember this list assumes we're inside of one
- already) could be: `foo ${func("hello ${var.foo}")}`.
-
-## Language Changes
-
-We've used this mini-language in Terraform for years. For backwards compatibility
-reasons, we're unlikely to make an incompatible change to the language but
-we're not currently making that promise, either.
-
-The internal API of this project may very well change as we evolve it
-to work with more of our projects. We recommend using some sort of dependency
-management solution with this package.
-
-## Future Changes
-
-The following changes are already planned to be made at some point:
-
- * Richer types: lists, maps, etc.
-
- * Convert to a more standard Go parser structure similar to HCL. This
- will improve our error messaging as well as allow us to have automatic
- formatting.
-
- * Allow interpolations to result in more types than just a string. While
- within the interpolation basic types are honored, the result is always
- a string.
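The README stops short of a runnable example. The sketch below assumes the package's documented Parse/Eval entry points and EvalConfig type, which are not included in this diff hunk; the upper function and the var.world variable are purely illustrative.

package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse(`hi ${upper(var.world)}`)
	if err != nil {
		log.Fatal(err)
	}

	// The scope supplies the variables and functions the program may reference.
	config := &hil.EvalConfig{
		GlobalScope: &ast.BasicScope{
			VarMap: map[string]ast.Variable{
				"var.world": {Type: ast.TypeString, Value: "world"},
			},
			FuncMap: map[string]ast.Function{
				"upper": {
					ArgTypes:   []ast.Type{ast.TypeString},
					ReturnType: ast.TypeString,
					Callback: func(args []interface{}) (interface{}, error) {
						return strings.ToUpper(args[0].(string)), nil
					},
				},
			},
		},
	}

	result, err := hil.Eval(tree, config)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result.Value) // hi WORLD
}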
diff --git a/vendor/github.com/hashicorp/hil/appveyor.yml b/vendor/github.com/hashicorp/hil/appveyor.yml
deleted file mode 100644
index feaf7a34e..000000000
--- a/vendor/github.com/hashicorp/hil/appveyor.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-version: "build-{branch}-{build}"
-image: Visual Studio 2015
-clone_folder: c:\gopath\src\github.com\hashicorp\hil
-environment:
- GOPATH: c:\gopath
-init:
- - git config --global core.autocrlf true
-install:
-- cmd: >-
- echo %Path%
-
- go version
-
- go env
-
- go get -d -v -t ./...
-build_script:
-- cmd: go test -v ./...
diff --git a/vendor/github.com/hashicorp/hil/ast/arithmetic.go b/vendor/github.com/hashicorp/hil/ast/arithmetic.go
deleted file mode 100644
index 94dc24f89..000000000
--- a/vendor/github.com/hashicorp/hil/ast/arithmetic.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package ast
-
-import (
- "bytes"
- "fmt"
-)
-
-// Arithmetic represents a node where the result is arithmetic of
-// two or more operands in the order given.
-type Arithmetic struct {
- Op ArithmeticOp
- Exprs []Node
- Posx Pos
-}
-
-func (n *Arithmetic) Accept(v Visitor) Node {
- for i, expr := range n.Exprs {
- n.Exprs[i] = expr.Accept(v)
- }
-
- return v(n)
-}
-
-func (n *Arithmetic) Pos() Pos {
- return n.Posx
-}
-
-func (n *Arithmetic) GoString() string {
- return fmt.Sprintf("*%#v", *n)
-}
-
-func (n *Arithmetic) String() string {
- var b bytes.Buffer
- for _, expr := range n.Exprs {
- b.WriteString(fmt.Sprintf("%s", expr))
- }
-
- return b.String()
-}
-
-func (n *Arithmetic) Type(Scope) (Type, error) {
- return TypeInt, nil
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/arithmetic_op.go b/vendor/github.com/hashicorp/hil/ast/arithmetic_op.go
deleted file mode 100644
index 18880c604..000000000
--- a/vendor/github.com/hashicorp/hil/ast/arithmetic_op.go
+++ /dev/null
@@ -1,24 +0,0 @@
-package ast
-
-// ArithmeticOp is the operation to use for the math.
-type ArithmeticOp int
-
-const (
- ArithmeticOpInvalid ArithmeticOp = 0
-
- ArithmeticOpAdd ArithmeticOp = iota
- ArithmeticOpSub
- ArithmeticOpMul
- ArithmeticOpDiv
- ArithmeticOpMod
-
- ArithmeticOpLogicalAnd
- ArithmeticOpLogicalOr
-
- ArithmeticOpEqual
- ArithmeticOpNotEqual
- ArithmeticOpLessThan
- ArithmeticOpLessThanOrEqual
- ArithmeticOpGreaterThan
- ArithmeticOpGreaterThanOrEqual
-)
diff --git a/vendor/github.com/hashicorp/hil/ast/ast.go b/vendor/github.com/hashicorp/hil/ast/ast.go
deleted file mode 100644
index c6350f8bb..000000000
--- a/vendor/github.com/hashicorp/hil/ast/ast.go
+++ /dev/null
@@ -1,99 +0,0 @@
-package ast
-
-import (
- "fmt"
-)
-
-// Node is the interface that all AST nodes must implement.
-type Node interface {
- // Accept is called to dispatch to the visitors. It must return the
- // resulting Node (which might be different in an AST transform).
- Accept(Visitor) Node
-
- // Pos returns the position of this node in some source.
- Pos() Pos
-
- // Type returns the type of this node for the given context.
- Type(Scope) (Type, error)
-}
-
-// Pos is the starting position of an AST node
-type Pos struct {
- Column, Line int // Column/Line number, starting at 1
- Filename string // Optional source filename, if known
-}
-
-func (p Pos) String() string {
- if p.Filename == "" {
- return fmt.Sprintf("%d:%d", p.Line, p.Column)
- } else {
- return fmt.Sprintf("%s:%d:%d", p.Filename, p.Line, p.Column)
- }
-}
-
-// InitPos is an initial position value. This should be used as
-// the starting position (presets the column and line to 1).
-var InitPos = Pos{Column: 1, Line: 1}
-
-// Visitors are just implementations of this function.
-//
-// The function must return the Node to replace this node with. "nil" is
-// _not_ a valid return value. If there is no replacement, the original node
-// should be returned. We build this replacement directly into the visitor
-// pattern since AST transformations are a common and useful tool and
-// building it into the AST itself makes it required for future Node
-// implementations and very easy to do.
-//
-// Note that this isn't a true implementation of the visitor pattern, which
-// generally requires proper type dispatch on the function. However,
-// implementing this basic visitor pattern style is still very useful even
-// if you have to type switch.
-type Visitor func(Node) Node
-
-//go:generate stringer -type=Type
-
-// Type is the type of any value.
-type Type uint32
-
-const (
- TypeInvalid Type = 0
- TypeAny Type = 1 << iota
- TypeBool
- TypeString
- TypeInt
- TypeFloat
- TypeList
- TypeMap
-
- // This is a special type used by Terraform to mark "unknown" values.
- // It is impossible for this type to be introduced into your HIL programs
- // unless you explicitly set a variable to this value. In that case,
- // any operation including the variable will return "TypeUnknown" as the
- // type.
- TypeUnknown
-)
-
-func (t Type) Printable() string {
- switch t {
- case TypeInvalid:
- return "invalid type"
- case TypeAny:
- return "any type"
- case TypeBool:
- return "type bool"
- case TypeString:
- return "type string"
- case TypeInt:
- return "type int"
- case TypeFloat:
- return "type float"
- case TypeList:
- return "type list"
- case TypeMap:
- return "type map"
- case TypeUnknown:
- return "type unknown"
- default:
- return "unknown type"
- }
-}
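Since the Visitor contract above is easy to get wrong (a visitor must always return a node, never nil), here is a small sketch of an AST transform that renames a variable access. It assumes hil.Parse as in the earlier sketch; the old and new variable names are illustrative.

package main

import (
	"fmt"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse(`hello ${var.old_name}`)
	if err != nil {
		panic(err)
	}

	// Returning the input node unchanged means "no replacement".
	rename := func(n ast.Node) ast.Node {
		if va, ok := n.(*ast.VariableAccess); ok && va.Name == "var.old_name" {
			return &ast.VariableAccess{Name: "var.new_name", Posx: va.Pos()}
		}
		return n
	}

	tree = tree.Accept(rename)
	fmt.Println(tree) // the root Output node prints its rewritten children
}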
diff --git a/vendor/github.com/hashicorp/hil/ast/call.go b/vendor/github.com/hashicorp/hil/ast/call.go
deleted file mode 100644
index 055701102..000000000
--- a/vendor/github.com/hashicorp/hil/ast/call.go
+++ /dev/null
@@ -1,47 +0,0 @@
-package ast
-
-import (
- "fmt"
- "strings"
-)
-
-// Call represents a function call.
-type Call struct {
- Func string
- Args []Node
- Posx Pos
-}
-
-func (n *Call) Accept(v Visitor) Node {
- for i, a := range n.Args {
- n.Args[i] = a.Accept(v)
- }
-
- return v(n)
-}
-
-func (n *Call) Pos() Pos {
- return n.Posx
-}
-
-func (n *Call) String() string {
- args := make([]string, len(n.Args))
- for i, arg := range n.Args {
- args[i] = fmt.Sprintf("%s", arg)
- }
-
- return fmt.Sprintf("Call(%s, %s)", n.Func, strings.Join(args, ", "))
-}
-
-func (n *Call) Type(s Scope) (Type, error) {
- f, ok := s.LookupFunc(n.Func)
- if !ok {
- return TypeInvalid, fmt.Errorf("unknown function: %s", n.Func)
- }
-
- return f.ReturnType, nil
-}
-
-func (n *Call) GoString() string {
- return fmt.Sprintf("*%#v", *n)
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/conditional.go b/vendor/github.com/hashicorp/hil/ast/conditional.go
deleted file mode 100644
index be48f89d4..000000000
--- a/vendor/github.com/hashicorp/hil/ast/conditional.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package ast
-
-import (
- "fmt"
-)
-
-type Conditional struct {
- CondExpr Node
- TrueExpr Node
- FalseExpr Node
- Posx Pos
-}
-
-// Accept passes the given visitor to the child nodes in this order:
-// CondExpr, TrueExpr, FalseExpr. It then finally passes itself to the visitor.
-func (n *Conditional) Accept(v Visitor) Node {
- n.CondExpr = n.CondExpr.Accept(v)
- n.TrueExpr = n.TrueExpr.Accept(v)
- n.FalseExpr = n.FalseExpr.Accept(v)
-
- return v(n)
-}
-
-func (n *Conditional) Pos() Pos {
- return n.Posx
-}
-
-func (n *Conditional) Type(Scope) (Type, error) {
- // This is not actually a useful value; the type checker ignores
- // this function when analyzing conditionals, just as with Arithmetic.
- return TypeInt, nil
-}
-
-func (n *Conditional) GoString() string {
- return fmt.Sprintf("*%#v", *n)
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/index.go b/vendor/github.com/hashicorp/hil/ast/index.go
deleted file mode 100644
index 860c25fd2..000000000
--- a/vendor/github.com/hashicorp/hil/ast/index.go
+++ /dev/null
@@ -1,76 +0,0 @@
-package ast
-
-import (
- "fmt"
- "strings"
-)
-
-// Index represents an indexing operation into another data structure
-type Index struct {
- Target Node
- Key Node
- Posx Pos
-}
-
-func (n *Index) Accept(v Visitor) Node {
- n.Target = n.Target.Accept(v)
- n.Key = n.Key.Accept(v)
- return v(n)
-}
-
-func (n *Index) Pos() Pos {
- return n.Posx
-}
-
-func (n *Index) String() string {
- return fmt.Sprintf("Index(%s, %s)", n.Target, n.Key)
-}
-
-func (n *Index) Type(s Scope) (Type, error) {
- variableAccess, ok := n.Target.(*VariableAccess)
- if !ok {
- return TypeInvalid, fmt.Errorf("target is not a variable")
- }
-
- variable, ok := s.LookupVar(variableAccess.Name)
- if !ok {
- return TypeInvalid, fmt.Errorf("unknown variable accessed: %s", variableAccess.Name)
- }
-
- switch variable.Type {
- case TypeList:
- return n.typeList(variable, variableAccess.Name)
- case TypeMap:
- return n.typeMap(variable, variableAccess.Name)
- default:
- return TypeInvalid, fmt.Errorf("invalid index operation into non-indexable type: %s", variable.Type)
- }
-}
-
-func (n *Index) typeList(variable Variable, variableName string) (Type, error) {
- // We assume type checking has already determined that this is a list
- list := variable.Value.([]Variable)
-
- return VariableListElementTypesAreHomogenous(variableName, list)
-}
-
-func (n *Index) typeMap(variable Variable, variableName string) (Type, error) {
- // We assume type checking has already determined that this is a map
- vmap := variable.Value.(map[string]Variable)
-
- return VariableMapValueTypesAreHomogenous(variableName, vmap)
-}
-
-func reportTypes(typesFound map[Type]struct{}) string {
- stringTypes := make([]string, len(typesFound))
- i := 0
- for k := range typesFound {
- stringTypes[i] = k.String()
- i++
- }
- return strings.Join(stringTypes, ", ")
-}
-
-func (n *Index) GoString() string {
- return fmt.Sprintf("*%#v", *n)
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/literal.go b/vendor/github.com/hashicorp/hil/ast/literal.go
deleted file mode 100644
index da6014fee..000000000
--- a/vendor/github.com/hashicorp/hil/ast/literal.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package ast
-
-import (
- "fmt"
- "reflect"
-)
-
-// LiteralNode represents a single literal value, such as "foo" or
-// 42 or 3.14159. Based on the Type, the Value can be safely cast.
-type LiteralNode struct {
- Value interface{}
- Typex Type
- Posx Pos
-}
-
-// NewLiteralNode returns a new literal node representing the given
-// literal Go value, which must correspond to one of the primitive types
-// supported by HIL. Lists and maps cannot currently be constructed via
-// this function.
-//
-// If an inappropriately-typed value is provided, this function will
-// return an error. The main intended use of this function is to produce
-// "synthetic" literals from constants in code, where the value type is
-// well known at compile time. To easily store these in global variables,
-// see also MustNewLiteralNode.
-func NewLiteralNode(value interface{}, pos Pos) (*LiteralNode, error) {
- goType := reflect.TypeOf(value)
- var hilType Type
-
- switch goType.Kind() {
- case reflect.Bool:
- hilType = TypeBool
- case reflect.Int:
- hilType = TypeInt
- case reflect.Float64:
- hilType = TypeFloat
- case reflect.String:
- hilType = TypeString
- default:
- return nil, fmt.Errorf("unsupported literal node type: %T", value)
- }
-
- return &LiteralNode{
- Value: value,
- Typex: hilType,
- Posx: pos,
- }, nil
-}
-
-// MustNewLiteralNode wraps NewLiteralNode and panics if an error is
-// returned, thus allowing valid literal nodes to be easily assigned to
-// global variables.
-func MustNewLiteralNode(value interface{}, pos Pos) *LiteralNode {
- node, err := NewLiteralNode(value, pos)
- if err != nil {
- panic(err)
- }
- return node
-}
-
-func (n *LiteralNode) Accept(v Visitor) Node {
- return v(n)
-}
-
-func (n *LiteralNode) Pos() Pos {
- return n.Posx
-}
-
-func (n *LiteralNode) GoString() string {
- return fmt.Sprintf("*%#v", *n)
-}
-
-func (n *LiteralNode) String() string {
- return fmt.Sprintf("Literal(%s, %v)", n.Typex, n.Value)
-}
-
-func (n *LiteralNode) Type(Scope) (Type, error) {
- return n.Typex, nil
-}
-
-// IsUnknown returns true either if the node's value is itself unknown
-// or if it is a collection containing any unknown elements, deeply.
-func (n *LiteralNode) IsUnknown() bool {
- return IsUnknown(Variable{
- Type: n.Typex,
- Value: n.Value,
- })
-}
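A brief sketch of the two constructors above, using only names defined in this file; the literal values are illustrative.

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hil/ast"
)

func main() {
	// Only bool, int, float64, and string values are accepted; anything else errors.
	node, err := ast.NewLiteralNode(42, ast.InitPos)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(node) // Literal(TypeInt, 42)

	// MustNewLiteralNode panics on error, which suits package-level variables.
	greeting := ast.MustNewLiteralNode("hello", ast.InitPos)
	fmt.Println(greeting.Typex == ast.TypeString) // true
}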
diff --git a/vendor/github.com/hashicorp/hil/ast/output.go b/vendor/github.com/hashicorp/hil/ast/output.go
deleted file mode 100644
index 1e27f970b..000000000
--- a/vendor/github.com/hashicorp/hil/ast/output.go
+++ /dev/null
@@ -1,78 +0,0 @@
-package ast
-
-import (
- "bytes"
- "fmt"
-)
-
-// Output represents the root node of all interpolation evaluations. If the
-// output only has one expression which is either a TypeList or TypeMap, the
-// Output can be type-asserted to []interface{} or map[string]interface{}
-// respectively. Otherwise the Output evaluates as a string, and concatenates
-// the evaluation of each expression.
-type Output struct {
- Exprs []Node
- Posx Pos
-}
-
-func (n *Output) Accept(v Visitor) Node {
- for i, expr := range n.Exprs {
- n.Exprs[i] = expr.Accept(v)
- }
-
- return v(n)
-}
-
-func (n *Output) Pos() Pos {
- return n.Posx
-}
-
-func (n *Output) GoString() string {
- return fmt.Sprintf("*%#v", *n)
-}
-
-func (n *Output) String() string {
- var b bytes.Buffer
- for _, expr := range n.Exprs {
- b.WriteString(fmt.Sprintf("%s", expr))
- }
-
- return b.String()
-}
-
-func (n *Output) Type(s Scope) (Type, error) {
- // Special case no expressions for backward compatibility
- if len(n.Exprs) == 0 {
- return TypeString, nil
- }
-
- // Special case a single expression of types list or map
- if len(n.Exprs) == 1 {
- exprType, err := n.Exprs[0].Type(s)
- if err != nil {
- return TypeInvalid, err
- }
- switch exprType {
- case TypeList:
- return TypeList, nil
- case TypeMap:
- return TypeMap, nil
- }
- }
-
- // Otherwise ensure all our expressions are strings
- for index, expr := range n.Exprs {
- exprType, err := expr.Type(s)
- if err != nil {
- return TypeInvalid, err
- }
- // We only look for things we know we can't coerce with an implicit conversion func
- if exprType == TypeList || exprType == TypeMap {
- return TypeInvalid, fmt.Errorf(
- "multi-expression HIL outputs may only have string inputs: %d is type %s",
- index, exprType)
- }
- }
-
- return TypeString, nil
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/scope.go b/vendor/github.com/hashicorp/hil/ast/scope.go
deleted file mode 100644
index 7a975d999..000000000
--- a/vendor/github.com/hashicorp/hil/ast/scope.go
+++ /dev/null
@@ -1,90 +0,0 @@
-package ast
-
-import (
- "fmt"
- "reflect"
-)
-
-// Scope is the interface used to look up variables and functions while
-// evaluating. How these functions/variables are defined are up to the caller.
-type Scope interface {
- LookupFunc(string) (Function, bool)
- LookupVar(string) (Variable, bool)
-}
-
-// Variable is a variable value for execution given as input to the engine.
-// It records the value of a variable along with its type.
-type Variable struct {
- Value interface{}
- Type Type
-}
-
-// NewVariable creates a new Variable for the given value. This will
-// attempt to infer the correct type. If it can't, an error will be returned.
-func NewVariable(v interface{}) (result Variable, err error) {
- switch v := reflect.ValueOf(v); v.Kind() {
- case reflect.String:
- result.Type = TypeString
- default:
- err = fmt.Errorf("Unknown type: %s", v.Kind())
- }
-
- result.Value = v
- return
-}
-
-// String implements Stringer on Variable, displaying the type and value
-// of the Variable.
-func (v Variable) String() string {
- return fmt.Sprintf("{Variable (%s): %+v}", v.Type, v.Value)
-}
-
-// Function defines a function that can be executed by the engine.
-// The type checker will validate that the proper types will be called
-// to the callback.
-type Function struct {
- // ArgTypes is the list of types in argument order. These are the
- // required arguments.
- //
- // ReturnType is the type of the returned value. The Callback MUST
- // return this type.
- ArgTypes []Type
- ReturnType Type
-
- // Variadic, if true, says that this function is variadic, meaning
- // it takes a variable number of arguments. In this case, the
- // VariadicType must be set.
- Variadic bool
- VariadicType Type
-
- // Callback is the function called for a function. The argument
- // types are guaranteed to match the spec above by the type checker.
- // The length of the args is strictly == len(ArgTypes) unless Variadic
- // is true, in which case it's >= len(ArgTypes).
- Callback func([]interface{}) (interface{}, error)
-}
-
-// BasicScope is a simple scope that looks up variables and functions
-// using a map.
-type BasicScope struct {
- FuncMap map[string]Function
- VarMap map[string]Variable
-}
-
-func (s *BasicScope) LookupFunc(n string) (Function, bool) {
- if s == nil {
- return Function{}, false
- }
-
- v, ok := s.FuncMap[n]
- return v, ok
-}
-
-func (s *BasicScope) LookupVar(n string) (Variable, bool) {
- if s == nil {
- return Variable{}, false
- }
-
- v, ok := s.VarMap[n]
- return v, ok
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/stack.go b/vendor/github.com/hashicorp/hil/ast/stack.go
deleted file mode 100644
index bd2bc1578..000000000
--- a/vendor/github.com/hashicorp/hil/ast/stack.go
+++ /dev/null
@@ -1,25 +0,0 @@
-package ast
-
-// Stack is a stack of Node.
-type Stack struct {
- stack []Node
-}
-
-func (s *Stack) Len() int {
- return len(s.stack)
-}
-
-func (s *Stack) Push(n Node) {
- s.stack = append(s.stack, n)
-}
-
-func (s *Stack) Pop() Node {
- x := s.stack[len(s.stack)-1]
- s.stack[len(s.stack)-1] = nil
- s.stack = s.stack[:len(s.stack)-1]
- return x
-}
-
-func (s *Stack) Reset() {
- s.stack = nil
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/type_string.go b/vendor/github.com/hashicorp/hil/ast/type_string.go
deleted file mode 100644
index 1f51a98dd..000000000
--- a/vendor/github.com/hashicorp/hil/ast/type_string.go
+++ /dev/null
@@ -1,54 +0,0 @@
-// Code generated by "stringer -type=Type"; DO NOT EDIT
-
-package ast
-
-import "fmt"
-
-const (
- _Type_name_0 = "TypeInvalid"
- _Type_name_1 = "TypeAny"
- _Type_name_2 = "TypeBool"
- _Type_name_3 = "TypeString"
- _Type_name_4 = "TypeInt"
- _Type_name_5 = "TypeFloat"
- _Type_name_6 = "TypeList"
- _Type_name_7 = "TypeMap"
- _Type_name_8 = "TypeUnknown"
-)
-
-var (
- _Type_index_0 = [...]uint8{0, 11}
- _Type_index_1 = [...]uint8{0, 7}
- _Type_index_2 = [...]uint8{0, 8}
- _Type_index_3 = [...]uint8{0, 10}
- _Type_index_4 = [...]uint8{0, 7}
- _Type_index_5 = [...]uint8{0, 9}
- _Type_index_6 = [...]uint8{0, 8}
- _Type_index_7 = [...]uint8{0, 7}
- _Type_index_8 = [...]uint8{0, 11}
-)
-
-func (i Type) String() string {
- switch {
- case i == 0:
- return _Type_name_0
- case i == 2:
- return _Type_name_1
- case i == 4:
- return _Type_name_2
- case i == 8:
- return _Type_name_3
- case i == 16:
- return _Type_name_4
- case i == 32:
- return _Type_name_5
- case i == 64:
- return _Type_name_6
- case i == 128:
- return _Type_name_7
- case i == 256:
- return _Type_name_8
- default:
- return fmt.Sprintf("Type(%d)", i)
- }
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/unknown.go b/vendor/github.com/hashicorp/hil/ast/unknown.go
deleted file mode 100644
index d6ddaecc7..000000000
--- a/vendor/github.com/hashicorp/hil/ast/unknown.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package ast
-
-// IsUnknown reports whether a variable is unknown or contains any value
-// that is unknown. This will recurse into lists and maps and so on.
-func IsUnknown(v Variable) bool {
- // If it is unknown itself, return true
- if v.Type == TypeUnknown {
- return true
- }
-
- // If it is a container type, check the values
- switch v.Type {
- case TypeList:
- for _, el := range v.Value.([]Variable) {
- if IsUnknown(el) {
- return true
- }
- }
- case TypeMap:
- for _, el := range v.Value.(map[string]Variable) {
- if IsUnknown(el) {
- return true
- }
- }
- default:
- }
-
- // Not a container type, or it survived the checks above
- return false
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/variable_access.go b/vendor/github.com/hashicorp/hil/ast/variable_access.go
deleted file mode 100644
index 4c1362d75..000000000
--- a/vendor/github.com/hashicorp/hil/ast/variable_access.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package ast
-
-import (
- "fmt"
-)
-
-// VariableAccess represents a variable access.
-type VariableAccess struct {
- Name string
- Posx Pos
-}
-
-func (n *VariableAccess) Accept(v Visitor) Node {
- return v(n)
-}
-
-func (n *VariableAccess) Pos() Pos {
- return n.Posx
-}
-
-func (n *VariableAccess) GoString() string {
- return fmt.Sprintf("*%#v", *n)
-}
-
-func (n *VariableAccess) String() string {
- return fmt.Sprintf("Variable(%s)", n.Name)
-}
-
-func (n *VariableAccess) Type(s Scope) (Type, error) {
- v, ok := s.LookupVar(n.Name)
- if !ok {
- return TypeInvalid, fmt.Errorf("unknown variable: %s", n.Name)
- }
-
- return v.Type, nil
-}
diff --git a/vendor/github.com/hashicorp/hil/ast/variables_helper.go b/vendor/github.com/hashicorp/hil/ast/variables_helper.go
deleted file mode 100644
index 06bd18de2..000000000
--- a/vendor/github.com/hashicorp/hil/ast/variables_helper.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package ast
-
-import "fmt"
-
-func VariableListElementTypesAreHomogenous(variableName string, list []Variable) (Type, error) {
- if len(list) == 0 {
- return TypeInvalid, fmt.Errorf("list %q does not have any elements so cannot determine type.", variableName)
- }
-
- elemType := TypeUnknown
- for _, v := range list {
- if v.Type == TypeUnknown {
- continue
- }
-
- if elemType == TypeUnknown {
- elemType = v.Type
- continue
- }
-
- if v.Type != elemType {
- return TypeInvalid, fmt.Errorf(
- "list %q does not have homogenous types. found %s and then %s",
- variableName,
- elemType, v.Type,
- )
- }
-
- elemType = v.Type
- }
-
- return elemType, nil
-}
-
-func VariableMapValueTypesAreHomogenous(variableName string, vmap map[string]Variable) (Type, error) {
- if len(vmap) == 0 {
- return TypeInvalid, fmt.Errorf("map %q does not have any elements so cannot determine type.", variableName)
- }
-
- elemType := TypeUnknown
- for _, v := range vmap {
- if v.Type == TypeUnknown {
- continue
- }
-
- if elemType == TypeUnknown {
- elemType = v.Type
- continue
- }
-
- if v.Type != elemType {
- return TypeInvalid, fmt.Errorf(
- "map %q does not have homogenous types. found %s and then %s",
- variableName,
- elemType, v.Type,
- )
- }
-
- elemType = v.Type
- }
-
- return elemType, nil
-}
diff --git a/vendor/github.com/hashicorp/hil/builtins.go b/vendor/github.com/hashicorp/hil/builtins.go
deleted file mode 100644
index 909c788a2..000000000
--- a/vendor/github.com/hashicorp/hil/builtins.go
+++ /dev/null
@@ -1,331 +0,0 @@
-package hil
-
-import (
- "errors"
- "strconv"
-
- "github.com/hashicorp/hil/ast"
-)
-
-// NOTE: All builtins are tested in engine_test.go
-
-func registerBuiltins(scope *ast.BasicScope) *ast.BasicScope {
- if scope == nil {
- scope = new(ast.BasicScope)
- }
- if scope.FuncMap == nil {
- scope.FuncMap = make(map[string]ast.Function)
- }
-
- // Implicit conversions
- scope.FuncMap["__builtin_BoolToString"] = builtinBoolToString()
- scope.FuncMap["__builtin_FloatToInt"] = builtinFloatToInt()
- scope.FuncMap["__builtin_FloatToString"] = builtinFloatToString()
- scope.FuncMap["__builtin_IntToFloat"] = builtinIntToFloat()
- scope.FuncMap["__builtin_IntToString"] = builtinIntToString()
- scope.FuncMap["__builtin_StringToInt"] = builtinStringToInt()
- scope.FuncMap["__builtin_StringToFloat"] = builtinStringToFloat()
- scope.FuncMap["__builtin_StringToBool"] = builtinStringToBool()
-
- // Math operations
- scope.FuncMap["__builtin_IntMath"] = builtinIntMath()
- scope.FuncMap["__builtin_FloatMath"] = builtinFloatMath()
- scope.FuncMap["__builtin_BoolCompare"] = builtinBoolCompare()
- scope.FuncMap["__builtin_FloatCompare"] = builtinFloatCompare()
- scope.FuncMap["__builtin_IntCompare"] = builtinIntCompare()
- scope.FuncMap["__builtin_StringCompare"] = builtinStringCompare()
- scope.FuncMap["__builtin_Logical"] = builtinLogical()
- return scope
-}
-
-func builtinFloatMath() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt},
- Variadic: true,
- VariadicType: ast.TypeFloat,
- ReturnType: ast.TypeFloat,
- Callback: func(args []interface{}) (interface{}, error) {
- op := args[0].(ast.ArithmeticOp)
- result := args[1].(float64)
- for _, raw := range args[2:] {
- arg := raw.(float64)
- switch op {
- case ast.ArithmeticOpAdd:
- result += arg
- case ast.ArithmeticOpSub:
- result -= arg
- case ast.ArithmeticOpMul:
- result *= arg
- case ast.ArithmeticOpDiv:
- result /= arg
- }
- }
-
- return result, nil
- },
- }
-}
-
-func builtinIntMath() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt},
- Variadic: true,
- VariadicType: ast.TypeInt,
- ReturnType: ast.TypeInt,
- Callback: func(args []interface{}) (interface{}, error) {
- op := args[0].(ast.ArithmeticOp)
- result := args[1].(int)
- for _, raw := range args[2:] {
- arg := raw.(int)
- switch op {
- case ast.ArithmeticOpAdd:
- result += arg
- case ast.ArithmeticOpSub:
- result -= arg
- case ast.ArithmeticOpMul:
- result *= arg
- case ast.ArithmeticOpDiv:
- if arg == 0 {
- return nil, errors.New("divide by zero")
- }
-
- result /= arg
- case ast.ArithmeticOpMod:
- if arg == 0 {
- return nil, errors.New("divide by zero")
- }
-
- result = result % arg
- }
- }
-
- return result, nil
- },
- }
-}
-
-func builtinBoolCompare() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt, ast.TypeBool, ast.TypeBool},
- Variadic: false,
- ReturnType: ast.TypeBool,
- Callback: func(args []interface{}) (interface{}, error) {
- op := args[0].(ast.ArithmeticOp)
- lhs := args[1].(bool)
- rhs := args[2].(bool)
-
- switch op {
- case ast.ArithmeticOpEqual:
- return lhs == rhs, nil
- case ast.ArithmeticOpNotEqual:
- return lhs != rhs, nil
- default:
- return nil, errors.New("invalid comparison operation")
- }
- },
- }
-}
-
-func builtinFloatCompare() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt, ast.TypeFloat, ast.TypeFloat},
- Variadic: false,
- ReturnType: ast.TypeBool,
- Callback: func(args []interface{}) (interface{}, error) {
- op := args[0].(ast.ArithmeticOp)
- lhs := args[1].(float64)
- rhs := args[2].(float64)
-
- switch op {
- case ast.ArithmeticOpEqual:
- return lhs == rhs, nil
- case ast.ArithmeticOpNotEqual:
- return lhs != rhs, nil
- case ast.ArithmeticOpLessThan:
- return lhs < rhs, nil
- case ast.ArithmeticOpLessThanOrEqual:
- return lhs <= rhs, nil
- case ast.ArithmeticOpGreaterThan:
- return lhs > rhs, nil
- case ast.ArithmeticOpGreaterThanOrEqual:
- return lhs >= rhs, nil
- default:
- return nil, errors.New("invalid comparison operation")
- }
- },
- }
-}
-
-func builtinIntCompare() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt, ast.TypeInt, ast.TypeInt},
- Variadic: false,
- ReturnType: ast.TypeBool,
- Callback: func(args []interface{}) (interface{}, error) {
- op := args[0].(ast.ArithmeticOp)
- lhs := args[1].(int)
- rhs := args[2].(int)
-
- switch op {
- case ast.ArithmeticOpEqual:
- return lhs == rhs, nil
- case ast.ArithmeticOpNotEqual:
- return lhs != rhs, nil
- case ast.ArithmeticOpLessThan:
- return lhs < rhs, nil
- case ast.ArithmeticOpLessThanOrEqual:
- return lhs <= rhs, nil
- case ast.ArithmeticOpGreaterThan:
- return lhs > rhs, nil
- case ast.ArithmeticOpGreaterThanOrEqual:
- return lhs >= rhs, nil
- default:
- return nil, errors.New("invalid comparison operation")
- }
- },
- }
-}
-
-func builtinStringCompare() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt, ast.TypeString, ast.TypeString},
- Variadic: false,
- ReturnType: ast.TypeBool,
- Callback: func(args []interface{}) (interface{}, error) {
- op := args[0].(ast.ArithmeticOp)
- lhs := args[1].(string)
- rhs := args[2].(string)
-
- switch op {
- case ast.ArithmeticOpEqual:
- return lhs == rhs, nil
- case ast.ArithmeticOpNotEqual:
- return lhs != rhs, nil
- default:
- return nil, errors.New("invalid comparison operation")
- }
- },
- }
-}
-
-func builtinLogical() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt},
- Variadic: true,
- VariadicType: ast.TypeBool,
- ReturnType: ast.TypeBool,
- Callback: func(args []interface{}) (interface{}, error) {
- op := args[0].(ast.ArithmeticOp)
- result := args[1].(bool)
- for _, raw := range args[2:] {
- arg := raw.(bool)
- switch op {
- case ast.ArithmeticOpLogicalOr:
- result = result || arg
- case ast.ArithmeticOpLogicalAnd:
- result = result && arg
- default:
- return nil, errors.New("invalid logical operator")
- }
- }
-
- return result, nil
- },
- }
-}
-
-func builtinFloatToInt() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeFloat},
- ReturnType: ast.TypeInt,
- Callback: func(args []interface{}) (interface{}, error) {
- return int(args[0].(float64)), nil
- },
- }
-}
-
-func builtinFloatToString() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeFloat},
- ReturnType: ast.TypeString,
- Callback: func(args []interface{}) (interface{}, error) {
- return strconv.FormatFloat(
- args[0].(float64), 'g', -1, 64), nil
- },
- }
-}
-
-func builtinIntToFloat() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt},
- ReturnType: ast.TypeFloat,
- Callback: func(args []interface{}) (interface{}, error) {
- return float64(args[0].(int)), nil
- },
- }
-}
-
-func builtinIntToString() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt},
- ReturnType: ast.TypeString,
- Callback: func(args []interface{}) (interface{}, error) {
- return strconv.FormatInt(int64(args[0].(int)), 10), nil
- },
- }
-}
-
-func builtinStringToInt() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeInt},
- ReturnType: ast.TypeString,
- Callback: func(args []interface{}) (interface{}, error) {
- v, err := strconv.ParseInt(args[0].(string), 0, 0)
- if err != nil {
- return nil, err
- }
-
- return int(v), nil
- },
- }
-}
-
-func builtinStringToFloat() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeString},
- ReturnType: ast.TypeFloat,
- Callback: func(args []interface{}) (interface{}, error) {
- v, err := strconv.ParseFloat(args[0].(string), 64)
- if err != nil {
- return nil, err
- }
-
- return v, nil
- },
- }
-}
-
-func builtinBoolToString() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeBool},
- ReturnType: ast.TypeString,
- Callback: func(args []interface{}) (interface{}, error) {
- return strconv.FormatBool(args[0].(bool)), nil
- },
- }
-}
-
-func builtinStringToBool() ast.Function {
- return ast.Function{
- ArgTypes: []ast.Type{ast.TypeString},
- ReturnType: ast.TypeBool,
- Callback: func(args []interface{}) (interface{}, error) {
- v, err := strconv.ParseBool(args[0].(string))
- if err != nil {
- return nil, err
- }
-
- return v, nil
- },
- }
-}
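For context, every builtin in the file removed above follows the same ast.Function shape that callers use to register their own interpolation functions. The sketch below is hypothetical (the upper function and the package main harness are illustrations, not part of this change) and assumes github.com/hashicorp/hil stays available as a Go module dependency after de-vendoring:

// Hypothetical sketch: a user-defined HIL function declared with the same
// ast.Function shape as the builtins above.
package main

import (
	"fmt"
	"strings"

	"github.com/hashicorp/hil/ast"
)

// newUpperFunc builds a function that HIL expressions can call as upper(...).
func newUpperFunc() ast.Function {
	return ast.Function{
		ArgTypes:   []ast.Type{ast.TypeString},
		ReturnType: ast.TypeString,
		Callback: func(args []interface{}) (interface{}, error) {
			// Arguments arrive already type-checked, so the assertion is safe.
			return strings.ToUpper(args[0].(string)), nil
		},
	}
}

func main() {
	fn := newUpperFunc()
	out, err := fn.Callback([]interface{}{"hello"})
	fmt.Println(out, err) // HELLO <nil>
}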
diff --git a/vendor/github.com/hashicorp/hil/check_identifier.go b/vendor/github.com/hashicorp/hil/check_identifier.go
deleted file mode 100644
index 474f50588..000000000
--- a/vendor/github.com/hashicorp/hil/check_identifier.go
+++ /dev/null
@@ -1,88 +0,0 @@
-package hil
-
-import (
- "fmt"
- "sync"
-
- "github.com/hashicorp/hil/ast"
-)
-
-// IdentifierCheck is a SemanticCheck that checks that all identifiers
-// resolve properly and that the right number of arguments are passed
-// to functions.
-type IdentifierCheck struct {
- Scope ast.Scope
-
- err error
- lock sync.Mutex
-}
-
-func (c *IdentifierCheck) Visit(root ast.Node) error {
- c.lock.Lock()
- defer c.lock.Unlock()
- defer c.reset()
- root.Accept(c.visit)
- return c.err
-}
-
-func (c *IdentifierCheck) visit(raw ast.Node) ast.Node {
- if c.err != nil {
- return raw
- }
-
- switch n := raw.(type) {
- case *ast.Call:
- c.visitCall(n)
- case *ast.VariableAccess:
- c.visitVariableAccess(n)
- case *ast.Output:
- // Ignore
- case *ast.LiteralNode:
- // Ignore
- default:
- // Ignore
- }
-
- // We never do replacement with this visitor
- return raw
-}
-
-func (c *IdentifierCheck) visitCall(n *ast.Call) {
- // Look up the function in the map
- function, ok := c.Scope.LookupFunc(n.Func)
- if !ok {
- c.createErr(n, fmt.Sprintf("unknown function called: %s", n.Func))
- return
- }
-
- // Break up the args into what is variadic and what is required
- args := n.Args
- if function.Variadic && len(args) > len(function.ArgTypes) {
- args = n.Args[:len(function.ArgTypes)]
- }
-
- // Verify the number of arguments
- if len(args) != len(function.ArgTypes) {
- c.createErr(n, fmt.Sprintf(
- "%s: expected %d arguments, got %d",
- n.Func, len(function.ArgTypes), len(n.Args)))
- return
- }
-}
-
-func (c *IdentifierCheck) visitVariableAccess(n *ast.VariableAccess) {
- // Look up the variable in the map
- if _, ok := c.Scope.LookupVar(n.Name); !ok {
- c.createErr(n, fmt.Sprintf(
- "unknown variable accessed: %s", n.Name))
- return
- }
-}
-
-func (c *IdentifierCheck) createErr(n ast.Node, str string) {
- c.err = fmt.Errorf("%s: %s", n.Pos(), str)
-}
-
-func (c *IdentifierCheck) reset() {
- c.err = nil
-}
diff --git a/vendor/github.com/hashicorp/hil/check_types.go b/vendor/github.com/hashicorp/hil/check_types.go
deleted file mode 100644
index f16da3918..000000000
--- a/vendor/github.com/hashicorp/hil/check_types.go
+++ /dev/null
@@ -1,668 +0,0 @@
-package hil
-
-import (
- "fmt"
- "sync"
-
- "github.com/hashicorp/hil/ast"
-)
-
-// TypeCheck implements ast.Visitor for type checking an AST tree.
-// It requires some configuration to look up the type of nodes.
-//
-// It also optionally will not type error and will insert an implicit
-// type conversions for specific types if specified by the Implicit
-// field. Note that this is kind of organizationally weird to put into
-// this structure but we'd rather do that than duplicate the type checking
-// logic multiple times.
-type TypeCheck struct {
- Scope ast.Scope
-
- // Implicit is a map of implicit type conversions that we can do,
- // and that shouldn't error. The key of the first map is the from type,
- // the key of the second map is the to type, and the final string
- // value is the function to call (which must be registered in the Scope).
- Implicit map[ast.Type]map[ast.Type]string
-
- // Stack of types. This shouldn't be used directly except by implementations
- // of TypeCheckNode.
- Stack []ast.Type
-
- err error
- lock sync.Mutex
-}
-
-// TypeCheckNode is the interface that must be implemented by any
-// ast.Node that wants to support type-checking. If the type checker
-// encounters a node that doesn't implement this, it will error.
-type TypeCheckNode interface {
- TypeCheck(*TypeCheck) (ast.Node, error)
-}
-
-func (v *TypeCheck) Visit(root ast.Node) error {
- v.lock.Lock()
- defer v.lock.Unlock()
- defer v.reset()
- root.Accept(v.visit)
-
- // If the resulting type is unknown, then just let the whole thing go.
- if v.err == errExitUnknown {
- v.err = nil
- }
-
- return v.err
-}
-
-func (v *TypeCheck) visit(raw ast.Node) ast.Node {
- if v.err != nil {
- return raw
- }
-
- var result ast.Node
- var err error
- switch n := raw.(type) {
- case *ast.Arithmetic:
- tc := &typeCheckArithmetic{n}
- result, err = tc.TypeCheck(v)
- case *ast.Call:
- tc := &typeCheckCall{n}
- result, err = tc.TypeCheck(v)
- case *ast.Conditional:
- tc := &typeCheckConditional{n}
- result, err = tc.TypeCheck(v)
- case *ast.Index:
- tc := &typeCheckIndex{n}
- result, err = tc.TypeCheck(v)
- case *ast.Output:
- tc := &typeCheckOutput{n}
- result, err = tc.TypeCheck(v)
- case *ast.LiteralNode:
- tc := &typeCheckLiteral{n}
- result, err = tc.TypeCheck(v)
- case *ast.VariableAccess:
- tc := &typeCheckVariableAccess{n}
- result, err = tc.TypeCheck(v)
- default:
- tc, ok := raw.(TypeCheckNode)
- if !ok {
- err = fmt.Errorf("unknown node for type check: %#v", raw)
- break
- }
-
- result, err = tc.TypeCheck(v)
- }
-
- if err != nil {
- pos := raw.Pos()
- v.err = fmt.Errorf("At column %d, line %d: %s",
- pos.Column, pos.Line, err)
- }
-
- return result
-}
-
-type typeCheckArithmetic struct {
- n *ast.Arithmetic
-}
-
-func (tc *typeCheckArithmetic) TypeCheck(v *TypeCheck) (ast.Node, error) {
- // The arguments are on the stack in reverse order, so pop them off.
- exprs := make([]ast.Type, len(tc.n.Exprs))
- for i, _ := range tc.n.Exprs {
- exprs[len(tc.n.Exprs)-1-i] = v.StackPop()
- }
-
- // If any operand is unknown then our result is automatically unknown
- for _, ty := range exprs {
- if ty == ast.TypeUnknown {
- v.StackPush(ast.TypeUnknown)
- return tc.n, nil
- }
- }
-
- switch tc.n.Op {
- case ast.ArithmeticOpLogicalAnd, ast.ArithmeticOpLogicalOr:
- return tc.checkLogical(v, exprs)
- case ast.ArithmeticOpEqual, ast.ArithmeticOpNotEqual,
- ast.ArithmeticOpLessThan, ast.ArithmeticOpGreaterThan,
- ast.ArithmeticOpGreaterThanOrEqual, ast.ArithmeticOpLessThanOrEqual:
- return tc.checkComparison(v, exprs)
- default:
- return tc.checkNumeric(v, exprs)
- }
-
-}
-
-func (tc *typeCheckArithmetic) checkNumeric(v *TypeCheck, exprs []ast.Type) (ast.Node, error) {
- // Determine the resulting type we want. We do this by going over
- // every expression until we find one with a type we recognize.
- // We do this because the first expr might be a string ("var.foo")
- // and we need to know what to implicit to.
- mathFunc := "__builtin_IntMath"
- mathType := ast.TypeInt
- for _, v := range exprs {
- // We assume int math but if we find ANY float, the entire
- // expression turns into floating point math.
- if v == ast.TypeFloat {
- mathFunc = "__builtin_FloatMath"
- mathType = v
- break
- }
- }
-
- // Verify the args
- for i, arg := range exprs {
- if arg != mathType {
- cn := v.ImplicitConversion(exprs[i], mathType, tc.n.Exprs[i])
- if cn != nil {
- tc.n.Exprs[i] = cn
- continue
- }
-
- return nil, fmt.Errorf(
- "operand %d should be %s, got %s",
- i+1, mathType, arg)
- }
- }
-
- // Modulo doesn't work for floats
- if mathType == ast.TypeFloat && tc.n.Op == ast.ArithmeticOpMod {
- return nil, fmt.Errorf("modulo cannot be used with floats")
- }
-
- // Return type
- v.StackPush(mathType)
-
- // Replace our node with a call to the proper function. This isn't
- // type checked but we already verified types.
- args := make([]ast.Node, len(tc.n.Exprs)+1)
- args[0] = &ast.LiteralNode{
- Value: tc.n.Op,
- Typex: ast.TypeInt,
- Posx: tc.n.Pos(),
- }
- copy(args[1:], tc.n.Exprs)
- return &ast.Call{
- Func: mathFunc,
- Args: args,
- Posx: tc.n.Pos(),
- }, nil
-}
-
-func (tc *typeCheckArithmetic) checkComparison(v *TypeCheck, exprs []ast.Type) (ast.Node, error) {
- if len(exprs) != 2 {
- // This should never happen, because the parser never produces
- // nodes that violate this.
- return nil, fmt.Errorf(
- "comparison operators must have exactly two operands",
- )
- }
-
- // The first operand always dictates the type for a comparison.
- compareFunc := ""
- compareType := exprs[0]
- switch compareType {
- case ast.TypeBool:
- compareFunc = "__builtin_BoolCompare"
- case ast.TypeFloat:
- compareFunc = "__builtin_FloatCompare"
- case ast.TypeInt:
- compareFunc = "__builtin_IntCompare"
- case ast.TypeString:
- compareFunc = "__builtin_StringCompare"
- default:
- return nil, fmt.Errorf(
- "comparison operators apply only to bool, float, int, and string",
- )
- }
-
- // For non-equality comparisons, we will do implicit conversions to
- // integer types if possible. In this case, we need to go through and
- // determine the type of comparison we're doing to enable the implicit
- // conversion.
- if tc.n.Op != ast.ArithmeticOpEqual && tc.n.Op != ast.ArithmeticOpNotEqual {
- compareFunc = "__builtin_IntCompare"
- compareType = ast.TypeInt
- for _, expr := range exprs {
- if expr == ast.TypeFloat {
- compareFunc = "__builtin_FloatCompare"
- compareType = ast.TypeFloat
- break
- }
- }
- }
-
- // Verify (and possibly, convert) the args
- for i, arg := range exprs {
- if arg != compareType {
- cn := v.ImplicitConversion(exprs[i], compareType, tc.n.Exprs[i])
- if cn != nil {
- tc.n.Exprs[i] = cn
- continue
- }
-
- return nil, fmt.Errorf(
- "operand %d should be %s, got %s",
- i+1, compareType, arg,
- )
- }
- }
-
- // Only ints and floats can have the <, >, <= and >= operators applied
- switch tc.n.Op {
- case ast.ArithmeticOpEqual, ast.ArithmeticOpNotEqual:
- // anything goes
- default:
- switch compareType {
- case ast.TypeFloat, ast.TypeInt:
- // fine
- default:
- return nil, fmt.Errorf(
- "<, >, <= and >= may apply only to int and float values",
- )
- }
- }
-
- // Comparison operators always return bool
- v.StackPush(ast.TypeBool)
-
- // Replace our node with a call to the proper function. This isn't
- // type checked but we already verified types.
- args := make([]ast.Node, len(tc.n.Exprs)+1)
- args[0] = &ast.LiteralNode{
- Value: tc.n.Op,
- Typex: ast.TypeInt,
- Posx: tc.n.Pos(),
- }
- copy(args[1:], tc.n.Exprs)
- return &ast.Call{
- Func: compareFunc,
- Args: args,
- Posx: tc.n.Pos(),
- }, nil
-}
-
-func (tc *typeCheckArithmetic) checkLogical(v *TypeCheck, exprs []ast.Type) (ast.Node, error) {
- for i, t := range exprs {
- if t != ast.TypeBool {
- cn := v.ImplicitConversion(t, ast.TypeBool, tc.n.Exprs[i])
- if cn == nil {
- return nil, fmt.Errorf(
- "logical operators require boolean operands, not %s",
- t,
- )
- }
- tc.n.Exprs[i] = cn
- }
- }
-
- // Return type is always boolean
- v.StackPush(ast.TypeBool)
-
- // Arithmetic nodes are replaced with a call to a built-in function
- args := make([]ast.Node, len(tc.n.Exprs)+1)
- args[0] = &ast.LiteralNode{
- Value: tc.n.Op,
- Typex: ast.TypeInt,
- Posx: tc.n.Pos(),
- }
- copy(args[1:], tc.n.Exprs)
- return &ast.Call{
- Func: "__builtin_Logical",
- Args: args,
- Posx: tc.n.Pos(),
- }, nil
-}
-
-type typeCheckCall struct {
- n *ast.Call
-}
-
-func (tc *typeCheckCall) TypeCheck(v *TypeCheck) (ast.Node, error) {
- // Look up the function in the map
- function, ok := v.Scope.LookupFunc(tc.n.Func)
- if !ok {
- return nil, fmt.Errorf("unknown function called: %s", tc.n.Func)
- }
-
- // The arguments are on the stack in reverse order, so pop them off.
- args := make([]ast.Type, len(tc.n.Args))
- for i, _ := range tc.n.Args {
- args[len(tc.n.Args)-1-i] = v.StackPop()
- }
-
- // Verify the args
- for i, expected := range function.ArgTypes {
- if expected == ast.TypeAny {
- continue
- }
-
- if args[i] == ast.TypeUnknown {
- v.StackPush(ast.TypeUnknown)
- return tc.n, nil
- }
-
- if args[i] != expected {
- cn := v.ImplicitConversion(args[i], expected, tc.n.Args[i])
- if cn != nil {
- tc.n.Args[i] = cn
- continue
- }
-
- return nil, fmt.Errorf(
- "%s: argument %d should be %s, got %s",
- tc.n.Func, i+1, expected.Printable(), args[i].Printable())
- }
- }
-
- // If we're variadic, then verify the types there
- if function.Variadic && function.VariadicType != ast.TypeAny {
- args = args[len(function.ArgTypes):]
- for i, t := range args {
- if t == ast.TypeUnknown {
- v.StackPush(ast.TypeUnknown)
- return tc.n, nil
- }
-
- if t != function.VariadicType {
- realI := i + len(function.ArgTypes)
- cn := v.ImplicitConversion(
- t, function.VariadicType, tc.n.Args[realI])
- if cn != nil {
- tc.n.Args[realI] = cn
- continue
- }
-
- return nil, fmt.Errorf(
- "%s: argument %d should be %s, got %s",
- tc.n.Func, realI,
- function.VariadicType.Printable(), t.Printable())
- }
- }
- }
-
- // Return type
- v.StackPush(function.ReturnType)
-
- return tc.n, nil
-}
-
-type typeCheckConditional struct {
- n *ast.Conditional
-}
-
-func (tc *typeCheckConditional) TypeCheck(v *TypeCheck) (ast.Node, error) {
- // On the stack we have the types of the condition, true and false
- // expressions, but they are in reverse order.
- falseType := v.StackPop()
- trueType := v.StackPop()
- condType := v.StackPop()
-
- if condType == ast.TypeUnknown {
- v.StackPush(ast.TypeUnknown)
- return tc.n, nil
- }
-
- if condType != ast.TypeBool {
- cn := v.ImplicitConversion(condType, ast.TypeBool, tc.n.CondExpr)
- if cn == nil {
- return nil, fmt.Errorf(
- "condition must be type bool, not %s", condType.Printable(),
- )
- }
- tc.n.CondExpr = cn
- }
-
- // The types of the true and false expression must match
- if trueType != falseType && trueType != ast.TypeUnknown && falseType != ast.TypeUnknown {
-
- // Since passing around stringified versions of other types is
- // common, we pragmatically allow the false expression to dictate
- // the result type when the true expression is a string.
- if trueType == ast.TypeString {
- cn := v.ImplicitConversion(trueType, falseType, tc.n.TrueExpr)
- if cn == nil {
- return nil, fmt.Errorf(
- "true and false expression types must match; have %s and %s",
- trueType.Printable(), falseType.Printable(),
- )
- }
- tc.n.TrueExpr = cn
- trueType = falseType
- } else {
- cn := v.ImplicitConversion(falseType, trueType, tc.n.FalseExpr)
- if cn == nil {
- return nil, fmt.Errorf(
- "true and false expression types must match; have %s and %s",
- trueType.Printable(), falseType.Printable(),
- )
- }
- tc.n.FalseExpr = cn
- falseType = trueType
- }
- }
-
- // Currently list and map types cannot be used, because we cannot
- // generally assert that their element types are consistent.
- // Such support might be added later, either by improving the type
- // system or restricting usage to only variable and literal expressions,
- // but for now this is simply prohibited because it doesn't seem to
- // be a common enough case to be worth the complexity.
- switch trueType {
- case ast.TypeList:
- return nil, fmt.Errorf(
- "conditional operator cannot be used with list values",
- )
- case ast.TypeMap:
- return nil, fmt.Errorf(
- "conditional operator cannot be used with map values",
- )
- }
-
- // Result type (guaranteed to also match falseType due to the above)
- if trueType == ast.TypeUnknown {
- // falseType may also be unknown, but that's okay because two
- // unknowns means our result is unknown anyway.
- v.StackPush(falseType)
- } else {
- v.StackPush(trueType)
- }
-
- return tc.n, nil
-}
-
-type typeCheckOutput struct {
- n *ast.Output
-}
-
-func (tc *typeCheckOutput) TypeCheck(v *TypeCheck) (ast.Node, error) {
- n := tc.n
- types := make([]ast.Type, len(n.Exprs))
- for i, _ := range n.Exprs {
- types[len(n.Exprs)-1-i] = v.StackPop()
- }
-
- for _, ty := range types {
- if ty == ast.TypeUnknown {
- v.StackPush(ast.TypeUnknown)
- return tc.n, nil
- }
- }
-
- // If there is only one argument and it is a list, we evaluate to a list
- if len(types) == 1 {
- switch t := types[0]; t {
- case ast.TypeList:
- fallthrough
- case ast.TypeMap:
- v.StackPush(t)
- return n, nil
- }
- }
-
- // Otherwise, all concat args must be strings, so validate that
- resultType := ast.TypeString
- for i, t := range types {
-
- if t == ast.TypeUnknown {
- resultType = ast.TypeUnknown
- continue
- }
-
- if t != ast.TypeString {
- cn := v.ImplicitConversion(t, ast.TypeString, n.Exprs[i])
- if cn != nil {
- n.Exprs[i] = cn
- continue
- }
-
- return nil, fmt.Errorf(
- "output of an HIL expression must be a string, or a single list (argument %d is %s)", i+1, t)
- }
- }
-
- // This always results in type string, unless there are unknowns
- v.StackPush(resultType)
-
- return n, nil
-}
-
-type typeCheckLiteral struct {
- n *ast.LiteralNode
-}
-
-func (tc *typeCheckLiteral) TypeCheck(v *TypeCheck) (ast.Node, error) {
- v.StackPush(tc.n.Typex)
- return tc.n, nil
-}
-
-type typeCheckVariableAccess struct {
- n *ast.VariableAccess
-}
-
-func (tc *typeCheckVariableAccess) TypeCheck(v *TypeCheck) (ast.Node, error) {
- // Look up the variable in the map
- variable, ok := v.Scope.LookupVar(tc.n.Name)
- if !ok {
- return nil, fmt.Errorf(
- "unknown variable accessed: %s", tc.n.Name)
- }
-
- // Add the type to the stack
- v.StackPush(variable.Type)
-
- return tc.n, nil
-}
-
-type typeCheckIndex struct {
- n *ast.Index
-}
-
-func (tc *typeCheckIndex) TypeCheck(v *TypeCheck) (ast.Node, error) {
- keyType := v.StackPop()
- targetType := v.StackPop()
-
- if keyType == ast.TypeUnknown || targetType == ast.TypeUnknown {
- v.StackPush(ast.TypeUnknown)
- return tc.n, nil
- }
-
- // Ensure we have a VariableAccess as the target
- varAccessNode, ok := tc.n.Target.(*ast.VariableAccess)
- if !ok {
- return nil, fmt.Errorf(
- "target of an index must be a VariableAccess node, was %T", tc.n.Target)
- }
-
- // Get the variable
- variable, ok := v.Scope.LookupVar(varAccessNode.Name)
- if !ok {
- return nil, fmt.Errorf(
- "unknown variable accessed: %s", varAccessNode.Name)
- }
-
- switch targetType {
- case ast.TypeList:
- if keyType != ast.TypeInt {
- tc.n.Key = v.ImplicitConversion(keyType, ast.TypeInt, tc.n.Key)
- if tc.n.Key == nil {
- return nil, fmt.Errorf(
- "key of an index must be an int, was %s", keyType)
- }
- }
-
- valType, err := ast.VariableListElementTypesAreHomogenous(
- varAccessNode.Name, variable.Value.([]ast.Variable))
- if err != nil {
- return tc.n, err
- }
-
- v.StackPush(valType)
- return tc.n, nil
- case ast.TypeMap:
- if keyType != ast.TypeString {
- tc.n.Key = v.ImplicitConversion(keyType, ast.TypeString, tc.n.Key)
- if tc.n.Key == nil {
- return nil, fmt.Errorf(
- "key of an index must be a string, was %s", keyType)
- }
- }
-
- valType, err := ast.VariableMapValueTypesAreHomogenous(
- varAccessNode.Name, variable.Value.(map[string]ast.Variable))
- if err != nil {
- return tc.n, err
- }
-
- v.StackPush(valType)
- return tc.n, nil
- default:
- return nil, fmt.Errorf("invalid index operation into non-indexable type: %s", variable.Type)
- }
-}
-
-func (v *TypeCheck) ImplicitConversion(
- actual ast.Type, expected ast.Type, n ast.Node) ast.Node {
- if v.Implicit == nil {
- return nil
- }
-
- fromMap, ok := v.Implicit[actual]
- if !ok {
- return nil
- }
-
- toFunc, ok := fromMap[expected]
- if !ok {
- return nil
- }
-
- return &ast.Call{
- Func: toFunc,
- Args: []ast.Node{n},
- Posx: n.Pos(),
- }
-}
-
-func (v *TypeCheck) reset() {
- v.Stack = nil
- v.err = nil
-}
-
-func (v *TypeCheck) StackPush(t ast.Type) {
- v.Stack = append(v.Stack, t)
-}
-
-func (v *TypeCheck) StackPop() ast.Type {
- var x ast.Type
- x, v.Stack = v.Stack[len(v.Stack)-1], v.Stack[:len(v.Stack)-1]
- return x
-}
-
-func (v *TypeCheck) StackPeek() ast.Type {
- if len(v.Stack) == 0 {
- return ast.TypeInvalid
- }
-
- return v.Stack[len(v.Stack)-1]
-}
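The IdentifierCheck visitor (removed earlier) and the TypeCheck visitor removed here are both exported, so they can be driven by hand on a parsed tree before evaluation, which is how internalEval wires them up. A minimal sketch, with a hypothetical name variable and assuming the hil module is still pulled in through go.mod:

// Hypothetical sketch: running the semantic checks manually on a parsed tree.
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse(`${name == "world" ? "yes" : "no"}`)
	if err != nil {
		log.Fatal(err)
	}

	scope := &ast.BasicScope{
		VarMap: map[string]ast.Variable{
			"name": {Type: ast.TypeString, Value: "world"},
		},
	}

	// Identifier resolution first, then type checking. Note that TypeCheck
	// may rewrite nodes in place (e.g. comparisons become builtin calls).
	if err := (&hil.IdentifierCheck{Scope: scope}).Visit(tree); err != nil {
		log.Fatal(err)
	}
	if err := (&hil.TypeCheck{Scope: scope}).Visit(tree); err != nil {
		log.Fatal(err)
	}
	fmt.Println("expression is well-typed")
}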
diff --git a/vendor/github.com/hashicorp/hil/convert.go b/vendor/github.com/hashicorp/hil/convert.go
deleted file mode 100644
index 184e029b0..000000000
--- a/vendor/github.com/hashicorp/hil/convert.go
+++ /dev/null
@@ -1,174 +0,0 @@
-package hil
-
-import (
- "fmt"
- "reflect"
-
- "github.com/hashicorp/hil/ast"
- "github.com/mitchellh/mapstructure"
-)
-
-// UnknownValue is a sentinel value that can be used to denote
-// that a value of a variable (or map element, list element, etc.)
-// is unknown. This will always have the type ast.TypeUnknown.
-const UnknownValue = "74D93920-ED26-11E3-AC10-0800200C9A66"
-
-var hilMapstructureDecodeHookSlice []interface{}
-var hilMapstructureDecodeHookStringSlice []string
-var hilMapstructureDecodeHookMap map[string]interface{}
-
-// hilMapstructureWeakDecode behaves in the same way as mapstructure.WeakDecode
-// but has a DecodeHook which defeats the backward compatibility mode of mapstructure
-// which WeakDecodes []interface{}{} into an empty map[string]interface{}. This
-// allows us to use WeakDecode (desirable), but not fail on empty lists.
-func hilMapstructureWeakDecode(m interface{}, rawVal interface{}) error {
- config := &mapstructure.DecoderConfig{
- DecodeHook: func(source reflect.Type, target reflect.Type, val interface{}) (interface{}, error) {
- sliceType := reflect.TypeOf(hilMapstructureDecodeHookSlice)
- stringSliceType := reflect.TypeOf(hilMapstructureDecodeHookStringSlice)
- mapType := reflect.TypeOf(hilMapstructureDecodeHookMap)
-
- if (source == sliceType || source == stringSliceType) && target == mapType {
- return nil, fmt.Errorf("Cannot convert %s into a %s", source, target)
- }
-
- return val, nil
- },
- WeaklyTypedInput: true,
- Result: rawVal,
- }
-
- decoder, err := mapstructure.NewDecoder(config)
- if err != nil {
- return err
- }
-
- return decoder.Decode(m)
-}
-
-func InterfaceToVariable(input interface{}) (ast.Variable, error) {
- if iv, ok := input.(ast.Variable); ok {
- return iv, nil
- }
-
- // This is just to maintain backward compatibility
- // after https://github.com/mitchellh/mapstructure/pull/98
- if v, ok := input.([]ast.Variable); ok {
- return ast.Variable{
- Type: ast.TypeList,
- Value: v,
- }, nil
- }
- if v, ok := input.(map[string]ast.Variable); ok {
- return ast.Variable{
- Type: ast.TypeMap,
- Value: v,
- }, nil
- }
-
- var stringVal string
- if err := hilMapstructureWeakDecode(input, &stringVal); err == nil {
- // Special case the unknown value to turn into "unknown"
- if stringVal == UnknownValue {
- return ast.Variable{Value: UnknownValue, Type: ast.TypeUnknown}, nil
- }
-
- // Otherwise return the string value
- return ast.Variable{
- Type: ast.TypeString,
- Value: stringVal,
- }, nil
- }
-
- var mapVal map[string]interface{}
- if err := hilMapstructureWeakDecode(input, &mapVal); err == nil {
- elements := make(map[string]ast.Variable)
- for i, element := range mapVal {
- varElement, err := InterfaceToVariable(element)
- if err != nil {
- return ast.Variable{}, err
- }
- elements[i] = varElement
- }
-
- return ast.Variable{
- Type: ast.TypeMap,
- Value: elements,
- }, nil
- }
-
- var sliceVal []interface{}
- if err := hilMapstructureWeakDecode(input, &sliceVal); err == nil {
- elements := make([]ast.Variable, len(sliceVal))
- for i, element := range sliceVal {
- varElement, err := InterfaceToVariable(element)
- if err != nil {
- return ast.Variable{}, err
- }
- elements[i] = varElement
- }
-
- return ast.Variable{
- Type: ast.TypeList,
- Value: elements,
- }, nil
- }
-
- return ast.Variable{}, fmt.Errorf("value for conversion must be a string, interface{} or map[string]interface: got %T", input)
-}
-
-func VariableToInterface(input ast.Variable) (interface{}, error) {
- if input.Type == ast.TypeString {
- if inputStr, ok := input.Value.(string); ok {
- return inputStr, nil
- } else {
- return nil, fmt.Errorf("ast.Variable with type string has value which is not a string")
- }
- }
-
- if input.Type == ast.TypeList {
- inputList, ok := input.Value.([]ast.Variable)
- if !ok {
- return nil, fmt.Errorf("ast.Variable with type list has value which is not a []ast.Variable")
- }
-
- result := make([]interface{}, 0)
- if len(inputList) == 0 {
- return result, nil
- }
-
- for _, element := range inputList {
- if convertedElement, err := VariableToInterface(element); err == nil {
- result = append(result, convertedElement)
- } else {
- return nil, err
- }
- }
-
- return result, nil
- }
-
- if input.Type == ast.TypeMap {
- inputMap, ok := input.Value.(map[string]ast.Variable)
- if !ok {
- return nil, fmt.Errorf("ast.Variable with type map has value which is not a map[string]ast.Variable")
- }
-
- result := make(map[string]interface{}, 0)
- if len(inputMap) == 0 {
- return result, nil
- }
-
- for key, value := range inputMap {
- if convertedValue, err := VariableToInterface(value); err == nil {
- result[key] = convertedValue
- } else {
- return nil, err
- }
- }
-
- return result, nil
- }
-
- return nil, fmt.Errorf("unknown input type: %s", input.Type)
-}
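InterfaceToVariable and VariableToInterface round-trip between native Go values and ast.Variable, with mapstructure's weak typing in the middle, so scalars such as ints come back as strings. A hedged sketch with made-up keys:

// Hypothetical sketch: round-tripping a native Go value through the
// conversion helpers above.
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	// Native Go value -> ast.Variable.
	v, err := hil.InterfaceToVariable(map[string]interface{}{
		"region": "eu-west-2",
		"count":  3,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(v.Type == ast.TypeMap) // true

	// ast.Variable -> native Go value; the weakly typed 3 comes back as "3".
	native, err := hil.VariableToInterface(v)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%#v\n", native)
}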
diff --git a/vendor/github.com/hashicorp/hil/eval.go b/vendor/github.com/hashicorp/hil/eval.go
deleted file mode 100644
index 27820769e..000000000
--- a/vendor/github.com/hashicorp/hil/eval.go
+++ /dev/null
@@ -1,472 +0,0 @@
-package hil
-
-import (
- "bytes"
- "errors"
- "fmt"
- "sync"
-
- "github.com/hashicorp/hil/ast"
-)
-
-// EvalConfig is the configuration for evaluating.
-type EvalConfig struct {
- // GlobalScope is the global scope of execution for evaluation.
- GlobalScope *ast.BasicScope
-
- // SemanticChecks is a list of additional semantic checks that will be run
- // on the tree prior to evaluating it. The type checker, identifier checker,
- // etc. will be run before these automatically.
- SemanticChecks []SemanticChecker
-}
-
-// SemanticChecker is the type that must be implemented to do a
-// semantic check on an AST tree. This will be called with the root node.
-type SemanticChecker func(ast.Node) error
-
-// EvaluationResult is a struct returned from the hil.Eval function,
-// representing the result of an interpolation. Results are returned in their
-// "natural" Go structure rather than in terms of the HIL AST. For the types
-// currently implemented, this means that the Value field can be interpreted as
-// the following Go types:
-// TypeInvalid: undefined
-// TypeString: string
-// TypeList: []interface{}
-// TypeMap: map[string]interface{}
-// TypeBool: bool
-type EvaluationResult struct {
- Type EvalType
- Value interface{}
-}
-
-// InvalidResult is a structure representing the result of a HIL interpolation
-// which has invalid syntax, missing variables, or some other type of error.
-// The error is described out of band in the accompanying error return value.
-var InvalidResult = EvaluationResult{Type: TypeInvalid, Value: nil}
-
-// errExitUnknown is an internal error that when returned means the result
-// is an unknown value. We use this for early exit.
-var errExitUnknown = errors.New("unknown value")
-
-func Eval(root ast.Node, config *EvalConfig) (EvaluationResult, error) {
- output, outputType, err := internalEval(root, config)
- if err != nil {
- return InvalidResult, err
- }
-
- // If the result contains any nested unknowns then the result as a whole
- // is unknown, so that callers only have to deal with "entirely known"
- // or "entirely unknown" as outcomes.
- if ast.IsUnknown(ast.Variable{Type: outputType, Value: output}) {
- outputType = ast.TypeUnknown
- output = UnknownValue
- }
-
- switch outputType {
- case ast.TypeList:
- val, err := VariableToInterface(ast.Variable{
- Type: ast.TypeList,
- Value: output,
- })
- return EvaluationResult{
- Type: TypeList,
- Value: val,
- }, err
- case ast.TypeMap:
- val, err := VariableToInterface(ast.Variable{
- Type: ast.TypeMap,
- Value: output,
- })
- return EvaluationResult{
- Type: TypeMap,
- Value: val,
- }, err
- case ast.TypeString:
- return EvaluationResult{
- Type: TypeString,
- Value: output,
- }, nil
- case ast.TypeBool:
- return EvaluationResult{
- Type: TypeBool,
- Value: output,
- }, nil
- case ast.TypeUnknown:
- return EvaluationResult{
- Type: TypeUnknown,
- Value: UnknownValue,
- }, nil
- default:
- return InvalidResult, fmt.Errorf("unknown type %s as interpolation output", outputType)
- }
-}
-
-// Eval evaluates the given AST tree and returns its output value, the type
-// of the output, and any error that occurred.
-func internalEval(root ast.Node, config *EvalConfig) (interface{}, ast.Type, error) {
- // Copy the scope so we can add our builtins
- if config == nil {
- config = new(EvalConfig)
- }
- scope := registerBuiltins(config.GlobalScope)
- implicitMap := map[ast.Type]map[ast.Type]string{
- ast.TypeFloat: {
- ast.TypeInt: "__builtin_FloatToInt",
- ast.TypeString: "__builtin_FloatToString",
- },
- ast.TypeInt: {
- ast.TypeFloat: "__builtin_IntToFloat",
- ast.TypeString: "__builtin_IntToString",
- },
- ast.TypeString: {
- ast.TypeInt: "__builtin_StringToInt",
- ast.TypeFloat: "__builtin_StringToFloat",
- ast.TypeBool: "__builtin_StringToBool",
- },
- ast.TypeBool: {
- ast.TypeString: "__builtin_BoolToString",
- },
- }
-
- // Build our own semantic checks that we always run
- tv := &TypeCheck{Scope: scope, Implicit: implicitMap}
- ic := &IdentifierCheck{Scope: scope}
-
- // Build up the semantic checks for execution
- checks := make(
- []SemanticChecker,
- len(config.SemanticChecks),
- len(config.SemanticChecks)+2)
- copy(checks, config.SemanticChecks)
- checks = append(checks, ic.Visit)
- checks = append(checks, tv.Visit)
-
- // Run the semantic checks
- for _, check := range checks {
- if err := check(root); err != nil {
- return nil, ast.TypeInvalid, err
- }
- }
-
- // Execute
- v := &evalVisitor{Scope: scope}
- return v.Visit(root)
-}
-
-// EvalNode is the interface that must be implemented by any ast.Node
-// to support evaluation. This will be called in visitor pattern order.
-// The result of each call to Eval is automatically pushed onto the
-// stack as a LiteralNode. Pop elements off the stack to get child
-// values.
-type EvalNode interface {
- Eval(ast.Scope, *ast.Stack) (interface{}, ast.Type, error)
-}
-
-type evalVisitor struct {
- Scope ast.Scope
- Stack ast.Stack
-
- err error
- lock sync.Mutex
-}
-
-func (v *evalVisitor) Visit(root ast.Node) (interface{}, ast.Type, error) {
- // Run the actual visitor pattern
- root.Accept(v.visit)
-
- // Get our result and clear out everything else
- var result *ast.LiteralNode
- if v.Stack.Len() > 0 {
- result = v.Stack.Pop().(*ast.LiteralNode)
- } else {
- result = new(ast.LiteralNode)
- }
- resultErr := v.err
- if resultErr == errExitUnknown {
- // This means the return value is unknown and we used the error
- // as an early exit mechanism. Reset since the value on the stack
- // should be the unknown value.
- resultErr = nil
- }
-
- // Clear everything else so we aren't just dangling
- v.Stack.Reset()
- v.err = nil
-
- t, err := result.Type(v.Scope)
- if err != nil {
- return nil, ast.TypeInvalid, err
- }
-
- return result.Value, t, resultErr
-}
-
-func (v *evalVisitor) visit(raw ast.Node) ast.Node {
- if v.err != nil {
- return raw
- }
-
- en, err := evalNode(raw)
- if err != nil {
- v.err = err
- return raw
- }
-
- out, outType, err := en.Eval(v.Scope, &v.Stack)
- if err != nil {
- v.err = err
- return raw
- }
-
- v.Stack.Push(&ast.LiteralNode{
- Value: out,
- Typex: outType,
- })
-
- if outType == ast.TypeUnknown {
- // Halt immediately
- v.err = errExitUnknown
- return raw
- }
-
- return raw
-}
-
-// evalNode is a private function that returns an EvalNode for built-in
-// types as well as any other EvalNode implementations.
-func evalNode(raw ast.Node) (EvalNode, error) {
- switch n := raw.(type) {
- case *ast.Index:
- return &evalIndex{n}, nil
- case *ast.Call:
- return &evalCall{n}, nil
- case *ast.Conditional:
- return &evalConditional{n}, nil
- case *ast.Output:
- return &evalOutput{n}, nil
- case *ast.LiteralNode:
- return &evalLiteralNode{n}, nil
- case *ast.VariableAccess:
- return &evalVariableAccess{n}, nil
- default:
- en, ok := n.(EvalNode)
- if !ok {
- return nil, fmt.Errorf("node doesn't support evaluation: %#v", raw)
- }
-
- return en, nil
- }
-}
-
-type evalCall struct{ *ast.Call }
-
-func (v *evalCall) Eval(s ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
- // Look up the function in the map
- function, ok := s.LookupFunc(v.Func)
- if !ok {
- return nil, ast.TypeInvalid, fmt.Errorf(
- "unknown function called: %s", v.Func)
- }
-
- // The arguments are on the stack in reverse order, so pop them off.
- args := make([]interface{}, len(v.Args))
- for i, _ := range v.Args {
- node := stack.Pop().(*ast.LiteralNode)
- if node.IsUnknown() {
- // If any arguments are unknown then the result is automatically unknown
- return UnknownValue, ast.TypeUnknown, nil
- }
- args[len(v.Args)-1-i] = node.Value
- }
-
- // Call the function
- result, err := function.Callback(args)
- if err != nil {
- return nil, ast.TypeInvalid, fmt.Errorf("%s: %s", v.Func, err)
- }
-
- return result, function.ReturnType, nil
-}
-
-type evalConditional struct{ *ast.Conditional }
-
-func (v *evalConditional) Eval(s ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
- // On the stack we have literal nodes representing the resulting values
- // of the condition, true and false expressions, but they are in reverse
- // order.
- falseLit := stack.Pop().(*ast.LiteralNode)
- trueLit := stack.Pop().(*ast.LiteralNode)
- condLit := stack.Pop().(*ast.LiteralNode)
-
- if condLit.IsUnknown() {
- // If our conditional is unknown then our result is also unknown
- return UnknownValue, ast.TypeUnknown, nil
- }
-
- if condLit.Value.(bool) {
- return trueLit.Value, trueLit.Typex, nil
- } else {
- return falseLit.Value, trueLit.Typex, nil
- }
-}
-
-type evalIndex struct{ *ast.Index }
-
-func (v *evalIndex) Eval(scope ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
- key := stack.Pop().(*ast.LiteralNode)
- target := stack.Pop().(*ast.LiteralNode)
-
- variableName := v.Index.Target.(*ast.VariableAccess).Name
-
- if key.IsUnknown() {
- // If our key is unknown then our result is also unknown
- return UnknownValue, ast.TypeUnknown, nil
- }
-
- // For target, we'll accept collections containing unknown values but
- // we still need to catch when the collection itself is unknown, shallowly.
- if target.Typex == ast.TypeUnknown {
- return UnknownValue, ast.TypeUnknown, nil
- }
-
- switch target.Typex {
- case ast.TypeList:
- return v.evalListIndex(variableName, target.Value, key.Value)
- case ast.TypeMap:
- return v.evalMapIndex(variableName, target.Value, key.Value)
- default:
- return nil, ast.TypeInvalid, fmt.Errorf(
- "target %q for indexing must be ast.TypeList or ast.TypeMap, is %s",
- variableName, target.Typex)
- }
-}
-
-func (v *evalIndex) evalListIndex(variableName string, target interface{}, key interface{}) (interface{}, ast.Type, error) {
- // We assume type checking was already done and we can assume that target
- // is a list and key is an int
- list, ok := target.([]ast.Variable)
- if !ok {
- return nil, ast.TypeInvalid, fmt.Errorf(
- "cannot cast target to []Variable, is: %T", target)
- }
-
- keyInt, ok := key.(int)
- if !ok {
- return nil, ast.TypeInvalid, fmt.Errorf(
- "cannot cast key to int, is: %T", key)
- }
-
- if len(list) == 0 {
- return nil, ast.TypeInvalid, fmt.Errorf("list is empty")
- }
-
- if keyInt < 0 || len(list) < keyInt+1 {
- return nil, ast.TypeInvalid, fmt.Errorf(
- "index %d out of range for list %s (max %d)",
- keyInt, variableName, len(list))
- }
-
- returnVal := list[keyInt].Value
- returnType := list[keyInt].Type
- return returnVal, returnType, nil
-}
-
-func (v *evalIndex) evalMapIndex(variableName string, target interface{}, key interface{}) (interface{}, ast.Type, error) {
- // We assume type checking was already done and we can assume that target
- // is a map and key is a string
- vmap, ok := target.(map[string]ast.Variable)
- if !ok {
- return nil, ast.TypeInvalid, fmt.Errorf(
- "cannot cast target to map[string]Variable, is: %T", target)
- }
-
- keyString, ok := key.(string)
- if !ok {
- return nil, ast.TypeInvalid, fmt.Errorf(
- "cannot cast key to string, is: %T", key)
- }
-
- if len(vmap) == 0 {
- return nil, ast.TypeInvalid, fmt.Errorf("map is empty")
- }
-
- value, ok := vmap[keyString]
- if !ok {
- return nil, ast.TypeInvalid, fmt.Errorf(
- "key %q does not exist in map %s", keyString, variableName)
- }
-
- return value.Value, value.Type, nil
-}
-
-type evalOutput struct{ *ast.Output }
-
-func (v *evalOutput) Eval(s ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
- // The expressions should all be on the stack in reverse
- // order. So pop them off, reverse their order, and concatenate.
- nodes := make([]*ast.LiteralNode, 0, len(v.Exprs))
- haveUnknown := false
- for range v.Exprs {
- n := stack.Pop().(*ast.LiteralNode)
- nodes = append(nodes, n)
-
- // If we have any unknowns then the whole result is unknown
- // (we must deal with this first, because the type checker can
- // skip type conversions in the presence of unknowns, and thus
- // any of our other nodes may be incorrectly typed.)
- if n.IsUnknown() {
- haveUnknown = true
- }
- }
-
- if haveUnknown {
- return UnknownValue, ast.TypeUnknown, nil
- }
-
- // Special case the single list and map
- if len(nodes) == 1 {
- switch t := nodes[0].Typex; t {
- case ast.TypeList:
- fallthrough
- case ast.TypeMap:
- fallthrough
- case ast.TypeUnknown:
- return nodes[0].Value, t, nil
- }
- }
-
- // Otherwise concatenate the strings
- var buf bytes.Buffer
- for i := len(nodes) - 1; i >= 0; i-- {
- if nodes[i].Typex != ast.TypeString {
- return nil, ast.TypeInvalid, fmt.Errorf(
- "invalid output with %s value at index %d: %#v",
- nodes[i].Typex,
- i,
- nodes[i].Value,
- )
- }
- buf.WriteString(nodes[i].Value.(string))
- }
-
- return buf.String(), ast.TypeString, nil
-}
-
-type evalLiteralNode struct{ *ast.LiteralNode }
-
-func (v *evalLiteralNode) Eval(ast.Scope, *ast.Stack) (interface{}, ast.Type, error) {
- return v.Value, v.Typex, nil
-}
-
-type evalVariableAccess struct{ *ast.VariableAccess }
-
-func (v *evalVariableAccess) Eval(scope ast.Scope, _ *ast.Stack) (interface{}, ast.Type, error) {
- // Look up the variable in the map
- variable, ok := scope.LookupVar(v.Name)
- if !ok {
- return nil, ast.TypeInvalid, fmt.Errorf(
- "unknown variable accessed: %s", v.Name)
- }
-
- return variable.Value, variable.Type, nil
-}
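Putting the removed pieces together, Parse followed by Eval with an EvalConfig is the usual entry point into this package. The following is a minimal sketch rather than code from this provider; the name, count and upper identifiers are invented for illustration, and the module is assumed to be fetched via go.mod now that the vendor copy is gone:

// Hypothetical sketch: end-to-end parse and evaluation against a scope.
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse("Hello ${upper(name)}, you have ${count + 1} items")
	if err != nil {
		log.Fatal(err)
	}

	config := &hil.EvalConfig{
		GlobalScope: &ast.BasicScope{
			VarMap: map[string]ast.Variable{
				"name":  {Type: ast.TypeString, Value: "world"},
				"count": {Type: ast.TypeInt, Value: 2},
			},
			FuncMap: map[string]ast.Function{
				"upper": {
					ArgTypes:   []ast.Type{ast.TypeString},
					ReturnType: ast.TypeString,
					Callback: func(args []interface{}) (interface{}, error) {
						return strings.ToUpper(args[0].(string)), nil
					},
				},
			},
		},
	}

	// The int result of count + 1 is implicitly converted to a string
	// during output concatenation, as set up by the implicit map above.
	result, err := hil.Eval(tree, config)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result.Value) // Hello WORLD, you have 3 items
}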
diff --git a/vendor/github.com/hashicorp/hil/eval_type.go b/vendor/github.com/hashicorp/hil/eval_type.go
deleted file mode 100644
index 6946ecd23..000000000
--- a/vendor/github.com/hashicorp/hil/eval_type.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package hil
-
-//go:generate stringer -type=EvalType eval_type.go
-
-// EvalType represents the type of the output returned from a HIL
-// evaluation.
-type EvalType uint32
-
-const (
- TypeInvalid EvalType = 0
- TypeString EvalType = 1 << iota
- TypeBool
- TypeList
- TypeMap
- TypeUnknown
-)
diff --git a/vendor/github.com/hashicorp/hil/evaltype_string.go b/vendor/github.com/hashicorp/hil/evaltype_string.go
deleted file mode 100644
index b107ddd45..000000000
--- a/vendor/github.com/hashicorp/hil/evaltype_string.go
+++ /dev/null
@@ -1,42 +0,0 @@
-// Code generated by "stringer -type=EvalType eval_type.go"; DO NOT EDIT
-
-package hil
-
-import "fmt"
-
-const (
- _EvalType_name_0 = "TypeInvalid"
- _EvalType_name_1 = "TypeString"
- _EvalType_name_2 = "TypeBool"
- _EvalType_name_3 = "TypeList"
- _EvalType_name_4 = "TypeMap"
- _EvalType_name_5 = "TypeUnknown"
-)
-
-var (
- _EvalType_index_0 = [...]uint8{0, 11}
- _EvalType_index_1 = [...]uint8{0, 10}
- _EvalType_index_2 = [...]uint8{0, 8}
- _EvalType_index_3 = [...]uint8{0, 8}
- _EvalType_index_4 = [...]uint8{0, 7}
- _EvalType_index_5 = [...]uint8{0, 11}
-)
-
-func (i EvalType) String() string {
- switch {
- case i == 0:
- return _EvalType_name_0
- case i == 2:
- return _EvalType_name_1
- case i == 4:
- return _EvalType_name_2
- case i == 8:
- return _EvalType_name_3
- case i == 16:
- return _EvalType_name_4
- case i == 32:
- return _EvalType_name_5
- default:
- return fmt.Sprintf("EvalType(%d)", i)
- }
-}
diff --git a/vendor/github.com/hashicorp/hil/go.mod b/vendor/github.com/hashicorp/hil/go.mod
deleted file mode 100644
index 45719a69b..000000000
--- a/vendor/github.com/hashicorp/hil/go.mod
+++ /dev/null
@@ -1,6 +0,0 @@
-module github.com/hashicorp/hil
-
-require (
- github.com/mitchellh/mapstructure v1.1.2
- github.com/mitchellh/reflectwalk v1.0.0
-)
diff --git a/vendor/github.com/hashicorp/hil/go.sum b/vendor/github.com/hashicorp/hil/go.sum
deleted file mode 100644
index 83639b691..000000000
--- a/vendor/github.com/hashicorp/hil/go.sum
+++ /dev/null
@@ -1,4 +0,0 @@
-github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
-github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
-github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=
-github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
diff --git a/vendor/github.com/hashicorp/hil/parse.go b/vendor/github.com/hashicorp/hil/parse.go
deleted file mode 100644
index ecbe1fdbf..000000000
--- a/vendor/github.com/hashicorp/hil/parse.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package hil
-
-import (
- "github.com/hashicorp/hil/ast"
- "github.com/hashicorp/hil/parser"
- "github.com/hashicorp/hil/scanner"
-)
-
-// Parse parses the given program and returns an executable AST tree.
-//
-// Syntax errors are returned with error having the dynamic type
-// *parser.ParseError, which gives the caller access to the source position
-// where the error was found, which allows (for example) combining it with
-// a known source filename to add context to the error message.
-func Parse(v string) (ast.Node, error) {
- return ParseWithPosition(v, ast.Pos{Line: 1, Column: 1})
-}
-
-// ParseWithPosition is like Parse except that it overrides the source
-// row and column position of the first character in the string, which should
-// be 1-based.
-//
-// This can be used when HIL is embedded in another language and the outer
-// parser knows the row and column where the HIL expression started within
-// the overall source file.
-func ParseWithPosition(v string, pos ast.Pos) (ast.Node, error) {
- ch := scanner.Scan(v, pos)
- return parser.Parse(ch)
-}
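As the Parse documentation above notes, syntax errors carry the dynamic type *parser.ParseError, which lets callers report the exact source position. A small hypothetical sketch:

// Hypothetical sketch: recovering the source position from a syntax error.
package main

import (
	"fmt"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/parser"
)

func main() {
	_, err := hil.Parse("${1 +}") // deliberately malformed
	if perr, ok := err.(*parser.ParseError); ok {
		fmt.Printf("syntax error at line %d, column %d: %s\n",
			perr.Pos.Line, perr.Pos.Column, perr.Message)
		return
	}
	fmt.Println(err)
}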
diff --git a/vendor/github.com/hashicorp/hil/parser/binary_op.go b/vendor/github.com/hashicorp/hil/parser/binary_op.go
deleted file mode 100644
index 2e013e01d..000000000
--- a/vendor/github.com/hashicorp/hil/parser/binary_op.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package parser
-
-import (
- "github.com/hashicorp/hil/ast"
- "github.com/hashicorp/hil/scanner"
-)
-
-var binaryOps []map[scanner.TokenType]ast.ArithmeticOp
-
-func init() {
- // This operation table maps from the operator's scanner token type
- // to the AST arithmetic operation. All expressions produced from
- // binary operators are *ast.Arithmetic nodes.
- //
- // Binary operator groups are listed in order of precedence, with
- // the *lowest* precedence first. Operators within the same group
- // have left-to-right associativity.
- binaryOps = []map[scanner.TokenType]ast.ArithmeticOp{
- {
- scanner.OR: ast.ArithmeticOpLogicalOr,
- },
- {
- scanner.AND: ast.ArithmeticOpLogicalAnd,
- },
- {
- scanner.EQUAL: ast.ArithmeticOpEqual,
- scanner.NOTEQUAL: ast.ArithmeticOpNotEqual,
- },
- {
- scanner.GT: ast.ArithmeticOpGreaterThan,
- scanner.GTE: ast.ArithmeticOpGreaterThanOrEqual,
- scanner.LT: ast.ArithmeticOpLessThan,
- scanner.LTE: ast.ArithmeticOpLessThanOrEqual,
- },
- {
- scanner.PLUS: ast.ArithmeticOpAdd,
- scanner.MINUS: ast.ArithmeticOpSub,
- },
- {
- scanner.STAR: ast.ArithmeticOpMul,
- scanner.SLASH: ast.ArithmeticOpDiv,
- scanner.PERCENT: ast.ArithmeticOpMod,
- },
- }
-}
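Because the groups above run from lowest to highest precedence, multiplication binds tighter than addition. A quick sketch (again assuming the hil module remains a dependency):

// Hypothetical sketch: "${2 + 3 * 4}" parses as 2 + (3 * 4) and prints "14".
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	tree, err := hil.Parse("${2 + 3 * 4}")
	if err != nil {
		log.Fatal(err)
	}
	result, err := hil.Eval(tree, &hil.EvalConfig{GlobalScope: &ast.BasicScope{}})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result.Value) // "14"
}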
diff --git a/vendor/github.com/hashicorp/hil/parser/error.go b/vendor/github.com/hashicorp/hil/parser/error.go
deleted file mode 100644
index bacd69645..000000000
--- a/vendor/github.com/hashicorp/hil/parser/error.go
+++ /dev/null
@@ -1,38 +0,0 @@
-package parser
-
-import (
- "fmt"
-
- "github.com/hashicorp/hil/ast"
- "github.com/hashicorp/hil/scanner"
-)
-
-type ParseError struct {
- Message string
- Pos ast.Pos
-}
-
-func Errorf(pos ast.Pos, format string, args ...interface{}) error {
- return &ParseError{
- Message: fmt.Sprintf(format, args...),
- Pos: pos,
- }
-}
-
-// TokenErrorf is a convenient wrapper around Errorf that uses the
-// position of the given token.
-func TokenErrorf(token *scanner.Token, format string, args ...interface{}) error {
- return Errorf(token.Pos, format, args...)
-}
-
-func ExpectationError(wanted string, got *scanner.Token) error {
- return TokenErrorf(got, "expected %s but found %s", wanted, got)
-}
-
-func (e *ParseError) Error() string {
- return fmt.Sprintf("parse error at %s: %s", e.Pos, e.Message)
-}
-
-func (e *ParseError) String() string {
- return e.Error()
-}
diff --git a/vendor/github.com/hashicorp/hil/parser/fuzz.go b/vendor/github.com/hashicorp/hil/parser/fuzz.go
deleted file mode 100644
index de954f383..000000000
--- a/vendor/github.com/hashicorp/hil/parser/fuzz.go
+++ /dev/null
@@ -1,28 +0,0 @@
-// +build gofuzz
-
-package parser
-
-import (
- "github.com/hashicorp/hil/ast"
- "github.com/hashicorp/hil/scanner"
-)
-
-// This is a fuzz testing function designed to be used with go-fuzz:
-// https://github.com/dvyukov/go-fuzz
-//
-// It's not included in a normal build due to the gofuzz build tag above.
-//
-// There are some input files that you can use as a seed corpus for go-fuzz
-// in the directory ./fuzz-corpus .
-
-func Fuzz(data []byte) int {
- str := string(data)
-
- ch := scanner.Scan(str, ast.Pos{Line: 1, Column: 1})
- _, err := Parse(ch)
- if err != nil {
- return 0
- }
-
- return 1
-}
diff --git a/vendor/github.com/hashicorp/hil/parser/parser.go b/vendor/github.com/hashicorp/hil/parser/parser.go
deleted file mode 100644
index 376f1c49d..000000000
--- a/vendor/github.com/hashicorp/hil/parser/parser.go
+++ /dev/null
@@ -1,522 +0,0 @@
-package parser
-
-import (
- "strconv"
- "unicode/utf8"
-
- "github.com/hashicorp/hil/ast"
- "github.com/hashicorp/hil/scanner"
-)
-
-func Parse(ch <-chan *scanner.Token) (ast.Node, error) {
- peeker := scanner.NewPeeker(ch)
- parser := &parser{peeker}
- output, err := parser.ParseTopLevel()
- peeker.Close()
- return output, err
-}
-
-type parser struct {
- peeker *scanner.Peeker
-}
-
-func (p *parser) ParseTopLevel() (ast.Node, error) {
- return p.parseInterpolationSeq(false)
-}
-
-func (p *parser) ParseQuoted() (ast.Node, error) {
- return p.parseInterpolationSeq(true)
-}
-
-// parseInterpolationSeq parses either the top-level sequence of literals
-// and interpolation expressions or a similar sequence within a quoted
-// string inside an interpolation expression. The latter case is requested
-// by setting 'quoted' to true.
-func (p *parser) parseInterpolationSeq(quoted bool) (ast.Node, error) {
- literalType := scanner.LITERAL
- endType := scanner.EOF
- if quoted {
- // exceptions for quoted sequences
- literalType = scanner.STRING
- endType = scanner.CQUOTE
- }
-
- startPos := p.peeker.Peek().Pos
-
- if quoted {
- tok := p.peeker.Read()
- if tok.Type != scanner.OQUOTE {
- return nil, ExpectationError("open quote", tok)
- }
- }
-
- var exprs []ast.Node
- for {
- tok := p.peeker.Read()
-
- if tok.Type == endType {
- break
- }
-
- switch tok.Type {
- case literalType:
- val, err := p.parseStringToken(tok)
- if err != nil {
- return nil, err
- }
- exprs = append(exprs, &ast.LiteralNode{
- Value: val,
- Typex: ast.TypeString,
- Posx: tok.Pos,
- })
- case scanner.BEGIN:
- expr, err := p.ParseInterpolation()
- if err != nil {
- return nil, err
- }
- exprs = append(exprs, expr)
- default:
- return nil, ExpectationError(`"${"`, tok)
- }
- }
-
- if len(exprs) == 0 {
- // If we have no parts at all then the input must've
- // been an empty string.
- exprs = append(exprs, &ast.LiteralNode{
- Value: "",
- Typex: ast.TypeString,
- Posx: startPos,
- })
- }
-
- // As a special case, if our "Output" contains only one expression
- // and it's a literal string then we'll hoist it up to be our
- // direct return value, so callers can easily recognize a string
- // that has no interpolations at all.
- if len(exprs) == 1 {
- if lit, ok := exprs[0].(*ast.LiteralNode); ok {
- if lit.Typex == ast.TypeString {
- return lit, nil
- }
- }
- }
-
- return &ast.Output{
- Exprs: exprs,
- Posx: startPos,
- }, nil
-}
-
-// parseStringToken takes a token of either LITERAL or STRING type and
-// returns the interpreted string, after processing any relevant
-// escape sequences.
-func (p *parser) parseStringToken(tok *scanner.Token) (string, error) {
- var backslashes bool
- switch tok.Type {
- case scanner.LITERAL:
- backslashes = false
- case scanner.STRING:
- backslashes = true
- default:
- panic("unsupported string token type")
- }
-
- raw := []byte(tok.Content)
- buf := make([]byte, 0, len(raw))
-
- for i := 0; i < len(raw); i++ {
- b := raw[i]
- more := len(raw) > (i + 1)
-
- if b == '$' {
- if more && raw[i+1] == '$' {
- // skip over the second dollar sign
- i++
- }
- } else if backslashes && b == '\\' {
- if !more {
- return "", Errorf(
- ast.Pos{
- Column: tok.Pos.Column + utf8.RuneCount(raw[:i]),
- Line: tok.Pos.Line,
- },
- `unfinished backslash escape sequence`,
- )
- }
- escapeType := raw[i+1]
- switch escapeType {
- case '\\':
- // skip over the second slash
- i++
- case 'n':
- b = '\n'
- i++
- case '"':
- b = '"'
- i++
- default:
- return "", Errorf(
- ast.Pos{
- Column: tok.Pos.Column + utf8.RuneCount(raw[:i]),
- Line: tok.Pos.Line,
- },
- `invalid backslash escape sequence`,
- )
- }
- }
-
- buf = append(buf, b)
- }
-
- return string(buf), nil
-}
-
-func (p *parser) ParseInterpolation() (ast.Node, error) {
- // By the time we're called, we're already "inside" the ${ sequence
- // because the caller consumed the ${ token.
-
- expr, err := p.ParseExpression()
- if err != nil {
- return nil, err
- }
-
- err = p.requireTokenType(scanner.END, `"}"`)
- if err != nil {
- return nil, err
- }
-
- return expr, nil
-}
-
-func (p *parser) ParseExpression() (ast.Node, error) {
- return p.parseTernaryCond()
-}
-
-func (p *parser) parseTernaryCond() (ast.Node, error) {
- // The ternary condition operator (.. ? .. : ..) behaves somewhat
- // like a binary operator except that the "operator" is itself
- // an expression enclosed in two punctuation characters.
- // The middle expression is parsed as if the ? and : symbols
- // were parentheses. The "rhs" (the "false expression") is then
- // treated right-associatively so it behaves similarly to the
- // middle in terms of precedence.
-
- startPos := p.peeker.Peek().Pos
-
- var cond, trueExpr, falseExpr ast.Node
- var err error
-
- cond, err = p.parseBinaryOps(binaryOps)
- if err != nil {
- return nil, err
- }
-
- next := p.peeker.Peek()
- if next.Type != scanner.QUESTION {
- return cond, nil
- }
-
- p.peeker.Read() // eat question mark
-
- trueExpr, err = p.ParseExpression()
- if err != nil {
- return nil, err
- }
-
- colon := p.peeker.Read()
- if colon.Type != scanner.COLON {
- return nil, ExpectationError(":", colon)
- }
-
- falseExpr, err = p.ParseExpression()
- if err != nil {
- return nil, err
- }
-
- return &ast.Conditional{
- CondExpr: cond,
- TrueExpr: trueExpr,
- FalseExpr: falseExpr,
- Posx: startPos,
- }, nil
-}
-
-// parseBinaryOps calls itself recursively to work through all of the
-// operator precedence groups, and then eventually calls ParseExpressionTerm
-// for each operand.
-func (p *parser) parseBinaryOps(ops []map[scanner.TokenType]ast.ArithmeticOp) (ast.Node, error) {
- if len(ops) == 0 {
- // We've run out of operators, so now we'll just try to parse a term.
- return p.ParseExpressionTerm()
- }
-
- thisLevel := ops[0]
- remaining := ops[1:]
-
- startPos := p.peeker.Peek().Pos
-
- var lhs, rhs ast.Node
- operator := ast.ArithmeticOpInvalid
- var err error
-
- // parse a term that might be the first operand of a binary
- // expression or it might just be a standalone term, but
- // we won't know until we've parsed it and can look ahead
- // to see if there's an operator token.
- lhs, err = p.parseBinaryOps(remaining)
- if err != nil {
- return nil, err
- }
-
- // We'll keep eating up arithmetic operators until we run
- // out, so that operators with the same precedence will combine in a
- // left-associative manner:
- // a+b+c => (a+b)+c, not a+(b+c)
- //
- // Should we later want to have right-associative operators, a way
- // to achieve that would be to call back up to ParseExpression here
- // instead of iteratively parsing only the remaining operators.
- for {
- next := p.peeker.Peek()
- var newOperator ast.ArithmeticOp
- var ok bool
- if newOperator, ok = thisLevel[next.Type]; !ok {
- break
- }
-
- // Are we extending an expression started on
- // the previous iteration?
- if operator != ast.ArithmeticOpInvalid {
- lhs = &ast.Arithmetic{
- Op: operator,
- Exprs: []ast.Node{lhs, rhs},
- Posx: startPos,
- }
- }
-
- operator = newOperator
- p.peeker.Read() // eat operator token
- rhs, err = p.parseBinaryOps(remaining)
- if err != nil {
- return nil, err
- }
- }
-
- if operator != ast.ArithmeticOpInvalid {
- return &ast.Arithmetic{
- Op: operator,
- Exprs: []ast.Node{lhs, rhs},
- Posx: startPos,
- }, nil
- } else {
- return lhs, nil
- }
-}
-
-func (p *parser) ParseExpressionTerm() (ast.Node, error) {
-
- next := p.peeker.Peek()
-
- switch next.Type {
-
- case scanner.OPAREN:
- p.peeker.Read()
- expr, err := p.ParseExpression()
- if err != nil {
- return nil, err
- }
- err = p.requireTokenType(scanner.CPAREN, `")"`)
- return expr, err
-
- case scanner.OQUOTE:
- return p.ParseQuoted()
-
- case scanner.INTEGER:
- tok := p.peeker.Read()
- val, err := strconv.Atoi(tok.Content)
- if err != nil {
- return nil, TokenErrorf(tok, "invalid integer: %s", err)
- }
- return &ast.LiteralNode{
- Value: val,
- Typex: ast.TypeInt,
- Posx: tok.Pos,
- }, nil
-
- case scanner.FLOAT:
- tok := p.peeker.Read()
- val, err := strconv.ParseFloat(tok.Content, 64)
- if err != nil {
- return nil, TokenErrorf(tok, "invalid float: %s", err)
- }
- return &ast.LiteralNode{
- Value: val,
- Typex: ast.TypeFloat,
- Posx: tok.Pos,
- }, nil
-
- case scanner.BOOL:
- tok := p.peeker.Read()
- // the scanner guarantees that tok.Content is either "true" or "false"
- var val bool
- if tok.Content[0] == 't' {
- val = true
- } else {
- val = false
- }
- return &ast.LiteralNode{
- Value: val,
- Typex: ast.TypeBool,
- Posx: tok.Pos,
- }, nil
-
- case scanner.MINUS:
- opTok := p.peeker.Read()
- // important to use ParseExpressionTerm rather than ParseExpression
- // here, otherwise we can capture a following binary expression into
- // our negation.
- // e.g. -46+5 should parse as (0-46)+5, not 0-(46+5)
- operand, err := p.ParseExpressionTerm()
- if err != nil {
- return nil, err
- }
- // The AST currently represents negative numbers as
- // a binary subtraction of the number from zero.
- return &ast.Arithmetic{
- Op: ast.ArithmeticOpSub,
- Exprs: []ast.Node{
- &ast.LiteralNode{
- Value: 0,
- Typex: ast.TypeInt,
- Posx: opTok.Pos,
- },
- operand,
- },
- Posx: opTok.Pos,
- }, nil
-
- case scanner.BANG:
- opTok := p.peeker.Read()
- // important to use ParseExpressionTerm rather than ParseExpression
- // here, otherwise we can capture a following binary expression into
- // our negation.
- operand, err := p.ParseExpressionTerm()
- if err != nil {
- return nil, err
- }
-		// The AST currently represents boolean negation as an equality
-		// test with "false".
- return &ast.Arithmetic{
- Op: ast.ArithmeticOpEqual,
- Exprs: []ast.Node{
- &ast.LiteralNode{
- Value: false,
- Typex: ast.TypeBool,
- Posx: opTok.Pos,
- },
- operand,
- },
- Posx: opTok.Pos,
- }, nil
-
- case scanner.IDENTIFIER:
- return p.ParseScopeInteraction()
-
- default:
- return nil, ExpectationError("expression", next)
- }
-}
-
-// ParseScopeInteraction parses the expression types that interact
-// with the evaluation scope: variable access, function calls, and
-// indexing.
-//
-// Indexing should actually be a distinct operator in its own right,
-// so that e.g. it can be applied to the result of a function call,
-// but for now we're preserving the behavior of the older yacc-based
-// parser.
-func (p *parser) ParseScopeInteraction() (ast.Node, error) {
- first := p.peeker.Read()
- startPos := first.Pos
- if first.Type != scanner.IDENTIFIER {
- return nil, ExpectationError("identifier", first)
- }
-
- next := p.peeker.Peek()
- if next.Type == scanner.OPAREN {
- // function call
- funcName := first.Content
- p.peeker.Read() // eat paren
- var args []ast.Node
-
- for {
- if p.peeker.Peek().Type == scanner.CPAREN {
- break
- }
-
- arg, err := p.ParseExpression()
- if err != nil {
- return nil, err
- }
-
- args = append(args, arg)
-
- if p.peeker.Peek().Type == scanner.COMMA {
- p.peeker.Read() // eat comma
- continue
- } else {
- break
- }
- }
-
- err := p.requireTokenType(scanner.CPAREN, `")"`)
- if err != nil {
- return nil, err
- }
-
- return &ast.Call{
- Func: funcName,
- Args: args,
- Posx: startPos,
- }, nil
- }
-
- varNode := &ast.VariableAccess{
- Name: first.Content,
- Posx: startPos,
- }
-
- if p.peeker.Peek().Type == scanner.OBRACKET {
- // index operator
- startPos := p.peeker.Read().Pos // eat bracket
- indexExpr, err := p.ParseExpression()
- if err != nil {
- return nil, err
- }
- err = p.requireTokenType(scanner.CBRACKET, `"]"`)
- if err != nil {
- return nil, err
- }
- return &ast.Index{
- Target: varNode,
- Key: indexExpr,
- Posx: startPos,
- }, nil
- }
-
- return varNode, nil
-}
-
-// requireTokenType consumes the next token and returns an error if its
-// type does not match the given type. nil is returned if the type matches.
-//
-// This is a helper around peeker.Read() for situations where the parser just
-// wants to assert that a particular token type must be present.
-func (p *parser) requireTokenType(wantType scanner.TokenType, wantName string) error {
- token := p.peeker.Read()
- if token.Type != wantType {
- return ExpectationError(wantName, token)
- }
- return nil
-}
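
The removed parser handles operator precedence by passing parseBinaryOps a slice of operator tables, one per precedence tier, and folding operators of the same tier left-associatively. Below is a minimal, self-contained Go sketch of that technique; the `toks`/`node`/`lit`/`binop` types are hypothetical stand-ins, not the hil AST.

```go
package main

import "fmt"

type node interface{ String() string }

type lit struct{ v int }

func (l lit) String() string { return fmt.Sprint(l.v) }

type binop struct {
	op       byte
	lhs, rhs node
}

func (b binop) String() string { return fmt.Sprintf("(%s%c%s)", b.lhs, b.op, b.rhs) }

type toks struct {
	items []string
	pos   int
}

func (t *toks) peek() string {
	if t.pos >= len(t.items) {
		return ""
	}
	return t.items[t.pos]
}

func (t *toks) read() string { s := t.peek(); t.pos++; return s }

func contains(ss []string, s string) bool {
	for _, v := range ss {
		if v == s {
			return true
		}
	}
	return false
}

// parseBinary mirrors parseBinaryOps: ops[0] is the loosest-binding tier and
// each recursive call handles the next, tighter tier.
func parseBinary(t *toks, ops [][]string) node {
	if len(ops) == 0 {
		// Out of operator tiers: parse a bare term (an integer here).
		n := 0
		fmt.Sscan(t.read(), &n)
		return lit{n}
	}
	lhs := parseBinary(t, ops[1:])
	for contains(ops[0], t.peek()) {
		op := t.read()
		rhs := parseBinary(t, ops[1:])
		lhs = binop{op[0], lhs, rhs} // fold left: a-b-c becomes (a-b)-c
	}
	return lhs
}

func main() {
	stream := &toks{items: []string{"1", "-", "2", "-", "3", "*", "4"}}
	levels := [][]string{{"+", "-"}, {"*", "/"}}
	fmt.Println(parseBinary(stream, levels)) // ((1-2)-(3*4))
}
```

The left fold in the loop is what makes same-tier operators associate to the left; recursing back into the same tier on the right-hand side would give right associativity instead, as the removed comment notes.
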
diff --git a/vendor/github.com/hashicorp/hil/scanner/peeker.go b/vendor/github.com/hashicorp/hil/scanner/peeker.go
deleted file mode 100644
index 4de372831..000000000
--- a/vendor/github.com/hashicorp/hil/scanner/peeker.go
+++ /dev/null
@@ -1,55 +0,0 @@
-package scanner
-
-// Peeker is a utility that wraps a token channel returned by Scan and
-// provides an interface that allows a caller (e.g. the parser) to
-// work with the token stream in a mode that allows one token of lookahead,
-// and provides utilities for more convenient processing of the stream.
-type Peeker struct {
- ch <-chan *Token
- peeked *Token
-}
-
-func NewPeeker(ch <-chan *Token) *Peeker {
- return &Peeker{
- ch: ch,
- }
-}
-
-// Peek returns the next token in the stream without consuming it. A
-// subsequent call to Read will return the same token.
-func (p *Peeker) Peek() *Token {
- if p.peeked == nil {
- p.peeked = <-p.ch
- }
- return p.peeked
-}
-
-// Read consumes the next token in the stream and returns it.
-func (p *Peeker) Read() *Token {
- token := p.Peek()
-
- // As a special case, we will produce the EOF token forever once
- // it is reached.
- if token.Type != EOF {
- p.peeked = nil
- }
-
- return token
-}
-
-// Close ensures that the token stream has been exhausted, to prevent
-// the goroutine in the underlying scanner from leaking.
-//
-// It's not necessary to call this if the caller reads the token stream
-// to EOF, since that implicitly closes the scanner.
-func (p *Peeker) Close() {
- for _ = range p.ch {
- // discard
- }
- // Install a synthetic EOF token in 'peeked' in case someone
- // erroneously calls Peek() or Read() after we've closed.
- p.peeked = &Token{
- Type: EOF,
- Content: "",
- }
-}
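
The Peeker gives the parser exactly one token of lookahead over the scanner's channel and keeps producing EOF once the stream ends. A minimal sketch of the same pattern, using a plain string as a stand-in token type:

```go
package main

import "fmt"

// peeker is a hypothetical stand-in for the removed type.
type peeker struct {
	ch     <-chan string
	peeked *string
}

// Peek returns the next token without consuming it.
func (p *peeker) Peek() string {
	if p.peeked == nil {
		v, ok := <-p.ch
		if !ok {
			v = "EOF" // synthesize EOF once the channel is closed
		}
		p.peeked = &v
	}
	return *p.peeked
}

// Read consumes the next token; EOF repeats forever once reached.
func (p *peeker) Read() string {
	v := p.Peek()
	if v != "EOF" {
		p.peeked = nil
	}
	return v
}

func main() {
	ch := make(chan string, 2)
	ch <- "a"
	ch <- "b"
	close(ch)

	p := &peeker{ch: ch}
	fmt.Println(p.Peek(), p.Read()) // a a  (Peek does not consume)
	fmt.Println(p.Read())           // b
	fmt.Println(p.Read(), p.Read()) // EOF EOF
}
```
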
diff --git a/vendor/github.com/hashicorp/hil/scanner/scanner.go b/vendor/github.com/hashicorp/hil/scanner/scanner.go
deleted file mode 100644
index 86085de01..000000000
--- a/vendor/github.com/hashicorp/hil/scanner/scanner.go
+++ /dev/null
@@ -1,556 +0,0 @@
-package scanner
-
-import (
- "unicode"
- "unicode/utf8"
-
- "github.com/hashicorp/hil/ast"
-)
-
-// Scan returns a channel that receives Tokens from the given input string.
-//
-// The scanner's job is just to partition the string into meaningful parts.
-// It doesn't do any transformation of the raw input string, so the caller
-// must deal with any further interpretation required, such as parsing INTEGER
-// tokens into real ints, or dealing with escape sequences in LITERAL or
-// STRING tokens.
-//
-// Strings in the returned tokens are slices from the original string.
-//
-// startPos should be set to ast.InitPos unless the caller knows that
-// this interpolation string is part of a larger file and knows the position
-// of the first character in that larger file.
-func Scan(s string, startPos ast.Pos) <-chan *Token {
- ch := make(chan *Token)
- go scan(s, ch, startPos)
- return ch
-}
-
-func scan(s string, ch chan<- *Token, pos ast.Pos) {
- // 'remain' starts off as the whole string but we gradually
-	// slice off the front of it as we work our way through.
- remain := s
-
- // nesting keeps track of how many ${ .. } sequences we are
- // inside, so we can recognize the minor differences in syntax
- // between outer string literals (LITERAL tokens) and quoted
- // string literals (STRING tokens).
- nesting := 0
-
- // We're going to flip back and forth between parsing literals/strings
- // and parsing interpolation sequences ${ .. } until we reach EOF or
- // some INVALID token.
-All:
- for {
- startPos := pos
- // Literal string processing first, since the beginning of
- // a string is always outside of an interpolation sequence.
- literalVal, terminator := scanLiteral(remain, pos, nesting > 0)
-
- if len(literalVal) > 0 {
- litType := LITERAL
- if nesting > 0 {
- litType = STRING
- }
- ch <- &Token{
- Type: litType,
- Content: literalVal,
- Pos: startPos,
- }
- remain = remain[len(literalVal):]
- }
-
- ch <- terminator
- remain = remain[len(terminator.Content):]
- pos = terminator.Pos
- // Safe to use len() here because none of the terminator tokens
- // can contain UTF-8 sequences.
- pos.Column = pos.Column + len(terminator.Content)
-
- switch terminator.Type {
- case INVALID:
- // Synthetic EOF after invalid token, since further scanning
- // is likely to just produce more garbage.
- ch <- &Token{
- Type: EOF,
- Content: "",
- Pos: pos,
- }
- break All
- case EOF:
- // All done!
- break All
- case BEGIN:
- nesting++
- case CQUOTE:
- // nothing special to do
- default:
- // Should never happen
- panic("invalid string/literal terminator")
- }
-
- // Now we do the processing of the insides of ${ .. } sequences.
- // This loop terminates when we encounter either a closing } or
- // an opening ", which will cause us to return to literal processing.
- Interpolation:
- for {
-
- token, size, newPos := scanInterpolationToken(remain, pos)
- ch <- token
- remain = remain[size:]
- pos = newPos
-
- switch token.Type {
- case INVALID:
- // Synthetic EOF after invalid token, since further scanning
- // is likely to just produce more garbage.
- ch <- &Token{
- Type: EOF,
- Content: "",
- Pos: pos,
- }
- break All
- case EOF:
- // All done
- // (though a syntax error that we'll catch in the parser)
- break All
- case END:
- nesting--
- if nesting < 0 {
- // Can happen if there are unbalanced ${ and } sequences
- // in the input, which we'll catch in the parser.
- nesting = 0
- }
- break Interpolation
- case OQUOTE:
- // Beginning of nested quoted string
- break Interpolation
- }
- }
- }
-
- close(ch)
-}
-
-// Returns the token found at the start of the given string, followed by
-// the number of bytes that were consumed from the string and the adjusted
-// source position.
-//
-// Note that the number of bytes consumed can be more than the length of
-// the returned token contents if the string begins with whitespace, since
-// it will be silently consumed before reading the token.
-func scanInterpolationToken(s string, startPos ast.Pos) (*Token, int, ast.Pos) {
- pos := startPos
- size := 0
-
- // Consume whitespace, if any
- for len(s) > 0 && byteIsSpace(s[0]) {
- if s[0] == '\n' {
- pos.Column = 1
- pos.Line++
- } else {
- pos.Column++
- }
- size++
- s = s[1:]
- }
-
- // Unexpected EOF during sequence
- if len(s) == 0 {
- return &Token{
- Type: EOF,
- Content: "",
- Pos: pos,
- }, size, pos
- }
-
- next := s[0]
- var token *Token
-
- switch next {
- case '(', ')', '[', ']', ',', '.', '+', '-', '*', '/', '%', '?', ':':
- // Easy punctuation symbols that don't have any special meaning
- // during scanning, and that stand for themselves in the
- // TokenType enumeration.
- token = &Token{
- Type: TokenType(next),
- Content: s[:1],
- Pos: pos,
- }
- case '}':
- token = &Token{
- Type: END,
- Content: s[:1],
- Pos: pos,
- }
- case '"':
- token = &Token{
- Type: OQUOTE,
- Content: s[:1],
- Pos: pos,
- }
- case '!':
- if len(s) >= 2 && s[:2] == "!=" {
- token = &Token{
- Type: NOTEQUAL,
- Content: s[:2],
- Pos: pos,
- }
- } else {
- token = &Token{
- Type: BANG,
- Content: s[:1],
- Pos: pos,
- }
- }
- case '<':
- if len(s) >= 2 && s[:2] == "<=" {
- token = &Token{
- Type: LTE,
- Content: s[:2],
- Pos: pos,
- }
- } else {
- token = &Token{
- Type: LT,
- Content: s[:1],
- Pos: pos,
- }
- }
- case '>':
- if len(s) >= 2 && s[:2] == ">=" {
- token = &Token{
- Type: GTE,
- Content: s[:2],
- Pos: pos,
- }
- } else {
- token = &Token{
- Type: GT,
- Content: s[:1],
- Pos: pos,
- }
- }
- case '=':
- if len(s) >= 2 && s[:2] == "==" {
- token = &Token{
- Type: EQUAL,
- Content: s[:2],
- Pos: pos,
- }
- } else {
- // A single equals is not a valid operator
- token = &Token{
- Type: INVALID,
- Content: s[:1],
- Pos: pos,
- }
- }
- case '&':
- if len(s) >= 2 && s[:2] == "&&" {
- token = &Token{
- Type: AND,
- Content: s[:2],
- Pos: pos,
- }
- } else {
- token = &Token{
- Type: INVALID,
- Content: s[:1],
- Pos: pos,
- }
- }
- case '|':
- if len(s) >= 2 && s[:2] == "||" {
- token = &Token{
- Type: OR,
- Content: s[:2],
- Pos: pos,
- }
- } else {
- token = &Token{
- Type: INVALID,
- Content: s[:1],
- Pos: pos,
- }
- }
- default:
- if next >= '0' && next <= '9' {
- num, numType := scanNumber(s)
- token = &Token{
- Type: numType,
- Content: num,
- Pos: pos,
- }
- } else if stringStartsWithIdentifier(s) {
- ident, runeLen := scanIdentifier(s)
- tokenType := IDENTIFIER
- if ident == "true" || ident == "false" {
- tokenType = BOOL
- }
- token = &Token{
- Type: tokenType,
- Content: ident,
- Pos: pos,
- }
- // Skip usual token handling because it doesn't
- // know how to deal with UTF-8 sequences.
- pos.Column = pos.Column + runeLen
- return token, size + len(ident), pos
- } else {
- _, byteLen := utf8.DecodeRuneInString(s)
- token = &Token{
- Type: INVALID,
- Content: s[:byteLen],
- Pos: pos,
- }
- // Skip usual token handling because it doesn't
- // know how to deal with UTF-8 sequences.
- pos.Column = pos.Column + 1
- return token, size + byteLen, pos
- }
- }
-
- // Here we assume that the token content contains no UTF-8 sequences,
- // because we dealt with UTF-8 characters as a special case where
- // necessary above.
- size = size + len(token.Content)
- pos.Column = pos.Column + len(token.Content)
-
- return token, size, pos
-}
-
-// Returns the (possibly-empty) prefix of the given string that represents
-// a literal, followed by the token that marks the end of the literal.
-func scanLiteral(s string, startPos ast.Pos, nested bool) (string, *Token) {
- litLen := 0
- pos := startPos
- var terminator *Token
- for {
-
- if litLen >= len(s) {
- if nested {
- // We've ended in the middle of a quoted string,
- // which means this token is actually invalid.
- return "", &Token{
- Type: INVALID,
- Content: s,
- Pos: startPos,
- }
- }
- terminator = &Token{
- Type: EOF,
- Content: "",
- Pos: pos,
- }
- break
- }
-
- next := s[litLen]
-
- if next == '$' && len(s) > litLen+1 {
- follow := s[litLen+1]
-
- if follow == '{' {
- terminator = &Token{
- Type: BEGIN,
- Content: s[litLen : litLen+2],
- Pos: pos,
- }
- pos.Column = pos.Column + 2
- break
- } else if follow == '$' {
- // Double-$ escapes the special processing of $,
- // so we will consume both characters here.
- pos.Column = pos.Column + 2
- litLen = litLen + 2
- continue
- }
- }
-
- // special handling that applies only to quoted strings
- if nested {
- if next == '"' {
- terminator = &Token{
- Type: CQUOTE,
- Content: s[litLen : litLen+1],
- Pos: pos,
- }
- pos.Column = pos.Column + 1
- break
- }
-
- // Escaped quote marks do not terminate the string.
- //
- // All we do here in the scanner is avoid terminating a string
- // due to an escaped quote. The parser is responsible for the
- // full handling of escape sequences, since it's able to produce
- // better error messages than we can produce in here.
- if next == '\\' && len(s) > litLen+1 {
- follow := s[litLen+1]
-
- if follow == '"' {
- // \" escapes the special processing of ",
- // so we will consume both characters here.
- pos.Column = pos.Column + 2
- litLen = litLen + 2
- continue
- } else if follow == '\\' {
- // \\ escapes \
- // so we will consume both characters here.
- pos.Column = pos.Column + 2
- litLen = litLen + 2
- continue
- }
- }
- }
-
- if next == '\n' {
- pos.Column = 1
- pos.Line++
- litLen++
- } else {
- pos.Column++
-
- // "Column" measures runes, so we need to actually consume
- // a valid UTF-8 character here.
- _, size := utf8.DecodeRuneInString(s[litLen:])
- litLen = litLen + size
- }
-
- }
-
- return s[:litLen], terminator
-}
-
-// scanNumber returns the extent of the prefix of the string that represents
-// a valid number, along with what type of number it represents: INT or FLOAT.
-//
-// scanNumber does only basic character analysis: numbers consist of digits
-// and periods, with at least one period signalling a FLOAT. It's the parser's
-// responsibility to validate the form and range of the number, such as ensuring
-// that a FLOAT actually contains only one period, etc.
-func scanNumber(s string) (string, TokenType) {
- period := -1
- byteLen := 0
- numType := INTEGER
- for {
- if byteLen >= len(s) {
- break
- }
-
- next := s[byteLen]
- if next != '.' && (next < '0' || next > '9') {
- // If our last value was a period, then we're not a float,
- // we're just an integer that ends in a period.
- if period == byteLen-1 {
- byteLen--
- numType = INTEGER
- }
-
- break
- }
-
- if next == '.' {
- // If we've already seen a period, break out
- if period >= 0 {
- break
- }
-
- period = byteLen
- numType = FLOAT
- }
-
- byteLen++
- }
-
- return s[:byteLen], numType
-}
-
-// scanIdentifier returns the extent of the prefix of the string that
-// represents a valid identifier, along with the length of that prefix
-// in runes.
-//
-// Identifiers may contain utf8-encoded non-Latin letters, which will
-// cause the returned "rune length" to be shorter than the byte length
-// of the returned string.
-func scanIdentifier(s string) (string, int) {
- byteLen := 0
- runeLen := 0
- for {
- if byteLen >= len(s) {
- break
- }
-
- nextRune, size := utf8.DecodeRuneInString(s[byteLen:])
- if !(nextRune == '_' ||
- nextRune == '-' ||
- nextRune == '.' ||
- nextRune == '*' ||
- unicode.IsNumber(nextRune) ||
- unicode.IsLetter(nextRune) ||
- unicode.IsMark(nextRune)) {
- break
- }
-
- // If we reach a star, it must be between periods to be part
- // of the same identifier.
- if nextRune == '*' && s[byteLen-1] != '.' {
- break
- }
-
- // If our previous character was a star, then the current must
-		// be a period. Otherwise, undo that and exit.
- if byteLen > 0 && s[byteLen-1] == '*' && nextRune != '.' {
- byteLen--
- if s[byteLen-1] == '.' {
- byteLen--
- }
-
- break
- }
-
- byteLen = byteLen + size
- runeLen = runeLen + 1
- }
-
- return s[:byteLen], runeLen
-}
-
-// byteIsSpace implements a restrictive interpretation of spaces that includes
-// only what's valid inside interpolation sequences: spaces, tabs, carriage returns, and newlines.
-func byteIsSpace(b byte) bool {
- switch b {
- case ' ', '\t', '\r', '\n':
- return true
- default:
- return false
- }
-}
-
-// stringStartsWithIdentifier returns true if the given string begins with
-// a character that is a legal start of an identifier: an underscore or
-// any character that Unicode considers to be a letter.
-func stringStartsWithIdentifier(s string) bool {
- if len(s) == 0 {
- return false
- }
-
- first := s[0]
-
- // Easy ASCII cases first
- if (first >= 'a' && first <= 'z') || (first >= 'A' && first <= 'Z') || first == '_' {
- return true
- }
-
- // If our first byte begins a UTF-8 sequence then the sequence might
- // be a unicode letter.
- if utf8.RuneStart(first) {
- firstRune, _ := utf8.DecodeRuneInString(s)
- if unicode.IsLetter(firstRune) {
- return true
- }
- }
-
- return false
-}
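
Scan runs the scanner in its own goroutine and delivers tokens over a channel that is closed after the EOF token, so a caller can simply range over it. A usage sketch, assuming the Scan/Token API shown in the removed file:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hil/ast"
	"github.com/hashicorp/hil/scanner"
)

func main() {
	// Ranging to the end drains the channel and lets the scanner goroutine exit.
	for tok := range scanner.Scan(`hello ${upper("world")}`, ast.InitPos) {
		fmt.Printf("%-10v %q\n", tok.Type, tok.Content)
	}
}
```
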
diff --git a/vendor/github.com/hashicorp/hil/scanner/token.go b/vendor/github.com/hashicorp/hil/scanner/token.go
deleted file mode 100644
index b6c82ae9b..000000000
--- a/vendor/github.com/hashicorp/hil/scanner/token.go
+++ /dev/null
@@ -1,105 +0,0 @@
-package scanner
-
-import (
- "fmt"
-
- "github.com/hashicorp/hil/ast"
-)
-
-type Token struct {
- Type TokenType
- Content string
- Pos ast.Pos
-}
-
-//go:generate stringer -type=TokenType
-type TokenType rune
-
-const (
- // Raw string data outside of ${ .. } sequences
- LITERAL TokenType = 'o'
-
- // STRING is like a LITERAL but it's inside a quoted string
- // within a ${ ... } sequence, and so it can contain backslash
- // escaping.
- STRING TokenType = 'S'
-
- // Other Literals
- INTEGER TokenType = 'I'
- FLOAT TokenType = 'F'
- BOOL TokenType = 'B'
-
- BEGIN TokenType = '$' // actually "${"
- END TokenType = '}'
- OQUOTE TokenType = '“' // Opening quote of a nested quoted sequence
- CQUOTE TokenType = '”' // Closing quote of a nested quoted sequence
- OPAREN TokenType = '('
- CPAREN TokenType = ')'
- OBRACKET TokenType = '['
- CBRACKET TokenType = ']'
- COMMA TokenType = ','
-
- IDENTIFIER TokenType = 'i'
-
- PERIOD TokenType = '.'
- PLUS TokenType = '+'
- MINUS TokenType = '-'
- STAR TokenType = '*'
- SLASH TokenType = '/'
- PERCENT TokenType = '%'
-
- AND TokenType = '∧'
- OR TokenType = '∨'
- BANG TokenType = '!'
-
- EQUAL TokenType = '='
- NOTEQUAL TokenType = '≠'
- GT TokenType = '>'
- LT TokenType = '<'
- GTE TokenType = '≥'
- LTE TokenType = '≤'
-
- QUESTION TokenType = '?'
- COLON TokenType = ':'
-
- EOF TokenType = '␄'
-
- // Produced for sequences that cannot be understood as valid tokens
- // e.g. due to use of unrecognized punctuation.
- INVALID TokenType = '�'
-)
-
-func (t *Token) String() string {
- switch t.Type {
- case EOF:
- return "end of string"
- case INVALID:
- return fmt.Sprintf("invalid sequence %q", t.Content)
- case INTEGER:
- return fmt.Sprintf("integer %s", t.Content)
- case FLOAT:
- return fmt.Sprintf("float %s", t.Content)
- case STRING:
- return fmt.Sprintf("string %q", t.Content)
- case LITERAL:
- return fmt.Sprintf("literal %q", t.Content)
- case OQUOTE:
- return fmt.Sprintf("opening quote")
- case CQUOTE:
- return fmt.Sprintf("closing quote")
- case AND:
- return "&&"
- case OR:
- return "||"
- case NOTEQUAL:
- return "!="
- case GTE:
- return ">="
- case LTE:
- return "<="
- default:
- // The remaining token types have content that
- // speaks for itself.
- return fmt.Sprintf("%q", t.Content)
- }
-}
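
Because TokenType is a rune, single-character operators are produced by converting the scanned byte directly, while multi-character and synthetic tokens borrow unrelated rune values. A self-contained illustration of that convention (not the hil package itself):

```go
package main

import "fmt"

// TokenType mirrors the rune-based convention above, outside the hil package.
type TokenType rune

const (
	OPAREN TokenType = '(' // single-char operators stand for themselves
	GTE    TokenType = '≥' // multi-char ">=" borrows an unrelated rune
	EOF    TokenType = '␄'
)

func main() {
	next := byte('(')
	fmt.Println(TokenType(next) == OPAREN) // true: direct conversion, as in the scanner
	fmt.Printf("%c %c\n", GTE, EOF)        // ≥ ␄
}
```
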
diff --git a/vendor/github.com/hashicorp/hil/scanner/tokentype_string.go b/vendor/github.com/hashicorp/hil/scanner/tokentype_string.go
deleted file mode 100644
index a602f5fdd..000000000
--- a/vendor/github.com/hashicorp/hil/scanner/tokentype_string.go
+++ /dev/null
@@ -1,51 +0,0 @@
-// Code generated by "stringer -type=TokenType"; DO NOT EDIT
-
-package scanner
-
-import "fmt"
-
-const _TokenType_name = "BANGBEGINPERCENTOPARENCPARENSTARPLUSCOMMAMINUSPERIODSLASHCOLONLTEQUALGTQUESTIONBOOLFLOATINTEGERSTRINGOBRACKETCBRACKETIDENTIFIERLITERALENDOQUOTECQUOTEANDORNOTEQUALLTEGTEEOFINVALID"
-
-var _TokenType_map = map[TokenType]string{
- 33: _TokenType_name[0:4],
- 36: _TokenType_name[4:9],
- 37: _TokenType_name[9:16],
- 40: _TokenType_name[16:22],
- 41: _TokenType_name[22:28],
- 42: _TokenType_name[28:32],
- 43: _TokenType_name[32:36],
- 44: _TokenType_name[36:41],
- 45: _TokenType_name[41:46],
- 46: _TokenType_name[46:52],
- 47: _TokenType_name[52:57],
- 58: _TokenType_name[57:62],
- 60: _TokenType_name[62:64],
- 61: _TokenType_name[64:69],
- 62: _TokenType_name[69:71],
- 63: _TokenType_name[71:79],
- 66: _TokenType_name[79:83],
- 70: _TokenType_name[83:88],
- 73: _TokenType_name[88:95],
- 83: _TokenType_name[95:101],
- 91: _TokenType_name[101:109],
- 93: _TokenType_name[109:117],
- 105: _TokenType_name[117:127],
- 111: _TokenType_name[127:134],
- 125: _TokenType_name[134:137],
- 8220: _TokenType_name[137:143],
- 8221: _TokenType_name[143:149],
- 8743: _TokenType_name[149:152],
- 8744: _TokenType_name[152:154],
- 8800: _TokenType_name[154:162],
- 8804: _TokenType_name[162:165],
- 8805: _TokenType_name[165:168],
- 9220: _TokenType_name[168:171],
- 65533: _TokenType_name[171:178],
-}
-
-func (i TokenType) String() string {
- if str, ok := _TokenType_map[i]; ok {
- return str
- }
- return fmt.Sprintf("TokenType(%d)", i)
-}
diff --git a/vendor/github.com/hashicorp/hil/transform_fixed.go b/vendor/github.com/hashicorp/hil/transform_fixed.go
deleted file mode 100644
index e69df2943..000000000
--- a/vendor/github.com/hashicorp/hil/transform_fixed.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package hil
-
-import (
- "github.com/hashicorp/hil/ast"
-)
-
-// FixedValueTransform transforms an AST to return a fixed value for
-// all interpolations. i.e. you can make "hi ${anything}" always
-// turn into "hi foo".
-//
-// The primary use case for this is for config validations where you can
-// verify that interpolations result in a certain type of string.
-func FixedValueTransform(root ast.Node, Value *ast.LiteralNode) ast.Node {
- // We visit the nodes in top-down order
- result := root
- switch n := result.(type) {
- case *ast.Output:
- for i, v := range n.Exprs {
- n.Exprs[i] = FixedValueTransform(v, Value)
- }
- case *ast.LiteralNode:
- // We keep it as-is
- default:
- // Anything else we replace
- result = Value
- }
-
- return result
-}
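
FixedValueTransform is aimed at validation: it keeps literal nodes, recurses through Output nodes, and replaces everything else with the supplied value. A usage sketch, assuming the hil.Parse entry point and the ast types referenced elsewhere in this vendor copy:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	// Parse a template that mixes literal text with an interpolation.
	root, err := hil.Parse(`hi ${var.name}`)
	if err != nil {
		panic(err)
	}

	// Swap every interpolation for a fixed string so the template can be
	// type-checked without resolving real variables.
	placeholder := &ast.LiteralNode{Value: "foo", Typex: ast.TypeString}
	fixed := hil.FixedValueTransform(root, placeholder)

	fmt.Printf("%T\n", fixed) // typically *ast.Output, with its exprs now literals
}
```
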
diff --git a/vendor/github.com/hashicorp/hil/walk.go b/vendor/github.com/hashicorp/hil/walk.go
deleted file mode 100644
index 0ace83065..000000000
--- a/vendor/github.com/hashicorp/hil/walk.go
+++ /dev/null
@@ -1,266 +0,0 @@
-package hil
-
-import (
- "fmt"
- "reflect"
- "strings"
-
- "github.com/hashicorp/hil/ast"
- "github.com/mitchellh/reflectwalk"
-)
-
-// WalkFn is the type of function to pass to Walk. Modify fields within
-// WalkData to control whether replacement happens.
-type WalkFn func(*WalkData) error
-
-// WalkData is the structure passed to the callback of the Walk function.
-//
-// This structure contains data passed in as well as fields that are expected
-// to be written by the caller as a result. Please see the documentation for
-// each field for more information.
-type WalkData struct {
- // Root is the parsed root of this HIL program
- Root ast.Node
-
- // Location is the location within the structure where this
- // value was found. This can be used to modify behavior within
- // slices and so on.
- Location reflectwalk.Location
-
- // The below two values must be set by the callback to have any effect.
- //
- // Replace, if true, will replace the value in the structure with
- // ReplaceValue. It is up to the caller to make sure this is a string.
- Replace bool
- ReplaceValue string
-}
-
-// Walk will walk an arbitrary Go structure and parse any string as an
-// HIL program and call the callback cb to determine what to replace it
-// with.
-//
-// This function is very useful for arbitrary HIL program interpolation
-// across a complex configuration structure. Due to the heavy use of
-// reflection in this function, it is recommended to write many unit tests
-// with your typical configuration structures to help mitigate the risk
-// of panics.
-func Walk(v interface{}, cb WalkFn) error {
- walker := &interpolationWalker{F: cb}
- return reflectwalk.Walk(v, walker)
-}
-
-// interpolationWalker implements interfaces for the reflectwalk package
-// (github.com/mitchellh/reflectwalk) that can be used to automatically
-// execute a callback for an interpolation.
-type interpolationWalker struct {
- F WalkFn
-
- key []string
- lastValue reflect.Value
- loc reflectwalk.Location
- cs []reflect.Value
- csKey []reflect.Value
- csData interface{}
- sliceIndex int
- unknownKeys []string
-}
-
-func (w *interpolationWalker) Enter(loc reflectwalk.Location) error {
- w.loc = loc
- return nil
-}
-
-func (w *interpolationWalker) Exit(loc reflectwalk.Location) error {
- w.loc = reflectwalk.None
-
- switch loc {
- case reflectwalk.Map:
- w.cs = w.cs[:len(w.cs)-1]
- case reflectwalk.MapValue:
- w.key = w.key[:len(w.key)-1]
- w.csKey = w.csKey[:len(w.csKey)-1]
- case reflectwalk.Slice:
- // Split any values that need to be split
- w.splitSlice()
- w.cs = w.cs[:len(w.cs)-1]
- case reflectwalk.SliceElem:
- w.csKey = w.csKey[:len(w.csKey)-1]
- }
-
- return nil
-}
-
-func (w *interpolationWalker) Map(m reflect.Value) error {
- w.cs = append(w.cs, m)
- return nil
-}
-
-func (w *interpolationWalker) MapElem(m, k, v reflect.Value) error {
- w.csData = k
- w.csKey = append(w.csKey, k)
- w.key = append(w.key, k.String())
- w.lastValue = v
- return nil
-}
-
-func (w *interpolationWalker) Slice(s reflect.Value) error {
- w.cs = append(w.cs, s)
- return nil
-}
-
-func (w *interpolationWalker) SliceElem(i int, elem reflect.Value) error {
- w.csKey = append(w.csKey, reflect.ValueOf(i))
- w.sliceIndex = i
- return nil
-}
-
-func (w *interpolationWalker) Primitive(v reflect.Value) error {
- setV := v
-
- // We only care about strings
- if v.Kind() == reflect.Interface {
- setV = v
- v = v.Elem()
- }
- if v.Kind() != reflect.String {
- return nil
- }
-
- astRoot, err := Parse(v.String())
- if err != nil {
- return err
- }
-
- // If the AST we got is just a literal string value with the same
-	// value then we ignore it. We have to check if it's the same value
- // because it is possible to input a string, get out a string, and
- // have it be different. For example: "foo-$${bar}" turns into
- // "foo-${bar}"
- if n, ok := astRoot.(*ast.LiteralNode); ok {
- if s, ok := n.Value.(string); ok && s == v.String() {
- return nil
- }
- }
-
- if w.F == nil {
- return nil
- }
-
- data := WalkData{Root: astRoot, Location: w.loc}
- if err := w.F(&data); err != nil {
- return fmt.Errorf(
- "%s in:\n\n%s",
- err, v.String())
- }
-
- if data.Replace {
- /*
- if remove {
- w.removeCurrent()
- return nil
- }
- */
-
- resultVal := reflect.ValueOf(data.ReplaceValue)
- switch w.loc {
- case reflectwalk.MapKey:
- m := w.cs[len(w.cs)-1]
-
- // Delete the old value
- var zero reflect.Value
- m.SetMapIndex(w.csData.(reflect.Value), zero)
-
- // Set the new key with the existing value
- m.SetMapIndex(resultVal, w.lastValue)
-
- // Set the key to be the new key
- w.csData = resultVal
- case reflectwalk.MapValue:
- // If we're in a map, then the only way to set a map value is
- // to set it directly.
- m := w.cs[len(w.cs)-1]
- mk := w.csData.(reflect.Value)
- m.SetMapIndex(mk, resultVal)
- default:
- // Otherwise, we should be addressable
- setV.Set(resultVal)
- }
- }
-
- return nil
-}
-
-func (w *interpolationWalker) removeCurrent() {
- // Append the key to the unknown keys
- w.unknownKeys = append(w.unknownKeys, strings.Join(w.key, "."))
-
- for i := 1; i <= len(w.cs); i++ {
- c := w.cs[len(w.cs)-i]
- switch c.Kind() {
- case reflect.Map:
- // Zero value so that we delete the map key
- var val reflect.Value
-
- // Get the key and delete it
- k := w.csData.(reflect.Value)
- c.SetMapIndex(k, val)
- return
- }
- }
-
- panic("No container found for removeCurrent")
-}
-
-func (w *interpolationWalker) replaceCurrent(v reflect.Value) {
- c := w.cs[len(w.cs)-2]
- switch c.Kind() {
- case reflect.Map:
-		// Get the key and set the new value
- k := w.csKey[len(w.csKey)-1]
- c.SetMapIndex(k, v)
- }
-}
-
-func (w *interpolationWalker) splitSlice() {
- // Get the []interface{} slice so we can do some operations on
- // it without dealing with reflection. We'll document each step
- // here to be clear.
- var s []interface{}
- raw := w.cs[len(w.cs)-1]
- switch v := raw.Interface().(type) {
- case []interface{}:
- s = v
- case []map[string]interface{}:
- return
- default:
- panic("Unknown kind: " + raw.Kind().String())
- }
-
- // Check if we have any elements that we need to split. If not, then
- // just return since we're done.
- split := false
- if !split {
- return
- }
-
- // Make a new result slice that is twice the capacity to fit our growth.
- result := make([]interface{}, 0, len(s)*2)
-
- // Go over each element of the original slice and start building up
- // the resulting slice by splitting where we have to.
- for _, v := range s {
- sv, ok := v.(string)
- if !ok {
- // Not a string, so just set it
- result = append(result, v)
- continue
- }
-
- // Not a string list, so just set it
- result = append(result, sv)
- }
-
- // Our slice is now done, we have to replace the slice now
- // with this new one that we have.
- w.replaceCurrent(reflect.ValueOf(result))
-}
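
Walk parses every string it encounters in an arbitrary Go structure and lets the callback decide, through the Replace/ReplaceValue fields, whether to substitute it; strings whose AST is just the same literal never reach the callback. A usage sketch assuming only the Walk/WalkData API shown above:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hil"
)

func main() {
	cfg := map[string]interface{}{
		"name":   "static-value",    // plain literal: the callback never sees it
		"bucket": "logs-${var.env}", // contains an interpolation
	}

	err := hil.Walk(cfg, func(d *hil.WalkData) error {
		// Replace every interpolated string with a fixed marker.
		d.Replace = true
		d.ReplaceValue = "<interpolated>"
		return nil
	})
	if err != nil {
		panic(err)
	}

	fmt.Println(cfg) // map[bucket:<interpolated> name:static-value]
}
```
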
diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/diagnostic.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/diagnostic.go
index 8d04ad4de..d9d276258 100644
--- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/diagnostic.go
+++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/diagnostic.go
@@ -4,7 +4,7 @@ import (
"fmt"
legacyhclparser "github.com/hashicorp/hcl/hcl/parser"
- "github.com/hashicorp/hcl2/hcl"
+ "github.com/hashicorp/hcl/v2"
)
// Diagnostic describes a problem (error or warning) encountered during
diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load.go
index 2d13fe124..a070f76e0 100644
--- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load.go
+++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load.go
@@ -6,7 +6,7 @@ import (
"path/filepath"
"strings"
- "github.com/hashicorp/hcl2/hcl"
+ "github.com/hashicorp/hcl/v2"
)
// LoadModule reads the directory at the given path and attempts to interpret
@@ -52,12 +52,12 @@ func (m *Module) init(diags Diagnostics) {
// case so callers can easily recognize it.
for _, r := range m.ManagedResources {
if _, exists := m.RequiredProviders[r.Provider.Name]; !exists {
- m.RequiredProviders[r.Provider.Name] = []string{}
+ m.RequiredProviders[r.Provider.Name] = &ProviderRequirement{}
}
}
for _, r := range m.DataResources {
if _, exists := m.RequiredProviders[r.Provider.Name]; !exists {
- m.RequiredProviders[r.Provider.Name] = []string{}
+ m.RequiredProviders[r.Provider.Name] = &ProviderRequirement{}
}
}
diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_hcl.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_hcl.go
index 72b5d4af9..f83ac8726 100644
--- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_hcl.go
+++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_hcl.go
@@ -5,11 +5,11 @@ import (
"fmt"
"strings"
- "github.com/hashicorp/hcl2/hcl/hclsyntax"
+ "github.com/hashicorp/hcl/v2/hclsyntax"
- "github.com/hashicorp/hcl2/gohcl"
- "github.com/hashicorp/hcl2/hcl"
- "github.com/hashicorp/hcl2/hclparse"
+ "github.com/hashicorp/hcl/v2"
+ "github.com/hashicorp/hcl/v2/gohcl"
+ "github.com/hashicorp/hcl/v2/hclparse"
ctyjson "github.com/zclconf/go-cty/cty/json"
)
@@ -51,18 +51,17 @@ func loadModule(dir string) (*Module, Diagnostics) {
}
}
- for _, block := range content.Blocks {
- // Our schema only allows required_providers here, so we
- // assume that we'll only get that block type.
- attrs, attrDiags := block.Body.JustAttributes()
- diags = append(diags, attrDiags...)
-
- for name, attr := range attrs {
- var version string
- valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version)
- diags = append(diags, valDiags...)
- if !valDiags.HasErrors() {
- mod.RequiredProviders[name] = append(mod.RequiredProviders[name], version)
+ for _, innerBlock := range content.Blocks {
+ switch innerBlock.Type {
+ case "required_providers":
+ reqs, reqsDiags := decodeRequiredProvidersBlock(innerBlock)
+ diags = append(diags, reqsDiags...)
+ for name, req := range reqs {
+ if _, exists := mod.RequiredProviders[name]; !exists {
+ mod.RequiredProviders[name] = req
+ } else {
+ mod.RequiredProviders[name].VersionConstraints = append(mod.RequiredProviders[name].VersionConstraints, req.VersionConstraints...)
+ }
}
}
}
@@ -178,22 +177,20 @@ func loadModule(dir string) (*Module, Diagnostics) {
diags = append(diags, contentDiags...)
name := block.Labels[0]
-
+ // Even if there isn't an explicit version required, we still
+ // need an entry in our map to signal the unversioned dependency.
+ if _, exists := mod.RequiredProviders[name]; !exists {
+ mod.RequiredProviders[name] = &ProviderRequirement{}
+ }
if attr, defined := content.Attributes["version"]; defined {
var version string
valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version)
diags = append(diags, valDiags...)
if !valDiags.HasErrors() {
- mod.RequiredProviders[name] = append(mod.RequiredProviders[name], version)
+ mod.RequiredProviders[name].VersionConstraints = append(mod.RequiredProviders[name].VersionConstraints, version)
}
}
- // Even if there wasn't an explicit version required, we still
- // need an entry in our map to signal the unversioned dependency.
- if _, exists := mod.RequiredProviders[name]; !exists {
- mod.RequiredProviders[name] = []string{}
- }
-
case "resource", "data":
content, _, contentDiags := block.Body.PartialContent(resourceSchema)
diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_legacy.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_legacy.go
index 86ffdf11d..c79b033b6 100644
--- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_legacy.go
+++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/load_legacy.go
@@ -267,17 +267,15 @@ func loadModuleLegacyHCL(dir string) (*Module, Diagnostics) {
if err != nil {
return nil, diagnosticsErrorf("invalid provider block at %s: %s", item.Pos(), err)
}
-
- if block.Version != "" {
- mod.RequiredProviders[name] = append(mod.RequiredProviders[name], block.Version)
- }
-
// Even if there wasn't an explicit version required, we still
// need an entry in our map to signal the unversioned dependency.
if _, exists := mod.RequiredProviders[name]; !exists {
- mod.RequiredProviders[name] = []string{}
+ mod.RequiredProviders[name] = &ProviderRequirement{}
}
+ if block.Version != "" {
+ mod.RequiredProviders[name].VersionConstraints = append(mod.RequiredProviders[name].VersionConstraints, block.Version)
+ }
}
}
}
diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/module.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/module.go
index 65ddb2307..63027d184 100644
--- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/module.go
+++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/module.go
@@ -9,8 +9,8 @@ type Module struct {
Variables map[string]*Variable `json:"variables"`
Outputs map[string]*Output `json:"outputs"`
- RequiredCore []string `json:"required_core,omitempty"`
- RequiredProviders map[string][]string `json:"required_providers"`
+ RequiredCore []string `json:"required_core,omitempty"`
+ RequiredProviders map[string]*ProviderRequirement `json:"required_providers"`
ManagedResources map[string]*Resource `json:"managed_resources"`
DataResources map[string]*Resource `json:"data_resources"`
@@ -27,7 +27,7 @@ func newModule(path string) *Module {
Path: path,
Variables: make(map[string]*Variable),
Outputs: make(map[string]*Output),
- RequiredProviders: make(map[string][]string),
+ RequiredProviders: make(map[string]*ProviderRequirement),
ManagedResources: make(map[string]*Resource),
DataResources: make(map[string]*Resource),
ModuleCalls: make(map[string]*ModuleCall),
diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/provider_ref.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/provider_ref.go
index d92483778..157c8c2c1 100644
--- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/provider_ref.go
+++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/provider_ref.go
@@ -1,5 +1,11 @@
package tfconfig
+import (
+ "github.com/hashicorp/hcl/v2"
+ "github.com/hashicorp/hcl/v2/gohcl"
+ "github.com/zclconf/go-cty/cty/gocty"
+)
+
// ProviderRef is a reference to a provider configuration within a module.
// It represents the contents of a "provider" argument in a resource, or
// a value in the "providers" map for a module call.
@@ -7,3 +13,73 @@ type ProviderRef struct {
Name string `json:"name"`
Alias string `json:"alias,omitempty"` // Empty if the default provider configuration is referenced
}
+
+type ProviderRequirement struct {
+ Source string `json:"source,omitempty"`
+ VersionConstraints []string `json:"version_constraints,omitempty"`
+}
+
+func decodeRequiredProvidersBlock(block *hcl.Block) (map[string]*ProviderRequirement, hcl.Diagnostics) {
+ attrs, diags := block.Body.JustAttributes()
+ reqs := make(map[string]*ProviderRequirement)
+ for name, attr := range attrs {
+ expr, err := attr.Expr.Value(nil)
+ if err != nil {
+ diags = append(diags, err...)
+ }
+
+ switch {
+ case expr.Type().IsPrimitiveType():
+ var version string
+ valDiags := gohcl.DecodeExpression(attr.Expr, nil, &version)
+ diags = append(diags, valDiags...)
+ if !valDiags.HasErrors() {
+ reqs[name] = &ProviderRequirement{
+ VersionConstraints: []string{version},
+ }
+ }
+
+ case expr.Type().IsObjectType():
+ var pr ProviderRequirement
+ if expr.Type().HasAttribute("version") {
+ var version string
+ err := gocty.FromCtyValue(expr.GetAttr("version"), &version)
+ if err == nil {
+ pr.VersionConstraints = append(pr.VersionConstraints, version)
+ } else {
+ diags = append(diags, &hcl.Diagnostic{
+ Severity: hcl.DiagError,
+ Summary: "Unsuitable value type",
+ Detail: "Unsuitable value: string required",
+ Subject: attr.Expr.Range().Ptr(),
+ })
+ }
+ }
+ if expr.Type().HasAttribute("source") {
+ var source string
+ err := gocty.FromCtyValue(expr.GetAttr("source"), &source)
+ if err == nil {
+ pr.Source = source
+ } else {
+ diags = append(diags, &hcl.Diagnostic{
+ Severity: hcl.DiagError,
+ Summary: "Unsuitable value type",
+ Detail: "Unsuitable value: string required",
+ Subject: attr.Expr.Range().Ptr(),
+ })
+ }
+ }
+ reqs[name] = &pr
+
+ default:
+ diags = append(diags, &hcl.Diagnostic{
+ Severity: hcl.DiagError,
+ Summary: "Unsuitable value type",
+ Detail: "Unsuitable value: string required",
+ Subject: attr.Expr.Range().Ptr(),
+ })
+ }
+ }
+
+ return reqs, diags
+}
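
decodeRequiredProvidersBlock accepts both the legacy string form and the newer object form of a required_providers entry and normalizes them into ProviderRequirement values. A sketch of how that surfaces through the library's public entry point; it assumes the usual tfconfig.LoadModule signature, and the provider names and constraints are purely illustrative:

```go
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"

	"github.com/hashicorp/terraform-config-inspect/tfconfig"
)

func main() {
	dir, err := os.MkdirTemp("", "tfconfig-demo")
	if err != nil {
		log.Fatal(err)
	}
	defer os.RemoveAll(dir)

	// One provider uses the legacy string form, the other the object form.
	src := []byte(`
terraform {
  required_providers {
    aws      = "~> 2.0"
    outscale = { source = "outscale/outscale", version = ">= 0.5" }
  }
}
`)
	if err := os.WriteFile(filepath.Join(dir, "versions.tf"), src, 0o644); err != nil {
		log.Fatal(err)
	}

	mod, diags := tfconfig.LoadModule(dir)
	if len(diags) > 0 {
		log.Fatalf("diagnostics: %v", diags)
	}

	// Both entries end up as *ProviderRequirement values.
	for name, req := range mod.RequiredProviders {
		fmt.Printf("%s: source=%q versions=%v\n", name, req.Source, req.VersionConstraints)
	}
}
```
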
diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/schema.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/schema.go
index 3af742ff7..fd6ca9e70 100644
--- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/schema.go
+++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/schema.go
@@ -1,7 +1,7 @@
package tfconfig
import (
- "github.com/hashicorp/hcl2/hcl"
+ "github.com/hashicorp/hcl/v2"
)
var rootSchema = &hcl.BodySchema{
diff --git a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/source_pos.go b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/source_pos.go
index 883914eb7..548c9f9a3 100644
--- a/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/source_pos.go
+++ b/vendor/github.com/hashicorp/terraform-config-inspect/tfconfig/source_pos.go
@@ -2,7 +2,7 @@ package tfconfig
import (
legacyhcltoken "github.com/hashicorp/hcl/hcl/token"
- "github.com/hashicorp/hcl2/hcl"
+ "github.com/hashicorp/hcl/v2"
)
// SourcePos is a pointer to a particular location in a source file.
diff --git a/vendor/github.com/hashicorp/terraform/LICENSE b/vendor/github.com/hashicorp/terraform-plugin-sdk/LICENSE
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/LICENSE
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/LICENSE
diff --git a/vendor/github.com/hashicorp/terraform/helper/acctest/acctest.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/acctest/acctest.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/acctest/acctest.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/acctest/acctest.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/acctest/random.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/acctest/random.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/acctest/random.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/acctest/random.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/acctest/remotetests.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/acctest/remotetests.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/acctest/remotetests.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/acctest/remotetests.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/hashcode/hashcode.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/hashcode/hashcode.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/hashcode/hashcode.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/hashcode/hashcode.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/logging/logging.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/logging/logging.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/logging/logging.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/logging/logging.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/logging/transport.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/logging/transport.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/logging/transport.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/logging/transport.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/mutexkv/mutexkv.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/mutexkv/mutexkv.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/mutexkv/mutexkv.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/mutexkv/mutexkv.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/error.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/error.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/resource/error.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/error.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/grpc_test_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/grpc_test_provider.go
similarity index 75%
rename from vendor/github.com/hashicorp/terraform/helper/resource/grpc_test_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/grpc_test_provider.go
index 0742e993b..db12cee20 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/grpc_test_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/grpc_test_provider.go
@@ -5,11 +5,11 @@ import (
"net"
"time"
- "github.com/hashicorp/terraform/helper/plugin"
- proto "github.com/hashicorp/terraform/internal/tfplugin5"
- tfplugin "github.com/hashicorp/terraform/plugin"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ proto "github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5"
+ tfplugin "github.com/hashicorp/terraform-plugin-sdk/plugin"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
"google.golang.org/grpc"
"google.golang.org/grpc/test/bufconn"
)
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/id.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/id.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/resource/id.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/id.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/map.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/map.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/helper/resource/map.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/map.go
index a465136f7..02a993d69 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/map.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/map.go
@@ -4,7 +4,7 @@ import (
"fmt"
"sort"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// Map is a map of resources that are supported, and provides helpers for
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/resource.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/resource.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/helper/resource/resource.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/resource.go
index 0d9c831a6..80782413b 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/resource.go
@@ -1,8 +1,8 @@
package resource
import (
- "github.com/hashicorp/terraform/helper/config"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/helper/config"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
type Resource struct {
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/state.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/resource/state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/state.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/state_shim.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/state_shim.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/helper/resource/state_shim.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/state_shim.go
index afd60b318..1e3225933 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/state_shim.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/state_shim.go
@@ -4,14 +4,14 @@ import (
"encoding/json"
"fmt"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// shimState takes a new *states.State and reverts it to a legacy state for the provider ACC tests
@@ -87,7 +87,7 @@ func shimNewState(newState *states.State, providers map[string]terraform.Resourc
resState.Primary.Meta["schema_version"] = i.Current.SchemaVersion
}
- for _, dep := range i.Current.DependsOn {
+ for _, dep := range i.Current.Dependencies {
resState.Dependencies = append(resState.Dependencies, dep.String())
}
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/testing.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/testing.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/helper/resource/testing.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/testing.go
index 576ef31f3..3f3e02a79 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/testing.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/testing.go
@@ -2,6 +2,7 @@ package resource
import (
"bytes"
+ "errors"
"flag"
"fmt"
"io"
@@ -21,16 +22,16 @@ import (
"github.com/hashicorp/logutils"
"github.com/mitchellh/colorstring"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/command/format"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configload"
- "github.com/hashicorp/terraform/helper/logging"
- "github.com/hashicorp/terraform/internal/initwd"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/terraform"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/logging"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/command/format"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/initwd"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// flagSweep is a flag available when running tests on the command line. It
@@ -54,13 +55,10 @@ import (
// destroyed.
var flagSweep = flag.String("sweep", "", "List of Regions to run available Sweepers")
+var flagSweepAllowFailures = flag.Bool("sweep-allow-failures", false, "Enable to allow Sweeper Tests to continue after failures")
var flagSweepRun = flag.String("sweep-run", "", "Comma-separated list of Sweeper Tests to run")
var sweeperFuncs map[string]*Sweeper
-// map of sweepers that have ran, and the success/fail status based on any error
-// raised
-var sweeperRunList map[string]bool
-
// type SweeperFunc is a signature for a function that acts as a sweeper. It
// accepts a string for the region that the sweeper is to be run in. This
// function must be able to construct a valid client for that region.
@@ -105,26 +103,64 @@ func TestMain(m *testing.M) {
// get filtered list of sweepers to run based on sweep-run flag
sweepers := filterSweepers(*flagSweepRun, sweeperFuncs)
- for _, region := range regions {
- region = strings.TrimSpace(region)
- // reset sweeperRunList for each region
- sweeperRunList = map[string]bool{}
-
- log.Printf("[DEBUG] Running Sweepers for region (%s):\n", region)
- for _, sweeper := range sweepers {
- if err := runSweeperWithRegion(region, sweeper); err != nil {
- log.Fatalf("[ERR] error running (%s): %s", sweeper.Name, err)
+
+ if _, err := runSweepers(regions, sweepers, *flagSweepAllowFailures); err != nil {
+ os.Exit(1)
+ }
+ } else {
+ os.Exit(m.Run())
+ }
+}
+
+func runSweepers(regions []string, sweepers map[string]*Sweeper, allowFailures bool) (map[string]map[string]error, error) {
+ var sweeperErrorFound bool
+ sweeperRunList := make(map[string]map[string]error)
+
+ for _, region := range regions {
+ region = strings.TrimSpace(region)
+
+ var regionSweeperErrorFound bool
+ regionSweeperRunList := make(map[string]error)
+
+ log.Printf("[DEBUG] Running Sweepers for region (%s):\n", region)
+ for _, sweeper := range sweepers {
+ if err := runSweeperWithRegion(region, sweeper, sweepers, regionSweeperRunList, allowFailures); err != nil {
+ if allowFailures {
+ continue
}
+
+ sweeperRunList[region] = regionSweeperRunList
+ return sweeperRunList, fmt.Errorf("sweeper (%s) for region (%s) failed: %s", sweeper.Name, region, err)
}
+ }
- log.Printf("Sweeper Tests ran:\n")
- for s, _ := range sweeperRunList {
- fmt.Printf("\t- %s\n", s)
+ log.Printf("Sweeper Tests ran successfully:\n")
+ for sweeper, sweeperErr := range regionSweeperRunList {
+ if sweeperErr == nil {
+ fmt.Printf("\t- %s\n", sweeper)
+ } else {
+ regionSweeperErrorFound = true
}
}
- } else {
- os.Exit(m.Run())
+
+ if regionSweeperErrorFound {
+ sweeperErrorFound = true
+ log.Printf("Sweeper Tests ran unsuccessfully:\n")
+ for sweeper, sweeperErr := range regionSweeperRunList {
+ if sweeperErr != nil {
+ fmt.Printf("\t- %s: %s\n", sweeper, sweeperErr)
+ }
+ }
+ }
+
+ sweeperRunList[region] = regionSweeperRunList
}
+
+ if sweeperErrorFound {
+ return sweeperRunList, errors.New("at least one sweeper failed")
+ }
+
+ return sweeperRunList, nil
}
// filterSweepers takes a comma-separated string listing the names of sweepers
@@ -139,29 +175,61 @@ func filterSweepers(f string, source map[string]*Sweeper) map[string]*Sweeper {
}
sweepers := make(map[string]*Sweeper)
- for name, sweeper := range source {
+ for name := range source {
for _, s := range filterSlice {
if strings.Contains(strings.ToLower(name), s) {
- sweepers[name] = sweeper
+ for foundName, foundSweeper := range filterSweeperWithDependencies(name, source) {
+ sweepers[foundName] = foundSweeper
+ }
}
}
}
return sweepers
}
+// filterSweeperWithDependencies recursively returns a sweeper and all of its dependencies.
+// Since filterSweepers performs fuzzy matching, this function is used
+// to perform exact sweeper and dependency lookup.
+func filterSweeperWithDependencies(name string, source map[string]*Sweeper) map[string]*Sweeper {
+ result := make(map[string]*Sweeper)
+
+ currentSweeper, ok := source[name]
+ if !ok {
+ log.Printf("[WARN] Sweeper has dependency (%s), but that sweeper was not found", name)
+ return result
+ }
+
+ result[name] = currentSweeper
+
+ for _, dependency := range currentSweeper.Dependencies {
+ for foundName, foundSweeper := range filterSweeperWithDependencies(dependency, source) {
+ result[foundName] = foundSweeper
+ }
+ }
+
+ return result
+}
+
// runSweeperWithRegion receives a sweeper and a region, and recursively calls
// itself with that region for every dependency found for that sweeper. If there
// are no dependencies, invoke the contained sweeper function with the region, and
// add the success/fail status to the sweeperRunList.
-func runSweeperWithRegion(region string, s *Sweeper) error {
+func runSweeperWithRegion(region string, s *Sweeper, sweepers map[string]*Sweeper, sweeperRunList map[string]error, allowFailures bool) error {
for _, dep := range s.Dependencies {
- if depSweeper, ok := sweeperFuncs[dep]; ok {
+ if depSweeper, ok := sweepers[dep]; ok {
log.Printf("[DEBUG] Sweeper (%s) has dependency (%s), running..", s.Name, dep)
- if err := runSweeperWithRegion(region, depSweeper); err != nil {
+ err := runSweeperWithRegion(region, depSweeper, sweepers, sweeperRunList, allowFailures)
+
+ if err != nil {
+ if allowFailures {
+ log.Printf("[ERROR] Error running Sweeper (%s) in region (%s): %s", depSweeper.Name, region, err)
+ continue
+ }
+
return err
}
} else {
- log.Printf("[DEBUG] Sweeper (%s) has dependency (%s), but that sweeper was not found", s.Name, dep)
+ log.Printf("[WARN] Sweeper (%s) has dependency (%s), but that sweeper was not found", s.Name, dep)
}
}
@@ -170,11 +238,14 @@ func runSweeperWithRegion(region string, s *Sweeper) error {
return nil
}
+ log.Printf("[DEBUG] Running Sweeper (%s) in region (%s)", s.Name, region)
+
runE := s.F(region)
- if runE == nil {
- sweeperRunList[s.Name] = true
- } else {
- sweeperRunList[s.Name] = false
+
+ sweeperRunList[s.Name] = runE
+
+ if runE != nil {
+ log.Printf("[ERROR] Error running Sweeper (%s) in region (%s): %s", s.Name, region, runE)
}
return runE
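// A minimal sketch of how a provider's test package might register sweepers so
// that the dependency resolution and error aggregation above kick in;
// "outscale_vm", "outscale_nic" and sweepVMs are hypothetical names used only
// for illustration.
package outscale

import (
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/helper/resource"
)

func init() {
	resource.AddTestSweepers("outscale_vm", &resource.Sweeper{
		Name:         "outscale_vm",
		Dependencies: []string{"outscale_nic"}, // swept first by runSweeperWithRegion
		F:            sweepVMs,
	})
}

// sweepVMs would remove leaked test VMs in the given region.
func sweepVMs(region string) error {
	// real cleanup calls against the API would go here
	return nil
}

// TestMain hands control to the SDK so the -sweep test flag runs the sweepers.
func TestMain(m *testing.M) {
	resource.TestMain(m)
}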
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/testing_config.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/testing_config.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/helper/resource/testing_config.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/testing_config.go
index e3da9bc02..e21525de8 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/testing_config.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/testing_config.go
@@ -9,14 +9,14 @@ import (
"sort"
"strings"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
"github.com/hashicorp/errwrap"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/terraform"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// testStepConfig runs a config-mode test step
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/testing_import_state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/testing_import_state.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/helper/resource/testing_import_state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/testing_import_state.go
index 3473f8e52..561873dea 100644
--- a/vendor/github.com/hashicorp/terraform/helper/resource/testing_import_state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/testing_import_state.go
@@ -10,10 +10,10 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// testStepImportState runs an import state test step
diff --git a/vendor/github.com/hashicorp/terraform/helper/resource/wait.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/wait.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/resource/wait.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/resource/wait.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/README.md b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/README.md
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/README.md
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/README.md
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/backend.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/backend.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/helper/schema/backend.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/backend.go
index 42c2bed92..609c208b3 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/backend.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/backend.go
@@ -4,12 +4,12 @@ import (
"context"
"fmt"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
ctyconvert "github.com/zclconf/go-cty/cty/convert"
)
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/core_schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/core_schema.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/helper/schema/core_schema.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/core_schema.go
index 875677eb3..fa03d8338 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/core_schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/core_schema.go
@@ -3,7 +3,7 @@ package schema
import (
"fmt"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/data_source_resource_shim.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/data_source_resource_shim.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/data_source_resource_shim.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/data_source_resource_shim.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/equal.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/equal.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/equal.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/equal.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/field_reader.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_config.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_config.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/helper/schema/field_reader_config.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_config.go
index 6ad3f13cb..dc2ae1af5 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_config.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_config.go
@@ -7,7 +7,7 @@ import (
"strings"
"sync"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
"github.com/mitchellh/mapstructure"
)
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_diff.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_diff.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/helper/schema/field_reader_diff.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_diff.go
index 3e70acf0b..c099029af 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_diff.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_diff.go
@@ -4,7 +4,7 @@ import (
"fmt"
"strings"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
"github.com/mitchellh/mapstructure"
)
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_map.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_map.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/field_reader_map.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_map.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_reader_multi.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_multi.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/field_reader_multi.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_reader_multi.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_writer.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_writer.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/field_writer.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_writer.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/field_writer_map.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_writer_map.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/field_writer_map.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/field_writer_map.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/getsource_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/getsource_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/getsource_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/getsource_string.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/provider.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/helper/schema/provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/provider.go
index 9efc90e7b..bbea5dbd5 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/provider.go
@@ -8,8 +8,8 @@ import (
"sync"
"github.com/hashicorp/go-multierror"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
var ReservedProviderFields = []string{
@@ -267,6 +267,11 @@ func (p *Provider) Configure(c *terraform.ResourceConfig) error {
return err
}
+ if p.TerraformVersion == "" {
+ // Terraform 0.12 introduced this field to the protocol
+ // We can therefore assume that if it's unconfigured at this point, it's 0.10 or 0.11
+ p.TerraformVersion = "0.11+compatible"
+ }
meta, err := p.ConfigureFunc(data)
if err != nil {
return err
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/resource.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/helper/schema/resource.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource.go
index 8cd2703aa..406dcdf71 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource.go
@@ -6,7 +6,7 @@ import (
"log"
"strconv"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/resource_data.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_data.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/helper/schema/resource_data.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_data.go
index 1c390709e..259487ed5 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/resource_data.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_data.go
@@ -7,7 +7,7 @@ import (
"sync"
"time"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// ResourceData is used to query and set the attributes of a resource.
@@ -133,6 +133,16 @@ func (d *ResourceData) getRaw(key string, level getSource) getResult {
return d.get(parts, level)
}
+// HasChanges returns whether or not any of the given keys has been changed.
+func (d *ResourceData) HasChanges(keys ...string) bool {
+ for _, key := range keys {
+ if d.HasChange(key) {
+ return true
+ }
+ }
+ return false
+}
+
// HasChange returns whether or not the given key has been changed.
func (d *ResourceData) HasChange(key string) bool {
o, n := d.GetChange(key)
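// A minimal sketch of the new ResourceData.HasChanges helper replacing a chain
// of HasChange calls in an Update function; resourceExampleUpdate and the
// attribute names are hypothetical.
package outscale

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)

func resourceExampleUpdate(d *schema.ResourceData, meta interface{}) error {
	// Equivalent to d.HasChange("description") || d.HasChange("tags").
	if d.HasChanges("description", "tags") {
		// push only the updated attributes to the API here
	}
	return nil
}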
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/resource_data_get_source.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_data_get_source.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/resource_data_get_source.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_data_get_source.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/resource_diff.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_diff.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/helper/schema/resource_diff.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_diff.go
index 47b548104..f55a66e14 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/resource_diff.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_diff.go
@@ -7,7 +7,7 @@ import (
"strings"
"sync"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// newValueWriter is a minor re-implementation of MapFieldWriter to include
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/resource_importer.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_importer.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/resource_importer.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_importer.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/resource_timeout.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_timeout.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/helper/schema/resource_timeout.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_timeout.go
index 5ad7aafc8..f12bf7259 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/resource_timeout.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/resource_timeout.go
@@ -5,8 +5,8 @@ import (
"log"
"time"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
"github.com/mitchellh/copystructure"
)
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/schema.go
similarity index 92%
rename from vendor/github.com/hashicorp/terraform/helper/schema/schema.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/schema.go
index 089e6b213..0cd64635e 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/schema.go
@@ -22,8 +22,8 @@ import (
"strings"
"sync"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
"github.com/mitchellh/copystructure"
"github.com/mitchellh/mapstructure"
)
@@ -216,7 +216,16 @@ type Schema struct {
// This will only check that they're set in the _config_. This will not
// raise an error for a malfunctioning resource that sets a conflicting
// key.
+ //
+ // ExactlyOneOf is a set of schema keys that, when set, requires that
+ // exactly one of the keys in that list be specified. It will error if
+ // none of them are specified, or if more than one is.
+ //
+ // AtLeastOneOf is a set of schema keys that, when set, requires that at
+ // least one of the keys in that list be specified.
ConflictsWith []string
+ ExactlyOneOf []string
+ AtLeastOneOf []string
// When Deprecated is set, this attribute is deprecated.
//
@@ -749,36 +758,32 @@ func (m schemaMap) internalValidate(topSchemaMap schemaMap, attrsOnly bool) erro
return fmt.Errorf("%s: ConflictsWith cannot be set with Required", k)
}
- if len(v.ConflictsWith) > 0 {
- for _, key := range v.ConflictsWith {
- parts := strings.Split(key, ".")
- sm := topSchemaMap
- var target *Schema
- for _, part := range parts {
- // Skip index fields
- if _, err := strconv.Atoi(part); err == nil {
- continue
- }
+ if len(v.ExactlyOneOf) > 0 && v.Required {
+ return fmt.Errorf("%s: ExactlyOneOf cannot be set with Required", k)
+ }
- var ok bool
- if target, ok = sm[part]; !ok {
- return fmt.Errorf("%s: ConflictsWith references unknown attribute (%s) at part (%s)", k, key, part)
- }
+ if len(v.AtLeastOneOf) > 0 && v.Required {
+ return fmt.Errorf("%s: AtLeastOneOf cannot be set with Required", k)
+ }
- if subResource, ok := target.Elem.(*Resource); ok {
- sm = schemaMap(subResource.Schema)
- }
- }
- if target == nil {
- return fmt.Errorf("%s: ConflictsWith cannot find target attribute (%s), sm: %#v", k, key, sm)
- }
- if target.Required {
- return fmt.Errorf("%s: ConflictsWith cannot contain Required attribute (%s)", k, key)
- }
+ if len(v.ConflictsWith) > 0 {
+ err := checkKeysAgainstSchemaFlags(k, v.ConflictsWith, topSchemaMap)
+ if err != nil {
+ return fmt.Errorf("ConflictsWith: %+v", err)
+ }
+ }
- if len(target.ComputedWhen) > 0 {
- return fmt.Errorf("%s: ConflictsWith cannot contain Computed(When) attribute (%s)", k, key)
- }
+ if len(v.ExactlyOneOf) > 0 {
+ err := checkKeysAgainstSchemaFlags(k, v.ExactlyOneOf, topSchemaMap)
+ if err != nil {
+ return fmt.Errorf("ExactlyOneOf: %+v", err)
+ }
+ }
+
+ if len(v.AtLeastOneOf) > 0 {
+ err := checkKeysAgainstSchemaFlags(k, v.AtLeastOneOf, topSchemaMap)
+ if err != nil {
+ return fmt.Errorf("AtLeastOneOf: %+v", err)
}
}
@@ -845,6 +850,40 @@ func (m schemaMap) internalValidate(topSchemaMap schemaMap, attrsOnly bool) erro
return nil
}
+func checkKeysAgainstSchemaFlags(k string, keys []string, topSchemaMap schemaMap) error {
+ for _, key := range keys {
+ parts := strings.Split(key, ".")
+ sm := topSchemaMap
+ var target *Schema
+ for _, part := range parts {
+ // Skip index fields
+ if _, err := strconv.Atoi(part); err == nil {
+ continue
+ }
+
+ var ok bool
+ if target, ok = sm[part]; !ok {
+ return fmt.Errorf("%s references unknown attribute (%s) at part (%s)", k, key, part)
+ }
+
+ if subResource, ok := target.Elem.(*Resource); ok {
+ sm = schemaMap(subResource.Schema)
+ }
+ }
+ if target == nil {
+ return fmt.Errorf("%s cannot find target attribute (%s), sm: %#v", k, key, sm)
+ }
+ if target.Required {
+ return fmt.Errorf("%s cannot contain Required attribute (%s)", k, key)
+ }
+
+ if len(target.ComputedWhen) > 0 {
+ return fmt.Errorf("%s cannot contain Computed(When) attribute (%s)", k, key)
+ }
+ }
+ return nil
+}
+
func isValidFieldName(name string) bool {
re := regexp.MustCompile("^[a-z0-9_]+$")
return re.MatchString(name)
@@ -1350,12 +1389,22 @@ func (m schemaMap) validate(
// We're okay as long as we had a value set
ok = raw != nil
}
+
+ err := validateExactlyOneAttribute(k, schema, c)
+ if err != nil {
+ return nil, []error{err}
+ }
+
+ err = validateAtLeastOneAttribute(k, schema, c)
+ if err != nil {
+ return nil, []error{err}
+ }
+
if !ok {
if schema.Required {
return nil, []error{fmt.Errorf(
"%q: required field is not set", k)}
}
-
return nil, nil
}
@@ -1377,7 +1426,7 @@ func (m schemaMap) validate(
return nil, nil
}
- err := m.validateConflictingAttributes(k, schema, c)
+ err = validateConflictingAttributes(k, schema, c)
if err != nil {
return nil, []error{err}
}
@@ -1407,7 +1456,7 @@ func isWhollyKnown(raw interface{}) bool {
}
return true
}
-func (m schemaMap) validateConflictingAttributes(
+func validateConflictingAttributes(
k string,
schema *Schema,
c *terraform.ResourceConfig) error {
@@ -1431,6 +1480,79 @@ func (m schemaMap) validateConflictingAttributes(
return nil
}
+func removeDuplicates(elements []string) []string {
+ encountered := make(map[string]struct{}, 0)
+ result := []string{}
+
+ for v := range elements {
+ if _, ok := encountered[elements[v]]; !ok {
+ encountered[elements[v]] = struct{}{}
+ result = append(result, elements[v])
+ }
+ }
+
+ return result
+}
+
+func validateExactlyOneAttribute(
+ k string,
+ schema *Schema,
+ c *terraform.ResourceConfig) error {
+
+ if len(schema.ExactlyOneOf) == 0 {
+ return nil
+ }
+
+ allKeys := removeDuplicates(append(schema.ExactlyOneOf, k))
+ sort.Strings(allKeys)
+ specified := make([]string, 0)
+ unknownVariableValueCount := 0
+ for _, exactlyOneOfKey := range allKeys {
+ if c.IsComputed(exactlyOneOfKey) {
+ unknownVariableValueCount++
+ continue
+ }
+
+ _, ok := c.Get(exactlyOneOfKey)
+ if ok {
+ specified = append(specified, exactlyOneOfKey)
+ }
+ }
+
+ if len(specified) == 0 && unknownVariableValueCount == 0 {
+ return fmt.Errorf("%q: one of `%s` must be specified", k, strings.Join(allKeys, ","))
+ }
+
+ if len(specified) > 1 {
+ return fmt.Errorf("%q: only one of `%s` can be specified, but `%s` were specified.", k, strings.Join(allKeys, ","), strings.Join(specified, ","))
+ }
+
+ return nil
+}
+
+func validateAtLeastOneAttribute(
+ k string,
+ schema *Schema,
+ c *terraform.ResourceConfig) error {
+
+ if len(schema.AtLeastOneOf) == 0 {
+ return nil
+ }
+
+ allKeys := removeDuplicates(append(schema.AtLeastOneOf, k))
+ sort.Strings(allKeys)
+
+ for _, atLeastOneOfKey := range allKeys {
+ if _, ok := c.Get(atLeastOneOfKey); ok {
+ // We can ignore hcl2shim.UnknownVariable by assuming it's been set and additional validation elsewhere
+ // will uncover this if it is in fact null.
+ return nil
+ }
+ }
+
+ return fmt.Errorf("%q: one of `%s` must be specified", k, strings.Join(allKeys, ","))
+}
+
func (m schemaMap) validateList(
k string,
raw interface{},
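// A minimal sketch of the new ExactlyOneOf and AtLeastOneOf flags in a provider
// schema; the attribute names ("image_id", "snapshot_id", "ipv4_cidr",
// "ipv6_cidr") are hypothetical. validateExactlyOneAttribute and
// validateAtLeastOneAttribute above enforce these constraints during config
// validation.
package outscale

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)

func exampleOneOfSchema() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		// Exactly one of image_id or snapshot_id must appear in the config.
		"image_id": {
			Type:         schema.TypeString,
			Optional:     true,
			ExactlyOneOf: []string{"image_id", "snapshot_id"},
		},
		"snapshot_id": {
			Type:         schema.TypeString,
			Optional:     true,
			ExactlyOneOf: []string{"image_id", "snapshot_id"},
		},
		// At least one of the CIDR attributes must be set; both are allowed.
		"ipv4_cidr": {
			Type:         schema.TypeString,
			Optional:     true,
			AtLeastOneOf: []string{"ipv4_cidr", "ipv6_cidr"},
		},
		"ipv6_cidr": {
			Type:         schema.TypeString,
			Optional:     true,
			AtLeastOneOf: []string{"ipv4_cidr", "ipv6_cidr"},
		},
	}
}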
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/serialize.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/serialize.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/serialize.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/serialize.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/set.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/set.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/helper/schema/set.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/set.go
index 8ee89e475..daa431ddb 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/set.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/set.go
@@ -8,7 +8,7 @@ import (
"strconv"
"sync"
- "github.com/hashicorp/terraform/helper/hashcode"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/hashcode"
)
// HashString hashes strings. If you want a Set of strings, this is the
@@ -235,10 +235,6 @@ func (s *Set) remove(item interface{}) string {
return code
}
-func (s *Set) index(item interface{}) int {
- return sort.SearchStrings(s.listCode(), s.hash(item))
-}
-
func (s *Set) listCode() []string {
// Sort the hash codes so the order of the list is deterministic
keys := make([]string, 0, len(s.m))
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/shims.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/shims.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/helper/schema/shims.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/shims.go
index d2dbff53c..93c601f80 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/shims.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/shims.go
@@ -6,9 +6,9 @@ import (
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// DiffFromValues takes the current state and desired state as cty.Values and
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/testing.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/testing.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/helper/schema/testing.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/testing.go
index 122782174..4d0fd7365 100644
--- a/vendor/github.com/hashicorp/terraform/helper/schema/testing.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/testing.go
@@ -3,7 +3,7 @@ package schema
import (
"testing"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// TestResourceDataRaw creates a ResourceData from a raw configuration map.
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/valuetype.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/valuetype.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/valuetype.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/valuetype.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/valuetype_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/valuetype_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/schema/valuetype_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/schema/valuetype_string.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/structure/expand_json.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/structure/expand_json.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/structure/expand_json.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/structure/expand_json.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/structure/flatten_json.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/structure/flatten_json.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/structure/flatten_json.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/structure/flatten_json.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/structure/normalize_json.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/structure/normalize_json.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/structure/normalize_json.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/structure/normalize_json.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/structure/suppress_json_diff.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/structure/suppress_json_diff.go
similarity index 84%
rename from vendor/github.com/hashicorp/terraform/helper/structure/suppress_json_diff.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/helper/structure/suppress_json_diff.go
index 46f794a71..e23707f57 100644
--- a/vendor/github.com/hashicorp/terraform/helper/structure/suppress_json_diff.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/structure/suppress_json_diff.go
@@ -3,7 +3,7 @@ package structure
import (
"reflect"
- "github.com/hashicorp/terraform/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)
func SuppressJsonDiff(k, old, new string, d *schema.ResourceData) bool {
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/float.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/float.go
new file mode 100644
index 000000000..f0bfbfa53
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/float.go
@@ -0,0 +1,64 @@
+package validation
+
+import (
+ "fmt"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+)
+
+// FloatBetween returns a SchemaValidateFunc which tests if the provided value
+// is of type float64 and is between min and max (inclusive).
+func FloatBetween(min, max float64) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (s []string, es []error) {
+ v, ok := i.(float64)
+ if !ok {
+ es = append(es, fmt.Errorf("expected type of %s to be float64", k))
+ return
+ }
+
+ if v < min || v > max {
+ es = append(es, fmt.Errorf("expected %s to be in the range (%f - %f), got %f", k, min, max, v))
+ return
+ }
+
+ return
+ }
+}
+
+// FloatAtLeast returns a SchemaValidateFunc which tests if the provided value
+// is of type float and is at least min (inclusive)
+func FloatAtLeast(min float64) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (s []string, es []error) {
+ v, ok := i.(float64)
+ if !ok {
+ es = append(es, fmt.Errorf("expected type of %s to be float", k))
+ return
+ }
+
+ if v < min {
+ es = append(es, fmt.Errorf("expected %s to be at least (%f), got %f", k, min, v))
+ return
+ }
+
+ return
+ }
+}
+
+// FloatAtMost returns a SchemaValidateFunc which tests if the provided value
+// is of type float and is at most max (inclusive)
+func FloatAtMost(max float64) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (s []string, es []error) {
+ v, ok := i.(float64)
+ if !ok {
+ es = append(es, fmt.Errorf("expected type of %s to be float", k))
+ return
+ }
+
+ if v > max {
+ es = append(es, fmt.Errorf("expected %s to be at most (%f), got %f", k, max, v))
+ return
+ }
+
+ return
+ }
+}
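// A minimal sketch of the float validators above attached to a schema field;
// the attribute name and bounds are hypothetical.
package outscale

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)

func exampleFloatSchema() *schema.Schema {
	return &schema.Schema{
		Type:         schema.TypeFloat,
		Optional:     true,
		ValidateFunc: validation.FloatBetween(0.0, 1.0), // rejects anything outside [0, 1]
	}
}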
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/int.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/int.go
new file mode 100644
index 000000000..8ade5b1e6
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/int.go
@@ -0,0 +1,125 @@
+package validation
+
+import (
+ "fmt"
+ "math"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+)
+
+// IntBetween returns a SchemaValidateFunc which tests if the provided value
+// is of type int and is between min and max (inclusive)
+func IntBetween(min, max int) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(int)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be integer", k))
+ return warnings, errors
+ }
+
+ if v < min || v > max {
+ errors = append(errors, fmt.Errorf("expected %s to be in the range (%d - %d), got %d", k, min, max, v))
+ return warnings, errors
+ }
+
+ return warnings, errors
+ }
+}
+
+// IntAtLeast returns a SchemaValidateFunc which tests if the provided value
+// is of type int and is at least min (inclusive)
+func IntAtLeast(min int) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(int)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be integer", k))
+ return warnings, errors
+ }
+
+ if v < min {
+ errors = append(errors, fmt.Errorf("expected %s to be at least (%d), got %d", k, min, v))
+ return warnings, errors
+ }
+
+ return warnings, errors
+ }
+}
+
+// IntAtMost returns a SchemaValidateFunc which tests if the provided value
+// is of type int and is at most max (inclusive)
+func IntAtMost(max int) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(int)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be integer", k))
+ return warnings, errors
+ }
+
+ if v > max {
+ errors = append(errors, fmt.Errorf("expected %s to be at most (%d), got %d", k, max, v))
+ return warnings, errors
+ }
+
+ return warnings, errors
+ }
+}
+
+// IntDivisibleBy returns a SchemaValidateFunc which tests if the provided value
+// is of type int and is divisible by a given number
+func IntDivisibleBy(divisor int) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(int)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be integer", k))
+ return warnings, errors
+ }
+
+ if math.Mod(float64(v), float64(divisor)) != 0 {
+ errors = append(errors, fmt.Errorf("expected %s to be divisible by %d, got: %v", k, divisor, i))
+ return warnings, errors
+ }
+
+ return warnings, errors
+ }
+}
+
+// IntInSlice returns a SchemaValidateFunc which tests if the provided value
+// is of type int and matches the value of an element in the valid slice
+func IntInSlice(valid []int) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(int)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be integer", k))
+ return warnings, errors
+ }
+
+ for _, validInt := range valid {
+ if v == validInt {
+ return warnings, errors
+ }
+ }
+
+ errors = append(errors, fmt.Errorf("expected %s to be one of %v, got %d", k, valid, v))
+ return warnings, errors
+ }
+}
+
+// IntNotInSlice returns a SchemaValidateFunc which tests if the provided value
+// is of type int and does not match the value of any element in the given slice
+func IntNotInSlice(valid []int) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(int)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be integer", k))
+ return warnings, errors
+ }
+
+ for _, validInt := range valid {
+ if v == validInt {
+ errors = append(errors, fmt.Errorf("expected %s to not be one of %v, got %d", k, valid, v))
+ }
+ }
+
+ return warnings, errors
+ }
+}
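// A minimal sketch of the int validators above in use; the attribute names and
// the chosen bounds are hypothetical.
package outscale

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)

func exampleIntSchemas() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"port": {
			Type:         schema.TypeInt,
			Required:     true,
			ValidateFunc: validation.IntBetween(1, 65535),
		},
		"iops": {
			Type:         schema.TypeInt,
			Optional:     true,
			ValidateFunc: validation.IntDivisibleBy(100),
		},
		"raid_level": {
			Type:         schema.TypeInt,
			Optional:     true,
			ValidateFunc: validation.IntInSlice([]int{0, 1, 5, 10}),
		},
	}
}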
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/list.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/list.go
new file mode 100644
index 000000000..d60a4882f
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/list.go
@@ -0,0 +1,41 @@
+package validation
+
+import "fmt"
+
+// ValidateListUniqueStrings is a ValidateFunc that ensures a list has no
+// duplicate items in it. It's useful for when a list is needed over a set
+// because order matters, yet the items still need to be unique.
+//
+// Deprecated: use ListOfUniqueStrings
+func ValidateListUniqueStrings(i interface{}, k string) (warnings []string, errors []error) {
+ return ListOfUniqueStrings(i, k)
+}
+
+// ListOfUniqueStrings is a ValidateFunc that ensures a list has no
+// duplicate items in it. It's useful for when a list is needed over a set
+// because order matters, yet the items still need to be unique.
+func ListOfUniqueStrings(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.([]interface{})
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be List", k))
+ return warnings, errors
+ }
+
+ for _, e := range v {
+ if _, eok := e.(string); !eok {
+ errors = append(errors, fmt.Errorf("expected %q to only contain string elements, found :%v", k, e))
+ return warnings, errors
+ }
+ }
+
+ for n1, i1 := range v {
+ for n2, i2 := range v {
+ if i1.(string) == i2.(string) && n1 != n2 {
+ errors = append(errors, fmt.Errorf("expected %q to not have duplicates: found 2 or more of %v", k, i1))
+ return warnings, errors
+ }
+ }
+ }
+
+ return warnings, errors
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/meta.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/meta.go
new file mode 100644
index 000000000..b05557ac2
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/meta.go
@@ -0,0 +1,59 @@
+package validation
+
+import (
+ "fmt"
+ "reflect"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+)
+
+// NoZeroValues is a SchemaValidateFunc which tests if the provided value is
+// not a zero value. It's useful in situations where you want to catch
+// explicit zero values on things like required fields during validation.
+func NoZeroValues(i interface{}, k string) (s []string, es []error) {
+ if reflect.ValueOf(i).Interface() == reflect.Zero(reflect.TypeOf(i)).Interface() {
+ switch reflect.TypeOf(i).Kind() {
+ case reflect.String:
+ es = append(es, fmt.Errorf("%s must not be empty, got %v", k, i))
+ case reflect.Int, reflect.Float64:
+ es = append(es, fmt.Errorf("%s must not be zero, got %v", k, i))
+ default:
+ // this validator should only ever be applied to TypeString, TypeInt and TypeFloat
+ panic(fmt.Errorf("can't use NoZeroValues with %T attribute %s", i, k))
+ }
+ }
+ return
+}
+
+// All returns a SchemaValidateFunc which tests if the provided value
+// passes all provided SchemaValidateFunc
+func All(validators ...schema.SchemaValidateFunc) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) ([]string, []error) {
+ var allErrors []error
+ var allWarnings []string
+ for _, validator := range validators {
+ validatorWarnings, validatorErrors := validator(i, k)
+ allWarnings = append(allWarnings, validatorWarnings...)
+ allErrors = append(allErrors, validatorErrors...)
+ }
+ return allWarnings, allErrors
+ }
+}
+
+// Any returns a SchemaValidateFunc which tests if the provided value
+// passes any of the provided SchemaValidateFunc
+func Any(validators ...schema.SchemaValidateFunc) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) ([]string, []error) {
+ var allErrors []error
+ var allWarnings []string
+ for _, validator := range validators {
+ validatorWarnings, validatorErrors := validator(i, k)
+ if len(validatorWarnings) == 0 && len(validatorErrors) == 0 {
+ return []string{}, []error{}
+ }
+ allWarnings = append(allWarnings, validatorWarnings...)
+ allErrors = append(allErrors, validatorErrors...)
+ }
+ return allWarnings, allErrors
+ }
+}
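// A minimal sketch of composing validators with All; Any works the same way but
// accepts the value as soon as one wrapped validator passes. The attribute name
// and the naming rule are hypothetical.
package outscale

import (
	"regexp"

	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)

func exampleNameSchema() *schema.Schema {
	return &schema.Schema{
		Type:     schema.TypeString,
		Required: true,
		// Both wrapped validators must pass for the value to be accepted.
		ValidateFunc: validation.All(
			validation.StringLenBetween(1, 64),
			validation.StringMatch(
				regexp.MustCompile(`^[a-z0-9-]+$`),
				"must only contain lowercase letters, digits and hyphens",
			),
		),
	}
}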
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/network.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/network.go
new file mode 100644
index 000000000..4fea71798
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/network.go
@@ -0,0 +1,194 @@
+package validation
+
+import (
+ "bytes"
+ "fmt"
+ "net"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+)
+
+// SingleIP returns a SchemaValidateFunc which tests if the provided value
+// is of type string, and in valid single Value notation
+//
+// Deprecated: use IsIPAddress instead
+func SingleIP() schema.SchemaValidateFunc {
+ return IsIPAddress
+}
+
+// IsIPAddress is a SchemaValidateFunc which tests if the provided value is of type string and is a single IP (v4 or v6)
+func IsIPAddress(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be string", k))
+ return warnings, errors
+ }
+
+ ip := net.ParseIP(v)
+ if ip == nil {
+ errors = append(errors, fmt.Errorf("expected %s to contain a valid IP, got: %s", k, v))
+ }
+
+ return warnings, errors
+}
+
+// IsIPv6Address is a SchemaValidateFunc which tests if the provided value is of type string and a valid IPv6 address
+func IsIPv6Address(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be string", k))
+ return warnings, errors
+ }
+
+ ip := net.ParseIP(v)
+ if six := ip.To16(); six == nil {
+ errors = append(errors, fmt.Errorf("expected %s to contain a valid IPv6 address, got: %s", k, v))
+ }
+
+ return warnings, errors
+}
+
+// IsIPv4Address is a SchemaValidateFunc which tests if the provided value is of type string and a valid IPv4 address
+func IsIPv4Address(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be string", k))
+ return warnings, errors
+ }
+
+ ip := net.ParseIP(v)
+ if four := ip.To4(); four == nil {
+ errors = append(errors, fmt.Errorf("expected %s to contain a valid IPv4 address, got: %s", k, v))
+ }
+
+ return warnings, errors
+}
+
+// IPRange returns a SchemaValidateFunc which tests if the provided value is of type string, and in valid IP range
+//
+// Deprecated: use IsIPv4Range instead
+func IPRange() schema.SchemaValidateFunc {
+ return IsIPv4Range
+}
+
+// IsIPv4Range is a SchemaValidateFunc which tests if the provided value is of type string, and in valid IP range
+func IsIPv4Range(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return warnings, errors
+ }
+
+ ips := strings.Split(v, "-")
+ if len(ips) != 2 {
+ errors = append(errors, fmt.Errorf("expected %s to contain a valid IP range, got: %s", k, v))
+ return warnings, errors
+ }
+
+ ip1 := net.ParseIP(ips[0])
+ ip2 := net.ParseIP(ips[1])
+ if ip1 == nil || ip2 == nil || bytes.Compare(ip1, ip2) > 0 {
+ errors = append(errors, fmt.Errorf("expected %s to contain a valid IP range, got: %s", k, v))
+ }
+
+ return warnings, errors
+}
+
+// IsCIDR is a SchemaValidateFunc which tests if the provided value is of type string and a valid CIDR
+func IsCIDR(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return warnings, errors
+ }
+
+ if _, _, err := net.ParseCIDR(v); err != nil {
+ errors = append(errors, fmt.Errorf("expected %q to be a valid IPv4 Value, got %v: %v", k, i, err))
+ }
+
+ return warnings, errors
+}
+
+// CIDRNetwork returns a SchemaValidateFunc which tests if the provided value
+// is of type string, is in valid CIDR network notation, and has significant bits between min and max (inclusive)
+//
+// Deprecated: use IsCIDRNetwork instead
+func CIDRNetwork(min, max int) schema.SchemaValidateFunc {
+ return IsCIDRNetwork(min, max)
+}
+
+// IsCIDRNetwork returns a SchemaValidateFunc which tests if the provided value
+// is of type string, is in valid CIDR network notation, and has significant bits between min and max (inclusive)
+func IsCIDRNetwork(min, max int) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return warnings, errors
+ }
+
+ _, ipnet, err := net.ParseCIDR(v)
+ if err != nil {
+ errors = append(errors, fmt.Errorf("expected %s to contain a valid Value, got: %s with err: %s", k, v, err))
+ return warnings, errors
+ }
+
+ if ipnet == nil || v != ipnet.String() {
+ errors = append(errors, fmt.Errorf("expected %s to contain a valid network Value, expected %s, got %s",
+ k, ipnet, v))
+ }
+
+ sigbits, _ := ipnet.Mask.Size()
+ if sigbits < min || sigbits > max {
+ errors = append(errors, fmt.Errorf("expected %q to contain a network Value with between %d and %d significant bits, got: %d", k, min, max, sigbits))
+ }
+
+ return warnings, errors
+ }
+}
+
+// IsMACAddress is a SchemaValidateFunc which tests if the provided value is of type string and a valid MAC address
+func IsMACAddress(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be string", k))
+ return warnings, errors
+ }
+
+ if _, err := net.ParseMAC(v); err != nil {
+ errors = append(errors, fmt.Errorf("expected %q to be a valid MAC address, got %v: %v", k, i, err))
+ }
+
+ return warnings, errors
+}
+
+// IsPortNumber is a SchemaValidateFunc which tests if the provided value is of type int and a valid TCP port number
+func IsPortNumber(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(int)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be integer", k))
+ return warnings, errors
+ }
+
+ if 1 > v || v > 65535 {
+ errors = append(errors, fmt.Errorf("expected %q to be a valid port number, got: %v", k, v))
+ }
+
+ return warnings, errors
+}
+
+// IsPortNumberOrZero is a SchemaValidateFunc which tests if the provided value is of type int and a valid TCP port number or zero
+func IsPortNumberOrZero(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(int)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be integer", k))
+ return warnings, errors
+ }
+
+ if 0 > v || v > 65535 {
+ errors = append(errors, fmt.Errorf("expected %q to be a valid port number or 0, got: %v", k, v))
+ }
+
+ return warnings, errors
+}
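// A minimal sketch of the network validators above; the attribute names and
// the mask bounds are hypothetical.
package outscale

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)

func exampleNetworkSchemas() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"ip_range": {
			Type:         schema.TypeString,
			Required:     true,
			ValidateFunc: validation.IsCIDRNetwork(16, 29), // e.g. "10.0.0.0/16"
		},
		"private_ip": {
			Type:         schema.TypeString,
			Optional:     true,
			ValidateFunc: validation.IsIPv4Address,
		},
		"from_port": {
			Type:         schema.TypeInt,
			Optional:     true,
			ValidateFunc: validation.IsPortNumberOrZero,
		},
	}
}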
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/strings.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/strings.go
new file mode 100644
index 000000000..a5bfb6bea
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/strings.go
@@ -0,0 +1,231 @@
+package validation
+
+import (
+ "encoding/base64"
+ "fmt"
+ "regexp"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/structure"
+)
+
+// StringIsNotEmpty is a ValidateFunc that ensures a string is not empty
+func StringIsNotEmpty(i interface{}, k string) ([]string, []error) {
+ v, ok := i.(string)
+ if !ok {
+ return nil, []error{fmt.Errorf("expected type of %q to be string", k)}
+ }
+
+ if v == "" {
+ return nil, []error{fmt.Errorf("expected %q to not be an empty string, got %v", k, i)}
+ }
+
+ return nil, nil
+}
+
+// StringIsNotWhiteSpace is a ValidateFunc that ensures a string is not empty or consisting entirely of whitespace characters
+func StringIsNotWhiteSpace(i interface{}, k string) ([]string, []error) {
+ v, ok := i.(string)
+ if !ok {
+ return nil, []error{fmt.Errorf("expected type of %q to be string", k)}
+ }
+
+ if strings.TrimSpace(v) == "" {
+ return nil, []error{fmt.Errorf("expected %q to not be an empty string or whitespace", k)}
+ }
+
+ return nil, nil
+}
+
+// StringIsEmpty is a ValidateFunc that ensures a string has no characters
+func StringIsEmpty(i interface{}, k string) ([]string, []error) {
+ v, ok := i.(string)
+ if !ok {
+ return nil, []error{fmt.Errorf("expected type of %q to be string", k)}
+ }
+
+ if v != "" {
+ return nil, []error{fmt.Errorf("expected %q to be an empty string: got %v", k, v)}
+ }
+
+ return nil, nil
+}
+
+// StringIsWhiteSpace is a ValidateFunc that ensures a string is composed of entirely whitespace
+func StringIsWhiteSpace(i interface{}, k string) ([]string, []error) {
+ v, ok := i.(string)
+ if !ok {
+ return nil, []error{fmt.Errorf("expected type of %q to be string", k)}
+ }
+
+ if strings.TrimSpace(v) != "" {
+ return nil, []error{fmt.Errorf("expected %q to be an empty string or whitespace: got %v", k, v)}
+ }
+
+ return nil, nil
+}
+
+// StringLenBetween returns a SchemaValidateFunc which tests if the provided value
+// is of type string and has length between min and max (inclusive)
+func StringLenBetween(min, max int) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return warnings, errors
+ }
+
+ if len(v) < min || len(v) > max {
+ errors = append(errors, fmt.Errorf("expected length of %s to be in the range (%d - %d), got %s", k, min, max, v))
+ }
+
+ return warnings, errors
+ }
+}
+
+// StringMatch returns a SchemaValidateFunc which tests if the provided value
+// matches a given regexp. Optionally an error message can be provided to
+// return something friendlier than "must match some globby regexp".
+func StringMatch(r *regexp.Regexp, message string) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) ([]string, []error) {
+ v, ok := i.(string)
+ if !ok {
+ return nil, []error{fmt.Errorf("expected type of %s to be string", k)}
+ }
+
+ if ok := r.MatchString(v); !ok {
+ if message != "" {
+ return nil, []error{fmt.Errorf("invalid value for %s (%s)", k, message)}
+
+ }
+ return nil, []error{fmt.Errorf("expected value of %s to match regular expression %q, got %v", k, r, i)}
+ }
+ return nil, nil
+ }
+}
+
+// StringDoesNotMatch returns a SchemaValidateFunc which tests if the provided value
+// does not match a given regexp. Optionally an error message can be provided to
+// return something friendlier than "must not match some globby regexp".
+func StringDoesNotMatch(r *regexp.Regexp, message string) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) ([]string, []error) {
+ v, ok := i.(string)
+ if !ok {
+ return nil, []error{fmt.Errorf("expected type of %s to be string", k)}
+ }
+
+ if ok := r.MatchString(v); ok {
+ if message != "" {
+ return nil, []error{fmt.Errorf("invalid value for %s (%s)", k, message)}
+
+ }
+ return nil, []error{fmt.Errorf("expected value of %s to not match regular expression %q, got %v", k, r, i)}
+ }
+ return nil, nil
+ }
+}
+
+// StringInSlice returns a SchemaValidateFunc which tests if the provided value
+// is of type string and matches the value of an element in the valid slice
+// and will compare in lower case if ignoreCase is true
+func StringInSlice(valid []string, ignoreCase bool) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return warnings, errors
+ }
+
+ for _, str := range valid {
+ if v == str || (ignoreCase && strings.ToLower(v) == strings.ToLower(str)) {
+ return warnings, errors
+ }
+ }
+
+ errors = append(errors, fmt.Errorf("expected %s to be one of %v, got %s", k, valid, v))
+ return warnings, errors
+ }
+}
+
+// StringDoesNotContainAny returns a SchemaValidateFunc which validates that the
+// provided value does not contain any of the specified Unicode code points in chars.
+func StringDoesNotContainAny(chars string) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return warnings, errors
+ }
+
+ if strings.ContainsAny(v, chars) {
+ errors = append(errors, fmt.Errorf("expected value of %s to not contain any of %q, got %v", k, chars, i))
+ return warnings, errors
+ }
+
+ return warnings, errors
+ }
+}
+
+// StringIsBase64 is a ValidateFunc that ensures a string can be parsed as Base64
+func StringIsBase64(i interface{}, k string) (warnings []string, errors []error) {
+ // Empty string is not allowed
+ if warnings, errors = StringIsNotEmpty(i, k); len(errors) > 0 {
+ return
+ }
+
+ // StringIsNotEmpty above has already checked that it is a string
+ v, _ := i.(string)
+
+ if _, err := base64.StdEncoding.DecodeString(v); err != nil {
+ errors = append(errors, fmt.Errorf("expected %q to be a base64 string, got %v", k, v))
+ }
+
+ return warnings, errors
+}
+
+// ValidateJsonString is a SchemaValidateFunc which tests to make sure the
+// supplied string is valid JSON.
+//
+// Deprecated: use StringIsJSON instead
+func ValidateJsonString(i interface{}, k string) (warnings []string, errors []error) {
+ return StringIsJSON(i, k)
+}
+
+// StringIsJSON is a SchemaValidateFunc which tests to make sure the supplied string is valid JSON.
+func StringIsJSON(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return warnings, errors
+ }
+
+ if _, err := structure.NormalizeJsonString(v); err != nil {
+ errors = append(errors, fmt.Errorf("%q contains an invalid JSON: %s", k, err))
+ }
+
+ return warnings, errors
+}
+
+// ValidateRegexp is a SchemaValidateFunc which tests to make sure the
+// supplied string is a valid regular expression.
+//
+// Deprecated: use StringIsValidRegExp instead
+func ValidateRegexp(i interface{}, k string) (warnings []string, errors []error) {
+ return StringIsValidRegExp(i, k)
+}
+
+// StringIsValidRegExp is a SchemaValidateFunc which tests to make sure the supplied string is a valid regular expression.
+func StringIsValidRegExp(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %s to be string", k))
+ return warnings, errors
+ }
+
+ if _, err := regexp.Compile(v); err != nil {
+ errors = append(errors, fmt.Errorf("%q: %s", k, err))
+ }
+
+ return warnings, errors
+}
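// A minimal sketch of the string validators above; the attribute names and the
// allowed values are hypothetical.
package outscale

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)

func exampleStringSchemas() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"policy": {
			Type:         schema.TypeString,
			Optional:     true,
			ValidateFunc: validation.StringIsJSON, // replaces the deprecated ValidateJsonString
		},
		"performance": {
			Type:         schema.TypeString,
			Optional:     true,
			ValidateFunc: validation.StringInSlice([]string{"medium", "high", "highest"}, true),
		},
		"user_data": {
			Type:         schema.TypeString,
			Optional:     true,
			ValidateFunc: validation.StringIsBase64,
		},
	}
}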
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/testing.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/testing.go
new file mode 100644
index 000000000..8a2da7f89
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/testing.go
@@ -0,0 +1,43 @@
+package validation
+
+import (
+ "regexp"
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+)
+
+type testCase struct {
+ val interface{}
+ f schema.SchemaValidateFunc
+ expectedErr *regexp.Regexp
+}
+
+func runTestCases(t *testing.T, cases []testCase) {
+ matchErr := func(errs []error, r *regexp.Regexp) bool {
+ // err must match one provided
+ for _, err := range errs {
+ if r.MatchString(err.Error()) {
+ return true
+ }
+ }
+
+ return false
+ }
+
+ for i, tc := range cases {
+ _, errs := tc.f(tc.val, "test_property")
+
+ if len(errs) == 0 && tc.expectedErr == nil {
+ continue
+ }
+
+ if len(errs) != 0 && tc.expectedErr == nil {
+ t.Fatalf("expected test case %d to produce no errors, got %v", i, errs)
+ }
+
+ if !matchErr(errs, tc.expectedErr) {
+ t.Fatalf("expected test case %d to produce error matching \"%s\", got %v", i, tc.expectedErr, errs)
+ }
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/time.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/time.go
new file mode 100644
index 000000000..1c6788c68
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/time.go
@@ -0,0 +1,61 @@
+package validation
+
+import (
+ "fmt"
+ "time"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+)
+
+// IsDayOfTheWeek returns a SchemaValidateFunc which tests if the provided value is of type string and a valid English day of the week
+func IsDayOfTheWeek(ignoreCase bool) schema.SchemaValidateFunc {
+ return StringInSlice([]string{
+ "Monday",
+ "Tuesday",
+ "Wednesday",
+ "Thursday",
+ "Friday",
+ "Saturday",
+ "Sunday",
+ }, ignoreCase)
+}
+
+// IsMonth returns a SchemaValidateFunc which tests if the provided value is of type string and a valid English month
+func IsMonth(ignoreCase bool) schema.SchemaValidateFunc {
+ return StringInSlice([]string{
+ "January",
+ "February",
+ "March",
+ "April",
+ "May",
+ "June",
+ "July",
+ "August",
+ "September",
+ "October",
+ "November",
+ "December",
+ }, ignoreCase)
+}
+
+// IsRFC3339Time is a SchemaValidateFunc which tests if the provided value is of type string and a valid RFC 3339 timestamp
+func IsRFC3339Time(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be string", k))
+ return warnings, errors
+ }
+
+ if _, err := time.Parse(time.RFC3339, v); err != nil {
+ errors = append(errors, fmt.Errorf("expected %q to be a valid RFC3339 date, got %q: %+v", k, i, err))
+ }
+
+ return warnings, errors
+}
+
+// ValidateRFC3339TimeString is a ValidateFunc that ensures a string parses as time.RFC3339 format
+//
+// Deprecated: use IsRFC3339Time() instead
+func ValidateRFC3339TimeString(i interface{}, k string) (warnings []string, errors []error) {
+ return IsRFC3339Time(i, k)
+}
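// A minimal sketch of the time validators above; the attribute names are
// hypothetical.
package outscale

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)

func exampleTimeSchemas() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"expiration_date": {
			Type:         schema.TypeString,
			Optional:     true,
			ValidateFunc: validation.IsRFC3339Time, // e.g. "2021-06-30T00:00:00Z"
		},
		"maintenance_day": {
			Type:         schema.TypeString,
			Optional:     true,
			ValidateFunc: validation.IsDayOfTheWeek(true), // ignoreCase: "monday" is accepted
		},
	}
}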
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/uuid.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/uuid.go
new file mode 100644
index 000000000..00783fafc
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/uuid.go
@@ -0,0 +1,22 @@
+package validation
+
+import (
+ "fmt"
+
+ "github.com/hashicorp/go-uuid"
+)
+
+// IsUUID is a ValidateFunc that ensures a string can be parsed as UUID
+func IsUUID(i interface{}, k string) (warnings []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be string", k))
+ return
+ }
+
+ if _, err := uuid.ParseUUID(v); err != nil {
+ errors = append(errors, fmt.Errorf("expected %q to be a valid UUID, got %v", k, v))
+ }
+
+ return warnings, errors
+}
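Called directly, IsUUID reports any parse failure against the attribute name it was given. A small illustrative sketch (the attribute name "image_id" is a placeholder):

package main

import (
	"fmt"

	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)

func main() {
	// A well-formed UUID produces no errors.
	_, errs := validation.IsUUID("123e4567-e89b-12d3-a456-426614174000", "image_id")
	fmt.Println(len(errs)) // 0

	// Anything go-uuid cannot parse is reported against the attribute name.
	_, errs = validation.IsUUID("not-a-uuid", "image_id")
	fmt.Println(errs) // [expected "image_id" to be a valid UUID, got not-a-uuid]
}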
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/web.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/web.go
new file mode 100644
index 000000000..eb5437f14
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/helper/validation/web.go
@@ -0,0 +1,55 @@
+package validation
+
+import (
+ "fmt"
+ "net/url"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+)
+
+// IsURLWithHTTPS is a SchemaValidateFunc which tests if the provided value is of type string and a valid HTTPS URL
+func IsURLWithHTTPS(i interface{}, k string) (_ []string, errors []error) {
+ return IsURLWithScheme([]string{"https"})(i, k)
+}
+
+// IsURLWithHTTPorHTTPS is a SchemaValidateFunc which tests if the provided value is of type string and a valid HTTP or HTTPS URL
+func IsURLWithHTTPorHTTPS(i interface{}, k string) (_ []string, errors []error) {
+ return IsURLWithScheme([]string{"http", "https"})(i, k)
+}
+
+// IsURLWithScheme is a SchemaValidateFunc which tests if the provided value is of type string and a valid URL with any of the provided schemes
+func IsURLWithScheme(validSchemes []string) schema.SchemaValidateFunc {
+ return func(i interface{}, k string) (_ []string, errors []error) {
+ v, ok := i.(string)
+ if !ok {
+ errors = append(errors, fmt.Errorf("expected type of %q to be string", k))
+ return
+ }
+
+ if v == "" {
+ errors = append(errors, fmt.Errorf("expected %q url to not be empty, got %v", k, i))
+ return
+ }
+
+ u, err := url.Parse(v)
+ if err != nil {
+ errors = append(errors, fmt.Errorf("expected %q to be a valid url, got %v: %+v", k, v, err))
+ return
+ }
+
+ if u.Host == "" {
+ errors = append(errors, fmt.Errorf("expected %q to have a host, got %v", k, v))
+ return
+ }
+
+ for _, s := range validSchemes {
+ if u.Scheme == s {
+ return //last check so just return
+ }
+ }
+
+ errors = append(errors, fmt.Errorf("expected %q to have a url with schema of: %q, got %v", k, strings.Join(validSchemes, ","), v))
+ return
+ }
+}
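A sketch of the URL validators in a schema; the attribute names and the scheme list are purely illustrative:

package outscale

import (
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/helper/validation"
)

// exampleURLSchema wires the URL validators into a schema; the attribute
// names are hypothetical.
func exampleURLSchema() map[string]*schema.Schema {
	return map[string]*schema.Schema{
		"callback_url": {
			Type:         schema.TypeString,
			Required:     true,
			ValidateFunc: validation.IsURLWithHTTPS, // https:// only
		},
		"endpoint": {
			Type:         schema.TypeString,
			Optional:     true,
			ValidateFunc: validation.IsURLWithScheme([]string{"http", "https"}),
		},
	}
}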
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/httpclient/useragent.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/httpclient/useragent.go
new file mode 100644
index 000000000..36b494c01
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/httpclient/useragent.go
@@ -0,0 +1,26 @@
+package httpclient
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "strings"
+
+ "github.com/hashicorp/terraform-plugin-sdk/meta"
+)
+
+const uaEnvVar = "TF_APPEND_USER_AGENT"
+
+func TerraformUserAgent(version string) string {
+ ua := fmt.Sprintf("HashiCorp Terraform/%s (+https://www.terraform.io) Terraform Plugin SDK/%s", version, meta.SDKVersionString())
+
+ if add := os.Getenv(uaEnvVar); add != "" {
+ add = strings.TrimSpace(add)
+ if len(add) > 0 {
+ ua += " " + add
+ log.Printf("[DEBUG] Using modified User-Agent: %s", ua)
+ }
+ }
+
+ return ua
+}
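A minimal sketch of how a provider might consume this helper when building its HTTP client; the Terraform core version string and the endpoint URL are placeholders:

package main

import (
	"fmt"
	"net/http"

	"github.com/hashicorp/terraform-plugin-sdk/httpclient"
)

func main() {
	// The argument is the Terraform core version the provider is running
	// under; "0.12.29" is just a placeholder. TF_APPEND_USER_AGENT, if set,
	// is appended to the returned string.
	ua := httpclient.TerraformUserAgent("0.12.29")

	req, _ := http.NewRequest("GET", "https://api.example.com/", nil)
	req.Header.Set("User-Agent", ua)
	fmt.Println(req.Header.Get("User-Agent"))
	// HashiCorp Terraform/0.12.29 (+https://www.terraform.io) Terraform Plugin SDK/<sdk version>
}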
diff --git a/vendor/github.com/hashicorp/terraform/addrs/count_attr.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/count_attr.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/count_attr.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/count_attr.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/for_each_attr.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/for_each_attr.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/for_each_attr.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/for_each_attr.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/input_variable.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/input_variable.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/input_variable.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/input_variable.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/instance_key.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/instance_key.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/instance_key.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/instance_key.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/local_value.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/local_value.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/local_value.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/local_value.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/module.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/module.go
similarity index 69%
rename from vendor/github.com/hashicorp/terraform/addrs/module.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/module.go
index 6420c6301..1533f853c 100644
--- a/vendor/github.com/hashicorp/terraform/addrs/module.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/module.go
@@ -16,13 +16,6 @@ import (
// Although Module is a slice, it should be treated as immutable after creation.
type Module []string
-// RootModule is the module address representing the root of the static module
-// call tree, which is also the zero value of Module.
-//
-// Note that this is not the root of the dynamic module tree, which is instead
-// represented by RootModuleInstance.
-var RootModule Module
-
// IsRoot returns true if the receiver is the address of the root module,
// or false otherwise.
func (m Module) IsRoot() bool {
@@ -36,23 +29,6 @@ func (m Module) String() string {
return strings.Join([]string(m), ".")
}
-// Child returns the address of a child call in the receiver, identified by the
-// given name.
-func (m Module) Child(name string) Module {
- ret := make(Module, 0, len(m)+1)
- ret = append(ret, m...)
- return append(ret, name)
-}
-
-// Parent returns the address of the parent module of the receiver, or the
-// receiver itself if there is no parent (if it's the root module address).
-func (m Module) Parent() Module {
- if len(m) == 0 {
- return m
- }
- return m[:len(m)-1]
-}
-
// Call returns the module call address that corresponds to the given module
// instance, along with the address of the module that contains it.
//
diff --git a/vendor/github.com/hashicorp/terraform/addrs/module_call.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/module_call.go
similarity index 80%
rename from vendor/github.com/hashicorp/terraform/addrs/module_call.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/module_call.go
index 09596cc84..d138fade7 100644
--- a/vendor/github.com/hashicorp/terraform/addrs/module_call.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/module_call.go
@@ -18,15 +18,6 @@ func (c ModuleCall) String() string {
return "module." + c.Name
}
-// Instance returns the address of an instance of the receiver identified by
-// the given key.
-func (c ModuleCall) Instance(key InstanceKey) ModuleCallInstance {
- return ModuleCallInstance{
- Call: c,
- Key: key,
- }
-}
-
// ModuleCallInstance is the address of one instance of a module created from
// a module call, which might create multiple instances using "count" or
// "for_each" arguments.
@@ -51,15 +42,6 @@ func (c ModuleCallInstance) ModuleInstance(caller ModuleInstance) ModuleInstance
return caller.Child(c.Call.Name, c.Key)
}
-// Output returns the address of an output of the receiver identified by its
-// name.
-func (c ModuleCallInstance) Output(name string) ModuleCallOutput {
- return ModuleCallOutput{
- Call: c,
- Name: name,
- }
-}
-
// ModuleCallOutput is the address of a particular named output produced by
// an instance of a module call.
type ModuleCallOutput struct {
diff --git a/vendor/github.com/hashicorp/terraform/addrs/module_instance.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/module_instance.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/addrs/module_instance.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/module_instance.go
index c81784e7a..bb0901a26 100644
--- a/vendor/github.com/hashicorp/terraform/addrs/module_instance.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/module_instance.go
@@ -9,7 +9,7 @@ import (
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/gocty"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// ModuleInstance is an address for a particular module instance within the
@@ -264,12 +264,6 @@ func (m ModuleInstance) String() string {
return buf.String()
}
-// Equal returns true if the receiver and the given other value
-// contains the exact same parts.
-func (m ModuleInstance) Equal(o ModuleInstance) bool {
- return m.String() == o.String()
-}
-
// Less returns true if the receiver should sort before the given other value
// in a sorted list of addresses.
func (m ModuleInstance) Less(o ModuleInstance) bool {
@@ -305,27 +299,6 @@ func (m ModuleInstance) Ancestors() []ModuleInstance {
return ret
}
-// IsAncestor returns true if the receiver is an ancestor of the given
-// other value.
-func (m ModuleInstance) IsAncestor(o ModuleInstance) bool {
- // Longer or equal sized paths means the receiver cannot
- // be an ancestor of the given module insatnce.
- if len(m) >= len(o) {
- return false
- }
-
- for i, ms := range m {
- if ms.Name != o[i].Name {
- return false
- }
- if ms.InstanceKey != NoKey && ms.InstanceKey != o[i].InstanceKey {
- return false
- }
- }
-
- return true
-}
-
// Call returns the module call address that corresponds to the given module
// instance, along with the address of the module instance that contains it.
//
diff --git a/vendor/github.com/hashicorp/terraform/addrs/output_value.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/output_value.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/output_value.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/output_value.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/parse_ref.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/parse_ref.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/addrs/parse_ref.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/parse_ref.go
index d5142690b..a2ee16441 100644
--- a/vendor/github.com/hashicorp/terraform/addrs/parse_ref.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/parse_ref.go
@@ -5,7 +5,7 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// Reference describes a reference to an address with source location
diff --git a/vendor/github.com/hashicorp/terraform/addrs/parse_target.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/parse_target.go
similarity index 71%
rename from vendor/github.com/hashicorp/terraform/addrs/parse_target.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/parse_target.go
index c308525f5..5b922e8b6 100644
--- a/vendor/github.com/hashicorp/terraform/addrs/parse_target.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/parse_target.go
@@ -6,7 +6,7 @@ import (
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// Target describes a targeted address with source location information.
@@ -169,84 +169,6 @@ func ParseTargetStr(str string) (*Target, tfdiags.Diagnostics) {
return target, diags
}
-// ParseAbsResource attempts to interpret the given traversal as an absolute
-// resource address, using the same syntax as expected by ParseTarget.
-//
-// If no error diagnostics are returned, the returned target includes the
-// address that was extracted and the source range it was extracted from.
-//
-// If error diagnostics are returned then the AbsResource value is invalid and
-// must not be used.
-func ParseAbsResource(traversal hcl.Traversal) (AbsResource, tfdiags.Diagnostics) {
- addr, diags := ParseTarget(traversal)
- if diags.HasErrors() {
- return AbsResource{}, diags
- }
-
- switch tt := addr.Subject.(type) {
-
- case AbsResource:
- return tt, diags
-
- case AbsResourceInstance: // Catch likely user error with specialized message
- // Assume that the last element of the traversal must be the index,
- // since that's required for a valid resource instance address.
- indexStep := traversal[len(traversal)-1]
- diags = diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid address",
- Detail: "A resource address is required. This instance key identifies a specific resource instance, which is not expected here.",
- Subject: indexStep.SourceRange().Ptr(),
- })
- return AbsResource{}, diags
-
- case ModuleInstance: // Catch likely user error with specialized message
- diags = diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid address",
- Detail: "A resource address is required here. The module path must be followed by a resource specification.",
- Subject: traversal.SourceRange().Ptr(),
- })
- return AbsResource{}, diags
-
- default: // Generic message for other address types
- diags = diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: "Invalid address",
- Detail: "A resource address is required here.",
- Subject: traversal.SourceRange().Ptr(),
- })
- return AbsResource{}, diags
-
- }
-}
-
-// ParseAbsResourceStr is a helper wrapper around ParseAbsResource that takes a
-// string and parses it with the HCL native syntax traversal parser before
-// interpreting it.
-//
-// Error diagnostics are returned if either the parsing fails or the analysis
-// of the traversal fails. There is no way for the caller to distinguish the
-// two kinds of diagnostics programmatically. If error diagnostics are returned
-// the returned address may be incomplete.
-//
-// Since this function has no context about the source of the given string,
-// any returned diagnostics will not have meaningful source location
-// information.
-func ParseAbsResourceStr(str string) (AbsResource, tfdiags.Diagnostics) {
- var diags tfdiags.Diagnostics
-
- traversal, parseDiags := hclsyntax.ParseTraversalAbs([]byte(str), "", hcl.Pos{Line: 1, Column: 1})
- diags = diags.Append(parseDiags)
- if parseDiags.HasErrors() {
- return AbsResource{}, diags
- }
-
- addr, addrDiags := ParseAbsResource(traversal)
- diags = diags.Append(addrDiags)
- return addr, diags
-}
-
// ParseAbsResourceInstance attempts to interpret the given traversal as an
// absolute resource instance address, using the same syntax as expected by
// ParseTarget.
diff --git a/vendor/github.com/hashicorp/terraform/addrs/path_attr.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/path_attr.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/path_attr.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/path_attr.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/provider_config.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/provider_config.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/addrs/provider_config.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/provider_config.go
index aaef1d3a4..c6fce1a50 100644
--- a/vendor/github.com/hashicorp/terraform/addrs/provider_config.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/provider_config.go
@@ -3,7 +3,7 @@ package addrs
import (
"fmt"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
@@ -18,14 +18,6 @@ type ProviderConfig struct {
Alias string
}
-// NewDefaultProviderConfig returns the address of the default (un-aliased)
-// configuration for the provider with the given type name.
-func NewDefaultProviderConfig(typeName string) ProviderConfig {
- return ProviderConfig{
- Type: typeName,
- }
-}
-
// ParseProviderConfigCompact parses the given absolute traversal as a relative
// provider address in compact form. The following are examples of traversals
// that can be successfully parsed as compact relative provider configuration
diff --git a/vendor/github.com/hashicorp/terraform/addrs/provider_type.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/provider_type.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/provider_type.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/provider_type.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/referenceable.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/referenceable.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/referenceable.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/referenceable.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/resource.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/resource.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/addrs/resource.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/resource.go
index b075a6d1d..103f8a28c 100644
--- a/vendor/github.com/hashicorp/terraform/addrs/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/resource.go
@@ -28,10 +28,6 @@ func (r Resource) String() string {
}
}
-func (r Resource) Equal(o Resource) bool {
- return r.String() == o.String()
-}
-
// Instance produces the address for a specific instance of the receiver
// that is identified by the given key.
func (r Resource) Instance(key InstanceKey) ResourceInstance {
@@ -90,10 +86,6 @@ func (r ResourceInstance) String() string {
return r.Resource.String() + r.Key.String()
}
-func (r ResourceInstance) Equal(o ResourceInstance) bool {
- return r.String() == o.String()
-}
-
// Absolute returns an AbsResourceInstance from the receiver and the given module
// instance address.
func (r ResourceInstance) Absolute(module ModuleInstance) AbsResourceInstance {
@@ -157,10 +149,6 @@ func (r AbsResource) String() string {
return fmt.Sprintf("%s.%s", r.Module.String(), r.Resource.String())
}
-func (r AbsResource) Equal(o AbsResource) bool {
- return r.String() == o.String()
-}
-
// AbsResourceInstance is an absolute address for a resource instance under a
// given module path.
type AbsResourceInstance struct {
@@ -216,10 +204,6 @@ func (r AbsResourceInstance) String() string {
return fmt.Sprintf("%s.%s", r.Module.String(), r.Resource.String())
}
-func (r AbsResourceInstance) Equal(o AbsResourceInstance) bool {
- return r.String() == o.String()
-}
-
// Less returns true if the receiver should sort before the given other value
// in a sorted list of addresses.
func (r AbsResourceInstance) Less(o AbsResourceInstance) bool {
diff --git a/vendor/github.com/hashicorp/terraform/addrs/resource_phase.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/resource_phase.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/resource_phase.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/resource_phase.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/resourcemode_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/resourcemode_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/resourcemode_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/resourcemode_string.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/self.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/self.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/self.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/self.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/targetable.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/targetable.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/targetable.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/targetable.go
diff --git a/vendor/github.com/hashicorp/terraform/addrs/terraform_attr.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/terraform_attr.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/addrs/terraform_attr.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/addrs/terraform_attr.go
diff --git a/vendor/github.com/hashicorp/terraform/command/format/diagnostic.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/diagnostic.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/command/format/diagnostic.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/diagnostic.go
index ed34ddbb9..c054acf0a 100644
--- a/vendor/github.com/hashicorp/terraform/command/format/diagnostic.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/diagnostic.go
@@ -10,7 +10,7 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hcled"
"github.com/hashicorp/hcl/v2/hclparse"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/mitchellh/colorstring"
wordwrap "github.com/mitchellh/go-wordwrap"
"github.com/zclconf/go-cty/cty"
diff --git a/vendor/github.com/hashicorp/terraform/command/format/diff.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/diff.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/command/format/diff.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/diff.go
index 8f284a4a5..0a2aa7d02 100644
--- a/vendor/github.com/hashicorp/terraform/command/format/diff.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/diff.go
@@ -11,11 +11,11 @@ import (
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/plans/objchange"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// ResourceChange returns a string representation of a change to a particular
@@ -502,7 +502,7 @@ func (p *blockBodyDiffPrinter) writeValue(val cty.Value, action plans.Action, in
ty, err := ctyjson.ImpliedType(src)
// check for the special case of "null", which decodes to nil,
// and just allow it to be printed out directly
- if err == nil && !ty.IsPrimitiveType() && strings.TrimSpace(val.AsString()) != "null" {
+ if err == nil && !ty.IsPrimitiveType() && val.AsString() != "null" {
jv, err := ctyjson.Unmarshal(src, ty)
if err == nil {
p.buf.WriteString("jsonencode(")
@@ -520,21 +520,6 @@ func (p *blockBodyDiffPrinter) writeValue(val cty.Value, action plans.Action, in
}
}
}
-
- if strings.Contains(val.AsString(), "\n") {
- // It's a multi-line string, so we want to use the multi-line
- // rendering so it'll be readable. Rather than re-implement
- // that here, we'll just re-use the multi-line string diff
- // printer with no changes, which ends up producing the
- // result we want here.
- // The path argument is nil because we don't track path
- // information into strings and we know that a string can't
- // have any indices or attributes that might need to be marked
- // as (requires replacement), which is what that argument is for.
- p.writeValueDiff(val, val, indent, nil)
- break
- }
-
fmt.Fprintf(p.buf, "%q", val.AsString())
case cty.Bool:
if val.True() {
@@ -1086,8 +1071,8 @@ func ctySequenceDiff(old, new []cty.Value) []*plans.Change {
var oldI, newI, lcsI int
for oldI < len(old) || newI < len(new) || lcsI < len(lcs) {
for oldI < len(old) && (lcsI >= len(lcs) || !old[oldI].RawEquals(lcs[lcsI])) {
- isObjectDiff := old[oldI].Type().IsObjectType() && newI < len(new) && new[newI].Type().IsObjectType() && (lcsI >= len(lcs) || !new[newI].RawEquals(lcs[lcsI]))
- if isObjectDiff {
+ isObjectDiff := old[oldI].Type().IsObjectType() && (newI >= len(new) || new[newI].Type().IsObjectType())
+ if isObjectDiff && newI < len(new) {
ret = append(ret, &plans.Change{
Action: plans.Update,
Before: old[oldI],
@@ -1205,26 +1190,3 @@ func ctyNullBlockSetAsEmpty(in cty.Value) cty.Value {
// sets, so our result here is always a set.
return cty.SetValEmpty(in.Type().ElementType())
}
-
-// DiffActionSymbol returns a string that, once passed through a
-// colorstring.Colorize, will produce a result that can be written
-// to a terminal to produce a symbol made of three printable
-// characters, possibly interspersed with VT100 color codes.
-func DiffActionSymbol(action plans.Action) string {
- switch action {
- case plans.DeleteThenCreate:
- return "[red]-[reset]/[green]+[reset]"
- case plans.CreateThenDelete:
- return "[green]+[reset]/[red]-[reset]"
- case plans.Create:
- return " [green]+[reset]"
- case plans.Delete:
- return " [red]-[reset]"
- case plans.Read:
- return " [cyan]<=[reset]"
- case plans.Update:
- return " [yellow]~[reset]"
- default:
- return " ?"
- }
-}
diff --git a/vendor/github.com/hashicorp/terraform/command/format/format.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/format.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/command/format/format.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/format.go
diff --git a/vendor/github.com/hashicorp/terraform/command/format/object_id.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/object_id.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/command/format/object_id.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/object_id.go
diff --git a/vendor/github.com/hashicorp/terraform/command/format/state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/state.go
similarity index 63%
rename from vendor/github.com/hashicorp/terraform/command/format/state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/state.go
index be1ea24de..14869ad3c 100644
--- a/vendor/github.com/hashicorp/terraform/command/format/state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/command/format/state.go
@@ -6,14 +6,13 @@ import (
"sort"
"strings"
- "github.com/zclconf/go-cty/cty"
-
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
"github.com/mitchellh/colorstring"
+ "github.com/zclconf/go-cty/cty"
)
// StateOpts are the options for formatting a state.
@@ -207,116 +206,3 @@ func formatStateModule(p blockBodyDiffPrinter, m *states.Module, schemas *terraf
}
p.buf.WriteString("\n")
}
-
-func formatNestedList(indent string, outputList []interface{}) string {
- outputBuf := new(bytes.Buffer)
- outputBuf.WriteString(fmt.Sprintf("%s[", indent))
-
- lastIdx := len(outputList) - 1
-
- for i, value := range outputList {
- outputBuf.WriteString(fmt.Sprintf("\n%s%s%s", indent, " ", value))
- if i != lastIdx {
- outputBuf.WriteString(",")
- }
- }
-
- outputBuf.WriteString(fmt.Sprintf("\n%s]", indent))
- return strings.TrimPrefix(outputBuf.String(), "\n")
-}
-
-func formatListOutput(indent, outputName string, outputList []interface{}) string {
- keyIndent := ""
-
- outputBuf := new(bytes.Buffer)
-
- if outputName != "" {
- outputBuf.WriteString(fmt.Sprintf("%s%s = [", indent, outputName))
- keyIndent = " "
- }
-
- lastIdx := len(outputList) - 1
-
- for i, value := range outputList {
- switch typedValue := value.(type) {
- case string:
- outputBuf.WriteString(fmt.Sprintf("\n%s%s%s", indent, keyIndent, value))
- case []interface{}:
- outputBuf.WriteString(fmt.Sprintf("\n%s%s", indent,
- formatNestedList(indent+keyIndent, typedValue)))
- case map[string]interface{}:
- outputBuf.WriteString(fmt.Sprintf("\n%s%s", indent,
- formatNestedMap(indent+keyIndent, typedValue)))
- }
-
- if lastIdx != i {
- outputBuf.WriteString(",")
- }
- }
-
- if outputName != "" {
- if len(outputList) > 0 {
- outputBuf.WriteString(fmt.Sprintf("\n%s]", indent))
- } else {
- outputBuf.WriteString("]")
- }
- }
-
- return strings.TrimPrefix(outputBuf.String(), "\n")
-}
-
-func formatNestedMap(indent string, outputMap map[string]interface{}) string {
- ks := make([]string, 0, len(outputMap))
- for k := range outputMap {
- ks = append(ks, k)
- }
- sort.Strings(ks)
-
- outputBuf := new(bytes.Buffer)
- outputBuf.WriteString(fmt.Sprintf("%s{", indent))
-
- lastIdx := len(outputMap) - 1
- for i, k := range ks {
- v := outputMap[k]
- outputBuf.WriteString(fmt.Sprintf("\n%s%s = %v", indent+" ", k, v))
-
- if lastIdx != i {
- outputBuf.WriteString(",")
- }
- }
-
- outputBuf.WriteString(fmt.Sprintf("\n%s}", indent))
-
- return strings.TrimPrefix(outputBuf.String(), "\n")
-}
-
-func formatMapOutput(indent, outputName string, outputMap map[string]interface{}) string {
- ks := make([]string, 0, len(outputMap))
- for k := range outputMap {
- ks = append(ks, k)
- }
- sort.Strings(ks)
-
- keyIndent := ""
-
- outputBuf := new(bytes.Buffer)
- if outputName != "" {
- outputBuf.WriteString(fmt.Sprintf("%s%s = {", indent, outputName))
- keyIndent = " "
- }
-
- for _, k := range ks {
- v := outputMap[k]
- outputBuf.WriteString(fmt.Sprintf("\n%s%s%s = %v", indent, keyIndent, k, v))
- }
-
- if outputName != "" {
- if len(outputMap) > 0 {
- outputBuf.WriteString(fmt.Sprintf("\n%s}", indent))
- } else {
- outputBuf.WriteString("}")
- }
- }
-
- return strings.TrimPrefix(outputBuf.String(), "\n")
-}
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/backend.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/backend.go
new file mode 100644
index 000000000..76d161d72
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/backend.go
@@ -0,0 +1,24 @@
+package configs
+
+import (
+ "github.com/hashicorp/hcl/v2"
+)
+
+// Backend represents a "backend" block inside a "terraform" block in a module
+// or file.
+type Backend struct {
+ Type string
+ Config hcl.Body
+
+ TypeRange hcl.Range
+ DeclRange hcl.Range
+}
+
+func decodeBackendBlock(block *hcl.Block) (*Backend, hcl.Diagnostics) {
+ return &Backend{
+ Type: block.Labels[0],
+ TypeRange: block.LabelRanges[0],
+ Config: block.Body,
+ DeclRange: block.DefRange,
+ }, nil
+}
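decodeBackendBlock is internal to the SDK, but the shape of what it decodes can be illustrated with plain hcl/v2 calls: the single "backend" label becomes Backend.Type and the block body becomes Backend.Config. A hedged sketch with a hypothetical configuration:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclparse"
)

const cfg = `
terraform {
  backend "s3" {
    bucket = "my-state"
    key    = "prod/terraform.tfstate"
  }
}
`

func main() {
	f, diags := hclparse.NewParser().ParseHCL([]byte(cfg), "main.tf")
	if diags.HasErrors() {
		panic(diags)
	}

	// Walk terraform -> backend, mirroring what decodeBackendBlock receives.
	content, _ := f.Body.Content(&hcl.BodySchema{
		Blocks: []hcl.BlockHeaderSchema{{Type: "terraform"}},
	})
	for _, tfBlock := range content.Blocks {
		inner, _ := tfBlock.Body.Content(&hcl.BodySchema{
			Blocks: []hcl.BlockHeaderSchema{{Type: "backend", LabelNames: []string{"type"}}},
		})
		for _, b := range inner.Blocks {
			fmt.Println("backend type:", b.Labels[0]) // "s3"
		}
	}
}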
diff --git a/vendor/github.com/hashicorp/terraform/configs/compat_shim.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/compat_shim.go
similarity index 51%
rename from vendor/github.com/hashicorp/terraform/configs/compat_shim.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/compat_shim.go
index b645ac890..e594ebd40 100644
--- a/vendor/github.com/hashicorp/terraform/configs/compat_shim.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/compat_shim.go
@@ -69,21 +69,28 @@ func shimTraversalInString(expr hcl.Expression, wantKeyword bool) (hcl.Expressio
)
diags = append(diags, tDiags...)
- if wantKeyword {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagWarning,
- Summary: "Quoted keywords are deprecated",
- Detail: "In this context, keywords are expected literally rather than in quotes. Terraform 0.11 and earlier required quotes, but quoted keywords are now deprecated and will be removed in a future version of Terraform. Remove the quotes surrounding this keyword to silence this warning.",
- Subject: &srcRange,
- })
- } else {
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagWarning,
- Summary: "Quoted references are deprecated",
- Detail: "In this context, references are expected literally rather than in quotes. Terraform 0.11 and earlier required quotes, but quoted references are now deprecated and will be removed in a future version of Terraform. Remove the quotes surrounding this reference to silence this warning.",
- Subject: &srcRange,
- })
- }
+ // For initial release our deprecation warnings are disabled to allow
+ // a period where modules can be compatible with both old and new
+ // conventions.
+ // FIXME: Re-enable these deprecation warnings in a release prior to
+ // Terraform 0.13 and then remove the shims altogether for 0.13.
+ /*
+ if wantKeyword {
+ diags = append(diags, &hcl.Diagnostic{
+ Severity: hcl.DiagWarning,
+ Summary: "Quoted keywords are deprecated",
+ Detail: "In this context, keywords are expected literally rather than in quotes. Previous versions of Terraform required quotes, but that usage is now deprecated. Remove the quotes surrounding this keyword to silence this warning.",
+ Subject: &srcRange,
+ })
+ } else {
+ diags = append(diags, &hcl.Diagnostic{
+ Severity: hcl.DiagWarning,
+ Summary: "Quoted references are deprecated",
+ Detail: "In this context, references are expected literally rather than in quotes. Previous versions of Terraform required quotes, but that usage is now deprecated. Remove the quotes surrounding this reference to silence this warning.",
+ Subject: &srcRange,
+ })
+ }
+ */
return &hclsyntax.ScopeTraversalExpr{
Traversal: traversal,
@@ -107,58 +114,3 @@ func shimIsIgnoreChangesStar(expr hcl.Expression) bool {
}
return val.AsString() == "*"
}
-
-// warnForDeprecatedInterpolations returns warning diagnostics if the given
-// body can be proven to contain attributes whose expressions are native
-// syntax expressions consisting entirely of a single template interpolation,
-// which is a deprecated way to include a non-literal value in configuration.
-//
-// This is a best-effort sort of thing which relies on the physical HCL native
-// syntax AST, so it might not catch everything. The main goal is to catch the
-// "obvious" cases in order to help spread awareness that this old form is
-// deprecated, when folks copy it from older examples they've found on the
-// internet that were written for Terraform 0.11 or earlier.
-func warnForDeprecatedInterpolationsInBody(body hcl.Body) hcl.Diagnostics {
- var diags hcl.Diagnostics
-
- nativeBody, ok := body.(*hclsyntax.Body)
- if !ok {
- // If it's not native syntax then we've nothing to do here.
- return diags
- }
-
- for _, attr := range nativeBody.Attributes {
- moreDiags := warnForDeprecatedInterpolationsInExpr(attr.Expr)
- diags = append(diags, moreDiags...)
- }
-
- for _, block := range nativeBody.Blocks {
- // We'll also go hunting in nested blocks
- moreDiags := warnForDeprecatedInterpolationsInBody(block.Body)
- diags = append(diags, moreDiags...)
- }
-
- return diags
-}
-
-func warnForDeprecatedInterpolationsInExpr(expr hcl.Expression) hcl.Diagnostics {
- var diags hcl.Diagnostics
-
- if _, ok := expr.(*hclsyntax.TemplateWrapExpr); !ok {
- // We're only interested in TemplateWrapExpr, because that's how
- // the HCL native syntax parser represents the case of a template
- // that consists entirely of a single interpolation expression, which
- // is therefore subject to the special case of passing through the
- // inner value without conversion to string.
- return diags
- }
-
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagWarning,
- Summary: "Interpolation-only expressions are deprecated",
- Detail: "Terraform 0.11 and earlier required all non-constant expressions to be provided via interpolation syntax, but this pattern is now deprecated. To silence this warning, remove the \"${ sequence from the start and the }\" sequence from the end of this expression, leaving just the inner expression.\n\nTemplate interpolation syntax is still used to construct strings from expressions when the template includes multiple interpolation sequences or a mixture of literal strings and interpolations. This deprecation applies only to templates that consist entirely of a single interpolation sequence.",
- Subject: expr.Range().Ptr(),
- })
-
- return diags
-}
diff --git a/vendor/github.com/hashicorp/terraform/configs/config.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/config.go
similarity index 83%
rename from vendor/github.com/hashicorp/terraform/configs/config.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/config.go
index cc10fb9c4..82c88a10f 100644
--- a/vendor/github.com/hashicorp/terraform/configs/config.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/config.go
@@ -5,7 +5,7 @@ import (
version "github.com/hashicorp/go-version"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// A Config is a node in the tree of modules within a configuration.
@@ -86,18 +86,6 @@ func NewEmptyConfig() *Config {
return ret
}
-// Depth returns the number of "hops" the receiver is from the root of its
-// module tree, with the root module having a depth of zero.
-func (c *Config) Depth() int {
- ret := 0
- this := c
- for this.Parent != nil {
- ret++
- this = this.Parent
- }
- return ret
-}
-
// DeepEach calls the given function once for each module in the tree, starting
// with the receiver.
//
@@ -116,35 +104,6 @@ func (c *Config) DeepEach(cb func(c *Config)) {
}
}
-// AllModules returns a slice of all the receiver and all of its descendent
-// nodes in the module tree, in the same order they would be visited by
-// DeepEach.
-func (c *Config) AllModules() []*Config {
- var ret []*Config
- c.DeepEach(func(c *Config) {
- ret = append(ret, c)
- })
- return ret
-}
-
-// Descendent returns the descendent config that has the given path beneath
-// the receiver, or nil if there is no such module.
-//
-// The path traverses the static module tree, prior to any expansion to handle
-// count and for_each arguments.
-//
-// An empty path will just return the receiver, and is therefore pointless.
-func (c *Config) Descendent(path addrs.Module) *Config {
- current := c
- for _, name := range path {
- current = current.Children[name]
- if current == nil {
- return nil
- }
- }
- return current
-}
-
// DescendentForInstance is like Descendent except that it accepts a path
// to a particular module instance in the dynamic module graph, returning
// the node from the static module graph that corresponds to it.
diff --git a/vendor/github.com/hashicorp/terraform/configs/config_build.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/config_build.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/configs/config_build.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/config_build.go
index c38a67926..cb46b65aa 100644
--- a/vendor/github.com/hashicorp/terraform/configs/config_build.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/config_build.go
@@ -5,7 +5,7 @@ import (
version "github.com/hashicorp/go-version"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// BuildConfig constructs a Config from a root module by loading all of its
@@ -158,23 +158,3 @@ type ModuleRequest struct {
// rather than to either its source address or its version number.
CallRange hcl.Range
}
-
-// DisabledModuleWalker is a ModuleWalker that doesn't support
-// child modules at all, and so will return an error if asked to load one.
-//
-// This is provided primarily for testing. There is no good reason to use this
-// in the main application.
-var DisabledModuleWalker ModuleWalker
-
-func init() {
- DisabledModuleWalker = ModuleWalkerFunc(func(req *ModuleRequest) (*Module, *version.Version, hcl.Diagnostics) {
- return nil, nil, hcl.Diagnostics{
- {
- Severity: hcl.DiagError,
- Summary: "Child modules are not supported",
- Detail: "Child module calls are not allowed in this context.",
- Subject: &req.CallRange,
- },
- }
- })
-}
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/copy_dir.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/copy_dir.go
similarity index 92%
rename from vendor/github.com/hashicorp/terraform/configs/configload/copy_dir.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/copy_dir.go
index 840a7aa97..ebbeb3b62 100644
--- a/vendor/github.com/hashicorp/terraform/configs/configload/copy_dir.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/copy_dir.go
@@ -4,6 +4,7 @@ import (
"io"
"os"
"path/filepath"
+ "strings"
)
// copyDir copies the src directory contents into dst. Both directories
@@ -23,6 +24,15 @@ func copyDir(dst, src string) error {
return nil
}
+ if strings.HasPrefix(filepath.Base(path), ".") {
+ // Skip any dot files
+ if info.IsDir() {
+ return filepath.SkipDir
+ } else {
+ return nil
+ }
+ }
+
// The "path" has the src prefixed to it. We need to join our
// destination with the path without the src on it.
dstPath := filepath.Join(dst, path[len(src):])
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configload/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/inode.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/inode.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configload/inode.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/inode.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/inode_freebsd.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/inode_freebsd.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configload/inode_freebsd.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/inode_freebsd.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/inode_windows.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/inode_windows.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configload/inode_windows.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/inode_windows.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/loader.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/loader.go
similarity index 63%
rename from vendor/github.com/hashicorp/terraform/configs/configload/loader.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/loader.go
index a09b80c8c..0d12d7d2a 100644
--- a/vendor/github.com/hashicorp/terraform/configs/configload/loader.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/loader.go
@@ -2,11 +2,10 @@ package configload
import (
"fmt"
- "path/filepath"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry"
"github.com/hashicorp/terraform-svchost/disco"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/registry"
"github.com/spf13/afero"
)
@@ -95,56 +94,8 @@ func (l *Loader) RefreshModules() error {
return l.modules.readModuleManifestSnapshot()
}
-// Parser returns the underlying parser for this loader.
-//
-// This is useful for loading other sorts of files than the module directories
-// that a loader deals with, since then they will share the source code cache
-// for this loader and can thus be shown as snippets in diagnostic messages.
-func (l *Loader) Parser() *configs.Parser {
- return l.parser
-}
-
// Sources returns the source code cache for the underlying parser of this
// loader. This is a shorthand for l.Parser().Sources().
func (l *Loader) Sources() map[string][]byte {
return l.parser.Sources()
}
-
-// IsConfigDir returns true if and only if the given directory contains at
-// least one Terraform configuration file. This is a wrapper around calling
-// the same method name on the loader's parser.
-func (l *Loader) IsConfigDir(path string) bool {
- return l.parser.IsConfigDir(path)
-}
-
-// ImportSources writes into the receiver's source code the given source
-// code buffers.
-//
-// This is useful in the situation where an ancillary loader is created for
-// some reason (e.g. loading config from a plan file) but the cached source
-// code from that loader must be imported into the "main" loader in order
-// to return source code snapshots in diagnostic messages.
-//
-// loader.ImportSources(otherLoader.Sources())
-func (l *Loader) ImportSources(sources map[string][]byte) {
- p := l.Parser()
- for name, src := range sources {
- p.ForceFileSource(name, src)
- }
-}
-
-// ImportSourcesFromSnapshot writes into the receiver's source code the
-// source files from the given snapshot.
-//
-// This is similar to ImportSources but knows how to unpack and flatten a
-// snapshot data structure to get the corresponding flat source file map.
-func (l *Loader) ImportSourcesFromSnapshot(snap *Snapshot) {
- p := l.Parser()
- for _, m := range snap.Modules {
- baseDir := m.Dir
- for fn, src := range m.Files {
- fullPath := filepath.Join(baseDir, fn)
- p.ForceFileSource(fullPath, src)
- }
- }
-}
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/loader_load.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/loader_load.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/configs/configload/loader_load.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/loader_load.go
index 80b2de1b5..bcfa733e6 100644
--- a/vendor/github.com/hashicorp/terraform/configs/configload/loader_load.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/loader_load.go
@@ -5,7 +5,7 @@ import (
version "github.com/hashicorp/go-version"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
)
// LoadConfig reads the Terraform module in the given directory and uses it as the
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/loader_snapshot.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/loader_snapshot.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/configs/configload/loader_snapshot.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/loader_snapshot.go
index 9e83895cf..0772edc71 100644
--- a/vendor/github.com/hashicorp/terraform/configs/configload/loader_snapshot.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/loader_snapshot.go
@@ -10,8 +10,8 @@ import (
version "github.com/hashicorp/go-version"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/internal/modsdir"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/modsdir"
"github.com/spf13/afero"
)
@@ -77,18 +77,6 @@ type Snapshot struct {
Modules map[string]*SnapshotModule
}
-// NewEmptySnapshot constructs and returns a snapshot containing only an empty
-// root module. This is not useful for anything except placeholders in tests.
-func NewEmptySnapshot() *Snapshot {
- return &Snapshot{
- Modules: map[string]*SnapshotModule{
- "": &SnapshotModule{
- Files: map[string][]byte{},
- },
- },
- }
-}
-
// SnapshotModule represents a single module within a Snapshot.
type SnapshotModule struct {
// Dir is the path, relative to the root directory given when the
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/module_mgr.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/module_mgr.go
similarity index 77%
rename from vendor/github.com/hashicorp/terraform/configs/configload/module_mgr.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/module_mgr.go
index 16871e310..797f50d24 100644
--- a/vendor/github.com/hashicorp/terraform/configs/configload/module_mgr.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/module_mgr.go
@@ -4,9 +4,9 @@ import (
"os"
"path/filepath"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/modsdir"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry"
"github.com/hashicorp/terraform-svchost/disco"
- "github.com/hashicorp/terraform/internal/modsdir"
- "github.com/hashicorp/terraform/registry"
"github.com/spf13/afero"
)
@@ -60,17 +60,3 @@ func (m *moduleMgr) readModuleManifestSnapshot() error {
m.manifest, err = modsdir.ReadManifestSnapshot(r)
return err
}
-
-// writeModuleManifestSnapshot writes a snapshot of the current manifest
-// to the filesystem.
-//
-// The caller must guarantee no concurrent modifications of the manifest for
-// the duration of a call to this function, or the behavior is undefined.
-func (m *moduleMgr) writeModuleManifestSnapshot() error {
- w, err := m.FS.Create(m.manifestSnapshotPath())
- if err != nil {
- return err
- }
-
- return m.manifest.WriteSnapshot(w)
-}
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/testing.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/testing.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configload/testing.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload/testing.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/coerce_value.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/coerce_value.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/coerce_value.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/coerce_value.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/decoder_spec.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/decoder_spec.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/decoder_spec.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/decoder_spec.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/empty_value.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/empty_value.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/empty_value.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/empty_value.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/implied_type.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/implied_type.go
similarity index 50%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/implied_type.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/implied_type.go
index a81b7eab4..51f51cebc 100644
--- a/vendor/github.com/hashicorp/terraform/configs/configschema/implied_type.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/implied_type.go
@@ -19,24 +19,3 @@ func (b *Block) ImpliedType() cty.Type {
return hcldec.ImpliedType(b.DecoderSpec())
}
-
-// ContainsSensitive returns true if any of the attributes of the receiving
-// block or any of its descendent blocks are marked as sensitive.
-//
-// Blocks themselves cannot be sensitive as a whole -- sensitivity is a
-// per-attribute idea -- but sometimes we want to include a whole object
-// decoded from a block in some UI output, and that is safe to do only if
-// none of the contained attributes are sensitive.
-func (b *Block) ContainsSensitive() bool {
- for _, attrS := range b.Attributes {
- if attrS.Sensitive {
- return true
- }
- }
- for _, blockS := range b.BlockTypes {
- if blockS.ContainsSensitive() {
- return true
- }
- }
- return false
-}
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/internal_validate.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/internal_validate.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/internal_validate.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/internal_validate.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/nestingmode_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/nestingmode_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/nestingmode_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/nestingmode_string.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/none_required.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/none_required.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/none_required.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/none_required.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/schema.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/schema.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/schema.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/configschema/validate_traversal.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/validate_traversal.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/configs/configschema/validate_traversal.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/validate_traversal.go
index 9fc2de38c..446705baf 100644
--- a/vendor/github.com/hashicorp/terraform/configs/configschema/validate_traversal.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema/validate_traversal.go
@@ -8,8 +8,8 @@ import (
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/helper/didyoumean"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/helper/didyoumean"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// StaticValidateTraversal checks whether the given traversal (which must be
diff --git a/vendor/github.com/hashicorp/terraform/configs/depends_on.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/depends_on.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/depends_on.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/depends_on.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/hcl2shim/flatmap.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/flatmap.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/hcl2shim/flatmap.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/flatmap.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/hcl2shim/paths.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/paths.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/hcl2shim/paths.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/paths.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/hcl2shim/single_attr_body.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/single_attr_body.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/hcl2shim/single_attr_body.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/single_attr_body.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/hcl2shim/values.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/values.go
similarity index 67%
rename from vendor/github.com/hashicorp/terraform/configs/hcl2shim/values.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/values.go
index daeb0b8e0..a074c749d 100644
--- a/vendor/github.com/hashicorp/terraform/configs/hcl2shim/values.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/values.go
@@ -4,10 +4,9 @@ import (
"fmt"
"math/big"
- "github.com/hashicorp/hil/ast"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
)
// UnknownVariableValue is a sentinel value that can be used
@@ -229,125 +228,3 @@ func HCL2ValueFromConfigValue(v interface{}) cty.Value {
panic(fmt.Errorf("can't convert %#v to cty.Value", v))
}
}
-
-func HILVariableFromHCL2Value(v cty.Value) ast.Variable {
- if v.IsNull() {
- // Caller should guarantee/check this before calling
- panic("Null values cannot be represented in HIL")
- }
- if !v.IsKnown() {
- return ast.Variable{
- Type: ast.TypeUnknown,
- Value: UnknownVariableValue,
- }
- }
-
- switch v.Type() {
- case cty.Bool:
- return ast.Variable{
- Type: ast.TypeBool,
- Value: v.True(),
- }
- case cty.Number:
- v := ConfigValueFromHCL2(v)
- switch tv := v.(type) {
- case int:
- return ast.Variable{
- Type: ast.TypeInt,
- Value: tv,
- }
- case float64:
- return ast.Variable{
- Type: ast.TypeFloat,
- Value: tv,
- }
- default:
- // should never happen
- panic("invalid return value for configValueFromHCL2")
- }
- case cty.String:
- return ast.Variable{
- Type: ast.TypeString,
- Value: v.AsString(),
- }
- }
-
- if v.Type().IsListType() || v.Type().IsSetType() || v.Type().IsTupleType() {
- l := make([]ast.Variable, 0, v.LengthInt())
- it := v.ElementIterator()
- for it.Next() {
- _, ev := it.Element()
- l = append(l, HILVariableFromHCL2Value(ev))
- }
- // If we were given a tuple then this could actually produce an invalid
- // list with non-homogenous types, which we expect to be caught inside
- // HIL just like a user-supplied non-homogenous list would be.
- return ast.Variable{
- Type: ast.TypeList,
- Value: l,
- }
- }
-
- if v.Type().IsMapType() || v.Type().IsObjectType() {
- l := make(map[string]ast.Variable)
- it := v.ElementIterator()
- for it.Next() {
- ek, ev := it.Element()
- l[ek.AsString()] = HILVariableFromHCL2Value(ev)
- }
- // If we were given an object then this could actually produce an invalid
- // map with non-homogenous types, which we expect to be caught inside
- // HIL just like a user-supplied non-homogenous map would be.
- return ast.Variable{
- Type: ast.TypeMap,
- Value: l,
- }
- }
-
- // If we fall out here then we have some weird type that we haven't
- // accounted for. This should never happen unless the caller is using
- // capsule types, and we don't currently have any such types defined.
- panic(fmt.Errorf("can't convert %#v to HIL variable", v))
-}
-
-func HCL2ValueFromHILVariable(v ast.Variable) cty.Value {
- switch v.Type {
- case ast.TypeList:
- vals := make([]cty.Value, len(v.Value.([]ast.Variable)))
- for i, ev := range v.Value.([]ast.Variable) {
- vals[i] = HCL2ValueFromHILVariable(ev)
- }
- return cty.TupleVal(vals)
- case ast.TypeMap:
- vals := make(map[string]cty.Value, len(v.Value.(map[string]ast.Variable)))
- for k, ev := range v.Value.(map[string]ast.Variable) {
- vals[k] = HCL2ValueFromHILVariable(ev)
- }
- return cty.ObjectVal(vals)
- default:
- return HCL2ValueFromConfigValue(v.Value)
- }
-}
-
-func HCL2TypeForHILType(hilType ast.Type) cty.Type {
- switch hilType {
- case ast.TypeAny:
- return cty.DynamicPseudoType
- case ast.TypeUnknown:
- return cty.DynamicPseudoType
- case ast.TypeBool:
- return cty.Bool
- case ast.TypeInt:
- return cty.Number
- case ast.TypeFloat:
- return cty.Number
- case ast.TypeString:
- return cty.String
- case ast.TypeList:
- return cty.List(cty.DynamicPseudoType)
- case ast.TypeMap:
- return cty.Map(cty.DynamicPseudoType)
- default:
- return cty.NilType // equilvalent to ast.TypeInvalid
- }
-}
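The hunk above removes the HIL bridging helpers along with the github.com/hashicorp/hil import, so this package no longer converts between HIL and cty values. Code that still bridges legacy HIL values can keep its own copy of the mapping; a condensed sketch of the removed HCL2TypeForHILType, under the assumption that the caller depends on github.com/hashicorp/hil directly (the package name hilbridge is made up for the example):

package hilbridge

import (
	"github.com/hashicorp/hil/ast"
	"github.com/zclconf/go-cty/cty"
)

// hcl2TypeForHILType reproduces the removed mapping: each HIL type is
// translated to its closest cty equivalent.
func hcl2TypeForHILType(hilType ast.Type) cty.Type {
	switch hilType {
	case ast.TypeAny, ast.TypeUnknown:
		return cty.DynamicPseudoType
	case ast.TypeBool:
		return cty.Bool
	case ast.TypeInt, ast.TypeFloat:
		return cty.Number
	case ast.TypeString:
		return cty.String
	case ast.TypeList:
		return cty.List(cty.DynamicPseudoType)
	case ast.TypeMap:
		return cty.Map(cty.DynamicPseudoType)
	default:
		return cty.NilType // equivalent to ast.TypeInvalid
	}
}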
diff --git a/vendor/github.com/hashicorp/terraform/configs/hcl2shim/values_equiv.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/values_equiv.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/hcl2shim/values_equiv.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim/values_equiv.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/module.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/configs/module.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module.go
index bd4182a5c..78223c3b8 100644
--- a/vendor/github.com/hashicorp/terraform/configs/module.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module.go
@@ -5,7 +5,7 @@ import (
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// Module is a container for a set of configuration constructs that are
diff --git a/vendor/github.com/hashicorp/terraform/configs/module_call.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module_call.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/module_call.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module_call.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/module_merge.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module_merge.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/configs/module_merge.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module_merge.go
index 401b1c0a8..6fb82acfb 100644
--- a/vendor/github.com/hashicorp/terraform/configs/module_merge.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module_merge.go
@@ -3,7 +3,7 @@ package configs
import (
"fmt"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
"github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty/cty"
diff --git a/vendor/github.com/hashicorp/terraform/configs/module_merge_body.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module_merge_body.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/module_merge_body.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/module_merge_body.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/named_values.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/named_values.go
similarity index 84%
rename from vendor/github.com/hashicorp/terraform/configs/named_values.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/named_values.go
index 280b70692..8c8398e0b 100644
--- a/vendor/github.com/hashicorp/terraform/configs/named_values.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/named_values.go
@@ -9,12 +9,10 @@ import (
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
-
- "github.com/hashicorp/terraform/addrs"
)
// A consistent detail message for all "not a valid identifier" diagnostics.
-const badIdentifierDetail = "A name must start with a letter or underscore and may contain only letters, digits, underscores, and dashes."
+const badIdentifierDetail = "A name must start with a letter and may contain only letters, digits, underscores, and dashes."
// Variable represents a "variable" block in a module or file.
type Variable struct {
@@ -138,28 +136,10 @@ func decodeVariableType(expr hcl.Expression) (cty.Type, VariableParsingMode, hcl
str := val.AsString()
switch str {
case "string":
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagWarning,
- Summary: "Quoted type constraints are deprecated",
- Detail: "Terraform 0.11 and earlier required type constraints to be given in quotes, but that form is now deprecated and will be removed in a future version of Terraform. To silence this warning, remove the quotes around \"string\".",
- Subject: expr.Range().Ptr(),
- })
return cty.String, VariableParseLiteral, diags
case "list":
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagWarning,
- Summary: "Quoted type constraints are deprecated",
- Detail: "Terraform 0.11 and earlier required type constraints to be given in quotes, but that form is now deprecated and will be removed in a future version of Terraform. To silence this warning, remove the quotes around \"list\" and write list(string) instead to explicitly indicate that the list elements are strings.",
- Subject: expr.Range().Ptr(),
- })
return cty.List(cty.DynamicPseudoType), VariableParseHCL, diags
case "map":
- diags = append(diags, &hcl.Diagnostic{
- Severity: hcl.DiagWarning,
- Summary: "Quoted type constraints are deprecated",
- Detail: "Terraform 0.11 and earlier required type constraints to be given in quotes, but that form is now deprecated and will be removed in a future version of Terraform. To silence this warning, remove the quotes around \"map\" and write map(string) instead to explicitly indicate that the map elements are strings.",
- Subject: expr.Range().Ptr(),
- })
return cty.Map(cty.DynamicPseudoType), VariableParseHCL, diags
default:
return cty.DynamicPseudoType, VariableParseHCL, hcl.Diagnostics{{
@@ -197,12 +177,6 @@ func decodeVariableType(expr hcl.Expression) (cty.Type, VariableParsingMode, hcl
}
}
-// Required returns true if this variable is required to be set by the caller,
-// or false if there is a default value that will be used when it isn't set.
-func (v *Variable) Required() bool {
- return v.Default == cty.NilVal
-}
-
// VariableParsingMode defines how values of a particular variable given by
// text-only mechanisms (command line arguments and environment variables)
// should be parsed to produce the final value.
@@ -347,14 +321,6 @@ func decodeLocalsBlock(block *hcl.Block) ([]*Local, hcl.Diagnostics) {
return locals, diags
}
-// Addr returns the address of the local value declared by the receiver,
-// relative to its containing module.
-func (l *Local) Addr() addrs.LocalValue {
- return addrs.LocalValue{
- Name: l.Name,
- }
-}
-
var variableBlockSchema = &hcl.BodySchema{
Attributes: []hcl.AttributeSchema{
{
diff --git a/vendor/github.com/hashicorp/terraform/configs/parser.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/parser.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/parser.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/parser.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/parser_config.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/parser_config.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/parser_config.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/parser_config.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/parser_config_dir.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/parser_config_dir.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/configs/parser_config_dir.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/parser_config_dir.go
index 2923af93a..afdd69833 100644
--- a/vendor/github.com/hashicorp/terraform/configs/parser_config_dir.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/parser_config_dir.go
@@ -154,9 +154,9 @@ func IsEmptyDir(path string) (bool, error) {
}
p := NewParser(nil)
- fs, os, diags := p.dirFiles(path)
- if diags.HasErrors() {
- return false, diags
+ fs, os, err := p.dirFiles(path)
+ if err != nil {
+ return false, err
}
return len(fs) == 0 && len(os) == 0, nil
diff --git a/vendor/github.com/hashicorp/terraform/configs/parser_values.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/parser_values.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/parser_values.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/parser_values.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/provider.go
similarity index 87%
rename from vendor/github.com/hashicorp/terraform/configs/provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/provider.go
index 17754a669..cb9ba1f3f 100644
--- a/vendor/github.com/hashicorp/terraform/configs/provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/provider.go
@@ -7,7 +7,7 @@ import (
"github.com/hashicorp/hcl/v2/gohcl"
"github.com/hashicorp/hcl/v2/hclsyntax"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// Provider represents a "provider" block in a module or file. A provider
@@ -27,17 +27,7 @@ type Provider struct {
}
func decodeProviderBlock(block *hcl.Block) (*Provider, hcl.Diagnostics) {
- var diags hcl.Diagnostics
-
- // Produce deprecation messages for any pre-0.12-style
- // single-interpolation-only expressions. We do this up front here because
- // then we can also catch instances inside special blocks like "connection",
- // before PartialContent extracts them.
- moreDiags := warnForDeprecatedInterpolationsInBody(block.Body)
- diags = append(diags, moreDiags...)
-
- content, config, moreDiags := block.Body.PartialContent(providerBlockSchema)
- diags = append(diags, moreDiags...)
+ content, config, diags := block.Body.PartialContent(providerBlockSchema)
provider := &Provider{
Name: block.Labels[0],
diff --git a/vendor/github.com/hashicorp/terraform/configs/provisioner.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/provisioner.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/provisioner.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/provisioner.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/provisioneronfailure_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/provisioneronfailure_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/provisioneronfailure_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/provisioneronfailure_string.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/provisionerwhen_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/provisionerwhen_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/provisionerwhen_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/provisionerwhen_string.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/resource.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/resource.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/configs/resource.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/resource.go
index 4d5506e29..cd9991a38 100644
--- a/vendor/github.com/hashicorp/terraform/configs/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/resource.go
@@ -7,7 +7,7 @@ import (
"github.com/hashicorp/hcl/v2/gohcl"
"github.com/hashicorp/hcl/v2/hclsyntax"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// Resource represents a "resource" or "data" block in a module or file.
@@ -76,7 +76,6 @@ func (r *Resource) ProviderConfigAddr() addrs.ProviderConfig {
}
func decodeResourceBlock(block *hcl.Block) (*Resource, hcl.Diagnostics) {
- var diags hcl.Diagnostics
r := &Resource{
Mode: addrs.ManagedResourceMode,
Type: block.Labels[0],
@@ -86,15 +85,7 @@ func decodeResourceBlock(block *hcl.Block) (*Resource, hcl.Diagnostics) {
Managed: &ManagedResource{},
}
- // Produce deprecation messages for any pre-0.12-style
- // single-interpolation-only expressions. We do this up front here because
- // then we can also catch instances inside special blocks like "connection",
- // before PartialContent extracts them.
- moreDiags := warnForDeprecatedInterpolationsInBody(block.Body)
- diags = append(diags, moreDiags...)
-
- content, remain, moreDiags := block.Body.PartialContent(resourceBlockSchema)
- diags = append(diags, moreDiags...)
+ content, remain, diags := block.Body.PartialContent(resourceBlockSchema)
r.Config = remain
if !hclsyntax.ValidIdentifier(r.Type) {
diff --git a/vendor/github.com/hashicorp/terraform/configs/synth_body.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/synth_body.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/synth_body.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/synth_body.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/util.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/util.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/util.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/util.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/variable_type_hint.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/variable_type_hint.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/variable_type_hint.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/variable_type_hint.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/variabletypehint_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/variabletypehint_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/variabletypehint_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/variabletypehint_string.go
diff --git a/vendor/github.com/hashicorp/terraform/configs/version_constraint.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/version_constraint.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/configs/version_constraint.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/configs/version_constraint.go
diff --git a/vendor/github.com/hashicorp/terraform/dag/dag.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/dag.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/dag/dag.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/dag.go
index 77c67eff9..a150af961 100644
--- a/vendor/github.com/hashicorp/terraform/dag/dag.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/dag.go
@@ -5,7 +5,7 @@ import (
"sort"
"strings"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/hashicorp/go-multierror"
)
diff --git a/vendor/github.com/hashicorp/terraform/dag/dot.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/dot.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/dag/dot.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/dot.go
index 7e6d2af3b..65a351b6f 100644
--- a/vendor/github.com/hashicorp/terraform/dag/dot.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/dot.go
@@ -276,7 +276,3 @@ func (w *indentWriter) WriteByte(b byte) error {
w.indent()
return w.Buffer.WriteByte(b)
}
-func (w *indentWriter) WriteRune(r rune) (int, error) {
- w.indent()
- return w.Buffer.WriteRune(r)
-}
diff --git a/vendor/github.com/hashicorp/terraform/dag/edge.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/edge.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/dag/edge.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/edge.go
diff --git a/vendor/github.com/hashicorp/terraform/dag/graph.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/graph.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/dag/graph.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/graph.go
diff --git a/vendor/github.com/hashicorp/terraform/dag/marshal.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/marshal.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/dag/marshal.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/marshal.go
index c567d2719..7b23ea9c1 100644
--- a/vendor/github.com/hashicorp/terraform/dag/marshal.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/marshal.go
@@ -458,17 +458,3 @@ func newEdgeInfo(infoType string, e Edge, info string) *marshalEdgeInfo {
Info: info,
}
}
-
-// JSON2Dot reads a Graph debug log from and io.Reader, and converts the final
-// graph dot format.
-//
-// TODO: Allow returning the output at a certain point during decode.
-// Encode extra information from the json log into the Dot.
-func JSON2Dot(r io.Reader) ([]byte, error) {
- g, err := decodeGraph(r)
- if err != nil {
- return nil, err
- }
-
- return g.Dot(nil), nil
-}
diff --git a/vendor/github.com/hashicorp/terraform/dag/set.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/set.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/dag/set.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/set.go
diff --git a/vendor/github.com/hashicorp/terraform/dag/tarjan.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/tarjan.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/dag/tarjan.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/tarjan.go
diff --git a/vendor/github.com/hashicorp/terraform/dag/walk.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/walk.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/dag/walk.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/walk.go
index 1c926c2c2..5ddf8ef34 100644
--- a/vendor/github.com/hashicorp/terraform/dag/walk.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/dag/walk.go
@@ -6,7 +6,7 @@ import (
"sync"
"time"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// Walker is used to walk every vertex of a graph in parallel.
@@ -97,11 +97,6 @@ type walkerVertex struct {
depsCancelCh chan struct{}
}
-// errWalkUpstream is used in the errMap of a walk to note that an upstream
-// dependency failed so this vertex wasn't run. This is not shown in the final
-// user-returned error.
-var errWalkUpstream = errors.New("upstream dependency failed")
-
// Wait waits for the completion of the walk and returns diagnostics describing
// any problems that arose. Update should be called to populate the walk with
// vertices and edges prior to calling this.
diff --git a/vendor/github.com/hashicorp/terraform/internal/earlyconfig/config.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/config.go
similarity index 58%
rename from vendor/github.com/hashicorp/terraform/internal/earlyconfig/config.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/config.go
index a9b8f9883..b86bd7923 100644
--- a/vendor/github.com/hashicorp/terraform/internal/earlyconfig/config.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/config.go
@@ -1,15 +1,9 @@
package earlyconfig
import (
- "fmt"
- "sort"
-
version "github.com/hashicorp/go-version"
"github.com/hashicorp/terraform-config-inspect/tfconfig"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/moduledeps"
- "github.com/hashicorp/terraform/plugin/discovery"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// A Config is a node in the tree of modules within a configuration.
@@ -67,57 +61,3 @@ type Config struct {
// be nil.
Version *version.Version
}
-
-// ProviderDependencies returns the provider dependencies for the recieving
-// config, including all of its descendent modules.
-func (c *Config) ProviderDependencies() (*moduledeps.Module, tfdiags.Diagnostics) {
- var diags tfdiags.Diagnostics
-
- var name string
- if len(c.Path) > 0 {
- name = c.Path[len(c.Path)-1]
- }
-
- ret := &moduledeps.Module{
- Name: name,
- }
-
- providers := make(moduledeps.Providers)
- for name, reqs := range c.Module.RequiredProviders {
- inst := moduledeps.ProviderInstance(name)
- var constraints version.Constraints
- for _, reqStr := range reqs {
- if reqStr != "" {
- constraint, err := version.NewConstraint(reqStr)
- if err != nil {
- diags = diags.Append(wrapDiagnostic(tfconfig.Diagnostic{
- Severity: tfconfig.DiagError,
- Summary: "Invalid provider version constraint",
- Detail: fmt.Sprintf("Invalid version constraint %q for provider %s.", reqStr, name),
- }))
- continue
- }
- constraints = append(constraints, constraint...)
- }
- }
- providers[inst] = moduledeps.ProviderDependency{
- Constraints: discovery.NewConstraints(constraints),
- Reason: moduledeps.ProviderDependencyExplicit,
- }
- }
- ret.Providers = providers
-
- childNames := make([]string, 0, len(c.Children))
- for name := range c.Children {
- childNames = append(childNames, name)
- }
- sort.Strings(childNames)
-
- for _, name := range childNames {
- child, childDiags := c.Children[name].ProviderDependencies()
- ret.Children = append(ret.Children, child)
- diags = diags.Append(childDiags)
- }
-
- return ret, diags
-}
diff --git a/vendor/github.com/hashicorp/terraform/internal/earlyconfig/config_build.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/config_build.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/internal/earlyconfig/config_build.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/config_build.go
index 770d5dfbe..3707f2738 100644
--- a/vendor/github.com/hashicorp/terraform/internal/earlyconfig/config_build.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/config_build.go
@@ -7,8 +7,8 @@ import (
version "github.com/hashicorp/go-version"
"github.com/hashicorp/terraform-config-inspect/tfconfig"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// BuildConfig constructs a Config from a root module by loading all of its
diff --git a/vendor/github.com/hashicorp/terraform/internal/earlyconfig/diagnostics.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/diagnostics.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/internal/earlyconfig/diagnostics.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/diagnostics.go
index 9b2fd7f71..b2e1807eb 100644
--- a/vendor/github.com/hashicorp/terraform/internal/earlyconfig/diagnostics.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/diagnostics.go
@@ -4,7 +4,7 @@ import (
"fmt"
"github.com/hashicorp/terraform-config-inspect/tfconfig"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
func wrapDiagnostics(diags tfconfig.Diagnostics) tfdiags.Diagnostics {
diff --git a/vendor/github.com/hashicorp/terraform/internal/earlyconfig/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/earlyconfig/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/internal/earlyconfig/module.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/module.go
similarity index 83%
rename from vendor/github.com/hashicorp/terraform/internal/earlyconfig/module.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/module.go
index d2d628797..11eff2eb6 100644
--- a/vendor/github.com/hashicorp/terraform/internal/earlyconfig/module.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig/module.go
@@ -2,7 +2,7 @@ package earlyconfig
import (
"github.com/hashicorp/terraform-config-inspect/tfconfig"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// LoadModule loads some top-level metadata for the module in the given
diff --git a/vendor/github.com/hashicorp/terraform/flatmap/expand.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/flatmap/expand.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/flatmap/expand.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/flatmap/expand.go
index b9d15461e..1bb7b9f2f 100644
--- a/vendor/github.com/hashicorp/terraform/flatmap/expand.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/flatmap/expand.go
@@ -6,7 +6,7 @@ import (
"strconv"
"strings"
- "github.com/hashicorp/terraform/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
)
// Expand takes a map and a key (prefix) and expands that value into
diff --git a/vendor/github.com/hashicorp/terraform/flatmap/flatten.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/flatmap/flatten.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/flatmap/flatten.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/flatmap/flatten.go
diff --git a/vendor/github.com/hashicorp/terraform/flatmap/map.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/flatmap/map.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/flatmap/map.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/flatmap/map.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/config/validator.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/config/validator.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/helper/config/validator.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/config/validator.go
index 1a6e023b6..35a3e7a49 100644
--- a/vendor/github.com/hashicorp/terraform/helper/config/validator.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/config/validator.go
@@ -5,8 +5,8 @@ import (
"strconv"
"strings"
- "github.com/hashicorp/terraform/flatmap"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/flatmap"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// Validator is a helper that helps you validate the configuration
diff --git a/vendor/github.com/hashicorp/terraform/helper/didyoumean/name_suggestion.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/didyoumean/name_suggestion.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/didyoumean/name_suggestion.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/didyoumean/name_suggestion.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/plugin/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/helper/plugin/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/helper/plugin/grpc_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin/grpc_provider.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/helper/plugin/grpc_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin/grpc_provider.go
index f32610c6c..388f1ed59 100644
--- a/vendor/github.com/hashicorp/terraform/helper/plugin/grpc_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin/grpc_provider.go
@@ -11,13 +11,13 @@ import (
"github.com/zclconf/go-cty/cty/msgpack"
context "golang.org/x/net/context"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/hashicorp/terraform/helper/schema"
- proto "github.com/hashicorp/terraform/internal/tfplugin5"
- "github.com/hashicorp/terraform/plans/objchange"
- "github.com/hashicorp/terraform/plugin/convert"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/helper/schema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert"
+ proto "github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
const newExtraKey = "_new_extra_shim"
diff --git a/vendor/github.com/hashicorp/terraform/helper/plugin/unknown.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin/unknown.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/helper/plugin/unknown.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin/unknown.go
index 64a6784e8..a22a264fa 100644
--- a/vendor/github.com/hashicorp/terraform/helper/plugin/unknown.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin/unknown.go
@@ -3,7 +3,7 @@ package plugin
import (
"fmt"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/httpclient/useragent.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/httpclient/client.go
similarity index 66%
rename from vendor/github.com/hashicorp/terraform/httpclient/useragent.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/httpclient/client.go
index 536703c6b..ad8d626c6 100644
--- a/vendor/github.com/hashicorp/terraform/httpclient/useragent.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/httpclient/client.go
@@ -7,25 +7,22 @@ import (
"os"
"strings"
- "github.com/hashicorp/terraform/version"
+ cleanhttp "github.com/hashicorp/go-cleanhttp"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/version"
)
-const userAgentFormat = "Terraform/%s"
const uaEnvVar = "TF_APPEND_USER_AGENT"
+const userAgentFormat = "Terraform/%s"
-// Deprecated: Use UserAgent(version) instead
-func UserAgentString() string {
- ua := fmt.Sprintf(userAgentFormat, version.Version)
-
- if add := os.Getenv(uaEnvVar); add != "" {
- add = strings.TrimSpace(add)
- if len(add) > 0 {
- ua += " " + add
- log.Printf("[DEBUG] Using modified User-Agent: %s", ua)
- }
+// New returns the DefaultPooledClient from the cleanhttp
+// package that will also send a Terraform User-Agent string.
+func New() *http.Client {
+ cli := cleanhttp.DefaultPooledClient()
+ cli.Transport = &userAgentRoundTripper{
+ userAgent: UserAgentString(),
+ inner: cli.Transport,
}
-
- return ua
+ return cli
}
type userAgentRoundTripper struct {
@@ -41,8 +38,8 @@ func (rt *userAgentRoundTripper) RoundTrip(req *http.Request) (*http.Response, e
return rt.inner.RoundTrip(req)
}
-func TerraformUserAgent(version string) string {
- ua := fmt.Sprintf("HashiCorp Terraform/%s (+https://www.terraform.io)", version)
+func UserAgentString() string {
+ ua := fmt.Sprintf(userAgentFormat, version.Version)
if add := os.Getenv(uaEnvVar); add != "" {
add = strings.TrimSpace(add)
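The rewritten client.go exposes New, which takes cleanhttp's DefaultPooledClient and wraps its transport in the userAgentRoundTripper, so every request carries the Terraform/<version> User-Agent (plus anything in TF_APPEND_USER_AGENT). A usage sketch, with the caveat that the package path is internal to the SDK module and the URL is only a placeholder:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/terraform-plugin-sdk/internal/httpclient"
)

func main() {
	// New returns an *http.Client whose transport injects the Terraform
	// User-Agent string built by UserAgentString above.
	cli := httpclient.New()

	resp, err := cli.Get("https://example.com/") // placeholder URL
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}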
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/copy_dir.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/copy_dir.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/initwd/copy_dir.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/copy_dir.go
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/initwd/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/from_module.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/from_module.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/internal/initwd/from_module.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/from_module.go
index 6b40d08d6..641e71dec 100644
--- a/vendor/github.com/hashicorp/terraform/internal/initwd/from_module.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/from_module.go
@@ -2,7 +2,7 @@ package initwd
import (
"fmt"
- "github.com/hashicorp/terraform/internal/earlyconfig"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig"
"io/ioutil"
"log"
"os"
@@ -12,9 +12,9 @@ import (
version "github.com/hashicorp/go-version"
"github.com/hashicorp/terraform-config-inspect/tfconfig"
- "github.com/hashicorp/terraform/internal/modsdir"
- "github.com/hashicorp/terraform/registry"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/modsdir"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
const initFromModuleRootCallName = "root"
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/getter.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/getter.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/internal/initwd/getter.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/getter.go
index 2f306be73..8dc0374b1 100644
--- a/vendor/github.com/hashicorp/terraform/internal/initwd/getter.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/getter.go
@@ -9,7 +9,7 @@ import (
cleanhttp "github.com/hashicorp/go-cleanhttp"
getter "github.com/hashicorp/go-getter"
- "github.com/hashicorp/terraform/registry/regsrc"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc"
)
// We configure our own go-getter detector and getter sets here, because
@@ -19,14 +19,6 @@ import (
// any meddling that might be done by other go-getter callers linked into our
// executable.
-var goGetterDetectors = []getter.Detector{
- new(getter.GitHubDetector),
- new(getter.BitBucketDetector),
- new(getter.GCSDetector),
- new(getter.S3Detector),
- new(getter.FileDetector),
-}
-
var goGetterNoDetectors = []getter.Detector{}
var goGetterDecompressors = map[string]getter.Decompressor{
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/inode.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/inode.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/initwd/inode.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/inode.go
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/inode_freebsd.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/inode_freebsd.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/initwd/inode_freebsd.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/inode_freebsd.go
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/inode_windows.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/inode_windows.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/initwd/inode_windows.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/inode_windows.go
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/module_install.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/module_install.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/internal/initwd/module_install.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/module_install.go
index 531310ab8..8e0557567 100644
--- a/vendor/github.com/hashicorp/terraform/internal/initwd/module_install.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/module_install.go
@@ -9,12 +9,12 @@ import (
version "github.com/hashicorp/go-version"
"github.com/hashicorp/terraform-config-inspect/tfconfig"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/internal/earlyconfig"
- "github.com/hashicorp/terraform/internal/modsdir"
- "github.com/hashicorp/terraform/registry"
- "github.com/hashicorp/terraform/registry/regsrc"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/modsdir"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
type ModuleInstaller struct {
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/module_install_hooks.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/module_install_hooks.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/initwd/module_install_hooks.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/initwd/module_install_hooks.go
diff --git a/vendor/github.com/hashicorp/terraform/lang/blocktoattr/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/lang/blocktoattr/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/lang/blocktoattr/fixup.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/fixup.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/lang/blocktoattr/fixup.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/fixup.go
index 0af708ec4..f782f6b75 100644
--- a/vendor/github.com/hashicorp/terraform/lang/blocktoattr/fixup.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/fixup.go
@@ -3,7 +3,7 @@ package blocktoattr
import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hcldec"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/lang/blocktoattr/schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/schema.go
similarity index 77%
rename from vendor/github.com/hashicorp/terraform/lang/blocktoattr/schema.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/schema.go
index 31e010cc7..129ee0e82 100644
--- a/vendor/github.com/hashicorp/terraform/lang/blocktoattr/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/schema.go
@@ -2,7 +2,7 @@ package blocktoattr
import (
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
"github.com/zclconf/go-cty/cty"
)
@@ -117,30 +117,3 @@ func SchemaForCtyElementType(ty cty.Type) *configschema.Block {
}
return ret
}
-
-// SchemaForCtyContainerType converts a cty list-of-object or set-of-object type
-// into an approximately-equivalent configschema.NestedBlock. If the given type
-// is not of the expected kind then this function will panic.
-func SchemaForCtyContainerType(ty cty.Type) *configschema.NestedBlock {
- var nesting configschema.NestingMode
- switch {
- case ty.IsListType():
- nesting = configschema.NestingList
- case ty.IsSetType():
- nesting = configschema.NestingSet
- default:
- panic("unsuitable type")
- }
- nested := SchemaForCtyElementType(ty.ElementType())
- return &configschema.NestedBlock{
- Nesting: nesting,
- Block: *nested,
- }
-}
-
-// TypeCanBeBlocks returns true if the given type is a list-of-object or
-// set-of-object type, and would thus be subject to the blocktoattr fixup
-// if used as an attribute type.
-func TypeCanBeBlocks(ty cty.Type) bool {
- return (ty.IsListType() || ty.IsSetType()) && ty.ElementType().IsObjectType()
-}
diff --git a/vendor/github.com/hashicorp/terraform/lang/blocktoattr/variables.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/variables.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/lang/blocktoattr/variables.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/variables.go
index ae5c609df..f5ed1c539 100644
--- a/vendor/github.com/hashicorp/terraform/lang/blocktoattr/variables.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr/variables.go
@@ -4,7 +4,7 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/ext/dynblock"
"github.com/hashicorp/hcl/v2/hcldec"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
)
// ExpandedVariables finds all of the global variables referenced in the
diff --git a/vendor/github.com/hashicorp/terraform/lang/data.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/data.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/lang/data.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/data.go
index ebc008e35..13f7ed935 100644
--- a/vendor/github.com/hashicorp/terraform/lang/data.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/data.go
@@ -1,8 +1,8 @@
package lang
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/lang/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/lang/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/lang/eval.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/eval.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/lang/eval.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/eval.go
index bfacd671a..ec48a873f 100644
--- a/vendor/github.com/hashicorp/terraform/lang/eval.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/eval.go
@@ -8,10 +8,10 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/ext/dynblock"
"github.com/hashicorp/hcl/v2/hcldec"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/lang/blocktoattr"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
)
@@ -240,19 +240,15 @@ func (s *Scope) evalContext(refs []*addrs.Reference, selfAddr addrs.Referenceabl
// Self is an exception in that it must always resolve to a
// particular instance. We will still insert the full resource into
// the context below.
- var hclDiags hcl.Diagnostics
- // We should always have a valid self index by this point, but in
- // the case of an error, self may end up as a cty.DynamicValue.
switch k := subj.Key.(type) {
case addrs.IntKey:
- self, hclDiags = hcl.Index(val, cty.NumberIntVal(int64(k)), ref.SourceRange.ToHCL().Ptr())
- diags.Append(hclDiags)
+ self = val.Index(cty.NumberIntVal(int64(k)))
case addrs.StringKey:
- self, hclDiags = hcl.Index(val, cty.StringVal(string(k)), ref.SourceRange.ToHCL().Ptr())
- diags.Append(hclDiags)
+ self = val.Index(cty.StringVal(string(k)))
default:
self = val
}
+
continue
}
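In the hunk above, self references are now resolved by indexing the resource value directly with cty's Value.Index instead of calling hcl.Index, so the hclDiags bookkeeping goes away; any problem that hcl.Index would have reported as diagnostics is no longer collected at this point. For reference, direct indexing on a cty value looks like this (standalone sketch, not SDK code):

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	// A resource with two instances, represented as a tuple of objects.
	val := cty.TupleVal([]cty.Value{
		cty.ObjectVal(map[string]cty.Value{"id": cty.StringVal("i-0")}),
		cty.ObjectVal(map[string]cty.Value{"id": cty.StringVal("i-1")}),
	})

	// Equivalent of `self = val.Index(cty.NumberIntVal(int64(k)))` above.
	self := val.Index(cty.NumberIntVal(1))
	fmt.Println(self.GetAttr("id").AsString()) // i-1
}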
diff --git a/vendor/github.com/hashicorp/terraform/lang/funcs/cidr.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/cidr.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/lang/funcs/cidr.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/cidr.go
diff --git a/vendor/github.com/hashicorp/terraform/lang/funcs/collection.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/collection.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/lang/funcs/collection.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/collection.go
index 2ea416875..e6898457b 100644
--- a/vendor/github.com/hashicorp/terraform/lang/funcs/collection.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/collection.go
@@ -940,7 +940,7 @@ var ReverseFunc = function.New(&function.Spec{
},
})
-// SetProductFunc calculates the Cartesian product of two or more sets or
+// SetProductFunc calculates the cartesian product of two or more sets or
// sequences. If the arguments are all lists then the result is a list of tuples,
// preserving the ordering of all of the input lists. Otherwise the result is a
// set of tuples.
@@ -1179,6 +1179,7 @@ func sliceIndexes(args []cty.Value) (int, int, bool, error) {
return startIndex, endIndex, startKnown && endKnown, nil
}
+// TransposeFunc contructs a function that takes a map of lists of strings and
// TransposeFunc constructs a function that takes a map of lists of strings and
// swaps the keys and values to produce a new map of lists of strings.
var TransposeFunc = function.New(&function.Spec{
@@ -1225,10 +1226,6 @@ var TransposeFunc = function.New(&function.Spec{
outputMap[outKey] = cty.ListVal(values)
}
- if len(outputMap) == 0 {
- return cty.MapValEmpty(cty.List(cty.String)), nil
- }
-
return cty.MapVal(outputMap), nil
},
})
@@ -1494,7 +1491,7 @@ func Reverse(list cty.Value) (cty.Value, error) {
return ReverseFunc.Call([]cty.Value{list})
}
-// SetProduct computes the Cartesian product of sets or sequences.
+// SetProduct computes the cartesian product of sets or sequences.
func SetProduct(sets ...cty.Value) (cty.Value, error) {
return SetProductFunc.Call(sets)
}
diff --git a/vendor/github.com/hashicorp/terraform/lang/funcs/conversion.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/conversion.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/lang/funcs/conversion.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/conversion.go
diff --git a/vendor/github.com/hashicorp/terraform/lang/funcs/crypto.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/crypto.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/lang/funcs/crypto.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/crypto.go
diff --git a/vendor/github.com/hashicorp/terraform/lang/funcs/datetime.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/datetime.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/lang/funcs/datetime.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/datetime.go
diff --git a/vendor/github.com/hashicorp/terraform/lang/funcs/encoding.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/encoding.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/lang/funcs/encoding.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/encoding.go
diff --git a/vendor/github.com/hashicorp/terraform/lang/funcs/filesystem.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/filesystem.go
similarity index 83%
rename from vendor/github.com/hashicorp/terraform/lang/funcs/filesystem.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/filesystem.go
index 4b899cbc4..786d3e74b 100644
--- a/vendor/github.com/hashicorp/terraform/lang/funcs/filesystem.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/filesystem.go
@@ -8,7 +8,6 @@ import (
"path/filepath"
"unicode/utf8"
- "github.com/bmatcuk/doublestar"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
homedir "github.com/mitchellh/go-homedir"
@@ -208,74 +207,6 @@ func MakeFileExistsFunc(baseDir string) function.Function {
})
}
-// MakeFileSetFunc constructs a function that takes a glob pattern
-// and enumerates a file set from that pattern
-func MakeFileSetFunc(baseDir string) function.Function {
- return function.New(&function.Spec{
- Params: []function.Parameter{
- {
- Name: "path",
- Type: cty.String,
- },
- {
- Name: "pattern",
- Type: cty.String,
- },
- },
- Type: function.StaticReturnType(cty.Set(cty.String)),
- Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
- path := args[0].AsString()
- pattern := args[1].AsString()
-
- if !filepath.IsAbs(path) {
- path = filepath.Join(baseDir, path)
- }
-
- // Join the path to the glob pattern, while ensuring the full
- // pattern is canonical for the host OS. The joined path is
- // automatically cleaned during this operation.
- pattern = filepath.Join(path, pattern)
-
- matches, err := doublestar.Glob(pattern)
- if err != nil {
- return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to glob pattern (%s): %s", pattern, err)
- }
-
- var matchVals []cty.Value
- for _, match := range matches {
- fi, err := os.Stat(match)
-
- if err != nil {
- return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to stat (%s): %s", match, err)
- }
-
- if !fi.Mode().IsRegular() {
- continue
- }
-
- // Remove the path and file separator from matches.
- match, err = filepath.Rel(path, match)
-
- if err != nil {
- return cty.UnknownVal(cty.Set(cty.String)), fmt.Errorf("failed to trim path of match (%s): %s", match, err)
- }
-
- // Replace any remaining file separators with forward slash (/)
- // separators for cross-system compatibility.
- match = filepath.ToSlash(match)
-
- matchVals = append(matchVals, cty.StringVal(match))
- }
-
- if len(matchVals) == 0 {
- return cty.SetValEmpty(cty.String), nil
- }
-
- return cty.SetVal(matchVals), nil
- },
- })
-}
-
// BasenameFunc constructs a function that takes a string containing a filesystem path
// and removes all except the last portion from it.
var BasenameFunc = function.New(&function.Spec{
@@ -385,16 +316,6 @@ func FileExists(baseDir string, path cty.Value) (cty.Value, error) {
return fn.Call([]cty.Value{path})
}
-// FileSet enumerates a set of files given a glob pattern
-//
-// The underlying function implementation works relative to a particular base
-// directory, so this wrapper takes a base directory string and uses it to
-// construct the underlying function before calling it.
-func FileSet(baseDir string, path, pattern cty.Value) (cty.Value, error) {
- fn := MakeFileSetFunc(baseDir)
- return fn.Call([]cty.Value{path, pattern})
-}
-
// FileBase64 reads the contents of the file at the given path.
//
// The bytes from the file are encoded as base64 before returning.
diff --git a/vendor/github.com/hashicorp/terraform/lang/funcs/number.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/number.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/lang/funcs/number.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/number.go
diff --git a/vendor/github.com/hashicorp/terraform/lang/funcs/string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/string.go
similarity index 73%
rename from vendor/github.com/hashicorp/terraform/lang/funcs/string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/string.go
index 2e66be451..c9ddf19e3 100644
--- a/vendor/github.com/hashicorp/terraform/lang/funcs/string.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs/string.go
@@ -71,7 +71,7 @@ var SortFunc = function.New(&function.Spec{
if !listVal.IsWhollyKnown() {
// If some of the element values aren't known yet then we
- // can't yet predict the order of the result.
+ // can't yet predict the order of the result.
return cty.UnknownVal(retType), nil
}
if listVal.LengthInt() == 0 { // Easy path
@@ -123,7 +123,7 @@ var SplitFunc = function.New(&function.Spec{
},
})
-// ChompFunc constructs a function that removes newline characters at the end of a string.
+// ChompFunc constructs a function that removes newline characters at the end of a string.
var ChompFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
@@ -138,7 +138,7 @@ var ChompFunc = function.New(&function.Spec{
},
})
-// IndentFunc constructs a function that adds a given number of spaces to the
+// IndentFunc constructs a function that adds a given number of spaces to the
// beginnings of all but the first line in a given multi-line string.
var IndentFunc = function.New(&function.Spec{
Params: []function.Parameter{
@@ -163,7 +163,7 @@ var IndentFunc = function.New(&function.Spec{
},
})
-// ReplaceFunc constructs a function that searches a given string for another
+// ReplaceFunc constructs a function that searches a given string for another
// given substring, and replaces each occurrence with a given replacement string.
var ReplaceFunc = function.New(&function.Spec{
Params: []function.Parameter{
@@ -201,7 +201,7 @@ var ReplaceFunc = function.New(&function.Spec{
},
})
-// TitleFunc constructs a function that converts the first letter of each word
+// TitleFunc constructs a function that converts the first letter of each word
// in the given string to uppercase.
var TitleFunc = function.New(&function.Spec{
Params: []function.Parameter{
@@ -216,7 +216,7 @@ var TitleFunc = function.New(&function.Spec{
},
})
-// TrimSpaceFunc constructs a function that removes any space characters from
+// TrimSpaceFunc constructs a function that removes any space characters from
// the start and end of the given string.
var TrimSpaceFunc = function.New(&function.Spec{
Params: []function.Parameter{
@@ -231,69 +231,6 @@ var TrimSpaceFunc = function.New(&function.Spec{
},
})
-// TrimFunc constructs a function that removes the specified characters from
-// the start and end of the given string.
-var TrimFunc = function.New(&function.Spec{
- Params: []function.Parameter{
- {
- Name: "str",
- Type: cty.String,
- },
- {
- Name: "cutset",
- Type: cty.String,
- },
- },
- Type: function.StaticReturnType(cty.String),
- Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
- str := args[0].AsString()
- cutset := args[1].AsString()
- return cty.StringVal(strings.Trim(str, cutset)), nil
- },
-})
-
-// TrimPrefixFunc constructs a function that removes the specified characters from
-// the start the given string.
-var TrimPrefixFunc = function.New(&function.Spec{
- Params: []function.Parameter{
- {
- Name: "str",
- Type: cty.String,
- },
- {
- Name: "prefix",
- Type: cty.String,
- },
- },
- Type: function.StaticReturnType(cty.String),
- Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
- str := args[0].AsString()
- prefix := args[1].AsString()
- return cty.StringVal(strings.TrimPrefix(str, prefix)), nil
- },
-})
-
-// TrimSuffixFunc constructs a function that removes the specified characters from
-// the end of the given string.
-var TrimSuffixFunc = function.New(&function.Spec{
- Params: []function.Parameter{
- {
- Name: "str",
- Type: cty.String,
- },
- {
- Name: "suffix",
- Type: cty.String,
- },
- },
- Type: function.StaticReturnType(cty.String),
- Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
- str := args[0].AsString()
- cutset := args[1].AsString()
- return cty.StringVal(strings.TrimSuffix(str, cutset)), nil
- },
-})
-
// Join concatenates together the string elements of one or more lists with a
// given separator.
func Join(sep cty.Value, lists ...cty.Value) (cty.Value, error) {
@@ -341,18 +278,3 @@ func Title(str cty.Value) (cty.Value, error) {
func TrimSpace(str cty.Value) (cty.Value, error) {
return TrimSpaceFunc.Call([]cty.Value{str})
}
-
-// Trim removes the specified characters from the start and end of the given string.
-func Trim(str, cutset cty.Value) (cty.Value, error) {
- return TrimFunc.Call([]cty.Value{str, cutset})
-}
-
-// TrimPrefix removes the specified prefix from the start of the given string.
-func TrimPrefix(str, prefix cty.Value) (cty.Value, error) {
- return TrimPrefixFunc.Call([]cty.Value{str, prefix})
-}
-
-// TrimSuffix removes the specified suffix from the end of the given string.
-func TrimSuffix(str, suffix cty.Value) (cty.Value, error) {
- return TrimSuffixFunc.Call([]cty.Value{str, suffix})
-}
diff --git a/vendor/github.com/hashicorp/terraform/lang/functions.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/functions.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/lang/functions.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/functions.go
index fd820df04..a3c490664 100644
--- a/vendor/github.com/hashicorp/terraform/lang/functions.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/functions.go
@@ -1,14 +1,12 @@
package lang
import (
- "fmt"
-
ctyyaml "github.com/zclconf/go-cty-yaml"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/function"
"github.com/zclconf/go-cty/cty/function/stdlib"
- "github.com/hashicorp/terraform/lang/funcs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs"
)
var impureFunctions = []string{
@@ -57,7 +55,6 @@ func (s *Scope) Functions() map[string]function.Function {
"chunklist": funcs.ChunklistFunc,
"file": funcs.MakeFileFunc(s.BaseDir, false),
"fileexists": funcs.MakeFileExistsFunc(s.BaseDir),
- "fileset": funcs.MakeFileSetFunc(s.BaseDir),
"filebase64": funcs.MakeFileFunc(s.BaseDir, true),
"filebase64sha256": funcs.MakeFileBase64Sha256Func(s.BaseDir),
"filebase64sha512": funcs.MakeFileBase64Sha512Func(s.BaseDir),
@@ -118,10 +115,7 @@ func (s *Scope) Functions() map[string]function.Function {
"tolist": funcs.MakeToFunc(cty.List(cty.DynamicPseudoType)),
"tomap": funcs.MakeToFunc(cty.Map(cty.DynamicPseudoType)),
"transpose": funcs.TransposeFunc,
- "trim": funcs.TrimFunc,
- "trimprefix": funcs.TrimPrefixFunc,
"trimspace": funcs.TrimSpaceFunc,
- "trimsuffix": funcs.TrimSuffixFunc,
"upper": stdlib.UpperFunc,
"urlencode": funcs.URLEncodeFunc,
"uuid": funcs.UUIDFunc,
@@ -150,12 +144,3 @@ func (s *Scope) Functions() map[string]function.Function {
return s.funcs
}
-
-var unimplFunc = function.New(&function.Spec{
- Type: func([]cty.Value) (cty.Type, error) {
- return cty.DynamicPseudoType, fmt.Errorf("function not yet implemented")
- },
- Impl: func([]cty.Value, cty.Type) (cty.Value, error) {
- return cty.DynamicVal, fmt.Errorf("function not yet implemented")
- },
-})
diff --git a/vendor/github.com/hashicorp/terraform/lang/references.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/references.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/lang/references.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/references.go
index 569251cb8..7923d5113 100644
--- a/vendor/github.com/hashicorp/terraform/lang/references.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/references.go
@@ -2,10 +2,10 @@ package lang
import (
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/lang/blocktoattr"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// References finds all of the references in the given set of traversals,
diff --git a/vendor/github.com/hashicorp/terraform/lang/scope.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/scope.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/lang/scope.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/scope.go
index 98fca6baa..a720cca68 100644
--- a/vendor/github.com/hashicorp/terraform/lang/scope.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/lang/scope.go
@@ -5,7 +5,7 @@ import (
"github.com/zclconf/go-cty/cty/function"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// Scope is the main type in this package, allowing dynamic evaluation of
diff --git a/vendor/github.com/hashicorp/terraform/internal/modsdir/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/modsdir/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/modsdir/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/modsdir/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/internal/modsdir/manifest.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/modsdir/manifest.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/internal/modsdir/manifest.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/modsdir/manifest.go
index 36f6c033f..2d45c8520 100644
--- a/vendor/github.com/hashicorp/terraform/internal/modsdir/manifest.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/modsdir/manifest.go
@@ -11,7 +11,7 @@ import (
version "github.com/hashicorp/go-version"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// Record represents some metadata about an installed module, as part
diff --git a/vendor/github.com/hashicorp/terraform/internal/modsdir/paths.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/modsdir/paths.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/modsdir/paths.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/modsdir/paths.go
diff --git a/vendor/github.com/hashicorp/terraform/moduledeps/dependencies.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/dependencies.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/moduledeps/dependencies.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/dependencies.go
index 87c8431ea..c80588718 100644
--- a/vendor/github.com/hashicorp/terraform/moduledeps/dependencies.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/dependencies.go
@@ -1,7 +1,7 @@
package moduledeps
import (
- "github.com/hashicorp/terraform/plugin/discovery"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery"
)
// Providers describes a set of provider dependencies for a given module.
diff --git a/vendor/github.com/hashicorp/terraform/moduledeps/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/moduledeps/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/moduledeps/module.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/module.go
similarity index 67%
rename from vendor/github.com/hashicorp/terraform/moduledeps/module.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/module.go
index d6cbaf5c5..5189acfc1 100644
--- a/vendor/github.com/hashicorp/terraform/moduledeps/module.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/module.go
@@ -4,7 +4,7 @@ import (
"sort"
"strings"
- "github.com/hashicorp/terraform/plugin/discovery"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery"
)
// Module represents the dependencies of a single module, as well as being
@@ -71,15 +71,6 @@ func (m *Module) SortChildren() {
sort.Sort(sortModules{m.Children})
}
-// SortDescendents is a convenience wrapper for calling SortChildren on
-// the receiver and all of its descendent modules.
-func (m *Module) SortDescendents() {
- m.WalkTree(func(path []string, parent *Module, current *Module) error {
- current.SortChildren()
- return nil
- })
-}
-
type sortModules struct {
modules []*Module
}
@@ -141,64 +132,3 @@ func (m *Module) AllPluginRequirements() discovery.PluginRequirements {
})
return ret
}
-
-// Equal returns true if the receiver is the root of an identical tree
-// to the other given Module. This is a deep comparison that considers
-// the equality of all downstream modules too.
-//
-// The children are considered to be ordered, so callers may wish to use
-// SortDescendents first to normalize the order of the slices of child nodes.
-//
-// The implementation of this function is not optimized since it is provided
-// primarily for use in tests.
-func (m *Module) Equal(other *Module) bool {
- // take care of nils first
- if m == nil && other == nil {
- return true
- } else if (m == nil && other != nil) || (m != nil && other == nil) {
- return false
- }
-
- if m.Name != other.Name {
- return false
- }
-
- if len(m.Providers) != len(other.Providers) {
- return false
- }
- if len(m.Children) != len(other.Children) {
- return false
- }
-
- // Can't use reflect.DeepEqual on this provider structure because
- // the nested Constraints objects contain function pointers that
- // never compare as equal. So we'll need to walk it the long way.
- for inst, dep := range m.Providers {
- if _, exists := other.Providers[inst]; !exists {
- return false
- }
-
- if dep.Reason != other.Providers[inst].Reason {
- return false
- }
-
- // Constraints are not too easy to compare robustly, so
- // we'll just use their string representations as a proxy
- // for now.
- if dep.Constraints.String() != other.Providers[inst].Constraints.String() {
- return false
- }
- }
-
- // Above we already checked that we have the same number of children
- // in each module, so now we just need to check that they are
- // recursively equal.
- for i := range m.Children {
- if !m.Children[i].Equal(other.Children[i]) {
- return false
- }
- }
-
- // If we fall out here then they are equal
- return true
-}
diff --git a/vendor/github.com/hashicorp/terraform/moduledeps/provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/provider.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/moduledeps/provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps/provider.go
diff --git a/vendor/github.com/hashicorp/terraform/plans/action.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/action.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plans/action.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/action.go
diff --git a/vendor/github.com/hashicorp/terraform/plans/action_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/action_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plans/action_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/action_string.go
diff --git a/vendor/github.com/hashicorp/terraform/plans/changes.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/changes.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/plans/changes.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/changes.go
index d7e0dcdb8..5c2028c83 100644
--- a/vendor/github.com/hashicorp/terraform/plans/changes.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/changes.go
@@ -1,8 +1,8 @@
package plans
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/plans/changes_src.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/changes_src.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/plans/changes_src.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/changes_src.go
index 90153ea7b..97bc8da7c 100644
--- a/vendor/github.com/hashicorp/terraform/plans/changes_src.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/changes_src.go
@@ -3,8 +3,8 @@ package plans
import (
"fmt"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/plans/changes_sync.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/changes_sync.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/plans/changes_sync.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/changes_sync.go
index 6b4ff98ff..89cc1ab22 100644
--- a/vendor/github.com/hashicorp/terraform/plans/changes_sync.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/changes_sync.go
@@ -4,8 +4,8 @@ import (
"fmt"
"sync"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// ChangesSync is a wrapper around a Changes that provides a concurrency-safe
diff --git a/vendor/github.com/hashicorp/terraform/plans/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plans/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/plans/dynamic_value.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/dynamic_value.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plans/dynamic_value.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/dynamic_value.go
diff --git a/vendor/github.com/hashicorp/terraform/plans/objchange/all_null.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/all_null.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/plans/objchange/all_null.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/all_null.go
index 18a7e99a3..ba9cc9611 100644
--- a/vendor/github.com/hashicorp/terraform/plans/objchange/all_null.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/all_null.go
@@ -1,7 +1,7 @@
package objchange
import (
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/plans/objchange/compatible.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/compatible.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/plans/objchange/compatible.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/compatible.go
index d85086c97..36a7d496c 100644
--- a/vendor/github.com/hashicorp/terraform/plans/objchange/compatible.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/compatible.go
@@ -7,7 +7,7 @@ import (
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
)
// AssertObjectCompatible checks whether the given "actual" value is a valid
diff --git a/vendor/github.com/hashicorp/terraform/plans/objchange/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plans/objchange/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/plans/objchange/lcs.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/lcs.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plans/objchange/lcs.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/lcs.go
diff --git a/vendor/github.com/hashicorp/terraform/plans/objchange/normalize_obj.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/normalize_obj.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/plans/objchange/normalize_obj.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/normalize_obj.go
index c23f44dac..a8629046c 100644
--- a/vendor/github.com/hashicorp/terraform/plans/objchange/normalize_obj.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/normalize_obj.go
@@ -1,7 +1,7 @@
package objchange
import (
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/plans/objchange/objchange.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/objchange.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/plans/objchange/objchange.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/objchange.go
index 5a8af1481..879fc93a1 100644
--- a/vendor/github.com/hashicorp/terraform/plans/objchange/objchange.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/objchange.go
@@ -5,7 +5,7 @@ import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
)
// ProposedNewObject constructs a proposed new object value by combining the
diff --git a/vendor/github.com/hashicorp/terraform/plans/objchange/plan_valid.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/plan_valid.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/plans/objchange/plan_valid.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/plan_valid.go
index 69acb8979..905a91142 100644
--- a/vendor/github.com/hashicorp/terraform/plans/objchange/plan_valid.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange/plan_valid.go
@@ -5,7 +5,7 @@ import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
)
// AssertPlanValid checks whether a planned new state returned by a
diff --git a/vendor/github.com/hashicorp/terraform/plans/plan.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/plan.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/plans/plan.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/plan.go
index 5a3e4548e..0abed56a0 100644
--- a/vendor/github.com/hashicorp/terraform/plans/plan.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plans/plan.go
@@ -3,8 +3,8 @@ package plans
import (
"sort"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/plugin/convert/diagnostics.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert/diagnostics.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/plugin/convert/diagnostics.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert/diagnostics.go
index 51cb2fe2f..f20f0507e 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/convert/diagnostics.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert/diagnostics.go
@@ -1,8 +1,8 @@
package convert
import (
- proto "github.com/hashicorp/terraform/internal/tfplugin5"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
+ proto "github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/plugin/convert/schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert/schema.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/plugin/convert/schema.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert/schema.go
index 6a45f54c9..105c32c6f 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/convert/schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert/schema.go
@@ -5,9 +5,9 @@ import (
"reflect"
"sort"
- "github.com/hashicorp/terraform/configs/configschema"
- proto "github.com/hashicorp/terraform/internal/tfplugin5"
- "github.com/hashicorp/terraform/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ proto "github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5"
)
// ConfigSchemaToProto takes a *configschema.Block and converts it to a
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/error.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/error.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/error.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/error.go
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/find.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/find.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/find.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/find.go
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/get.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/get.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/get.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/get.go
index 0ee23c6fd..722bb28a2 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/discovery/get.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/get.go
@@ -16,25 +16,21 @@ import (
"github.com/hashicorp/errwrap"
getter "github.com/hashicorp/go-getter"
multierror "github.com/hashicorp/go-multierror"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/httpclient"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry/response"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
+ tfversion "github.com/hashicorp/terraform-plugin-sdk/internal/version"
"github.com/hashicorp/terraform-svchost/disco"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/httpclient"
- "github.com/hashicorp/terraform/registry"
- "github.com/hashicorp/terraform/registry/regsrc"
- "github.com/hashicorp/terraform/registry/response"
- "github.com/hashicorp/terraform/tfdiags"
- tfversion "github.com/hashicorp/terraform/version"
"github.com/mitchellh/cli"
)
// Releases are located by querying the terraform registry.
-const protocolVersionHeader = "x-terraform-protocol-version"
-
var httpClient *http.Client
-var errVersionNotFound = errors.New("version not found")
-
func init() {
httpClient = httpclient.New()
@@ -47,13 +43,6 @@ func init() {
getter.Getters["https"] = httpGetter
}
-// An Installer maintains a local cache of plugins by downloading plugins
-// from an online repository.
-type Installer interface {
- Get(provider addrs.ProviderType, req Constraints) (PluginMeta, tfdiags.Diagnostics, error)
- PurgeUnused(used map[string]PluginMeta) (removed PluginMetaSet, err error)
-}
-
// ProviderInstaller is an Installer implementation that knows how to
// download Terraform providers from the official HashiCorp releases service
// into a local directory. The files downloaded are compliant with the
@@ -570,19 +559,6 @@ func (i *ProviderInstaller) checkPluginProtocol(versionMeta *response.TerraformP
return ErrorNoVersionCompatible
}
-// REVIEWER QUESTION (again): this ends up swallowing a bunch of errors from
-// checkPluginProtocol. Do they need to be percolated up better, or would
-// debug messages would suffice in these situations?
-func (i *ProviderInstaller) findPlatformCompatibleVersion(versions []*response.TerraformProviderVersion) (*response.TerraformProviderVersion, error) {
- for _, version := range versions {
- if err := i.checkPlatformCompatibility(version); err == nil {
- return version, nil
- }
- }
-
- return nil, ErrorNoVersionCompatibleWithPlatform
-}
-
// platformCompatibleVersions returns a list of provider versions that are
// compatible with the requested platform.
func (i *ProviderInstaller) platformCompatibleVersions(versions []*response.TerraformProviderVersion) []*response.TerraformProviderVersion {
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/get_cache.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/get_cache.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/get_cache.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/get_cache.go
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/hashicorp.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/hashicorp.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/hashicorp.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/hashicorp.go
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/meta.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/meta.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/meta.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/meta.go
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/meta_set.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/meta_set.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/meta_set.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/meta_set.go
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/requirements.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/requirements.go
similarity index 92%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/requirements.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/requirements.go
index 0466ab25a..75430fdd6 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/discovery/requirements.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/requirements.go
@@ -4,12 +4,6 @@ import (
"bytes"
)
-// PluginInstallProtocolVersion is the protocol version TF-core
-// supports to communicate with servers, and is used to resolve
-// plugin discovery with terraform registry, in addition to
-// any specified plugin version constraints
-const PluginInstallProtocolVersion = 5
-
// PluginRequirements describes a set of plugins (assumed to be of a consistent
// kind) that are required to exist and have versions within the given
// corresponding sets.
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/signature.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/signature.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/signature.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/signature.go
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/version.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/version.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/version.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/version.go
diff --git a/vendor/github.com/hashicorp/terraform/plugin/discovery/version_set.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/version_set.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/plugin/discovery/version_set.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/version_set.go
index de02f5ec5..fc8b6f8bd 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/discovery/version_set.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery/version_set.go
@@ -81,9 +81,3 @@ func (s Constraints) Append(other Constraints) Constraints {
func (s Constraints) String() string {
return s.raw.String()
}
-
-// Unconstrained returns true if and only if the receiver is an empty
-// constraint set.
-func (s Constraints) Unconstrained() bool {
- return len(s.raw) == 0
-}
diff --git a/vendor/github.com/hashicorp/terraform/providers/addressed_types.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/addressed_types.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/providers/addressed_types.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/addressed_types.go
index 7ed523f15..0f48f2447 100644
--- a/vendor/github.com/hashicorp/terraform/providers/addressed_types.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/addressed_types.go
@@ -3,7 +3,7 @@ package providers
import (
"sort"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// AddressedTypes is a helper that extracts all of the distinct provider
diff --git a/vendor/github.com/hashicorp/terraform/providers/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/providers/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/providers/provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/provider.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/providers/provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/provider.go
index 7e0a74c58..3d0aa8ec9 100644
--- a/vendor/github.com/hashicorp/terraform/providers/provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/provider.go
@@ -3,9 +3,9 @@ package providers
import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// Interface represents the set of methods required for a complete resource
diff --git a/vendor/github.com/hashicorp/terraform/providers/resolver.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/resolver.go
similarity index 58%
rename from vendor/github.com/hashicorp/terraform/providers/resolver.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/resolver.go
index 4de8e0acd..b42e49202 100644
--- a/vendor/github.com/hashicorp/terraform/providers/resolver.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/providers/resolver.go
@@ -3,7 +3,7 @@ package providers
import (
"fmt"
- "github.com/hashicorp/terraform/plugin/discovery"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery"
)
// Resolver is an interface implemented by objects that are able to resolve
@@ -66,47 +66,3 @@ func FactoryFixed(p Interface) Factory {
return p, nil
}
}
-
-// ProviderHasResource is a helper that requests schema from the given provider
-// and checks if it has a resource type of the given name.
-//
-// This function is more expensive than it may first appear since it must
-// retrieve the entire schema from the underlying provider, and so it should
-// be used sparingly and especially not in tight loops.
-//
-// Since retrieving the provider may fail (e.g. if the provider is accessed
-// over an RPC channel that has operational problems), this function will
-// return false if the schema cannot be retrieved, under the assumption that
-// a subsequent call to do anything with the resource type would fail
-// anyway.
-func ProviderHasResource(provider Interface, typeName string) bool {
- resp := provider.GetSchema()
- if resp.Diagnostics.HasErrors() {
- return false
- }
-
- _, exists := resp.ResourceTypes[typeName]
- return exists
-}
-
-// ProviderHasDataSource is a helper that requests schema from the given
-// provider and checks if it has a data source of the given name.
-//
-// This function is more expensive than it may first appear since it must
-// retrieve the entire schema from the underlying provider, and so it should
-// be used sparingly and especially not in tight loops.
-//
-// Since retrieving the provider may fail (e.g. if the provider is accessed
-// over an RPC channel that has operational problems), this function will
-// return false if the schema cannot be retrieved, under the assumption that
-// a subsequent call to do anything with the data source would fail
-// anyway.
-func ProviderHasDataSource(provider Interface, dataSourceName string) bool {
- resp := provider.GetSchema()
- if resp.Diagnostics.HasErrors() {
- return false
- }
-
- _, exists := resp.DataSources[dataSourceName]
- return exists
-}
diff --git a/vendor/github.com/hashicorp/terraform/provisioners/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/provisioners/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/provisioners/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/provisioners/doc.go
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/provisioners/factory.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/provisioners/factory.go
new file mode 100644
index 000000000..7a9dca0a0
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/provisioners/factory.go
@@ -0,0 +1,5 @@
+package provisioners
+
+// Factory is a function type that creates a new instance of a resource
+// provisioner, or returns an error if that is impossible.
+type Factory func() (Interface, error)
diff --git a/vendor/github.com/hashicorp/terraform/provisioners/provisioner.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/provisioners/provisioner.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/provisioners/provisioner.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/provisioners/provisioner.go
index e53c88488..7d8f4076b 100644
--- a/vendor/github.com/hashicorp/terraform/provisioners/provisioner.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/provisioners/provisioner.go
@@ -1,8 +1,8 @@
package provisioners
import (
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/registry/client.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/client.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/registry/client.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/client.go
index e8f7ac111..4ef22052c 100644
--- a/vendor/github.com/hashicorp/terraform/registry/client.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/client.go
@@ -11,12 +11,13 @@ import (
"strings"
"time"
+ "github.com/hashicorp/terraform-plugin-sdk/httpclient"
+ internalhttpclient "github.com/hashicorp/terraform-plugin-sdk/internal/httpclient"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry/response"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/version"
"github.com/hashicorp/terraform-svchost"
"github.com/hashicorp/terraform-svchost/disco"
- "github.com/hashicorp/terraform/httpclient"
- "github.com/hashicorp/terraform/registry/regsrc"
- "github.com/hashicorp/terraform/registry/response"
- "github.com/hashicorp/terraform/version"
)
const (
@@ -46,7 +47,7 @@ func NewClient(services *disco.Disco, client *http.Client) *Client {
}
if client == nil {
- client = httpclient.New()
+ client = internalhttpclient.New()
client.Timeout = requestTimeout
}
diff --git a/vendor/github.com/hashicorp/terraform/registry/errors.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/errors.go
similarity index 79%
rename from vendor/github.com/hashicorp/terraform/registry/errors.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/errors.go
index 3b99b34d8..b05438c4d 100644
--- a/vendor/github.com/hashicorp/terraform/registry/errors.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/errors.go
@@ -3,8 +3,8 @@ package registry
import (
"fmt"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc"
"github.com/hashicorp/terraform-svchost/disco"
- "github.com/hashicorp/terraform/registry/regsrc"
)
type errModuleNotFound struct {
@@ -31,14 +31,6 @@ func (e *errProviderNotFound) Error() string {
return fmt.Sprintf("provider %s not found", e.addr)
}
-// IsProviderNotFound returns true only if the given error is a "provider not found"
-// error. This allows callers to recognize this particular error condition
-// as distinct from operational errors such as poor network connectivity.
-func IsProviderNotFound(err error) bool {
- _, ok := err.(*errProviderNotFound)
- return ok
-}
-
// IsServiceNotProvided returns true only if the given error is a "service not provided"
// error. This allows callers to recognize this particular error condition
// as distinct from operational errors such as poor network connectivity.
diff --git a/vendor/github.com/hashicorp/terraform/registry/regsrc/friendly_host.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/friendly_host.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/registry/regsrc/friendly_host.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/friendly_host.go
diff --git a/vendor/github.com/hashicorp/terraform/registry/regsrc/module.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/module.go
similarity index 86%
rename from vendor/github.com/hashicorp/terraform/registry/regsrc/module.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/module.go
index c3edd7d87..eb37481ff 100644
--- a/vendor/github.com/hashicorp/terraform/registry/regsrc/module.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/module.go
@@ -6,7 +6,7 @@ import (
"regexp"
"strings"
- "github.com/hashicorp/terraform-svchost"
+ svchost "github.com/hashicorp/terraform-svchost"
)
var (
@@ -35,14 +35,6 @@ var (
fmt.Sprintf("^(%s)\\/(%s)\\/(%s)(?:\\/\\/(.*))?$",
nameSubRe, nameSubRe, providerSubRe))
- // NameRe is a regular expression defining the format allowed for namespace
- // or name fields in module registry implementations.
- NameRe = regexp.MustCompile("^" + nameSubRe + "$")
-
- // ProviderRe is a regular expression defining the format allowed for
- // provider fields in module registry implementations.
- ProviderRe = regexp.MustCompile("^" + providerSubRe + "$")
-
// these hostnames are not allowed as registry sources, because they are
// already special case module sources in terraform.
disallowed = map[string]bool{
@@ -67,28 +59,6 @@ type Module struct {
RawSubmodule string
}
-// NewModule construct a new module source from separate parts. Pass empty
-// string if host or submodule are not needed.
-func NewModule(host, namespace, name, provider, submodule string) (*Module, error) {
- m := &Module{
- RawNamespace: namespace,
- RawName: name,
- RawProvider: provider,
- RawSubmodule: submodule,
- }
- if host != "" {
- h := NewFriendlyHost(host)
- if h != nil {
- fmt.Println("HOST:", h)
- if !h.Valid() || disallowed[h.Display()] {
- return nil, ErrInvalidModuleSource
- }
- }
- m.RawHost = h
- }
- return m, nil
-}
-
// ParseModuleSource attempts to parse source as a Terraform registry module
// source. If the string is not found to be in a valid format,
// ErrInvalidModuleSource is returned. Note that this can only be used on
diff --git a/vendor/github.com/hashicorp/terraform/registry/regsrc/regsrc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/regsrc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/registry/regsrc/regsrc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/regsrc.go
diff --git a/vendor/github.com/hashicorp/terraform/registry/regsrc/terraform_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/terraform_provider.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/registry/regsrc/terraform_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc/terraform_provider.go
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/response/module.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/response/module.go
new file mode 100644
index 000000000..06163963e
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/response/module.go
@@ -0,0 +1,46 @@
+package response
+
+// ModuleSubmodule is the metadata about a specific submodule within
+// a module. This includes the root module as a special case.
+type ModuleSubmodule struct {
+ Path string `json:"path"`
+ Readme string `json:"readme"`
+ Empty bool `json:"empty"`
+
+ Inputs []*ModuleInput `json:"inputs"`
+ Outputs []*ModuleOutput `json:"outputs"`
+ Dependencies []*ModuleDep `json:"dependencies"`
+ Resources []*ModuleResource `json:"resources"`
+}
+
+// ModuleInput is an input for a module.
+type ModuleInput struct {
+ Name string `json:"name"`
+ Description string `json:"description"`
+ Default string `json:"default"`
+}
+
+// ModuleOutput is an output for a module.
+type ModuleOutput struct {
+ Name string `json:"name"`
+ Description string `json:"description"`
+}
+
+// ModuleDep is a dependency of a module.
+type ModuleDep struct {
+ Name string `json:"name"`
+ Source string `json:"source"`
+ Version string `json:"version"`
+}
+
+// ModuleProviderDep is the output for a provider dependency
+type ModuleProviderDep struct {
+ Name string `json:"name"`
+ Version string `json:"version"`
+}
+
+// ModuleResource is a resource in a module.
+type ModuleResource struct {
+ Name string `json:"name"`
+ Type string `json:"type"`
+}
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/module_versions.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/response/module_versions.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/registry/response/module_versions.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/response/module_versions.go
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/pagination.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/response/pagination.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/registry/response/pagination.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/response/pagination.go
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/terraform_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/response/terraform_provider.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/registry/response/terraform_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/registry/response/terraform_provider.go
diff --git a/vendor/github.com/hashicorp/terraform/states/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/states/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/states/eachmode_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/eachmode_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/states/eachmode_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/eachmode_string.go
diff --git a/vendor/github.com/hashicorp/terraform/states/instance_generation.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/instance_generation.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/states/instance_generation.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/instance_generation.go
index 617ad4ea6..891adc003 100644
--- a/vendor/github.com/hashicorp/terraform/states/instance_generation.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/instance_generation.go
@@ -18,7 +18,3 @@ type Generation interface {
// CurrentGen is the Generation representing the currently-active object for
// a resource instance.
var CurrentGen Generation
-
-type currentGen struct{}
-
-func (g currentGen) generation() {}
diff --git a/vendor/github.com/hashicorp/terraform/states/instance_object.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/instance_object.go
similarity index 87%
rename from vendor/github.com/hashicorp/terraform/states/instance_object.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/instance_object.go
index 78e1dda93..3bb717d33 100644
--- a/vendor/github.com/hashicorp/terraform/states/instance_object.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/instance_object.go
@@ -4,7 +4,7 @@ import (
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// ResourceInstanceObject is the local representation of a specific remote
@@ -29,17 +29,12 @@ type ResourceInstanceObject struct {
// it was updated.
Status ObjectStatus
- // Dependencies is a set of absolute address to other resources this
- // instance dependeded on when it was applied. This is used to construct
- // the dependency relationships for an object whose configuration is no
- // longer available, such as if it has been removed from configuration
- // altogether, or is now deposed.
- Dependencies []addrs.AbsResource
-
- // DependsOn corresponds to the deprecated `depends_on` field in the state.
- // This field contained the configuration `depends_on` values, and some of
- // the references from within a single module.
- DependsOn []addrs.Referenceable
+ // Dependencies is a set of other addresses in the same module which
+ // this instance depended on when the given attributes were evaluated.
+ // This is used to construct the dependency relationships for an object
+ // whose configuration is no longer available, such as if it has been
+ // removed from configuration altogether, or is now deposed.
+ Dependencies []addrs.Referenceable
}
// ObjectStatus represents the status of a RemoteObject.
diff --git a/vendor/github.com/hashicorp/terraform/states/instance_object_src.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/instance_object_src.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/states/instance_object_src.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/instance_object_src.go
index a18cf313c..728ad80d1 100644
--- a/vendor/github.com/hashicorp/terraform/states/instance_object_src.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/instance_object_src.go
@@ -4,8 +4,8 @@ import (
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
)
// ResourceInstanceObjectSrc is a not-fully-decoded version of
@@ -53,9 +53,7 @@ type ResourceInstanceObjectSrc struct {
// ResourceInstanceObject.
Private []byte
Status ObjectStatus
- Dependencies []addrs.AbsResource
- // deprecated
- DependsOn []addrs.Referenceable
+ Dependencies []addrs.Referenceable
}
// Decode unmarshals the raw representation of the object attributes. Pass the
@@ -88,7 +86,6 @@ func (os *ResourceInstanceObjectSrc) Decode(ty cty.Type) (*ResourceInstanceObjec
Value: val,
Status: os.Status,
Dependencies: os.Dependencies,
- DependsOn: os.DependsOn,
Private: os.Private,
}, nil
}
diff --git a/vendor/github.com/hashicorp/terraform/states/module.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/module.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/states/module.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/module.go
index d89e7878d..6b74cbfa6 100644
--- a/vendor/github.com/hashicorp/terraform/states/module.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/module.go
@@ -3,7 +3,7 @@ package states
import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// Module is a container for the states of objects within a particular module.
@@ -146,23 +146,6 @@ func (ms *Module) SetResourceInstanceDeposed(addr addrs.ResourceInstance, key De
}
}
-// ForgetResourceInstanceAll removes the record of all objects associated with
-// the specified resource instance, if present. If not present, this is a no-op.
-func (ms *Module) ForgetResourceInstanceAll(addr addrs.ResourceInstance) {
- rs := ms.Resource(addr.Resource)
- if rs == nil {
- return
- }
- delete(rs.Instances, addr.Key)
-
- if rs.EachMode == NoEach && len(rs.Instances) == 0 {
- // Also clean up if we only expect to have one instance anyway
- // and there are none. We leave the resource behind if an each mode
- // is active because an empty list or map of instances is a valid state.
- delete(ms.Resources, addr.Resource.String())
- }
-}
-
// ForgetResourceInstanceDeposed removes the record of the deposed object with
// the given address and key, if present. If not present, this is a no-op.
func (ms *Module) ForgetResourceInstanceDeposed(addr addrs.ResourceInstance, key DeposedKey) {
diff --git a/vendor/github.com/hashicorp/terraform/states/objectstatus_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/objectstatus_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/states/objectstatus_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/objectstatus_string.go
diff --git a/vendor/github.com/hashicorp/terraform/states/output_value.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/output_value.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/states/output_value.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/output_value.go
diff --git a/vendor/github.com/hashicorp/terraform/states/resource.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/resource.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/states/resource.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/resource.go
index 7f58543c4..32ea638ac 100644
--- a/vendor/github.com/hashicorp/terraform/states/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/resource.go
@@ -5,7 +5,7 @@ import (
"math/rand"
"time"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// Resource represents the state of a resource.
@@ -88,12 +88,6 @@ func (i *ResourceInstance) HasDeposed(key DeposedKey) bool {
return i != nil && i.Deposed[key] != nil
}
-// HasAnyDeposed returns true if this resource instance has one or more
-// deposed objects.
-func (i *ResourceInstance) HasAnyDeposed() bool {
- return i != nil && len(i.Deposed) > 0
-}
-
// HasObjects returns true if this resource has any objects at all, whether
// current or deposed.
func (i *ResourceInstance) HasObjects() bool {
diff --git a/vendor/github.com/hashicorp/terraform/states/state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/states/state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state.go
index 1f842359e..328dd53d5 100644
--- a/vendor/github.com/hashicorp/terraform/states/state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state.go
@@ -5,7 +5,7 @@ import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// State is the top-level type of a Terraform state.
diff --git a/vendor/github.com/hashicorp/terraform/states/state_deepcopy.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state_deepcopy.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/states/state_deepcopy.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state_deepcopy.go
index 7d7a7ef10..6266aca79 100644
--- a/vendor/github.com/hashicorp/terraform/states/state_deepcopy.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state_deepcopy.go
@@ -1,7 +1,7 @@
package states
import (
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
"github.com/zclconf/go-cty/cty"
)
@@ -153,17 +153,8 @@ func (obj *ResourceInstanceObjectSrc) DeepCopy() *ResourceInstanceObjectSrc {
// Some addrs.Referenceable implementations are technically mutable, but
// we treat them as immutable by convention and so we don't deep-copy here.
- var dependencies []addrs.AbsResource
- if obj.Dependencies != nil {
- dependencies = make([]addrs.AbsResource, len(obj.Dependencies))
- copy(dependencies, obj.Dependencies)
- }
-
- var dependsOn []addrs.Referenceable
- if obj.DependsOn != nil {
- dependsOn = make([]addrs.Referenceable, len(obj.DependsOn))
- copy(dependsOn, obj.DependsOn)
- }
+ dependencies := make([]addrs.Referenceable, len(obj.Dependencies))
+ copy(dependencies, obj.Dependencies)
return &ResourceInstanceObjectSrc{
Status: obj.Status,
@@ -172,7 +163,6 @@ func (obj *ResourceInstanceObjectSrc) DeepCopy() *ResourceInstanceObjectSrc {
AttrsFlat: attrsFlat,
AttrsJSON: attrsJSON,
Dependencies: dependencies,
- DependsOn: dependsOn,
}
}
@@ -197,9 +187,9 @@ func (obj *ResourceInstanceObject) DeepCopy() *ResourceInstanceObject {
// Some addrs.Referenceable implementations are technically mutable, but
// we treat them as immutable by convention and so we don't deep-copy here.
- var dependencies []addrs.AbsResource
+ var dependencies []addrs.Referenceable
if obj.Dependencies != nil {
- dependencies = make([]addrs.AbsResource, len(obj.Dependencies))
+ dependencies = make([]addrs.Referenceable, len(obj.Dependencies))
copy(dependencies, obj.Dependencies)
}
diff --git a/vendor/github.com/hashicorp/terraform/states/state_equal.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state_equal.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/states/state_equal.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state_equal.go
diff --git a/vendor/github.com/hashicorp/terraform/states/state_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state_string.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/states/state_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state_string.go
index 8be3d01a0..dffd650d6 100644
--- a/vendor/github.com/hashicorp/terraform/states/state_string.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/state_string.go
@@ -10,8 +10,8 @@ import (
ctyjson "github.com/zclconf/go-cty/cty/json"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
)
// String returns a rather-odd string representation of the entire state.
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/diagnostics.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/diagnostics.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/states/statefile/diagnostics.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/diagnostics.go
index a6d88ecd5..042ce51c1 100644
--- a/vendor/github.com/hashicorp/terraform/states/statefile/diagnostics.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/diagnostics.go
@@ -4,7 +4,7 @@ import (
"encoding/json"
"fmt"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
const invalidFormat = "Invalid state file format"
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/states/statefile/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/file.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/file.go
similarity index 53%
rename from vendor/github.com/hashicorp/terraform/states/statefile/file.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/file.go
index 6e2024019..70c8ba6ce 100644
--- a/vendor/github.com/hashicorp/terraform/states/statefile/file.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/file.go
@@ -3,8 +3,7 @@ package statefile
import (
version "github.com/hashicorp/go-version"
- "github.com/hashicorp/terraform/states"
- tfversion "github.com/hashicorp/terraform/version"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// File is the in-memory representation of a state file. It includes the state
@@ -30,33 +29,3 @@ type File struct {
// State is the actual state represented by this file.
State *states.State
}
-
-func New(state *states.State, lineage string, serial uint64) *File {
- // To make life easier on callers, we'll accept a nil state here and just
- // allocate an empty one, which is required for this file to be successfully
- // written out.
- if state == nil {
- state = states.NewState()
- }
-
- return &File{
- TerraformVersion: tfversion.SemVer,
- State: state,
- Lineage: lineage,
- Serial: serial,
- }
-}
-
-// DeepCopy is a convenience method to create a new File object whose state
-// is a deep copy of the receiver's, as implemented by states.State.DeepCopy.
-func (f *File) DeepCopy() *File {
- if f == nil {
- return nil
- }
- return &File{
- TerraformVersion: f.TerraformVersion,
- Serial: f.Serial,
- Lineage: f.Lineage,
- State: f.State.DeepCopy(),
- }
-}
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/read.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/read.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/states/statefile/read.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/read.go
index d691c0290..f1899cd22 100644
--- a/vendor/github.com/hashicorp/terraform/states/statefile/read.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/read.go
@@ -10,8 +10,8 @@ import (
version "github.com/hashicorp/go-version"
- "github.com/hashicorp/terraform/tfdiags"
- tfversion "github.com/hashicorp/terraform/version"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
+ tfversion "github.com/hashicorp/terraform-plugin-sdk/internal/version"
)
// ErrNoState is returned by ReadState when the state file is empty.
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/version0.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version0.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/states/statefile/version0.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version0.go
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/version1.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version1.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/states/statefile/version1.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version1.go
index 80d711bc8..85b422ad2 100644
--- a/vendor/github.com/hashicorp/terraform/states/statefile/version1.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version1.go
@@ -4,7 +4,7 @@ import (
"encoding/json"
"fmt"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
func readStateV1(src []byte) (*File, tfdiags.Diagnostics) {
@@ -165,10 +165,3 @@ type instanceStateV1 struct {
// external client code.
Meta map[string]string `json:"meta,omitempty"`
}
-
-type ephemeralStateV1 struct {
- // ConnInfo is used for the providers to export information which is
- // used to connect to the resource for provisioning. For example,
- // this could contain SSH or WinRM credentials.
- ConnInfo map[string]string `json:"-"`
-}
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/version1_upgrade.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version1_upgrade.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/states/statefile/version1_upgrade.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version1_upgrade.go
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/version2.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version2.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/states/statefile/version2.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version2.go
index be93924a7..6d10166b2 100644
--- a/vendor/github.com/hashicorp/terraform/states/statefile/version2.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version2.go
@@ -3,9 +3,8 @@ package statefile
import (
"encoding/json"
"fmt"
- "sync"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
func readStateV2(src []byte) (*File, tfdiags.Diagnostics) {
@@ -95,8 +94,6 @@ type outputStateV2 struct {
// Value contains the value of the output, in the structure described
// by the Type field.
Value interface{} `json:"value"`
-
- mu sync.Mutex
}
type moduleStateV2 struct {
@@ -178,8 +175,6 @@ type resourceStateV2 struct {
// e.g. "aws_instance" goes with the "aws" provider.
// If the resource block contained a "provider" key, that value will be set here.
Provider string `json:"provider"`
-
- mu sync.Mutex
}
type instanceStateV2 struct {
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/version2_upgrade.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version2_upgrade.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/states/statefile/version2_upgrade.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version2_upgrade.go
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/version3.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version3.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/states/statefile/version3.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version3.go
index ab6414b0a..1c81e7169 100644
--- a/vendor/github.com/hashicorp/terraform/states/statefile/version3.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version3.go
@@ -4,7 +4,7 @@ import (
"encoding/json"
"fmt"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
func readStateV3(src []byte) (*File, tfdiags.Diagnostics) {
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/version3_upgrade.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version3_upgrade.go
similarity index 80%
rename from vendor/github.com/hashicorp/terraform/states/statefile/version3_upgrade.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version3_upgrade.go
index 753298ff0..f08a62b2d 100644
--- a/vendor/github.com/hashicorp/terraform/states/statefile/version3_upgrade.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version3_upgrade.go
@@ -3,17 +3,15 @@ package statefile
import (
"encoding/json"
"fmt"
- "log"
"strconv"
"strings"
- "github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
func upgradeStateV3ToV4(old *stateV3) (*stateV4, error) {
@@ -52,13 +50,6 @@ func upgradeStateV3ToV4(old *stateV3) (*stateV4, error) {
// all of the modules are unkeyed.
moduleAddr := make(addrs.ModuleInstance, len(msOld.Path)-1)
for i, name := range msOld.Path[1:] {
- if !hclsyntax.ValidIdentifier(name) {
- // If we don't fail here then we'll produce an invalid state
- // version 4 which subsequent operations will reject, so we'll
- // fail early here for safety to make sure we can never
- // inadvertently commit an invalid snapshot to a backend.
- return nil, fmt.Errorf("state contains invalid module path %#v: %q is not a valid identifier; rename it in Terraform 0.11 before upgrading to Terraform 0.12", msOld.Path, name)
- }
moduleAddr[i] = addrs.ModuleInstanceStep{
Name: name,
InstanceKey: addrs.NoKey,
@@ -107,13 +98,6 @@ func upgradeStateV3ToV4(old *stateV3) (*stateV4, error) {
var diags tfdiags.Diagnostics
providerAddr, diags = addrs.ParseAbsProviderConfigStr(oldProviderAddr)
if diags.HasErrors() {
- if strings.Contains(oldProviderAddr, "${") {
- // There seems to be a common misconception that
- // interpolation was valid in provider aliases
- // in 0.11, so we'll use a specialized error
- // message for that case.
- return nil, fmt.Errorf("invalid provider config reference %q for %s: this alias seems to contain a template interpolation sequence, which was not supported but also not error-checked in Terraform 0.11. To proceed, rename the associated provider alias to a valid identifier and apply the change with Terraform 0.11 before upgrading to Terraform 0.12", oldProviderAddr, instAddr)
- }
return nil, fmt.Errorf("invalid provider config reference %q for %s: %s", oldProviderAddr, instAddr, diags.Err())
}
} else {
@@ -125,13 +109,6 @@ func upgradeStateV3ToV4(old *stateV3) (*stateV4, error) {
if oldProviderAddr != "" {
localAddr, diags := addrs.ParseProviderConfigCompactStr(oldProviderAddr)
if diags.HasErrors() {
- if strings.Contains(oldProviderAddr, "${") {
- // There seems to be a common misconception that
- // interpolation was valid in provider aliases
- // in 0.11, so we'll use a specialized error
- // message for that case.
- return nil, fmt.Errorf("invalid legacy provider config reference %q for %s: this alias seems to contain a template interpolation sequence, which was not supported but also not error-checked in Terraform 0.11. To proceed, rename the associated provider alias to a valid identifier and apply the change with Terraform 0.11 before upgrading to Terraform 0.12", oldProviderAddr, instAddr)
- }
return nil, fmt.Errorf("invalid legacy provider config reference %q for %s: %s", oldProviderAddr, instAddr, diags.Err())
}
providerAddr = localAddr.Absolute(moduleAddr)
@@ -322,33 +299,13 @@ func upgradeInstanceObjectV3ToV4(rsOld *resourceStateV2, isOld *instanceStateV2,
}
}
- dependencies := make([]string, 0, len(rsOld.Dependencies))
- for _, v := range rsOld.Dependencies {
+ dependencies := make([]string, len(rsOld.Dependencies))
+ for i, v := range rsOld.Dependencies {
depStr, err := parseLegacyDependency(v)
if err != nil {
- // We just drop invalid dependencies on the floor here, because
- // they tend to get left behind in Terraform 0.11 when resources
- // are renamed or moved between modules and there's no automatic
- // way to fix them here. In practice it shouldn't hurt to miss
- // a few dependency edges in the state because a subsequent plan
- // will run a refresh walk first and re-synchronize the
- // dependencies with the configuration.
- //
- // There is one rough edges where this can cause an incorrect
- // result, though: If the first command the user runs after
- // upgrading to Terraform 0.12 uses -refresh=false and thus
- // prevents the dependency reorganization from occurring _and_
- // that initial plan discovered "orphaned" resources (not present
- // in configuration any longer) then when the plan is applied the
- // destroy ordering will be incorrect for the instances of those
- // resources. We expect that is a rare enough situation that it
- // isn't a big deal, and even when it _does_ occur it's common for
- // the apply to succeed anyway unless many separate resources with
- // complex inter-dependencies are all orphaned at once.
- log.Printf("statefile: ignoring invalid dependency address %q while upgrading from state version 3 to version 4: %s", v, err)
- continue
+ return nil, fmt.Errorf("invalid dependency reference %q: %s", v, err)
}
- dependencies = append(dependencies, depStr)
+ dependencies[i] = depStr
}
return &instanceObjectStateV4{
@@ -356,7 +313,7 @@ func upgradeInstanceObjectV3ToV4(rsOld *resourceStateV2, isOld *instanceStateV2,
Status: status,
Deposed: string(deposedKey),
AttributesFlat: attributes,
- DependsOn: dependencies,
+ Dependencies: dependencies,
SchemaVersion: schemaVersion,
PrivateRaw: privateJSON,
}, nil
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/version4.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version4.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/states/statefile/version4.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version4.go
index 2cc0677ab..164b57f82 100644
--- a/vendor/github.com/hashicorp/terraform/states/statefile/version4.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/version4.go
@@ -9,9 +9,9 @@ import (
version "github.com/hashicorp/go-version"
ctyjson "github.com/zclconf/go-cty/cty/json"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
func readStateV4(src []byte) (*File, tfdiags.Diagnostics) {
@@ -181,10 +181,7 @@ func prepareStateV4(sV4 *stateV4) (*File, tfdiags.Diagnostics) {
}
{
- // Allow both the deprecated `depends_on` and new
- // `dependencies` to coexist for now so resources can be
- // upgraded as they are refreshed.
- depsRaw := isV4.DependsOn
+ depsRaw := isV4.Dependencies
deps := make([]addrs.Referenceable, 0, len(depsRaw))
for _, depRaw := range depsRaw {
ref, refDiags := addrs.ParseRefStr(depRaw)
@@ -205,20 +202,6 @@ func prepareStateV4(sV4 *stateV4) (*File, tfdiags.Diagnostics) {
}
deps = append(deps, ref.Subject)
}
- obj.DependsOn = deps
- }
-
- {
- depsRaw := isV4.Dependencies
- deps := make([]addrs.AbsResource, 0, len(depsRaw))
- for _, depRaw := range depsRaw {
- addr, addrDiags := addrs.ParseAbsResourceStr(depRaw)
- diags = diags.Append(addrDiags)
- if addrDiags.HasErrors() {
- continue
- }
- deps = append(deps, addr)
- }
obj.Dependencies = deps
}
@@ -483,11 +466,6 @@ func appendInstanceObjectStateV4(rs *states.Resource, is *states.ResourceInstanc
deps[i] = depAddr.String()
}
- depOn := make([]string, len(obj.DependsOn))
- for i, depAddr := range obj.DependsOn {
- depOn[i] = depAddr.String()
- }
-
var rawKey interface{}
switch tk := key.(type) {
case addrs.IntKey:
@@ -513,7 +491,6 @@ func appendInstanceObjectStateV4(rs *states.Resource, is *states.ResourceInstanc
AttributesRaw: obj.AttrsJSON,
PrivateRaw: privateRaw,
Dependencies: deps,
- DependsOn: depOn,
}), diags
}
@@ -563,8 +540,7 @@ type instanceObjectStateV4 struct {
PrivateRaw []byte `json:"private,omitempty"`
- Dependencies []string `json:"dependencies,omitempty"`
- DependsOn []string `json:"depends_on,omitempty"`
+ Dependencies []string `json:"depends_on,omitempty"`
}
// stateVersionV4 is a weird special type we use to produce our hard-coded
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/write.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/write.go
similarity index 82%
rename from vendor/github.com/hashicorp/terraform/states/statefile/write.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/write.go
index 548ba8a8b..8fdca4580 100644
--- a/vendor/github.com/hashicorp/terraform/states/statefile/write.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile/write.go
@@ -3,7 +3,7 @@ package statefile
import (
"io"
- tfversion "github.com/hashicorp/terraform/version"
+ tfversion "github.com/hashicorp/terraform-plugin-sdk/internal/version"
)
// Write writes the given state to the given writer in the current state
diff --git a/vendor/github.com/hashicorp/terraform/states/sync.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/sync.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/states/sync.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/sync.go
index 47fb16d6e..6d2361254 100644
--- a/vendor/github.com/hashicorp/terraform/states/sync.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/states/sync.go
@@ -4,7 +4,7 @@ import (
"log"
"sync"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
"github.com/zclconf/go-cty/cty"
)
@@ -48,17 +48,6 @@ func (s *SyncState) Module(addr addrs.ModuleInstance) *Module {
return ret
}
-// RemoveModule removes the entire state for the given module, taking with
-// it any resources associated with the module. This should generally be
-// called only for modules whose resources have all been destroyed, but
-// that is not enforced by this method.
-func (s *SyncState) RemoveModule(addr addrs.ModuleInstance) {
- s.lock.Lock()
- defer s.lock.Unlock()
-
- s.state.RemoveModule(addr)
-}
-
// OutputValue returns a snapshot of the state of the output value with the
// given address, or nil if no such output value is tracked.
//
@@ -193,20 +182,6 @@ func (s *SyncState) SetResourceMeta(addr addrs.AbsResource, eachMode EachMode, p
ms.SetResourceMeta(addr.Resource, eachMode, provider)
}
-// RemoveResource removes the entire state for the given resource, taking with
-// it any instances associated with the resource. This should generally be
-// called only for resource objects whose instances have all been destroyed,
-// but that is not enforced by this method. (Use RemoveResourceIfEmpty instead
-// to safely check first.)
-func (s *SyncState) RemoveResource(addr addrs.AbsResource) {
- s.lock.Lock()
- defer s.lock.Unlock()
-
- ms := s.state.EnsureModule(addr.Module)
- ms.RemoveResource(addr.Resource)
- s.maybePruneModule(addr.Module)
-}
-
// RemoveResourceIfEmpty is similar to RemoveResource but first checks to
// make sure there are no instances or objects left in the resource.
//
@@ -387,34 +362,6 @@ func (s *SyncState) DeposeResourceInstanceObjectForceKey(addr addrs.AbsResourceI
ms.deposeResourceInstanceObject(addr.Resource, forcedKey)
}
-// ForgetResourceInstanceAll removes the record of all objects associated with
-// the specified resource instance, if present. If not present, this is a no-op.
-func (s *SyncState) ForgetResourceInstanceAll(addr addrs.AbsResourceInstance) {
- s.lock.Lock()
- defer s.lock.Unlock()
-
- ms := s.state.Module(addr.Module)
- if ms == nil {
- return
- }
- ms.ForgetResourceInstanceAll(addr.Resource)
- s.maybePruneModule(addr.Module)
-}
-
-// ForgetResourceInstanceDeposed removes the record of the deposed object with
-// the given address and key, if present. If not present, this is a no-op.
-func (s *SyncState) ForgetResourceInstanceDeposed(addr addrs.AbsResourceInstance, key DeposedKey) {
- s.lock.Lock()
- defer s.lock.Unlock()
-
- ms := s.state.Module(addr.Module)
- if ms == nil {
- return
- }
- ms.ForgetResourceInstanceDeposed(addr.Resource, key)
- s.maybePruneModule(addr.Module)
-}
-
// MaybeRestoreResourceInstanceDeposed will restore the deposed object with the
// given key on the specified resource as the current object for that instance
// if and only if that would not cause us to forget an existing current
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/config_traversals.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/config_traversals.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/config_traversals.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/config_traversals.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/contextual.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/contextual.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/tfdiags/contextual.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/contextual.go
index d55bc2f0c..59c06b70b 100644
--- a/vendor/github.com/hashicorp/terraform/tfdiags/contextual.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/contextual.go
@@ -308,8 +308,8 @@ func hclRangeFromIndexStepAndAttribute(idxStep cty.IndexStep, attr *hcl.Attribut
}
stepKey := idxStep.Key.AsString()
for _, kvPair := range pairs {
- key, diags := kvPair.Key.Value(nil)
- if diags.HasErrors() {
+ key, err := kvPair.Key.Value(nil)
+ if err != nil {
return attr.Expr.Range()
}
if key.AsString() == stepKey {
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/diagnostic.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/diagnostic.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/diagnostic.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/diagnostic.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/diagnostic_base.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/diagnostic_base.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/diagnostic_base.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/diagnostic_base.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/diagnostics.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/diagnostics.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/tfdiags/diagnostics.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/diagnostics.go
index 30476ee26..a19fa80c4 100644
--- a/vendor/github.com/hashicorp/terraform/tfdiags/diagnostics.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/diagnostics.go
@@ -177,18 +177,6 @@ func (diags Diagnostics) NonFatalErr() error {
return NonFatalError{diags}
}
-// Sort applies an ordering to the diagnostics in the receiver in-place.
-//
-// The ordering is: warnings before errors, sourceless before sourced,
-// short source paths before long source paths, and then ordering by
-// position within each file.
-//
-// Diagnostics that do not differ by any of these sortable characteristics
-// will remain in the same relative order after this method returns.
-func (diags Diagnostics) Sort() {
- sort.Stable(sortDiagnostics(diags))
-}
-
type diagnosticsAsError struct {
Diagnostics
}
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/doc.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/doc.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/doc.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/doc.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/error.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/error.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/error.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/error.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/hcl.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/hcl.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/hcl.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/hcl.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/rpc_friendly.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/rpc_friendly.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/rpc_friendly.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/rpc_friendly.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/severity_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/severity_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/severity_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/severity_string.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/simple_warning.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/simple_warning.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/simple_warning.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/simple_warning.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/source_range.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/source_range.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/source_range.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/source_range.go
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/sourceless.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/sourceless.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/tfdiags/sourceless.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags/sourceless.go
diff --git a/vendor/github.com/hashicorp/terraform/internal/tfplugin5/generate.sh b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5/generate.sh
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/tfplugin5/generate.sh
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5/generate.sh
diff --git a/vendor/github.com/hashicorp/terraform/internal/tfplugin5/tfplugin5.pb.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5/tfplugin5.pb.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/internal/tfplugin5/tfplugin5.pb.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5/tfplugin5.pb.go
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5/tfplugin5.proto b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5/tfplugin5.proto
new file mode 100644
index 000000000..9875d9ba6
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5/tfplugin5.proto
@@ -0,0 +1,353 @@
+// Terraform Plugin RPC protocol version 5.1
+//
+// This file defines version 5.1 of the RPC protocol. To implement a plugin
+// against this protocol, copy this definition into your own codebase and
+// use protoc to generate stubs for your target language.
+//
+// This file will be updated in-place in the source Terraform repository for
+// any minor versions of protocol 5, but later minor versions will always be
+// backwards compatible. Breaking changes, if any are required, will come
+// in a subsequent major version with its own separate proto definition.
+//
+// Note that only the proto files included in a release tag of Terraform are
+// official protocol releases. Proto files taken from other commits may include
+// incomplete changes or features that did not make it into a final release.
+// In all reasonable cases, plugin developers should take the proto file from
+// the tag of the most recent release of Terraform, and not from the master
+// branch or any other development branch.
+//
+syntax = "proto3";
+
+package tfplugin5;
+
+// DynamicValue is an opaque encoding of terraform data, with the field name
+// indicating the encoding scheme used.
+message DynamicValue {
+ bytes msgpack = 1;
+ bytes json = 2;
+}
+
+message Diagnostic {
+ enum Severity {
+ INVALID = 0;
+ ERROR = 1;
+ WARNING = 2;
+ }
+ Severity severity = 1;
+ string summary = 2;
+ string detail = 3;
+ AttributePath attribute = 4;
+}
+
+message AttributePath {
+ message Step {
+ oneof selector {
+ // Set "attribute_name" to represent looking up an attribute
+ // in the current object value.
+ string attribute_name = 1;
+ // Set "element_key_*" to represent looking up an element in
+ // an indexable collection type.
+ string element_key_string = 2;
+ int64 element_key_int = 3;
+ }
+ }
+ repeated Step steps = 1;
+}
+
+message Stop {
+ message Request {
+ }
+ message Response {
+ string Error = 1;
+ }
+}
+
+// RawState holds the stored state for a resource to be upgraded by the
+// provider. It can be in one of two formats, the current json encoded format
+// in bytes, or the legacy flatmap format as a map of strings.
+message RawState {
+ bytes json = 1;
+ map<string, string> flatmap = 2;
+}
+
+// Schema is the configuration schema for a Resource, Provider, or Provisioner.
+message Schema {
+ message Block {
+ int64 version = 1;
+ repeated Attribute attributes = 2;
+ repeated NestedBlock block_types = 3;
+ }
+
+ message Attribute {
+ string name = 1;
+ bytes type = 2;
+ string description = 3;
+ bool required = 4;
+ bool optional = 5;
+ bool computed = 6;
+ bool sensitive = 7;
+ }
+
+ message NestedBlock {
+ enum NestingMode {
+ INVALID = 0;
+ SINGLE = 1;
+ LIST = 2;
+ SET = 3;
+ MAP = 4;
+ GROUP = 5;
+ }
+
+ string type_name = 1;
+ Block block = 2;
+ NestingMode nesting = 3;
+ int64 min_items = 4;
+ int64 max_items = 5;
+ }
+
+ // The version of the schema.
+ // Schemas are versioned, so that providers can upgrade a saved resource
+ // state when the schema is changed.
+ int64 version = 1;
+
+ // Block is the top level configuration block for this schema.
+ Block block = 2;
+}
+
+service Provider {
+ //////// Information about what a provider supports/expects
+ rpc GetSchema(GetProviderSchema.Request) returns (GetProviderSchema.Response);
+ rpc PrepareProviderConfig(PrepareProviderConfig.Request) returns (PrepareProviderConfig.Response);
+ rpc ValidateResourceTypeConfig(ValidateResourceTypeConfig.Request) returns (ValidateResourceTypeConfig.Response);
+ rpc ValidateDataSourceConfig(ValidateDataSourceConfig.Request) returns (ValidateDataSourceConfig.Response);
+ rpc UpgradeResourceState(UpgradeResourceState.Request) returns (UpgradeResourceState.Response);
+
+ //////// One-time initialization, called before other functions below
+ rpc Configure(Configure.Request) returns (Configure.Response);
+
+ //////// Managed Resource Lifecycle
+ rpc ReadResource(ReadResource.Request) returns (ReadResource.Response);
+ rpc PlanResourceChange(PlanResourceChange.Request) returns (PlanResourceChange.Response);
+ rpc ApplyResourceChange(ApplyResourceChange.Request) returns (ApplyResourceChange.Response);
+ rpc ImportResourceState(ImportResourceState.Request) returns (ImportResourceState.Response);
+
+ rpc ReadDataSource(ReadDataSource.Request) returns (ReadDataSource.Response);
+
+ //////// Graceful Shutdown
+ rpc Stop(Stop.Request) returns (Stop.Response);
+}
+
+message GetProviderSchema {
+ message Request {
+ }
+ message Response {
+ Schema provider = 1;
+ map<string, Schema> resource_schemas = 2;
+ map<string, Schema> data_source_schemas = 3;
+ repeated Diagnostic diagnostics = 4;
+ }
+}
+
+message PrepareProviderConfig {
+ message Request {
+ DynamicValue config = 1;
+ }
+ message Response {
+ DynamicValue prepared_config = 1;
+ repeated Diagnostic diagnostics = 2;
+ }
+}
+
+message UpgradeResourceState {
+ message Request {
+ string type_name = 1;
+
+ // version is the schema_version number recorded in the state file
+ int64 version = 2;
+
+ // raw_state is the raw states as stored for the resource. Core does
+ // not have access to the schema of prior_version, so it's the
+ // provider's responsibility to interpret this value using the
+ // appropriate older schema. The raw_state will be the json encoded
+ // state, or a legacy flat-mapped format.
+ RawState raw_state = 3;
+ }
+ message Response {
+ // new_state is a msgpack-encoded data structure that, when interpreted with
+ // the _current_ schema for this resource type, is functionally equivalent to
+ // that which was given in prior_state_raw.
+ DynamicValue upgraded_state = 1;
+
+ // diagnostics describes any errors encountered during migration that could not
+ // be safely resolved, and warnings about any possibly-risky assumptions made
+ // in the upgrade process.
+ repeated Diagnostic diagnostics = 2;
+ }
+}
+
+message ValidateResourceTypeConfig {
+ message Request {
+ string type_name = 1;
+ DynamicValue config = 2;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ }
+}
+
+message ValidateDataSourceConfig {
+ message Request {
+ string type_name = 1;
+ DynamicValue config = 2;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ }
+}
+
+message Configure {
+ message Request {
+ string terraform_version = 1;
+ DynamicValue config = 2;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ }
+}
+
+message ReadResource {
+ message Request {
+ string type_name = 1;
+ DynamicValue current_state = 2;
+ bytes private = 3;
+ }
+ message Response {
+ DynamicValue new_state = 1;
+ repeated Diagnostic diagnostics = 2;
+ bytes private = 3;
+ }
+}
+
+message PlanResourceChange {
+ message Request {
+ string type_name = 1;
+ DynamicValue prior_state = 2;
+ DynamicValue proposed_new_state = 3;
+ DynamicValue config = 4;
+ bytes prior_private = 5;
+ }
+
+ message Response {
+ DynamicValue planned_state = 1;
+ repeated AttributePath requires_replace = 2;
+ bytes planned_private = 3;
+ repeated Diagnostic diagnostics = 4;
+
+
+ // This may be set only by the helper/schema "SDK" in the main Terraform
+ // repository, to request that Terraform Core >=0.12 permit additional
+ // inconsistencies that can result from the legacy SDK type system
+ // and its imprecise mapping to the >=0.12 type system.
+ // The change in behavior implied by this flag makes sense only for the
+ // specific details of the legacy SDK type system, and are not a general
+ // mechanism to avoid proper type handling in providers.
+ //
+ // ==== DO NOT USE THIS ====
+ // ==== THIS MUST BE LEFT UNSET IN ALL OTHER SDKS ====
+ // ==== DO NOT USE THIS ====
+ bool legacy_type_system = 5;
+ }
+}
+
+message ApplyResourceChange {
+ message Request {
+ string type_name = 1;
+ DynamicValue prior_state = 2;
+ DynamicValue planned_state = 3;
+ DynamicValue config = 4;
+ bytes planned_private = 5;
+ }
+ message Response {
+ DynamicValue new_state = 1;
+ bytes private = 2;
+ repeated Diagnostic diagnostics = 3;
+
+ // This may be set only by the helper/schema "SDK" in the main Terraform
+ // repository, to request that Terraform Core >=0.12 permit additional
+ // inconsistencies that can result from the legacy SDK type system
+ // and its imprecise mapping to the >=0.12 type system.
+ // The change in behavior implied by this flag makes sense only for the
+ // specific details of the legacy SDK type system, and are not a general
+ // mechanism to avoid proper type handling in providers.
+ //
+ // ==== DO NOT USE THIS ====
+ // ==== THIS MUST BE LEFT UNSET IN ALL OTHER SDKS ====
+ // ==== DO NOT USE THIS ====
+ bool legacy_type_system = 4;
+ }
+}
+
+message ImportResourceState {
+ message Request {
+ string type_name = 1;
+ string id = 2;
+ }
+
+ message ImportedResource {
+ string type_name = 1;
+ DynamicValue state = 2;
+ bytes private = 3;
+ }
+
+ message Response {
+ repeated ImportedResource imported_resources = 1;
+ repeated Diagnostic diagnostics = 2;
+ }
+}
+
+message ReadDataSource {
+ message Request {
+ string type_name = 1;
+ DynamicValue config = 2;
+ }
+ message Response {
+ DynamicValue state = 1;
+ repeated Diagnostic diagnostics = 2;
+ }
+}
+
+service Provisioner {
+ rpc GetSchema(GetProvisionerSchema.Request) returns (GetProvisionerSchema.Response);
+ rpc ValidateProvisionerConfig(ValidateProvisionerConfig.Request) returns (ValidateProvisionerConfig.Response);
+ rpc ProvisionResource(ProvisionResource.Request) returns (stream ProvisionResource.Response);
+ rpc Stop(Stop.Request) returns (Stop.Response);
+}
+
+message GetProvisionerSchema {
+ message Request {
+ }
+ message Response {
+ Schema provisioner = 1;
+ repeated Diagnostic diagnostics = 2;
+ }
+}
+
+message ValidateProvisionerConfig {
+ message Request {
+ DynamicValue config = 1;
+ }
+ message Response {
+ repeated Diagnostic diagnostics = 1;
+ }
+}
+
+message ProvisionResource {
+ message Request {
+ DynamicValue config = 1;
+ DynamicValue connection = 2;
+ }
+ message Response {
+ string output = 1;
+ repeated Diagnostic diagnostics = 2;
+ }
+}
diff --git a/vendor/github.com/hashicorp/terraform/version/version.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/version/version.go
similarity index 85%
rename from vendor/github.com/hashicorp/terraform/version/version.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/internal/version/version.go
index a62948996..2d56dab69 100644
--- a/vendor/github.com/hashicorp/terraform/version/version.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/internal/version/version.go
@@ -11,12 +11,12 @@ import (
)
// The main version number that is being run at the moment.
-var Version = "0.12.17"
+var Version = "0.12.7"
// A pre-release marker for the version. If this is "" (empty string)
// then it means that it is a final release. Otherwise, this is a pre-release
// such as "dev" (in development), "beta", "rc1", etc.
-var Prerelease = ""
+var Prerelease = "sdk"
// SemVer is an instance of version.Version. This has the secondary
// benefit of verifying during tests and init time that our version is a
@@ -27,10 +27,6 @@ func init() {
SemVer = version.Must(version.NewVersion(Version))
}
-// Header is the header name used to send the current terraform version
-// in http requests.
-const Header = "Terraform-Version"
-
// String returns the complete version string, including prerelease
func String() string {
if Prerelease != "" {
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/meta/meta.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/meta/meta.go
new file mode 100644
index 000000000..e64e224b1
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/meta/meta.go
@@ -0,0 +1,36 @@
+// The meta package provides a location to set the release version
+// and any other relevant metadata for the SDK.
+//
+// This package should not import any other SDK packages.
+package meta
+
+import (
+ "fmt"
+
+ version "github.com/hashicorp/go-version"
+)
+
+// The main version number that is being run at the moment.
+var SDKVersion = "1.6.0"
+
+// A pre-release marker for the version. If this is "" (empty string)
+// then it means that it is a final release. Otherwise, this is a pre-release
+// such as "dev" (in development), "beta", "rc1", etc.
+var SDKPrerelease = ""
+
+// SemVer is an instance of version.Version. This has the secondary
+// benefit of verifying during tests and init time that our version is a
+// proper semantic version, which should always be the case.
+var SemVer *version.Version
+
+func init() {
+ SemVer = version.Must(version.NewVersion(SDKVersion))
+}
+
+// VersionString returns the complete version string, including prerelease
+func SDKVersionString() string {
+ if SDKPrerelease != "" {
+ return fmt.Sprintf("%s-%s", SDKVersion, SDKPrerelease)
+ }
+ return SDKVersion
+}
diff --git a/vendor/github.com/hashicorp/terraform/plugin/client.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/client.go
similarity index 92%
rename from vendor/github.com/hashicorp/terraform/plugin/client.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/client.go
index 0eab5385b..5a99e9006 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/client.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/client.go
@@ -6,7 +6,7 @@ import (
hclog "github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin"
- "github.com/hashicorp/terraform/plugin/discovery"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery"
)
// ClientConfig returns a configuration object that can be used to instantiate
diff --git a/vendor/github.com/hashicorp/terraform/plugin/grpc_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/grpc_provider.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/plugin/grpc_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/grpc_provider.go
index 1abdbe297..e4520975c 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/grpc_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/grpc_provider.go
@@ -9,9 +9,9 @@ import (
"github.com/zclconf/go-cty/cty"
plugin "github.com/hashicorp/go-plugin"
- proto "github.com/hashicorp/terraform/internal/tfplugin5"
- "github.com/hashicorp/terraform/plugin/convert"
- "github.com/hashicorp/terraform/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ proto "github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5"
"github.com/zclconf/go-cty/cty/msgpack"
"google.golang.org/grpc"
)
diff --git a/vendor/github.com/hashicorp/terraform/plugin/grpc_provisioner.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/grpc_provisioner.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/plugin/grpc_provisioner.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/grpc_provisioner.go
index 136c88d68..c0e6f549a 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/grpc_provisioner.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/grpc_provisioner.go
@@ -8,10 +8,10 @@ import (
"sync"
plugin "github.com/hashicorp/go-plugin"
- "github.com/hashicorp/terraform/configs/configschema"
- proto "github.com/hashicorp/terraform/internal/tfplugin5"
- "github.com/hashicorp/terraform/plugin/convert"
- "github.com/hashicorp/terraform/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
+ proto "github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/msgpack"
"google.golang.org/grpc"
diff --git a/vendor/github.com/hashicorp/terraform/plugin/plugin.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/plugin.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/plugin/plugin.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/plugin.go
diff --git a/vendor/github.com/hashicorp/terraform/plugin/resource_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/resource_provider.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/plugin/resource_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/resource_provider.go
index 459661a55..bfd62e2e9 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/resource_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/resource_provider.go
@@ -4,7 +4,7 @@ import (
"net/rpc"
"github.com/hashicorp/go-plugin"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// ResourceProviderPlugin is the plugin.Plugin implementation.
diff --git a/vendor/github.com/hashicorp/terraform/plugin/serve.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/serve.go
similarity index 74%
rename from vendor/github.com/hashicorp/terraform/plugin/serve.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/serve.go
index 8d056c591..cbe9fc636 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/serve.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/serve.go
@@ -2,16 +2,15 @@ package plugin
import (
"github.com/hashicorp/go-plugin"
- grpcplugin "github.com/hashicorp/terraform/helper/plugin"
- proto "github.com/hashicorp/terraform/internal/tfplugin5"
- "github.com/hashicorp/terraform/terraform"
+ grpcplugin "github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin"
+ proto "github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
const (
// The constants below are the names of the plugins that can be dispensed
// from the plugin server.
- ProviderPluginName = "provider"
- ProvisionerPluginName = "provisioner"
+ ProviderPluginName = "provider"
// DefaultProtocolVersion is the protocol version assumed for legacy clients that don't specify
// a particular version during their handshake. This is the version used when Terraform 0.10
@@ -36,19 +35,15 @@ var Handshake = plugin.HandshakeConfig{
}
type ProviderFunc func() terraform.ResourceProvider
-type ProvisionerFunc func() terraform.ResourceProvisioner
type GRPCProviderFunc func() proto.ProviderServer
-type GRPCProvisionerFunc func() proto.ProvisionerServer
// ServeOpts are the configurations to serve a plugin.
type ServeOpts struct {
- ProviderFunc ProviderFunc
- ProvisionerFunc ProvisionerFunc
+ ProviderFunc ProviderFunc
// Wrapped versions of the above plugins will automatically be shimmed and
// added to the GRPC functions when possible.
- GRPCProviderFunc GRPCProviderFunc
- GRPCProvisionerFunc GRPCProvisionerFunc
+ GRPCProviderFunc GRPCProviderFunc
}
// Serve serves a plugin. This function never returns and should be the final
@@ -66,14 +61,6 @@ func Serve(opts *ServeOpts) {
}
}
}
- if opts.GRPCProvisionerFunc == nil && opts.ProvisionerFunc != nil {
- provisioner := grpcplugin.NewGRPCProvisionerServerShim(opts.ProvisionerFunc())
- if provisioner != nil {
- opts.GRPCProvisionerFunc = func() proto.ProvisionerServer {
- return provisioner
- }
- }
- }
plugin.Serve(&plugin.ServeConfig{
HandshakeConfig: Handshake,
@@ -89,9 +76,6 @@ func legacyPluginMap(opts *ServeOpts) map[string]plugin.Plugin {
"provider": &ResourceProviderPlugin{
ResourceProvider: opts.ProviderFunc,
},
- "provisioner": &ResourceProvisionerPlugin{
- ResourceProvisioner: opts.ProvisionerFunc,
- },
}
}
@@ -104,18 +88,13 @@ func pluginSet(opts *ServeOpts) map[int]plugin.PluginSet {
}
// add the new protocol versions if they're configured
- if opts.GRPCProviderFunc != nil || opts.GRPCProvisionerFunc != nil {
+ if opts.GRPCProviderFunc != nil {
plugins[5] = plugin.PluginSet{}
if opts.GRPCProviderFunc != nil {
plugins[5]["provider"] = &GRPCProviderPlugin{
GRPCProvider: opts.GRPCProviderFunc,
}
}
- if opts.GRPCProvisionerFunc != nil {
- plugins[5]["provisioner"] = &GRPCProvisionerPlugin{
- GRPCProvisioner: opts.GRPCProvisionerFunc,
- }
- }
}
return plugins
}
diff --git a/vendor/github.com/hashicorp/terraform/plugin/ui_input.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/ui_input.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/plugin/ui_input.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/ui_input.go
index 3469e6a96..b24b03ebf 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/ui_input.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/ui_input.go
@@ -5,7 +5,7 @@ import (
"net/rpc"
"github.com/hashicorp/go-plugin"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// UIInput is an implementation of terraform.UIInput that communicates
diff --git a/vendor/github.com/hashicorp/terraform/plugin/ui_output.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/ui_output.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/plugin/ui_output.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/ui_output.go
index c222b00cd..07c13d03a 100644
--- a/vendor/github.com/hashicorp/terraform/plugin/ui_output.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/plugin/ui_output.go
@@ -3,7 +3,7 @@ package plugin
import (
"net/rpc"
- "github.com/hashicorp/terraform/terraform"
+ "github.com/hashicorp/terraform-plugin-sdk/terraform"
)
// UIOutput is an implementation of terraform.UIOutput that communicates
diff --git a/vendor/github.com/hashicorp/terraform/terraform/context.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/context.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context.go
index 911ad088d..eb05c68ae 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/context.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context.go
@@ -7,15 +7,15 @@ import (
"log"
"sync"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/lang"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/provisioners"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/states/statefile"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
)
@@ -24,23 +24,19 @@ import (
type InputMode byte
const (
+ // InputModeVar asks for all variables
+ InputModeVar InputMode = 1 << iota
+
+ // InputModeVarUnset asks for variables which are not set yet.
+ // InputModeVar must be set for this to have an effect.
+ InputModeVarUnset
+
// InputModeProvider asks for provider variables
- InputModeProvider InputMode = 1 << iota
+ InputModeProvider
// InputModeStd is the standard operating mode and asks for both variables
// and providers.
- InputModeStd = InputModeProvider
-)
-
-var (
- // contextFailOnShadowError will cause Context operations to return
- // errors when shadow operations fail. This is only used for testing.
- contextFailOnShadowError = false
-
- // contextTestDeepCopyOnPlan will perform a Diff DeepCopy on every
- // Plan operation, effectively testing the Diff DeepCopy whenever
- // a Plan occurs. This is enabled for tests.
- contextTestDeepCopyOnPlan = false
+ InputModeStd = InputModeVar | InputModeProvider
)
// ContextOpts are the user-configurable options to create a context with
@@ -97,7 +93,6 @@ type Context struct {
parallelSem Semaphore
providerInputConfig map[string]map[string]cty.Value
providerSHA256s map[string][]byte
- runLock sync.Mutex
runCond *sync.Cond
runContext context.Context
runContextCancel context.CancelFunc
@@ -203,18 +198,6 @@ func NewContext(opts *ContextOpts) (*Context, tfdiags.Diagnostics) {
log.Printf("[TRACE] terraform.NewContext: complete")
- // By the time we get here, we should have values defined for all of
- // the root module variables, even if some of them are "unknown". It's the
- // caller's responsibility to have already handled the decoding of these
- // from the various ways the CLI allows them to be set and to produce
- // user-friendly error messages if they are not all present, and so
- // the error message from checkInputVariables should never be seen and
- // includes language asking the user to report a bug.
- if config != nil {
- varDiags := checkInputVariables(config.Module.Variables, variables)
- diags = diags.Append(varDiags)
- }
-
return &Context{
components: components,
schemas: schemas,
@@ -232,7 +215,7 @@ func NewContext(opts *ContextOpts) (*Context, tfdiags.Diagnostics) {
providerInputConfig: make(map[string]map[string]cty.Value),
providerSHA256s: opts.ProviderSHA256s,
sh: sh,
- }, diags
+ }, nil
}
func (c *Context) Schemas() *Schemas {
@@ -663,6 +646,14 @@ func (c *Context) Validate() tfdiags.Diagnostics {
var diags tfdiags.Diagnostics
+ // Validate input variables. We do this only for the values supplied
+ // by the root module, since child module calls are validated when we
+ // visit their graph nodes.
+ if c.config != nil {
+ varDiags := checkInputVariables(c.config.Module.Variables, c.variables)
+ diags = diags.Append(varDiags)
+ }
+
// If we have errors at this point then we probably won't be able to
// construct a graph without producing redundant errors, so we'll halt early.
if diags.HasErrors() {
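
The InputMode flags reinstated near the top of this file compose as bit flags; a minimal sketch, assuming a caller that already holds a *terraform.Context from this vendored package, of how they combine when driving Context.Input:

    // InputModeVarUnset only has an effect when InputModeVar is also set,
    // and InputModeStd already includes InputModeVar and InputModeProvider.
    func promptForMissingInput(ctx *terraform.Context) error {
        diags := ctx.Input(terraform.InputModeStd | terraform.InputModeVarUnset)
        if diags.HasErrors() {
            return diags.Err()
        }
        return nil
    }
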
diff --git a/vendor/github.com/hashicorp/terraform/terraform/context_components.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_components.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/terraform/context_components.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_components.go
index 26ec99595..a627996e3 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/context_components.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_components.go
@@ -3,8 +3,8 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
)
// contextComponentFactory is the interface that Context uses
diff --git a/vendor/github.com/hashicorp/terraform/terraform/context_graph_type.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_graph_type.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/context_graph_type.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_graph_type.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/context_import.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_import.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/terraform/context_import.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_import.go
index 313e9094f..9a9cd9626 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/context_import.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_import.go
@@ -1,10 +1,10 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// ImportOpts are used as the configuration for Import.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/context_input.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_input.go
similarity index 69%
rename from vendor/github.com/hashicorp/terraform/terraform/context_input.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_input.go
index d24adcb7c..b99f1afac 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/context_input.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/context_input.go
@@ -2,6 +2,7 @@ package terraform
import (
"context"
+ "fmt"
"log"
"sort"
@@ -9,27 +10,15 @@ import (
"github.com/hashicorp/hcl/v2/hcldec"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
-// Input asks for input to fill unset required arguments in provider
-// configurations.
-//
+// Input asks for input to fill variables and provider configurations.
// This modifies the configuration in-place, so asking for Input twice
// may result in different UI output showing different current values.
func (c *Context) Input(mode InputMode) tfdiags.Diagnostics {
- // This function used to be responsible for more than it is now, so its
- // interface is more general than its current functionality requires.
- // It now exists only to handle interactive prompts for provider
- // configurations, with other prompts the responsibility of the CLI
- // layer prior to calling in to this package.
- //
- // (Hopefully in future the remaining functionality here can move to the
- // CLI layer too in order to avoid this odd situation where core code
- // produces UI input prompts.)
-
var diags tfdiags.Diagnostics
defer c.acquireRun("input")()
@@ -40,6 +29,85 @@ func (c *Context) Input(mode InputMode) tfdiags.Diagnostics {
ctx := context.Background()
+ if mode&InputModeVar != 0 {
+ log.Printf("[TRACE] Context.Input: Prompting for variables")
+
+ // Walk the variables first for the root module. We walk them in
+ // alphabetical order for UX reasons.
+ configs := c.config.Module.Variables
+ names := make([]string, 0, len(configs))
+ for name := range configs {
+ names = append(names, name)
+ }
+ sort.Strings(names)
+ Variables:
+ for _, n := range names {
+ v := configs[n]
+
+ // If we only care about unset variables, then skip any
+ // variable that is already set.
+ if mode&InputModeVarUnset != 0 {
+ if _, isSet := c.variables[n]; isSet {
+ continue
+ }
+ }
+
+ // this should only happen during tests
+ if c.uiInput == nil {
+ log.Println("[WARN] Context.uiInput is nil during input walk")
+ continue
+ }
+
+ // Ask the user for a value for this variable
+ var rawValue string
+ retry := 0
+ for {
+ var err error
+ rawValue, err = c.uiInput.Input(ctx, &InputOpts{
+ Id: fmt.Sprintf("var.%s", n),
+ Query: fmt.Sprintf("var.%s", n),
+ Description: v.Description,
+ })
+ if err != nil {
+ diags = diags.Append(tfdiags.Sourceless(
+ tfdiags.Error,
+ "Failed to request interactive input",
+ fmt.Sprintf("Terraform attempted to request a value for var.%s interactively, but encountered an error: %s.", n, err),
+ ))
+ return diags
+ }
+
+ if rawValue == "" && v.Default == cty.NilVal {
+ // Redo if it is required, but abort if we keep getting
+ // blank entries
+ if retry > 2 {
+ diags = diags.Append(tfdiags.Sourceless(
+ tfdiags.Error,
+ "Required variable not assigned",
+ fmt.Sprintf("The variable %q is required, so Terraform cannot proceed without a defined value for it.", n),
+ ))
+ continue Variables
+ }
+ retry++
+ continue
+ }
+
+ break
+ }
+
+ val, valDiags := v.ParsingMode.Parse(n, rawValue)
+ diags = diags.Append(valDiags)
+ if diags.HasErrors() {
+ continue
+ }
+
+ c.variables[n] = &InputValue{
+ Value: val,
+ SourceType: ValueFromInput,
+ }
+ }
+ }
+
if mode&InputModeProvider != 0 {
log.Printf("[TRACE] Context.Input: Prompting for provider arguments")
diff --git a/vendor/github.com/hashicorp/terraform/terraform/diff.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/diff.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/terraform/diff.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/diff.go
index 4e834204d..e2f54883b 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/diff.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/diff.go
@@ -12,9 +12,9 @@ import (
"strings"
"sync"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
"github.com/zclconf/go-cty/cty"
"github.com/mitchellh/copystructure"
@@ -218,16 +218,6 @@ func (d *Diff) String() string {
return strings.TrimSpace(buf.String())
}
-func (d *Diff) init() {
- if d.Modules == nil {
- rootDiff := &ModuleDiff{Path: rootModulePath}
- d.Modules = []*ModuleDiff{rootDiff}
- }
- for _, m := range d.Modules {
- m.init()
- }
-}
-
// ModuleDiff tracks the differences between resources to apply within
// a single module.
type ModuleDiff struct {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/edge_destroy.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/edge_destroy.go
similarity index 88%
rename from vendor/github.com/hashicorp/terraform/terraform/edge_destroy.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/edge_destroy.go
index bc9d638aa..17464bc06 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/edge_destroy.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/edge_destroy.go
@@ -3,7 +3,7 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// DestroyEdge is an edge that represents a standard "destroy" relationship:
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/eval.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval.go
index 48ed3533a..c490c3bcf 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval.go
@@ -3,7 +3,7 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalNode is the interface that must be implemented by graph nodes to
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_apply.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_apply.go
similarity index 92%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_apply.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_apply.go
index 215b9b657..6beeaea98 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_apply.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_apply.go
@@ -9,14 +9,14 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/plans/objchange"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/provisioners"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalApply is an EvalNode implementation that writes the diff to
@@ -24,6 +24,7 @@ import (
type EvalApply struct {
Addr addrs.ResourceInstance
Config *configs.Resource
+ Dependencies []addrs.Referenceable
State **states.ResourceInstanceObject
Change **plans.ResourceInstanceChange
ProviderAddr addrs.AbsProviderConfig
@@ -253,8 +254,6 @@ func (n *EvalApply) Eval(ctx EvalContext) (interface{}, error) {
}
}
- newStatus := states.ObjectReady
-
// Sometimes providers return a null value when an operation fails for some
// reason, but we'd rather keep the prior state so that the error can be
// corrected on a subsequent run. We must only do this for null new value
@@ -267,20 +266,15 @@ func (n *EvalApply) Eval(ctx EvalContext) (interface{}, error) {
// If change.Action is Create then change.Before will also be null,
// which is fine.
newVal = change.Before
-
- // If we're recovering the previous state, we also want to restore the
- // the tainted status of the object.
- if state.Status == states.ObjectTainted {
- newStatus = states.ObjectTainted
- }
}
var newState *states.ResourceInstanceObject
if !newVal.IsNull() { // null value indicates that the object is deleted, so we won't set a new state in that case
newState = &states.ResourceInstanceObject{
- Status: newStatus,
- Value: newVal,
- Private: resp.Private,
+ Status: states.ObjectReady,
+ Value: newVal,
+ Private: resp.Private,
+ Dependencies: n.Dependencies, // Should be populated by the caller from the StateDependencies method on the resource instance node
}
}
@@ -384,39 +378,40 @@ type EvalMaybeTainted struct {
Change **plans.ResourceInstanceChange
State **states.ResourceInstanceObject
Error *error
+
+ // If StateOutput is not nil, its referent will be assigned either the same
+ // pointer as State or a new object with its status set as Tainted,
+ // depending on whether an error is given and if this was a create action.
+ StateOutput **states.ResourceInstanceObject
}
+// TODO: test
func (n *EvalMaybeTainted) Eval(ctx EvalContext) (interface{}, error) {
- if n.State == nil || n.Change == nil || n.Error == nil {
- return nil, nil
- }
-
state := *n.State
change := *n.Change
err := *n.Error
- // nothing to do if everything went as planned
- if err == nil {
- return nil, nil
- }
-
if state != nil && state.Status == states.ObjectTainted {
log.Printf("[TRACE] EvalMaybeTainted: %s was already tainted, so nothing to do", n.Addr.Absolute(ctx.Path()))
return nil, nil
}
- if change.Action == plans.Create {
- // If there are errors during a _create_ then the object is
- // in an undefined state, and so we'll mark it as tainted so
- // we can try again on the next run.
- //
- // We don't do this for other change actions because errors
- // during updates will often not change the remote object at all.
- // If there _were_ changes prior to the error, it's the provider's
- // responsibility to record the effect of those changes in the
- // object value it returned.
- log.Printf("[TRACE] EvalMaybeTainted: %s encountered an error during creation, so it is now marked as tainted", n.Addr.Absolute(ctx.Path()))
- *n.State = state.AsTainted()
+ if n.StateOutput != nil {
+ if err != nil && change.Action == plans.Create {
+ // If there are errors during a _create_ then the object is
+ // in an undefined state, and so we'll mark it as tainted so
+ // we can try again on the next run.
+ //
+ // We don't do this for other change actions because errors
+ // during updates will often not change the remote object at all.
+ // If there _were_ changes prior to the error, it's the provider's
+ // responsibility to record the effect of those changes in the
+ // object value it returned.
+ log.Printf("[TRACE] EvalMaybeTainted: %s encountered an error during creation, so it is now marked as tainted", n.Addr.Absolute(ctx.Path()))
+ *n.StateOutput = state.AsTainted()
+ } else {
+ *n.StateOutput = state
+ }
}
return nil, nil
@@ -569,11 +564,6 @@ func (n *EvalApplyProvisioners) apply(ctx EvalContext, provs []*configs.Provisio
config, _, configDiags := ctx.EvaluateBlock(prov.Config, schema, instanceAddr, keyData)
diags = diags.Append(configDiags)
- // we can't apply the provisioner if the config has errors
- if diags.HasErrors() {
- return diags.Err()
- }
-
// If the provisioner block contains a connection block of its own then
// it can override the base connection configuration, if any.
var localConn hcl.Body
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_check_prevent_destroy.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_check_prevent_destroy.go
similarity index 74%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_check_prevent_destroy.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_check_prevent_destroy.go
index 3272a8e88..d13a96529 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_check_prevent_destroy.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_check_prevent_destroy.go
@@ -3,13 +3,13 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalPreventDestroy is an EvalNode implementation that returns an
@@ -45,5 +45,3 @@ func (n *EvalCheckPreventDestroy) Eval(ctx EvalContext) (interface{}, error) {
return nil, nil
}
-
-const preventDestroyErrStr = `%s: the plan would destroy this resource, but it currently has lifecycle.prevent_destroy set to true. To avoid this error and continue with the plan, either disable lifecycle.prevent_destroy or adjust the scope of the plan using the -target flag.`
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_context.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_context.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_context.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_context.go
index e36805e90..4fa011e2b 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_context.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_context.go
@@ -2,14 +2,14 @@ package terraform
import (
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/lang"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/provisioners"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_context_builtin.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_context_builtin.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_context_builtin.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_context_builtin.go
index f6531848f..bd414a960 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_context_builtin.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_context_builtin.go
@@ -6,19 +6,19 @@ import (
"log"
"sync"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/provisioners"
- "github.com/hashicorp/terraform/version"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/version"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/lang"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
"github.com/zclconf/go-cty/cty"
)
@@ -225,12 +225,14 @@ func (ctx *BuiltinEvalContext) InitProvisioner(n string) (provisioners.Interface
ctx.ProvisionerLock.Lock()
defer ctx.ProvisionerLock.Unlock()
- p, err := ctx.Components.ResourceProvisioner(n, "")
+ key := PathObjectCacheKey(ctx.Path(), n)
+
+ p, err := ctx.Components.ResourceProvisioner(n, key)
if err != nil {
return nil, err
}
- ctx.ProvisionerCache[n] = p
+ ctx.ProvisionerCache[key] = p
return p, nil
}
@@ -241,7 +243,8 @@ func (ctx *BuiltinEvalContext) Provisioner(n string) provisioners.Interface {
ctx.ProvisionerLock.Lock()
defer ctx.ProvisionerLock.Unlock()
- return ctx.ProvisionerCache[n]
+ key := PathObjectCacheKey(ctx.Path(), n)
+ return ctx.ProvisionerCache[key]
}
func (ctx *BuiltinEvalContext) ProvisionerSchema(n string) *configschema.Block {
@@ -256,7 +259,9 @@ func (ctx *BuiltinEvalContext) CloseProvisioner(n string) error {
ctx.ProvisionerLock.Lock()
defer ctx.ProvisionerLock.Unlock()
- prov := ctx.ProvisionerCache[n]
+ key := PathObjectCacheKey(ctx.Path(), n)
+
+ prov := ctx.ProvisionerCache[key]
if prov != nil {
return prov.Close()
}
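
The provisioner cache is now keyed per module path rather than globally by name. PathObjectCacheKey itself is defined elsewhere in the SDK and is not shown in this diff; the following is only a hedged guess at the shape of such a helper, to illustrate the keying change:

    import (
        "fmt"

        "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
    )

    // Hypothetical sketch only: the real PathObjectCacheKey may differ. The
    // idea is to combine the module instance path with the object name so
    // each module instance caches its own provisioner client.
    func pathObjectCacheKey(path addrs.ModuleInstance, name string) string {
        return fmt.Sprintf("%s|%s", path.String(), name)
    }
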
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_context_mock.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_context_mock.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_context_mock.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_context_mock.go
index 26ed4be1f..786316fb3 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_context_mock.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_context_mock.go
@@ -3,14 +3,14 @@ package terraform
import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hcldec"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/lang"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/provisioners"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_count.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_count.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_count.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_count.go
index f3b07ef0d..7d6fa4919 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_count.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_count.go
@@ -5,8 +5,8 @@ import (
"log"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/gocty"
)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_count_boundary.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_count_boundary.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_count_boundary.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_count_boundary.go
index 647c58d1e..aac380632 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_count_boundary.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_count_boundary.go
@@ -4,8 +4,8 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
)
// EvalCountFixZeroOneBoundaryGlobal is an EvalNode that fixes up the state
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_diff.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_diff.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_diff.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_diff.go
index 3ce4adbee..d6f51c950 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_diff.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_diff.go
@@ -1,7 +1,6 @@
package terraform
import (
- "bytes"
"fmt"
"log"
"strings"
@@ -9,13 +8,13 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/plans/objchange"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalCheckPlannedChange is an EvalNode implementation that produces errors
@@ -567,96 +566,6 @@ func processIgnoreChangesIndividual(prior, proposed cty.Value, ignoreChanges []h
return ret, diags
}
-// legacyFlagmapKeyForTraversal constructs a key string compatible with what
-// the flatmap package would generate for an attribute addressable by the given
-// traversal.
-//
-// This is used only to shim references to attributes within the diff and
-// state structures, which have not (at the time of writing) yet been updated
-// to use the newer HCL-based representations.
-func legacyFlatmapKeyForTraversal(traversal hcl.Traversal) string {
- var buf bytes.Buffer
- first := true
- for _, step := range traversal {
- if !first {
- buf.WriteByte('.')
- }
- switch ts := step.(type) {
- case hcl.TraverseRoot:
- buf.WriteString(ts.Name)
- case hcl.TraverseAttr:
- buf.WriteString(ts.Name)
- case hcl.TraverseIndex:
- val := ts.Key
- switch val.Type() {
- case cty.Number:
- bf := val.AsBigFloat()
- buf.WriteString(bf.String())
- case cty.String:
- s := val.AsString()
- buf.WriteString(s)
- default:
- // should never happen, since no other types appear in
- // traversals in practice.
- buf.WriteByte('?')
- }
- default:
- // should never happen, since we've covered all of the types
- // that show up in parsed traversals in practice.
- buf.WriteByte('?')
- }
- first = false
- }
- return buf.String()
-}
-
-// a group of key-*ResourceAttrDiff pairs from the same flatmapped container
-type flatAttrDiff map[string]*ResourceAttrDiff
-
-// we need to keep all keys if any of them have a diff that's not ignored
-func (f flatAttrDiff) keepDiff(ignoreChanges map[string]bool) bool {
- for k, v := range f {
- ignore := false
- for attr := range ignoreChanges {
- if strings.HasPrefix(k, attr) {
- ignore = true
- }
- }
-
- if !v.Empty() && !v.NewComputed && !ignore {
- return true
- }
- }
- return false
-}
-
-// sets, lists and maps need to be compared for diff inclusion as a whole, so
-// group the flatmapped keys together for easier comparison.
-func groupContainers(d *InstanceDiff) map[string]flatAttrDiff {
- isIndex := multiVal.MatchString
- containers := map[string]flatAttrDiff{}
- attrs := d.CopyAttributes()
- // we need to loop once to find the index key
- for k := range attrs {
- if isIndex(k) {
- // add the key, always including the final dot to fully qualify it
- containers[k[:len(k)-1]] = flatAttrDiff{}
- }
- }
-
- // loop again to find all the sub keys
- for prefix, values := range containers {
- for k, attrDiff := range attrs {
- // we include the index value as well, since it could be part of the diff
- if strings.HasPrefix(k, prefix) {
- values[k] = attrDiff
- }
- }
- }
-
- return containers
-}
-
// EvalDiffDestroy is an EvalNode implementation that returns a plain
// destroy diff.
type EvalDiffDestroy struct {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_error.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_error.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_error.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_error.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_filter.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_filter.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_filter.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_filter.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_filter_operation.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_filter_operation.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_filter_operation.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_filter_operation.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_for_each.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_for_each.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_for_each.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_for_each.go
index efe0dd919..a63389a91 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_for_each.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_for_each.go
@@ -4,7 +4,7 @@ import (
"fmt"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_if.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_if.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_if.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_if.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_import_state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_import_state.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_import_state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_import_state.go
index a60f4a0a2..25a2aae06 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_import_state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_import_state.go
@@ -4,10 +4,10 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalImportState is an EvalNode implementation that performs an
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_lang.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_lang.go
similarity index 92%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_lang.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_lang.go
index d3a4f5b44..5ab6b44f5 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_lang.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_lang.go
@@ -3,10 +3,10 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_local.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_local.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_local.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_local.go
index f30286e2f..031019380 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_local.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_local.go
@@ -6,9 +6,9 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/lang"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalLocal is an EvalNode implementation that evaluates the
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_noop.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_noop.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_noop.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_noop.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_output.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_output.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_output.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_output.go
index 181e55635..9f71e92f6 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_output.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_output.go
@@ -7,9 +7,9 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// EvalDeleteOutput is an EvalNode implementation that deletes an output
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_provider.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_provider.go
index 1b12b3cc8..7440cff7a 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_provider.go
@@ -6,10 +6,10 @@ import (
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
func buildProviderConfig(ctx EvalContext, addr addrs.ProviderConfig, config *configs.Provider) hcl.Body {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_provisioner.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_provisioner.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_provisioner.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_provisioner.go
index bc6b5cc76..405ce9d0b 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_provisioner.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_provisioner.go
@@ -3,8 +3,8 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
)
// EvalInitProvisioner is an EvalNode implementation that initializes a provisioner
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_read_data.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_read_data.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_read_data.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_read_data.go
index e58ec7c6e..0b734b793 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_read_data.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_read_data.go
@@ -6,13 +6,13 @@ import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/plans/objchange"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalReadData is an EvalNode implementation that deals with the main part
@@ -21,6 +21,7 @@ import (
type EvalReadData struct {
Addr addrs.ResourceInstance
Config *configs.Resource
+ Dependencies []addrs.Referenceable
Provider *providers.Interface
ProviderAddr addrs.AbsProviderConfig
ProviderSchema **ProviderSchema
@@ -160,8 +161,9 @@ func (n *EvalReadData) Eval(ctx EvalContext) (interface{}, error) {
}
if n.OutputState != nil {
state := &states.ResourceInstanceObject{
- Value: change.After,
- Status: states.ObjectPlanned, // because the partial value in the plan must be used for now
+ Value: change.After,
+ Status: states.ObjectPlanned, // because the partial value in the plan must be used for now
+ Dependencies: n.Dependencies,
}
*n.OutputState = state
}
@@ -273,8 +275,9 @@ func (n *EvalReadData) Eval(ctx EvalContext) (interface{}, error) {
},
}
state := &states.ResourceInstanceObject{
- Value: change.After,
- Status: states.ObjectReady, // because we completed the read from the provider
+ Value: change.After,
+ Status: states.ObjectReady, // because we completed the read from the provider
+ Dependencies: n.Dependencies,
}
err = ctx.Hook(func(h Hook) (HookAction, error) {
@@ -303,13 +306,14 @@ func (n *EvalReadData) Eval(ctx EvalContext) (interface{}, error) {
// EvalReadDataApply is an EvalNode implementation that executes a data
// resource's ReadDataApply method to read data from the data source.
type EvalReadDataApply struct {
- Addr addrs.ResourceInstance
- Provider *providers.Interface
- ProviderAddr addrs.AbsProviderConfig
- ProviderSchema **ProviderSchema
- Output **states.ResourceInstanceObject
- Config *configs.Resource
- Change **plans.ResourceInstanceChange
+ Addr addrs.ResourceInstance
+ Provider *providers.Interface
+ ProviderAddr addrs.AbsProviderConfig
+ ProviderSchema **ProviderSchema
+ Output **states.ResourceInstanceObject
+ Config *configs.Resource
+ Change **plans.ResourceInstanceChange
+ StateReferences []addrs.Referenceable
}
func (n *EvalReadDataApply) Eval(ctx EvalContext) (interface{}, error) {
@@ -381,8 +385,9 @@ func (n *EvalReadDataApply) Eval(ctx EvalContext) (interface{}, error) {
if n.Output != nil {
*n.Output = &states.ResourceInstanceObject{
- Value: newVal,
- Status: states.ObjectReady,
+ Value: newVal,
+ Status: states.ObjectReady,
+ Dependencies: n.StateReferences,
}
}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_refresh.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_refresh.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_refresh.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_refresh.go
index fd50f873a..6a834445c 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_refresh.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_refresh.go
@@ -6,10 +6,10 @@ import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalRefresh is an EvalNode implementation that does a refresh for
@@ -89,7 +89,6 @@ func (n *EvalRefresh) Eval(ctx EvalContext) (interface{}, error) {
newState := state.DeepCopy()
newState.Value = resp.NewState
newState.Private = resp.Private
- newState.Dependencies = state.Dependencies
// Call post-refresh hook
err = ctx.Hook(func(h Hook) (HookAction, error) {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_sequence.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_sequence.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_sequence.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_sequence.go
index 3485e4f14..7d6bb6603 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_sequence.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_sequence.go
@@ -1,7 +1,7 @@
package terraform
import (
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalSequence is an EvalNode that evaluates in sequence.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_state.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_state.go
index 0be877544..70a72bbdb 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_state.go
@@ -3,13 +3,12 @@ package terraform
import (
"fmt"
"log"
- "sort"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalReadState is an EvalNode implementation that reads the
@@ -201,10 +200,6 @@ type EvalWriteState struct {
// ProviderAddr is the address of the provider configuration that
// produced the given object.
ProviderAddr addrs.AbsProviderConfig
-
- // Dependencies are the inter-resource dependencies to be stored in the
- // state.
- Dependencies *[]addrs.AbsResource
}
func (n *EvalWriteState) Eval(ctx EvalContext) (interface{}, error) {
@@ -220,6 +215,7 @@ func (n *EvalWriteState) Eval(ctx EvalContext) (interface{}, error) {
if n.ProviderAddr.ProviderConfig.Type == "" {
return nil, fmt.Errorf("failed to write state for %s, missing provider type", absAddr)
}
+
obj := *n.State
if obj == nil || obj.Value.IsNull() {
// No need to encode anything: we'll just write it directly.
@@ -227,13 +223,6 @@ func (n *EvalWriteState) Eval(ctx EvalContext) (interface{}, error) {
log.Printf("[TRACE] EvalWriteState: removing state object for %s", absAddr)
return nil, nil
}
-
- // store the new deps in the state
- if n.Dependencies != nil {
- log.Printf("[TRACE] EvalWriteState: recording %d dependencies for %s", len(*n.Dependencies), absAddr)
- obj.Dependencies = *n.Dependencies
- }
-
if n.ProviderSchema == nil || *n.ProviderSchema == nil {
// Should never happen, unless our state object is nil
panic("EvalWriteState used with pointer to nil ProviderSchema object")
@@ -484,49 +473,3 @@ func (n *EvalForgetResourceState) Eval(ctx EvalContext) (interface{}, error) {
return nil, nil
}
-
-// EvalRefreshDependencies is an EvalNode implementation that appends any newly
-// found dependencies to those saved in the state. The existing dependencies
-// are retained, as they may be missing from the config, and will be required
-// for the updates and destroys during the next apply.
-type EvalRefreshDependencies struct {
- // Prior State
- State **states.ResourceInstanceObject
- // Dependencies to write to the new state
- Dependencies *[]addrs.AbsResource
-}
-
-func (n *EvalRefreshDependencies) Eval(ctx EvalContext) (interface{}, error) {
- state := *n.State
- if state == nil {
- // no existing state to append
- return nil, nil
- }
-
- depMap := make(map[string]addrs.AbsResource)
- for _, d := range *n.Dependencies {
- depMap[d.String()] = d
- }
-
- // We have already dependencies in state, so we need to trust those for
- // refresh. We can't write out new dependencies until apply time in case
- // the configuration has been changed in a manner the conflicts with the
- // stored dependencies.
- if len(state.Dependencies) > 0 {
- *n.Dependencies = state.Dependencies
- return nil, nil
- }
-
- deps := make([]addrs.AbsResource, 0, len(depMap))
- for _, d := range depMap {
- deps = append(deps, d)
- }
-
- sort.Slice(deps, func(i, j int) bool {
- return deps[i].String() < deps[j].String()
- })
-
- *n.Dependencies = deps
-
- return nil, nil
-}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_state_upgrade.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_state_upgrade.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_state_upgrade.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_state_upgrade.go
index e1940005e..27d5f212e 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_state_upgrade.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_state_upgrade.go
@@ -4,11 +4,11 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// UpgradeResourceState will, if necessary, run the provider-defined upgrade
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_validate.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_validate.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_validate.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_validate.go
index 5b2146a58..a4f28bd90 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_validate.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_validate.go
@@ -5,12 +5,12 @@ import (
"log"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/provisioners"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
"github.com/zclconf/go-cty/cty/gocty"
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_validate_selfref.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_validate_selfref.go
similarity index 88%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_validate_selfref.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_validate_selfref.go
index dd5e4018d..c9cc0e6da 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_validate_selfref.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_validate_selfref.go
@@ -5,10 +5,10 @@ import (
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/lang"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalValidateSelfRef is an EvalNode implementation that checks to ensure that
diff --git a/vendor/github.com/hashicorp/terraform/terraform/eval_variable.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_variable.go
similarity index 76%
rename from vendor/github.com/hashicorp/terraform/terraform/eval_variable.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_variable.go
index 7f6651c4c..79f44b3fe 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/eval_variable.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/eval_variable.go
@@ -3,11 +3,10 @@ package terraform
import (
"fmt"
"log"
- "reflect"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
)
@@ -95,26 +94,3 @@ func (n *EvalModuleCallArgument) Eval(ctx EvalContext) (interface{}, error) {
}
return nil, diags.ErrWithWarnings()
}
-
-// hclTypeName returns the name of the type that would represent this value in
-// a config file, or falls back to the Go type name if there's no corresponding
-// HCL type. This is used for formatted output, not for comparing types.
-func hclTypeName(i interface{}) string {
- switch k := reflect.Indirect(reflect.ValueOf(i)).Kind(); k {
- case reflect.Bool:
- return "boolean"
- case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
- reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
- reflect.Uint64, reflect.Uintptr, reflect.Float32, reflect.Float64:
- return "number"
- case reflect.Array, reflect.Slice:
- return "list"
- case reflect.Map:
- return "map"
- case reflect.String:
- return "string"
- default:
- // fall back to the Go type if there's no match
- return k.String()
- }
-}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/evaltree_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/evaltree_provider.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/evaltree_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/evaltree_provider.go
index 6b4df67aa..d4a8d3cf7 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/evaltree_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/evaltree_provider.go
@@ -1,9 +1,9 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
)
// ProviderEvalTree returns the evaluation tree for initializing and
diff --git a/vendor/github.com/hashicorp/terraform/terraform/evaluate.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/evaluate.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/evaluate.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/evaluate.go
index 9e71d3efa..2d3eabd48 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/evaluate.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/evaluate.go
@@ -4,7 +4,6 @@ import (
"fmt"
"os"
"path/filepath"
- "strconv"
"sync"
"github.com/agext/levenshtein"
@@ -12,13 +11,13 @@ import (
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/lang"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// Evaluator provides the necessary contextual data for evaluating expressions
@@ -185,16 +184,6 @@ func (d *evaluationStateData) GetForEachAttr(addr addrs.ForEachAttr, rng tfdiags
returnVal = d.InstanceKeyData.EachKey
case "value":
returnVal = d.InstanceKeyData.EachValue
-
- if returnVal == cty.NilVal {
- diags = diags.Append(&hcl.Diagnostic{
- Severity: hcl.DiagError,
- Summary: `each.value cannot be used in this context`,
- Detail: fmt.Sprintf(`A reference to "each.value" has been used in a context in which it unavailable, such as when the configuration no longer contains the value in its "for_each" expression. Remove this reference to each.value in your configuration to work around this error.`),
- Subject: rng.ToHCL().Ptr(),
- })
- return cty.UnknownVal(cty.DynamicPseudoType), diags
- }
default:
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
@@ -784,43 +773,6 @@ func (d *evaluationStateData) getResourceSchema(addr addrs.Resource, providerAdd
return schema
}
-// coerceInstanceKey attempts to convert the given key to the type expected
-// for the given EachMode.
-//
-// If the key is already of the correct type or if it cannot be converted then
-// it is returned verbatim. If conversion is required and possible, the
-// converted value is returned. Callers should not try to determine if
-// conversion was possible, should instead just check if the result is of
-// the expected type.
-func (d *evaluationStateData) coerceInstanceKey(key addrs.InstanceKey, mode states.EachMode) addrs.InstanceKey {
- if key == addrs.NoKey {
- // An absent key can't be converted
- return key
- }
-
- switch mode {
- case states.NoEach:
- // No conversions possible at all
- return key
- case states.EachMap:
- if intKey, isInt := key.(addrs.IntKey); isInt {
- return addrs.StringKey(strconv.Itoa(int(intKey)))
- }
- return key
- case states.EachList:
- if strKey, isStr := key.(addrs.StringKey); isStr {
- i, err := strconv.Atoi(string(strKey))
- if err != nil {
- return key
- }
- return addrs.IntKey(i)
- }
- return key
- default:
- return key
- }
-}
-
func (d *evaluationStateData) GetTerraformAttr(addr addrs.TerraformAttr, rng tfdiags.SourceRange) (cty.Value, tfdiags.Diagnostics) {
var diags tfdiags.Diagnostics
switch addr.Name {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/evaluate_valid.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/evaluate_valid.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/evaluate_valid.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/evaluate_valid.go
index 9e55b2f99..35a8be0c9 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/evaluate_valid.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/evaluate_valid.go
@@ -6,10 +6,10 @@ import (
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/helper/didyoumean"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/helper/didyoumean"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// StaticValidateReferences checks the given references against schemas and
diff --git a/vendor/github.com/hashicorp/terraform/terraform/features.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/features.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/features.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/features.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/graph.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph.go
index 58d45a7b6..36e295b6f 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph.go
@@ -4,11 +4,11 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// Graph represents the graph that Terraform uses to represent resources
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_builder.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder.go
index 66b21f300..ee2c5857a 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder.go
@@ -5,9 +5,9 @@ import (
"log"
"strings"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// GraphBuilder is an interface that can be implemented and used with
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_apply.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_apply.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_builder_apply.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_apply.go
index 615731328..918987610 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_apply.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_apply.go
@@ -1,12 +1,12 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// ApplyGraphBuilder implements GraphBuilder and is responsible for building
@@ -155,7 +155,6 @@ func (b *ApplyGraphBuilder) Steps() []GraphTransformer {
// Connect references so ordering is correct
&ReferenceTransformer{},
- &AttachDependenciesTransformer{},
// Destruction ordering
&DestroyEdgeTransformer{
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_destroy_plan.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_destroy_plan.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_builder_destroy_plan.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_destroy_plan.go
index a6047a9b4..32fe5f973 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_destroy_plan.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_destroy_plan.go
@@ -1,11 +1,11 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// DestroyPlanGraphBuilder implements GraphBuilder and is responsible for
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_eval.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_eval.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_builder_eval.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_eval.go
index eb6c897bf..8a0bcf5ba 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_eval.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_eval.go
@@ -1,11 +1,11 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// EvalGraphBuilder implements GraphBuilder and constructs a graph suitable
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_import.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_import.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_builder_import.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_import.go
index 49879e4eb..dcbb10e60 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_import.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_import.go
@@ -1,10 +1,10 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// ImportGraphBuilder implements GraphBuilder and is responsible for building
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_plan.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_plan.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_builder_plan.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_plan.go
index 17adfd279..bcd119b39 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_plan.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_plan.go
@@ -3,11 +3,11 @@ package terraform
import (
"sync"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// PlanGraphBuilder implements GraphBuilder and is responsible for building
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_refresh.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_refresh.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_builder_refresh.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_refresh.go
index 1c7ae4898..fad7bf161 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_refresh.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_refresh.go
@@ -3,12 +3,12 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// RefreshGraphBuilder implements GraphBuilder and is responsible for building
@@ -165,7 +165,6 @@ func (b *RefreshGraphBuilder) Steps() []GraphTransformer {
// Connect so that the references are ready for targeting. We'll
// have to connect again later for providers and so on.
&ReferenceTransformer{},
- &AttachDependenciesTransformer{},
// Target
&TargetsTransformer{
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_validate.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_validate.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_builder_validate.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_validate.go
index 1881f95f2..0aa8b915a 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_builder_validate.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_builder_validate.go
@@ -1,7 +1,7 @@
package terraform
import (
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// ValidateGraphBuilder creates the graph for the validate operation.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_dot.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_dot.go
similarity index 77%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_dot.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_dot.go
index 73e3821fb..5dbf415ff 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_dot.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_dot.go
@@ -1,6 +1,6 @@
package terraform
-import "github.com/hashicorp/terraform/dag"
+import "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
// GraphDot returns the dot formatting of a visual representation of
// the given Terraform graph.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_interface_subgraph.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_interface_subgraph.go
similarity index 79%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_interface_subgraph.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_interface_subgraph.go
index 768590fb0..a005ea5a0 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_interface_subgraph.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_interface_subgraph.go
@@ -1,7 +1,7 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// GraphNodeSubPath says that a node is part of a graph with a
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_walk.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_walk.go
similarity index 86%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_walk.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_walk.go
index e980e0c6d..d699376f2 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_walk.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_walk.go
@@ -1,9 +1,9 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// GraphWalker is an interface that can be implemented that when used
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_walk_context.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_walk_context.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_walk_context.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_walk_context.go
index 03c192a86..11fb2fd01 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/graph_walk_context.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_walk_context.go
@@ -7,14 +7,14 @@ import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/provisioners"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// ContextGraphWalker is the GraphWalker implementation used with the
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graph_walk_operation.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_walk_operation.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/graph_walk_operation.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graph_walk_operation.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/graphtype_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graphtype_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/graphtype_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/graphtype_string.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/hook.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/hook.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/terraform/hook.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/hook.go
index c0bb23ab2..b5be94824 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/hook.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/hook.go
@@ -3,10 +3,10 @@ package terraform
import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// HookAction is an enum of actions that can be taken as a result of a hook
@@ -143,19 +143,3 @@ func (*NilHook) PostImportState(addr addrs.AbsResourceInstance, imported []provi
func (*NilHook) PostStateUpdate(new *states.State) (HookAction, error) {
return HookActionContinue, nil
}
-
-// handleHook turns hook actions into panics. This lets you use the
-// panic/recover mechanism in Go as a flow control mechanism for hook
-// actions.
-func handleHook(a HookAction, err error) {
- if err != nil {
- // TODO: handle errors
- }
-
- switch a {
- case HookActionContinue:
- return
- case HookActionHalt:
- panic(HookActionHalt)
- }
-}
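For context on the hook.go change above: the removed handleHook helper converted a HookActionHalt into a panic so that a deeply nested graph walk could be unwound and caught with recover. The diff only shows the panic half, so here is a minimal, standalone sketch of that panic/recover flow-control pattern; it is not part of the vendored diff, and all names (hookAction, handle, walk) are hypothetical.

package main

import "fmt"

type hookAction int

const (
	actionContinue hookAction = iota
	actionHalt
)

// handle mirrors the removed helper: a halt action becomes a panic value.
func handle(a hookAction) {
	if a == actionHalt {
		panic(actionHalt)
	}
}

// walk runs a series of hook results and converts a halt panic back into a
// normal return value at the top of the call stack.
func walk(steps []hookAction) (halted bool) {
	defer func() {
		if r := recover(); r != nil {
			if act, ok := r.(hookAction); ok && act == actionHalt {
				halted = true
				return
			}
			panic(r) // unrelated panics are re-raised untouched
		}
	}()
	for _, s := range steps {
		handle(s)
	}
	return false
}

func main() {
	fmt.Println(walk([]hookAction{actionContinue, actionHalt, actionContinue})) // true
}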
diff --git a/vendor/github.com/hashicorp/terraform/terraform/hook_mock.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/hook_mock.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/hook_mock.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/hook_mock.go
index 6efa31963..74a29bde0 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/hook_mock.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/hook_mock.go
@@ -5,10 +5,10 @@ import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// MockHook is an implementation of Hook that can be used for tests.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/hook_stop.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/hook_stop.go
similarity index 92%
rename from vendor/github.com/hashicorp/terraform/terraform/hook_stop.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/hook_stop.go
index 811fb337c..42c3d20cb 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/hook_stop.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/hook_stop.go
@@ -5,10 +5,10 @@ import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// stopHook is a private Hook implementation that Terraform uses to
diff --git a/vendor/github.com/hashicorp/terraform/terraform/instancetype.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/instancetype.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/instancetype.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/instancetype.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/instancetype_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/instancetype_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/instancetype_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/instancetype_string.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/module_dependencies.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/module_dependencies.go
index 66a68c7de..f1434e625 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/module_dependencies.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/module_dependencies.go
@@ -3,11 +3,11 @@ package terraform
import (
version "github.com/hashicorp/go-version"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/moduledeps"
- "github.com/hashicorp/terraform/plugin/discovery"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// ConfigTreeDependencies returns the dependencies of the tree of modules
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_count_boundary.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_count_boundary.go
similarity index 88%
rename from vendor/github.com/hashicorp/terraform/terraform/node_count_boundary.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_count_boundary.go
index e4952039c..acd8262b0 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_count_boundary.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_count_boundary.go
@@ -1,7 +1,7 @@
package terraform
import (
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
)
// NodeCountBoundary fixes up any transitions between "each modes" in objects
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_data_destroy.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_data_destroy.go
similarity index 87%
rename from vendor/github.com/hashicorp/terraform/terraform/node_data_destroy.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_data_destroy.go
index 6ba39904d..56a33bce2 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_data_destroy.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_data_destroy.go
@@ -1,8 +1,8 @@
package terraform
import (
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// NodeDestroyableDataResourceInstance represents a resource that is "destroyable":
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_data_refresh.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_data_refresh.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/node_data_refresh.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_data_refresh.go
index 7133d42bf..56283c0ac 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_data_refresh.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_data_refresh.go
@@ -1,11 +1,11 @@
package terraform
import (
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
)
@@ -169,6 +169,7 @@ func (n *NodeRefreshableDataResourceInstance) EvalTree() EvalNode {
&EvalReadData{
Addr: addr.Resource,
Config: n.Config,
+ Dependencies: n.StateReferences(),
Provider: &provider,
ProviderAddr: n.ResolvedProvider,
ProviderSchema: &providerSchema,
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_local.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_local.go
similarity index 86%
rename from vendor/github.com/hashicorp/terraform/terraform/node_local.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_local.go
index 591eb305a..38681d83d 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_local.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_local.go
@@ -1,10 +1,10 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
)
// NodeLocal represents a named local value in a particular module.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_module_removed.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_module_removed.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/node_module_removed.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_module_removed.go
index 99e440903..6e3cb41dc 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_module_removed.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_module_removed.go
@@ -3,7 +3,7 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// NodeModuleRemoved represents a module that is no longer in the
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_module_variable.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_module_variable.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/node_module_variable.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_module_variable.go
index 6b675e570..76311a56d 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_module_variable.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_module_variable.go
@@ -2,10 +2,10 @@ package terraform
import (
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_output.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_output.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/node_output.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_output.go
index bb3d06531..753057123 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_output.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_output.go
@@ -3,10 +3,10 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
)
// NodeApplyableOutput represents an output that is "applyable":
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_output_orphan.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_output_orphan.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/node_output_orphan.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_output_orphan.go
index 518b8aa09..a76d1742c 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_output_orphan.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_output_orphan.go
@@ -3,7 +3,7 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// NodeOutputOrphan represents an output that is an orphan.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/node_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_provider_abstract.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider_abstract.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/terraform/node_provider_abstract.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider_abstract.go
index a0cdcfe01..afdd4741d 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_provider_abstract.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider_abstract.go
@@ -1,11 +1,11 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// ConcreteProviderNodeFunc is a callback type used to convert an
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_provider_disabled.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider_disabled.go
similarity index 92%
rename from vendor/github.com/hashicorp/terraform/terraform/node_provider_disabled.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider_disabled.go
index 30d8813a4..51335654b 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_provider_disabled.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider_disabled.go
@@ -3,7 +3,7 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// NodeDisabledProvider represents a provider that is disabled. A disabled
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_provider_eval.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider_eval.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/node_provider_eval.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provider_eval.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_provisioner.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provisioner.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/node_provisioner.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provisioner.go
index cf51cf06c..573f030d7 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_provisioner.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_provisioner.go
@@ -3,7 +3,7 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// NodeProvisioner represents a provider that has no associated operations.
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_abstract.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_abstract.go
similarity index 78%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_abstract.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_abstract.go
index 8d6f01832..c7b0e3c8e 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_abstract.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_abstract.go
@@ -3,14 +3,15 @@ package terraform
import (
"fmt"
"log"
-
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/lang"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "sort"
+
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// ConcreteResourceNodeFunc is a callback type used to convert an
@@ -34,10 +35,6 @@ type ConcreteResourceInstanceNodeFunc func(*NodeAbstractResourceInstance) dag.Ve
// configuration.
type GraphNodeResourceInstance interface {
ResourceInstanceAddr() addrs.AbsResourceInstance
-
- // StateDependencies returns any inter-resource dependencies that are
- // stored in the state.
- StateDependencies() []addrs.AbsResource
}
// NodeAbstractResource represents a resource that has no associated
@@ -96,8 +93,8 @@ type NodeAbstractResourceInstance struct {
// The fields below will be automatically set using the Attach
// interfaces if you're running those transforms, but also be explicitly
// set if you already have that information.
+
ResourceState *states.Resource
- Dependencies []addrs.AbsResource
}
var (
@@ -170,12 +167,12 @@ func (n *NodeAbstractResource) References() []*addrs.Reference {
var result []*addrs.Reference
for _, traversal := range c.DependsOn {
- ref, diags := addrs.ParseRef(traversal)
- if diags.HasErrors() {
+ ref, err := addrs.ParseRef(traversal)
+ if err != nil {
// We ignore this here, because this isn't a suitable place to return
// errors. This situation should be caught and rejected during
// validation.
- log.Printf("[ERROR] Can't parse %#v from depends_on as reference: %s", traversal, diags.Err())
+ log.Printf("[ERROR] Can't parse %#v from depends_on as reference: %s", traversal, err)
continue
}
@@ -223,8 +220,7 @@ func (n *NodeAbstractResource) References() []*addrs.Reference {
func (n *NodeAbstractResourceInstance) References() []*addrs.Reference {
// If we have a configuration attached then we'll delegate to our
// embedded abstract resource, which knows how to extract dependencies
- // from configuration. If there is no config, then the dependencies will
- // be connected during destroy from those stored in the state.
+ // from configuration.
if n.Config != nil {
if n.Schema == nil {
// We'll produce a log message about this out here so that
@@ -236,10 +232,8 @@ func (n *NodeAbstractResourceInstance) References() []*addrs.Reference {
return n.NodeAbstractResource.References()
}
- // FIXME: remove once the deprecated DependsOn values have been removed from state
- // The state dependencies are now connected in a separate transformation as
- // absolute addresses, but we need to keep this here until we can be sure
- // that no state will need to use the old depends_on references.
+ // Otherwise, if we have state then we'll use the values stored in state
+ // as a fallback.
if rs := n.ResourceState; rs != nil {
if s := rs.Instance(n.InstanceKey); s != nil {
// State is still storing dependencies as old-style strings, so we'll
@@ -251,26 +245,26 @@ func (n *NodeAbstractResourceInstance) References() []*addrs.Reference {
// difficult to reproduce, so we will fix the symptom here and hope
// to find the root cause another time.
//
- // https://github.com/hashicorp/terraform/issues/21407
+ // https://github.com/hashicorp/terraform-plugin-sdk/issues/21407
if s.Current == nil {
log.Printf("[WARN] no current state found for %s", n.Name())
- return nil
- }
- for _, addr := range s.Current.DependsOn {
- if addr == nil {
- // Should never happen; indicates a bug in the state loader
- panic(fmt.Sprintf("dependencies for current object on %s contains nil address", n.ResourceInstanceAddr()))
+ } else {
+ for _, addr := range s.Current.Dependencies {
+ if addr == nil {
+ // Should never happen; indicates a bug in the state loader
+ panic(fmt.Sprintf("dependencies for current object on %s contains nil address", n.ResourceInstanceAddr()))
+ }
+
+ // This is a little weird: we need to manufacture an addrs.Reference
+ // with a fake range here because the state isn't something we can
+ // make source references into.
+ result = append(result, &addrs.Reference{
+ Subject: addr,
+ SourceRange: tfdiags.SourceRange{
+ Filename: "(state file)",
+ },
+ })
}
-
- // This is a little weird: we need to manufacture an addrs.Reference
- // with a fake range here because the state isn't something we can
- // make source references into.
- result = append(result, &addrs.Reference{
- Subject: addr,
- SourceRange: tfdiags.SourceRange{
- Filename: "(state file)",
- },
- })
}
return result
}
@@ -280,31 +274,67 @@ func (n *NodeAbstractResourceInstance) References() []*addrs.Reference {
return nil
}
-// converts an instance address to the legacy dotted notation
-func dottedInstanceAddr(tr addrs.ResourceInstance) string {
- // The legacy state format uses dot-separated instance keys,
- // rather than bracketed as in our modern syntax.
- var suffix string
- switch tk := tr.Key.(type) {
- case addrs.IntKey:
- suffix = fmt.Sprintf(".%d", int(tk))
- case addrs.StringKey:
- suffix = fmt.Sprintf(".%s", string(tk))
+// StateReferences returns the dependencies to put into the state for
+// this resource.
+func (n *NodeAbstractResourceInstance) StateReferences() []addrs.Referenceable {
+ selfAddrs := n.ReferenceableAddrs()
+
+ // Since we don't include the source location references in our
+ // results from this method, we'll also filter out duplicates:
+ // there's no point in listing the same object twice without
+ // that additional context.
+ seen := map[string]struct{}{}
+
+ // Pretend that we've already "seen" all of our own addresses so that we
+ // won't record self-references in the state. This can arise if, for
+ // example, a provisioner for a resource refers to the resource itself,
+ // which is valid (since provisioners always run after apply) but should
+ // not create an explicit dependency edge.
+ for _, selfAddr := range selfAddrs {
+ seen[selfAddr.String()] = struct{}{}
+ if riAddr, ok := selfAddr.(addrs.ResourceInstance); ok {
+ seen[riAddr.ContainingResource().String()] = struct{}{}
+ }
}
- return tr.Resource.String() + suffix
-}
-// StateDependencies returns the dependencies saved in the state.
-func (n *NodeAbstractResourceInstance) StateDependencies() []addrs.AbsResource {
- if rs := n.ResourceState; rs != nil {
- if s := rs.Instance(n.InstanceKey); s != nil {
- if s.Current != nil {
- return s.Current.Dependencies
- }
+ depsRaw := n.References()
+ deps := make([]addrs.Referenceable, 0, len(depsRaw))
+ for _, d := range depsRaw {
+ subj := d.Subject
+ if mco, isOutput := subj.(addrs.ModuleCallOutput); isOutput {
+ // For state dependencies, we simplify outputs to just refer
+ // to the module as a whole. It's not really clear why we do this,
+ // but this logic is preserved from before the 0.12 rewrite of
+ // this function.
+ subj = mco.Call
+ }
+
+ k := subj.String()
+ if _, exists := seen[k]; exists {
+ continue
+ }
+ seen[k] = struct{}{}
+ switch tr := subj.(type) {
+ case addrs.ResourceInstance:
+ deps = append(deps, tr)
+ case addrs.Resource:
+ deps = append(deps, tr)
+ case addrs.ModuleCallInstance:
+ deps = append(deps, tr)
+ default:
+ // No other reference types are recorded in the state.
}
}
- return nil
+ // We'll also sort them, since that'll avoid creating changes in the
+ // serialized state that make no semantic difference.
+ sort.Slice(deps, func(i, j int) bool {
+ // Simple string-based sort because we just care about consistency,
+ // not user-friendliness.
+ return deps[i].String() < deps[j].String()
+ })
+
+ return deps
}
func (n *NodeAbstractResource) SetProvider(p addrs.AbsProviderConfig) {
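For context on the node_resource_abstract.go change above: the StateReferences method added in this hunk records inter-resource dependencies for the state by pre-seeding a "seen" set with the node's own addresses, de-duplicating reference subjects on their string form, and sorting the result so the serialized state stays deterministic. Below is a minimal, generic sketch of that dedupe-and-sort pattern; it is not part of the vendored diff, and the names (dependencySet, the string-based inputs) are hypothetical stand-ins for the addrs types used in the real code.

package main

import (
	"fmt"
	"sort"
)

// dependencySet keeps one entry per distinct key and skips the node's own
// addresses, mirroring the "seen" map used by StateReferences.
func dependencySet(self []string, refs []string) []string {
	seen := make(map[string]struct{}, len(self)+len(refs))
	for _, s := range self {
		seen[s] = struct{}{} // pretend we've already "seen" ourselves
	}
	var deps []string
	for _, r := range refs {
		if _, ok := seen[r]; ok {
			continue
		}
		seen[r] = struct{}{}
		deps = append(deps, r)
	}
	// Sort purely for consistency, not user-friendliness, so repeated runs
	// produce byte-identical serialized output.
	sort.Strings(deps)
	return deps
}

func main() {
	fmt.Println(dependencySet(
		[]string{"aws_instance.web"},
		[]string{"aws_vpc.main", "aws_instance.web", "aws_vpc.main", "aws_subnet.a"},
	)) // [aws_subnet.a aws_vpc.main]
}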
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_apply.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_apply.go
similarity index 92%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_apply.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_apply.go
index 3e2fff3a0..68d438d7b 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_apply.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_apply.go
@@ -3,9 +3,9 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/lang"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
)
// NodeApplyableResource represents a resource that is "applyable":
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_apply_instance.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_apply_instance.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_apply_instance.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_apply_instance.go
index 8fa9b1283..acdda45e4 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_apply_instance.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_apply_instance.go
@@ -5,12 +5,12 @@ import (
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// NodeApplyableResourceInstance represents a resource instance that is
@@ -28,13 +28,12 @@ type NodeApplyableResourceInstance struct {
}
var (
- _ GraphNodeResource = (*NodeApplyableResourceInstance)(nil)
- _ GraphNodeResourceInstance = (*NodeApplyableResourceInstance)(nil)
- _ GraphNodeCreator = (*NodeApplyableResourceInstance)(nil)
- _ GraphNodeReferencer = (*NodeApplyableResourceInstance)(nil)
- _ GraphNodeDeposer = (*NodeApplyableResourceInstance)(nil)
- _ GraphNodeEvalable = (*NodeApplyableResourceInstance)(nil)
- _ GraphNodeAttachDependencies = (*NodeApplyableResourceInstance)(nil)
+ _ GraphNodeResource = (*NodeApplyableResourceInstance)(nil)
+ _ GraphNodeResourceInstance = (*NodeApplyableResourceInstance)(nil)
+ _ GraphNodeCreator = (*NodeApplyableResourceInstance)(nil)
+ _ GraphNodeReferencer = (*NodeApplyableResourceInstance)(nil)
+ _ GraphNodeDeposer = (*NodeApplyableResourceInstance)(nil)
+ _ GraphNodeEvalable = (*NodeApplyableResourceInstance)(nil)
)
// GraphNodeAttachDestroyer
@@ -98,11 +97,6 @@ func (n *NodeApplyableResourceInstance) References() []*addrs.Reference {
return ret
}
-// GraphNodeAttachDependencies
-func (n *NodeApplyableResourceInstance) AttachDependencies(deps []addrs.AbsResource) {
- n.Dependencies = deps
-}
-
// GraphNodeEvalable
func (n *NodeApplyableResourceInstance) EvalTree() EvalNode {
addr := n.ResourceInstanceAddr()
@@ -110,7 +104,7 @@ func (n *NodeApplyableResourceInstance) EvalTree() EvalNode {
if n.Config == nil {
// This should not be possible, but we've got here in at least one
// case as discussed in the following issue:
- // https://github.com/hashicorp/terraform/issues/21258
+ // https://github.com/hashicorp/terraform-plugin-sdk/issues/21258
// To avoid an outright crash here, we'll instead return an explicit
// error.
var diags tfdiags.Diagnostics
@@ -177,6 +171,7 @@ func (n *NodeApplyableResourceInstance) evalTreeDataResource(addr addrs.AbsResou
&EvalReadData{
Addr: addr.Resource,
Config: n.Config,
+ Dependencies: n.StateReferences(),
Planned: &change, // setting this indicates that the result must be complete
Provider: &provider,
ProviderAddr: n.ResolvedProvider,
@@ -346,6 +341,7 @@ func (n *NodeApplyableResourceInstance) evalTreeManagedResource(addr addrs.AbsRe
&EvalApply{
Addr: addr.Resource,
Config: n.Config,
+ Dependencies: n.StateReferences(),
State: &state,
Change: &diffApply,
Provider: &provider,
@@ -356,17 +352,17 @@ func (n *NodeApplyableResourceInstance) evalTreeManagedResource(addr addrs.AbsRe
CreateNew: &createNew,
},
&EvalMaybeTainted{
- Addr: addr.Resource,
- State: &state,
- Change: &diffApply,
- Error: &err,
+ Addr: addr.Resource,
+ State: &state,
+ Change: &diffApply,
+ Error: &err,
+ StateOutput: &state,
},
&EvalWriteState{
Addr: addr.Resource,
ProviderAddr: n.ResolvedProvider,
ProviderSchema: &providerSchema,
State: &state,
- Dependencies: &n.Dependencies,
},
&EvalApplyProvisioners{
Addr: addr.Resource,
@@ -377,17 +373,17 @@ func (n *NodeApplyableResourceInstance) evalTreeManagedResource(addr addrs.AbsRe
When: configs.ProvisionerWhenCreate,
},
&EvalMaybeTainted{
- Addr: addr.Resource,
- State: &state,
- Change: &diffApply,
- Error: &err,
+ Addr: addr.Resource,
+ State: &state,
+ Change: &diffApply,
+ Error: &err,
+ StateOutput: &state,
},
&EvalWriteState{
Addr: addr.Resource,
ProviderAddr: n.ResolvedProvider,
ProviderSchema: &providerSchema,
State: &state,
- Dependencies: &n.Dependencies,
},
&EvalIf{
If: func(ctx EvalContext) (bool, error) {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_destroy.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_destroy.go
index ca2267e47..049e5e990 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_destroy.go
@@ -4,12 +4,12 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// NodeDestroyResourceInstance represents a resource instance that is to be
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy_deposed.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_destroy_deposed.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy_deposed.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_destroy_deposed.go
index e0d5db836..269c79808 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_destroy_deposed.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_destroy_deposed.go
@@ -3,11 +3,11 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// ConcreteResourceInstanceDeposedNodeFunc is a callback type used to convert
@@ -178,7 +178,7 @@ var (
)
func (n *NodeDestroyDeposedResourceInstanceObject) Name() string {
- return fmt.Sprintf("%s (destroy deposed %s)", n.ResourceInstanceAddr(), n.DeposedKey)
+ return fmt.Sprintf("%s (destroy deposed %s)", n.Addr.String(), n.DeposedKey)
}
func (n *NodeDestroyDeposedResourceInstanceObject) DeposedInstanceObjectKey() states.DeposedKey {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_plan.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan.go
index ec4aa9322..2dc0df908 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan.go
@@ -3,8 +3,8 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// NodePlannableResource represents a resource that is "plannable":
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_destroy.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan_destroy.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_destroy.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan_destroy.go
index 38746f0d3..2c3a7012b 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_destroy.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan_destroy.go
@@ -3,11 +3,11 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// NodePlanDestroyableResourceInstance represents a resource that is ready
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_instance.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan_instance.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_instance.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan_instance.go
index 05ccefc34..ac4b24cf2 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_instance.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan_instance.go
@@ -3,11 +3,11 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
"github.com/zclconf/go-cty/cty"
)
@@ -78,8 +78,8 @@ func (n *NodePlannableResourceInstance) evalTreeDataResource(addr addrs.AbsResou
// Check and see if any of our dependencies have changes.
changes := ctx.Changes()
- for _, d := range n.References() {
- ri, ok := d.Subject.(addrs.ResourceInstance)
+ for _, d := range n.StateReferences() {
+ ri, ok := d.(addrs.ResourceInstance)
if !ok {
continue
}
@@ -114,6 +114,7 @@ func (n *NodePlannableResourceInstance) evalTreeDataResource(addr addrs.AbsResou
&EvalReadData{
Addr: addr.Resource,
Config: n.Config,
+ Dependencies: n.StateReferences(),
Provider: &provider,
ProviderAddr: n.ResolvedProvider,
ProviderSchema: &providerSchema,
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_orphan.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan_orphan.go
similarity index 93%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_orphan.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan_orphan.go
index 841669491..8e4f7148f 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_plan_orphan.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_plan_orphan.go
@@ -1,9 +1,9 @@
package terraform
import (
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// NodePlannableResourceInstanceOrphan represents a resource that is "applyable":
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_refresh.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_refresh.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_refresh.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_refresh.go
index 2dd549c0b..dcab37270 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_refresh.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_refresh.go
@@ -4,24 +4,20 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// NodeRefreshableManagedResource represents a resource that is expanabled into
// NodeRefreshableManagedResourceInstance. Resource count orphans are also added.
type NodeRefreshableManagedResource struct {
*NodeAbstractResource
-
- // We attach dependencies to the Resource during refresh, since the
- // instances are instantiated during DynamicExpand.
- Dependencies []addrs.AbsResource
}
var (
@@ -31,14 +27,8 @@ var (
_ GraphNodeReferencer = (*NodeRefreshableManagedResource)(nil)
_ GraphNodeResource = (*NodeRefreshableManagedResource)(nil)
_ GraphNodeAttachResourceConfig = (*NodeRefreshableManagedResource)(nil)
- _ GraphNodeAttachDependencies = (*NodeRefreshableManagedResource)(nil)
)
-// GraphNodeAttachDependencies
-func (n *NodeRefreshableManagedResource) AttachDependencies(deps []addrs.AbsResource) {
- n.Dependencies = deps
-}
-
// GraphNodeDynamicExpandable
func (n *NodeRefreshableManagedResource) DynamicExpand(ctx EvalContext) (*Graph, error) {
var diags tfdiags.Diagnostics
@@ -68,7 +58,6 @@ func (n *NodeRefreshableManagedResource) DynamicExpand(ctx EvalContext) (*Graph,
// Add the config and state since we don't do that via transforms
a.Config = n.Config
a.ResolvedProvider = n.ResolvedProvider
- a.Dependencies = n.Dependencies
return &NodeRefreshableManagedResourceInstance{
NodeAbstractResourceInstance: a,
@@ -214,11 +203,6 @@ func (n *NodeRefreshableManagedResourceInstance) evalTreeManagedResource() EvalN
Output: &state,
},
- &EvalRefreshDependencies{
- State: &state,
- Dependencies: &n.Dependencies,
- },
-
&EvalRefresh{
Addr: addr.Resource,
ProviderAddr: n.ResolvedProvider,
@@ -233,7 +217,6 @@ func (n *NodeRefreshableManagedResourceInstance) evalTreeManagedResource() EvalN
ProviderAddr: n.ResolvedProvider,
ProviderSchema: &providerSchema,
State: &state,
- Dependencies: &n.Dependencies,
},
},
}
@@ -293,7 +276,6 @@ func (n *NodeRefreshableManagedResourceInstance) evalTreeManagedResourceNoState(
ProviderAddr: n.ResolvedProvider,
ProviderSchema: &providerSchema,
State: &state,
- Dependencies: &n.Dependencies,
},
// We must also save the planned change, so that expressions in
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_resource_validate.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_validate.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/terraform/node_resource_validate.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_validate.go
index efa657bf0..f0eb18a06 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_resource_validate.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_resource_validate.go
@@ -1,10 +1,10 @@
package terraform
import (
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/node_root_variable.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_root_variable.go
similarity index 83%
rename from vendor/github.com/hashicorp/terraform/terraform/node_root_variable.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_root_variable.go
index 1c302903d..844d060c9 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/node_root_variable.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/node_root_variable.go
@@ -1,9 +1,9 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// NodeRootVariable represents a root variable input.
diff --git a/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/path.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/path.go
new file mode 100644
index 000000000..19e3469cb
--- /dev/null
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/path.go
@@ -0,0 +1,17 @@
+package terraform
+
+import (
+ "fmt"
+
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+)
+
+// PathObjectCacheKey is like PathCacheKey but includes an additional name
+// to be included in the key, for module-namespaced objects.
+//
+// The result of this function is guaranteed unique for any distinct pair
+// of path and name, but is not guaranteed to be in any particular format
+// and in particular should never be shown to end-users.
+func PathObjectCacheKey(path addrs.ModuleInstance, objectName string) string {
+ return fmt.Sprintf("%s|%s", path.String(), objectName)
+}
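Note: the new path.go file above documents a composite cache key for module-namespaced objects. As a reference only, here is a minimal standalone sketch of that key scheme; the plain string path is an assumed simplification, since the real PathObjectCacheKey takes an addrs.ModuleInstance from the SDK's internal packages.

package main

import "fmt"

// pathObjectCacheKey mirrors the shape of PathObjectCacheKey above: it joins a
// module path and an object name with a separator so distinct (path, name)
// pairs map to distinct keys. Assumed simplification: a string path stands in
// for addrs.ModuleInstance.
func pathObjectCacheKey(path, objectName string) string {
	return fmt.Sprintf("%s|%s", path, objectName)
}

func main() {
	fmt.Println(pathObjectCacheKey("module.network", "subnet"))    // module.network|subnet
	fmt.Println(pathObjectCacheKey("module.network.db", "subnet")) // module.network.db|subnet
}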
diff --git a/vendor/github.com/hashicorp/terraform/terraform/plan.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/plan.go
similarity index 84%
rename from vendor/github.com/hashicorp/terraform/terraform/plan.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/plan.go
index af04c6cd4..5c19f6e7c 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/plan.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/plan.go
@@ -5,11 +5,9 @@ import (
"encoding/gob"
"fmt"
"io"
- "sync"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
"github.com/zclconf/go-cty/cty"
-
- "github.com/hashicorp/terraform/configs"
)
func init() {
@@ -73,8 +71,6 @@ type Plan struct {
// Destroy indicates that this plan was created for a full destroy operation
Destroy bool
-
- once sync.Once
}
func (p *Plan) String() string {
@@ -86,30 +82,6 @@ func (p *Plan) String() string {
return buf.String()
}
-func (p *Plan) init() {
- p.once.Do(func() {
- if p.Diff == nil {
- p.Diff = new(Diff)
- p.Diff.init()
- }
-
- if p.State == nil {
- p.State = new(State)
- p.State.init()
- }
-
- if p.Vars == nil {
- p.Vars = make(map[string]cty.Value)
- }
- })
-}
-
-// The format byte is prefixed into the plan file format so that we have
-// the ability in the future to change the file format if we want for any
-// reason.
-const planFormatMagic = "tfplan"
-const planFormatVersion byte = 2
-
// ReadPlan reads a plan structure out of a reader in the format that
// was written by WritePlan.
func ReadPlan(src io.Reader) (*Plan, error) {
diff --git a/vendor/github.com/hashicorp/terraform/terraform/provider_mock.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/provider_mock.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/terraform/provider_mock.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/provider_mock.go
index d82dc0f4e..7e401f33e 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/provider_mock.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/provider_mock.go
@@ -8,9 +8,9 @@ import (
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
var _ providers.Interface = (*MockProvider)(nil)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/provisioner_mock.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/provisioner_mock.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/terraform/provisioner_mock.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/provisioner_mock.go
index f59589164..93b19be57 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/provisioner_mock.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/provisioner_mock.go
@@ -7,7 +7,7 @@ import (
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
- "github.com/hashicorp/terraform/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
)
var _ provisioners.Interface = (*MockProvisioner)(nil)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/resource.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource.go
index fcf28aa65..bd5774600 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource.go
@@ -11,10 +11,9 @@ import (
"github.com/mitchellh/reflectwalk"
"github.com/zclconf/go-cty/cty"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/config"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
)
// Resource is a legacy way to identify a particular resource instance.
@@ -81,12 +80,6 @@ type InstanceInfo struct {
// Type is the resource type of this instance
Type string
-
- // uniqueExtra is an internal field that can be populated to supply
- // extra metadata that is used to identify a unique instance in
- // the graph walk. This will be appended to HumanID when uniqueId
- // is called.
- uniqueExtra string
}
// NewInstanceInfo constructs an InstanceInfo from an addrs.AbsResourceInstance.
@@ -185,15 +178,6 @@ type ResourceConfig struct {
ComputedKeys []string
Raw map[string]interface{}
Config map[string]interface{}
-
- raw *config.RawConfig
-}
-
-// NewResourceConfig creates a new ResourceConfig from a config.RawConfig.
-func NewResourceConfig(c *config.RawConfig) *ResourceConfig {
- result := &ResourceConfig{raw: c}
- result.interpolateForce()
- return result
}
// NewResourceConfigRaw constructs a ResourceConfig whose content is exactly
@@ -512,31 +496,6 @@ func (c *ResourceConfig) get(
return current, true
}
-// interpolateForce is a temporary thing. We want to get rid of interpolate
-// above and likewise this, but it can only be done after the f-ast-graph
-// refactor is complete.
-func (c *ResourceConfig) interpolateForce() {
- if c.raw == nil {
- // If we don't have a lowercase "raw" but we _do_ have the uppercase
- // Raw populated then this indicates that we're recieving a shim
- // ResourceConfig created by NewResourceConfigShimmed, which is already
- // fully evaluated and thus this function doesn't need to do anything.
- if c.Raw != nil {
- return
- }
-
- var err error
- c.raw, err = config.NewRawConfig(make(map[string]interface{}))
- if err != nil {
- panic(err)
- }
- }
-
- c.ComputedKeys = c.raw.UnknownKeys()
- c.Raw = c.raw.RawMap()
- c.Config = c.raw.Config()
-}
-
// unknownCheckWalker
type unknownCheckWalker struct {
Unknown bool
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_address.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_address.go
similarity index 99%
rename from vendor/github.com/hashicorp/terraform/terraform/resource_address.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_address.go
index ca833fe13..8a683012d 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/resource_address.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_address.go
@@ -7,8 +7,8 @@ import (
"strconv"
"strings"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
)
// ResourceAddress is a way of identifying an individual resource (or,
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_mode.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_mode.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/resource_mode.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_mode.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_mode_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_mode_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/resource_mode_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_mode_string.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provider.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/resource_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provider.go
index 3455ad88c..fec45967f 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/resource_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provider.go
@@ -3,10 +3,10 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
- "github.com/hashicorp/terraform/plugin/discovery"
- "github.com/hashicorp/terraform/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
)
// ResourceProvider is an interface that must be implemented by any
@@ -23,7 +23,7 @@ type ResourceProvider interface {
* Functions related to the provider
*********************************************************************/
- // ProviderSchema returns the config schema for the main provider
+ // GetSchema returns the config schema for the main provider
// configuration, as would appear in a "provider" block in the
// configuration files.
//
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_provider_mock.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provider_mock.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/resource_provider_mock.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provider_mock.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_provisioner.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provisioner.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/resource_provisioner.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provisioner.go
index 2743dd7e9..74ee2a940 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/resource_provisioner.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provisioner.go
@@ -1,8 +1,8 @@
package terraform
import (
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/provisioners"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/provisioners"
)
// ResourceProvisioner is an interface that must be implemented by any
diff --git a/vendor/github.com/hashicorp/terraform/terraform/resource_provisioner_mock.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provisioner_mock.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/resource_provisioner_mock.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provisioner_mock.go
index 7b88cf733..ed6f241bc 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/resource_provisioner_mock.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/resource_provisioner_mock.go
@@ -3,7 +3,7 @@ package terraform
import (
"sync"
- "github.com/hashicorp/terraform/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
)
// MockResourceProvisioner implements ResourceProvisioner but mocks out all the
diff --git a/vendor/github.com/hashicorp/terraform/terraform/schemas.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/schemas.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/schemas.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/schemas.go
index 62991c82d..8bc3b017b 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/schemas.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/schemas.go
@@ -4,12 +4,12 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// Schemas is a container for various kinds of schema that Terraform needs
diff --git a/vendor/github.com/hashicorp/terraform/terraform/state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/terraform/state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state.go
index 2f97e5af3..1d742c2f8 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state.go
@@ -22,13 +22,13 @@ import (
version "github.com/hashicorp/go-version"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/tfdiags"
- tfversion "github.com/hashicorp/terraform/version"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
+ tfversion "github.com/hashicorp/terraform-plugin-sdk/internal/version"
"github.com/mitchellh/copystructure"
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
@@ -852,20 +852,6 @@ func (r *RemoteState) init() {
}
}
-func (r *RemoteState) deepcopy() *RemoteState {
- r.Lock()
- defer r.Unlock()
-
- confCopy := make(map[string]string, len(r.Config))
- for k, v := range r.Config {
- confCopy[k] = v
- }
- return &RemoteState{
- Type: r.Type,
- Config: confCopy,
- }
-}
-
func (r *RemoteState) Empty() bool {
if r == nil {
return true
@@ -944,19 +930,6 @@ func (s *OutputState) Equal(other *OutputState) bool {
return true
}
-func (s *OutputState) deepcopy() *OutputState {
- if s == nil {
- return nil
- }
-
- stateCopy, err := copystructure.Config{Lock: true}.Copy(s)
- if err != nil {
- panic(fmt.Errorf("Error copying output value: %s", err))
- }
-
- return stateCopy.(*OutputState)
-}
-
// ModuleState is used to track all the state relevant to a single
// module. Previous to Terraform 0.3, all state belonged to the "root"
// module.
@@ -1566,15 +1539,6 @@ func (s *ResourceState) init() {
}
}
-func (s *ResourceState) deepcopy() *ResourceState {
- copy, err := copystructure.Config{Lock: true}.Copy(s)
- if err != nil {
- panic(err)
- }
-
- return copy.(*ResourceState)
-}
-
// prune is used to remove any instances that are no longer required
func (s *ResourceState) prune() {
s.Lock()
diff --git a/vendor/github.com/hashicorp/terraform/terraform/state_filter.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state_filter.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/state_filter.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state_filter.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/state_upgrade_v1_to_v2.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state_upgrade_v1_to_v2.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/state_upgrade_v1_to_v2.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state_upgrade_v1_to_v2.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/state_upgrade_v2_to_v3.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state_upgrade_v2_to_v3.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/state_upgrade_v2_to_v3.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state_upgrade_v2_to_v3.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/state_v1.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state_v1.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/state_v1.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/state_v1.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/testing.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/testing.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/testing.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/testing.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/transform.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform.go
index fd3f5c7da..f9559f41b 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform.go
@@ -3,7 +3,7 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// GraphTransformer is the interface that transformers implement. This
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_config_provider.go
similarity index 78%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_config_provider.go
index 897a7e791..cbac13387 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_config_provider.go
@@ -1,8 +1,8 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
)
// GraphNodeAttachProvider is an interface that must be implemented by nodes
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_resource.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_config_resource.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_resource.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_config_resource.go
index 03f8564d7..23578c784 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_config_resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_config_resource.go
@@ -3,8 +3,8 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// GraphNodeAttachResourceConfig is an interface that must be implemented by nodes
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_schema.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_schema.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_attach_schema.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_schema.go
index c7695dd4e..fee220b52 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_schema.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_schema.go
@@ -4,8 +4,8 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// GraphNodeAttachResourceSchema is an interface implemented by node types
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_state.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_attach_state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_state.go
index 3af7b989d..f87494879 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_attach_state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_attach_state.go
@@ -3,8 +3,8 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// GraphNodeAttachResourceState is an interface that can be implemented
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_config.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_config.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_config.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_config.go
index 9d3b6f4b4..8920761ea 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_config.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_config.go
@@ -4,9 +4,9 @@ import (
"log"
"sync"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// ConfigTransformer is a GraphTransformer that adds all the resources
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_count_boundary.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_count_boundary.go
similarity index 84%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_count_boundary.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_count_boundary.go
index 01601bdda..892f75ec1 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_count_boundary.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_count_boundary.go
@@ -1,8 +1,8 @@
package terraform
import (
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// CountBoundaryTransformer adds a node that depends on everything else
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_destroy_cbd.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_destroy_cbd.go
similarity index 87%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_destroy_cbd.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_destroy_cbd.go
index 410a709ea..98e088eee 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_destroy_cbd.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_destroy_cbd.go
@@ -4,9 +4,9 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// GraphNodeDestroyerCBD must be implemented by nodes that might be
@@ -160,27 +160,23 @@ func (t *CBDEdgeTransformer) Transform(g *Graph) error {
continue
}
- // Find the resource edges
+ // Find the destroy edge. There should only be one.
for _, e := range g.EdgesTo(v) {
- switch de := e.(type) {
- case *DestroyEdge:
- // we need to invert the destroy edge from the create node
- log.Printf("[TRACE] CBDEdgeTransformer: inverting edge: %s => %s",
- dag.VertexName(de.Source()), dag.VertexName(de.Target()))
-
- // Found it! Invert.
- g.RemoveEdge(de)
- applyNode := de.Source()
- destroyNode := de.Target()
- g.Connect(&DestroyEdge{S: destroyNode, T: applyNode})
- default:
- // We cannot have any direct dependencies from creators when
- // the node is CBD without inducing a cycle.
- if _, ok := e.Source().(GraphNodeCreator); ok {
- log.Printf("[TRACE] CBDEdgeTransformer: removing non DestroyEdge to CBD destroy node: %s => %s", dag.VertexName(e.Source()), dag.VertexName(e.Target()))
- g.RemoveEdge(e)
- }
+ // Not a destroy edge, ignore it
+ de, ok := e.(*DestroyEdge)
+ if !ok {
+ continue
}
+
+ log.Printf("[TRACE] CBDEdgeTransformer: inverting edge: %s => %s",
+ dag.VertexName(de.Source()), dag.VertexName(de.Target()))
+
+ // Found it! Invert.
+ g.RemoveEdge(de)
+ applyNode := de.Source()
+ destroyNode := de.Target()
+ g.Connect(&DestroyEdge{S: destroyNode, T: applyNode})
+ break
}
// If the address has an index, we strip that. Our depMap creation
@@ -259,9 +255,7 @@ func (t *CBDEdgeTransformer) depMap(g *Graph, destroyMap map[string][]dag.Vertex
// Build the list of destroy nodes that each resource address should depend
// on. For example, when we find B, we map the address of B to A_d in the
// "depMap" variable below.
-
- // Use a nested map to remove duplicate edges.
- depMap := make(map[string]map[dag.Vertex]struct{})
+ depMap := make(map[string][]dag.Vertex)
for _, v := range g.Vertices() {
// We're looking for resources.
rn, ok := v.(GraphNodeResource)
@@ -295,25 +289,9 @@ func (t *CBDEdgeTransformer) depMap(g *Graph, destroyMap map[string][]dag.Vertex
// Keep track of the destroy nodes that this address
// needs to depend on.
key := rn.ResourceAddr().String()
-
- deps, ok := depMap[key]
- if !ok {
- deps = make(map[dag.Vertex]struct{})
- }
-
- for _, d := range dns {
- deps[d] = struct{}{}
- }
- depMap[key] = deps
- }
- }
-
- result := map[string][]dag.Vertex{}
- for k, m := range depMap {
- for v := range m {
- result[k] = append(result[k], v)
+ depMap[key] = append(depMap[key], dns...)
}
}
- return result, nil
+ return depMap, nil
}
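Note: the rewritten loop in this hunk looks for the single destroy edge pointing at a create-before-destroy node and inverts it (it also drops the nested dedupe map in depMap in favour of a plain append). A toy sketch of that inversion follows, using assumed stand-in types rather than the vendored dag package and DestroyEdge.

package main

import "fmt"

// edge is an assumed stand-in for the vendored DestroyEdge; S is the source
// vertex and T the target, matching the field names used in the hunk above.
type edge struct{ S, T string }

// invert flips the stored direction, which is what the hunk does after
// removing the original destroy edge: it reconnects S and T swapped.
func invert(e edge) edge {
	return edge{S: e.T, T: e.S}
}

func main() {
	e := edge{S: "A (apply)", T: "A (destroy)"}
	fmt.Printf("before: %s -> %s\n", e.S, e.T)
	inv := invert(e)
	fmt.Printf("after:  %s -> %s\n", inv.S, inv.T)
}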
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_destroy_edge.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_destroy_edge.go
similarity index 80%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_destroy_edge.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_destroy_edge.go
index f52429229..1d211570f 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_destroy_edge.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_destroy_edge.go
@@ -3,11 +3,11 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// GraphNodeDestroyer must be implemented by nodes that destroy resources.
@@ -54,39 +54,26 @@ type DestroyEdgeTransformer struct {
func (t *DestroyEdgeTransformer) Transform(g *Graph) error {
// Build a map of what is being destroyed (by address string) to
- // the list of destroyers.
+ // the list of destroyers. Usually there will be at most one destroyer
+ // per node, but we allow multiple if present for completeness.
destroyers := make(map[string][]GraphNodeDestroyer)
destroyerAddrs := make(map[string]addrs.AbsResourceInstance)
-
- // Record the creators, which will need to depend on the destroyers if they
- // are only being updated.
- creators := make(map[string]GraphNodeCreator)
-
- // destroyersByResource records each destroyer by the AbsResourceAddress.
- // We use this because dependencies are only referenced as resources, but we
- // will want to connect all the individual instances for correct ordering.
- destroyersByResource := make(map[string][]GraphNodeDestroyer)
for _, v := range g.Vertices() {
- switch n := v.(type) {
- case GraphNodeDestroyer:
- addrP := n.DestroyAddr()
- if addrP == nil {
- log.Printf("[WARN] DestroyEdgeTransformer: %q (%T) has no destroy address", dag.VertexName(n), v)
- continue
- }
- addr := *addrP
+ dn, ok := v.(GraphNodeDestroyer)
+ if !ok {
+ continue
+ }
- key := addr.String()
- log.Printf("[TRACE] DestroyEdgeTransformer: %q (%T) destroys %s", dag.VertexName(n), v, key)
- destroyers[key] = append(destroyers[key], n)
- destroyerAddrs[key] = addr
-
- resAddr := addr.Resource.Resource.Absolute(addr.Module).String()
- destroyersByResource[resAddr] = append(destroyersByResource[resAddr], n)
- case GraphNodeCreator:
- addr := n.CreateAddr()
- creators[addr.String()] = n
+ addrP := dn.DestroyAddr()
+ if addrP == nil {
+ continue
}
+ addr := *addrP
+
+ key := addr.String()
+ log.Printf("[TRACE] DestroyEdgeTransformer: %q (%T) destroys %s", dag.VertexName(dn), v, key)
+ destroyers[key] = append(destroyers[key], dn)
+ destroyerAddrs[key] = addr
}
// If we aren't destroying anything, there will be no edges to make
@@ -95,40 +82,6 @@ func (t *DestroyEdgeTransformer) Transform(g *Graph) error {
return nil
}
- // Connect destroy despendencies as stored in the state
- for _, ds := range destroyers {
- for _, des := range ds {
- ri, ok := des.(GraphNodeResourceInstance)
- if !ok {
- continue
- }
-
- for _, resAddr := range ri.StateDependencies() {
- for _, desDep := range destroyersByResource[resAddr.String()] {
- log.Printf("[TRACE] DestroyEdgeTransformer: %s has stored dependency of %s\n", dag.VertexName(desDep), dag.VertexName(des))
- g.Connect(dag.BasicEdge(desDep, des))
-
- }
- }
- }
- }
-
- // connect creators to any destroyers on which they may depend
- for _, c := range creators {
- ri, ok := c.(GraphNodeResourceInstance)
- if !ok {
- continue
- }
-
- for _, resAddr := range ri.StateDependencies() {
- for _, desDep := range destroyersByResource[resAddr.String()] {
- log.Printf("[TRACE] DestroyEdgeTransformer: %s has stored dependency of %s\n", dag.VertexName(c), dag.VertexName(desDep))
- g.Connect(dag.BasicEdge(c, desDep))
-
- }
- }
- }
-
// Go through and connect creators to destroyers. Going along with
// our example, this makes: A_d => A
for _, v := range g.Vertices() {
@@ -142,7 +95,13 @@ func (t *DestroyEdgeTransformer) Transform(g *Graph) error {
continue
}
- for _, d := range destroyers[addr.String()] {
+ key := addr.String()
+ ds := destroyers[key]
+ if len(ds) == 0 {
+ continue
+ }
+
+ for _, d := range ds {
// For illustrating our example
a_d := d.(dag.Vertex)
a := v
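Note: the simplified transformer in this hunk keys every destroyer by its resource instance address string and then, for each creator, looks up the matching destroyers to wire the "A_d => A" edges mentioned in the comment. A minimal sketch of that map-and-lookup step, with assumed toy types in place of the graph API and illustrative addresses:

package main

import "fmt"

// destroyer and creator are assumed stand-ins for the GraphNodeDestroyer and
// GraphNodeCreator vertices in the hunk above; each exposes the address of the
// resource instance it acts on.
type destroyer struct{ addr string }
type creator struct{ addr string }

func main() {
	// Index destroyers by address string, as destroyers[key] does above.
	destroyers := map[string][]destroyer{}
	for _, d := range []destroyer{{"null_resource.a"}, {"null_resource.b"}} {
		destroyers[d.addr] = append(destroyers[d.addr], d)
	}

	// For each creator, connect it to every destroyer at the same address;
	// creators with nothing being destroyed are skipped, as in the hunk.
	for _, c := range []creator{{"null_resource.a"}, {"null_resource.c"}} {
		ds := destroyers[c.addr]
		if len(ds) == 0 {
			continue
		}
		for _, d := range ds {
			fmt.Printf("connect %s (destroy) => %s (create)\n", d.addr, c.addr)
		}
	}
}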
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_diff.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_diff.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_diff.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_diff.go
index 23b6e2a75..b7a237fce 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_diff.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_diff.go
@@ -4,10 +4,10 @@ import (
"fmt"
"log"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/plans"
- "github.com/hashicorp/terraform/states"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/plans"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// DiffTransformer is a GraphTransformer that adds graph nodes representing
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_expand.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_expand.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_expand.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_expand.go
index 982c098b8..03eac685e 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_expand.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_expand.go
@@ -3,7 +3,7 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// GraphNodeExapndable is an interface that nodes can implement to
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_import_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_import_provider.go
similarity index 90%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_import_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_import_provider.go
index c801e5c8f..2ce23ddbe 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_import_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_import_provider.go
@@ -4,8 +4,8 @@ import (
"fmt"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// ImportProviderValidateTransformer is a GraphTransformer that goes through
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_import_state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_import_state.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_import_state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_import_state.go
index ab0ecae0a..7dd2c4876 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_import_state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_import_state.go
@@ -3,9 +3,9 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/providers"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/providers"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// ImportStateTransformer is a GraphTransformer that adds nodes to the
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_local.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_local.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_local.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_local.go
index 84eb26b26..b97dea2ab 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_local.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_local.go
@@ -1,7 +1,7 @@
package terraform
import (
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
)
// LocalTransformer is a GraphTransformer that adds all the local values
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_module_variable.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_module_variable.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_module_variable.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_module_variable.go
index 18e0b2d1f..caa4b6a63 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_module_variable.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_module_variable.go
@@ -4,11 +4,11 @@ import (
"fmt"
"github.com/hashicorp/hcl/v2/hclsyntax"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
)
// ModuleVariableTransformer is a GraphTransformer that adds all the variables
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_count.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_orphan_count.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_orphan_count.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_orphan_count.go
index 40163cf91..4d1323fb0 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_count.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_orphan_count.go
@@ -3,9 +3,9 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_output.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_orphan_output.go
similarity index 87%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_orphan_output.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_orphan_output.go
index c67540934..cab10da12 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_output.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_orphan_output.go
@@ -3,9 +3,9 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// OrphanOutputTransformer finds the outputs that aren't present
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_resource.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_orphan_resource.go
similarity index 96%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_orphan_resource.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_orphan_resource.go
index 50df1781e..f927b1086 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_orphan_resource.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_orphan_resource.go
@@ -3,9 +3,9 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// OrphanResourceInstanceTransformer is a GraphTransformer that adds orphaned
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_output.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_output.go
similarity index 95%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_output.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_output.go
index ed93cdb87..e2979ac5c 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_output.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_output.go
@@ -3,8 +3,8 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// OutputTransformer is a GraphTransformer that adds all the outputs
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_provider.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_provider.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_provider.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_provider.go
index bc86d295f..9c8966fac 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_provider.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_provider.go
@@ -5,10 +5,10 @@ import (
"log"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
func TransformProviders(providers []string, concrete ConcreteProviderNodeFunc, config *configs.Config) GraphTransformer {
@@ -397,18 +397,6 @@ func providerVertexMap(g *Graph) map[string]GraphNodeProvider {
return m
}
-func closeProviderVertexMap(g *Graph) map[string]GraphNodeCloseProvider {
- m := make(map[string]GraphNodeCloseProvider)
- for _, v := range g.Vertices() {
- if pv, ok := v.(GraphNodeCloseProvider); ok {
- addr := pv.CloseProviderAddr()
- m[addr.String()] = pv
- }
- }
-
- return m
-}
-
type graphNodeCloseProvider struct {
Addr addrs.AbsProviderConfig
}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_provisioner.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_provisioner.go
similarity index 81%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_provisioner.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_provisioner.go
index b31026655..e6fe25dac 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_provisioner.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_provisioner.go
@@ -4,8 +4,10 @@ import (
"fmt"
"log"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+
"github.com/hashicorp/go-multierror"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// GraphNodeProvisioner is an interface that nodes that can be a provisioner
@@ -41,15 +43,16 @@ func (t *ProvisionerTransformer) Transform(g *Graph) error {
for _, v := range g.Vertices() {
if pv, ok := v.(GraphNodeProvisionerConsumer); ok {
for _, p := range pv.ProvisionedBy() {
- if m[p] == nil {
+ key := provisionerMapKey(p, pv)
+ if m[key] == nil {
err = multierror.Append(err, fmt.Errorf(
"%s: provisioner %s couldn't be found",
dag.VertexName(v), p))
continue
}
- log.Printf("[TRACE] ProvisionerTransformer: %s is provisioned by %s (%q)", dag.VertexName(v), p, dag.VertexName(m[p]))
- g.Connect(dag.BasicEdge(v, m[p]))
+ log.Printf("[TRACE] ProvisionerTransformer: %s is provisioned by %s (%q)", dag.VertexName(v), key, dag.VertexName(m[key]))
+ g.Connect(dag.BasicEdge(v, m[key]))
}
}
}
@@ -82,8 +85,18 @@ func (t *MissingProvisionerTransformer) Transform(g *Graph) error {
continue
}
+ // If this node has a subpath, then we use that as a prefix
+ // into our map to check for an existing provider.
+ path := addrs.RootModuleInstance
+ if sp, ok := pv.(GraphNodeSubPath); ok {
+ path = sp.Path()
+ }
+
for _, p := range pv.ProvisionedBy() {
- if _, ok := m[p]; ok {
+ // Build the key for storing in the map
+ key := provisionerMapKey(p, pv)
+
+ if _, ok := m[key]; ok {
// This provisioner already exists as a configure node
continue
}
@@ -97,11 +110,12 @@ func (t *MissingProvisionerTransformer) Transform(g *Graph) error {
// Build the vertex
var newV dag.Vertex = &NodeProvisioner{
NameValue: p,
+ PathValue: path,
}
// Add the missing provisioner node to the graph
- m[p] = g.Add(newV)
- log.Printf("[TRACE] MissingProviderTransformer: added implicit provisioner %s, first implied by %s", p, dag.VertexName(v))
+ m[key] = g.Add(newV)
+ log.Printf("[TRACE] MissingProviderTransformer: added implicit provisioner %s, first implied by %s", key, dag.VertexName(v))
}
}
@@ -139,11 +153,23 @@ func (t *CloseProvisionerTransformer) Transform(g *Graph) error {
return nil
}
+// provisionerMapKey is a helper that gives us the key to use for the
+// maps returned by things such as provisionerVertexMap.
+func provisionerMapKey(k string, v dag.Vertex) string {
+ pathPrefix := ""
+ if sp, ok := v.(GraphNodeSubPath); ok {
+ pathPrefix = sp.Path().String() + "."
+ }
+
+ return pathPrefix + k
+}
+
func provisionerVertexMap(g *Graph) map[string]dag.Vertex {
m := make(map[string]dag.Vertex)
for _, v := range g.Vertices() {
if pv, ok := v.(GraphNodeProvisioner); ok {
- m[pv.ProvisionerName()] = v
+ key := provisionerMapKey(pv.ProvisionerName(), v)
+ m[key] = v
}
}
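Note: the provisionerMapKey helper introduced in this hunk namespaces each provisioner name with the module path of the consuming node, so separate modules get separate provisioner vertices. A standalone sketch of the key shape it produces; the string module path and the root-module handling here are assumed simplifications of GraphNodeSubPath / addrs.ModuleInstance.

package main

import "fmt"

// provisionerMapKey mirrors the helper added above: when the consuming vertex
// carries a module path, that path prefixes the provisioner name; otherwise
// (root module, in this simplified sketch) the bare name is used.
func provisionerMapKey(name, modulePath string) string {
	prefix := ""
	if modulePath != "" {
		prefix = modulePath + "."
	}
	return prefix + name
}

func main() {
	fmt.Println(provisionerMapKey("local-exec", ""))               // local-exec
	fmt.Println(provisionerMapKey("local-exec", "module.network")) // module.network.local-exec
}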
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_reference.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_reference.go
similarity index 84%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_reference.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_reference.go
index 25e544996..54f9829c7 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_reference.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_reference.go
@@ -3,15 +3,12 @@ package terraform
import (
"fmt"
"log"
- "sort"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/dag"
- "github.com/hashicorp/terraform/lang"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/lang"
)
// GraphNodeReferenceable must be implemented by any node that represents
@@ -40,11 +37,6 @@ type GraphNodeReferencer interface {
References() []*addrs.Reference
}
-type GraphNodeAttachDependencies interface {
- GraphNodeResource
- AttachDependencies([]addrs.AbsResource)
-}
-
// GraphNodeReferenceOutside is an interface that can optionally be implemented.
// A node that implements it can specify that its own referenceable addresses
// and/or the addresses it references are in a different module than the
@@ -92,79 +84,6 @@ func (t *ReferenceTransformer) Transform(g *Graph) error {
for _, parent := range parents {
g.Connect(dag.BasicEdge(v, parent))
}
-
- if len(parents) > 0 {
- continue
- }
- }
-
- return nil
-}
-
-// AttachDependenciesTransformer records all resource dependencies for each
-// instance, and attaches the addresses to the node itself. Managed resource
-// will record these in the state for proper ordering of destroy operations.
-type AttachDependenciesTransformer struct {
- Config *configs.Config
- State *states.State
- Schemas *Schemas
-}
-
-func (t AttachDependenciesTransformer) Transform(g *Graph) error {
- for _, v := range g.Vertices() {
- attacher, ok := v.(GraphNodeAttachDependencies)
- if !ok {
- continue
- }
- selfAddr := attacher.ResourceAddr()
-
- // Data sources don't need to track destroy dependencies
- if selfAddr.Resource.Mode == addrs.DataResourceMode {
- continue
- }
-
- ans, err := g.Ancestors(v)
- if err != nil {
- return err
- }
-
- // dedupe addrs when there's multiple instances involved, or
- // multiple paths in the un-reduced graph
- depMap := map[string]addrs.AbsResource{}
- for _, d := range ans.List() {
- var addr addrs.AbsResource
-
- switch d := d.(type) {
- case GraphNodeResourceInstance:
- instAddr := d.ResourceInstanceAddr()
- addr = instAddr.Resource.Resource.Absolute(instAddr.Module)
- case GraphNodeResource:
- addr = d.ResourceAddr()
- default:
- continue
- }
-
- // Data sources don't need to track destroy dependencies
- if addr.Resource.Mode == addrs.DataResourceMode {
- continue
- }
-
- if addr.Equal(selfAddr) {
- continue
- }
- depMap[addr.String()] = addr
- }
-
- deps := make([]addrs.AbsResource, 0, len(depMap))
- for _, d := range depMap {
- deps = append(deps, d)
- }
- sort.Slice(deps, func(i, j int) bool {
- return deps[i].String() < deps[j].String()
- })
-
- log.Printf("[TRACE] AttachDependenciesTransformer: %s depends on %s", attacher.ResourceAddr(), deps)
- attacher.AttachDependencies(deps)
}
return nil
@@ -525,17 +444,3 @@ func appendResourceDestroyReferences(refs []*addrs.Reference) []*addrs.Reference
}
return refs
}
-
-func modulePrefixStr(p addrs.ModuleInstance) string {
- return p.String()
-}
-
-func modulePrefixList(result []string, prefix string) []string {
- if prefix != "" {
- for i, v := range result {
- result[i] = fmt.Sprintf("%s.%s", prefix, v)
- }
- }
-
- return result
-}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_removed_modules.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_removed_modules.go
similarity index 84%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_removed_modules.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_removed_modules.go
index ee71387e2..327950d88 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_removed_modules.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_removed_modules.go
@@ -3,8 +3,8 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// RemovedModuleTransformer implements GraphTransformer to add nodes indicating
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_resource_count.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_resource_count.go
similarity index 89%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_resource_count.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_resource_count.go
index c70a3c144..51d9466a2 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_resource_count.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_resource_count.go
@@ -1,9 +1,9 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
"github.com/zclconf/go-cty/cty"
)
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_root.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_root.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_root.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_root.go
index aee053d17..485c1c8a0 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_root.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_root.go
@@ -1,6 +1,6 @@
package terraform
-import "github.com/hashicorp/terraform/dag"
+import "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
const rootNodeName = "root"
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_state.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_state.go
similarity index 97%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_state.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_state.go
index 0b52347df..e7d95be97 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_state.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_state.go
@@ -3,7 +3,7 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/states"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/states"
)
// StateTransformer is a GraphTransformer that adds the elements of
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_targets.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_targets.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_targets.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_targets.go
index d25274e68..beb1eed9e 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_targets.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_targets.go
@@ -3,8 +3,8 @@ package terraform
import (
"log"
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// GraphNodeTargetable is an interface for graph nodes to implement when they
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_transitive_reduction.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_transitive_reduction.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_transitive_reduction.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_transitive_reduction.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_variable.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_variable.go
similarity index 87%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_variable.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_variable.go
index 05daa5135..3afce5660 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_variable.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_variable.go
@@ -1,8 +1,8 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
)
// RootVariableTransformer is a GraphTransformer that adds all the root
diff --git a/vendor/github.com/hashicorp/terraform/terraform/transform_vertex.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_vertex.go
similarity index 94%
rename from vendor/github.com/hashicorp/terraform/terraform/transform_vertex.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_vertex.go
index 6b1293fc2..6b3c62d1f 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/transform_vertex.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/transform_vertex.go
@@ -3,7 +3,7 @@ package terraform
import (
"fmt"
- "github.com/hashicorp/terraform/dag"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/dag"
)
// VertexTransformer is a GraphTransformer that transforms vertices
diff --git a/vendor/github.com/hashicorp/terraform/terraform/ui_input.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_input.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/ui_input.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_input.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/ui_input_mock.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_input_mock.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/ui_input_mock.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_input_mock.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/ui_input_prefix.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_input_prefix.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/ui_input_prefix.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_input_prefix.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/ui_output.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_output.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/ui_output.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_output.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/ui_output_callback.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_output_callback.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/ui_output_callback.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_output_callback.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/ui_output_mock.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_output_mock.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/ui_output_mock.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_output_mock.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/ui_output_provisioner.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_output_provisioner.go
similarity index 87%
rename from vendor/github.com/hashicorp/terraform/terraform/ui_output_provisioner.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_output_provisioner.go
index fff964f4b..0d7d4ce03 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/ui_output_provisioner.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/ui_output_provisioner.go
@@ -1,7 +1,7 @@
package terraform
import (
- "github.com/hashicorp/terraform/addrs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/addrs"
)
// ProvisionerUIOutput is an implementation of UIOutput that calls a hook
diff --git a/vendor/github.com/hashicorp/terraform/terraform/util.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/util.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/util.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/util.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/valuesourcetype_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/valuesourcetype_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/valuesourcetype_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/valuesourcetype_string.go
diff --git a/vendor/github.com/hashicorp/terraform/terraform/variables.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/variables.go
similarity index 98%
rename from vendor/github.com/hashicorp/terraform/terraform/variables.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/variables.go
index 14f6a3ccf..4ae9c92cf 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/variables.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/variables.go
@@ -7,8 +7,8 @@ import (
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
)
// InputValue represents a value for a variable in the root module, provided
diff --git a/vendor/github.com/hashicorp/terraform/terraform/version_required.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/version_required.go
similarity index 91%
rename from vendor/github.com/hashicorp/terraform/terraform/version_required.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/version_required.go
index ba9af1d14..4cc3bbba6 100644
--- a/vendor/github.com/hashicorp/terraform/terraform/version_required.go
+++ b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/version_required.go
@@ -4,11 +4,11 @@ import (
"fmt"
"github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/terraform/tfdiags"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags"
- "github.com/hashicorp/terraform/configs"
+ "github.com/hashicorp/terraform-plugin-sdk/internal/configs"
- tfversion "github.com/hashicorp/terraform/version"
+ tfversion "github.com/hashicorp/terraform-plugin-sdk/internal/version"
)
// CheckCoreVersionRequirements visits each of the modules in the given
diff --git a/vendor/github.com/hashicorp/terraform/terraform/walkoperation_string.go b/vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/walkoperation_string.go
similarity index 100%
rename from vendor/github.com/hashicorp/terraform/terraform/walkoperation_string.go
rename to vendor/github.com/hashicorp/terraform-plugin-sdk/terraform/walkoperation_string.go
diff --git a/vendor/github.com/hashicorp/terraform/config/append.go b/vendor/github.com/hashicorp/terraform/config/append.go
deleted file mode 100644
index 9d80c42b7..000000000
--- a/vendor/github.com/hashicorp/terraform/config/append.go
+++ /dev/null
@@ -1,92 +0,0 @@
-package config
-
-// Append appends one configuration to another.
-//
-// Append assumes that both configurations will not have
-// conflicting variables, resources, etc. If they do, the
-// problems will be caught in the validation phase.
-//
-// It is possible that c1, c2 on their own are not valid. For
-// example, a resource in c2 may reference a variable in c1. But
-// together, they would be valid.
-func Append(c1, c2 *Config) (*Config, error) {
- c := new(Config)
-
- // Append unknown keys, but keep them unique since it is a set
- unknowns := make(map[string]struct{})
- for _, k := range c1.unknownKeys {
- _, present := unknowns[k]
- if !present {
- unknowns[k] = struct{}{}
- c.unknownKeys = append(c.unknownKeys, k)
- }
- }
-
- for _, k := range c2.unknownKeys {
- _, present := unknowns[k]
- if !present {
- unknowns[k] = struct{}{}
- c.unknownKeys = append(c.unknownKeys, k)
- }
- }
-
- c.Atlas = c1.Atlas
- if c2.Atlas != nil {
- c.Atlas = c2.Atlas
- }
-
- // merge Terraform blocks
- if c1.Terraform != nil {
- c.Terraform = c1.Terraform
- if c2.Terraform != nil {
- c.Terraform.Merge(c2.Terraform)
- }
- } else {
- c.Terraform = c2.Terraform
- }
-
- if len(c1.Modules) > 0 || len(c2.Modules) > 0 {
- c.Modules = make(
- []*Module, 0, len(c1.Modules)+len(c2.Modules))
- c.Modules = append(c.Modules, c1.Modules...)
- c.Modules = append(c.Modules, c2.Modules...)
- }
-
- if len(c1.Outputs) > 0 || len(c2.Outputs) > 0 {
- c.Outputs = make(
- []*Output, 0, len(c1.Outputs)+len(c2.Outputs))
- c.Outputs = append(c.Outputs, c1.Outputs...)
- c.Outputs = append(c.Outputs, c2.Outputs...)
- }
-
- if len(c1.ProviderConfigs) > 0 || len(c2.ProviderConfigs) > 0 {
- c.ProviderConfigs = make(
- []*ProviderConfig,
- 0, len(c1.ProviderConfigs)+len(c2.ProviderConfigs))
- c.ProviderConfigs = append(c.ProviderConfigs, c1.ProviderConfigs...)
- c.ProviderConfigs = append(c.ProviderConfigs, c2.ProviderConfigs...)
- }
-
- if len(c1.Resources) > 0 || len(c2.Resources) > 0 {
- c.Resources = make(
- []*Resource,
- 0, len(c1.Resources)+len(c2.Resources))
- c.Resources = append(c.Resources, c1.Resources...)
- c.Resources = append(c.Resources, c2.Resources...)
- }
-
- if len(c1.Variables) > 0 || len(c2.Variables) > 0 {
- c.Variables = make(
- []*Variable, 0, len(c1.Variables)+len(c2.Variables))
- c.Variables = append(c.Variables, c1.Variables...)
- c.Variables = append(c.Variables, c2.Variables...)
- }
-
- if len(c1.Locals) > 0 || len(c2.Locals) > 0 {
- c.Locals = make([]*Local, 0, len(c1.Locals)+len(c2.Locals))
- c.Locals = append(c.Locals, c1.Locals...)
- c.Locals = append(c.Locals, c2.Locals...)
- }
-
- return c, nil
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/config.go b/vendor/github.com/hashicorp/terraform/config/config.go
deleted file mode 100644
index 497effef5..000000000
--- a/vendor/github.com/hashicorp/terraform/config/config.go
+++ /dev/null
@@ -1,1171 +0,0 @@
-// The config package is responsible for loading and validating the
-// configuration.
-package config
-
-import (
- "fmt"
- "regexp"
- "strconv"
- "strings"
-
- hcl2 "github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/hil/ast"
- "github.com/hashicorp/terraform/helper/hilmapstructure"
- "github.com/hashicorp/terraform/plugin/discovery"
- "github.com/hashicorp/terraform/tfdiags"
- "github.com/mitchellh/reflectwalk"
-)
-
-// NameRegexp is the regular expression that all names (modules, providers,
-// resources, etc.) must follow.
-var NameRegexp = regexp.MustCompile(`(?i)\A[A-Z0-9_][A-Z0-9\-\_]*\z`)
-
-// Config is the configuration that comes from loading a collection
-// of Terraform templates.
-type Config struct {
- // Dir is the path to the directory where this configuration was
- // loaded from. If it is blank, this configuration wasn't loaded from
- // any meaningful directory.
- Dir string
-
- Terraform *Terraform
- Atlas *AtlasConfig
- Modules []*Module
- ProviderConfigs []*ProviderConfig
- Resources []*Resource
- Variables []*Variable
- Locals []*Local
- Outputs []*Output
-
- // The fields below can be filled in by loaders for validation
- // purposes.
- unknownKeys []string
-}
-
-// AtlasConfig is the configuration for building in HashiCorp's Atlas.
-type AtlasConfig struct {
- Name string
- Include []string
- Exclude []string
-}
-
-// Module is a module used within a configuration.
-//
-// This does not represent a module itself, this represents a module
-// call-site within an existing configuration.
-type Module struct {
- Name string
- Source string
- Version string
- Providers map[string]string
- RawConfig *RawConfig
-}
-
-// ProviderConfig is the configuration for a resource provider.
-//
-// For example, Terraform needs to set the AWS access keys for the AWS
-// resource provider.
-type ProviderConfig struct {
- Name string
- Alias string
- Version string
- RawConfig *RawConfig
-}
-
-// A resource represents a single Terraform resource in the configuration.
-// A Terraform resource is something that supports some or all of the
-// usual "create, read, update, delete" operations, depending on
-// the given Mode.
-type Resource struct {
- Mode ResourceMode // which operations the resource supports
- Name string
- Type string
- RawCount *RawConfig
- RawConfig *RawConfig
- Provisioners []*Provisioner
- Provider string
- DependsOn []string
- Lifecycle ResourceLifecycle
-}
-
-// Copy returns a copy of this Resource. Helpful for avoiding shared
-// config pointers across multiple pieces of the graph that need to do
-// interpolation.
-func (r *Resource) Copy() *Resource {
- n := &Resource{
- Mode: r.Mode,
- Name: r.Name,
- Type: r.Type,
- RawCount: r.RawCount.Copy(),
- RawConfig: r.RawConfig.Copy(),
- Provisioners: make([]*Provisioner, 0, len(r.Provisioners)),
- Provider: r.Provider,
- DependsOn: make([]string, len(r.DependsOn)),
- Lifecycle: *r.Lifecycle.Copy(),
- }
- for _, p := range r.Provisioners {
- n.Provisioners = append(n.Provisioners, p.Copy())
- }
- copy(n.DependsOn, r.DependsOn)
- return n
-}
-
-// ResourceLifecycle is used to store the lifecycle tuning parameters
-// to allow customized behavior
-type ResourceLifecycle struct {
- CreateBeforeDestroy bool `mapstructure:"create_before_destroy"`
- PreventDestroy bool `mapstructure:"prevent_destroy"`
- IgnoreChanges []string `mapstructure:"ignore_changes"`
-}
-
-// Copy returns a copy of this ResourceLifecycle
-func (r *ResourceLifecycle) Copy() *ResourceLifecycle {
- n := &ResourceLifecycle{
- CreateBeforeDestroy: r.CreateBeforeDestroy,
- PreventDestroy: r.PreventDestroy,
- IgnoreChanges: make([]string, len(r.IgnoreChanges)),
- }
- copy(n.IgnoreChanges, r.IgnoreChanges)
- return n
-}
-
-// Provisioner is a configured provisioner step on a resource.
-type Provisioner struct {
- Type string
- RawConfig *RawConfig
- ConnInfo *RawConfig
-
- When ProvisionerWhen
- OnFailure ProvisionerOnFailure
-}
-
-// Copy returns a copy of this Provisioner
-func (p *Provisioner) Copy() *Provisioner {
- return &Provisioner{
- Type: p.Type,
- RawConfig: p.RawConfig.Copy(),
- ConnInfo: p.ConnInfo.Copy(),
- When: p.When,
- OnFailure: p.OnFailure,
- }
-}
-
-// Variable is a module argument defined within the configuration.
-type Variable struct {
- Name string
- DeclaredType string `mapstructure:"type"`
- Default interface{}
- Description string
-}
-
-// Local is a local value defined within the configuration.
-type Local struct {
- Name string
- RawConfig *RawConfig
-}
-
-// Output is an output defined within the configuration. An output is
-// resulting data that is highlighted by Terraform when finished. An
-// output marked Sensitive will be output in a masked form following
-// application, but will still be available in state.
-type Output struct {
- Name string
- DependsOn []string
- Description string
- Sensitive bool
- RawConfig *RawConfig
-}
-
-// VariableType is the type of value a variable is holding, and returned
-// by the Type() function on variables.
-type VariableType byte
-
-const (
- VariableTypeUnknown VariableType = iota
- VariableTypeString
- VariableTypeList
- VariableTypeMap
-)
-
-func (v VariableType) Printable() string {
- switch v {
- case VariableTypeString:
- return "string"
- case VariableTypeMap:
- return "map"
- case VariableTypeList:
- return "list"
- default:
- return "unknown"
- }
-}
-
-// ProviderConfigName returns the name of the provider configuration in
-// the given mapping that maps to the proper provider configuration
-// for this resource.
-func ProviderConfigName(t string, pcs []*ProviderConfig) string {
- lk := ""
- for _, v := range pcs {
- k := v.Name
- if strings.HasPrefix(t, k) && len(k) > len(lk) {
- lk = k
- }
- }
-
- return lk
-}
-
-// A unique identifier for this module.
-func (r *Module) Id() string {
- return fmt.Sprintf("%s", r.Name)
-}
-
-// Count returns the count of this resource.
-func (r *Resource) Count() (int, error) {
- raw := r.RawCount.Value()
- count, ok := r.RawCount.Value().(string)
- if !ok {
- return 0, fmt.Errorf(
- "expected count to be a string or int, got %T", raw)
- }
-
- v, err := strconv.ParseInt(count, 0, 0)
- if err != nil {
- return 0, fmt.Errorf(
- "cannot parse %q as an integer",
- count,
- )
- }
-
- return int(v), nil
-}
-
-// A unique identifier for this resource.
-func (r *Resource) Id() string {
- switch r.Mode {
- case ManagedResourceMode:
- return fmt.Sprintf("%s.%s", r.Type, r.Name)
- case DataResourceMode:
- return fmt.Sprintf("data.%s.%s", r.Type, r.Name)
- default:
- panic(fmt.Errorf("unknown resource mode %s", r.Mode))
- }
-}
-
-// Validate does some basic semantic checking of the configuration.
-func (c *Config) Validate() tfdiags.Diagnostics {
- if c == nil {
- return nil
- }
-
- var diags tfdiags.Diagnostics
-
- for _, k := range c.unknownKeys {
- diags = diags.Append(
- fmt.Errorf("Unknown root level key: %s", k),
- )
- }
-
- // Validate the Terraform config
- if tf := c.Terraform; tf != nil {
- errs := c.Terraform.Validate()
- for _, err := range errs {
- diags = diags.Append(err)
- }
- }
-
- vars := c.InterpolatedVariables()
- varMap := make(map[string]*Variable)
- for _, v := range c.Variables {
- if _, ok := varMap[v.Name]; ok {
- diags = diags.Append(fmt.Errorf(
- "Variable '%s': duplicate found. Variable names must be unique.",
- v.Name,
- ))
- }
-
- varMap[v.Name] = v
- }
-
- for k, _ := range varMap {
- if !NameRegexp.MatchString(k) {
- diags = diags.Append(fmt.Errorf(
- "variable %q: variable name must match regular expression %s",
- k, NameRegexp,
- ))
- }
- }
-
- for _, v := range c.Variables {
- if v.Type() == VariableTypeUnknown {
- diags = diags.Append(fmt.Errorf(
- "Variable '%s': must be a string or a map",
- v.Name,
- ))
- continue
- }
-
- interp := false
- fn := func(n ast.Node) (interface{}, error) {
- // LiteralNode is a literal string (outside of a ${ ... } sequence).
- // interpolationWalker skips most of these. but in particular it
- // visits those that have escaped sequences (like $${foo}) as a
- // signal that *some* processing is required on this string. For
- // our purposes here though, this is fine and not an interpolation.
- if _, ok := n.(*ast.LiteralNode); !ok {
- interp = true
- }
- return "", nil
- }
-
- w := &interpolationWalker{F: fn}
- if v.Default != nil {
- if err := reflectwalk.Walk(v.Default, w); err == nil {
- if interp {
- diags = diags.Append(fmt.Errorf(
- "variable %q: default may not contain interpolations",
- v.Name,
- ))
- }
- }
- }
- }
-
- // Check for references to user variables that do not actually
- // exist and record those errors.
- for source, vs := range vars {
- for _, v := range vs {
- uv, ok := v.(*UserVariable)
- if !ok {
- continue
- }
-
- if _, ok := varMap[uv.Name]; !ok {
- diags = diags.Append(fmt.Errorf(
- "%s: unknown variable referenced: '%s'; define it with a 'variable' block",
- source,
- uv.Name,
- ))
- }
- }
- }
-
- // Check that all count variables are valid.
- for source, vs := range vars {
- for _, rawV := range vs {
- switch v := rawV.(type) {
- case *CountVariable:
- if v.Type == CountValueInvalid {
- diags = diags.Append(fmt.Errorf(
- "%s: invalid count variable: %s",
- source,
- v.FullKey(),
- ))
- }
- case *PathVariable:
- if v.Type == PathValueInvalid {
- diags = diags.Append(fmt.Errorf(
- "%s: invalid path variable: %s",
- source,
- v.FullKey(),
- ))
- }
- }
- }
- }
-
- // Check that providers aren't declared multiple times and that their
- // version constraints, where present, are syntactically valid.
- providerSet := make(map[string]bool)
- for _, p := range c.ProviderConfigs {
- name := p.FullName()
- if _, ok := providerSet[name]; ok {
- diags = diags.Append(fmt.Errorf(
- "provider.%s: multiple configurations present; only one configuration is allowed per provider",
- name,
- ))
- continue
- }
-
- if p.Version != "" {
- _, err := discovery.ConstraintStr(p.Version).Parse()
- if err != nil {
- diags = diags.Append(&hcl2.Diagnostic{
- Severity: hcl2.DiagError,
- Summary: "Invalid provider version constraint",
- Detail: fmt.Sprintf(
- "The value %q given for provider.%s is not a valid version constraint.",
- p.Version, name,
- ),
- // TODO: include a "Subject" source reference in here,
- // once the config loader is able to retain source
- // location information.
- })
- }
- }
-
- providerSet[name] = true
- }
-
- // Check that all references to modules are valid
- modules := make(map[string]*Module)
- dupped := make(map[string]struct{})
- for _, m := range c.Modules {
- // Check for duplicates
- if _, ok := modules[m.Id()]; ok {
- if _, ok := dupped[m.Id()]; !ok {
- dupped[m.Id()] = struct{}{}
-
- diags = diags.Append(fmt.Errorf(
- "module %q: module repeated multiple times",
- m.Id(),
- ))
- }
-
- // Already seen this module, just skip it
- continue
- }
-
- modules[m.Id()] = m
-
- // Check that the source has no interpolations
- rc, err := NewRawConfig(map[string]interface{}{
- "root": m.Source,
- })
- if err != nil {
- diags = diags.Append(fmt.Errorf(
- "module %q: module source error: %s",
- m.Id(), err,
- ))
- } else if len(rc.Interpolations) > 0 {
- diags = diags.Append(fmt.Errorf(
- "module %q: module source cannot contain interpolations",
- m.Id(),
- ))
- }
-
- // Check that the name matches our regexp
- if !NameRegexp.Match([]byte(m.Name)) {
- diags = diags.Append(fmt.Errorf(
- "module %q: module name must be a letter or underscore followed by only letters, numbers, dashes, and underscores",
- m.Id(),
- ))
- }
-
- // Check that the configuration can all be strings, lists or maps
- raw := make(map[string]interface{})
- for k, v := range m.RawConfig.Raw {
- var strVal string
- if err := hilmapstructure.WeakDecode(v, &strVal); err == nil {
- raw[k] = strVal
- continue
- }
-
- var mapVal map[string]interface{}
- if err := hilmapstructure.WeakDecode(v, &mapVal); err == nil {
- raw[k] = mapVal
- continue
- }
-
- var sliceVal []interface{}
- if err := hilmapstructure.WeakDecode(v, &sliceVal); err == nil {
- raw[k] = sliceVal
- continue
- }
-
- diags = diags.Append(fmt.Errorf(
- "module %q: argument %s must have a string, list, or map value",
- m.Id(), k,
- ))
- }
-
- // Check for invalid count variables
- for _, v := range m.RawConfig.Variables {
- switch v.(type) {
- case *CountVariable:
- diags = diags.Append(fmt.Errorf(
- "module %q: count variables are only valid within resources",
- m.Name,
- ))
- case *SelfVariable:
- diags = diags.Append(fmt.Errorf(
- "module %q: self variables are only valid within resources",
- m.Name,
- ))
- }
- }
-
- // Update the raw configuration to only contain the string values
- m.RawConfig, err = NewRawConfig(raw)
- if err != nil {
- diags = diags.Append(fmt.Errorf(
- "%s: can't initialize configuration: %s",
- m.Id(), err,
- ))
- }
-
- // check that all named providers actually exist
- for _, p := range m.Providers {
- if !providerSet[p] {
- diags = diags.Append(fmt.Errorf(
- "module %q: cannot pass non-existent provider %q",
- m.Name, p,
- ))
- }
- }
-
- }
- dupped = nil
-
- // Check that all variables for modules reference modules that
- // exist.
- for source, vs := range vars {
- for _, v := range vs {
- mv, ok := v.(*ModuleVariable)
- if !ok {
- continue
- }
-
- if _, ok := modules[mv.Name]; !ok {
- diags = diags.Append(fmt.Errorf(
- "%s: unknown module referenced: %s",
- source, mv.Name,
- ))
- }
- }
- }
-
- // Check that all references to resources are valid
- resources := make(map[string]*Resource)
- dupped = make(map[string]struct{})
- for _, r := range c.Resources {
- if _, ok := resources[r.Id()]; ok {
- if _, ok := dupped[r.Id()]; !ok {
- dupped[r.Id()] = struct{}{}
-
- diags = diags.Append(fmt.Errorf(
- "%s: resource repeated multiple times",
- r.Id(),
- ))
- }
- }
-
- resources[r.Id()] = r
- }
- dupped = nil
-
- // Validate resources
- for n, r := range resources {
- // Verify count variables
- for _, v := range r.RawCount.Variables {
- switch v.(type) {
- case *CountVariable:
- diags = diags.Append(fmt.Errorf(
- "%s: resource count can't reference count variable: %s",
- n, v.FullKey(),
- ))
- case *SimpleVariable:
- diags = diags.Append(fmt.Errorf(
- "%s: resource count can't reference variable: %s",
- n, v.FullKey(),
- ))
-
- // Good
- case *ModuleVariable:
- case *ResourceVariable:
- case *TerraformVariable:
- case *UserVariable:
- case *LocalVariable:
-
- default:
- diags = diags.Append(fmt.Errorf(
- "Internal error. Unknown type in count var in %s: %T",
- n, v,
- ))
- }
- }
-
- if !r.RawCount.couldBeInteger() {
- diags = diags.Append(fmt.Errorf(
- "%s: resource count must be an integer", n,
- ))
- }
- r.RawCount.init()
-
- // Validate DependsOn
- for _, err := range c.validateDependsOn(n, r.DependsOn, resources, modules) {
- diags = diags.Append(err)
- }
-
- // Verify provisioners
- for _, p := range r.Provisioners {
- // This validation checks that there are no splat variables
- // referencing ourself. This currently is not allowed.
-
- for _, v := range p.ConnInfo.Variables {
- rv, ok := v.(*ResourceVariable)
- if !ok {
- continue
- }
-
- if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name {
- diags = diags.Append(fmt.Errorf(
- "%s: connection info cannot contain splat variable referencing itself",
- n,
- ))
- break
- }
- }
-
- for _, v := range p.RawConfig.Variables {
- rv, ok := v.(*ResourceVariable)
- if !ok {
- continue
- }
-
- if rv.Multi && rv.Index == -1 && rv.Type == r.Type && rv.Name == r.Name {
- diags = diags.Append(fmt.Errorf(
- "%s: connection info cannot contain splat variable referencing itself",
- n,
- ))
- break
- }
- }
-
- // Check for invalid when/onFailure values, though this should be
- // picked up by the loader we check here just in case.
- if p.When == ProvisionerWhenInvalid {
- diags = diags.Append(fmt.Errorf(
- "%s: provisioner 'when' value is invalid", n,
- ))
- }
- if p.OnFailure == ProvisionerOnFailureInvalid {
- diags = diags.Append(fmt.Errorf(
- "%s: provisioner 'on_failure' value is invalid", n,
- ))
- }
- }
-
- // Verify ignore_changes contains valid entries
- for _, v := range r.Lifecycle.IgnoreChanges {
- if strings.Contains(v, "*") && v != "*" {
- diags = diags.Append(fmt.Errorf(
- "%s: ignore_changes does not support using a partial string together with a wildcard: %s",
- n, v,
- ))
- }
- }
-
- // Verify ignore_changes has no interpolations
- rc, err := NewRawConfig(map[string]interface{}{
- "root": r.Lifecycle.IgnoreChanges,
- })
- if err != nil {
- diags = diags.Append(fmt.Errorf(
- "%s: lifecycle ignore_changes error: %s",
- n, err,
- ))
- } else if len(rc.Interpolations) > 0 {
- diags = diags.Append(fmt.Errorf(
- "%s: lifecycle ignore_changes cannot contain interpolations",
- n,
- ))
- }
-
- // If it is a data source then it can't have provisioners
- if r.Mode == DataResourceMode {
- if _, ok := r.RawConfig.Raw["provisioner"]; ok {
- diags = diags.Append(fmt.Errorf(
- "%s: data sources cannot have provisioners",
- n,
- ))
- }
- }
- }
-
- for source, vs := range vars {
- for _, v := range vs {
- rv, ok := v.(*ResourceVariable)
- if !ok {
- continue
- }
-
- id := rv.ResourceId()
- if _, ok := resources[id]; !ok {
- diags = diags.Append(fmt.Errorf(
- "%s: unknown resource '%s' referenced in variable %s",
- source,
- id,
- rv.FullKey(),
- ))
- continue
- }
- }
- }
-
- // Check that all locals are valid
- {
- found := make(map[string]struct{})
- for _, l := range c.Locals {
- if _, ok := found[l.Name]; ok {
- diags = diags.Append(fmt.Errorf(
- "%s: duplicate local. local value names must be unique",
- l.Name,
- ))
- continue
- }
- found[l.Name] = struct{}{}
-
- for _, v := range l.RawConfig.Variables {
- if _, ok := v.(*CountVariable); ok {
- diags = diags.Append(fmt.Errorf(
- "local %s: count variables are only valid within resources", l.Name,
- ))
- }
- }
- }
- }
-
- // Check that all outputs are valid
- {
- found := make(map[string]struct{})
- for _, o := range c.Outputs {
- // Verify the output is new
- if _, ok := found[o.Name]; ok {
- diags = diags.Append(fmt.Errorf(
- "output %q: an output of this name was already defined",
- o.Name,
- ))
- continue
- }
- found[o.Name] = struct{}{}
-
- var invalidKeys []string
- valueKeyFound := false
- for k := range o.RawConfig.Raw {
- if k == "value" {
- valueKeyFound = true
- continue
- }
- if k == "sensitive" {
- if sensitive, ok := o.RawConfig.config[k].(bool); ok {
- if sensitive {
- o.Sensitive = true
- }
- continue
- }
-
- diags = diags.Append(fmt.Errorf(
- "output %q: value for 'sensitive' must be boolean",
- o.Name,
- ))
- continue
- }
- if k == "description" {
- if desc, ok := o.RawConfig.config[k].(string); ok {
- o.Description = desc
- continue
- }
-
- diags = diags.Append(fmt.Errorf(
- "output %q: value for 'description' must be string",
- o.Name,
- ))
- continue
- }
- invalidKeys = append(invalidKeys, k)
- }
- if len(invalidKeys) > 0 {
- diags = diags.Append(fmt.Errorf(
- "output %q: invalid keys: %s",
- o.Name, strings.Join(invalidKeys, ", "),
- ))
- }
- if !valueKeyFound {
- diags = diags.Append(fmt.Errorf(
- "output %q: missing required 'value' argument", o.Name,
- ))
- }
-
- for _, v := range o.RawConfig.Variables {
- if _, ok := v.(*CountVariable); ok {
- diags = diags.Append(fmt.Errorf(
- "output %q: count variables are only valid within resources",
- o.Name,
- ))
- }
- }
-
- // Detect a common mistake of using a "count"ed resource in
- // an output value without using the splat or index form.
- // Prior to 0.11 this error was silently ignored, but outputs
- // now have their errors checked like all other contexts.
- //
- // TODO: Remove this in 0.12.
- for _, v := range o.RawConfig.Variables {
- rv, ok := v.(*ResourceVariable)
- if !ok {
- continue
- }
-
- // If the variable seems to be treating the referenced
- // resource as a singleton (no count specified) then
- // we'll check to make sure it is indeed a singleton.
- // It's a warning if not.
-
- if rv.Multi || rv.Index != 0 {
- // This reference is treating the resource as a
- // multi-resource, so the warning doesn't apply.
- continue
- }
-
- for _, r := range c.Resources {
- if r.Id() != rv.ResourceId() {
- continue
- }
-
- // We test specifically for the raw string "1" here
- // because we _do_ want to generate this warning if
- // the user has provided an expression that happens
- // to return 1 right now, to catch situations where
- // a count might dynamically be set to something
- // other than 1 and thus splat syntax is still needed
- // to be safe.
- if r.RawCount != nil && r.RawCount.Raw != nil && r.RawCount.Raw["count"] != "1" && rv.Field != "count" {
- diags = diags.Append(tfdiags.SimpleWarning(fmt.Sprintf(
- "output %q: must use splat syntax to access %s attribute %q, because it has \"count\" set; use %s.*.%s to obtain a list of the attributes across all instances",
- o.Name,
- r.Id(), rv.Field,
- r.Id(), rv.Field,
- )))
- }
- }
- }
- }
- }
-
- // Validate the self variable
- for source, rc := range c.rawConfigs() {
- // Ignore provisioners. This is a pretty brittle way to do this,
- // but better than also repeating all the resources.
- if strings.Contains(source, "provision") {
- continue
- }
-
- for _, v := range rc.Variables {
- if _, ok := v.(*SelfVariable); ok {
- diags = diags.Append(fmt.Errorf(
- "%s: cannot contain self-reference %s",
- source, v.FullKey(),
- ))
- }
- }
- }
-
- return diags
-}
-
-// InterpolatedVariables is a helper that returns a mapping of all the interpolated
-// variables within the configuration. This is used to verify references
-// are valid in the Validate step.
-func (c *Config) InterpolatedVariables() map[string][]InterpolatedVariable {
- result := make(map[string][]InterpolatedVariable)
- for source, rc := range c.rawConfigs() {
- for _, v := range rc.Variables {
- result[source] = append(result[source], v)
- }
- }
- return result
-}
-
-// rawConfigs returns all of the RawConfigs that are available keyed by
-// a human-friendly source.
-func (c *Config) rawConfigs() map[string]*RawConfig {
- result := make(map[string]*RawConfig)
- for _, m := range c.Modules {
- source := fmt.Sprintf("module '%s'", m.Name)
- result[source] = m.RawConfig
- }
-
- for _, pc := range c.ProviderConfigs {
- source := fmt.Sprintf("provider config '%s'", pc.Name)
- result[source] = pc.RawConfig
- }
-
- for _, rc := range c.Resources {
- source := fmt.Sprintf("resource '%s'", rc.Id())
- result[source+" count"] = rc.RawCount
- result[source+" config"] = rc.RawConfig
-
- for i, p := range rc.Provisioners {
- subsource := fmt.Sprintf(
- "%s provisioner %s (#%d)",
- source, p.Type, i+1)
- result[subsource] = p.RawConfig
- }
- }
-
- for _, o := range c.Outputs {
- source := fmt.Sprintf("output '%s'", o.Name)
- result[source] = o.RawConfig
- }
-
- return result
-}
-
-func (c *Config) validateDependsOn(
- n string,
- v []string,
- resources map[string]*Resource,
- modules map[string]*Module) []error {
- // Verify depends on points to resources that all exist
- var errs []error
- for _, d := range v {
- // Check if we contain interpolations
- rc, err := NewRawConfig(map[string]interface{}{
- "value": d,
- })
- if err == nil && len(rc.Variables) > 0 {
- errs = append(errs, fmt.Errorf(
- "%s: depends on value cannot contain interpolations: %s",
- n, d))
- continue
- }
-
- // If it is a module, verify it is a module
- if strings.HasPrefix(d, "module.") {
- name := d[len("module."):]
- if _, ok := modules[name]; !ok {
- errs = append(errs, fmt.Errorf(
- "%s: resource depends on non-existent module '%s'",
- n, name))
- }
-
- continue
- }
-
- // Check resources
- if _, ok := resources[d]; !ok {
- errs = append(errs, fmt.Errorf(
- "%s: resource depends on non-existent resource '%s'",
- n, d))
- }
- }
-
- return errs
-}
-
-func (m *Module) mergerName() string {
- return m.Id()
-}
-
-func (m *Module) mergerMerge(other merger) merger {
- m2 := other.(*Module)
-
- result := *m
- result.Name = m2.Name
- result.RawConfig = result.RawConfig.merge(m2.RawConfig)
-
- if m2.Source != "" {
- result.Source = m2.Source
- }
-
- return &result
-}
-
-func (o *Output) mergerName() string {
- return o.Name
-}
-
-func (o *Output) mergerMerge(m merger) merger {
- o2 := m.(*Output)
-
- result := *o
- result.Name = o2.Name
- result.Description = o2.Description
- result.RawConfig = result.RawConfig.merge(o2.RawConfig)
- result.Sensitive = o2.Sensitive
- result.DependsOn = o2.DependsOn
-
- return &result
-}
-
-func (c *ProviderConfig) GoString() string {
- return fmt.Sprintf("*%#v", *c)
-}
-
-func (c *ProviderConfig) FullName() string {
- if c.Alias == "" {
- return c.Name
- }
-
- return fmt.Sprintf("%s.%s", c.Name, c.Alias)
-}
-
-func (c *ProviderConfig) mergerName() string {
- return c.Name
-}
-
-func (c *ProviderConfig) mergerMerge(m merger) merger {
- c2 := m.(*ProviderConfig)
-
- result := *c
- result.Name = c2.Name
- result.RawConfig = result.RawConfig.merge(c2.RawConfig)
-
- if c2.Alias != "" {
- result.Alias = c2.Alias
- }
-
- return &result
-}
-
-func (r *Resource) mergerName() string {
- return r.Id()
-}
-
-func (r *Resource) mergerMerge(m merger) merger {
- r2 := m.(*Resource)
-
- result := *r
- result.Mode = r2.Mode
- result.Name = r2.Name
- result.Type = r2.Type
- result.RawConfig = result.RawConfig.merge(r2.RawConfig)
-
- if r2.RawCount.Value() != "1" {
- result.RawCount = r2.RawCount
- }
-
- if len(r2.Provisioners) > 0 {
- result.Provisioners = r2.Provisioners
- }
-
- return &result
-}
-
-// Merge merges two variables to create a new third variable.
-func (v *Variable) Merge(v2 *Variable) *Variable {
- // Shallow copy the variable
- result := *v
-
- // The names should be the same, but the second name always wins.
- result.Name = v2.Name
-
- if v2.DeclaredType != "" {
- result.DeclaredType = v2.DeclaredType
- }
- if v2.Default != nil {
- result.Default = v2.Default
- }
- if v2.Description != "" {
- result.Description = v2.Description
- }
-
- return &result
-}
-
-var typeStringMap = map[string]VariableType{
- "string": VariableTypeString,
- "map": VariableTypeMap,
- "list": VariableTypeList,
-}
-
-// Type returns the type of variable this is.
-func (v *Variable) Type() VariableType {
- if v.DeclaredType != "" {
- declaredType, ok := typeStringMap[v.DeclaredType]
- if !ok {
- return VariableTypeUnknown
- }
-
- return declaredType
- }
-
- return v.inferTypeFromDefault()
-}
-
-// ValidateTypeAndDefault ensures that default variable value is compatible
-// with the declared type (if one exists), and that the type is one which is
-// known to Terraform
-func (v *Variable) ValidateTypeAndDefault() error {
- // If an explicit type is declared, ensure it is valid
- if v.DeclaredType != "" {
- if _, ok := typeStringMap[v.DeclaredType]; !ok {
- validTypes := []string{}
- for k := range typeStringMap {
- validTypes = append(validTypes, k)
- }
- return fmt.Errorf(
- "Variable '%s' type must be one of [%s] - '%s' is not a valid type",
- v.Name,
- strings.Join(validTypes, ", "),
- v.DeclaredType,
- )
- }
- }
-
- if v.DeclaredType == "" || v.Default == nil {
- return nil
- }
-
- if v.inferTypeFromDefault() != v.Type() {
- return fmt.Errorf("'%s' has a default value which is not of type '%s' (got '%s')",
- v.Name, v.DeclaredType, v.inferTypeFromDefault().Printable())
- }
-
- return nil
-}
-
-func (v *Variable) mergerName() string {
- return v.Name
-}
-
-func (v *Variable) mergerMerge(m merger) merger {
- return v.Merge(m.(*Variable))
-}
-
-// Required tests whether a variable is required or not.
-func (v *Variable) Required() bool {
- return v.Default == nil
-}
-
-// inferTypeFromDefault contains the logic for the old method of inferring
-// variable types - we can also use this for validating that the declared
-// type matches the type of the default value
-func (v *Variable) inferTypeFromDefault() VariableType {
- if v.Default == nil {
- return VariableTypeString
- }
-
- var s string
- if err := hilmapstructure.WeakDecode(v.Default, &s); err == nil {
- v.Default = s
- return VariableTypeString
- }
-
- var m map[string]interface{}
- if err := hilmapstructure.WeakDecode(v.Default, &m); err == nil {
- v.Default = m
- return VariableTypeMap
- }
-
- var l []interface{}
- if err := hilmapstructure.WeakDecode(v.Default, &l); err == nil {
- v.Default = l
- return VariableTypeList
- }
-
- return VariableTypeUnknown
-}
-
-func (m ResourceMode) Taintable() bool {
- switch m {
- case ManagedResourceMode:
- return true
- case DataResourceMode:
- return false
- default:
- panic(fmt.Errorf("unsupported ResourceMode value %s", m))
- }
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/config_string.go b/vendor/github.com/hashicorp/terraform/config/config_string.go
deleted file mode 100644
index a6933c2a5..000000000
--- a/vendor/github.com/hashicorp/terraform/config/config_string.go
+++ /dev/null
@@ -1,378 +0,0 @@
-package config
-
-import (
- "bytes"
- "fmt"
- "sort"
- "strings"
-)
-
-// TestString is a Stringer-like function that outputs a string that can
-// be used to easily compare multiple Config structures in unit tests.
-//
-// This function has no practical use outside of unit tests and debugging.
-func (c *Config) TestString() string {
- if c == nil {
- return ""
- }
-
- var buf bytes.Buffer
- if len(c.Modules) > 0 {
- buf.WriteString("Modules:\n\n")
- buf.WriteString(modulesStr(c.Modules))
- buf.WriteString("\n\n")
- }
-
- if len(c.Variables) > 0 {
- buf.WriteString("Variables:\n\n")
- buf.WriteString(variablesStr(c.Variables))
- buf.WriteString("\n\n")
- }
-
- if len(c.ProviderConfigs) > 0 {
- buf.WriteString("Provider Configs:\n\n")
- buf.WriteString(providerConfigsStr(c.ProviderConfigs))
- buf.WriteString("\n\n")
- }
-
- if len(c.Resources) > 0 {
- buf.WriteString("Resources:\n\n")
- buf.WriteString(resourcesStr(c.Resources))
- buf.WriteString("\n\n")
- }
-
- if len(c.Outputs) > 0 {
- buf.WriteString("Outputs:\n\n")
- buf.WriteString(outputsStr(c.Outputs))
- buf.WriteString("\n")
- }
-
- return strings.TrimSpace(buf.String())
-}
-
-func terraformStr(t *Terraform) string {
- result := ""
-
- if b := t.Backend; b != nil {
- result += fmt.Sprintf("backend (%s)\n", b.Type)
-
- keys := make([]string, 0, len(b.RawConfig.Raw))
- for k, _ := range b.RawConfig.Raw {
- keys = append(keys, k)
- }
- sort.Strings(keys)
-
- for _, k := range keys {
- result += fmt.Sprintf(" %s\n", k)
- }
- }
-
- return strings.TrimSpace(result)
-}
-
-func modulesStr(ms []*Module) string {
- result := ""
- order := make([]int, 0, len(ms))
- ks := make([]string, 0, len(ms))
- mapping := make(map[string]int)
- for i, m := range ms {
- k := m.Id()
- ks = append(ks, k)
- mapping[k] = i
- }
- sort.Strings(ks)
- for _, k := range ks {
- order = append(order, mapping[k])
- }
-
- for _, i := range order {
- m := ms[i]
- result += fmt.Sprintf("%s\n", m.Id())
-
- ks := make([]string, 0, len(m.RawConfig.Raw))
- for k, _ := range m.RawConfig.Raw {
- ks = append(ks, k)
- }
- sort.Strings(ks)
-
- result += fmt.Sprintf(" source = %s\n", m.Source)
-
- for _, k := range ks {
- result += fmt.Sprintf(" %s\n", k)
- }
- }
-
- return strings.TrimSpace(result)
-}
-
-func outputsStr(os []*Output) string {
- ns := make([]string, 0, len(os))
- m := make(map[string]*Output)
- for _, o := range os {
- ns = append(ns, o.Name)
- m[o.Name] = o
- }
- sort.Strings(ns)
-
- result := ""
- for _, n := range ns {
- o := m[n]
-
- result += fmt.Sprintf("%s\n", n)
-
- if len(o.DependsOn) > 0 {
- result += fmt.Sprintf(" dependsOn\n")
- for _, d := range o.DependsOn {
- result += fmt.Sprintf(" %s\n", d)
- }
- }
-
- if len(o.RawConfig.Variables) > 0 {
- result += fmt.Sprintf(" vars\n")
- for _, rawV := range o.RawConfig.Variables {
- kind := "unknown"
- str := rawV.FullKey()
-
- switch rawV.(type) {
- case *ResourceVariable:
- kind = "resource"
- case *UserVariable:
- kind = "user"
- }
-
- result += fmt.Sprintf(" %s: %s\n", kind, str)
- }
- }
-
- if o.Description != "" {
- result += fmt.Sprintf(" description\n %s\n", o.Description)
- }
- }
-
- return strings.TrimSpace(result)
-}
-
-func localsStr(ls []*Local) string {
- ns := make([]string, 0, len(ls))
- m := make(map[string]*Local)
- for _, l := range ls {
- ns = append(ns, l.Name)
- m[l.Name] = l
- }
- sort.Strings(ns)
-
- result := ""
- for _, n := range ns {
- l := m[n]
-
- result += fmt.Sprintf("%s\n", n)
-
- if len(l.RawConfig.Variables) > 0 {
- result += fmt.Sprintf(" vars\n")
- for _, rawV := range l.RawConfig.Variables {
- kind := "unknown"
- str := rawV.FullKey()
-
- switch rawV.(type) {
- case *ResourceVariable:
- kind = "resource"
- case *UserVariable:
- kind = "user"
- }
-
- result += fmt.Sprintf(" %s: %s\n", kind, str)
- }
- }
- }
-
- return strings.TrimSpace(result)
-}
-
-// This helper turns a provider configs field into a deterministic
-// string value for comparison in tests.
-func providerConfigsStr(pcs []*ProviderConfig) string {
- result := ""
-
- ns := make([]string, 0, len(pcs))
- m := make(map[string]*ProviderConfig)
- for _, n := range pcs {
- ns = append(ns, n.Name)
- m[n.Name] = n
- }
- sort.Strings(ns)
-
- for _, n := range ns {
- pc := m[n]
-
- result += fmt.Sprintf("%s\n", n)
-
- keys := make([]string, 0, len(pc.RawConfig.Raw))
- for k, _ := range pc.RawConfig.Raw {
- keys = append(keys, k)
- }
- sort.Strings(keys)
-
- for _, k := range keys {
- result += fmt.Sprintf(" %s\n", k)
- }
-
- if len(pc.RawConfig.Variables) > 0 {
- result += fmt.Sprintf(" vars\n")
- for _, rawV := range pc.RawConfig.Variables {
- kind := "unknown"
- str := rawV.FullKey()
-
- switch rawV.(type) {
- case *ResourceVariable:
- kind = "resource"
- case *UserVariable:
- kind = "user"
- }
-
- result += fmt.Sprintf(" %s: %s\n", kind, str)
- }
- }
- }
-
- return strings.TrimSpace(result)
-}
-
-// This helper turns a resources field into a deterministic
-// string value for comparison in tests.
-func resourcesStr(rs []*Resource) string {
- result := ""
- order := make([]int, 0, len(rs))
- ks := make([]string, 0, len(rs))
- mapping := make(map[string]int)
- for i, r := range rs {
- k := r.Id()
- ks = append(ks, k)
- mapping[k] = i
- }
- sort.Strings(ks)
- for _, k := range ks {
- order = append(order, mapping[k])
- }
-
- for _, i := range order {
- r := rs[i]
- result += fmt.Sprintf(
- "%s (x%s)\n",
- r.Id(),
- r.RawCount.Value())
-
- ks := make([]string, 0, len(r.RawConfig.Raw))
- for k, _ := range r.RawConfig.Raw {
- ks = append(ks, k)
- }
- sort.Strings(ks)
-
- for _, k := range ks {
- result += fmt.Sprintf(" %s\n", k)
- }
-
- if len(r.Provisioners) > 0 {
- result += fmt.Sprintf(" provisioners\n")
- for _, p := range r.Provisioners {
- when := ""
- if p.When != ProvisionerWhenCreate {
- when = fmt.Sprintf(" (%s)", p.When.String())
- }
-
- result += fmt.Sprintf(" %s%s\n", p.Type, when)
-
- if p.OnFailure != ProvisionerOnFailureFail {
- result += fmt.Sprintf(" on_failure = %s\n", p.OnFailure.String())
- }
-
- ks := make([]string, 0, len(p.RawConfig.Raw))
- for k, _ := range p.RawConfig.Raw {
- ks = append(ks, k)
- }
- sort.Strings(ks)
-
- for _, k := range ks {
- result += fmt.Sprintf(" %s\n", k)
- }
- }
- }
-
- if len(r.DependsOn) > 0 {
- result += fmt.Sprintf(" dependsOn\n")
- for _, d := range r.DependsOn {
- result += fmt.Sprintf(" %s\n", d)
- }
- }
-
- if len(r.RawConfig.Variables) > 0 {
- result += fmt.Sprintf(" vars\n")
-
- ks := make([]string, 0, len(r.RawConfig.Variables))
- for k, _ := range r.RawConfig.Variables {
- ks = append(ks, k)
- }
- sort.Strings(ks)
-
- for _, k := range ks {
- rawV := r.RawConfig.Variables[k]
- kind := "unknown"
- str := rawV.FullKey()
-
- switch rawV.(type) {
- case *ResourceVariable:
- kind = "resource"
- case *UserVariable:
- kind = "user"
- }
-
- result += fmt.Sprintf(" %s: %s\n", kind, str)
- }
- }
- }
-
- return strings.TrimSpace(result)
-}
-
-// This helper turns a variables field into a deterministic
-// string value for comparison in tests.
-func variablesStr(vs []*Variable) string {
- result := ""
- ks := make([]string, 0, len(vs))
- m := make(map[string]*Variable)
- for _, v := range vs {
- ks = append(ks, v.Name)
- m[v.Name] = v
- }
- sort.Strings(ks)
-
- for _, k := range ks {
- v := m[k]
-
- required := ""
- if v.Required() {
- required = " (required)"
- }
-
- declaredType := ""
- if v.DeclaredType != "" {
- declaredType = fmt.Sprintf(" (%s)", v.DeclaredType)
- }
-
- if v.Default == nil || v.Default == "" {
- v.Default = "<>"
- }
- if v.Description == "" {
- v.Description = "<>"
- }
-
- result += fmt.Sprintf(
- "%s%s%s\n %v\n %s\n",
- k,
- required,
- declaredType,
- v.Default,
- v.Description)
- }
-
- return strings.TrimSpace(result)
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/config_terraform.go b/vendor/github.com/hashicorp/terraform/config/config_terraform.go
deleted file mode 100644
index 8535c9648..000000000
--- a/vendor/github.com/hashicorp/terraform/config/config_terraform.go
+++ /dev/null
@@ -1,117 +0,0 @@
-package config
-
-import (
- "fmt"
- "strings"
-
- "github.com/hashicorp/go-version"
- "github.com/mitchellh/hashstructure"
-)
-
-// Terraform is the Terraform meta-configuration that can be present
-// in configuration files for configuring Terraform itself.
-type Terraform struct {
- RequiredVersion string `hcl:"required_version"` // Required Terraform version (constraint)
- Backend *Backend // See Backend struct docs
-}
-
-// Validate performs the validation for just the Terraform configuration.
-func (t *Terraform) Validate() []error {
- var errs []error
-
- if raw := t.RequiredVersion; raw != "" {
- // Check that the value has no interpolations
- rc, err := NewRawConfig(map[string]interface{}{
- "root": raw,
- })
- if err != nil {
- errs = append(errs, fmt.Errorf(
- "terraform.required_version: %s", err))
- } else if len(rc.Interpolations) > 0 {
- errs = append(errs, fmt.Errorf(
- "terraform.required_version: cannot contain interpolations"))
- } else {
- // Check it is valid
- _, err := version.NewConstraint(raw)
- if err != nil {
- errs = append(errs, fmt.Errorf(
- "terraform.required_version: invalid syntax: %s", err))
- }
- }
- }
-
- if t.Backend != nil {
- errs = append(errs, t.Backend.Validate()...)
- }
-
- return errs
-}
-
-// Merge t with t2.
-// Any conflicting fields are overwritten by t2.
-func (t *Terraform) Merge(t2 *Terraform) {
- if t2.RequiredVersion != "" {
- t.RequiredVersion = t2.RequiredVersion
- }
-
- if t2.Backend != nil {
- t.Backend = t2.Backend
- }
-}
-
-// Backend is the configuration for the "backend" to use with Terraform.
-// A backend is responsible for all major behavior of Terraform's core.
-// The abstraction layer above the core (the "backend") allows for behavior
-// such as remote operation.
-type Backend struct {
- Type string
- RawConfig *RawConfig
-
- // Hash is a unique hash code representing the original configuration
- // of the backend. This won't be recomputed unless Rehash is called.
- Hash uint64
-}
-
-// Rehash returns a unique content hash for this backend's configuration
-// as a uint64 value.
-func (b *Backend) Rehash() uint64 {
- // If we have no backend, the value is zero
- if b == nil {
- return 0
- }
-
- // Use hashstructure to hash only our type with the config.
- code, err := hashstructure.Hash(map[string]interface{}{
- "type": b.Type,
- "config": b.RawConfig.Raw,
- }, nil)
-
- // This should never happen since we have just some basic primitives
- // so panic if there is an error.
- if err != nil {
- panic(err)
- }
-
- return code
-}
-
-func (b *Backend) Validate() []error {
- if len(b.RawConfig.Interpolations) > 0 {
- return []error{fmt.Errorf(strings.TrimSpace(errBackendInterpolations))}
- }
-
- return nil
-}
-
-const errBackendInterpolations = `
-terraform.backend: configuration cannot contain interpolations
-
-The backend configuration is loaded by Terraform extremely early, before
-the core of Terraform can be initialized. This is necessary because the backend
-dictates the behavior of that core. The core is what handles interpolation
-processing. Because of this, interpolations cannot be used in backend
-configuration.
-
-If you'd like to parameterize backend configuration, we recommend using
-partial configuration with the "-backend-config" flag to "terraform init".
-`
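
The required_version check in the removed Validate method boils down to two calls into github.com/hashicorp/go-version: parse the constraint, then test a version against it. A minimal sketch of just that step, assuming nothing beyond the go-version API (the function name and version strings here are illustrative):

package main

import (
	"fmt"

	"github.com/hashicorp/go-version"
)

// checkRequiredVersion mirrors the removed Validate step: parse the
// constraint string, then check the running version against it.
func checkRequiredVersion(constraint, running string) error {
	c, err := version.NewConstraint(constraint)
	if err != nil {
		return fmt.Errorf("invalid constraint syntax: %s", err)
	}
	v, err := version.NewVersion(running)
	if err != nil {
		return fmt.Errorf("invalid version: %s", err)
	}
	if !c.Check(v) {
		return fmt.Errorf("version %s does not satisfy %q", running, constraint)
	}
	return nil
}

func main() {
	fmt.Println(checkRequiredVersion(">= 0.12", "0.12.29"))
}

Keeping the constraint free of interpolations, as the removed code enforces, is what allows this check to run before any evaluation machinery is available.
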
diff --git a/vendor/github.com/hashicorp/terraform/config/config_tree.go b/vendor/github.com/hashicorp/terraform/config/config_tree.go
deleted file mode 100644
index 08dc0fe90..000000000
--- a/vendor/github.com/hashicorp/terraform/config/config_tree.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package config
-
-// configTree represents a tree of configurations where the root is the
-// first file and its children are the configurations it has imported.
-type configTree struct {
- Path string
- Config *Config
- Children []*configTree
-}
-
-// Flatten flattens the entire tree down to a single merged Config
-// structure.
-func (t *configTree) Flatten() (*Config, error) {
- // No children is easy: we're already merged!
- if len(t.Children) == 0 {
- return t.Config, nil
- }
-
- // Depth-first, merge all the children first.
- childConfigs := make([]*Config, len(t.Children))
- for i, ct := range t.Children {
- c, err := ct.Flatten()
- if err != nil {
- return nil, err
- }
-
- childConfigs[i] = c
- }
-
- // Merge all the children in order
- config := childConfigs[0]
- childConfigs = childConfigs[1:]
- for _, config2 := range childConfigs {
- var err error
- config, err = Merge(config, config2)
- if err != nil {
- return nil, err
- }
- }
-
- // Merge the final merged child config with our own
- return Merge(config, t.Config)
-}
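
Flatten above is a depth-first fold: each child subtree is flattened first, the results are merged left to right, and the node's own config is merged in last so it wins conflicts. A hedged standalone sketch of the same traversal over stand-in types (node and merge are illustrative, not the vendor API):

package main

import "fmt"

// node is a stand-in for configTree: a value plus imported children.
type node struct {
	value    map[string]string
	children []*node
}

// merge overlays b onto a, matching the "later wins" merge direction.
func merge(a, b map[string]string) map[string]string {
	out := map[string]string{}
	for k, v := range a {
		out[k] = v
	}
	for k, v := range b {
		out[k] = v
	}
	return out
}

// flatten merges children depth-first, then the node's own value last,
// mirroring the removed configTree.Flatten.
func flatten(n *node) map[string]string {
	result := map[string]string{}
	for _, c := range n.children {
		result = merge(result, flatten(c))
	}
	return merge(result, n.value)
}

func main() {
	root := &node{
		value:    map[string]string{"region": "eu-west-2"},
		children: []*node{{value: map[string]string{"region": "us-east-1", "zone": "a"}}},
	}
	fmt.Println(flatten(root)) // map[region:eu-west-2 zone:a]
}
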
diff --git a/vendor/github.com/hashicorp/terraform/config/import_tree.go b/vendor/github.com/hashicorp/terraform/config/import_tree.go
deleted file mode 100644
index 08cbc7736..000000000
--- a/vendor/github.com/hashicorp/terraform/config/import_tree.go
+++ /dev/null
@@ -1,151 +0,0 @@
-package config
-
-import (
- "bufio"
- "fmt"
- "io"
- "os"
-
- "github.com/hashicorp/errwrap"
-)
-
-// configurable is an interface that must be implemented by any configuration
-// format of Terraform in order to return a *Config.
-type configurable interface {
- Config() (*Config, error)
-}
-
-// importTree is the result of the first-pass load of the configuration
-// files. It is a tree of raw configurables and then any children (their
-// imports).
-//
-// An importTree can be turned into a configTree.
-type importTree struct {
- Path string
- Raw configurable
- Children []*importTree
-}
-
-// This is the function type that must be implemented by the configuration
-// file loader to turn a single file into a configurable and any additional
-// imports.
-type fileLoaderFunc func(path string) (configurable, []string, error)
-
-// Set this to a non-empty value at link time to enable the HCL2 experiment.
-// This is not currently enabled for release builds.
-//
-// For example:
-// go install -ldflags="-X github.com/hashicorp/terraform/config.enableHCL2Experiment=true" github.com/hashicorp/terraform
-var enableHCL2Experiment = ""
-
-// loadTree takes a single file and loads the entire importTree for that
-// file. This function detects what kind of configuration file it is an
-// executes the proper fileLoaderFunc.
-func loadTree(root string) (*importTree, error) {
- var f fileLoaderFunc
-
- // HCL2 experiment is currently activated at build time via the linker.
- // See the comment on this variable for more information.
- if enableHCL2Experiment == "" {
- // Main-line behavior: always use the original HCL parser
- switch ext(root) {
- case ".tf", ".tf.json":
- f = loadFileHcl
- default:
- }
- } else {
- // Experimental behavior: use the HCL2 parser if the opt-in comment
- // is present.
- switch ext(root) {
- case ".tf":
- // We need to sniff the file for the opt-in comment line to decide
- // if the file is participating in the HCL2 experiment.
- cf, err := os.Open(root)
- if err != nil {
- return nil, err
- }
- sc := bufio.NewScanner(cf)
- for sc.Scan() {
- if sc.Text() == "#terraform:hcl2" {
- f = globalHCL2Loader.loadFile
- }
- }
- if f == nil {
- f = loadFileHcl
- }
- case ".tf.json":
- f = loadFileHcl
- default:
- }
- }
-
- if f == nil {
- return nil, fmt.Errorf(
- "%s: unknown configuration format. Use '.tf' or '.tf.json' extension",
- root)
- }
-
- c, imps, err := f(root)
- if err != nil {
- return nil, err
- }
-
- children := make([]*importTree, len(imps))
- for i, imp := range imps {
- t, err := loadTree(imp)
- if err != nil {
- return nil, err
- }
-
- children[i] = t
- }
-
- return &importTree{
- Path: root,
- Raw: c,
- Children: children,
- }, nil
-}
-
-// Close releases any resources we might be holding open for the importTree.
-//
-// This can safely be called even while ConfigTree results are alive. The
-// importTree is not bound to these.
-func (t *importTree) Close() error {
- if c, ok := t.Raw.(io.Closer); ok {
- c.Close()
- }
- for _, ct := range t.Children {
- ct.Close()
- }
-
- return nil
-}
-
-// ConfigTree traverses the importTree and turns each node into a *Config
-// object, ultimately returning a *configTree.
-func (t *importTree) ConfigTree() (*configTree, error) {
- config, err := t.Raw.Config()
- if err != nil {
- return nil, errwrap.Wrapf(fmt.Sprintf("Error loading %s: {{err}}", t.Path), err)
- }
-
- // Build our result
- result := &configTree{
- Path: t.Path,
- Config: config,
- }
-
- // Build the config trees for the children
- result.Children = make([]*configTree, len(t.Children))
- for i, ct := range t.Children {
- t, err := ct.ConfigTree()
- if err != nil {
- return nil, err
- }
-
- result.Children[i] = t
- }
-
- return result, nil
-}
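
One detail of the removed loadTree worth calling out: when the HCL2 experiment is linked in, a .tf file is scanned line by line for the literal opt-in marker "#terraform:hcl2" before a parser is chosen. A standalone sketch of that sniffing step (the path in main is illustrative):

package main

import (
	"bufio"
	"fmt"
	"os"
)

// optsInToHCL2 reports whether any line of the file is exactly the
// opt-in marker, the same check the removed loadTree performed.
func optsInToHCL2(path string) (bool, error) {
	f, err := os.Open(path)
	if err != nil {
		return false, err
	}
	defer f.Close()

	sc := bufio.NewScanner(f)
	for sc.Scan() {
		if sc.Text() == "#terraform:hcl2" {
			return true, nil
		}
	}
	return false, sc.Err()
}

func main() {
	ok, err := optsInToHCL2("main.tf") // illustrative path
	fmt.Println(ok, err)
}

Unlike the loop above, this sketch closes the file itself via defer rather than leaving cleanup to the surrounding loader.
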
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate.go b/vendor/github.com/hashicorp/terraform/config/interpolate.go
deleted file mode 100644
index 599e5ecdb..000000000
--- a/vendor/github.com/hashicorp/terraform/config/interpolate.go
+++ /dev/null
@@ -1,439 +0,0 @@
-package config
-
-import (
- "fmt"
- "strconv"
- "strings"
-
- "github.com/hashicorp/terraform/tfdiags"
-
- "github.com/hashicorp/hil/ast"
-)
-
-// An InterpolatedVariable is a variable reference within an interpolation.
-//
-// Implementations of this interface represent various sources where
-// variables can come from: user variables, resources, etc.
-type InterpolatedVariable interface {
- FullKey() string
- SourceRange() tfdiags.SourceRange
-}
-
-// varRange can be embedded into an InterpolatedVariable implementation to
-// implement the SourceRange method.
-type varRange struct {
- rng tfdiags.SourceRange
-}
-
-func (r varRange) SourceRange() tfdiags.SourceRange {
- return r.rng
-}
-
-func makeVarRange(rng tfdiags.SourceRange) varRange {
- return varRange{rng}
-}
-
-// CountVariable is a variable for referencing information about
-// the count.
-type CountVariable struct {
- Type CountValueType
- key string
- varRange
-}
-
-// CountValueType is the type of the count variable that is referenced.
-type CountValueType byte
-
-const (
- CountValueInvalid CountValueType = iota
- CountValueIndex
-)
-
-// A ModuleVariable is a variable that is referencing the output
-// of a module, such as "${module.foo.bar}"
-type ModuleVariable struct {
- Name string
- Field string
- key string
- varRange
-}
-
-// A PathVariable is a variable that references path information about the
-// module.
-type PathVariable struct {
- Type PathValueType
- key string
- varRange
-}
-
-type PathValueType byte
-
-const (
- PathValueInvalid PathValueType = iota
- PathValueCwd
- PathValueModule
- PathValueRoot
-)
-
-// A ResourceVariable is a variable that is referencing the field
-// of a resource, such as "${aws_instance.foo.ami}"
-type ResourceVariable struct {
- Mode ResourceMode
- Type string // Resource type, i.e. "aws_instance"
- Name string // Resource name
- Field string // Resource field
-
- Multi bool // True if multi-variable: aws_instance.foo.*.id
- Index int // Index for multi-variable: aws_instance.foo.1.id == 1
-
- key string
- varRange
-}
-
-// SelfVariable is a variable that is referencing the same resource
-// it is running on: "${self.address}"
-type SelfVariable struct {
- Field string
-
- key string
- varRange
-}
-
-// SimpleVariable is an unprefixed variable, which can show up when users have
-// strings they are passing down to resources that use interpolation
-// internally. The template_file resource is an example of this.
-type SimpleVariable struct {
- Key string
- varRange
-}
-
-// TerraformVariable is a "terraform."-prefixed variable used to access
-// metadata about the Terraform run.
-type TerraformVariable struct {
- Field string
- key string
- varRange
-}
-
-// A UserVariable is a variable that is referencing a user variable
-// that is inputted from outside the configuration. This looks like
-// "${var.foo}"
-type UserVariable struct {
- Name string
- Elem string
-
- key string
- varRange
-}
-
-// A LocalVariable is a variable that references a local value defined within
-// the current module, via a "locals" block. This looks like "${local.foo}".
-type LocalVariable struct {
- Name string
- varRange
-}
-
-func NewInterpolatedVariable(v string) (InterpolatedVariable, error) {
- if strings.HasPrefix(v, "count.") {
- return NewCountVariable(v)
- } else if strings.HasPrefix(v, "path.") {
- return NewPathVariable(v)
- } else if strings.HasPrefix(v, "self.") {
- return NewSelfVariable(v)
- } else if strings.HasPrefix(v, "terraform.") {
- return NewTerraformVariable(v)
- } else if strings.HasPrefix(v, "var.") {
- return NewUserVariable(v)
- } else if strings.HasPrefix(v, "local.") {
- return NewLocalVariable(v)
- } else if strings.HasPrefix(v, "module.") {
- return NewModuleVariable(v)
- } else if !strings.ContainsRune(v, '.') {
- return NewSimpleVariable(v)
- } else {
- return NewResourceVariable(v)
- }
-}
-
-func NewCountVariable(key string) (*CountVariable, error) {
- var fieldType CountValueType
- parts := strings.SplitN(key, ".", 2)
- switch parts[1] {
- case "index":
- fieldType = CountValueIndex
- }
-
- return &CountVariable{
- Type: fieldType,
- key: key,
- }, nil
-}
-
-func (c *CountVariable) FullKey() string {
- return c.key
-}
-
-func NewModuleVariable(key string) (*ModuleVariable, error) {
- parts := strings.SplitN(key, ".", 3)
- if len(parts) < 3 {
- return nil, fmt.Errorf(
- "%s: module variables must be three parts: module.name.attr",
- key)
- }
-
- return &ModuleVariable{
- Name: parts[1],
- Field: parts[2],
- key: key,
- }, nil
-}
-
-func (v *ModuleVariable) FullKey() string {
- return v.key
-}
-
-func (v *ModuleVariable) GoString() string {
- return fmt.Sprintf("*%#v", *v)
-}
-
-func NewPathVariable(key string) (*PathVariable, error) {
- var fieldType PathValueType
- parts := strings.SplitN(key, ".", 2)
- switch parts[1] {
- case "cwd":
- fieldType = PathValueCwd
- case "module":
- fieldType = PathValueModule
- case "root":
- fieldType = PathValueRoot
- }
-
- return &PathVariable{
- Type: fieldType,
- key: key,
- }, nil
-}
-
-func (v *PathVariable) FullKey() string {
- return v.key
-}
-
-func NewResourceVariable(key string) (*ResourceVariable, error) {
- var mode ResourceMode
- var parts []string
- if strings.HasPrefix(key, "data.") {
- mode = DataResourceMode
- parts = strings.SplitN(key, ".", 4)
- if len(parts) < 4 {
- return nil, fmt.Errorf(
- "%s: data variables must be four parts: data.TYPE.NAME.ATTR",
- key)
- }
-
- // Don't actually need the "data." prefix for parsing, since it's
- // always constant.
- parts = parts[1:]
- } else {
- mode = ManagedResourceMode
- parts = strings.SplitN(key, ".", 3)
- if len(parts) < 3 {
- return nil, fmt.Errorf(
- "%s: resource variables must be three parts: TYPE.NAME.ATTR",
- key)
- }
- }
-
- field := parts[2]
- multi := false
- var index int
-
- if idx := strings.Index(field, "."); idx != -1 {
- indexStr := field[:idx]
- multi = indexStr == "*"
- index = -1
-
- if !multi {
- indexInt, err := strconv.ParseInt(indexStr, 0, 0)
- if err == nil {
- multi = true
- index = int(indexInt)
- }
- }
-
- if multi {
- field = field[idx+1:]
- }
- }
-
- return &ResourceVariable{
- Mode: mode,
- Type: parts[0],
- Name: parts[1],
- Field: field,
- Multi: multi,
- Index: index,
- key: key,
- }, nil
-}
-
-func (v *ResourceVariable) ResourceId() string {
- switch v.Mode {
- case ManagedResourceMode:
- return fmt.Sprintf("%s.%s", v.Type, v.Name)
- case DataResourceMode:
- return fmt.Sprintf("data.%s.%s", v.Type, v.Name)
- default:
- panic(fmt.Errorf("unknown resource mode %s", v.Mode))
- }
-}
-
-func (v *ResourceVariable) FullKey() string {
- return v.key
-}
-
-func NewSelfVariable(key string) (*SelfVariable, error) {
- field := key[len("self."):]
-
- return &SelfVariable{
- Field: field,
-
- key: key,
- }, nil
-}
-
-func (v *SelfVariable) FullKey() string {
- return v.key
-}
-
-func (v *SelfVariable) GoString() string {
- return fmt.Sprintf("*%#v", *v)
-}
-
-func NewSimpleVariable(key string) (*SimpleVariable, error) {
- return &SimpleVariable{Key: key}, nil
-}
-
-func (v *SimpleVariable) FullKey() string {
- return v.Key
-}
-
-func (v *SimpleVariable) GoString() string {
- return fmt.Sprintf("*%#v", *v)
-}
-
-func NewTerraformVariable(key string) (*TerraformVariable, error) {
- field := key[len("terraform."):]
- return &TerraformVariable{
- Field: field,
- key: key,
- }, nil
-}
-
-func (v *TerraformVariable) FullKey() string {
- return v.key
-}
-
-func (v *TerraformVariable) GoString() string {
- return fmt.Sprintf("*%#v", *v)
-}
-
-func NewUserVariable(key string) (*UserVariable, error) {
- name := key[len("var."):]
- elem := ""
- if idx := strings.Index(name, "."); idx > -1 {
- elem = name[idx+1:]
- name = name[:idx]
- }
-
- if len(elem) > 0 {
- return nil, fmt.Errorf("Invalid dot index found: 'var.%s.%s'. Values in maps and lists can be referenced using square bracket indexing, like: 'var.mymap[\"key\"]' or 'var.mylist[1]'.", name, elem)
- }
-
- return &UserVariable{
- key: key,
-
- Name: name,
- Elem: elem,
- }, nil
-}
-
-func (v *UserVariable) FullKey() string {
- return v.key
-}
-
-func (v *UserVariable) GoString() string {
- return fmt.Sprintf("*%#v", *v)
-}
-
-func NewLocalVariable(key string) (*LocalVariable, error) {
- name := key[len("local."):]
- if idx := strings.Index(name, "."); idx > -1 {
- return nil, fmt.Errorf("Can't use dot (.) attribute access in local.%s; use square bracket indexing", name)
- }
-
- return &LocalVariable{
- Name: name,
- }, nil
-}
-
-func (v *LocalVariable) FullKey() string {
- return fmt.Sprintf("local.%s", v.Name)
-}
-
-func (v *LocalVariable) GoString() string {
- return fmt.Sprintf("*%#v", *v)
-}
-
-// DetectVariables takes an AST root and returns all the interpolated
-// variables that are detected in the AST tree.
-func DetectVariables(root ast.Node) ([]InterpolatedVariable, error) {
- var result []InterpolatedVariable
- var resultErr error
-
- // Visitor callback
- fn := func(n ast.Node) ast.Node {
- if resultErr != nil {
- return n
- }
-
- switch vn := n.(type) {
- case *ast.VariableAccess:
- v, err := NewInterpolatedVariable(vn.Name)
- if err != nil {
- resultErr = err
- return n
- }
- result = append(result, v)
- case *ast.Index:
- if va, ok := vn.Target.(*ast.VariableAccess); ok {
- v, err := NewInterpolatedVariable(va.Name)
- if err != nil {
- resultErr = err
- return n
- }
- result = append(result, v)
- }
- if va, ok := vn.Key.(*ast.VariableAccess); ok {
- v, err := NewInterpolatedVariable(va.Name)
- if err != nil {
- resultErr = err
- return n
- }
- result = append(result, v)
- }
- default:
- return n
- }
-
- return n
- }
-
- // Visitor pattern
- root.Accept(fn)
-
- if resultErr != nil {
- return nil, resultErr
- }
-
- return result, nil
-}
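
NewInterpolatedVariable above is essentially a prefix dispatcher over the variable key. A minimal sketch of the same classification rules, returning a label instead of constructing the concrete variable types (classify is an illustrative name):

package main

import (
	"fmt"
	"strings"
)

// classify mirrors the prefix checks in the removed NewInterpolatedVariable,
// returning a kind label instead of building a variable struct.
func classify(v string) string {
	switch {
	case strings.HasPrefix(v, "count."):
		return "count"
	case strings.HasPrefix(v, "path."):
		return "path"
	case strings.HasPrefix(v, "self."):
		return "self"
	case strings.HasPrefix(v, "terraform."):
		return "terraform"
	case strings.HasPrefix(v, "var."):
		return "user"
	case strings.HasPrefix(v, "local."):
		return "local"
	case strings.HasPrefix(v, "module."):
		return "module"
	case !strings.ContainsRune(v, '.'):
		return "simple"
	default:
		return "resource"
	}
}

func main() {
	for _, v := range []string{"var.foo", "aws_instance.web.id", "count.index"} {
		fmt.Printf("%s -> %s\n", v, classify(v))
	}
}
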
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go b/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
deleted file mode 100644
index 1f3f67b90..000000000
--- a/vendor/github.com/hashicorp/terraform/config/interpolate_funcs.go
+++ /dev/null
@@ -1,54 +0,0 @@
-package config
-
-import (
- "fmt"
-
- "github.com/hashicorp/hil/ast"
-)
-
-// stringSliceToVariableValue converts a string slice into the value
-// required to be returned from interpolation functions which return
-// TypeList.
-func stringSliceToVariableValue(values []string) []ast.Variable {
- output := make([]ast.Variable, len(values))
- for index, value := range values {
- output[index] = ast.Variable{
- Type: ast.TypeString,
- Value: value,
- }
- }
- return output
-}
-
-// listVariableSliceToVariableValue converts a list of lists into the value
-// required to be returned from interpolation functions which return TypeList.
-func listVariableSliceToVariableValue(values [][]ast.Variable) []ast.Variable {
- output := make([]ast.Variable, len(values))
-
- for index, value := range values {
- output[index] = ast.Variable{
- Type: ast.TypeList,
- Value: value,
- }
- }
- return output
-}
-
-func listVariableValueToStringSlice(values []ast.Variable) ([]string, error) {
- output := make([]string, len(values))
- for index, value := range values {
- if value.Type != ast.TypeString {
-			return []string{}, fmt.Errorf("list has non-string element (%s)", value.Type.String())
- }
- output[index] = value.Value.(string)
- }
- return output, nil
-}
-
-// Funcs used to return a mapping of built-in functions for configuration.
-//
-// However, these function implementations are no longer used. To find the
-// current function implementations, refer to ../lang/functions.go instead.
-func Funcs() map[string]ast.Function {
- return nil
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go b/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go
deleted file mode 100644
index f152d8004..000000000
--- a/vendor/github.com/hashicorp/terraform/config/interpolate_walk.go
+++ /dev/null
@@ -1,282 +0,0 @@
-package config
-
-import (
- "fmt"
- "reflect"
- "strings"
-
- "github.com/hashicorp/hil"
- "github.com/hashicorp/hil/ast"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/mitchellh/reflectwalk"
-)
-
-// interpolationWalker implements interfaces for the reflectwalk package
-// (github.com/mitchellh/reflectwalk) that can be used to automatically
-// execute a callback for an interpolation.
-type interpolationWalker struct {
- // F is the function to call for every interpolation. It can be nil.
- //
- // If Replace is true, then the return value of F will be used to
- // replace the interpolation.
- F interpolationWalkerFunc
- Replace bool
-
- // ContextF is an advanced version of F that also receives the
- // location of where it is in the structure. This lets you do
- // context-aware validation.
- ContextF interpolationWalkerContextFunc
-
- key []string
- lastValue reflect.Value
- loc reflectwalk.Location
- cs []reflect.Value
- csKey []reflect.Value
- csData interface{}
- sliceIndex []int
- unknownKeys []string
-}
-
-// interpolationWalkerFunc is the callback called by interpolationWalk.
-// It is called with any interpolation found. It should return a value
-// to replace the interpolation with, along with any errors.
-//
-// If Replace is set to false in interpolationWalker, then the replace
-// value can be anything as it will have no effect.
-type interpolationWalkerFunc func(ast.Node) (interface{}, error)
-
-// interpolationWalkerContextFunc is called by interpolationWalk if
-// ContextF is set. This receives both the interpolation and the location
-// where the interpolation is.
-//
-// This callback can be used to validate the location of the interpolation
-// within the configuration.
-type interpolationWalkerContextFunc func(reflectwalk.Location, ast.Node)
-
-func (w *interpolationWalker) Enter(loc reflectwalk.Location) error {
- w.loc = loc
- return nil
-}
-
-func (w *interpolationWalker) Exit(loc reflectwalk.Location) error {
- w.loc = reflectwalk.None
-
- switch loc {
- case reflectwalk.Map:
- w.cs = w.cs[:len(w.cs)-1]
- case reflectwalk.MapValue:
- w.key = w.key[:len(w.key)-1]
- w.csKey = w.csKey[:len(w.csKey)-1]
- case reflectwalk.Slice:
- // Split any values that need to be split
- w.splitSlice()
- w.cs = w.cs[:len(w.cs)-1]
- case reflectwalk.SliceElem:
- w.csKey = w.csKey[:len(w.csKey)-1]
- w.sliceIndex = w.sliceIndex[:len(w.sliceIndex)-1]
- }
-
- return nil
-}
-
-func (w *interpolationWalker) Map(m reflect.Value) error {
- w.cs = append(w.cs, m)
- return nil
-}
-
-func (w *interpolationWalker) MapElem(m, k, v reflect.Value) error {
- w.csData = k
- w.csKey = append(w.csKey, k)
-
- if l := len(w.sliceIndex); l > 0 {
- w.key = append(w.key, fmt.Sprintf("%d.%s", w.sliceIndex[l-1], k.String()))
- } else {
- w.key = append(w.key, k.String())
- }
-
- w.lastValue = v
- return nil
-}
-
-func (w *interpolationWalker) Slice(s reflect.Value) error {
- w.cs = append(w.cs, s)
- return nil
-}
-
-func (w *interpolationWalker) SliceElem(i int, elem reflect.Value) error {
- w.csKey = append(w.csKey, reflect.ValueOf(i))
- w.sliceIndex = append(w.sliceIndex, i)
- return nil
-}
-
-func (w *interpolationWalker) Primitive(v reflect.Value) error {
- setV := v
-
- // We only care about strings
- if v.Kind() == reflect.Interface {
- setV = v
- v = v.Elem()
- }
- if v.Kind() != reflect.String {
- return nil
- }
-
- astRoot, err := hil.Parse(v.String())
- if err != nil {
- return err
- }
-
- // If the AST we got is just a literal string value with the same
-	// value then we ignore it. We have to check if it's the same value
- // because it is possible to input a string, get out a string, and
- // have it be different. For example: "foo-$${bar}" turns into
- // "foo-${bar}"
- if n, ok := astRoot.(*ast.LiteralNode); ok {
- if s, ok := n.Value.(string); ok && s == v.String() {
- return nil
- }
- }
-
- if w.ContextF != nil {
- w.ContextF(w.loc, astRoot)
- }
-
- if w.F == nil {
- return nil
- }
-
- replaceVal, err := w.F(astRoot)
- if err != nil {
- return fmt.Errorf(
- "%s in:\n\n%s",
- err, v.String())
- }
-
- if w.Replace {
- // We need to determine if we need to remove this element
- // if the result contains any "UnknownVariableValue" which is
- // set if it is computed. This behavior is different if we're
- // splitting (in a SliceElem) or not.
- remove := false
- if w.loc == reflectwalk.SliceElem {
- switch typedReplaceVal := replaceVal.(type) {
- case string:
- if typedReplaceVal == hcl2shim.UnknownVariableValue {
- remove = true
- }
- case []interface{}:
- if hasUnknownValue(typedReplaceVal) {
- remove = true
- }
- }
- } else if replaceVal == hcl2shim.UnknownVariableValue {
- remove = true
- }
-
- if remove {
- w.unknownKeys = append(w.unknownKeys, strings.Join(w.key, "."))
- }
-
- resultVal := reflect.ValueOf(replaceVal)
- switch w.loc {
- case reflectwalk.MapKey:
- m := w.cs[len(w.cs)-1]
-
- // Delete the old value
- var zero reflect.Value
- m.SetMapIndex(w.csData.(reflect.Value), zero)
-
- // Set the new key with the existing value
- m.SetMapIndex(resultVal, w.lastValue)
-
- // Set the key to be the new key
- w.csData = resultVal
- case reflectwalk.MapValue:
- // If we're in a map, then the only way to set a map value is
- // to set it directly.
- m := w.cs[len(w.cs)-1]
- mk := w.csData.(reflect.Value)
- m.SetMapIndex(mk, resultVal)
- default:
- // Otherwise, we should be addressable
- setV.Set(resultVal)
- }
- }
-
- return nil
-}
-
-func (w *interpolationWalker) replaceCurrent(v reflect.Value) {
-	// If we don't have at least 2 values, we're not going to find a map, and
-	// the indexing below could panic, so return early.
- if len(w.cs) < 2 {
- return
- }
-
- c := w.cs[len(w.cs)-2]
- switch c.Kind() {
- case reflect.Map:
- // Get the key and delete it
- k := w.csKey[len(w.csKey)-1]
- c.SetMapIndex(k, v)
- }
-}
-
-func hasUnknownValue(variable []interface{}) bool {
- for _, value := range variable {
- if strVal, ok := value.(string); ok {
- if strVal == hcl2shim.UnknownVariableValue {
- return true
- }
- }
- }
- return false
-}
-
-func (w *interpolationWalker) splitSlice() {
- raw := w.cs[len(w.cs)-1]
-
- var s []interface{}
- switch v := raw.Interface().(type) {
- case []interface{}:
- s = v
- case []map[string]interface{}:
- return
- }
-
- split := false
- for _, val := range s {
- if varVal, ok := val.(ast.Variable); ok && varVal.Type == ast.TypeList {
- split = true
- }
- if _, ok := val.([]interface{}); ok {
- split = true
- }
- }
-
- if !split {
- return
- }
-
- result := make([]interface{}, 0)
- for _, v := range s {
- switch val := v.(type) {
- case ast.Variable:
- switch val.Type {
- case ast.TypeList:
- elements := val.Value.([]ast.Variable)
- for _, element := range elements {
- result = append(result, element.Value)
- }
- default:
- result = append(result, val.Value)
- }
- case []interface{}:
- result = append(result, val...)
- default:
- result = append(result, v)
- }
- }
-
- w.replaceCurrent(reflect.ValueOf(result))
-}
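
The walker above is driven by github.com/mitchellh/reflectwalk, which calls back into whichever walker interfaces the value you pass implements. As a hedged illustration of that mechanism, here is a much smaller walker that only implements PrimitiveWalker and collects strings that look like interpolations instead of replacing them (stringCollector is illustrative, not part of the vendored code):

package main

import (
	"fmt"
	"reflect"
	"strings"

	"github.com/mitchellh/reflectwalk"
)

// stringCollector implements reflectwalk.PrimitiveWalker and records every
// string primitive that appears to contain an interpolation sequence.
type stringCollector struct {
	found []string
}

func (c *stringCollector) Primitive(v reflect.Value) error {
	// As in the removed walker, unwrap interface values before inspecting.
	if v.Kind() == reflect.Interface {
		v = v.Elem()
	}
	if v.Kind() == reflect.String && strings.Contains(v.String(), "${") {
		c.found = append(c.found, v.String())
	}
	return nil
}

func main() {
	data := map[string]interface{}{
		"ami":  "${var.ami_id}",
		"tags": []interface{}{"static", "${var.env}"},
	}
	c := &stringCollector{}
	if err := reflectwalk.Walk(data, c); err != nil {
		panic(err)
	}
	fmt.Println(c.found)
}
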
diff --git a/vendor/github.com/hashicorp/terraform/config/lang.go b/vendor/github.com/hashicorp/terraform/config/lang.go
deleted file mode 100644
index 890d30beb..000000000
--- a/vendor/github.com/hashicorp/terraform/config/lang.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package config
-
-import (
- "github.com/hashicorp/hil/ast"
-)
-
-type noopNode struct{}
-
-func (n *noopNode) Accept(ast.Visitor) ast.Node { return n }
-func (n *noopNode) Pos() ast.Pos { return ast.Pos{} }
-func (n *noopNode) Type(ast.Scope) (ast.Type, error) { return ast.TypeString, nil }
diff --git a/vendor/github.com/hashicorp/terraform/config/loader.go b/vendor/github.com/hashicorp/terraform/config/loader.go
deleted file mode 100644
index 612e25b9e..000000000
--- a/vendor/github.com/hashicorp/terraform/config/loader.go
+++ /dev/null
@@ -1,212 +0,0 @@
-package config
-
-import (
- "encoding/json"
- "fmt"
- "io"
- "os"
- "path/filepath"
- "sort"
- "strings"
-
- "github.com/hashicorp/hcl"
-)
-
-// ErrNoConfigsFound is the error returned by LoadDir if no
-// Terraform configuration files were found in the given directory.
-type ErrNoConfigsFound struct {
- Dir string
-}
-
-func (e ErrNoConfigsFound) Error() string {
- return fmt.Sprintf(
- "No Terraform configuration files found in directory: %s",
- e.Dir)
-}
-
-// LoadJSON loads a single Terraform configuration from a given JSON document.
-//
-// The document must be a complete Terraform configuration. This function will
-// NOT try to load any additional modules so only the given document is loaded.
-func LoadJSON(raw json.RawMessage) (*Config, error) {
- obj, err := hcl.Parse(string(raw))
- if err != nil {
- return nil, fmt.Errorf(
- "Error parsing JSON document as HCL: %s", err)
- }
-
- // Start building the result
- hclConfig := &hclConfigurable{
- Root: obj,
- }
-
- return hclConfig.Config()
-}
-
-// LoadFile loads the Terraform configuration from a given file.
-//
-// This file can be in any format that Terraform recognizes, and may import
-// any other format that Terraform recognizes.
-func LoadFile(path string) (*Config, error) {
- importTree, err := loadTree(path)
- if err != nil {
- return nil, err
- }
-
- configTree, err := importTree.ConfigTree()
-
- // Close the importTree now so that we can clear resources as quickly
- // as possible.
- importTree.Close()
-
- if err != nil {
- return nil, err
- }
-
- return configTree.Flatten()
-}
-
-// LoadDir loads all the Terraform configuration files in a single
-// directory and appends them together.
-//
-// Special files known as "override files" can also be present, which
-// are merged into the loaded configuration. That is, the non-override
-// files are loaded first to create the configuration. Then, the overrides
-// are merged into the configuration to create the final configuration.
-//
-// Files are loaded in lexical order.
-func LoadDir(root string) (*Config, error) {
- files, overrides, err := dirFiles(root)
- if err != nil {
- return nil, err
- }
- if len(files) == 0 && len(overrides) == 0 {
- return nil, &ErrNoConfigsFound{Dir: root}
- }
-
- // Determine the absolute path to the directory.
- rootAbs, err := filepath.Abs(root)
- if err != nil {
- return nil, err
- }
-
- var result *Config
-
- // Sort the files and overrides so we have a deterministic order
- sort.Strings(files)
- sort.Strings(overrides)
-
- // Load all the regular files, append them to each other.
- for _, f := range files {
- c, err := LoadFile(f)
- if err != nil {
- return nil, err
- }
-
- if result != nil {
- result, err = Append(result, c)
- if err != nil {
- return nil, err
- }
- } else {
- result = c
- }
- }
- if len(files) == 0 {
- result = &Config{}
- }
-
- // Load all the overrides, and merge them into the config
- for _, f := range overrides {
- c, err := LoadFile(f)
- if err != nil {
- return nil, err
- }
-
- result, err = Merge(result, c)
- if err != nil {
- return nil, err
- }
- }
-
- // Mark the directory
- result.Dir = rootAbs
-
- return result, nil
-}
-
-// ext returns the Terraform configuration extension of the given
-// path, or a blank string if the extension is not recognized.
-func ext(path string) string {
- if strings.HasSuffix(path, ".tf") {
- return ".tf"
- } else if strings.HasSuffix(path, ".tf.json") {
- return ".tf.json"
- } else {
- return ""
- }
-}
-
-func dirFiles(dir string) ([]string, []string, error) {
- f, err := os.Open(dir)
- if err != nil {
- return nil, nil, err
- }
- defer f.Close()
-
- fi, err := f.Stat()
- if err != nil {
- return nil, nil, err
- }
- if !fi.IsDir() {
- return nil, nil, fmt.Errorf(
- "configuration path must be a directory: %s",
- dir)
- }
-
- var files, overrides []string
- err = nil
- for err != io.EOF {
- var fis []os.FileInfo
- fis, err = f.Readdir(128)
- if err != nil && err != io.EOF {
- return nil, nil, err
- }
-
- for _, fi := range fis {
- // Ignore directories
- if fi.IsDir() {
- continue
- }
-
- // Only care about files that are valid to load
- name := fi.Name()
- extValue := ext(name)
- if extValue == "" || IsIgnoredFile(name) {
- continue
- }
-
- // Determine if we're dealing with an override
- nameNoExt := name[:len(name)-len(extValue)]
- override := nameNoExt == "override" ||
- strings.HasSuffix(nameNoExt, "_override")
-
- path := filepath.Join(dir, name)
- if override {
- overrides = append(overrides, path)
- } else {
- files = append(files, path)
- }
- }
- }
-
- return files, overrides, nil
-}
-
-// IsIgnoredFile returns true or false depending on whether the
-// provided file name is a file that should be ignored.
-func IsIgnoredFile(name string) bool {
- return strings.HasPrefix(name, ".") || // Unix-like hidden files
- strings.HasSuffix(name, "~") || // vim
- strings.HasPrefix(name, "#") && strings.HasSuffix(name, "#") // emacs
-}
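
The directory scan above applies three filename rules: the extension must be .tf or .tf.json, hidden and editor temporary files are ignored, and a base name of "override" or one ending in "_override" marks an override file. A standalone sketch of just that classification (classifyName is an illustrative name):

package main

import (
	"fmt"
	"strings"
)

// classifyName reproduces the filename rules from the removed dirFiles, ext,
// and IsIgnoredFile helpers: skipped, override, or regular config file.
func classifyName(name string) string {
	ext := ""
	switch {
	case strings.HasSuffix(name, ".tf.json"):
		ext = ".tf.json"
	case strings.HasSuffix(name, ".tf"):
		ext = ".tf"
	}

	ignored := strings.HasPrefix(name, ".") || // Unix-like hidden files
		strings.HasSuffix(name, "~") || // vim backups
		(strings.HasPrefix(name, "#") && strings.HasSuffix(name, "#")) // emacs

	if ext == "" || ignored {
		return "skipped"
	}

	base := name[:len(name)-len(ext)]
	if base == "override" || strings.HasSuffix(base, "_override") {
		return "override"
	}
	return "regular"
}

func main() {
	for _, n := range []string{"main.tf", "prod_override.tf.json", ".hidden.tf", "notes.txt"} {
		fmt.Printf("%-22s %s\n", n, classifyName(n))
	}
}
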
diff --git a/vendor/github.com/hashicorp/terraform/config/loader_hcl.go b/vendor/github.com/hashicorp/terraform/config/loader_hcl.go
deleted file mode 100644
index 68cffe2cc..000000000
--- a/vendor/github.com/hashicorp/terraform/config/loader_hcl.go
+++ /dev/null
@@ -1,1270 +0,0 @@
-package config
-
-import (
- "fmt"
- "io/ioutil"
-
- "github.com/hashicorp/go-multierror"
- "github.com/hashicorp/hcl"
- "github.com/hashicorp/hcl/hcl/ast"
- "github.com/mitchellh/mapstructure"
-)
-
-// hclConfigurable is an implementation of configurable that knows
-// how to turn HCL configuration into a *Config object.
-type hclConfigurable struct {
- File string
- Root *ast.File
-}
-
-var ReservedDataSourceFields = []string{
- "connection",
- "count",
- "depends_on",
- "lifecycle",
- "provider",
- "provisioner",
-}
-
-var ReservedResourceFields = []string{
- "connection",
- "count",
- "depends_on",
- "id",
- "lifecycle",
- "provider",
- "provisioner",
-}
-
-var ReservedProviderFields = []string{
- "alias",
- "version",
-}
-
-func (t *hclConfigurable) Config() (*Config, error) {
- validKeys := map[string]struct{}{
- "atlas": struct{}{},
- "data": struct{}{},
- "locals": struct{}{},
- "module": struct{}{},
- "output": struct{}{},
- "provider": struct{}{},
- "resource": struct{}{},
- "terraform": struct{}{},
- "variable": struct{}{},
- }
-
- // Top-level item should be the object list
- list, ok := t.Root.Node.(*ast.ObjectList)
- if !ok {
- return nil, fmt.Errorf("error parsing: file doesn't contain a root object")
- }
-
- // Start building up the actual configuration.
- config := new(Config)
-
- // Terraform config
- if o := list.Filter("terraform"); len(o.Items) > 0 {
- var err error
- config.Terraform, err = loadTerraformHcl(o)
- if err != nil {
- return nil, err
- }
- }
-
- // Build the variables
- if vars := list.Filter("variable"); len(vars.Items) > 0 {
- var err error
- config.Variables, err = loadVariablesHcl(vars)
- if err != nil {
- return nil, err
- }
- }
-
- // Build local values
- if locals := list.Filter("locals"); len(locals.Items) > 0 {
- var err error
- config.Locals, err = loadLocalsHcl(locals)
- if err != nil {
- return nil, err
- }
- }
-
- // Get Atlas configuration
- if atlas := list.Filter("atlas"); len(atlas.Items) > 0 {
- var err error
- config.Atlas, err = loadAtlasHcl(atlas)
- if err != nil {
- return nil, err
- }
- }
-
- // Build the modules
- if modules := list.Filter("module"); len(modules.Items) > 0 {
- var err error
- config.Modules, err = loadModulesHcl(modules)
- if err != nil {
- return nil, err
- }
- }
-
- // Build the provider configs
- if providers := list.Filter("provider"); len(providers.Items) > 0 {
- var err error
- config.ProviderConfigs, err = loadProvidersHcl(providers)
- if err != nil {
- return nil, err
- }
- }
-
- // Build the resources
- {
- var err error
- managedResourceConfigs := list.Filter("resource")
- dataResourceConfigs := list.Filter("data")
-
- config.Resources = make(
- []*Resource, 0,
- len(managedResourceConfigs.Items)+len(dataResourceConfigs.Items),
- )
-
- managedResources, err := loadManagedResourcesHcl(managedResourceConfigs)
- if err != nil {
- return nil, err
- }
- dataResources, err := loadDataResourcesHcl(dataResourceConfigs)
- if err != nil {
- return nil, err
- }
-
- config.Resources = append(config.Resources, dataResources...)
- config.Resources = append(config.Resources, managedResources...)
- }
-
- // Build the outputs
- if outputs := list.Filter("output"); len(outputs.Items) > 0 {
- var err error
- config.Outputs, err = loadOutputsHcl(outputs)
- if err != nil {
- return nil, err
- }
- }
-
- // Check for invalid keys
- for _, item := range list.Items {
- if len(item.Keys) == 0 {
- // Not sure how this would happen, but let's avoid a panic
- continue
- }
-
- k := item.Keys[0].Token.Value().(string)
- if _, ok := validKeys[k]; ok {
- continue
- }
-
- config.unknownKeys = append(config.unknownKeys, k)
- }
-
- return config, nil
-}
-
-// loadFileHcl is a fileLoaderFunc that knows how to read HCL
-// files and turn them into hclConfigurables.
-func loadFileHcl(root string) (configurable, []string, error) {
- // Read the HCL file and prepare for parsing
- d, err := ioutil.ReadFile(root)
- if err != nil {
- return nil, nil, fmt.Errorf(
- "Error reading %s: %s", root, err)
- }
-
- // Parse it
- hclRoot, err := hcl.Parse(string(d))
- if err != nil {
- return nil, nil, fmt.Errorf(
- "Error parsing %s: %s", root, err)
- }
-
- // Start building the result
- result := &hclConfigurable{
- File: root,
- Root: hclRoot,
- }
-
- // Dive in, find the imports. This is disabled for now since
-	// imports were removed prior to Terraform 0.1. The code remains
-	// here, commented out, for historical purposes.
- /*
- imports := obj.Get("import")
- if imports == nil {
- result.Object.Ref()
- return result, nil, nil
- }
-
- if imports.Type() != libucl.ObjectTypeString {
- imports.Close()
-
- return nil, nil, fmt.Errorf(
- "Error in %s: all 'import' declarations should be in the format\n"+
- "`import \"foo\"` (Got type %s)",
- root,
- imports.Type())
- }
-
- // Gather all the import paths
- importPaths := make([]string, 0, imports.Len())
- iter := imports.Iterate(false)
- for imp := iter.Next(); imp != nil; imp = iter.Next() {
- path := imp.ToString()
- if !filepath.IsAbs(path) {
- // Relative paths are relative to the Terraform file itself
- dir := filepath.Dir(root)
- path = filepath.Join(dir, path)
- }
-
- importPaths = append(importPaths, path)
- imp.Close()
- }
- iter.Close()
- imports.Close()
-
- result.Object.Ref()
- */
-
- return result, nil, nil
-}
-
-// Given a handle to a HCL object, this transforms it into the Terraform config
-func loadTerraformHcl(list *ast.ObjectList) (*Terraform, error) {
- if len(list.Items) > 1 {
- return nil, fmt.Errorf("only one 'terraform' block allowed per module")
- }
-
- // Get our one item
- item := list.Items[0]
-
- // This block should have an empty top level ObjectItem. If there are keys
- // here, it's likely because we have a flattened JSON object, and we can
- // lift this into a nested ObjectList to decode properly.
- if len(item.Keys) > 0 {
- item = &ast.ObjectItem{
- Val: &ast.ObjectType{
- List: &ast.ObjectList{
- Items: []*ast.ObjectItem{item},
- },
- },
- }
- }
-
- // We need the item value as an ObjectList
- var listVal *ast.ObjectList
- if ot, ok := item.Val.(*ast.ObjectType); ok {
- listVal = ot.List
- } else {
- return nil, fmt.Errorf("terraform block: should be an object")
- }
-
- // NOTE: We purposely don't validate unknown HCL keys here so that
- // we can potentially read _future_ Terraform version config (to
- // still be able to validate the required version).
- //
- // We should still keep track of unknown keys to validate later, but
- // HCL doesn't currently support that.
-
- var config Terraform
- if err := hcl.DecodeObject(&config, item.Val); err != nil {
- return nil, fmt.Errorf(
- "Error reading terraform config: %s",
- err)
- }
-
-	// If we have a backend block, then parse it out
- if os := listVal.Filter("backend"); len(os.Items) > 0 {
- var err error
- config.Backend, err = loadTerraformBackendHcl(os)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading backend config for terraform block: %s",
- err)
- }
- }
-
- return &config, nil
-}
-
-// Loads the Backend configuration from an object list.
-func loadTerraformBackendHcl(list *ast.ObjectList) (*Backend, error) {
- if len(list.Items) > 1 {
- return nil, fmt.Errorf("only one 'backend' block allowed")
- }
-
- // Get our one item
- item := list.Items[0]
-
- // Verify the keys
- if len(item.Keys) != 1 {
- return nil, fmt.Errorf(
- "position %s: 'backend' must be followed by exactly one string: a type",
- item.Pos())
- }
-
- typ := item.Keys[0].Token.Value().(string)
-
- // Decode the raw config
- var config map[string]interface{}
- if err := hcl.DecodeObject(&config, item.Val); err != nil {
- return nil, fmt.Errorf(
- "Error reading backend config: %s",
- err)
- }
-
- rawConfig, err := NewRawConfig(config)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading backend config: %s",
- err)
- }
-
- b := &Backend{
- Type: typ,
- RawConfig: rawConfig,
- }
- b.Hash = b.Rehash()
-
- return b, nil
-}
-
-// Given a handle to a HCL object, this transforms it into the Atlas
-// configuration.
-func loadAtlasHcl(list *ast.ObjectList) (*AtlasConfig, error) {
- if len(list.Items) > 1 {
- return nil, fmt.Errorf("only one 'atlas' block allowed")
- }
-
- // Get our one item
- item := list.Items[0]
-
- var config AtlasConfig
- if err := hcl.DecodeObject(&config, item.Val); err != nil {
- return nil, fmt.Errorf(
- "Error reading atlas config: %s",
- err)
- }
-
- return &config, nil
-}
-
-// Given a handle to a HCL object, this recurses into the structure
-// and pulls out a list of modules.
-//
-// The resulting modules may not be unique, but each module
-// represents exactly one module definition in the HCL configuration.
-// We leave it up to another pass to merge them together.
-func loadModulesHcl(list *ast.ObjectList) ([]*Module, error) {
- if err := assertAllBlocksHaveNames("module", list); err != nil {
- return nil, err
- }
-
- list = list.Children()
- if len(list.Items) == 0 {
- return nil, nil
- }
-
- // Where all the results will go
- var result []*Module
-
- // Now go over all the types and their children in order to get
- // all of the actual resources.
- for _, item := range list.Items {
- k := item.Keys[0].Token.Value().(string)
-
- var listVal *ast.ObjectList
- if ot, ok := item.Val.(*ast.ObjectType); ok {
- listVal = ot.List
- } else {
- return nil, fmt.Errorf("module '%s': should be an object", k)
- }
-
- var config map[string]interface{}
- if err := hcl.DecodeObject(&config, item.Val); err != nil {
- return nil, fmt.Errorf(
- "Error reading config for %s: %s",
- k,
- err)
- }
-
- rawConfig, err := NewRawConfig(config)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading config for %s: %s",
- k,
- err)
- }
-
- // Remove the fields we handle specially
- delete(config, "source")
- delete(config, "version")
- delete(config, "providers")
-
- var source string
- if o := listVal.Filter("source"); len(o.Items) > 0 {
- err = hcl.DecodeObject(&source, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error parsing source for %s: %s",
- k,
- err)
- }
- }
-
- var version string
- if o := listVal.Filter("version"); len(o.Items) > 0 {
- err = hcl.DecodeObject(&version, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error parsing version for %s: %s",
- k,
- err)
- }
- }
-
- var providers map[string]string
- if o := listVal.Filter("providers"); len(o.Items) > 0 {
- err = hcl.DecodeObject(&providers, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error parsing providers for %s: %s",
- k,
- err)
- }
- }
-
- result = append(result, &Module{
- Name: k,
- Source: source,
- Version: version,
- Providers: providers,
- RawConfig: rawConfig,
- })
- }
-
- return result, nil
-}
-
-// loadLocalsHcl recurses into the given HCL object and turns it into
-// a list of locals.
-func loadLocalsHcl(list *ast.ObjectList) ([]*Local, error) {
-
- result := make([]*Local, 0, len(list.Items))
-
- for _, block := range list.Items {
- if len(block.Keys) > 0 {
- return nil, fmt.Errorf(
- "locals block at %s should not have label %q",
- block.Pos(), block.Keys[0].Token.Value(),
- )
- }
-
- blockObj, ok := block.Val.(*ast.ObjectType)
- if !ok {
- return nil, fmt.Errorf("locals value at %s should be a block", block.Val.Pos())
- }
-
- // blockObj now contains directly our local decls
- for _, item := range blockObj.List.Items {
- if len(item.Keys) != 1 {
- return nil, fmt.Errorf("local declaration at %s may not be a block", item.Val.Pos())
- }
-
- // By the time we get here there can only be one item left, but
- // we'll decode into a map anyway because it's a convenient way
- // to extract both the key and the value robustly.
- kv := map[string]interface{}{}
- hcl.DecodeObject(&kv, item)
- for k, v := range kv {
- rawConfig, err := NewRawConfig(map[string]interface{}{
- "value": v,
- })
-
- if err != nil {
- return nil, fmt.Errorf(
- "error parsing local value %q at %s: %s",
- k, item.Val.Pos(), err,
- )
- }
-
- result = append(result, &Local{
- Name: k,
- RawConfig: rawConfig,
- })
- }
- }
- }
-
- return result, nil
-}
-
-// loadOutputsHcl recurses into the given HCL object and turns
-// it into a mapping of outputs.
-func loadOutputsHcl(list *ast.ObjectList) ([]*Output, error) {
- if err := assertAllBlocksHaveNames("output", list); err != nil {
- return nil, err
- }
-
- list = list.Children()
-
- // Go through each object and turn it into an actual result.
- result := make([]*Output, 0, len(list.Items))
- for _, item := range list.Items {
- n := item.Keys[0].Token.Value().(string)
-
- var listVal *ast.ObjectList
- if ot, ok := item.Val.(*ast.ObjectType); ok {
- listVal = ot.List
- } else {
- return nil, fmt.Errorf("output '%s': should be an object", n)
- }
-
- var config map[string]interface{}
- if err := hcl.DecodeObject(&config, item.Val); err != nil {
- return nil, err
- }
-
- // Delete special keys
- delete(config, "depends_on")
- delete(config, "description")
-
- rawConfig, err := NewRawConfig(config)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading config for output %s: %s",
- n,
- err)
- }
-
- // If we have depends fields, then add those in
- var dependsOn []string
- if o := listVal.Filter("depends_on"); len(o.Items) > 0 {
- err := hcl.DecodeObject(&dependsOn, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading depends_on for output %q: %s",
- n,
- err)
- }
- }
-
- // If we have a description field, then filter that
- var description string
- if o := listVal.Filter("description"); len(o.Items) > 0 {
- err := hcl.DecodeObject(&description, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading description for output %q: %s",
- n,
- err)
- }
- }
-
- result = append(result, &Output{
- Name: n,
- RawConfig: rawConfig,
- DependsOn: dependsOn,
- Description: description,
- })
- }
-
- return result, nil
-}
-
-// loadVariablesHcl recurses into the given HCL object and turns
-// it into a list of variables.
-func loadVariablesHcl(list *ast.ObjectList) ([]*Variable, error) {
- if err := assertAllBlocksHaveNames("variable", list); err != nil {
- return nil, err
- }
-
- list = list.Children()
-
- // hclVariable is the structure each variable is decoded into
- type hclVariable struct {
- DeclaredType string `hcl:"type"`
- Default interface{}
- Description string
- Fields []string `hcl:",decodedFields"`
- }
-
- // Go through each object and turn it into an actual result.
- result := make([]*Variable, 0, len(list.Items))
- for _, item := range list.Items {
- // Clean up items from JSON
- unwrapHCLObjectKeysFromJSON(item, 1)
-
- // Verify the keys
- if len(item.Keys) != 1 {
- return nil, fmt.Errorf(
-				"position %s: 'variable' must be followed by exactly one string: a name",
- item.Pos())
- }
-
- n := item.Keys[0].Token.Value().(string)
- if !NameRegexp.MatchString(n) {
- return nil, fmt.Errorf(
- "position %s: 'variable' name must match regular expression: %s",
- item.Pos(), NameRegexp)
- }
-
- // Check for invalid keys
- valid := []string{"type", "default", "description"}
- if err := checkHCLKeys(item.Val, valid); err != nil {
- return nil, multierror.Prefix(err, fmt.Sprintf(
- "variable[%s]:", n))
- }
-
- // Decode into hclVariable to get typed values
- var hclVar hclVariable
- if err := hcl.DecodeObject(&hclVar, item.Val); err != nil {
- return nil, err
- }
-
- // Defaults turn into a slice of map[string]interface{} and
- // we need to make sure to convert that down into the
- // proper type for Config.
- if ms, ok := hclVar.Default.([]map[string]interface{}); ok {
- def := make(map[string]interface{})
- for _, m := range ms {
- for k, v := range m {
- def[k] = v
- }
- }
-
- hclVar.Default = def
- }
-
- // Build the new variable and do some basic validation
- newVar := &Variable{
- Name: n,
- DeclaredType: hclVar.DeclaredType,
- Default: hclVar.Default,
- Description: hclVar.Description,
- }
- if err := newVar.ValidateTypeAndDefault(); err != nil {
- return nil, err
- }
-
- result = append(result, newVar)
- }
-
- return result, nil
-}
-
-// loadProvidersHcl recurses into the given HCL object and turns
-// it into a mapping of provider configs.
-func loadProvidersHcl(list *ast.ObjectList) ([]*ProviderConfig, error) {
- if err := assertAllBlocksHaveNames("provider", list); err != nil {
- return nil, err
- }
-
- list = list.Children()
- if len(list.Items) == 0 {
- return nil, nil
- }
-
- // Go through each object and turn it into an actual result.
- result := make([]*ProviderConfig, 0, len(list.Items))
- for _, item := range list.Items {
- n := item.Keys[0].Token.Value().(string)
-
- var listVal *ast.ObjectList
- if ot, ok := item.Val.(*ast.ObjectType); ok {
- listVal = ot.List
- } else {
-			return nil, fmt.Errorf("provider '%s': should be an object", n)
- }
-
- var config map[string]interface{}
- if err := hcl.DecodeObject(&config, item.Val); err != nil {
- return nil, err
- }
-
- delete(config, "alias")
- delete(config, "version")
-
- rawConfig, err := NewRawConfig(config)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading config for provider config %s: %s",
- n,
- err)
- }
-
- // If we have an alias field, then add those in
- var alias string
- if a := listVal.Filter("alias"); len(a.Items) > 0 {
- err := hcl.DecodeObject(&alias, a.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading alias for provider[%s]: %s",
- n,
- err)
- }
- }
-
- // If we have a version field then extract it
- var version string
- if a := listVal.Filter("version"); len(a.Items) > 0 {
- err := hcl.DecodeObject(&version, a.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading version for provider[%s]: %s",
- n,
- err)
- }
- }
-
- result = append(result, &ProviderConfig{
- Name: n,
- Alias: alias,
- Version: version,
- RawConfig: rawConfig,
- })
- }
-
- return result, nil
-}
-
-// Given a handle to a HCL object, this recurses into the structure
-// and pulls out a list of data sources.
-//
-// The resulting data sources may not be unique, but each one
-// represents exactly one data definition in the HCL configuration.
-// We leave it up to another pass to merge them together.
-func loadDataResourcesHcl(list *ast.ObjectList) ([]*Resource, error) {
- if err := assertAllBlocksHaveNames("data", list); err != nil {
- return nil, err
- }
-
- list = list.Children()
- if len(list.Items) == 0 {
- return nil, nil
- }
-
- // Where all the results will go
- var result []*Resource
-
- // Now go over all the types and their children in order to get
- // all of the actual resources.
- for _, item := range list.Items {
- if len(item.Keys) != 2 {
- return nil, fmt.Errorf(
- "position %s: 'data' must be followed by exactly two strings: a type and a name",
- item.Pos())
- }
-
- t := item.Keys[0].Token.Value().(string)
- k := item.Keys[1].Token.Value().(string)
-
- var listVal *ast.ObjectList
- if ot, ok := item.Val.(*ast.ObjectType); ok {
- listVal = ot.List
- } else {
- return nil, fmt.Errorf("data sources %s[%s]: should be an object", t, k)
- }
-
- var config map[string]interface{}
- if err := hcl.DecodeObject(&config, item.Val); err != nil {
- return nil, fmt.Errorf(
- "Error reading config for %s[%s]: %s",
- t,
- k,
- err)
- }
-
- // Remove the fields we handle specially
- delete(config, "depends_on")
- delete(config, "provider")
- delete(config, "count")
-
- rawConfig, err := NewRawConfig(config)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading config for %s[%s]: %s",
- t,
- k,
- err)
- }
-
- // If we have a count, then figure it out
- var count string = "1"
- if o := listVal.Filter("count"); len(o.Items) > 0 {
- err = hcl.DecodeObject(&count, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error parsing count for %s[%s]: %s",
- t,
- k,
- err)
- }
- }
- countConfig, err := NewRawConfig(map[string]interface{}{
- "count": count,
- })
- if err != nil {
- return nil, err
- }
- countConfig.Key = "count"
-
- // If we have depends fields, then add those in
- var dependsOn []string
- if o := listVal.Filter("depends_on"); len(o.Items) > 0 {
- err := hcl.DecodeObject(&dependsOn, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading depends_on for %s[%s]: %s",
- t,
- k,
- err)
- }
- }
-
- // If we have a provider, then parse it out
- var provider string
- if o := listVal.Filter("provider"); len(o.Items) > 0 {
- err := hcl.DecodeObject(&provider, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading provider for %s[%s]: %s",
- t,
- k,
- err)
- }
- }
-
- result = append(result, &Resource{
- Mode: DataResourceMode,
- Name: k,
- Type: t,
- RawCount: countConfig,
- RawConfig: rawConfig,
- Provider: provider,
- Provisioners: []*Provisioner{},
- DependsOn: dependsOn,
- Lifecycle: ResourceLifecycle{},
- })
- }
-
- return result, nil
-}
-
-// Given a handle to a HCL object, this recurses into the structure
-// and pulls out a list of managed resources.
-//
-// The resulting resources may not be unique, but each resource
-// represents exactly one "resource" block in the HCL configuration.
-// We leave it up to another pass to merge them together.
-func loadManagedResourcesHcl(list *ast.ObjectList) ([]*Resource, error) {
- list = list.Children()
- if len(list.Items) == 0 {
- return nil, nil
- }
-
- // Where all the results will go
- var result []*Resource
-
- // Now go over all the types and their children in order to get
- // all of the actual resources.
- for _, item := range list.Items {
- // GH-4385: We detect a pure provisioner resource and give the user
- // an error about how to do it cleanly.
- if len(item.Keys) == 4 && item.Keys[2].Token.Value().(string) == "provisioner" {
- return nil, fmt.Errorf(
- "position %s: provisioners in a resource should be wrapped in a list\n\n"+
- "Example: \"provisioner\": [ { \"local-exec\": ... } ]",
- item.Pos())
- }
-
- // Fix up JSON input
- unwrapHCLObjectKeysFromJSON(item, 2)
-
- if len(item.Keys) != 2 {
- return nil, fmt.Errorf(
- "position %s: resource must be followed by exactly two strings, a type and a name",
- item.Pos())
- }
-
- t := item.Keys[0].Token.Value().(string)
- k := item.Keys[1].Token.Value().(string)
-
- var listVal *ast.ObjectList
- if ot, ok := item.Val.(*ast.ObjectType); ok {
- listVal = ot.List
- } else {
- return nil, fmt.Errorf("resources %s[%s]: should be an object", t, k)
- }
-
- var config map[string]interface{}
- if err := hcl.DecodeObject(&config, item.Val); err != nil {
- return nil, fmt.Errorf(
- "Error reading config for %s[%s]: %s",
- t,
- k,
- err)
- }
-
- // Remove the fields we handle specially
- delete(config, "connection")
- delete(config, "count")
- delete(config, "depends_on")
- delete(config, "provisioner")
- delete(config, "provider")
- delete(config, "lifecycle")
-
- rawConfig, err := NewRawConfig(config)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading config for %s[%s]: %s",
- t,
- k,
- err)
- }
-
- // If we have a count, then figure it out
- var count string = "1"
- if o := listVal.Filter("count"); len(o.Items) > 0 {
- err = hcl.DecodeObject(&count, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error parsing count for %s[%s]: %s",
- t,
- k,
- err)
- }
- }
- countConfig, err := NewRawConfig(map[string]interface{}{
- "count": count,
- })
- if err != nil {
- return nil, err
- }
- countConfig.Key = "count"
-
- // If we have depends fields, then add those in
- var dependsOn []string
- if o := listVal.Filter("depends_on"); len(o.Items) > 0 {
- err := hcl.DecodeObject(&dependsOn, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading depends_on for %s[%s]: %s",
- t,
- k,
- err)
- }
- }
-
- // If we have connection info, then parse those out
- var connInfo map[string]interface{}
- if o := listVal.Filter("connection"); len(o.Items) > 0 {
- err := hcl.DecodeObject(&connInfo, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading connection info for %s[%s]: %s",
- t,
- k,
- err)
- }
- }
-
- // If we have provisioners, then parse those out
- var provisioners []*Provisioner
- if os := listVal.Filter("provisioner"); len(os.Items) > 0 {
- var err error
- provisioners, err = loadProvisionersHcl(os, connInfo)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading provisioners for %s[%s]: %s",
- t,
- k,
- err)
- }
- }
-
- // If we have a provider, then parse it out
- var provider string
- if o := listVal.Filter("provider"); len(o.Items) > 0 {
- err := hcl.DecodeObject(&provider, o.Items[0].Val)
- if err != nil {
- return nil, fmt.Errorf(
- "Error reading provider for %s[%s]: %s",
- t,
- k,
- err)
- }
- }
-
- // Check if the resource should be re-created before
- // destroying the existing instance
- var lifecycle ResourceLifecycle
- if o := listVal.Filter("lifecycle"); len(o.Items) > 0 {
- if len(o.Items) > 1 {
- return nil, fmt.Errorf(
- "%s[%s]: Multiple lifecycle blocks found, expected one",
- t, k)
- }
-
- // Check for invalid keys
- valid := []string{"create_before_destroy", "ignore_changes", "prevent_destroy"}
- if err := checkHCLKeys(o.Items[0].Val, valid); err != nil {
- return nil, multierror.Prefix(err, fmt.Sprintf(
- "%s[%s]:", t, k))
- }
-
- var raw map[string]interface{}
- if err = hcl.DecodeObject(&raw, o.Items[0].Val); err != nil {
- return nil, fmt.Errorf(
- "Error parsing lifecycle for %s[%s]: %s",
- t,
- k,
- err)
- }
-
- if err := mapstructure.WeakDecode(raw, &lifecycle); err != nil {
- return nil, fmt.Errorf(
- "Error parsing lifecycle for %s[%s]: %s",
- t,
- k,
- err)
- }
- }
-
- result = append(result, &Resource{
- Mode: ManagedResourceMode,
- Name: k,
- Type: t,
- RawCount: countConfig,
- RawConfig: rawConfig,
- Provisioners: provisioners,
- Provider: provider,
- DependsOn: dependsOn,
- Lifecycle: lifecycle,
- })
- }
-
- return result, nil
-}
-
-func loadProvisionersHcl(list *ast.ObjectList, connInfo map[string]interface{}) ([]*Provisioner, error) {
- if err := assertAllBlocksHaveNames("provisioner", list); err != nil {
- return nil, err
- }
-
- list = list.Children()
- if len(list.Items) == 0 {
- return nil, nil
- }
-
- // Go through each object and turn it into an actual result.
- result := make([]*Provisioner, 0, len(list.Items))
- for _, item := range list.Items {
- n := item.Keys[0].Token.Value().(string)
-
- var listVal *ast.ObjectList
- if ot, ok := item.Val.(*ast.ObjectType); ok {
- listVal = ot.List
- } else {
- return nil, fmt.Errorf("provisioner '%s': should be an object", n)
- }
-
- var config map[string]interface{}
- if err := hcl.DecodeObject(&config, item.Val); err != nil {
- return nil, err
- }
-
- // Parse the "when" value
- when := ProvisionerWhenCreate
- if v, ok := config["when"]; ok {
- switch v {
- case "create":
- when = ProvisionerWhenCreate
- case "destroy":
- when = ProvisionerWhenDestroy
- default:
- return nil, fmt.Errorf(
- "position %s: 'provisioner' when must be 'create' or 'destroy'",
- item.Pos())
- }
- }
-
- // Parse the "on_failure" value
- onFailure := ProvisionerOnFailureFail
- if v, ok := config["on_failure"]; ok {
- switch v {
- case "continue":
- onFailure = ProvisionerOnFailureContinue
- case "fail":
- onFailure = ProvisionerOnFailureFail
- default:
- return nil, fmt.Errorf(
- "position %s: 'provisioner' on_failure must be 'continue' or 'fail'",
- item.Pos())
- }
- }
-
- // Delete fields we special case
- delete(config, "connection")
- delete(config, "when")
- delete(config, "on_failure")
-
- rawConfig, err := NewRawConfig(config)
- if err != nil {
- return nil, err
- }
-
- // Check if we have a provisioner-level connection
- // block that overrides the resource-level one
- var subConnInfo map[string]interface{}
- if o := listVal.Filter("connection"); len(o.Items) > 0 {
- err := hcl.DecodeObject(&subConnInfo, o.Items[0].Val)
- if err != nil {
- return nil, err
- }
- }
-
- // Inherit from the resource connInfo any keys
- // that are not explicitly overridden.
- if connInfo != nil && subConnInfo != nil {
- for k, v := range connInfo {
- if _, ok := subConnInfo[k]; !ok {
- subConnInfo[k] = v
- }
- }
- } else if subConnInfo == nil {
- subConnInfo = connInfo
- }
-
- // Parse the connInfo
- connRaw, err := NewRawConfig(subConnInfo)
- if err != nil {
- return nil, err
- }
-
- result = append(result, &Provisioner{
- Type: n,
- RawConfig: rawConfig,
- ConnInfo: connRaw,
- When: when,
- OnFailure: onFailure,
- })
- }
-
- return result, nil
-}
-
-/*
-func hclObjectMap(os *hclobj.Object) map[string]ast.ListNode {
- objects := make(map[string][]*hclobj.Object)
-
- for _, o := range os.Elem(false) {
- for _, elem := range o.Elem(true) {
- val, ok := objects[elem.Key]
- if !ok {
- val = make([]*hclobj.Object, 0, 1)
- }
-
- val = append(val, elem)
- objects[elem.Key] = val
- }
- }
-
- return objects
-}
-*/
-
-// assertAllBlocksHaveNames returns an error if any of the items in
-// the given object list are blocks without keys (like "module {}")
-// or simple assignments (like "module = 1"). It returns nil if
-// neither of these things is true.
-//
-// The given name is used in any generated error messages, and should
-// be the name of the block we're dealing with. The given list should
-// be the result of calling .Filter on an object list with that same
-// name.
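-//
-// For example, a named block like `provisioner "local-exec" { ... }` passes,
-// while a bare `provisioner { ... }` block or a `provisioner = 1` assignment
-// is rejected.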
-func assertAllBlocksHaveNames(name string, list *ast.ObjectList) error {
- if elem := list.Elem(); len(elem.Items) != 0 {
- switch et := elem.Items[0].Val.(type) {
- case *ast.ObjectType:
- pos := et.Lbrace
- return fmt.Errorf("%s: %q must be followed by a name", pos, name)
- default:
- pos := elem.Items[0].Val.Pos()
- return fmt.Errorf("%s: %q must be a configuration block", pos, name)
- }
- }
- return nil
-}
-
-func checkHCLKeys(node ast.Node, valid []string) error {
- var list *ast.ObjectList
- switch n := node.(type) {
- case *ast.ObjectList:
- list = n
- case *ast.ObjectType:
- list = n.List
- default:
- return fmt.Errorf("cannot check HCL keys of type %T", n)
- }
-
- validMap := make(map[string]struct{}, len(valid))
- for _, v := range valid {
- validMap[v] = struct{}{}
- }
-
- var result error
- for _, item := range list.Items {
- key := item.Keys[0].Token.Value().(string)
- if _, ok := validMap[key]; !ok {
- result = multierror.Append(result, fmt.Errorf(
- "invalid key: %s", key))
- }
- }
-
- return result
-}
-
-// unwrapHCLObjectKeysFromJSON cleans up an edge case that can occur when
-// parsing JSON as input: if we're parsing JSON then directly nested
-// items will show up as additional "keys".
-//
-// For objects that expect a fixed number of keys, this breaks the
-// decoding process. This function unwraps the object into what it would've
-// looked like if it came directly from HCL by specifying the number of keys
-// you expect.
-//
-// Example:
-//
-// { "foo": { "baz": {} } }
-//
-// Will show up with Keys being: []string{"foo", "baz"}
-// when we really just want the first two. This function will fix this.
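-//
-// The fix works by popping keys off the end one at a time and re-wrapping
-// the value in a nested object keyed by the popped key, until only `depth`
-// keys remain.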
-func unwrapHCLObjectKeysFromJSON(item *ast.ObjectItem, depth int) {
- if len(item.Keys) > depth && item.Keys[0].Token.JSON {
- for len(item.Keys) > depth {
- // Pop off the last key
- n := len(item.Keys)
- key := item.Keys[n-1]
- item.Keys[n-1] = nil
- item.Keys = item.Keys[:n-1]
-
- // Wrap our value in a list
- item.Val = &ast.ObjectType{
- List: &ast.ObjectList{
- Items: []*ast.ObjectItem{
- &ast.ObjectItem{
- Keys: []*ast.ObjectKey{key},
- Val: item.Val,
- },
- },
- },
- }
- }
- }
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/loader_hcl2.go b/vendor/github.com/hashicorp/terraform/config/loader_hcl2.go
deleted file mode 100644
index da7559a9d..000000000
--- a/vendor/github.com/hashicorp/terraform/config/loader_hcl2.go
+++ /dev/null
@@ -1,473 +0,0 @@
-package config
-
-import (
- "fmt"
- "sort"
- "strings"
-
- hcl2 "github.com/hashicorp/hcl/v2"
- gohcl2 "github.com/hashicorp/hcl/v2/gohcl"
- hcl2parse "github.com/hashicorp/hcl/v2/hclparse"
- "github.com/hashicorp/terraform/configs/hcl2shim"
- "github.com/zclconf/go-cty/cty"
-)
-
-// hcl2Configurable is an implementation of configurable that knows
-// how to turn a HCL Body into a *Config object.
-type hcl2Configurable struct {
- SourceFilename string
- Body hcl2.Body
-}
-
-// hcl2Loader is a wrapper around a HCL parser that provides a fileLoaderFunc.
-type hcl2Loader struct {
- Parser *hcl2parse.Parser
-}
-
-// For the moment we'll just have a global loader since we don't have anywhere
-// better to stash this.
-// TODO: refactor the loader API so that it uses some sort of object we can
-// stash the parser inside.
-var globalHCL2Loader = newHCL2Loader()
-
-// newHCL2Loader creates a new hcl2Loader containing a new HCL Parser.
-//
-// HCL parsers retain information about files that are loaded to aid in
-// producing diagnostic messages, so all files within a single configuration
-// should be loaded with the same parser to ensure the availability of
-// full diagnostic information.
-func newHCL2Loader() hcl2Loader {
- return hcl2Loader{
- Parser: hcl2parse.NewParser(),
- }
-}
-
-// loadFile is a fileLoaderFunc that knows how to read a HCL2 file and turn it
-// into a hcl2Configurable.
-func (l hcl2Loader) loadFile(filename string) (configurable, []string, error) {
- var f *hcl2.File
- var diags hcl2.Diagnostics
- if strings.HasSuffix(filename, ".json") {
- f, diags = l.Parser.ParseJSONFile(filename)
- } else {
- f, diags = l.Parser.ParseHCLFile(filename)
- }
- if diags.HasErrors() {
- // Return diagnostics as an error; callers may type-assert this to
- // recover the original diagnostics, if it doesn't end up wrapped
- // in another error.
- return nil, nil, diags
- }
-
- return &hcl2Configurable{
- SourceFilename: filename,
- Body: f.Body,
- }, nil, nil
-}
-
-func (t *hcl2Configurable) Config() (*Config, error) {
- config := &Config{}
-
- // these structs are used only for the initial shallow decoding; we'll
- // expand this into the main, public-facing config structs afterwards.
- type atlas struct {
- Name string `hcl:"name"`
- Include *[]string `hcl:"include"`
- Exclude *[]string `hcl:"exclude"`
- }
- type provider struct {
- Name string `hcl:"name,label"`
- Alias *string `hcl:"alias,attr"`
- Version *string `hcl:"version,attr"`
- Config hcl2.Body `hcl:",remain"`
- }
- type module struct {
- Name string `hcl:"name,label"`
- Source string `hcl:"source,attr"`
- Version *string `hcl:"version,attr"`
- Providers *map[string]string `hcl:"providers,attr"`
- Config hcl2.Body `hcl:",remain"`
- }
- type resourceLifecycle struct {
- CreateBeforeDestroy *bool `hcl:"create_before_destroy,attr"`
- PreventDestroy *bool `hcl:"prevent_destroy,attr"`
- IgnoreChanges *[]string `hcl:"ignore_changes,attr"`
- }
- type connection struct {
- Config hcl2.Body `hcl:",remain"`
- }
- type provisioner struct {
- Type string `hcl:"type,label"`
-
- When *string `hcl:"when,attr"`
- OnFailure *string `hcl:"on_failure,attr"`
-
- Connection *connection `hcl:"connection,block"`
- Config hcl2.Body `hcl:",remain"`
- }
- type managedResource struct {
- Type string `hcl:"type,label"`
- Name string `hcl:"name,label"`
-
- CountExpr hcl2.Expression `hcl:"count,attr"`
- Provider *string `hcl:"provider,attr"`
- DependsOn *[]string `hcl:"depends_on,attr"`
-
- Lifecycle *resourceLifecycle `hcl:"lifecycle,block"`
- Provisioners []provisioner `hcl:"provisioner,block"`
- Connection *connection `hcl:"connection,block"`
-
- Config hcl2.Body `hcl:",remain"`
- }
- type dataResource struct {
- Type string `hcl:"type,label"`
- Name string `hcl:"name,label"`
-
- CountExpr hcl2.Expression `hcl:"count,attr"`
- Provider *string `hcl:"provider,attr"`
- DependsOn *[]string `hcl:"depends_on,attr"`
-
- Config hcl2.Body `hcl:",remain"`
- }
- type variable struct {
- Name string `hcl:"name,label"`
-
- DeclaredType *string `hcl:"type,attr"`
- Default *cty.Value `hcl:"default,attr"`
- Description *string `hcl:"description,attr"`
- Sensitive *bool `hcl:"sensitive,attr"`
- }
- type output struct {
- Name string `hcl:"name,label"`
-
- ValueExpr hcl2.Expression `hcl:"value,attr"`
- DependsOn *[]string `hcl:"depends_on,attr"`
- Description *string `hcl:"description,attr"`
- Sensitive *bool `hcl:"sensitive,attr"`
- }
- type locals struct {
- Definitions hcl2.Attributes `hcl:",remain"`
- }
- type backend struct {
- Type string `hcl:"type,label"`
- Config hcl2.Body `hcl:",remain"`
- }
- type terraform struct {
- RequiredVersion *string `hcl:"required_version,attr"`
- Backend *backend `hcl:"backend,block"`
- }
- type topLevel struct {
- Atlas *atlas `hcl:"atlas,block"`
- Datas []dataResource `hcl:"data,block"`
- Modules []module `hcl:"module,block"`
- Outputs []output `hcl:"output,block"`
- Providers []provider `hcl:"provider,block"`
- Resources []managedResource `hcl:"resource,block"`
- Terraform *terraform `hcl:"terraform,block"`
- Variables []variable `hcl:"variable,block"`
- Locals []*locals `hcl:"locals,block"`
- }
-
- var raw topLevel
- diags := gohcl2.DecodeBody(t.Body, nil, &raw)
- if diags.HasErrors() {
- // Do some minimal decoding to see if we can at least get the
- // required Terraform version, which might help explain why we
- // couldn't parse the rest.
- if raw.Terraform != nil && raw.Terraform.RequiredVersion != nil {
- config.Terraform = &Terraform{
- RequiredVersion: *raw.Terraform.RequiredVersion,
- }
- }
-
- // We return the diags as an implementation of error, which the
- // caller can then type-assert if desired to recover the individual
- // diagnostics.
- // FIXME: The current API gives us no way to return warnings in the
- // absence of any errors.
- return config, diags
- }
-
- if raw.Terraform != nil {
- var reqdVersion string
- var backend *Backend
-
- if raw.Terraform.RequiredVersion != nil {
- reqdVersion = *raw.Terraform.RequiredVersion
- }
- if raw.Terraform.Backend != nil {
- backend = new(Backend)
- backend.Type = raw.Terraform.Backend.Type
-
- // We don't permit interpolations or nested blocks inside the
- // backend config, so we can decode the config early here and
- // get direct access to the values, which is important for the
- // config hashing to work as expected.
- var config map[string]string
- configDiags := gohcl2.DecodeBody(raw.Terraform.Backend.Config, nil, &config)
- diags = append(diags, configDiags...)
-
- raw := make(map[string]interface{}, len(config))
- for k, v := range config {
- raw[k] = v
- }
-
- var err error
- backend.RawConfig, err = NewRawConfig(raw)
- if err != nil {
- diags = append(diags, &hcl2.Diagnostic{
- Severity: hcl2.DiagError,
- Summary: "Invalid backend configuration",
- Detail: fmt.Sprintf("Error in backend configuration: %s", err),
- })
- }
- }
-
- config.Terraform = &Terraform{
- RequiredVersion: reqdVersion,
- Backend: backend,
- }
- }
-
- if raw.Atlas != nil {
- var include, exclude []string
- if raw.Atlas.Include != nil {
- include = *raw.Atlas.Include
- }
- if raw.Atlas.Exclude != nil {
- exclude = *raw.Atlas.Exclude
- }
- config.Atlas = &AtlasConfig{
- Name: raw.Atlas.Name,
- Include: include,
- Exclude: exclude,
- }
- }
-
- for _, rawM := range raw.Modules {
- m := &Module{
- Name: rawM.Name,
- Source: rawM.Source,
- RawConfig: NewRawConfigHCL2(rawM.Config),
- }
-
- if rawM.Version != nil {
- m.Version = *rawM.Version
- }
-
- if rawM.Providers != nil {
- m.Providers = *rawM.Providers
- }
-
- config.Modules = append(config.Modules, m)
- }
-
- for _, rawV := range raw.Variables {
- v := &Variable{
- Name: rawV.Name,
- }
- if rawV.DeclaredType != nil {
- v.DeclaredType = *rawV.DeclaredType
- }
- if rawV.Default != nil {
- v.Default = hcl2shim.ConfigValueFromHCL2(*rawV.Default)
- }
- if rawV.Description != nil {
- v.Description = *rawV.Description
- }
-
- config.Variables = append(config.Variables, v)
- }
-
- for _, rawO := range raw.Outputs {
- o := &Output{
- Name: rawO.Name,
- }
-
- if rawO.Description != nil {
- o.Description = *rawO.Description
- }
- if rawO.DependsOn != nil {
- o.DependsOn = *rawO.DependsOn
- }
- if rawO.Sensitive != nil {
- o.Sensitive = *rawO.Sensitive
- }
-
- // The result is expected to be a map like map[string]interface{}{"value": something},
- // so we'll fake that with our hcl2shim.SingleAttrBody shim.
- o.RawConfig = NewRawConfigHCL2(hcl2shim.SingleAttrBody{
- Name: "value",
- Expr: rawO.ValueExpr,
- })
-
- config.Outputs = append(config.Outputs, o)
- }
-
- for _, rawR := range raw.Resources {
- r := &Resource{
- Mode: ManagedResourceMode,
- Type: rawR.Type,
- Name: rawR.Name,
- }
- if rawR.Lifecycle != nil {
- var l ResourceLifecycle
- if rawR.Lifecycle.CreateBeforeDestroy != nil {
- l.CreateBeforeDestroy = *rawR.Lifecycle.CreateBeforeDestroy
- }
- if rawR.Lifecycle.PreventDestroy != nil {
- l.PreventDestroy = *rawR.Lifecycle.PreventDestroy
- }
- if rawR.Lifecycle.IgnoreChanges != nil {
- l.IgnoreChanges = *rawR.Lifecycle.IgnoreChanges
- }
- r.Lifecycle = l
- }
- if rawR.Provider != nil {
- r.Provider = *rawR.Provider
- }
- if rawR.DependsOn != nil {
- r.DependsOn = *rawR.DependsOn
- }
-
- var defaultConnInfo *RawConfig
- if rawR.Connection != nil {
- defaultConnInfo = NewRawConfigHCL2(rawR.Connection.Config)
- }
-
- for _, rawP := range rawR.Provisioners {
- p := &Provisioner{
- Type: rawP.Type,
- }
-
- switch {
- case rawP.When == nil:
- p.When = ProvisionerWhenCreate
- case *rawP.When == "create":
- p.When = ProvisionerWhenCreate
- case *rawP.When == "destroy":
- p.When = ProvisionerWhenDestroy
- default:
- p.When = ProvisionerWhenInvalid
- }
-
- switch {
- case rawP.OnFailure == nil:
- p.OnFailure = ProvisionerOnFailureFail
- case *rawP.OnFailure == "fail":
- p.OnFailure = ProvisionerOnFailureFail
- case *rawP.OnFailure == "continue":
- p.OnFailure = ProvisionerOnFailureContinue
- default:
- p.OnFailure = ProvisionerOnFailureInvalid
- }
-
- if rawP.Connection != nil {
- p.ConnInfo = NewRawConfigHCL2(rawP.Connection.Config)
- } else {
- p.ConnInfo = defaultConnInfo
- }
-
- p.RawConfig = NewRawConfigHCL2(rawP.Config)
-
- r.Provisioners = append(r.Provisioners, p)
- }
-
- // The old loader records the count expression as a weird RawConfig with
- // a single-element map inside. Since the rest of the world is assuming
- // that, we'll mimic it here.
- {
- countBody := hcl2shim.SingleAttrBody{
- Name: "count",
- Expr: rawR.CountExpr,
- }
-
- r.RawCount = NewRawConfigHCL2(countBody)
- r.RawCount.Key = "count"
- }
-
- r.RawConfig = NewRawConfigHCL2(rawR.Config)
-
- config.Resources = append(config.Resources, r)
-
- }
-
- for _, rawR := range raw.Datas {
- r := &Resource{
- Mode: DataResourceMode,
- Type: rawR.Type,
- Name: rawR.Name,
- }
-
- if rawR.Provider != nil {
- r.Provider = *rawR.Provider
- }
- if rawR.DependsOn != nil {
- r.DependsOn = *rawR.DependsOn
- }
-
- // The old loader records the count expression as a weird RawConfig with
- // a single-element map inside. Since the rest of the world is assuming
- // that, we'll mimic it here.
- {
- countBody := hcl2shim.SingleAttrBody{
- Name: "count",
- Expr: rawR.CountExpr,
- }
-
- r.RawCount = NewRawConfigHCL2(countBody)
- r.RawCount.Key = "count"
- }
-
- r.RawConfig = NewRawConfigHCL2(rawR.Config)
-
- config.Resources = append(config.Resources, r)
- }
-
- for _, rawP := range raw.Providers {
- p := &ProviderConfig{
- Name: rawP.Name,
- }
-
- if rawP.Alias != nil {
- p.Alias = *rawP.Alias
- }
- if rawP.Version != nil {
- p.Version = *rawP.Version
- }
-
- // The provider-specific arguments are kept as an undecoded HCL2 body,
- // to be decoded later once the provider's schema is known.
- p.RawConfig = NewRawConfigHCL2(rawP.Config)
-
- config.ProviderConfigs = append(config.ProviderConfigs, p)
- }
-
- for _, rawL := range raw.Locals {
- names := make([]string, 0, len(rawL.Definitions))
- for n := range rawL.Definitions {
- names = append(names, n)
- }
- sort.Strings(names)
- for _, n := range names {
- attr := rawL.Definitions[n]
- l := &Local{
- Name: n,
- RawConfig: NewRawConfigHCL2(hcl2shim.SingleAttrBody{
- Name: "value",
- Expr: attr.Expr,
- }),
- }
- config.Locals = append(config.Locals, l)
- }
- }
-
- // FIXME: The current API gives us no way to return warnings in the
- // absence of any errors.
- var err error
- if diags.HasErrors() {
- err = diags
- }
-
- return config, err
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/merge.go b/vendor/github.com/hashicorp/terraform/config/merge.go
deleted file mode 100644
index 55fc864f7..000000000
--- a/vendor/github.com/hashicorp/terraform/config/merge.go
+++ /dev/null
@@ -1,204 +0,0 @@
-package config
-
-// Merge merges two configurations into a single configuration.
-//
-// Merge allows for the two configurations to have duplicate resources,
-// because the resources will be merged. This differs from a single
-// Config which must only have unique resources.
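-//
-// Where both configurations define the same named element, the element from
-// c2 is merged over (or replaces) the corresponding element from c1.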
-func Merge(c1, c2 *Config) (*Config, error) {
- c := new(Config)
-
- // Merge unknown keys
- unknowns := make(map[string]struct{})
- for _, k := range c1.unknownKeys {
- _, present := unknowns[k]
- if !present {
- unknowns[k] = struct{}{}
- c.unknownKeys = append(c.unknownKeys, k)
- }
- }
- for _, k := range c2.unknownKeys {
- _, present := unknowns[k]
- if !present {
- unknowns[k] = struct{}{}
- c.unknownKeys = append(c.unknownKeys, k)
- }
- }
-
- // Merge Atlas configuration. This is a simple "one overrides the other"
- // sort of merge.
- c.Atlas = c1.Atlas
- if c2.Atlas != nil {
- c.Atlas = c2.Atlas
- }
-
- // Merge the Terraform configuration
- if c1.Terraform != nil {
- c.Terraform = c1.Terraform
- if c2.Terraform != nil {
- c.Terraform.Merge(c2.Terraform)
- }
- } else {
- c.Terraform = c2.Terraform
- }
-
- // NOTE: Everything below is pretty gross. Due to the lack of generics
- // in Go, there is some hoop-jumping involved to make this merging a
- // little more test-friendly and less repetitive. Ironically, making it
- // less repetitive involves being a little repetitive, but I prefer to
- // be repetitive with things that are less error prone than things that
- // are more error prone (more logic). Type conversions to an interface
- // are pretty low-error.
-
- var m1, m2, mresult []merger
-
- // Modules
- m1 = make([]merger, 0, len(c1.Modules))
- m2 = make([]merger, 0, len(c2.Modules))
- for _, v := range c1.Modules {
- m1 = append(m1, v)
- }
- for _, v := range c2.Modules {
- m2 = append(m2, v)
- }
- mresult = mergeSlice(m1, m2)
- if len(mresult) > 0 {
- c.Modules = make([]*Module, len(mresult))
- for i, v := range mresult {
- c.Modules[i] = v.(*Module)
- }
- }
-
- // Outputs
- m1 = make([]merger, 0, len(c1.Outputs))
- m2 = make([]merger, 0, len(c2.Outputs))
- for _, v := range c1.Outputs {
- m1 = append(m1, v)
- }
- for _, v := range c2.Outputs {
- m2 = append(m2, v)
- }
- mresult = mergeSlice(m1, m2)
- if len(mresult) > 0 {
- c.Outputs = make([]*Output, len(mresult))
- for i, v := range mresult {
- c.Outputs[i] = v.(*Output)
- }
- }
-
- // Provider Configs
- m1 = make([]merger, 0, len(c1.ProviderConfigs))
- m2 = make([]merger, 0, len(c2.ProviderConfigs))
- for _, v := range c1.ProviderConfigs {
- m1 = append(m1, v)
- }
- for _, v := range c2.ProviderConfigs {
- m2 = append(m2, v)
- }
- mresult = mergeSlice(m1, m2)
- if len(mresult) > 0 {
- c.ProviderConfigs = make([]*ProviderConfig, len(mresult))
- for i, v := range mresult {
- c.ProviderConfigs[i] = v.(*ProviderConfig)
- }
- }
-
- // Resources
- m1 = make([]merger, 0, len(c1.Resources))
- m2 = make([]merger, 0, len(c2.Resources))
- for _, v := range c1.Resources {
- m1 = append(m1, v)
- }
- for _, v := range c2.Resources {
- m2 = append(m2, v)
- }
- mresult = mergeSlice(m1, m2)
- if len(mresult) > 0 {
- c.Resources = make([]*Resource, len(mresult))
- for i, v := range mresult {
- c.Resources[i] = v.(*Resource)
- }
- }
-
- // Variables
- m1 = make([]merger, 0, len(c1.Variables))
- m2 = make([]merger, 0, len(c2.Variables))
- for _, v := range c1.Variables {
- m1 = append(m1, v)
- }
- for _, v := range c2.Variables {
- m2 = append(m2, v)
- }
- mresult = mergeSlice(m1, m2)
- if len(mresult) > 0 {
- c.Variables = make([]*Variable, len(mresult))
- for i, v := range mresult {
- c.Variables[i] = v.(*Variable)
- }
- }
-
- // Local Values
- // These are simpler than the other config elements because they are just
- // flat values and so no deep merging is required.
- if localsCount := len(c1.Locals) + len(c2.Locals); localsCount != 0 {
- // Explicit length check above because we want c.Locals to remain
- // nil if the result would be empty.
- c.Locals = make([]*Local, 0, len(c1.Locals)+len(c2.Locals))
- c.Locals = append(c.Locals, c1.Locals...)
- c.Locals = append(c.Locals, c2.Locals...)
- }
-
- return c, nil
-}
-
-// merger is an interface that must be implemented by types that are
-// merge-able. This simplifies the implementation of Merge for the various
-// components of a Config.
-type merger interface {
- mergerName() string
- mergerMerge(merger) merger
-}
-
-// mergeSlice merges a slice of mergers.
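-//
-// Elements of m2 whose mergerName matches an element of m1 are merged into
-// that element (via mergerMerge) in place; m2 elements with no counterpart
-// in m1, and names repeated within m2, are appended to the result.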
-func mergeSlice(m1, m2 []merger) []merger {
- r := make([]merger, len(m1), len(m1)+len(m2))
- copy(r, m1)
-
- m := map[string]struct{}{}
- for _, v2 := range m2 {
- // If we already saw it, just append it because it's a
- // duplicate and invalid...
- name := v2.mergerName()
- if _, ok := m[name]; ok {
- r = append(r, v2)
- continue
- }
- m[name] = struct{}{}
-
- // Find an original to override
- var original merger
- originalIndex := -1
- for i, v := range m1 {
- if v.mergerName() == name {
- originalIndex = i
- original = v
- break
- }
- }
-
- var v merger
- if original == nil {
- v = v2
- } else {
- v = original.mergerMerge(v2)
- }
-
- if originalIndex == -1 {
- r = append(r, v)
- } else {
- r[originalIndex] = v
- }
- }
-
- return r
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/providers.go b/vendor/github.com/hashicorp/terraform/config/providers.go
deleted file mode 100644
index eeddabc32..000000000
--- a/vendor/github.com/hashicorp/terraform/config/providers.go
+++ /dev/null
@@ -1,61 +0,0 @@
-package config
-
-import "github.com/blang/semver"
-
-// ProviderVersionConstraint presents a constraint for a particular
-// provider, identified by its full name.
-type ProviderVersionConstraint struct {
- Constraint string
- ProviderType string
-}
-
-// ProviderVersionConstraints is a map from provider full name to its associated
-// ProviderVersionConstraint, as produced by Config.RequiredProviders.
-type ProviderVersionConstraints map[string]ProviderVersionConstraint
-
-// RequiredRanges returns a semver.Range for each distinct provider type in
-// the constraint map. If the same provider type appears more than once
-// (e.g. because aliases are in use) then their respective constraints are
-// combined such that they must *all* apply.
-//
-// The result of this method can be passed to the
-// PluginMetaSet.ConstrainVersions method within the plugin/discovery
-// package in order to filter down the available plugins to those which
-// satisfy the given constraints.
-//
-// This function will panic if any of the constraints within cannot be
-// parsed as semver ranges. This is guaranteed to never happen for a
-// constraint set that was built from a configuration that passed validation.
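-//
-// For example, if two aliased configurations of the same provider declare
-// different constraints, the returned range for that provider type is the
-// intersection of the two, so both must be satisfied.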
-func (cons ProviderVersionConstraints) RequiredRanges() map[string]semver.Range {
- ret := make(map[string]semver.Range, len(cons))
-
- for _, con := range cons {
- spec := semver.MustParseRange(con.Constraint)
- if existing, exists := ret[con.ProviderType]; exists {
- ret[con.ProviderType] = existing.AND(spec)
- } else {
- ret[con.ProviderType] = spec
- }
- }
-
- return ret
-}
-
-// ProviderConfigsByFullName returns a map from provider full names (as
-// returned by ProviderConfig.FullName()) to the corresponding provider
-// configs.
-//
-// This function returns no new information than what's already in
-// c.ProviderConfigs, but returns it in a more convenient shape. If there
-// is more than one provider config with the same full name then the result
-// is undefined, but that is guaranteed not to happen for any config that
-// has passed validation.
-func (c *Config) ProviderConfigsByFullName() map[string]*ProviderConfig {
- ret := make(map[string]*ProviderConfig, len(c.ProviderConfigs))
-
- for _, pc := range c.ProviderConfigs {
- ret[pc.FullName()] = pc
- }
-
- return ret
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/provisioner_enums.go b/vendor/github.com/hashicorp/terraform/config/provisioner_enums.go
deleted file mode 100644
index 00fd43fce..000000000
--- a/vendor/github.com/hashicorp/terraform/config/provisioner_enums.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package config
-
-// ProvisionerWhen is an enum for valid values for when to run provisioners.
-type ProvisionerWhen int
-
-const (
- ProvisionerWhenInvalid ProvisionerWhen = iota
- ProvisionerWhenCreate
- ProvisionerWhenDestroy
-)
-
-var provisionerWhenStrs = map[ProvisionerWhen]string{
- ProvisionerWhenInvalid: "invalid",
- ProvisionerWhenCreate: "create",
- ProvisionerWhenDestroy: "destroy",
-}
-
-func (v ProvisionerWhen) String() string {
- return provisionerWhenStrs[v]
-}
-
-// ProvisionerOnFailure is an enum for valid values for on_failure options
-// for provisioners.
-type ProvisionerOnFailure int
-
-const (
- ProvisionerOnFailureInvalid ProvisionerOnFailure = iota
- ProvisionerOnFailureContinue
- ProvisionerOnFailureFail
-)
-
-var provisionerOnFailureStrs = map[ProvisionerOnFailure]string{
- ProvisionerOnFailureInvalid: "invalid",
- ProvisionerOnFailureContinue: "continue",
- ProvisionerOnFailureFail: "fail",
-}
-
-func (v ProvisionerOnFailure) String() string {
- return provisionerOnFailureStrs[v]
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/raw_config.go b/vendor/github.com/hashicorp/terraform/config/raw_config.go
deleted file mode 100644
index 27bcd1da0..000000000
--- a/vendor/github.com/hashicorp/terraform/config/raw_config.go
+++ /dev/null
@@ -1,419 +0,0 @@
-package config
-
-import (
- "bytes"
- "encoding/gob"
- "errors"
- "strconv"
- "sync"
-
- hcl2 "github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/hil"
- "github.com/hashicorp/hil/ast"
- "github.com/mitchellh/copystructure"
- "github.com/mitchellh/reflectwalk"
-)
-
-// RawConfig is a structure that holds a piece of configuration
-// where the overall structure is unknown since it will be used
-// to configure a plugin or some other similar external component.
-//
-// RawConfigs can be interpolated with variables that come from
-// other resources, user variables, etc.
-//
-// RawConfig supports a query-like interface to request
-// information from deep within the structure.
-type RawConfig struct {
- Key string
-
- // Only _one_ of Raw and Body may be populated at a time.
- //
- // In the normal case, Raw is populated and Body is nil.
- //
- // When the experimental HCL2 parsing mode is enabled, "Body"
- // is populated and RawConfig serves only to transport the hcl2.Body
- // through the rest of Terraform core so we can ultimately decode it
- // once its schema is known.
- //
- // Once we transition to HCL2 as the primary representation, RawConfig
- // should be removed altogether and the hcl2.Body should be passed
- // around directly.
-
- Raw map[string]interface{}
- Body hcl2.Body
-
- Interpolations []ast.Node
- Variables map[string]InterpolatedVariable
-
- lock sync.Mutex
- config map[string]interface{}
- unknownKeys []string
-}
-
-// NewRawConfig creates a new RawConfig structure and populates the
-// publicly readable struct fields.
-func NewRawConfig(raw map[string]interface{}) (*RawConfig, error) {
- result := &RawConfig{Raw: raw}
- if err := result.init(); err != nil {
- return nil, err
- }
-
- return result, nil
-}
-
-// NewRawConfigHCL2 creates a new RawConfig that is serving as a capsule
-// to transport a hcl2.Body. In this mode, the publicly-readable struct
-// fields are not populated since all operations should instead be diverted
-// to the HCL2 body.
-//
-// For a RawConfig object constructed with this function, the only valid use
-// is to later retrieve the Body value and call its own methods. Callers
-// may choose to set and then later handle the Key field, in a manner
-// consistent with how it is handled by the Value method, but the Value
-// method itself must not be used.
-//
-// This is an experimental codepath to be used only by the HCL2 config loader.
-// Non-experimental parsing should _always_ use NewRawConfig to produce a
-// fully-functional RawConfig object.
-func NewRawConfigHCL2(body hcl2.Body) *RawConfig {
- return &RawConfig{
- Body: body,
- }
-}
-
-// RawMap returns a copy of the RawConfig.Raw map.
-func (r *RawConfig) RawMap() map[string]interface{} {
- r.lock.Lock()
- defer r.lock.Unlock()
-
- m := make(map[string]interface{})
- for k, v := range r.Raw {
- m[k] = v
- }
- return m
-}
-
-// Copy returns a copy of this RawConfig, uninterpolated.
-func (r *RawConfig) Copy() *RawConfig {
- if r == nil {
- return nil
- }
-
- r.lock.Lock()
- defer r.lock.Unlock()
-
- if r.Body != nil {
- return NewRawConfigHCL2(r.Body)
- }
-
- newRaw := make(map[string]interface{})
- for k, v := range r.Raw {
- newRaw[k] = v
- }
-
- result, err := NewRawConfig(newRaw)
- if err != nil {
- panic("copy failed: " + err.Error())
- }
-
- result.Key = r.Key
- return result
-}
-
-// Value returns the value of the configuration if this configuration
-// has a Key set. If this does not have a Key set, nil will be returned.
-func (r *RawConfig) Value() interface{} {
- if c := r.Config(); c != nil {
- if v, ok := c[r.Key]; ok {
- return v
- }
- }
-
- r.lock.Lock()
- defer r.lock.Unlock()
- return r.Raw[r.Key]
-}
-
-// Config returns the entire configuration with the variables
-// interpolated from any call to Interpolate.
-//
-// If any interpolated variables are unknown (value set to
-// UnknownVariableValue), the first non-container (map, slice, etc.) element
-// will be removed from the config. The keys of unknown variables
-// can be found using the UnknownKeys function.
-//
-// By pruning out unknown keys from the configuration, the raw
-// structure will always successfully decode into its ultimate
-// structure using something like mapstructure.
-func (r *RawConfig) Config() map[string]interface{} {
- r.lock.Lock()
- defer r.lock.Unlock()
- return r.config
-}
-
-// Interpolate uses the given mapping of variable values and uses
-// those as the values to replace any variables in this raw
-// configuration.
-//
-// Any prior calls to Interpolate are replaced with this one.
-//
-// If a variable key is missing, this will panic.
-func (r *RawConfig) Interpolate(vs map[string]ast.Variable) error {
- r.lock.Lock()
- defer r.lock.Unlock()
-
- config := langEvalConfig(vs)
- return r.interpolate(func(root ast.Node) (interface{}, error) {
- // None of the variables we need are computed, meaning we should
- // be able to properly evaluate.
- result, err := hil.Eval(root, config)
- if err != nil {
- return "", err
- }
-
- return result.Value, nil
- })
-}
-
-// Merge merges another RawConfig into this one (overriding any conflicting
-// values in this config) and returns a new config. The original config
-// is not modified.
-func (r *RawConfig) Merge(other *RawConfig) *RawConfig {
- r.lock.Lock()
- defer r.lock.Unlock()
-
- // Merge the raw configurations
- raw := make(map[string]interface{})
- for k, v := range r.Raw {
- raw[k] = v
- }
- for k, v := range other.Raw {
- raw[k] = v
- }
-
- // Create the result
- result, err := NewRawConfig(raw)
- if err != nil {
- panic(err)
- }
-
- // Merge the interpolated results
- result.config = make(map[string]interface{})
- for k, v := range r.config {
- result.config[k] = v
- }
- for k, v := range other.config {
- result.config[k] = v
- }
-
- // Build the unknown keys
- if len(r.unknownKeys) > 0 || len(other.unknownKeys) > 0 {
- unknownKeys := make(map[string]struct{})
- for _, k := range r.unknownKeys {
- unknownKeys[k] = struct{}{}
- }
- for _, k := range other.unknownKeys {
- unknownKeys[k] = struct{}{}
- }
-
- result.unknownKeys = make([]string, 0, len(unknownKeys))
- for k := range unknownKeys {
- result.unknownKeys = append(result.unknownKeys, k)
- }
- }
-
- return result
-}
-
-func (r *RawConfig) init() error {
- r.lock.Lock()
- defer r.lock.Unlock()
-
- r.config = r.Raw
- r.Interpolations = nil
- r.Variables = nil
-
- fn := func(node ast.Node) (interface{}, error) {
- r.Interpolations = append(r.Interpolations, node)
- vars, err := DetectVariables(node)
- if err != nil {
- return "", err
- }
-
- for _, v := range vars {
- if r.Variables == nil {
- r.Variables = make(map[string]InterpolatedVariable)
- }
-
- r.Variables[v.FullKey()] = v
- }
-
- return "", nil
- }
-
- walker := &interpolationWalker{F: fn}
- if err := reflectwalk.Walk(r.Raw, walker); err != nil {
- return err
- }
-
- return nil
-}
-
-func (r *RawConfig) interpolate(fn interpolationWalkerFunc) error {
- if r.Body != nil {
- // For RawConfigs created for the HCL2 experiment, callers must
- // use the HCL2 Body API directly rather than interpolating via
- // the RawConfig.
- return errors.New("this feature is not yet supported under the HCL2 experiment")
- }
-
- config, err := copystructure.Copy(r.Raw)
- if err != nil {
- return err
- }
- r.config = config.(map[string]interface{})
-
- w := &interpolationWalker{F: fn, Replace: true}
- err = reflectwalk.Walk(r.config, w)
- if err != nil {
- return err
- }
-
- r.unknownKeys = w.unknownKeys
- return nil
-}
-
-func (r *RawConfig) merge(r2 *RawConfig) *RawConfig {
- if r == nil && r2 == nil {
- return nil
- }
-
- if r == nil {
- r = &RawConfig{}
- }
-
- rawRaw, err := copystructure.Copy(r.Raw)
- if err != nil {
- panic(err)
- }
-
- raw := rawRaw.(map[string]interface{})
- if r2 != nil {
- for k, v := range r2.Raw {
- raw[k] = v
- }
- }
-
- result, err := NewRawConfig(raw)
- if err != nil {
- panic(err)
- }
-
- return result
-}
-
-// couldBeInteger is a helper that determines if the represented value could
-// result in an integer.
-//
-// This function only works for RawConfigs that have "Key" set, meaning that
-// a single result can be produced. Calling this function will overwrite
-// the Config and Value results to be a test value.
-//
-// This function is conservative. If there is some doubt about whether the
-// result could be an integer -- for example, if it depends on a variable
-// whose type we don't know yet -- it will still return true.
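-//
-// It works by re-interpolating the keyed value with every interpolation
-// replaced by the literal string "5" and then checking whether the result
-// still parses as an integer.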
-func (r *RawConfig) couldBeInteger() bool {
- if r.Key == "" {
- // un-keyed RawConfigs can never produce numbers
- return false
- }
- if r.Body == nil {
- // Normal path: using the interpolator in this package
- // Interpolate with a fixed number to verify that it's a number.
- r.interpolate(func(root ast.Node) (interface{}, error) {
- // Execute the node but transform the AST so that it returns
- // a fixed value of "5" for all interpolations.
- result, err := hil.Eval(
- hil.FixedValueTransform(
- root, &ast.LiteralNode{Value: "5", Typex: ast.TypeString}),
- nil)
- if err != nil {
- return "", err
- }
-
- return result.Value, nil
- })
- _, err := strconv.ParseInt(r.Value().(string), 0, 0)
- return err == nil
- } else {
- // We briefly tried to gradually implement HCL2 support by adding a
- // branch here, but that experiment was not successful.
- panic("HCL2 experimental path no longer supported")
- }
-}
-
-// UnknownKeys returns the keys of the configuration that are unknown
-// because they had interpolated variables that must be computed.
-func (r *RawConfig) UnknownKeys() []string {
- r.lock.Lock()
- defer r.lock.Unlock()
- return r.unknownKeys
-}
-
-// See GobEncode
-func (r *RawConfig) GobDecode(b []byte) error {
- var data gobRawConfig
- err := gob.NewDecoder(bytes.NewReader(b)).Decode(&data)
- if err != nil {
- return err
- }
-
- r.Key = data.Key
- r.Raw = data.Raw
-
- return r.init()
-}
-
-// GobEncode is a custom Gob encoder to use so that we only include the
-// raw configuration. Interpolated variables and such are lost and the
-// tree of interpolated variables is recomputed on decode, since it is
-// referentially transparent.
-func (r *RawConfig) GobEncode() ([]byte, error) {
- r.lock.Lock()
- defer r.lock.Unlock()
-
- data := gobRawConfig{
- Key: r.Key,
- Raw: r.Raw,
- }
-
- var buf bytes.Buffer
- if err := gob.NewEncoder(&buf).Encode(data); err != nil {
- return nil, err
- }
-
- return buf.Bytes(), nil
-}
-
-type gobRawConfig struct {
- Key string
- Raw map[string]interface{}
-}
-
-// langEvalConfig returns the evaluation configuration we use to execute.
-//
-// The interpolation functions are no longer available here, because this
-// codepath is no longer used. Instead, see ../lang/functions.go .
-func langEvalConfig(vs map[string]ast.Variable) *hil.EvalConfig {
- funcMap := make(map[string]ast.Function)
- for k, v := range Funcs() {
- funcMap[k] = v
- }
-
- return &hil.EvalConfig{
- GlobalScope: &ast.BasicScope{
- VarMap: vs,
- FuncMap: funcMap,
- },
- }
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/resource_mode.go b/vendor/github.com/hashicorp/terraform/config/resource_mode.go
deleted file mode 100644
index dd915217c..000000000
--- a/vendor/github.com/hashicorp/terraform/config/resource_mode.go
+++ /dev/null
@@ -1,9 +0,0 @@
-package config
-
-//go:generate go run golang.org/x/tools/cmd/stringer -type=ResourceMode -output=resource_mode_string.go resource_mode.go
-type ResourceMode int
-
-const (
- ManagedResourceMode ResourceMode = iota
- DataResourceMode
-)
diff --git a/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go b/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go
deleted file mode 100644
index 010527824..000000000
--- a/vendor/github.com/hashicorp/terraform/config/resource_mode_string.go
+++ /dev/null
@@ -1,24 +0,0 @@
-// Code generated by "stringer -type=ResourceMode -output=resource_mode_string.go resource_mode.go"; DO NOT EDIT.
-
-package config
-
-import "strconv"
-
-func _() {
- // An "invalid array index" compiler error signifies that the constant values have changed.
- // Re-run the stringer command to generate them again.
- var x [1]struct{}
- _ = x[ManagedResourceMode-0]
- _ = x[DataResourceMode-1]
-}
-
-const _ResourceMode_name = "ManagedResourceModeDataResourceMode"
-
-var _ResourceMode_index = [...]uint8{0, 19, 35}
-
-func (i ResourceMode) String() string {
- if i < 0 || i >= ResourceMode(len(_ResourceMode_index)-1) {
- return "ResourceMode(" + strconv.FormatInt(int64(i), 10) + ")"
- }
- return _ResourceMode_name[_ResourceMode_index[i]:_ResourceMode_index[i+1]]
-}
diff --git a/vendor/github.com/hashicorp/terraform/config/testing.go b/vendor/github.com/hashicorp/terraform/config/testing.go
deleted file mode 100644
index 831fc7786..000000000
--- a/vendor/github.com/hashicorp/terraform/config/testing.go
+++ /dev/null
@@ -1,17 +0,0 @@
-package config
-
-import (
- "testing"
-)
-
-// TestRawConfig is used to create a RawConfig for testing.
-func TestRawConfig(t *testing.T, c map[string]interface{}) *RawConfig {
- t.Helper()
-
- cfg, err := NewRawConfig(c)
- if err != nil {
- t.Fatalf("err: %s", err)
- }
-
- return cfg
-}
diff --git a/vendor/github.com/hashicorp/terraform/configs/backend.go b/vendor/github.com/hashicorp/terraform/configs/backend.go
deleted file mode 100644
index 5d8b9732a..000000000
--- a/vendor/github.com/hashicorp/terraform/configs/backend.go
+++ /dev/null
@@ -1,55 +0,0 @@
-package configs
-
-import (
- "github.com/hashicorp/hcl/v2"
- "github.com/hashicorp/hcl/v2/hcldec"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/zclconf/go-cty/cty"
-)
-
-// Backend represents a "backend" block inside a "terraform" block in a module
-// or file.
-type Backend struct {
- Type string
- Config hcl.Body
-
- TypeRange hcl.Range
- DeclRange hcl.Range
-}
-
-func decodeBackendBlock(block *hcl.Block) (*Backend, hcl.Diagnostics) {
- return &Backend{
- Type: block.Labels[0],
- TypeRange: block.LabelRanges[0],
- Config: block.Body,
- DeclRange: block.DefRange,
- }, nil
-}
-
-// Hash produces a hash value for the receiver that covers the type and the
-// portions of the config that conform to the given schema.
-//
-// If the config does not conform to the schema then the result is not
-// meaningful for comparison since it will be based on an incomplete result.
-//
-// As an exception, required attributes in the schema are treated as optional
-// for the purpose of hashing, so that an incomplete configuration can still
-// be hashed. Other errors, such as extraneous attributes, have no such special
-// case.
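-//
-// The hash is computed over a tuple of the backend type and the config value
-// decoded against the (relaxed) schema, so the result reflects both the
-// backend type and its configuration.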
-func (b *Backend) Hash(schema *configschema.Block) int {
- // Don't fail if required attributes are not set. Instead, we'll just
- // hash them as nulls.
- schema = schema.NoneRequired()
- spec := schema.DecoderSpec()
- val, _ := hcldec.Decode(b.Config, spec, nil)
- if val == cty.NilVal {
- val = cty.UnknownVal(schema.ImpliedType())
- }
-
- toHash := cty.TupleVal([]cty.Value{
- cty.StringVal(b.Type),
- val,
- })
-
- return toHash.Hash()
-}
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/getter.go b/vendor/github.com/hashicorp/terraform/configs/configload/getter.go
deleted file mode 100644
index 75c7ef1f4..000000000
--- a/vendor/github.com/hashicorp/terraform/configs/configload/getter.go
+++ /dev/null
@@ -1,152 +0,0 @@
-package configload
-
-import (
- "fmt"
- "log"
- "os"
- "path/filepath"
-
- cleanhttp "github.com/hashicorp/go-cleanhttp"
- getter "github.com/hashicorp/go-getter"
-)
-
-// We configure our own go-getter detector and getter sets here, because
-// the set of sources we support is part of Terraform's documentation and
-// so we don't want any new sources introduced in go-getter to sneak in here
-// and work even though they aren't documented. This also insulates us from
-// any meddling that might be done by other go-getter callers linked into our
-// executable.
-
-var goGetterDetectors = []getter.Detector{
- new(getter.GitHubDetector),
- new(getter.BitBucketDetector),
- new(getter.GCSDetector),
- new(getter.S3Detector),
- new(getter.FileDetector),
-}
-
-var goGetterNoDetectors = []getter.Detector{}
-
-var goGetterDecompressors = map[string]getter.Decompressor{
- "bz2": new(getter.Bzip2Decompressor),
- "gz": new(getter.GzipDecompressor),
- "xz": new(getter.XzDecompressor),
- "zip": new(getter.ZipDecompressor),
-
- "tar.bz2": new(getter.TarBzip2Decompressor),
- "tar.tbz2": new(getter.TarBzip2Decompressor),
-
- "tar.gz": new(getter.TarGzipDecompressor),
- "tgz": new(getter.TarGzipDecompressor),
-
- "tar.xz": new(getter.TarXzDecompressor),
- "txz": new(getter.TarXzDecompressor),
-}
-
-var goGetterGetters = map[string]getter.Getter{
- "file": new(getter.FileGetter),
- "gcs": new(getter.GCSGetter),
- "git": new(getter.GitGetter),
- "hg": new(getter.HgGetter),
- "s3": new(getter.S3Getter),
- "http": getterHTTPGetter,
- "https": getterHTTPGetter,
-}
-
-var getterHTTPClient = cleanhttp.DefaultClient()
-
-var getterHTTPGetter = &getter.HttpGetter{
- Client: getterHTTPClient,
- Netrc: true,
-}
-
-// A reusingGetter is a helper for the module installer that remembers
-// the final resolved addresses of all of the sources it has already been
-// asked to install, and will copy from a prior installation directory if
-// it has the same resolved source address.
-//
-// The keys in a reusingGetter are resolved and trimmed source addresses
-// (with a scheme always present, and without any "subdir" component),
-// and the values are the paths where each source was previously installed.
-type reusingGetter map[string]string
-
-// getWithGoGetter retrieves the package referenced in the given address
-// into the installation path and then returns the full path to any subdir
-// indicated in the address.
-//
-// The errors returned by this function are those surfaced by the underlying
-// go-getter library, which have very inconsistent quality as
-// end-user-actionable error messages. At this time we do not have any
-// reasonable way to improve these error messages at this layer because
-// the underlying errors are not separately recognizable.
-func (g reusingGetter) getWithGoGetter(instPath, addr string) (string, error) {
- packageAddr, subDir := splitAddrSubdir(addr)
-
- log.Printf("[DEBUG] will download %q to %s", packageAddr, instPath)
-
- realAddr, err := getter.Detect(packageAddr, instPath, getter.Detectors)
- if err != nil {
- return "", err
- }
-
- var realSubDir string
- realAddr, realSubDir = splitAddrSubdir(realAddr)
- if realSubDir != "" {
- subDir = filepath.Join(realSubDir, subDir)
- }
-
- if realAddr != packageAddr {
- log.Printf("[TRACE] go-getter detectors rewrote %q to %q", packageAddr, realAddr)
- }
-
- if prevDir, exists := g[realAddr]; exists {
- log.Printf("[TRACE] copying previous install %s to %s", prevDir, instPath)
- err := os.Mkdir(instPath, os.ModePerm)
- if err != nil {
- return "", fmt.Errorf("failed to create directory %s: %s", instPath, err)
- }
- err = copyDir(instPath, prevDir)
- if err != nil {
- return "", fmt.Errorf("failed to copy from %s to %s: %s", prevDir, instPath, err)
- }
- } else {
- log.Printf("[TRACE] fetching %q to %q", realAddr, instPath)
- client := getter.Client{
- Src: realAddr,
- Dst: instPath,
- Pwd: instPath,
-
- Mode: getter.ClientModeDir,
-
- Detectors: goGetterNoDetectors, // we already did detection above
- Decompressors: goGetterDecompressors,
- Getters: goGetterGetters,
- }
- err = client.Get()
- if err != nil {
- return "", err
- }
- // Remember where we installed this so we might reuse this directory
- // on subsequent calls to avoid re-downloading.
- g[realAddr] = instPath
- }
-
- // Our subDir string can contain wildcards until this point, so that
- // e.g. a subDir of * can expand to one top-level directory in a .tar.gz
- // archive. Now that we've expanded the archive successfully we must
- // resolve that into a concrete path.
- var finalDir string
- if subDir != "" {
- finalDir, err = getter.SubdirGlob(instPath, subDir)
- log.Printf("[TRACE] expanded %q to %q", subDir, finalDir)
- if err != nil {
- return "", err
- }
- } else {
- finalDir = instPath
- }
-
- // If we got this far then we have apparently succeeded in downloading
- // the requested object!
- return filepath.Clean(finalDir), nil
-}
diff --git a/vendor/github.com/hashicorp/terraform/configs/configload/source_addr.go b/vendor/github.com/hashicorp/terraform/configs/configload/source_addr.go
deleted file mode 100644
index 594cf6406..000000000
--- a/vendor/github.com/hashicorp/terraform/configs/configload/source_addr.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package configload
-
-import (
- "strings"
-
- "github.com/hashicorp/go-getter"
-
- "github.com/hashicorp/terraform/registry/regsrc"
-)
-
-var localSourcePrefixes = []string{
- "./",
- "../",
- ".\\",
- "..\\",
-}
-
-func isLocalSourceAddr(addr string) bool {
- for _, prefix := range localSourcePrefixes {
- if strings.HasPrefix(addr, prefix) {
- return true
- }
- }
- return false
-}
-
-func isRegistrySourceAddr(addr string) bool {
- _, err := regsrc.ParseModuleSource(addr)
- return err == nil
-}
-
-// splitAddrSubdir splits the given address (which is assumed to be a
-// registry address or go-getter-style address) into a package portion
-// and a sub-directory portion.
-//
-// The package portion defines what should be downloaded and then the
-// sub-directory portion, if present, specifies a sub-directory within
-// the downloaded object (an archive, VCS repository, etc) that contains
-// the module's configuration files.
-//
-// The subDir portion will be returned as empty if no subdir separator
-// ("//") is present in the address.
-func splitAddrSubdir(addr string) (packageAddr, subDir string) {
- return getter.SourceDirSubdir(addr)
-}
diff --git a/vendor/github.com/hashicorp/terraform/helper/config/decode.go b/vendor/github.com/hashicorp/terraform/helper/config/decode.go
deleted file mode 100644
index f470c9b4b..000000000
--- a/vendor/github.com/hashicorp/terraform/helper/config/decode.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package config
-
-import (
- "github.com/mitchellh/mapstructure"
-)
-
-func Decode(target interface{}, raws ...interface{}) (*mapstructure.Metadata, error) {
- var md mapstructure.Metadata
- decoderConfig := &mapstructure.DecoderConfig{
- Metadata: &md,
- Result: target,
- WeaklyTypedInput: true,
- }
-
- decoder, err := mapstructure.NewDecoder(decoderConfig)
- if err != nil {
- return nil, err
- }
-
- for _, raw := range raws {
- err := decoder.Decode(raw)
- if err != nil {
- return nil, err
- }
- }
-
- return &md, nil
-}
diff --git a/vendor/github.com/hashicorp/terraform/helper/hilmapstructure/hilmapstructure.go b/vendor/github.com/hashicorp/terraform/helper/hilmapstructure/hilmapstructure.go
deleted file mode 100644
index 67be1df1f..000000000
--- a/vendor/github.com/hashicorp/terraform/helper/hilmapstructure/hilmapstructure.go
+++ /dev/null
@@ -1,41 +0,0 @@
-package hilmapstructure
-
-import (
- "fmt"
- "reflect"
-
- "github.com/mitchellh/mapstructure"
-)
-
-var hilMapstructureDecodeHookEmptySlice []interface{}
-var hilMapstructureDecodeHookStringSlice []string
-var hilMapstructureDecodeHookEmptyMap map[string]interface{}
-
-// WeakDecode behaves in the same way as mapstructure.WeakDecode but has a
-// DecodeHook which defeats the backward compatibility mode of mapstructure
-// which WeakDecodes []interface{}{} into an empty map[string]interface{}. This
-// allows us to use WeakDecode (desirable), but not fail on empty lists.
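-//
-// Concretely, the hook returns an error when the source value is a
-// []interface{} or []string and the target is a map[string]interface{},
-// instead of silently producing an empty map.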
-func WeakDecode(m interface{}, rawVal interface{}) error {
- config := &mapstructure.DecoderConfig{
- DecodeHook: func(source reflect.Type, target reflect.Type, val interface{}) (interface{}, error) {
- sliceType := reflect.TypeOf(hilMapstructureDecodeHookEmptySlice)
- stringSliceType := reflect.TypeOf(hilMapstructureDecodeHookStringSlice)
- mapType := reflect.TypeOf(hilMapstructureDecodeHookEmptyMap)
-
- if (source == sliceType || source == stringSliceType) && target == mapType {
- return nil, fmt.Errorf("Cannot convert a []interface{} into a map[string]interface{}")
- }
-
- return val, nil
- },
- WeaklyTypedInput: true,
- Result: rawVal,
- }
-
- decoder, err := mapstructure.NewDecoder(config)
- if err != nil {
- return err
- }
-
- return decoder.Decode(m)
-}
diff --git a/vendor/github.com/hashicorp/terraform/helper/plugin/grpc_provisioner.go b/vendor/github.com/hashicorp/terraform/helper/plugin/grpc_provisioner.go
deleted file mode 100644
index 088e94e4a..000000000
--- a/vendor/github.com/hashicorp/terraform/helper/plugin/grpc_provisioner.go
+++ /dev/null
@@ -1,201 +0,0 @@
-package plugin
-
-import (
- "log"
- "strings"
- "unicode/utf8"
-
- "github.com/hashicorp/terraform/helper/schema"
- proto "github.com/hashicorp/terraform/internal/tfplugin5"
- "github.com/hashicorp/terraform/plugin/convert"
- "github.com/hashicorp/terraform/terraform"
- "github.com/zclconf/go-cty/cty"
- ctyconvert "github.com/zclconf/go-cty/cty/convert"
- "github.com/zclconf/go-cty/cty/msgpack"
- context "golang.org/x/net/context"
-)
-
-// NewGRPCProvisionerServerShim wraps a terraform.ResourceProvisioner in a
-// proto.ProvisionerServer implementation. If the provided provisioner is not a
-// *schema.Provisioner, this will return nil.
-func NewGRPCProvisionerServerShim(p terraform.ResourceProvisioner) *GRPCProvisionerServer {
- sp, ok := p.(*schema.Provisioner)
- if !ok {
- return nil
- }
- return &GRPCProvisionerServer{
- provisioner: sp,
- }
-}
-
-type GRPCProvisionerServer struct {
- provisioner *schema.Provisioner
-}
-
-func (s *GRPCProvisionerServer) GetSchema(_ context.Context, req *proto.GetProvisionerSchema_Request) (*proto.GetProvisionerSchema_Response, error) {
- resp := &proto.GetProvisionerSchema_Response{}
-
- resp.Provisioner = &proto.Schema{
- Block: convert.ConfigSchemaToProto(schema.InternalMap(s.provisioner.Schema).CoreConfigSchema()),
- }
-
- return resp, nil
-}
-
-func (s *GRPCProvisionerServer) ValidateProvisionerConfig(_ context.Context, req *proto.ValidateProvisionerConfig_Request) (*proto.ValidateProvisionerConfig_Response, error) {
- resp := &proto.ValidateProvisionerConfig_Response{}
-
- cfgSchema := schema.InternalMap(s.provisioner.Schema).CoreConfigSchema()
-
- configVal, err := msgpack.Unmarshal(req.Config.Msgpack, cfgSchema.ImpliedType())
- if err != nil {
- resp.Diagnostics = convert.AppendProtoDiag(resp.Diagnostics, err)
- return resp, nil
- }
-
- config := terraform.NewResourceConfigShimmed(configVal, cfgSchema)
-
- warns, errs := s.provisioner.Validate(config)
- resp.Diagnostics = convert.AppendProtoDiag(resp.Diagnostics, convert.WarnsAndErrsToProto(warns, errs))
-
- return resp, nil
-}
-
-// stringMapFromValue converts a cty.Value to a map[string]string.
-// This will panic if the val is not a cty.Map(cty.String).
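-//
-// Null or unknown maps produce an empty result, and null or unknown
-// elements are skipped; remaining elements are converted to strings.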
-func stringMapFromValue(val cty.Value) map[string]string {
- m := map[string]string{}
- if val.IsNull() || !val.IsKnown() {
- return m
- }
-
- for it := val.ElementIterator(); it.Next(); {
- ak, av := it.Element()
- name := ak.AsString()
-
- if !av.IsKnown() || av.IsNull() {
- continue
- }
-
- av, _ = ctyconvert.Convert(av, cty.String)
- m[name] = av.AsString()
- }
-
- return m
-}
-
-// uiOutput implements the terraform.UIOutput interface to adapt the grpc
-// stream to the legacy Provisioner.Apply method.
-type uiOutput struct {
- srv proto.Provisioner_ProvisionResourceServer
-}
-
-func (o uiOutput) Output(s string) {
- err := o.srv.Send(&proto.ProvisionResource_Response{
- Output: toValidUTF8(s, string(utf8.RuneError)),
- })
- if err != nil {
- log.Printf("[ERROR] %s", err)
- }
-}
-
-func (s *GRPCProvisionerServer) ProvisionResource(req *proto.ProvisionResource_Request, srv proto.Provisioner_ProvisionResourceServer) error {
-	// We send diagnostics back over the stream if there was a
- // provisioner-side problem.
- srvResp := &proto.ProvisionResource_Response{}
-
- cfgSchema := schema.InternalMap(s.provisioner.Schema).CoreConfigSchema()
- cfgVal, err := msgpack.Unmarshal(req.Config.Msgpack, cfgSchema.ImpliedType())
- if err != nil {
- srvResp.Diagnostics = convert.AppendProtoDiag(srvResp.Diagnostics, err)
- srv.Send(srvResp)
- return nil
- }
- resourceConfig := terraform.NewResourceConfigShimmed(cfgVal, cfgSchema)
-
- connVal, err := msgpack.Unmarshal(req.Connection.Msgpack, cty.Map(cty.String))
- if err != nil {
- srvResp.Diagnostics = convert.AppendProtoDiag(srvResp.Diagnostics, err)
- srv.Send(srvResp)
- return nil
- }
-
- conn := stringMapFromValue(connVal)
-
- instanceState := &terraform.InstanceState{
- Ephemeral: terraform.EphemeralState{
- ConnInfo: conn,
- },
- Meta: make(map[string]interface{}),
- }
-
- err = s.provisioner.Apply(uiOutput{srv}, instanceState, resourceConfig)
- if err != nil {
- srvResp.Diagnostics = convert.AppendProtoDiag(srvResp.Diagnostics, err)
- srv.Send(srvResp)
- }
- return nil
-}
-
-func (s *GRPCProvisionerServer) Stop(_ context.Context, req *proto.Stop_Request) (*proto.Stop_Response, error) {
- resp := &proto.Stop_Response{}
-
- err := s.provisioner.Stop()
- if err != nil {
- resp.Error = err.Error()
- }
-
- return resp, nil
-}
-
-// FIXME: backported from go1.13 strings package, remove once terraform is
-// using go >= 1.13
-// ToValidUTF8 returns a copy of the string s with each run of invalid UTF-8 byte sequences
-// replaced by the replacement string, which may be empty.
-func toValidUTF8(s, replacement string) string {
- var b strings.Builder
-
- for i, c := range s {
- if c != utf8.RuneError {
- continue
- }
-
- _, wid := utf8.DecodeRuneInString(s[i:])
- if wid == 1 {
- b.Grow(len(s) + len(replacement))
- b.WriteString(s[:i])
- s = s[i:]
- break
- }
- }
-
- // Fast path for unchanged input
- if b.Cap() == 0 { // didn't call b.Grow above
- return s
- }
-
- invalid := false // previous byte was from an invalid UTF-8 sequence
- for i := 0; i < len(s); {
- c := s[i]
- if c < utf8.RuneSelf {
- i++
- invalid = false
- b.WriteByte(c)
- continue
- }
- _, wid := utf8.DecodeRuneInString(s[i:])
- if wid == 1 {
- i++
- if !invalid {
- invalid = true
- b.WriteString(replacement)
- }
- continue
- }
- invalid = false
- b.WriteString(s[i : i+wid])
- i += wid
- }
-
- return b.String()
-}
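
The shim above carries a local backport of ToValidUTF8 because the vendored code still targeted pre-1.13 Go toolchains. For reference, on Go 1.13+ the standard library provides the same behaviour directly; a minimal, standalone sketch (the sample string is made up):

```go
package main

import (
	"fmt"
	"strings"
	"unicode/utf8"
)

func main() {
	// Provisioner output containing an invalid UTF-8 byte sequence.
	raw := "instance ready\xff\xfe"

	// strings.ToValidUTF8 (Go 1.13+) does what the backported helper does:
	// each run of invalid bytes is collapsed into one replacement string.
	clean := strings.ToValidUTF8(raw, string(utf8.RuneError))

	fmt.Println(clean)                   // the invalid run becomes a single U+FFFD
	fmt.Println(utf8.ValidString(clean)) // true
}
```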
diff --git a/vendor/github.com/hashicorp/terraform/helper/schema/provisioner.go b/vendor/github.com/hashicorp/terraform/helper/schema/provisioner.go
deleted file mode 100644
index eee155bfb..000000000
--- a/vendor/github.com/hashicorp/terraform/helper/schema/provisioner.go
+++ /dev/null
@@ -1,205 +0,0 @@
-package schema
-
-import (
- "context"
- "errors"
- "fmt"
- "sync"
-
- "github.com/hashicorp/go-multierror"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/terraform"
-)
-
-// Provisioner represents a resource provisioner in Terraform and properly
-// implements all of the ResourceProvisioner API.
-//
-// This higher level structure makes it much easier to implement a new or
-// custom provisioner for Terraform.
-//
-// The function callbacks for this structure are all passed a context object.
-// This context object has a number of pre-defined values that can be accessed
-// via the global functions defined in context.go.
-type Provisioner struct {
- // ConnSchema is the schema for the connection settings for this
- // provisioner.
- //
- // The keys of this map are the configuration keys, and the value is
- // the schema describing the value of the configuration.
- //
- // NOTE: The value of connection keys can only be strings for now.
- ConnSchema map[string]*Schema
-
- // Schema is the schema for the usage of this provisioner.
- //
- // The keys of this map are the configuration keys, and the value is
- // the schema describing the value of the configuration.
- Schema map[string]*Schema
-
- // ApplyFunc is the function for executing the provisioner. This is required.
- // It is given a context. See the Provisioner struct docs for more
- // information.
- ApplyFunc func(ctx context.Context) error
-
- // ValidateFunc is a function for extended validation. This is optional
- // and should be used when individual field validation is not enough.
- ValidateFunc func(*terraform.ResourceConfig) ([]string, []error)
-
- stopCtx context.Context
- stopCtxCancel context.CancelFunc
- stopOnce sync.Once
-}
-
-// Keys that can be used to access data in the context parameters for
-// Provisioners.
-var (
- connDataInvalid = contextKey("data invalid")
-
- // This returns a *ResourceData for the connection information.
- // Guaranteed to never be nil.
- ProvConnDataKey = contextKey("provider conn data")
-
- // This returns a *ResourceData for the config information.
- // Guaranteed to never be nil.
- ProvConfigDataKey = contextKey("provider config data")
-
- // This returns a terraform.UIOutput. Guaranteed to never be nil.
- ProvOutputKey = contextKey("provider output")
-
- // This returns the raw InstanceState passed to Apply. Guaranteed to
- // be set, but may be nil.
- ProvRawStateKey = contextKey("provider raw state")
-)
-
-// InternalValidate should be called to validate the structure
-// of the provisioner.
-//
-// This should be called in a unit test to verify before release that this
-// structure is properly configured for use.
-func (p *Provisioner) InternalValidate() error {
- if p == nil {
- return errors.New("provisioner is nil")
- }
-
- var validationErrors error
- {
- sm := schemaMap(p.ConnSchema)
- if err := sm.InternalValidate(sm); err != nil {
- validationErrors = multierror.Append(validationErrors, err)
- }
- }
-
- {
- sm := schemaMap(p.Schema)
- if err := sm.InternalValidate(sm); err != nil {
- validationErrors = multierror.Append(validationErrors, err)
- }
- }
-
- if p.ApplyFunc == nil {
- validationErrors = multierror.Append(validationErrors, fmt.Errorf(
- "ApplyFunc must not be nil"))
- }
-
- return validationErrors
-}
-
-// StopContext returns a context that checks whether a provisioner is stopped.
-func (p *Provisioner) StopContext() context.Context {
- p.stopOnce.Do(p.stopInit)
- return p.stopCtx
-}
-
-func (p *Provisioner) stopInit() {
- p.stopCtx, p.stopCtxCancel = context.WithCancel(context.Background())
-}
-
-// Stop implementation of terraform.ResourceProvisioner interface.
-func (p *Provisioner) Stop() error {
- p.stopOnce.Do(p.stopInit)
- p.stopCtxCancel()
- return nil
-}
-
-// GetConfigSchema implementation of terraform.ResourceProvisioner interface.
-func (p *Provisioner) GetConfigSchema() (*configschema.Block, error) {
- return schemaMap(p.Schema).CoreConfigSchema(), nil
-}
-
-// Apply implementation of terraform.ResourceProvisioner interface.
-func (p *Provisioner) Apply(
- o terraform.UIOutput,
- s *terraform.InstanceState,
- c *terraform.ResourceConfig) error {
- var connData, configData *ResourceData
-
- {
- // We first need to turn the connection information into a
- // terraform.ResourceConfig so that we can use that type to more
- // easily build a ResourceData structure. We do this by simply treating
- // the conn info as configuration input.
- raw := make(map[string]interface{})
- if s != nil {
- for k, v := range s.Ephemeral.ConnInfo {
- raw[k] = v
- }
- }
-
- c := terraform.NewResourceConfigRaw(raw)
- sm := schemaMap(p.ConnSchema)
- diff, err := sm.Diff(nil, c, nil, nil, true)
- if err != nil {
- return err
- }
- connData, err = sm.Data(nil, diff)
- if err != nil {
- return err
- }
- }
-
- {
- // Build the configuration data. Doing this requires making a "diff"
- // even though that's never used. We use that just to get the correct types.
- configMap := schemaMap(p.Schema)
- diff, err := configMap.Diff(nil, c, nil, nil, true)
- if err != nil {
- return err
- }
- configData, err = configMap.Data(nil, diff)
- if err != nil {
- return err
- }
- }
-
- // Build the context and call the function
- ctx := p.StopContext()
- ctx = context.WithValue(ctx, ProvConnDataKey, connData)
- ctx = context.WithValue(ctx, ProvConfigDataKey, configData)
- ctx = context.WithValue(ctx, ProvOutputKey, o)
- ctx = context.WithValue(ctx, ProvRawStateKey, s)
- return p.ApplyFunc(ctx)
-}
-
-// Validate implements the terraform.ResourceProvisioner interface.
-func (p *Provisioner) Validate(c *terraform.ResourceConfig) (ws []string, es []error) {
- if err := p.InternalValidate(); err != nil {
- return nil, []error{fmt.Errorf(
- "Internal validation of the provisioner failed! This is always a bug\n"+
- "with the provisioner itself, and not a user issue. Please report\n"+
- "this bug:\n\n%s", err)}
- }
-
- if p.Schema != nil {
- w, e := schemaMap(p.Schema).Validate(c)
- ws = append(ws, w...)
- es = append(es, e...)
- }
-
- if p.ValidateFunc != nil {
- w, e := p.ValidateFunc(c)
- ws = append(ws, w...)
- es = append(es, e...)
- }
-
- return ws, es
-}
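
For reference, a provisioner built on the helper/schema API deleted above is wired together roughly as follows; this is a minimal sketch with a hypothetical "message" argument, where ApplyFunc reads its inputs back out of the context via the exported keys:

```go
package example

import (
	"context"
	"fmt"

	"github.com/hashicorp/terraform/helper/schema"
	"github.com/hashicorp/terraform/terraform"
)

// newEchoProvisioner is a hypothetical provisioner: ApplyFunc retrieves the
// config ResourceData and the UIOutput from the context keys defined above.
func newEchoProvisioner() *schema.Provisioner {
	return &schema.Provisioner{
		Schema: map[string]*schema.Schema{
			"message": {
				Type:     schema.TypeString,
				Required: true,
			},
		},
		ApplyFunc: func(ctx context.Context) error {
			data := ctx.Value(schema.ProvConfigDataKey).(*schema.ResourceData)
			out := ctx.Value(schema.ProvOutputKey).(terraform.UIOutput)
			out.Output(fmt.Sprintf("echo: %s", data.Get("message").(string)))
			return nil
		},
	}
}
```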
diff --git a/vendor/github.com/hashicorp/terraform/helper/validation/validation.go b/vendor/github.com/hashicorp/terraform/helper/validation/validation.go
deleted file mode 100644
index efded8928..000000000
--- a/vendor/github.com/hashicorp/terraform/helper/validation/validation.go
+++ /dev/null
@@ -1,341 +0,0 @@
-package validation
-
-import (
- "bytes"
- "fmt"
- "net"
- "reflect"
- "regexp"
- "strings"
- "time"
-
- "github.com/hashicorp/terraform/helper/schema"
- "github.com/hashicorp/terraform/helper/structure"
-)
-
-// All returns a SchemaValidateFunc which tests if the provided value
-// passes all provided SchemaValidateFunc
-func All(validators ...schema.SchemaValidateFunc) schema.SchemaValidateFunc {
- return func(i interface{}, k string) ([]string, []error) {
- var allErrors []error
- var allWarnings []string
- for _, validator := range validators {
- validatorWarnings, validatorErrors := validator(i, k)
- allWarnings = append(allWarnings, validatorWarnings...)
- allErrors = append(allErrors, validatorErrors...)
- }
- return allWarnings, allErrors
- }
-}
-
-// Any returns a SchemaValidateFunc which tests if the provided value
-// passes any of the provided SchemaValidateFunc
-func Any(validators ...schema.SchemaValidateFunc) schema.SchemaValidateFunc {
- return func(i interface{}, k string) ([]string, []error) {
- var allErrors []error
- var allWarnings []string
- for _, validator := range validators {
- validatorWarnings, validatorErrors := validator(i, k)
- if len(validatorWarnings) == 0 && len(validatorErrors) == 0 {
- return []string{}, []error{}
- }
- allWarnings = append(allWarnings, validatorWarnings...)
- allErrors = append(allErrors, validatorErrors...)
- }
- return allWarnings, allErrors
- }
-}
-
-// IntBetween returns a SchemaValidateFunc which tests if the provided value
-// is of type int and is between min and max (inclusive)
-func IntBetween(min, max int) schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(int)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be int", k))
- return
- }
-
- if v < min || v > max {
- es = append(es, fmt.Errorf("expected %s to be in the range (%d - %d), got %d", k, min, max, v))
- return
- }
-
- return
- }
-}
-
-// IntAtLeast returns a SchemaValidateFunc which tests if the provided value
-// is of type int and is at least min (inclusive)
-func IntAtLeast(min int) schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(int)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be int", k))
- return
- }
-
- if v < min {
- es = append(es, fmt.Errorf("expected %s to be at least (%d), got %d", k, min, v))
- return
- }
-
- return
- }
-}
-
-// IntAtMost returns a SchemaValidateFunc which tests if the provided value
-// is of type int and is at most max (inclusive)
-func IntAtMost(max int) schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(int)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be int", k))
- return
- }
-
- if v > max {
- es = append(es, fmt.Errorf("expected %s to be at most (%d), got %d", k, max, v))
- return
- }
-
- return
- }
-}
-
-// IntInSlice returns a SchemaValidateFunc which tests if the provided value
-// is of type int and matches the value of an element in the valid slice
-func IntInSlice(valid []int) schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(int)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be an integer", k))
- return
- }
-
- for _, validInt := range valid {
- if v == validInt {
- return
- }
- }
-
- es = append(es, fmt.Errorf("expected %s to be one of %v, got %d", k, valid, v))
- return
- }
-}
-
-// StringInSlice returns a SchemaValidateFunc which tests if the provided value
-// is of type string and matches the value of an element in the valid slice
-// (the comparison is case-insensitive if ignoreCase is true)
-func StringInSlice(valid []string, ignoreCase bool) schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(string)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be string", k))
- return
- }
-
- for _, str := range valid {
- if v == str || (ignoreCase && strings.ToLower(v) == strings.ToLower(str)) {
- return
- }
- }
-
- es = append(es, fmt.Errorf("expected %s to be one of %v, got %s", k, valid, v))
- return
- }
-}
-
-// StringLenBetween returns a SchemaValidateFunc which tests if the provided value
-// is of type string and has length between min and max (inclusive)
-func StringLenBetween(min, max int) schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(string)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be string", k))
- return
- }
- if len(v) < min || len(v) > max {
- es = append(es, fmt.Errorf("expected length of %s to be in the range (%d - %d), got %s", k, min, max, v))
- }
- return
- }
-}
-
-// StringMatch returns a SchemaValidateFunc which tests if the provided value
-// matches a given regexp. Optionally an error message can be provided to
-// return something friendlier than "must match some globby regexp".
-func StringMatch(r *regexp.Regexp, message string) schema.SchemaValidateFunc {
- return func(i interface{}, k string) ([]string, []error) {
- v, ok := i.(string)
- if !ok {
- return nil, []error{fmt.Errorf("expected type of %s to be string", k)}
- }
-
- if ok := r.MatchString(v); !ok {
- if message != "" {
- return nil, []error{fmt.Errorf("invalid value for %s (%s)", k, message)}
-
- }
- return nil, []error{fmt.Errorf("expected value of %s to match regular expression %q", k, r)}
- }
- return nil, nil
- }
-}
-
-// NoZeroValues is a SchemaValidateFunc which tests if the provided value is
-// not a zero value. It's useful in situations where you want to catch
-// explicit zero values on things like required fields during validation.
-func NoZeroValues(i interface{}, k string) (s []string, es []error) {
- if reflect.ValueOf(i).Interface() == reflect.Zero(reflect.TypeOf(i)).Interface() {
- switch reflect.TypeOf(i).Kind() {
- case reflect.String:
- es = append(es, fmt.Errorf("%s must not be empty", k))
- case reflect.Int, reflect.Float64:
- es = append(es, fmt.Errorf("%s must not be zero", k))
- default:
- // this validator should only ever be applied to TypeString, TypeInt and TypeFloat
- panic(fmt.Errorf("can't use NoZeroValues with %T attribute %s", i, k))
- }
- }
- return
-}
-
-// CIDRNetwork returns a SchemaValidateFunc which tests if the provided value
-// is of type string, is in valid CIDR network notation, and has significant bits between min and max (inclusive)
-func CIDRNetwork(min, max int) schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(string)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be string", k))
- return
- }
-
- _, ipnet, err := net.ParseCIDR(v)
- if err != nil {
- es = append(es, fmt.Errorf(
- "expected %s to contain a valid CIDR, got: %s with err: %s", k, v, err))
- return
- }
-
- if ipnet == nil || v != ipnet.String() {
- es = append(es, fmt.Errorf(
- "expected %s to contain a valid network CIDR, expected %s, got %s",
- k, ipnet, v))
- }
-
- sigbits, _ := ipnet.Mask.Size()
- if sigbits < min || sigbits > max {
- es = append(es, fmt.Errorf(
- "expected %q to contain a network CIDR with between %d and %d significant bits, got: %d",
- k, min, max, sigbits))
- }
-
- return
- }
-}
-
-// SingleIP returns a SchemaValidateFunc which tests if the provided value
-// is of type string, and in valid single IP notation
-func SingleIP() schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(string)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be string", k))
- return
- }
-
- ip := net.ParseIP(v)
- if ip == nil {
- es = append(es, fmt.Errorf(
- "expected %s to contain a valid IP, got: %s", k, v))
- }
- return
- }
-}
-
-// IPRange returns a SchemaValidateFunc which tests if the provided value
-// is of type string, and in valid IP range notation
-func IPRange() schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(string)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be string", k))
- return
- }
-
- ips := strings.Split(v, "-")
- if len(ips) != 2 {
- es = append(es, fmt.Errorf(
- "expected %s to contain a valid IP range, got: %s", k, v))
- return
- }
- ip1 := net.ParseIP(ips[0])
- ip2 := net.ParseIP(ips[1])
- if ip1 == nil || ip2 == nil || bytes.Compare(ip1, ip2) > 0 {
- es = append(es, fmt.Errorf(
- "expected %s to contain a valid IP range, got: %s", k, v))
- }
- return
- }
-}
-
-// ValidateJsonString is a SchemaValidateFunc which tests to make sure the
-// supplied string is valid JSON.
-func ValidateJsonString(v interface{}, k string) (ws []string, errors []error) {
- if _, err := structure.NormalizeJsonString(v); err != nil {
-		errors = append(errors, fmt.Errorf("%q contains invalid JSON: %s", k, err))
- }
- return
-}
-
-// ValidateListUniqueStrings is a ValidateFunc that ensures a list has no
-// duplicate items in it. It's useful for when a list is needed over a set
-// because order matters, yet the items still need to be unique.
-func ValidateListUniqueStrings(v interface{}, k string) (ws []string, errors []error) {
- for n1, v1 := range v.([]interface{}) {
- for n2, v2 := range v.([]interface{}) {
- if v1.(string) == v2.(string) && n1 != n2 {
- errors = append(errors, fmt.Errorf("%q: duplicate entry - %s", k, v1.(string)))
- }
- }
- }
- return
-}
-
-// ValidateRegexp returns a SchemaValidateFunc which tests to make sure the
-// supplied string is a valid regular expression.
-func ValidateRegexp(v interface{}, k string) (ws []string, errors []error) {
- if _, err := regexp.Compile(v.(string)); err != nil {
- errors = append(errors, fmt.Errorf("%q: %s", k, err))
- }
- return
-}
-
-// ValidateRFC3339TimeString is a ValidateFunc that ensures a string parses
-// as time.RFC3339 format
-func ValidateRFC3339TimeString(v interface{}, k string) (ws []string, errors []error) {
- if _, err := time.Parse(time.RFC3339, v.(string)); err != nil {
- errors = append(errors, fmt.Errorf("%q: invalid RFC3339 timestamp", k))
- }
- return
-}
-
-// FloatBetween returns a SchemaValidateFunc which tests if the provided value
-// is of type float64 and is between min and max (inclusive).
-func FloatBetween(min, max float64) schema.SchemaValidateFunc {
- return func(i interface{}, k string) (s []string, es []error) {
- v, ok := i.(float64)
- if !ok {
- es = append(es, fmt.Errorf("expected type of %s to be float64", k))
- return
- }
-
- if v < min || v > max {
- es = append(es, fmt.Errorf("expected %s to be in the range (%f - %f), got %f", k, min, max, v))
- return
- }
-
- return
- }
-}
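
These validators are meant to be attached to schema fields via ValidateFunc; below is a small sketch with hypothetical port and protocol attributes, composing several of the helpers deleted above:

```go
package example

import (
	"github.com/hashicorp/terraform/helper/schema"
	"github.com/hashicorp/terraform/helper/validation"
)

// portSchema is a hypothetical field definition: All() requires every
// listed validator to pass for the value to be accepted.
var portSchema = &schema.Schema{
	Type:     schema.TypeInt,
	Optional: true,
	ValidateFunc: validation.All(
		validation.IntBetween(1, 65535),
		validation.IntAtLeast(1024),
	),
}

// protocolSchema restricts a string field to a fixed, case-insensitive set.
var protocolSchema = &schema.Schema{
	Type:         schema.TypeString,
	Required:     true,
	ValidateFunc: validation.StringInSlice([]string{"tcp", "udp"}, true),
}
```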
diff --git a/vendor/github.com/hashicorp/terraform/httpclient/client.go b/vendor/github.com/hashicorp/terraform/httpclient/client.go
deleted file mode 100644
index bb06beb47..000000000
--- a/vendor/github.com/hashicorp/terraform/httpclient/client.go
+++ /dev/null
@@ -1,18 +0,0 @@
-package httpclient
-
-import (
- "net/http"
-
- cleanhttp "github.com/hashicorp/go-cleanhttp"
-)
-
-// New returns the DefaultPooledClient from the cleanhttp
-// package that will also send a Terraform User-Agent string.
-func New() *http.Client {
- cli := cleanhttp.DefaultPooledClient()
- cli.Transport = &userAgentRoundTripper{
- userAgent: UserAgentString(),
- inner: cli.Transport,
- }
- return cli
-}
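
httpclient.New returns an ordinary *http.Client whose transport injects the Terraform User-Agent header; a minimal sketch of its use (the URL is just an arbitrary example):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/terraform/httpclient"
)

func main() {
	// Behaves like any *http.Client; the wrapped transport adds the
	// Terraform User-Agent header to every outgoing request.
	client := httpclient.New()

	resp, err := client.Get("https://example.com/")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
```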
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/load_config.go b/vendor/github.com/hashicorp/terraform/internal/initwd/load_config.go
deleted file mode 100644
index 6f77dcd84..000000000
--- a/vendor/github.com/hashicorp/terraform/internal/initwd/load_config.go
+++ /dev/null
@@ -1,56 +0,0 @@
-package initwd
-
-import (
- "fmt"
-
- version "github.com/hashicorp/go-version"
- "github.com/hashicorp/terraform-config-inspect/tfconfig"
- "github.com/hashicorp/terraform/internal/earlyconfig"
- "github.com/hashicorp/terraform/internal/modsdir"
- "github.com/hashicorp/terraform/tfdiags"
-)
-
-// LoadConfig loads a full configuration tree that has previously had all of
-// its dependent modules installed to the given modulesDir using a
-// ModuleInstaller.
-//
-// This uses the early configuration loader and thus only reads top-level
-// metadata from the modules in the configuration. Most callers should use
-// the configs/configload package to fully load a configuration.
-func LoadConfig(rootDir, modulesDir string) (*earlyconfig.Config, tfdiags.Diagnostics) {
- rootMod, diags := earlyconfig.LoadModule(rootDir)
- if rootMod == nil {
- return nil, diags
- }
-
- manifest, err := modsdir.ReadManifestSnapshotForDir(modulesDir)
- if err != nil {
- diags = diags.Append(tfdiags.Sourceless(
- tfdiags.Error,
- "Failed to read module manifest",
- fmt.Sprintf("Terraform failed to read its manifest of locally-cached modules: %s.", err),
- ))
- return nil, diags
- }
-
- return earlyconfig.BuildConfig(rootMod, earlyconfig.ModuleWalkerFunc(
- func(req *earlyconfig.ModuleRequest) (*tfconfig.Module, *version.Version, tfdiags.Diagnostics) {
- var diags tfdiags.Diagnostics
-
- key := manifest.ModuleKey(req.Path)
- record, exists := manifest[key]
- if !exists {
- diags = diags.Append(tfdiags.Sourceless(
- tfdiags.Error,
- "Module not installed",
- fmt.Sprintf("Module %s is not yet installed. Run \"terraform init\" to install all modules required by this configuration.", req.Path.String()),
- ))
- return nil, nil, diags
- }
-
- mod, mDiags := earlyconfig.LoadModule(record.Dir)
- diags = diags.Append(mDiags)
- return mod, record.Version, diags
- },
- ))
-}
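
LoadConfig expects the modules to have been installed already (i.e. after "terraform init"); a minimal sketch of the call pattern, noting that initwd is an internal package and therefore only importable from within the terraform module itself (the paths are hypothetical):

```go
package main

import (
	"log"

	"github.com/hashicorp/terraform/internal/initwd"
)

func main() {
	// Hypothetical working directory and modules directory; the modules must
	// already have been installed under modulesDir by a ModuleInstaller.
	cfg, diags := initwd.LoadConfig(".", ".terraform/modules")
	if diags.HasErrors() {
		log.Fatal(diags.Err())
	}
	log.Printf("loaded early config for root module at %s", cfg.Module.Path)
}
```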
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/testing.go b/vendor/github.com/hashicorp/terraform/internal/initwd/testing.go
deleted file mode 100644
index 8cef80a35..000000000
--- a/vendor/github.com/hashicorp/terraform/internal/initwd/testing.go
+++ /dev/null
@@ -1,73 +0,0 @@
-package initwd
-
-import (
- "github.com/hashicorp/terraform/registry"
- "testing"
-
- "github.com/hashicorp/terraform/configs"
- "github.com/hashicorp/terraform/configs/configload"
- "github.com/hashicorp/terraform/tfdiags"
-)
-
-// LoadConfigForTests is a convenience wrapper around configload.NewLoaderForTests,
-// ModuleInstaller.InstallModules and configload.Loader.LoadConfig that allows
-// a test configuration to be loaded in a single step.
-//
-// If module installation fails, t.Fatal (or similar) is called to halt
-// execution of the test, under the assumption that installation failures are
-// not expected. If installation failures _are_ expected then use
-// NewLoaderForTests and work with the loader object directly. If module
-// installation succeeds but generates warnings, these warnings are discarded.
-//
-// If installation succeeds but errors are detected during loading then a
-// possibly-incomplete config is returned along with error diagnostics. The
-// test run is not aborted in this case, so that the caller can make assertions
-// against the returned diagnostics.
-//
-// As with NewLoaderForTests, a cleanup function is returned which must be
-// called before the test completes in order to remove the temporary
-// modules directory.
-func LoadConfigForTests(t *testing.T, rootDir string) (*configs.Config, *configload.Loader, func(), tfdiags.Diagnostics) {
- t.Helper()
-
- var diags tfdiags.Diagnostics
-
- loader, cleanup := configload.NewLoaderForTests(t)
- inst := NewModuleInstaller(loader.ModulesDir(), registry.NewClient(nil, nil))
-
- _, moreDiags := inst.InstallModules(rootDir, true, ModuleInstallHooksImpl{})
- diags = diags.Append(moreDiags)
- if diags.HasErrors() {
- cleanup()
- t.Fatal(diags.Err())
- return nil, nil, func() {}, diags
- }
-
- // Since module installer has modified the module manifest on disk, we need
- // to refresh the cache of it in the loader.
- if err := loader.RefreshModules(); err != nil {
- t.Fatalf("failed to refresh modules after installation: %s", err)
- }
-
- config, hclDiags := loader.LoadConfig(rootDir)
- diags = diags.Append(hclDiags)
- return config, loader, cleanup, diags
-}
-
-// MustLoadConfigForTests is a variant of LoadConfigForTests which calls
-// t.Fatal (or similar) if there are any errors during loading, and thus
-// does not return diagnostics at all.
-//
-// This is useful for concisely writing tests that don't expect errors at
-// all. For tests that expect errors and need to assert against them, use
-// LoadConfigForTests instead.
-func MustLoadConfigForTests(t *testing.T, rootDir string) (*configs.Config, *configload.Loader, func()) {
- t.Helper()
-
- config, loader, cleanup, diags := LoadConfigForTests(t, rootDir)
- if diags.HasErrors() {
- cleanup()
- t.Fatal(diags.Err())
- }
- return config, loader, cleanup
-}
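
In a test, the usual pattern is the Must variant plus the returned cleanup function; a hypothetical example (the fixture path is made up):

```go
package initwd_test

import (
	"testing"

	"github.com/hashicorp/terraform/internal/initwd"
)

// TestFixtureLoads shows the intended call pattern: MustLoadConfigForTests
// fails the test itself on any error, and the cleanup function must run
// before the test returns to remove the temporary modules directory.
func TestFixtureLoads(t *testing.T) {
	config, _, cleanup := initwd.MustLoadConfigForTests(t, "testdata/simple-module")
	defer cleanup()

	if config.Module == nil {
		t.Fatal("expected a root module")
	}
}
```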
diff --git a/vendor/github.com/hashicorp/terraform/internal/initwd/version_required.go b/vendor/github.com/hashicorp/terraform/internal/initwd/version_required.go
deleted file mode 100644
index 104840b93..000000000
--- a/vendor/github.com/hashicorp/terraform/internal/initwd/version_required.go
+++ /dev/null
@@ -1,83 +0,0 @@
-package initwd
-
-import (
- "fmt"
-
- version "github.com/hashicorp/go-version"
- "github.com/hashicorp/terraform/internal/earlyconfig"
- "github.com/hashicorp/terraform/tfdiags"
- tfversion "github.com/hashicorp/terraform/version"
-)
-
-// CheckCoreVersionRequirements visits each of the modules in the given
-// early configuration tree and verifies that any given Core version constraints
-// match with the version of Terraform Core that is being used.
-//
-// The returned diagnostics will contain errors if any constraints do not match.
-// The returned diagnostics might also return warnings, which should be
-// displayed to the user.
-func CheckCoreVersionRequirements(earlyConfig *earlyconfig.Config) tfdiags.Diagnostics {
- if earlyConfig == nil {
- return nil
- }
-
- var diags tfdiags.Diagnostics
- module := earlyConfig.Module
-
- var constraints version.Constraints
- for _, constraintStr := range module.RequiredCore {
- constraint, err := version.NewConstraint(constraintStr)
- if err != nil {
- // Unfortunately the early config parser doesn't preserve a source
- // location for this, so we're unable to indicate a specific
- // location where this constraint came from, but we can at least
- // say which module set it.
- switch {
- case len(earlyConfig.Path) == 0:
- diags = diags.Append(tfdiags.Sourceless(
- tfdiags.Error,
-					"Invalid Terraform Core version constraint",
-					fmt.Sprintf("Invalid Terraform Core version constraint %q in the root module.", constraintStr),
- ))
- default:
- diags = diags.Append(tfdiags.Sourceless(
- tfdiags.Error,
-					"Invalid Terraform Core version constraint",
-					fmt.Sprintf("Invalid Terraform Core version constraint %q in %s.", constraintStr, earlyConfig.Path),
- ))
- }
- continue
- }
- constraints = append(constraints, constraint...)
- }
-
- if !constraints.Check(tfversion.SemVer) {
- switch {
- case len(earlyConfig.Path) == 0:
- diags = diags.Append(tfdiags.Sourceless(
- tfdiags.Error,
- "Unsupported Terraform Core version",
- fmt.Sprintf(
- "This configuration does not support Terraform version %s. To proceed, either choose another supported Terraform version or update the root module's version constraint. Version constraints are normally set for good reason, so updating the constraint may lead to other errors or unexpected behavior.",
- tfversion.String(),
- ),
- ))
- default:
- diags = diags.Append(tfdiags.Sourceless(
- tfdiags.Error,
- "Unsupported Terraform Core version",
- fmt.Sprintf(
- "Module %s (from %q) does not support Terraform version %s. To proceed, either choose another supported Terraform version or update the module's version constraint. Version constraints are normally set for good reason, so updating the constraint may lead to other errors or unexpected behavior.",
- earlyConfig.Path, earlyConfig.SourceAddr, tfversion.String(),
- ),
- ))
- }
- }
-
- for _, c := range earlyConfig.Children {
- childDiags := CheckCoreVersionRequirements(c)
- diags = diags.Append(childDiags)
- }
-
- return diags
-}
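
The constraint handling above is plain go-version; reduced to its core, the check looks like this (the version numbers are arbitrary):

```go
package main

import (
	"fmt"

	version "github.com/hashicorp/go-version"
)

func main() {
	// Accumulate constraints, then test the running version against them,
	// mirroring what CheckCoreVersionRequirements does per module.
	constraints, err := version.NewConstraint(">= 0.12.0, < 0.13.0")
	if err != nil {
		panic(err)
	}

	running := version.Must(version.NewVersion("0.12.24"))
	fmt.Println(constraints.Check(running)) // true
}
```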
diff --git a/vendor/github.com/hashicorp/terraform/plans/changes_state.go b/vendor/github.com/hashicorp/terraform/plans/changes_state.go
deleted file mode 100644
index 543e6c2bd..000000000
--- a/vendor/github.com/hashicorp/terraform/plans/changes_state.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package plans
-
-import (
- "github.com/hashicorp/terraform/states"
-)
-
-// PlannedState merges the set of changes described by the receiver into the
-// given prior state to produce the planned result state.
-//
-// The result is an approximation of the state as it would exist after
-// applying these changes, omitting any values that cannot be determined until
-// the changes are actually applied.
-func (c *Changes) PlannedState(prior *states.State) (*states.State, error) {
- panic("Changes.PlannedState not yet implemented")
-}
diff --git a/vendor/github.com/hashicorp/terraform/plugin/resource_provisioner.go b/vendor/github.com/hashicorp/terraform/plugin/resource_provisioner.go
deleted file mode 100644
index f0cc341f1..000000000
--- a/vendor/github.com/hashicorp/terraform/plugin/resource_provisioner.go
+++ /dev/null
@@ -1,182 +0,0 @@
-package plugin
-
-import (
- "net/rpc"
-
- "github.com/hashicorp/go-plugin"
- "github.com/hashicorp/terraform/configs/configschema"
- "github.com/hashicorp/terraform/terraform"
-)
-
-// ResourceProvisionerPlugin is the plugin.Plugin implementation.
-type ResourceProvisionerPlugin struct {
- ResourceProvisioner func() terraform.ResourceProvisioner
-}
-
-func (p *ResourceProvisionerPlugin) Server(b *plugin.MuxBroker) (interface{}, error) {
- return &ResourceProvisionerServer{
- Broker: b,
- Provisioner: p.ResourceProvisioner(),
- }, nil
-}
-
-func (p *ResourceProvisionerPlugin) Client(
- b *plugin.MuxBroker, c *rpc.Client) (interface{}, error) {
- return &ResourceProvisioner{Broker: b, Client: c}, nil
-}
-
-// ResourceProvisioner is an implementation of terraform.ResourceProvisioner
-// that communicates over RPC.
-type ResourceProvisioner struct {
- Broker *plugin.MuxBroker
- Client *rpc.Client
-}
-
-func (p *ResourceProvisioner) GetConfigSchema() (*configschema.Block, error) {
- panic("not implemented")
- return nil, nil
-}
-
-func (p *ResourceProvisioner) Validate(c *terraform.ResourceConfig) ([]string, []error) {
- var resp ResourceProvisionerValidateResponse
- args := ResourceProvisionerValidateArgs{
- Config: c,
- }
-
- err := p.Client.Call("Plugin.Validate", &args, &resp)
- if err != nil {
- return nil, []error{err}
- }
-
- var errs []error
- if len(resp.Errors) > 0 {
- errs = make([]error, len(resp.Errors))
- for i, err := range resp.Errors {
- errs[i] = err
- }
- }
-
- return resp.Warnings, errs
-}
-
-func (p *ResourceProvisioner) Apply(
- output terraform.UIOutput,
- s *terraform.InstanceState,
- c *terraform.ResourceConfig) error {
- id := p.Broker.NextId()
- go p.Broker.AcceptAndServe(id, &UIOutputServer{
- UIOutput: output,
- })
-
- var resp ResourceProvisionerApplyResponse
- args := &ResourceProvisionerApplyArgs{
- OutputId: id,
- State: s,
- Config: c,
- }
-
- err := p.Client.Call("Plugin.Apply", args, &resp)
- if err != nil {
- return err
- }
- if resp.Error != nil {
- err = resp.Error
- }
-
- return err
-}
-
-func (p *ResourceProvisioner) Stop() error {
- var resp ResourceProvisionerStopResponse
- err := p.Client.Call("Plugin.Stop", new(interface{}), &resp)
- if err != nil {
- return err
- }
- if resp.Error != nil {
- err = resp.Error
- }
-
- return err
-}
-
-func (p *ResourceProvisioner) Close() error {
- return p.Client.Close()
-}
-
-type ResourceProvisionerValidateArgs struct {
- Config *terraform.ResourceConfig
-}
-
-type ResourceProvisionerValidateResponse struct {
- Warnings []string
- Errors []*plugin.BasicError
-}
-
-type ResourceProvisionerApplyArgs struct {
- OutputId uint32
- State *terraform.InstanceState
- Config *terraform.ResourceConfig
-}
-
-type ResourceProvisionerApplyResponse struct {
- Error *plugin.BasicError
-}
-
-type ResourceProvisionerStopResponse struct {
- Error *plugin.BasicError
-}
-
-// ResourceProvisionerServer is a net/rpc compatible structure for serving
-// a ResourceProvisioner. This should not be used directly.
-type ResourceProvisionerServer struct {
- Broker *plugin.MuxBroker
- Provisioner terraform.ResourceProvisioner
-}
-
-func (s *ResourceProvisionerServer) Apply(
- args *ResourceProvisionerApplyArgs,
- result *ResourceProvisionerApplyResponse) error {
- conn, err := s.Broker.Dial(args.OutputId)
- if err != nil {
- *result = ResourceProvisionerApplyResponse{
- Error: plugin.NewBasicError(err),
- }
- return nil
- }
- client := rpc.NewClient(conn)
- defer client.Close()
-
- output := &UIOutput{Client: client}
-
- err = s.Provisioner.Apply(output, args.State, args.Config)
- *result = ResourceProvisionerApplyResponse{
- Error: plugin.NewBasicError(err),
- }
- return nil
-}
-
-func (s *ResourceProvisionerServer) Validate(
- args *ResourceProvisionerValidateArgs,
- reply *ResourceProvisionerValidateResponse) error {
- warns, errs := s.Provisioner.Validate(args.Config)
- berrs := make([]*plugin.BasicError, len(errs))
- for i, err := range errs {
- berrs[i] = plugin.NewBasicError(err)
- }
- *reply = ResourceProvisionerValidateResponse{
- Warnings: warns,
- Errors: berrs,
- }
- return nil
-}
-
-func (s *ResourceProvisionerServer) Stop(
- _ interface{},
- reply *ResourceProvisionerStopResponse) error {
- err := s.Provisioner.Stop()
- *reply = ResourceProvisionerStopResponse{
- Error: plugin.NewBasicError(err),
- }
-
- return nil
-}
diff --git a/vendor/github.com/hashicorp/terraform/provisioners/factory.go b/vendor/github.com/hashicorp/terraform/provisioners/factory.go
deleted file mode 100644
index 590b97a84..000000000
--- a/vendor/github.com/hashicorp/terraform/provisioners/factory.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package provisioners
-
-// Factory is a function type that creates a new instance of a resource
-// provisioner, or returns an error if that is impossible.
-type Factory func() (Interface, error)
-
-// FactoryFixed is a helper that creates a Factory that just returns some given
-// single provisioner.
-//
-// Unlike usual factories, the exact same instance is returned for each call
-// to the factory and so this must be used in only specialized situations where
-// the caller can take care to either not mutate the given provisioner at all
-// or to mutate it in ways that will not cause unexpected behavior for others
-// holding the same reference.
-func FactoryFixed(p Interface) Factory {
- return func() (Interface, error) {
- return p, nil
- }
-}
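
A small sketch of how FactoryFixed is typically used, e.g. to hand an already-constructed provisioner (such as a test double) to something expecting a factory map; the "local-exec" key and the helper name are hypothetical:

```go
package example

import (
	"github.com/hashicorp/terraform/provisioners"
)

// withFixedProvisioner registers a pre-built provisioner under a fixed name.
// FactoryFixed returns the same instance on every call, so the caller must
// not mutate it in ways other holders of the reference would not expect.
func withFixedProvisioner(p provisioners.Interface) map[string]provisioners.Factory {
	return map[string]provisioners.Factory{
		"local-exec": provisioners.FactoryFixed(p),
	}
}
```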
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/module.go b/vendor/github.com/hashicorp/terraform/registry/response/module.go
deleted file mode 100644
index 3bd2b3df2..000000000
--- a/vendor/github.com/hashicorp/terraform/registry/response/module.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package response
-
-import (
- "time"
-)
-
-// Module is the response structure with the data for a single module version.
-type Module struct {
- ID string `json:"id"`
-
- //---------------------------------------------------------------
- // Metadata about the overall module.
-
- Owner string `json:"owner"`
- Namespace string `json:"namespace"`
- Name string `json:"name"`
- Version string `json:"version"`
- Provider string `json:"provider"`
- Description string `json:"description"`
- Source string `json:"source"`
- PublishedAt time.Time `json:"published_at"`
- Downloads int `json:"downloads"`
- Verified bool `json:"verified"`
-}
-
-// ModuleDetail represents a module in full detail.
-type ModuleDetail struct {
- Module
-
- //---------------------------------------------------------------
- // Metadata about the overall module. This is only available when
- // requesting the specific module (not in list responses).
-
- // Root is the root module.
- Root *ModuleSubmodule `json:"root"`
-
- // Submodules are the other submodules that are available within
- // this module.
- Submodules []*ModuleSubmodule `json:"submodules"`
-
- //---------------------------------------------------------------
- // The fields below are only set when requesting this specific
- // module. They are available to easily know all available versions
- // and providers without multiple API calls.
-
- Providers []string `json:"providers"` // All available providers
- Versions []string `json:"versions"` // All versions
-}
-
-// ModuleSubmodule is the metadata about a specific submodule within
-// a module. This includes the root module as a special case.
-type ModuleSubmodule struct {
- Path string `json:"path"`
- Readme string `json:"readme"`
- Empty bool `json:"empty"`
-
- Inputs []*ModuleInput `json:"inputs"`
- Outputs []*ModuleOutput `json:"outputs"`
- Dependencies []*ModuleDep `json:"dependencies"`
- Resources []*ModuleResource `json:"resources"`
-}
-
-// ModuleInput is an input for a module.
-type ModuleInput struct {
- Name string `json:"name"`
- Description string `json:"description"`
- Default string `json:"default"`
-}
-
-// ModuleOutput is an output for a module.
-type ModuleOutput struct {
- Name string `json:"name"`
- Description string `json:"description"`
-}
-
-// ModuleDep is a module dependency of a module.
-type ModuleDep struct {
- Name string `json:"name"`
- Source string `json:"source"`
- Version string `json:"version"`
-}
-
-// ModuleProviderDep describes a provider dependency of a module.
-type ModuleProviderDep struct {
- Name string `json:"name"`
- Version string `json:"version"`
-}
-
-// ModuleResource is a resource declared within a module.
-type ModuleResource struct {
- Name string `json:"name"`
- Type string `json:"type"`
-}
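
These structs decode straight from registry JSON; a minimal sketch with a trimmed, made-up payload whose keys follow the json tags above:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/hashicorp/terraform/registry/response"
)

func main() {
	// A trimmed, fictional registry payload.
	payload := []byte(`{
		"id": "hashicorp/consul/aws/0.1.0",
		"namespace": "hashicorp",
		"name": "consul",
		"provider": "aws",
		"version": "0.1.0",
		"downloads": 42,
		"verified": true
	}`)

	var mod response.Module
	if err := json.Unmarshal(payload, &mod); err != nil {
		panic(err)
	}
	fmt.Printf("%s/%s %s (provider %s)\n", mod.Namespace, mod.Name, mod.Version, mod.Provider)
}
```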
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/module_list.go b/vendor/github.com/hashicorp/terraform/registry/response/module_list.go
deleted file mode 100644
index 978374822..000000000
--- a/vendor/github.com/hashicorp/terraform/registry/response/module_list.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package response
-
-// ModuleList is the response structure for a pageable list of modules.
-type ModuleList struct {
- Meta PaginationMeta `json:"meta"`
- Modules []*Module `json:"modules"`
-}
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/module_provider.go b/vendor/github.com/hashicorp/terraform/registry/response/module_provider.go
deleted file mode 100644
index e48499dce..000000000
--- a/vendor/github.com/hashicorp/terraform/registry/response/module_provider.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package response
-
-// ModuleProvider represents a single provider for modules.
-type ModuleProvider struct {
- Name string `json:"name"`
- Downloads int `json:"downloads"`
- ModuleCount int `json:"module_count"`
-}
-
-// ModuleProviderList is the response structure for a pageable list of ModuleProviders.
-type ModuleProviderList struct {
- Meta PaginationMeta `json:"meta"`
- Providers []*ModuleProvider `json:"providers"`
-}
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/provider.go b/vendor/github.com/hashicorp/terraform/registry/response/provider.go
deleted file mode 100644
index 5e8bae354..000000000
--- a/vendor/github.com/hashicorp/terraform/registry/response/provider.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package response
-
-import (
- "time"
-)
-
-// Provider is the response structure with the data for a single provider
-// version. This is just the metadata. A full provider response will be
-// ProviderDetail.
-type Provider struct {
- ID string `json:"id"`
-
- //---------------------------------------------------------------
- // Metadata about the overall provider.
-
- Owner string `json:"owner"`
- Namespace string `json:"namespace"`
- Name string `json:"name"`
- Version string `json:"version"`
- Description string `json:"description"`
- Source string `json:"source"`
- PublishedAt time.Time `json:"published_at"`
- Downloads int `json:"downloads"`
-}
-
-// ProviderDetail represents a Provider with full detail.
-type ProviderDetail struct {
- Provider
-
- //---------------------------------------------------------------
- // The fields below are only set when requesting this specific
-	// provider. They are available to easily know all available versions
- // without multiple API calls.
-
- Versions []string `json:"versions"` // All versions
-}
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/provider_list.go b/vendor/github.com/hashicorp/terraform/registry/response/provider_list.go
deleted file mode 100644
index 1dc7d237f..000000000
--- a/vendor/github.com/hashicorp/terraform/registry/response/provider_list.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package response
-
-// ProviderList is the response structure for a pageable list of providers.
-type ProviderList struct {
- Meta PaginationMeta `json:"meta"`
- Providers []*Provider `json:"providers"`
-}
diff --git a/vendor/github.com/hashicorp/terraform/registry/response/redirect.go b/vendor/github.com/hashicorp/terraform/registry/response/redirect.go
deleted file mode 100644
index d5eb49ba6..000000000
--- a/vendor/github.com/hashicorp/terraform/registry/response/redirect.go
+++ /dev/null
@@ -1,6 +0,0 @@
-package response
-
-// Redirect causes the frontend to perform a window redirect.
-type Redirect struct {
- URL string `json:"url"`
-}
diff --git a/vendor/github.com/hashicorp/terraform/states/statefile/marshal_equal.go b/vendor/github.com/hashicorp/terraform/states/statefile/marshal_equal.go
deleted file mode 100644
index 4948b39b9..000000000
--- a/vendor/github.com/hashicorp/terraform/states/statefile/marshal_equal.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package statefile
-
-import (
- "bytes"
-
- "github.com/hashicorp/terraform/states"
-)
-
-// StatesMarshalEqual returns true if and only if the two given states have
-// an identical (byte-for-byte) statefile representation.
-//
-// This function compares only the portions of the state that are persisted
-// in state files, so for example it will not return false if the only
-// differences between the two states are local values or descendent module
-// outputs.
-func StatesMarshalEqual(a, b *states.State) bool {
- var aBuf bytes.Buffer
- var bBuf bytes.Buffer
-
-	// nil states are not valid states, and so they can never marshal equal.
- if a == nil || b == nil {
- return false
- }
-
-	// We write both states to in-memory buffers with no header information
- // populated, thus ensuring that we're only comparing the state itself
- // and not any metadata.
- err := Write(&File{State: a}, &aBuf)
- if err != nil {
- // Should never happen, because we're writing to an in-memory buffer
- panic(err)
- }
- err = Write(&File{State: b}, &bBuf)
- if err != nil {
- // Should never happen, because we're writing to an in-memory buffer
- panic(err)
- }
-
- return bytes.Equal(aBuf.Bytes(), bBuf.Bytes())
-}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/user_agent.go b/vendor/github.com/hashicorp/terraform/terraform/user_agent.go
deleted file mode 100644
index 97f1ec1f6..000000000
--- a/vendor/github.com/hashicorp/terraform/terraform/user_agent.go
+++ /dev/null
@@ -1,12 +0,0 @@
-package terraform
-
-import (
- "github.com/hashicorp/terraform/httpclient"
-)
-
-// Generate a UserAgent string
-//
-// Deprecated: Use httpclient.UserAgent(version) instead
-func UserAgentString() string {
- return httpclient.UserAgentString()
-}
diff --git a/vendor/github.com/hashicorp/terraform/terraform/version.go b/vendor/github.com/hashicorp/terraform/terraform/version.go
deleted file mode 100644
index 0caeca0ad..000000000
--- a/vendor/github.com/hashicorp/terraform/terraform/version.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package terraform
-
-import (
- "github.com/hashicorp/terraform/version"
-)
-
-// Deprecated: Providers should use schema.Provider.TerraformVersion instead
-func VersionString() string {
- return version.String()
-}
diff --git a/vendor/github.com/hashicorp/terraform/tfdiags/consolidate_warnings.go b/vendor/github.com/hashicorp/terraform/tfdiags/consolidate_warnings.go
deleted file mode 100644
index 3e0983ee4..000000000
--- a/vendor/github.com/hashicorp/terraform/tfdiags/consolidate_warnings.go
+++ /dev/null
@@ -1,130 +0,0 @@
-package tfdiags
-
-import "fmt"
-
-// ConsolidateWarnings checks if there is an unreasonable amount of warnings
-// with the same summary in the receiver and, if so, returns a new diagnostics
-// with some of those warnings consolidated into a single warning in order
-// to reduce the verbosity of the output.
-//
-// This mechanism is here primarily for diagnostics printed out at the CLI. In
-// other contexts it is likely better to just return the warnings directly,
-// particularly if they are going to be interpreted by software rather than
-// by a human reader.
-//
-// The returned slice always has a separate backing array from the receiver,
-// but some diagnostic values themselves might be shared.
-//
-// The definition of "unreasonable" may change in future releases.
-func (diags Diagnostics) ConsolidateWarnings() Diagnostics {
- // We'll start grouping when there are more than this number of warnings
- // with the same summary.
- const unreasonableThreshold = 2
-
- if len(diags) == 0 {
- return nil
- }
-
- newDiags := make(Diagnostics, 0, len(diags))
-
- // We'll track how many times we've seen each warning summary so we can
- // decide when to start consolidating. Once we _have_ started consolidating,
- // we'll also track the object representing the consolidated warning
- // so we can continue appending to it.
- warningStats := make(map[string]int)
- warningGroups := make(map[string]*warningGroup)
-
- for _, diag := range diags {
- severity := diag.Severity()
- if severity != Warning || diag.Source().Subject == nil {
- // Only warnings can get special treatment, and we only
- // consolidate warnings that have source locations because
- // our primary goal here is to deal with the situation where
- // some configuration language feature is producing a warning
- // each time it's used across a potentially-large config.
- newDiags = newDiags.Append(diag)
- continue
- }
-
- desc := diag.Description()
- summary := desc.Summary
- if g, ok := warningGroups[summary]; ok {
- // We're already grouping this one, so we'll just continue it.
- g.Append(diag)
- continue
- }
-
- warningStats[summary]++
- if warningStats[summary] == unreasonableThreshold {
- // Initially creating the group doesn't really change anything
- // visibly in the result, since a group with only one warning
- // is just a passthrough anyway, but once we do this any additional
- // warnings with the same summary will get appended to this group.
- g := &warningGroup{}
- newDiags = newDiags.Append(g)
- warningGroups[summary] = g
- g.Append(diag)
- continue
- }
-
- // If this warning is not consolidating yet then we'll just append
- // it directly.
- newDiags = newDiags.Append(diag)
- }
-
- return newDiags
-}
-
-// A warningGroup is one or more warning diagnostics grouped together for
-// UI consolidation purposes.
-//
-// A warningGroup with only one diagnostic in it is just a passthrough for
-// that one diagnostic. If it has more than one then it will behave mostly
-// like the first one but its detail message will include an additional
-// sentence mentioning the consolidation. A warningGroup with no diagnostics
-// at all is invalid and will panic when used.
-type warningGroup struct {
- Warnings Diagnostics
-}
-
-var _ Diagnostic = (*warningGroup)(nil)
-
-func (wg *warningGroup) Severity() Severity {
- return wg.Warnings[0].Severity()
-}
-
-func (wg *warningGroup) Description() Description {
- desc := wg.Warnings[0].Description()
- if len(wg.Warnings) < 2 {
- return desc
- }
- extraCount := len(wg.Warnings) - 1
- var msg string
- switch extraCount {
- case 1:
- msg = "(and one more similar warning elsewhere)"
- default:
- msg = fmt.Sprintf("(and %d more similar warnings elsewhere)", extraCount)
- }
- if desc.Detail != "" {
- desc.Detail = desc.Detail + "\n\n" + msg
- } else {
- desc.Detail = msg
- }
- return desc
-}
-
-func (wg *warningGroup) Source() Source {
- return wg.Warnings[0].Source()
-}
-
-func (wg *warningGroup) FromExpr() *FromExpr {
- return wg.Warnings[0].FromExpr()
-}
-
-func (wg *warningGroup) Append(diag Diagnostic) {
- if diag.Severity() != Warning {
- panic("can't append a non-warning diagnostic to a warningGroup")
- }
- wg.Warnings = append(wg.Warnings, diag)
-}
diff --git a/vendor/github.com/mitchellh/hashstructure/LICENSE b/vendor/github.com/mitchellh/hashstructure/LICENSE
deleted file mode 100644
index a3866a291..000000000
--- a/vendor/github.com/mitchellh/hashstructure/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2016 Mitchell Hashimoto
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/vendor/github.com/mitchellh/hashstructure/README.md b/vendor/github.com/mitchellh/hashstructure/README.md
deleted file mode 100644
index 28ce45a3e..000000000
--- a/vendor/github.com/mitchellh/hashstructure/README.md
+++ /dev/null
@@ -1,65 +0,0 @@
-# hashstructure [![GoDoc](https://godoc.org/github.com/mitchellh/hashstructure?status.svg)](https://godoc.org/github.com/mitchellh/hashstructure)
-
-hashstructure is a Go library for creating a unique hash value
-for arbitrary values in Go.
-
-This can be used to key values in a hash (for use in a map, set, etc.)
-that are complex. The most common use case is comparing two values without
-sending data across the network, caching values locally (de-dup), and so on.
-
-## Features
-
- * Hash any arbitrary Go value, including complex types.
-
- * Tag a struct field to ignore it and not affect the hash value.
-
- * Tag a slice type struct field to treat it as a set where ordering
- doesn't affect the hash code but the field itself is still taken into
- account to create the hash value.
-
- * Optionally specify a custom hash function to optimize for speed, collision
- avoidance for your data set, etc.
-
- * Optionally hash the output of `.String()` on structs that implement fmt.Stringer,
- allowing effective hashing of time.Time
-
-## Installation
-
-Standard `go get`:
-
-```
-$ go get github.com/mitchellh/hashstructure
-```
-
-## Usage & Example
-
-For usage and examples see the [Godoc](http://godoc.org/github.com/mitchellh/hashstructure).
-
-A quick code example is shown below:
-
-```go
-type ComplexStruct struct {
- Name string
- Age uint
- Metadata map[string]interface{}
-}
-
-v := ComplexStruct{
- Name: "mitchellh",
- Age: 64,
- Metadata: map[string]interface{}{
- "car": true,
- "location": "California",
- "siblings": []string{"Bob", "John"},
- },
-}
-
-hash, err := hashstructure.Hash(v, nil)
-if err != nil {
- panic(err)
-}
-
-fmt.Printf("%d", hash)
-// Output:
-// 2307517237273902113
-```
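
To illustrate the tag behaviour described above, a small sketch: the ignored field and the ordering of the set-tagged slice do not change the hash (the struct and field names are made up):

```go
package main

import (
	"fmt"

	"github.com/mitchellh/hashstructure"
)

// Server demonstrates the struct tags documented above: UUID does not affect
// the hash, and Tags is hashed as a set, so element order is irrelevant.
type Server struct {
	Name string
	UUID string   `hash:"ignore"`
	Tags []string `hash:"set"`
}

func main() {
	a := Server{Name: "web", UUID: "a-1", Tags: []string{"prod", "eu"}}
	b := Server{Name: "web", UUID: "b-2", Tags: []string{"eu", "prod"}}

	ha, _ := hashstructure.Hash(a, nil)
	hb, _ := hashstructure.Hash(b, nil)
	fmt.Println(ha == hb) // true: ignored field and set ordering don't matter
}
```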
diff --git a/vendor/github.com/mitchellh/hashstructure/go.mod b/vendor/github.com/mitchellh/hashstructure/go.mod
deleted file mode 100644
index 966582aa9..000000000
--- a/vendor/github.com/mitchellh/hashstructure/go.mod
+++ /dev/null
@@ -1 +0,0 @@
-module github.com/mitchellh/hashstructure
diff --git a/vendor/github.com/mitchellh/hashstructure/hashstructure.go b/vendor/github.com/mitchellh/hashstructure/hashstructure.go
deleted file mode 100644
index ea13a1583..000000000
--- a/vendor/github.com/mitchellh/hashstructure/hashstructure.go
+++ /dev/null
@@ -1,358 +0,0 @@
-package hashstructure
-
-import (
- "encoding/binary"
- "fmt"
- "hash"
- "hash/fnv"
- "reflect"
-)
-
-// ErrNotStringer is returned when there's an error with hash:"string"
-type ErrNotStringer struct {
- Field string
-}
-
-// Error implements error for ErrNotStringer
-func (ens *ErrNotStringer) Error() string {
- return fmt.Sprintf("hashstructure: %s has hash:\"string\" set, but does not implement fmt.Stringer", ens.Field)
-}
-
-// HashOptions are options that are available for hashing.
-type HashOptions struct {
- // Hasher is the hash function to use. If this isn't set, it will
- // default to FNV.
- Hasher hash.Hash64
-
- // TagName is the struct tag to look at when hashing the structure.
- // By default this is "hash".
- TagName string
-
-	// ZeroNil is a flag determining whether a nil pointer should be treated
-	// as equal to the zero value of the pointed-to type. By default this is false.
- ZeroNil bool
-}
-
-// Hash returns the hash value of an arbitrary value.
-//
-// If opts is nil, then default options will be used. See HashOptions
-// for the default values. The same *HashOptions value cannot be used
-// concurrently. None of the values within a *HashOptions struct are
-// safe to read/write while hashing is being done.
-//
-// Notes on the value:
-//
-// * Unexported fields on structs are ignored and do not affect the
-// hash value.
-//
-// * Adding an exported field to a struct with the zero value will change
-// the hash value.
-//
-// For structs, the hashing can be controlled using tags. For example:
-//
-// struct {
-// Name string
-// UUID string `hash:"ignore"`
-// }
-//
-// The available tag values are:
-//
-// * "ignore" or "-" - The field will be ignored and not affect the hash code.
-//
-// * "set" - The field will be treated as a set, where ordering doesn't
-// affect the hash code. This only works for slices.
-//
-// * "string" - The field will be hashed as a string, only works when the
-// field implements fmt.Stringer
-//
-func Hash(v interface{}, opts *HashOptions) (uint64, error) {
- // Create default options
- if opts == nil {
- opts = &HashOptions{}
- }
- if opts.Hasher == nil {
- opts.Hasher = fnv.New64()
- }
- if opts.TagName == "" {
- opts.TagName = "hash"
- }
-
- // Reset the hash
- opts.Hasher.Reset()
-
- // Create our walker and walk the structure
- w := &walker{
- h: opts.Hasher,
- tag: opts.TagName,
- zeronil: opts.ZeroNil,
- }
- return w.visit(reflect.ValueOf(v), nil)
-}
-
-type walker struct {
- h hash.Hash64
- tag string
- zeronil bool
-}
-
-type visitOpts struct {
- // Flags are a bitmask of flags to affect behavior of this visit
- Flags visitFlag
-
- // Information about the struct containing this field
- Struct interface{}
- StructField string
-}
-
-func (w *walker) visit(v reflect.Value, opts *visitOpts) (uint64, error) {
- t := reflect.TypeOf(0)
-
- // Loop since these can be wrapped in multiple layers of pointers
- // and interfaces.
- for {
- // If we have an interface, dereference it. We have to do this up
- // here because it might be a nil in there and the check below must
- // catch that.
- if v.Kind() == reflect.Interface {
- v = v.Elem()
- continue
- }
-
- if v.Kind() == reflect.Ptr {
- if w.zeronil {
- t = v.Type().Elem()
- }
- v = reflect.Indirect(v)
- continue
- }
-
- break
- }
-
- // If it is nil, treat it like a zero.
- if !v.IsValid() {
- v = reflect.Zero(t)
- }
-
- // Binary writing can use raw ints, we have to convert to
- // a sized-int, we'll choose the largest...
- switch v.Kind() {
- case reflect.Int:
- v = reflect.ValueOf(int64(v.Int()))
- case reflect.Uint:
- v = reflect.ValueOf(uint64(v.Uint()))
- case reflect.Bool:
- var tmp int8
- if v.Bool() {
- tmp = 1
- }
- v = reflect.ValueOf(tmp)
- }
-
- k := v.Kind()
-
- // We can shortcut numeric values by directly binary writing them
- if k >= reflect.Int && k <= reflect.Complex64 {
- // A direct hash calculation
- w.h.Reset()
- err := binary.Write(w.h, binary.LittleEndian, v.Interface())
- return w.h.Sum64(), err
- }
-
- switch k {
- case reflect.Array:
- var h uint64
- l := v.Len()
- for i := 0; i < l; i++ {
- current, err := w.visit(v.Index(i), nil)
- if err != nil {
- return 0, err
- }
-
- h = hashUpdateOrdered(w.h, h, current)
- }
-
- return h, nil
-
- case reflect.Map:
- var includeMap IncludableMap
- if opts != nil && opts.Struct != nil {
- if v, ok := opts.Struct.(IncludableMap); ok {
- includeMap = v
- }
- }
-
- // Build the hash for the map. We do this by XOR-ing all the key
- // and value hashes. This makes it deterministic despite ordering.
- var h uint64
- for _, k := range v.MapKeys() {
- v := v.MapIndex(k)
- if includeMap != nil {
- incl, err := includeMap.HashIncludeMap(
- opts.StructField, k.Interface(), v.Interface())
- if err != nil {
- return 0, err
- }
- if !incl {
- continue
- }
- }
-
- kh, err := w.visit(k, nil)
- if err != nil {
- return 0, err
- }
- vh, err := w.visit(v, nil)
- if err != nil {
- return 0, err
- }
-
- fieldHash := hashUpdateOrdered(w.h, kh, vh)
- h = hashUpdateUnordered(h, fieldHash)
- }
-
- return h, nil
-
- case reflect.Struct:
- parent := v.Interface()
- var include Includable
- if impl, ok := parent.(Includable); ok {
- include = impl
- }
-
- t := v.Type()
- h, err := w.visit(reflect.ValueOf(t.Name()), nil)
- if err != nil {
- return 0, err
- }
-
- l := v.NumField()
- for i := 0; i < l; i++ {
- if innerV := v.Field(i); v.CanSet() || t.Field(i).Name != "_" {
- var f visitFlag
- fieldType := t.Field(i)
- if fieldType.PkgPath != "" {
- // Unexported
- continue
- }
-
- tag := fieldType.Tag.Get(w.tag)
- if tag == "ignore" || tag == "-" {
- // Ignore this field
- continue
- }
-
- // if string is set, use the string value
- if tag == "string" {
- if impl, ok := innerV.Interface().(fmt.Stringer); ok {
- innerV = reflect.ValueOf(impl.String())
- } else {
- return 0, &ErrNotStringer{
- Field: v.Type().Field(i).Name,
- }
- }
- }
-
- // Check if we implement includable and check it
- if include != nil {
- incl, err := include.HashInclude(fieldType.Name, innerV)
- if err != nil {
- return 0, err
- }
- if !incl {
- continue
- }
- }
-
- switch tag {
- case "set":
- f |= visitFlagSet
- }
-
- kh, err := w.visit(reflect.ValueOf(fieldType.Name), nil)
- if err != nil {
- return 0, err
- }
-
- vh, err := w.visit(innerV, &visitOpts{
- Flags: f,
- Struct: parent,
- StructField: fieldType.Name,
- })
- if err != nil {
- return 0, err
- }
-
- fieldHash := hashUpdateOrdered(w.h, kh, vh)
- h = hashUpdateUnordered(h, fieldHash)
- }
- }
-
- return h, nil
-
- case reflect.Slice:
- // We have two behaviors here. If it isn't a set, then we just
- // visit all the elements. If it is a set, then we do a deterministic
- // hash code.
- var h uint64
- var set bool
- if opts != nil {
- set = (opts.Flags & visitFlagSet) != 0
- }
- l := v.Len()
- for i := 0; i < l; i++ {
- current, err := w.visit(v.Index(i), nil)
- if err != nil {
- return 0, err
- }
-
- if set {
- h = hashUpdateUnordered(h, current)
- } else {
- h = hashUpdateOrdered(w.h, h, current)
- }
- }
-
- return h, nil
-
- case reflect.String:
- // Directly hash
- w.h.Reset()
- _, err := w.h.Write([]byte(v.String()))
- return w.h.Sum64(), err
-
- default:
- return 0, fmt.Errorf("unknown kind to hash: %s", k)
- }
-
-}
-
-func hashUpdateOrdered(h hash.Hash64, a, b uint64) uint64 {
- // For ordered updates, use a real hash function
- h.Reset()
-
- // We just panic if the binary writes fail because we are writing
- // an int64 which should never be fail-able.
- e1 := binary.Write(h, binary.LittleEndian, a)
- e2 := binary.Write(h, binary.LittleEndian, b)
- if e1 != nil {
- panic(e1)
- }
- if e2 != nil {
- panic(e2)
- }
-
- return h.Sum64()
-}
-
-func hashUpdateUnordered(a, b uint64) uint64 {
- return a ^ b
-}
-
-// visitFlag is used as a bitmask for affecting visit behavior
-type visitFlag uint
-
-const (
- visitFlagInvalid visitFlag = iota
- visitFlagSet = iota << 1
-)
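Since the Hash doc comment above is the main description of the struct-tag behaviour ("ignore"/"-", "set", "string") in this removed vendored copy, here is a minimal, hypothetical usage sketch of the package; the Rule type and its values are illustrative assumptions, not code from this repository.

package main

import (
	"fmt"

	"github.com/mitchellh/hashstructure"
)

// Rule is a made-up type showing the documented tag values: "set" makes the
// slice order irrelevant to the hash, "ignore" drops the field entirely.
type Rule struct {
	Name     string
	Ports    []int  `hash:"set"`
	LastSeen string `hash:"ignore"`
}

func main() {
	a := Rule{Name: "web", Ports: []int{80, 443}, LastSeen: "2019-01-01"}
	b := Rule{Name: "web", Ports: []int{443, 80}, LastSeen: "2019-06-02"}

	// nil options fall back to the defaults described above: the FNV-64
	// hasher and the "hash" tag name.
	ha, err := hashstructure.Hash(a, nil)
	if err != nil {
		panic(err)
	}
	hb, _ := hashstructure.Hash(b, nil)

	fmt.Println(ha == hb) // true: the set-tagged and ignored fields cannot change the hash
}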
diff --git a/vendor/github.com/mitchellh/hashstructure/include.go b/vendor/github.com/mitchellh/hashstructure/include.go
deleted file mode 100644
index b6289c0be..000000000
--- a/vendor/github.com/mitchellh/hashstructure/include.go
+++ /dev/null
@@ -1,15 +0,0 @@
-package hashstructure
-
-// Includable is an interface that can optionally be implemented by
-// a struct. It will be called for each field in the struct to check whether
-// it should be included in the hash.
-type Includable interface {
- HashInclude(field string, v interface{}) (bool, error)
-}
-
-// IncludableMap is an interface that can optionally be implemented by
-// a struct. It will be called when a map-type field is found to ask the
-// struct if the map item should be included in the hash.
-type IncludableMap interface {
- HashIncludeMap(field string, k, v interface{}) (bool, error)
-}
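Likewise, a minimal sketch of how a caller could opt in to the Includable and IncludableMap interfaces defined above; the Config type and its filtering rules are illustrative assumptions only, not code from this repository.

package main

import (
	"fmt"
	"strings"

	"github.com/mitchellh/hashstructure"
)

// Config is a made-up type implementing both optional interfaces.
type Config struct {
	Name      string
	DebugInfo string
	Tags      map[string]string
}

// HashInclude is consulted once per struct field; returning false leaves the
// field out of the hash.
func (c Config) HashInclude(field string, v interface{}) (bool, error) {
	return field != "DebugInfo", nil
}

// HashIncludeMap is consulted once per entry of a map-typed field.
func (c Config) HashIncludeMap(field string, k, v interface{}) (bool, error) {
	key, ok := k.(string)
	return !ok || !strings.HasPrefix(key, "tmp:"), nil
}

func main() {
	a := Config{Name: "db", DebugInfo: "x", Tags: map[string]string{"env": "prod", "tmp:run": "1"}}
	b := Config{Name: "db", DebugInfo: "y", Tags: map[string]string{"env": "prod"}}

	ha, _ := hashstructure.Hash(a, nil)
	hb, _ := hashstructure.Hash(b, nil)
	fmt.Println(ha == hb) // true: the excluded field and the filtered map entry do not contribute
}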
diff --git a/vendor/github.com/outscale/osc-go/LICENSE b/vendor/github.com/outscale/osc-go/LICENSE
deleted file mode 100644
index cf45906a7..000000000
--- a/vendor/github.com/outscale/osc-go/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [2018] [© AnyChart.com - JavaScript charts](https://www.anychart.com).
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
\ No newline at end of file
diff --git a/vendor/github.com/outscale/osc-go/oapi/client.go b/vendor/github.com/outscale/osc-go/oapi/client.go
deleted file mode 100644
index 99d084f24..000000000
--- a/vendor/github.com/outscale/osc-go/oapi/client.go
+++ /dev/null
@@ -1,10460 +0,0 @@
-// GENERATED FILE: DO NOT EDIT!
-
-package oapi
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
- "io/ioutil"
- "log"
- "net/http"
- "net/url"
- "strings"
- "time"
-
- "github.com/aws/aws-sdk-go/aws/credentials"
- v4 "github.com/aws/aws-sdk-go/aws/signer/v4"
-)
-
-type Client struct {
- service string
-
- signer *v4.Signer
-
- client *http.Client
-
- config *Config
-}
-
-type Config struct {
- AccessKey string
- SecretKey string
- Region string
- URL string
-
- //Only Used for OAPI
- Service string
-
- // User agent for client
- UserAgent string
-}
-
-func (c Config) ServiceURL() string {
- s := fmt.Sprintf("https://%s.%s.%s", c.Service, c.Region, c.URL)
-
- u, err := url.Parse(s)
- if err != nil {
- panic(err)
- }
-
- return u.String()
-}
-
-// NewClient creates an API client.
-func NewClient(config *Config, c *http.Client) *Client {
- client := &Client{}
- client.service = config.ServiceURL()
- if c != nil {
- client.client = c
- } else {
- client.client = http.DefaultClient
- }
-
- s := &v4.Signer{
- Credentials: credentials.NewStaticCredentials(config.AccessKey,
- config.SecretKey, ""),
- }
-
- client.signer = s
- client.config = config
-
- return client
-}
-
-func (c *Client) GetConfig() *Config {
- return c.config
-}
-
-// Sign ...
-func (c *Client) Sign(req *http.Request, body []byte) error {
- reader := strings.NewReader(string(body))
- timestamp := time.Now()
- _, err := c.signer.Sign(req, reader, "oapi", c.config.Region, timestamp)
-
- return err
-
-}
-
-// Do ...
-func (c *Client) Do(req *http.Request) (*http.Response, error) {
- resp, err := c.client.Do(req)
- if err != nil {
- log.Printf("[DEBUG] Error in Do Request %s", err)
- } else {
- log.Println("[DEBUG] No response to show.")
- }
-
- return resp, err
-}
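// Editorial sketch, not part of the removed file: every generated POST_*
// wrapper below follows the same pattern -- JSON-encode the request, sign it
// with AWS Signature V4, POST it to client.service + "/<OperationName>", and
// unmarshal the response body into the status-specific field of the matching
// POST_*Responses struct. A hypothetical caller (all values are placeholders)
// would have looked like this:
func exampleAcceptNetPeering() error {
	cfg := &Config{
		AccessKey: "AK...",
		SecretKey: "SK...",
		Region:    "eu-west-2",
		URL:       "outscale.com",
		Service:   "api", // ServiceURL() then yields https://api.eu-west-2.outscale.com
	}
	client := NewClient(cfg, nil) // a nil HTTP client falls back to http.DefaultClient

	// Request fields are omitted here; a real caller fills in the generated
	// AcceptNetPeeringRequest struct before sending.
	resp, err := client.POST_AcceptNetPeering(AcceptNetPeeringRequest{})
	if err != nil {
		return err // non-200 statuses are surfaced via checkErrorResponse
	}
	_ = resp.OK // *AcceptNetPeeringResponse when the API answers 200
	return nil
}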
-
-//
-func (client *Client) POST_AcceptNetPeering(
- acceptnetpeeringrequest AcceptNetPeeringRequest,
-) (
- response *POST_AcceptNetPeeringResponses,
- err error,
-) {
- path := client.service + "/AcceptNetPeering"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(acceptnetpeeringrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_AcceptNetPeeringResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &AcceptNetPeeringResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 409:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code409 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_AuthenticateAccount(
- authenticateaccountrequest AuthenticateAccountRequest,
-) (
- response *POST_AuthenticateAccountResponses,
- err error,
-) {
- path := client.service + "/AuthenticateAccount"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(authenticateaccountrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_AuthenticateAccountResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &AuthenticateAccountResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CheckSignature(
- checksignaturerequest CheckSignatureRequest,
-) (
- response *POST_CheckSignatureResponses,
- err error,
-) {
- path := client.service + "/CheckSignature"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(checksignaturerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CheckSignatureResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CheckSignatureResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CopyAccount(
- copyaccountrequest CopyAccountRequest,
-) (
- response *POST_CopyAccountResponses,
- err error,
-) {
- path := client.service + "/CopyAccount"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(copyaccountrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CopyAccountResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CopyAccountResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateAccount(
- createaccountrequest CreateAccountRequest,
-) (
- response *POST_CreateAccountResponses,
- err error,
-) {
- path := client.service + "/CreateAccount"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createaccountrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateAccountResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateAccountResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateApiKey(
- createapikeyrequest CreateApiKeyRequest,
-) (
- response *POST_CreateApiKeyResponses,
- err error,
-) {
- path := client.service + "/CreateApiKey"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createapikeyrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateApiKeyResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateApiKeyResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateClientGateway(
- createclientgatewayrequest CreateClientGatewayRequest,
-) (
- response *POST_CreateClientGatewayResponses,
- err error,
-) {
- path := client.service + "/CreateClientGateway"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createclientgatewayrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateClientGatewayResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateClientGatewayResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateDhcpOptions(
- createdhcpoptionsrequest CreateDhcpOptionsRequest,
-) (
- response *POST_CreateDhcpOptionsResponses,
- err error,
-) {
- path := client.service + "/CreateDhcpOptions"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createdhcpoptionsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateDhcpOptionsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateDhcpOptionsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateDirectLink(
- createdirectlinkrequest CreateDirectLinkRequest,
-) (
- response *POST_CreateDirectLinkResponses,
- err error,
-) {
- path := client.service + "/CreateDirectLink"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createdirectlinkrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateDirectLinkResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateDirectLinkResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateDirectLinkInterface(
- createdirectlinkinterfacerequest CreateDirectLinkInterfaceRequest,
-) (
- response *POST_CreateDirectLinkInterfaceResponses,
- err error,
-) {
- path := client.service + "/CreateDirectLinkInterface"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createdirectlinkinterfacerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateDirectLinkInterfaceResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateDirectLinkInterfaceResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateImage(
- createimagerequest CreateImageRequest,
-) (
- response *POST_CreateImageResponses,
- err error,
-) {
- path := client.service + "/CreateImage"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createimagerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateImageResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateImageResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateImageExportTask(
- createimageexporttaskrequest CreateImageExportTaskRequest,
-) (
- response *POST_CreateImageExportTaskResponses,
- err error,
-) {
- path := client.service + "/CreateImageExportTask"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createimageexporttaskrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateImageExportTaskResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateImageExportTaskResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateInternetService(
- createinternetservicerequest CreateInternetServiceRequest,
-) (
- response *POST_CreateInternetServiceResponses,
- err error,
-) {
- path := client.service + "/CreateInternetService"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createinternetservicerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateInternetServiceResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateInternetServiceResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateKeypair(
- createkeypairrequest CreateKeypairRequest,
-) (
- response *POST_CreateKeypairResponses,
- err error,
-) {
- path := client.service + "/CreateKeypair"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createkeypairrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateKeypairResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateKeypairResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 409:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code409 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateListenerRule(
- createlistenerrulerequest CreateListenerRuleRequest,
-) (
- response *POST_CreateListenerRuleResponses,
- err error,
-) {
- path := client.service + "/CreateListenerRule"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createlistenerrulerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateListenerRuleResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateListenerRuleResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateLoadBalancer(
- createloadbalancerrequest CreateLoadBalancerRequest,
-) (
- response *POST_CreateLoadBalancerResponses,
- err error,
-) {
- path := client.service + "/CreateLoadBalancer"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createloadbalancerrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateLoadBalancerResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateLoadBalancerResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateLoadBalancerListeners(
- createloadbalancerlistenersrequest CreateLoadBalancerListenersRequest,
-) (
- response *POST_CreateLoadBalancerListenersResponses,
- err error,
-) {
- path := client.service + "/CreateLoadBalancerListeners"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createloadbalancerlistenersrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateLoadBalancerListenersResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateLoadBalancerListenersResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateLoadBalancerPolicy(
- createloadbalancerpolicyrequest CreateLoadBalancerPolicyRequest,
-) (
- response *POST_CreateLoadBalancerPolicyResponses,
- err error,
-) {
- path := client.service + "/CreateLoadBalancerPolicy"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createloadbalancerpolicyrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateLoadBalancerPolicyResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateLoadBalancerPolicyResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateNatService(
- createnatservicerequest CreateNatServiceRequest,
-) (
- response *POST_CreateNatServiceResponses,
- err error,
-) {
- path := client.service + "/CreateNatService"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createnatservicerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateNatServiceResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateNatServiceResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateNet(
- createnetrequest CreateNetRequest,
-) (
- response *POST_CreateNetResponses,
- err error,
-) {
- path := client.service + "/CreateNet"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createnetrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateNetResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateNetResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 409:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code409 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateNetAccessPoint(
- createnetaccesspointrequest CreateNetAccessPointRequest,
-) (
- response *POST_CreateNetAccessPointResponses,
- err error,
-) {
- path := client.service + "/CreateNetAccessPoint"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createnetaccesspointrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateNetAccessPointResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateNetAccessPointResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateNetPeering(
- createnetpeeringrequest CreateNetPeeringRequest,
-) (
- response *POST_CreateNetPeeringResponses,
- err error,
-) {
- path := client.service + "/CreateNetPeering"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createnetpeeringrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateNetPeeringResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateNetPeeringResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateNic(
- createnicrequest CreateNicRequest,
-) (
- response *POST_CreateNicResponses,
- err error,
-) {
- path := client.service + "/CreateNic"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createnicrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateNicResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateNicResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreatePolicy(
- createpolicyrequest CreatePolicyRequest,
-) (
- response *POST_CreatePolicyResponses,
- err error,
-) {
- path := client.service + "/CreatePolicy"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createpolicyrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreatePolicyResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreatePolicyResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreatePublicIp(
- createpubliciprequest CreatePublicIpRequest,
-) (
- response *POST_CreatePublicIpResponses,
- err error,
-) {
- path := client.service + "/CreatePublicIp"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createpubliciprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreatePublicIpResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreatePublicIpResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateRoute(
- createrouterequest CreateRouteRequest,
-) (
- response *POST_CreateRouteResponses,
- err error,
-) {
- path := client.service + "/CreateRoute"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createrouterequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateRouteResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateRouteResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateRouteTable(
- createroutetablerequest CreateRouteTableRequest,
-) (
- response *POST_CreateRouteTableResponses,
- err error,
-) {
- path := client.service + "/CreateRouteTable"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createroutetablerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateRouteTableResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateRouteTableResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateSecurityGroup(
- createsecuritygrouprequest CreateSecurityGroupRequest,
-) (
- response *POST_CreateSecurityGroupResponses,
- err error,
-) {
- path := client.service + "/CreateSecurityGroup"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createsecuritygrouprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateSecurityGroupResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateSecurityGroupResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateSecurityGroupRule(
- createsecuritygrouprulerequest CreateSecurityGroupRuleRequest,
-) (
- response *POST_CreateSecurityGroupRuleResponses,
- err error,
-) {
- path := client.service + "/CreateSecurityGroupRule"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createsecuritygrouprulerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateSecurityGroupRuleResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateSecurityGroupRuleResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateServerCertificate(
- createservercertificaterequest CreateServerCertificateRequest,
-) (
- response *POST_CreateServerCertificateResponses,
- err error,
-) {
- path := client.service + "/CreateServerCertificate"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createservercertificaterequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateServerCertificateResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateServerCertificateResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateSnapshot(
- createsnapshotrequest CreateSnapshotRequest,
-) (
- response *POST_CreateSnapshotResponses,
- err error,
-) {
- path := client.service + "/CreateSnapshot"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createsnapshotrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateSnapshotResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateSnapshotResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateSnapshotExportTask(
- createsnapshotexporttaskrequest CreateSnapshotExportTaskRequest,
-) (
- response *POST_CreateSnapshotExportTaskResponses,
- err error,
-) {
- path := client.service + "/CreateSnapshotExportTask"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createsnapshotexporttaskrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateSnapshotExportTaskResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateSnapshotExportTaskResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateSubnet(
- createsubnetrequest CreateSubnetRequest,
-) (
- response *POST_CreateSubnetResponses,
- err error,
-) {
- path := client.service + "/CreateSubnet"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createsubnetrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateSubnetResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateSubnetResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 409:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code409 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateTags(
- createtagsrequest CreateTagsRequest,
-) (
- response *POST_CreateTagsResponses,
- err error,
-) {
- path := client.service + "/CreateTags"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createtagsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateTagsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateTagsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateUser(
- createuserrequest CreateUserRequest,
-) (
- response *POST_CreateUserResponses,
- err error,
-) {
- path := client.service + "/CreateUser"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createuserrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateUserResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateUserResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateUserGroup(
- createusergrouprequest CreateUserGroupRequest,
-) (
- response *POST_CreateUserGroupResponses,
- err error,
-) {
- path := client.service + "/CreateUserGroup"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createusergrouprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateUserGroupResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateUserGroupResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateVirtualGateway(
- createvirtualgatewayrequest CreateVirtualGatewayRequest,
-) (
- response *POST_CreateVirtualGatewayResponses,
- err error,
-) {
- path := client.service + "/CreateVirtualGateway"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createvirtualgatewayrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateVirtualGatewayResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateVirtualGatewayResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateVms(
- createvmsrequest CreateVmsRequest,
-) (
- response *POST_CreateVmsResponses,
- err error,
-) {
- path := client.service + "/CreateVms"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createvmsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateVmsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateVmsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateVolume(
- createvolumerequest CreateVolumeRequest,
-) (
- response *POST_CreateVolumeResponses,
- err error,
-) {
- path := client.service + "/CreateVolume"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createvolumerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateVolumeResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateVolumeResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- return nil, checkErrorResponse(resp)
- }
- return
-}
-
-//
-func (client *Client) POST_CreateVpnConnection(
- createvpnconnectionrequest CreateVpnConnectionRequest,
-) (
- response *POST_CreateVpnConnectionResponses,
- err error,
-) {
- path := client.service + "/CreateVpnConnection"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createvpnconnectionrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateVpnConnectionResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateVpnConnectionResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_CreateVpnConnectionRoute(
- createvpnconnectionrouterequest CreateVpnConnectionRouteRequest,
-) (
- response *POST_CreateVpnConnectionRouteResponses,
- err error,
-) {
- path := client.service + "/CreateVpnConnectionRoute"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(createvpnconnectionrouterequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_CreateVpnConnectionRouteResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &CreateVpnConnectionRouteResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteApiKey(
- deleteapikeyrequest DeleteApiKeyRequest,
-) (
- response *POST_DeleteApiKeyResponses,
- err error,
-) {
- path := client.service + "/DeleteApiKey"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteapikeyrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteApiKeyResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteApiKeyResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteClientGateway(
- deleteclientgatewayrequest DeleteClientGatewayRequest,
-) (
- response *POST_DeleteClientGatewayResponses,
- err error,
-) {
- path := client.service + "/DeleteClientGateway"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteclientgatewayrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteClientGatewayResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteClientGatewayResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteDhcpOptions(
- deletedhcpoptionsrequest DeleteDhcpOptionsRequest,
-) (
- response *POST_DeleteDhcpOptionsResponses,
- err error,
-) {
- path := client.service + "/DeleteDhcpOptions"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletedhcpoptionsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteDhcpOptionsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteDhcpOptionsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteDirectLink(
- deletedirectlinkrequest DeleteDirectLinkRequest,
-) (
- response *POST_DeleteDirectLinkResponses,
- err error,
-) {
- path := client.service + "/DeleteDirectLink"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletedirectlinkrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteDirectLinkResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteDirectLinkResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteDirectLinkInterface(
- deletedirectlinkinterfacerequest DeleteDirectLinkInterfaceRequest,
-) (
- response *POST_DeleteDirectLinkInterfaceResponses,
- err error,
-) {
- path := client.service + "/DeleteDirectLinkInterface"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletedirectlinkinterfacerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteDirectLinkInterfaceResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteDirectLinkInterfaceResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteExportTask(
- deleteexporttaskrequest DeleteExportTaskRequest,
-) (
- response *POST_DeleteExportTaskResponses,
- err error,
-) {
- path := client.service + "/DeleteExportTask"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteexporttaskrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteExportTaskResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteExportTaskResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteImage(
- deleteimagerequest DeleteImageRequest,
-) (
- response *POST_DeleteImageResponses,
- err error,
-) {
- path := client.service + "/DeleteImage"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteimagerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteImageResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteImageResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteInternetService(
- deleteinternetservicerequest DeleteInternetServiceRequest,
-) (
- response *POST_DeleteInternetServiceResponses,
- err error,
-) {
- path := client.service + "/DeleteInternetService"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteinternetservicerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteInternetServiceResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteInternetServiceResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteKeypair(
- deletekeypairrequest DeleteKeypairRequest,
-) (
- response *POST_DeleteKeypairResponses,
- err error,
-) {
- path := client.service + "/DeleteKeypair"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletekeypairrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteKeypairResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteKeypairResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteListenerRule(
- deletelistenerrulerequest DeleteListenerRuleRequest,
-) (
- response *POST_DeleteListenerRuleResponses,
- err error,
-) {
- path := client.service + "/DeleteListenerRule"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletelistenerrulerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteListenerRuleResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteListenerRuleResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteLoadBalancer(
- deleteloadbalancerrequest DeleteLoadBalancerRequest,
-) (
- response *POST_DeleteLoadBalancerResponses,
- err error,
-) {
- path := client.service + "/DeleteLoadBalancer"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteloadbalancerrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteLoadBalancerResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteLoadBalancerResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteLoadBalancerListeners(
- deleteloadbalancerlistenersrequest DeleteLoadBalancerListenersRequest,
-) (
- response *POST_DeleteLoadBalancerListenersResponses,
- err error,
-) {
- path := client.service + "/DeleteLoadBalancerListeners"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteloadbalancerlistenersrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteLoadBalancerListenersResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteLoadBalancerListenersResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteLoadBalancerPolicy(
- deleteloadbalancerpolicyrequest DeleteLoadBalancerPolicyRequest,
-) (
- response *POST_DeleteLoadBalancerPolicyResponses,
- err error,
-) {
- path := client.service + "/DeleteLoadBalancerPolicy"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteloadbalancerpolicyrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteLoadBalancerPolicyResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteLoadBalancerPolicyResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteNatService(
- deletenatservicerequest DeleteNatServiceRequest,
-) (
- response *POST_DeleteNatServiceResponses,
- err error,
-) {
- path := client.service + "/DeleteNatService"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletenatservicerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteNatServiceResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteNatServiceResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteNet(
- deletenetrequest DeleteNetRequest,
-) (
- response *POST_DeleteNetResponses,
- err error,
-) {
- path := client.service + "/DeleteNet"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletenetrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteNetResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteNetResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteNetAccessPoints(
- deletenetaccesspointsrequest DeleteNetAccessPointsRequest,
-) (
- response *POST_DeleteNetAccessPointsResponses,
- err error,
-) {
- path := client.service + "/DeleteNetAccessPoints"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletenetaccesspointsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteNetAccessPointsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteNetAccessPointsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteNetPeering(
- deletenetpeeringrequest DeleteNetPeeringRequest,
-) (
- response *POST_DeleteNetPeeringResponses,
- err error,
-) {
- path := client.service + "/DeleteNetPeering"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletenetpeeringrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteNetPeeringResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteNetPeeringResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 409:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code409 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteNic(
- deletenicrequest DeleteNicRequest,
-) (
- response *POST_DeleteNicResponses,
- err error,
-) {
- path := client.service + "/DeleteNic"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletenicrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteNicResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteNicResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeletePolicy(
- deletepolicyrequest DeletePolicyRequest,
-) (
- response *POST_DeletePolicyResponses,
- err error,
-) {
- path := client.service + "/DeletePolicy"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletepolicyrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeletePolicyResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeletePolicyResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeletePublicIp(
- deletepubliciprequest DeletePublicIpRequest,
-) (
- response *POST_DeletePublicIpResponses,
- err error,
-) {
- path := client.service + "/DeletePublicIp"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletepubliciprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeletePublicIpResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeletePublicIpResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteRoute(
- deleterouterequest DeleteRouteRequest,
-) (
- response *POST_DeleteRouteResponses,
- err error,
-) {
- path := client.service + "/DeleteRoute"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleterouterequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteRouteResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteRouteResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteRouteTable(
- deleteroutetablerequest DeleteRouteTableRequest,
-) (
- response *POST_DeleteRouteTableResponses,
- err error,
-) {
- path := client.service + "/DeleteRouteTable"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteroutetablerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteRouteTableResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteRouteTableResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteSecurityGroup(
- deletesecuritygrouprequest DeleteSecurityGroupRequest,
-) (
- response *POST_DeleteSecurityGroupResponses,
- err error,
-) {
- path := client.service + "/DeleteSecurityGroup"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletesecuritygrouprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteSecurityGroupResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteSecurityGroupResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteSecurityGroupRule(
- deletesecuritygrouprulerequest DeleteSecurityGroupRuleRequest,
-) (
- response *POST_DeleteSecurityGroupRuleResponses,
- err error,
-) {
- path := client.service + "/DeleteSecurityGroupRule"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletesecuritygrouprulerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteSecurityGroupRuleResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteSecurityGroupRuleResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteServerCertificate(
- deleteservercertificaterequest DeleteServerCertificateRequest,
-) (
- response *POST_DeleteServerCertificateResponses,
- err error,
-) {
- path := client.service + "/DeleteServerCertificate"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteservercertificaterequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteServerCertificateResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteServerCertificateResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteSnapshot(
- deletesnapshotrequest DeleteSnapshotRequest,
-) (
- response *POST_DeleteSnapshotResponses,
- err error,
-) {
- path := client.service + "/DeleteSnapshot"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletesnapshotrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteSnapshotResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteSnapshotResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteSubnet(
- deletesubnetrequest DeleteSubnetRequest,
-) (
- response *POST_DeleteSubnetResponses,
- err error,
-) {
- path := client.service + "/DeleteSubnet"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletesubnetrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteSubnetResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteSubnetResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteTags(
- deletetagsrequest DeleteTagsRequest,
-) (
- response *POST_DeleteTagsResponses,
- err error,
-) {
- path := client.service + "/DeleteTags"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletetagsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteTagsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteTagsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteUser(
- deleteuserrequest DeleteUserRequest,
-) (
- response *POST_DeleteUserResponses,
- err error,
-) {
- path := client.service + "/DeleteUser"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteuserrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteUserResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteUserResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteUserGroup(
- deleteusergrouprequest DeleteUserGroupRequest,
-) (
- response *POST_DeleteUserGroupResponses,
- err error,
-) {
- path := client.service + "/DeleteUserGroup"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deleteusergrouprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteUserGroupResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteUserGroupResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteVirtualGateway(
- deletevirtualgatewayrequest DeleteVirtualGatewayRequest,
-) (
- response *POST_DeleteVirtualGatewayResponses,
- err error,
-) {
- path := client.service + "/DeleteVirtualGateway"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletevirtualgatewayrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteVirtualGatewayResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteVirtualGatewayResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteVms(
- deletevmsrequest DeleteVmsRequest,
-) (
- response *POST_DeleteVmsResponses,
- err error,
-) {
- path := client.service + "/DeleteVms"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletevmsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteVmsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteVmsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteVolume(
- deletevolumerequest DeleteVolumeRequest,
-) (
- response *POST_DeleteVolumeResponses,
- err error,
-) {
- path := client.service + "/DeleteVolume"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletevolumerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteVolumeResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteVolumeResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteVpnConnection(
- deletevpnconnectionrequest DeleteVpnConnectionRequest,
-) (
- response *POST_DeleteVpnConnectionResponses,
- err error,
-) {
- path := client.service + "/DeleteVpnConnection"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletevpnconnectionrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteVpnConnectionResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteVpnConnectionResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeleteVpnConnectionRoute(
- deletevpnconnectionrouterequest DeleteVpnConnectionRouteRequest,
-) (
- response *POST_DeleteVpnConnectionRouteResponses,
- err error,
-) {
- path := client.service + "/DeleteVpnConnectionRoute"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deletevpnconnectionrouterequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeleteVpnConnectionRouteResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeleteVpnConnectionRouteResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeregisterUserInUserGroup(
- deregisteruserinusergrouprequest DeregisterUserInUserGroupRequest,
-) (
- response *POST_DeregisterUserInUserGroupResponses,
- err error,
-) {
- path := client.service + "/DeregisterUserInUserGroup"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deregisteruserinusergrouprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeregisterUserInUserGroupResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeregisterUserInUserGroupResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_DeregisterVmsInLoadBalancer(
- deregistervmsinloadbalancerrequest DeregisterVmsInLoadBalancerRequest,
-) (
- response *POST_DeregisterVmsInLoadBalancerResponses,
- err error,
-) {
- path := client.service + "/DeregisterVmsInLoadBalancer"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(deregistervmsinloadbalancerrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_DeregisterVmsInLoadBalancerResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &DeregisterVmsInLoadBalancerResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_LinkInternetService(
- linkinternetservicerequest LinkInternetServiceRequest,
-) (
- response *POST_LinkInternetServiceResponses,
- err error,
-) {
- path := client.service + "/LinkInternetService"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(linkinternetservicerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_LinkInternetServiceResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &LinkInternetServiceResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_LinkNic(
- linknicrequest LinkNicRequest,
-) (
- response *POST_LinkNicResponses,
- err error,
-) {
- path := client.service + "/LinkNic"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(linknicrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_LinkNicResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &LinkNicResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_LinkPolicy(
- linkpolicyrequest LinkPolicyRequest,
-) (
- response *POST_LinkPolicyResponses,
- err error,
-) {
- path := client.service + "/LinkPolicy"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(linkpolicyrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_LinkPolicyResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &LinkPolicyResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_LinkPrivateIps(
- linkprivateipsrequest LinkPrivateIpsRequest,
-) (
- response *POST_LinkPrivateIpsResponses,
- err error,
-) {
- path := client.service + "/LinkPrivateIps"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(linkprivateipsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_LinkPrivateIpsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &LinkPrivateIpsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_LinkPublicIp(
- linkpubliciprequest LinkPublicIpRequest,
-) (
- response *POST_LinkPublicIpResponses,
- err error,
-) {
- path := client.service + "/LinkPublicIp"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(linkpubliciprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_LinkPublicIpResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &LinkPublicIpResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_LinkRouteTable(
- linkroutetablerequest LinkRouteTableRequest,
-) (
- response *POST_LinkRouteTableResponses,
- err error,
-) {
- path := client.service + "/LinkRouteTable"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(linkroutetablerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_LinkRouteTableResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &LinkRouteTableResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_LinkVirtualGateway(
- linkvirtualgatewayrequest LinkVirtualGatewayRequest,
-) (
- response *POST_LinkVirtualGatewayResponses,
- err error,
-) {
- path := client.service + "/LinkVirtualGateway"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(linkvirtualgatewayrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_LinkVirtualGatewayResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &LinkVirtualGatewayResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_LinkVolume(
- linkvolumerequest LinkVolumeRequest,
-) (
- response *POST_LinkVolumeResponses,
- err error,
-) {
- path := client.service + "/LinkVolume"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(linkvolumerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_LinkVolumeResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &LinkVolumeResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_PurchaseReservedVmsOffer(
- purchasereservedvmsofferrequest PurchaseReservedVmsOfferRequest,
-) (
- response *POST_PurchaseReservedVmsOfferResponses,
- err error,
-) {
- path := client.service + "/PurchaseReservedVmsOffer"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(purchasereservedvmsofferrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_PurchaseReservedVmsOfferResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &PurchaseReservedVmsOfferResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadAccount(
- readaccountrequest ReadAccountRequest,
-) (
- response *POST_ReadAccountResponses,
- err error,
-) {
- path := client.service + "/ReadAccount"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readaccountrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadAccountResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadAccountResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadAccountConsumption(
- readaccountconsumptionrequest ReadAccountConsumptionRequest,
-) (
- response *POST_ReadAccountConsumptionResponses,
- err error,
-) {
- path := client.service + "/ReadAccountConsumption"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readaccountconsumptionrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadAccountConsumptionResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadAccountConsumptionResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadAdminPassword(
- readadminpasswordrequest ReadAdminPasswordRequest,
-) (
- response *POST_ReadAdminPasswordResponses,
- err error,
-) {
- path := client.service + "/ReadAdminPassword"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readadminpasswordrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadAdminPasswordResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadAdminPasswordResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadApiKeys(
- readapikeysrequest ReadApiKeysRequest,
-) (
- response *POST_ReadApiKeysResponses,
- err error,
-) {
- path := client.service + "/ReadApiKeys"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readapikeysrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadApiKeysResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadApiKeysResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadApiLogs(
- readapilogsrequest ReadApiLogsRequest,
-) (
- response *POST_ReadApiLogsResponses,
- err error,
-) {
- path := client.service + "/ReadApiLogs"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readapilogsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadApiLogsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadApiLogsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadBillableDigest(
- readbillabledigestrequest ReadBillableDigestRequest,
-) (
- response *POST_ReadBillableDigestResponses,
- err error,
-) {
- path := client.service + "/ReadBillableDigest"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readbillabledigestrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadBillableDigestResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadBillableDigestResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadCatalog(
- readcatalogrequest ReadCatalogRequest,
-) (
- response *POST_ReadCatalogResponses,
- err error,
-) {
- path := client.service + "/ReadCatalog"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readcatalogrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadCatalogResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadCatalogResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadClientGateways(
- readclientgatewaysrequest ReadClientGatewaysRequest,
-) (
- response *POST_ReadClientGatewaysResponses,
- err error,
-) {
- path := client.service + "/ReadClientGateways"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readclientgatewaysrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadClientGatewaysResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadClientGatewaysResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadConsoleOutput(
- readconsoleoutputrequest ReadConsoleOutputRequest,
-) (
- response *POST_ReadConsoleOutputResponses,
- err error,
-) {
- path := client.service + "/ReadConsoleOutput"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readconsoleoutputrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadConsoleOutputResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadConsoleOutputResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadDhcpOptions(
- readdhcpoptionsrequest ReadDhcpOptionsRequest,
-) (
- response *POST_ReadDhcpOptionsResponses,
- err error,
-) {
- path := client.service + "/ReadDhcpOptions"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readdhcpoptionsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadDhcpOptionsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadDhcpOptionsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadDirectLinkInterfaces(
- readdirectlinkinterfacesrequest ReadDirectLinkInterfacesRequest,
-) (
- response *POST_ReadDirectLinkInterfacesResponses,
- err error,
-) {
- path := client.service + "/ReadDirectLinkInterfaces"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readdirectlinkinterfacesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadDirectLinkInterfacesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadDirectLinkInterfacesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadDirectLinks(
- readdirectlinksrequest ReadDirectLinksRequest,
-) (
- response *POST_ReadDirectLinksResponses,
- err error,
-) {
- path := client.service + "/ReadDirectLinks"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readdirectlinksrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadDirectLinksResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadDirectLinksResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadImageExportTasks(
- readimageexporttasksrequest ReadImageExportTasksRequest,
-) (
- response *POST_ReadImageExportTasksResponses,
- err error,
-) {
- path := client.service + "/ReadImageExportTasks"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readimageexporttasksrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadImageExportTasksResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadImageExportTasksResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadImages(
- readimagesrequest ReadImagesRequest,
-) (
- response *POST_ReadImagesResponses,
- err error,
-) {
- path := client.service + "/ReadImages"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readimagesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadImagesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadImagesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadInternetServices(
- readinternetservicesrequest ReadInternetServicesRequest,
-) (
- response *POST_ReadInternetServicesResponses,
- err error,
-) {
- path := client.service + "/ReadInternetServices"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readinternetservicesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadInternetServicesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadInternetServicesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadKeypairs(
- readkeypairsrequest ReadKeypairsRequest,
-) (
- response *POST_ReadKeypairsResponses,
- err error,
-) {
- path := client.service + "/ReadKeypairs"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readkeypairsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadKeypairsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadKeypairsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadListenerRules(
- readlistenerrulesrequest ReadListenerRulesRequest,
-) (
- response *POST_ReadListenerRulesResponses,
- err error,
-) {
- path := client.service + "/ReadListenerRules"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readlistenerrulesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
-
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadListenerRulesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadListenerRulesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadLoadBalancers(
- readloadbalancersrequest ReadLoadBalancersRequest,
-) (
- response *POST_ReadLoadBalancersResponses,
- err error,
-) {
- path := client.service + "/ReadLoadBalancers"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readloadbalancersrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadLoadBalancersResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadLoadBalancersResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadLocations(
- readlocationsrequest ReadLocationsRequest,
-) (
- response *POST_ReadLocationsResponses,
- err error,
-) {
- path := client.service + "/ReadLocations"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readlocationsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadLocationsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadLocationsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadNatServices(
- readnatservicesrequest ReadNatServicesRequest,
-) (
- response *POST_ReadNatServicesResponses,
- err error,
-) {
- path := client.service + "/ReadNatServices"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readnatservicesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadNatServicesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadNatServicesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadNetAccessPointServices(
- readnetaccesspointservicesrequest ReadNetAccessPointServicesRequest,
-) (
- response *POST_ReadNetAccessPointServicesResponses,
- err error,
-) {
- path := client.service + "/ReadNetAccessPointServices"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readnetaccesspointservicesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadNetAccessPointServicesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadNetAccessPointServicesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadNetAccessPoints(
- readnetaccesspointsrequest ReadNetAccessPointsRequest,
-) (
- response *POST_ReadNetAccessPointsResponses,
- err error,
-) {
- path := client.service + "/ReadNetAccessPoints"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readnetaccesspointsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadNetAccessPointsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadNetAccessPointsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadNetPeerings(
- readnetpeeringsrequest ReadNetPeeringsRequest,
-) (
- response *POST_ReadNetPeeringsResponses,
- err error,
-) {
- path := client.service + "/ReadNetPeerings"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readnetpeeringsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadNetPeeringsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadNetPeeringsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadNets(
- readnetsrequest ReadNetsRequest,
-) (
- response *POST_ReadNetsResponses,
- err error,
-) {
- path := client.service + "/ReadNets"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readnetsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadNetsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadNetsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadNics(
- readnicsrequest ReadNicsRequest,
-) (
- response *POST_ReadNicsResponses,
- err error,
-) {
- path := client.service + "/ReadNics"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readnicsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadNicsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadNicsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadPolicies(
- readpoliciesrequest ReadPoliciesRequest,
-) (
- response *POST_ReadPoliciesResponses,
- err error,
-) {
- path := client.service + "/ReadPolicies"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readpoliciesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadPoliciesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadPoliciesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadPrefixLists(
- readprefixlistsrequest ReadPrefixListsRequest,
-) (
- response *POST_ReadPrefixListsResponses,
- err error,
-) {
- path := client.service + "/ReadPrefixLists"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readprefixlistsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadPrefixListsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadPrefixListsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadProductTypes(
- readproducttypesrequest ReadProductTypesRequest,
-) (
- response *POST_ReadProductTypesResponses,
- err error,
-) {
- path := client.service + "/ReadProductTypes"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readproducttypesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadProductTypesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadProductTypesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadPublicCatalog(
- readpubliccatalogrequest ReadPublicCatalogRequest,
-) (
- response *POST_ReadPublicCatalogResponses,
- err error,
-) {
- path := client.service + "/ReadPublicCatalog"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readpubliccatalogrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadPublicCatalogResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadPublicCatalogResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadPublicIpRanges(
- readpubliciprangesrequest ReadPublicIpRangesRequest,
-) (
- response *POST_ReadPublicIpRangesResponses,
- err error,
-) {
- path := client.service + "/ReadPublicIpRanges"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readpubliciprangesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadPublicIpRangesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadPublicIpRangesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadPublicIps(
- readpublicipsrequest ReadPublicIpsRequest,
-) (
- response *POST_ReadPublicIpsResponses,
- err error,
-) {
- path := client.service + "/ReadPublicIps"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readpublicipsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadPublicIpsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadPublicIpsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadQuotas(
- readquotasrequest ReadQuotasRequest,
-) (
- response *POST_ReadQuotasResponses,
- err error,
-) {
- path := client.service + "/ReadQuotas"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readquotasrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadQuotasResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadQuotasResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadRegionConfig(
- readregionconfigrequest ReadRegionConfigRequest,
-) (
- response *POST_ReadRegionConfigResponses,
- err error,
-) {
- path := client.service + "/ReadRegionConfig"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readregionconfigrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadRegionConfigResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadRegionConfigResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadRegions(
- readregionsrequest ReadRegionsRequest,
-) (
- response *POST_ReadRegionsResponses,
- err error,
-) {
- path := client.service + "/ReadRegions"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readregionsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadRegionsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadRegionsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadReservedVmOffers(
- readreservedvmoffersrequest ReadReservedVmOffersRequest,
-) (
- response *POST_ReadReservedVmOffersResponses,
- err error,
-) {
- path := client.service + "/ReadReservedVmOffers"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readreservedvmoffersrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadReservedVmOffersResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadReservedVmOffersResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadReservedVms(
- readreservedvmsrequest ReadReservedVmsRequest,
-) (
- response *POST_ReadReservedVmsResponses,
- err error,
-) {
- path := client.service + "/ReadReservedVms"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readreservedvmsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
-
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadReservedVmsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadReservedVmsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadRouteTables(
- readroutetablesrequest ReadRouteTablesRequest,
-) (
- response *POST_ReadRouteTablesResponses,
- err error,
-) {
- path := client.service + "/ReadRouteTables"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readroutetablesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadRouteTablesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadRouteTablesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadSecurityGroups(
- readsecuritygroupsrequest ReadSecurityGroupsRequest,
-) (
- response *POST_ReadSecurityGroupsResponses,
- err error,
-) {
- path := client.service + "/ReadSecurityGroups"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readsecuritygroupsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadSecurityGroupsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadSecurityGroupsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadServerCertificates(
- readservercertificatesrequest ReadServerCertificatesRequest,
-) (
- response *POST_ReadServerCertificatesResponses,
- err error,
-) {
- path := client.service + "/ReadServerCertificates"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readservercertificatesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadServerCertificatesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadServerCertificatesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadSnapshotExportTasks(
- readsnapshotexporttasksrequest ReadSnapshotExportTasksRequest,
-) (
- response *POST_ReadSnapshotExportTasksResponses,
- err error,
-) {
- path := client.service + "/ReadSnapshotExportTasks"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readsnapshotexporttasksrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadSnapshotExportTasksResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadSnapshotExportTasksResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadSnapshots(
- readsnapshotsrequest ReadSnapshotsRequest,
-) (
- response *POST_ReadSnapshotsResponses,
- err error,
-) {
- path := client.service + "/ReadSnapshots"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readsnapshotsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadSnapshotsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadSnapshotsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadSubnets(
- readsubnetsrequest ReadSubnetsRequest,
-) (
- response *POST_ReadSubnetsResponses,
- err error,
-) {
- path := client.service + "/ReadSubnets"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readsubnetsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadSubnetsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadSubnetsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadSubregions(
- readsubregionsrequest ReadSubregionsRequest,
-) (
- response *POST_ReadSubregionsResponses,
- err error,
-) {
- path := client.service + "/ReadSubregions"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readsubregionsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadSubregionsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadSubregionsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadTags(
- readtagsrequest ReadTagsRequest,
-) (
- response *POST_ReadTagsResponses,
- err error,
-) {
- path := client.service + "/ReadTags"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readtagsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadTagsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadTagsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadUserGroups(
- readusergroupsrequest ReadUserGroupsRequest,
-) (
- response *POST_ReadUserGroupsResponses,
- err error,
-) {
- path := client.service + "/ReadUserGroups"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readusergroupsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadUserGroupsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadUserGroupsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadUsers(
- readusersrequest ReadUsersRequest,
-) (
- response *POST_ReadUsersResponses,
- err error,
-) {
- path := client.service + "/ReadUsers"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readusersrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadUsersResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadUsersResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadVirtualGateways(
- readvirtualgatewaysrequest ReadVirtualGatewaysRequest,
-) (
- response *POST_ReadVirtualGatewaysResponses,
- err error,
-) {
- path := client.service + "/ReadVirtualGateways"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readvirtualgatewaysrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadVirtualGatewaysResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadVirtualGatewaysResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadVmTypes(
- readvmtypesrequest ReadVmTypesRequest,
-) (
- response *POST_ReadVmTypesResponses,
- err error,
-) {
- path := client.service + "/ReadVmTypes"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readvmtypesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadVmTypesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadVmTypesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadVms(
- readvmsrequest ReadVmsRequest,
-) (
- response *POST_ReadVmsResponses,
- err error,
-) {
- path := client.service + "/ReadVms"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readvmsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadVmsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadVmsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadVmsHealth(
- readvmshealthrequest ReadVmsHealthRequest,
-) (
- response *POST_ReadVmsHealthResponses,
- err error,
-) {
- path := client.service + "/ReadVmsHealth"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readvmshealthrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadVmsHealthResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadVmsHealthResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadVmsState(
- readvmsstaterequest ReadVmsStateRequest,
-) (
- response *POST_ReadVmsStateResponses,
- err error,
-) {
- path := client.service + "/ReadVmsState"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readvmsstaterequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadVmsStateResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadVmsStateResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadVolumes(
- readvolumesrequest ReadVolumesRequest,
-) (
- response *POST_ReadVolumesResponses,
- err error,
-) {
- path := client.service + "/ReadVolumes"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readvolumesrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadVolumesResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadVolumesResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ReadVpnConnections(
- readvpnconnectionsrequest ReadVpnConnectionsRequest,
-) (
- response *POST_ReadVpnConnectionsResponses,
- err error,
-) {
- path := client.service + "/ReadVpnConnections"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(readvpnconnectionsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ReadVpnConnectionsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ReadVpnConnectionsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_RebootVms(
- rebootvmsrequest RebootVmsRequest,
-) (
- response *POST_RebootVmsResponses,
- err error,
-) {
- path := client.service + "/RebootVms"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(rebootvmsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_RebootVmsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &RebootVmsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_RegisterUserInUserGroup(
- registeruserinusergrouprequest RegisterUserInUserGroupRequest,
-) (
- response *POST_RegisterUserInUserGroupResponses,
- err error,
-) {
- path := client.service + "/RegisterUserInUserGroup"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(registeruserinusergrouprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_RegisterUserInUserGroupResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &RegisterUserInUserGroupResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_RegisterVmsInLoadBalancer(
- registervmsinloadbalancerrequest RegisterVmsInLoadBalancerRequest,
-) (
- response *POST_RegisterVmsInLoadBalancerResponses,
- err error,
-) {
- path := client.service + "/RegisterVmsInLoadBalancer"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(registervmsinloadbalancerrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_RegisterVmsInLoadBalancerResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &RegisterVmsInLoadBalancerResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_RejectNetPeering(
- rejectnetpeeringrequest RejectNetPeeringRequest,
-) (
- response *POST_RejectNetPeeringResponses,
- err error,
-) {
- path := client.service + "/RejectNetPeering"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(rejectnetpeeringrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_RejectNetPeeringResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &RejectNetPeeringResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 409:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code409 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_ResetAccountPassword(
- resetaccountpasswordrequest ResetAccountPasswordRequest,
-) (
- response *POST_ResetAccountPasswordResponses,
- err error,
-) {
- path := client.service + "/ResetAccountPassword"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(resetaccountpasswordrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_ResetAccountPasswordResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ResetAccountPasswordResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_SendResetPasswordEmail(
- sendresetpasswordemailrequest SendResetPasswordEmailRequest,
-) (
- response *POST_SendResetPasswordEmailResponses,
- err error,
-) {
- path := client.service + "/SendResetPasswordEmail"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(sendresetpasswordemailrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_SendResetPasswordEmailResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &SendResetPasswordEmailResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_StartVms(
- startvmsrequest StartVmsRequest,
-) (
- response *POST_StartVmsResponses,
- err error,
-) {
- path := client.service + "/StartVms"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(startvmsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_StartVmsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &StartVmsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_StopVms(
- stopvmsrequest StopVmsRequest,
-) (
- response *POST_StopVmsResponses,
- err error,
-) {
- path := client.service + "/StopVms"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(stopvmsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_StopVmsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &StopVmsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UnlinkInternetService(
- unlinkinternetservicerequest UnlinkInternetServiceRequest,
-) (
- response *POST_UnlinkInternetServiceResponses,
- err error,
-) {
- path := client.service + "/UnlinkInternetService"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(unlinkinternetservicerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
-
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UnlinkInternetServiceResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UnlinkInternetServiceResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UnlinkNic(
- unlinknicrequest UnlinkNicRequest,
-) (
- response *POST_UnlinkNicResponses,
- err error,
-) {
- path := client.service + "/UnlinkNic"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(unlinknicrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UnlinkNicResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UnlinkNicResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UnlinkPolicy(
- unlinkpolicyrequest UnlinkPolicyRequest,
-) (
- response *POST_UnlinkPolicyResponses,
- err error,
-) {
- path := client.service + "/UnlinkPolicy"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(unlinkpolicyrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UnlinkPolicyResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UnlinkPolicyResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UnlinkPrivateIps(
- unlinkprivateipsrequest UnlinkPrivateIpsRequest,
-) (
- response *POST_UnlinkPrivateIpsResponses,
- err error,
-) {
- path := client.service + "/UnlinkPrivateIps"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(unlinkprivateipsrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UnlinkPrivateIpsResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UnlinkPrivateIpsResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UnlinkPublicIp(
- unlinkpubliciprequest UnlinkPublicIpRequest,
-) (
- response *POST_UnlinkPublicIpResponses,
- err error,
-) {
- path := client.service + "/UnlinkPublicIp"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(unlinkpubliciprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UnlinkPublicIpResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UnlinkPublicIpResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UnlinkRouteTable(
- unlinkroutetablerequest UnlinkRouteTableRequest,
-) (
- response *POST_UnlinkRouteTableResponses,
- err error,
-) {
- path := client.service + "/UnlinkRouteTable"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(unlinkroutetablerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UnlinkRouteTableResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UnlinkRouteTableResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UnlinkVirtualGateway(
- unlinkvirtualgatewayrequest UnlinkVirtualGatewayRequest,
-) (
- response *POST_UnlinkVirtualGatewayResponses,
- err error,
-) {
- path := client.service + "/UnlinkVirtualGateway"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(unlinkvirtualgatewayrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UnlinkVirtualGatewayResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UnlinkVirtualGatewayResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UnlinkVolume(
- unlinkvolumerequest UnlinkVolumeRequest,
-) (
- response *POST_UnlinkVolumeResponses,
- err error,
-) {
- path := client.service + "/UnlinkVolume"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(unlinkvolumerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UnlinkVolumeResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UnlinkVolumeResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateAccount(
- updateaccountrequest UpdateAccountRequest,
-) (
- response *POST_UpdateAccountResponses,
- err error,
-) {
- path := client.service + "/UpdateAccount"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updateaccountrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateAccountResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateAccountResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateApiKey(
- updateapikeyrequest UpdateApiKeyRequest,
-) (
- response *POST_UpdateApiKeyResponses,
- err error,
-) {
- path := client.service + "/UpdateApiKey"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updateapikeyrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateApiKeyResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateApiKeyResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateHealthCheck(
- updatehealthcheckrequest UpdateHealthCheckRequest,
-) (
- response *POST_UpdateHealthCheckResponses,
- err error,
-) {
- path := client.service + "/UpdateHealthCheck"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updatehealthcheckrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateHealthCheckResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateHealthCheckResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateImage(
- updateimagerequest UpdateImageRequest,
-) (
- response *POST_UpdateImageResponses,
- err error,
-) {
- path := client.service + "/UpdateImage"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updateimagerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateImageResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateImageResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateKeypair(
- updatekeypairrequest UpdateKeypairRequest,
-) (
- response *POST_UpdateKeypairResponses,
- err error,
-) {
- path := client.service + "/UpdateKeypair"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updatekeypairrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateKeypairResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateKeypairResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateListenerRule(
- updatelistenerrulerequest UpdateListenerRuleRequest,
-) (
- response *POST_UpdateListenerRuleResponses,
- err error,
-) {
- path := client.service + "/UpdateListenerRule"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updatelistenerrulerequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateListenerRuleResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateListenerRuleResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateLoadBalancer(
- updateloadbalancerrequest UpdateLoadBalancerRequest,
-) (
- response *POST_UpdateLoadBalancerResponses,
- err error,
-) {
- path := client.service + "/UpdateLoadBalancer"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updateloadbalancerrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateLoadBalancerResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateLoadBalancerResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateNet(
- updatenetrequest UpdateNetRequest,
-) (
- response *POST_UpdateNetResponses,
- err error,
-) {
- path := client.service + "/UpdateNet"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updatenetrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateNetResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateNetResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateNetAccessPoint(
- updatenetaccesspointrequest UpdateNetAccessPointRequest,
-) (
- response *POST_UpdateNetAccessPointResponses,
- err error,
-) {
- path := client.service + "/UpdateNetAccessPoint"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updatenetaccesspointrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateNetAccessPointResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateNetAccessPointResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateNic(
- updatenicrequest UpdateNicRequest,
-) (
- response *POST_UpdateNicResponses,
- err error,
-) {
- path := client.service + "/UpdateNic"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updatenicrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateNicResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateNicResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateRoute(
- updaterouterequest UpdateRouteRequest,
-) (
- response *POST_UpdateRouteResponses,
- err error,
-) {
- path := client.service + "/UpdateRoute"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updaterouterequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateRouteResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateRouteResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateRoutePropagation(
- updateroutepropagationrequest UpdateRoutePropagationRequest,
-) (
- response *POST_UpdateRoutePropagationResponses,
- err error,
-) {
- path := client.service + "/UpdateRoutePropagation"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updateroutepropagationrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateRoutePropagationResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateRoutePropagationResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateServerCertificate(
- updateservercertificaterequest UpdateServerCertificateRequest,
-) (
- response *POST_UpdateServerCertificateResponses,
- err error,
-) {
- path := client.service + "/UpdateServerCertificate"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updateservercertificaterequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateServerCertificateResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateServerCertificateResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateSnapshot(
- updatesnapshotrequest UpdateSnapshotRequest,
-) (
- response *POST_UpdateSnapshotResponses,
- err error,
-) {
- path := client.service + "/UpdateSnapshot"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updatesnapshotrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateSnapshotResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateSnapshotResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateUser(
- updateuserrequest UpdateUserRequest,
-) (
- response *POST_UpdateUserResponses,
- err error,
-) {
- path := client.service + "/UpdateUser"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updateuserrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateUserResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateUserResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateUserGroup(
- updateusergrouprequest UpdateUserGroupRequest,
-) (
- response *POST_UpdateUserGroupResponses,
- err error,
-) {
- path := client.service + "/UpdateUserGroup"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updateusergrouprequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateUserGroupResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateUserGroupResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- default:
- break
- }
- return
-}
-
-//
-func (client *Client) POST_UpdateVm(
- updatevmrequest UpdateVmRequest,
-) (
- response *POST_UpdateVmResponses,
- err error,
-) {
- path := client.service + "/UpdateVm"
- body := new(bytes.Buffer)
- json.NewEncoder(body).Encode(updatevmrequest)
- req, err := http.NewRequest("POST", path, body)
- reqHeaders := make(http.Header)
- reqHeaders.Set("Content-Type", "application/json")
- req.Header = reqHeaders
- client.Sign(req, body.Bytes())
- if err != nil {
- return
- }
-
- resp, err := client.Do(req)
- if err != nil {
- return
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- return nil, checkErrorResponse(resp)
- }
- response = &POST_UpdateVmResponses{}
- switch {
- case resp.StatusCode == 200:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &UpdateVmResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.OK = result
- case resp.StatusCode == 400:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code400 = result
- case resp.StatusCode == 401:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code401 = result
- case resp.StatusCode == 500:
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return nil, err
- }
- result := &ErrorResponse{}
- err = json.Unmarshal(body, result)
- if err != nil {
- return nil, err
- }
- response.Code500 = result
- default:
- break
- }
- return
-}
-
-func checkErrorResponse(resp *http.Response) error {
- body, err := ioutil.ReadAll(resp.Body)
- if err != nil {
- return fmt.Errorf("error reading response error body %s", err)
- }
-
- reason, errFmt := fmtErrorResponse(body)
- if errFmt != nil {
- return fmt.Errorf("error formating error resonse %s", err)
- }
-
- return fmt.Errorf("error, status code %d, reason: %s", resp.StatusCode, reason)
-}
-
-func fmtErrorResponse(errBody []byte) (string, error) {
- result := &ErrorResponse{}
- err := json.Unmarshal(errBody, result)
- if err != nil {
- return "", err
- }
-
- errors, errPretty := json.MarshalIndent(result, "", " ")
- if errPretty != nil {
- return "", err
- }
-
- return string(errors), nil
-}
-
-var _ OAPIClient = (*Client)(nil)
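Editor's note: every generated method removed above follows the same shape — JSON-encode the request, sign it, POST it to client.service + "/<Action>", and unmarshal the reply into a per-status-code field of the *Responses struct — and the trailing `var _ OAPIClient = (*Client)(nil)` line is a compile-time assertion that *Client still satisfies the interface deleted below. (Note also that the early `!= 200` return makes the 400/401/500 branches of each switch unreachable.) The standalone sketch below restates that request/decode pattern in plain net/http terms, collapsing the per-code branches into one error path; doPost, errorResponse, and the package name are illustrative only, not osc-go identifiers.

package oapisketch

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
)

// errorResponse stands in for the ErrorResponse type used above; the
// field layout here is illustrative only.
type errorResponse struct {
	Errors []struct {
		Type    string `json:"Type"`
		Details string `json:"Details"`
	} `json:"Errors"`
}

// doPost restates the shared shape of the removed generated methods:
// JSON-encode the request, POST it, and decode the reply into either
// the success value or an error value depending on the status code.
func doPost(c *http.Client, url string, request, okResult interface{}) error {
	body := new(bytes.Buffer)
	if err := json.NewEncoder(body).Encode(request); err != nil {
		return err
	}
	req, err := http.NewRequest("POST", url, body)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	// The removed client also signed the request at this point (client.Sign).

	resp, err := c.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	raw, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return err
	}
	if resp.StatusCode != 200 {
		apiErr := &errorResponse{}
		if uErr := json.Unmarshal(raw, apiErr); uErr != nil {
			return fmt.Errorf("status %d, unreadable error body: %s", resp.StatusCode, uErr)
		}
		pretty, mErr := json.MarshalIndent(apiErr, "", "  ")
		if mErr != nil {
			return fmt.Errorf("status %d", resp.StatusCode)
		}
		return fmt.Errorf("status %d, reason: %s", resp.StatusCode, pretty)
	}
	return json.Unmarshal(raw, okResult)
}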
diff --git a/vendor/github.com/outscale/osc-go/oapi/interface.go b/vendor/github.com/outscale/osc-go/oapi/interface.go
deleted file mode 100644
index 96aeebc5d..000000000
--- a/vendor/github.com/outscale/osc-go/oapi/interface.go
+++ /dev/null
@@ -1,1213 +0,0 @@
-package oapi
-
-type OAPIClient interface {
- POST_AcceptNetPeering(
- acceptnetpeeringrequest AcceptNetPeeringRequest,
- ) (
- response *POST_AcceptNetPeeringResponses,
- err error,
- )
-
- POST_AuthenticateAccount(
- authenticateaccountrequest AuthenticateAccountRequest,
- ) (
- response *POST_AuthenticateAccountResponses,
- err error,
- )
-
- POST_CheckSignature(
- checksignaturerequest CheckSignatureRequest,
- ) (
- response *POST_CheckSignatureResponses,
- err error,
- )
-
- POST_CopyAccount(
- copyaccountrequest CopyAccountRequest,
- ) (
- response *POST_CopyAccountResponses,
- err error,
- )
-
- POST_CreateAccount(
- createaccountrequest CreateAccountRequest,
- ) (
- response *POST_CreateAccountResponses,
- err error,
- )
-
- POST_CreateApiKey(
- createapikeyrequest CreateApiKeyRequest,
- ) (
- response *POST_CreateApiKeyResponses,
- err error,
- )
-
- POST_CreateClientGateway(
- createclientgatewayrequest CreateClientGatewayRequest,
- ) (
- response *POST_CreateClientGatewayResponses,
- err error,
- )
-
- POST_CreateDhcpOptions(
- createdhcpoptionsrequest CreateDhcpOptionsRequest,
- ) (
- response *POST_CreateDhcpOptionsResponses,
- err error,
- )
-
- POST_CreateDirectLink(
- createdirectlinkrequest CreateDirectLinkRequest,
- ) (
- response *POST_CreateDirectLinkResponses,
- err error,
- )
-
- POST_CreateDirectLinkInterface(
- createdirectlinkinterfacerequest CreateDirectLinkInterfaceRequest,
- ) (
- response *POST_CreateDirectLinkInterfaceResponses,
- err error,
- )
-
- POST_CreateImage(
- createimagerequest CreateImageRequest,
- ) (
- response *POST_CreateImageResponses,
- err error,
- )
-
- POST_CreateImageExportTask(
- createimageexporttaskrequest CreateImageExportTaskRequest,
- ) (
- response *POST_CreateImageExportTaskResponses,
- err error,
- )
-
- POST_CreateInternetService(
- createinternetservicerequest CreateInternetServiceRequest,
- ) (
- response *POST_CreateInternetServiceResponses,
- err error,
- )
-
- POST_CreateKeypair(
- createkeypairrequest CreateKeypairRequest,
- ) (
- response *POST_CreateKeypairResponses,
- err error,
- )
-
- POST_CreateListenerRule(
- createlistenerrulerequest CreateListenerRuleRequest,
- ) (
- response *POST_CreateListenerRuleResponses,
- err error,
- )
-
- POST_CreateLoadBalancer(
- createloadbalancerrequest CreateLoadBalancerRequest,
- ) (
- response *POST_CreateLoadBalancerResponses,
- err error,
- )
-
- POST_CreateLoadBalancerListeners(
- createloadbalancerlistenersrequest CreateLoadBalancerListenersRequest,
- ) (
- response *POST_CreateLoadBalancerListenersResponses,
- err error,
- )
-
- POST_CreateLoadBalancerPolicy(
- createloadbalancerpolicyrequest CreateLoadBalancerPolicyRequest,
- ) (
- response *POST_CreateLoadBalancerPolicyResponses,
- err error,
- )
-
- POST_CreateNatService(
- createnatservicerequest CreateNatServiceRequest,
- ) (
- response *POST_CreateNatServiceResponses,
- err error,
- )
-
- POST_CreateNet(
- createnetrequest CreateNetRequest,
- ) (
- response *POST_CreateNetResponses,
- err error,
- )
-
- POST_CreateNetAccessPoint(
- createnetaccesspointrequest CreateNetAccessPointRequest,
- ) (
- response *POST_CreateNetAccessPointResponses,
- err error,
- )
-
- POST_CreateNetPeering(
- createnetpeeringrequest CreateNetPeeringRequest,
- ) (
- response *POST_CreateNetPeeringResponses,
- err error,
- )
-
- POST_CreateNic(
- createnicrequest CreateNicRequest,
- ) (
- response *POST_CreateNicResponses,
- err error,
- )
-
- POST_CreatePolicy(
- createpolicyrequest CreatePolicyRequest,
- ) (
- response *POST_CreatePolicyResponses,
- err error,
- )
-
- POST_CreatePublicIp(
- createpubliciprequest CreatePublicIpRequest,
- ) (
- response *POST_CreatePublicIpResponses,
- err error,
- )
-
- POST_CreateRoute(
- createrouterequest CreateRouteRequest,
- ) (
- response *POST_CreateRouteResponses,
- err error,
- )
-
- POST_CreateRouteTable(
- createroutetablerequest CreateRouteTableRequest,
- ) (
- response *POST_CreateRouteTableResponses,
- err error,
- )
-
- POST_CreateSecurityGroup(
- createsecuritygrouprequest CreateSecurityGroupRequest,
- ) (
- response *POST_CreateSecurityGroupResponses,
- err error,
- )
-
- POST_CreateSecurityGroupRule(
- createsecuritygrouprulerequest CreateSecurityGroupRuleRequest,
- ) (
- response *POST_CreateSecurityGroupRuleResponses,
- err error,
- )
-
- POST_CreateServerCertificate(
- createservercertificaterequest CreateServerCertificateRequest,
- ) (
- response *POST_CreateServerCertificateResponses,
- err error,
- )
-
- POST_CreateSnapshot(
- createsnapshotrequest CreateSnapshotRequest,
- ) (
- response *POST_CreateSnapshotResponses,
- err error,
- )
-
- POST_CreateSnapshotExportTask(
- createsnapshotexporttaskrequest CreateSnapshotExportTaskRequest,
- ) (
- response *POST_CreateSnapshotExportTaskResponses,
- err error,
- )
-
- POST_CreateSubnet(
- createsubnetrequest CreateSubnetRequest,
- ) (
- response *POST_CreateSubnetResponses,
- err error,
- )
-
- POST_CreateTags(
- createtagsrequest CreateTagsRequest,
- ) (
- response *POST_CreateTagsResponses,
- err error,
- )
-
- POST_CreateUser(
- createuserrequest CreateUserRequest,
- ) (
- response *POST_CreateUserResponses,
- err error,
- )
-
- POST_CreateUserGroup(
- createusergrouprequest CreateUserGroupRequest,
- ) (
- response *POST_CreateUserGroupResponses,
- err error,
- )
-
- POST_CreateVirtualGateway(
- createvirtualgatewayrequest CreateVirtualGatewayRequest,
- ) (
- response *POST_CreateVirtualGatewayResponses,
- err error,
- )
-
- POST_CreateVms(
- createvmsrequest CreateVmsRequest,
- ) (
- response *POST_CreateVmsResponses,
- err error,
- )
-
- POST_CreateVolume(
- createvolumerequest CreateVolumeRequest,
- ) (
- response *POST_CreateVolumeResponses,
- err error,
- )
-
- POST_CreateVpnConnection(
- createvpnconnectionrequest CreateVpnConnectionRequest,
- ) (
- response *POST_CreateVpnConnectionResponses,
- err error,
- )
-
- POST_CreateVpnConnectionRoute(
- createvpnconnectionrouterequest CreateVpnConnectionRouteRequest,
- ) (
- response *POST_CreateVpnConnectionRouteResponses,
- err error,
- )
-
- POST_DeleteApiKey(
- deleteapikeyrequest DeleteApiKeyRequest,
- ) (
- response *POST_DeleteApiKeyResponses,
- err error,
- )
-
- POST_DeleteClientGateway(
- deleteclientgatewayrequest DeleteClientGatewayRequest,
- ) (
- response *POST_DeleteClientGatewayResponses,
- err error,
- )
-
- POST_DeleteDhcpOptions(
- deletedhcpoptionsrequest DeleteDhcpOptionsRequest,
- ) (
- response *POST_DeleteDhcpOptionsResponses,
- err error,
- )
-
- POST_DeleteDirectLink(
- deletedirectlinkrequest DeleteDirectLinkRequest,
- ) (
- response *POST_DeleteDirectLinkResponses,
- err error,
- )
-
- POST_DeleteDirectLinkInterface(
- deletedirectlinkinterfacerequest DeleteDirectLinkInterfaceRequest,
- ) (
- response *POST_DeleteDirectLinkInterfaceResponses,
- err error,
- )
-
- POST_DeleteExportTask(
- deleteexporttaskrequest DeleteExportTaskRequest,
- ) (
- response *POST_DeleteExportTaskResponses,
- err error,
- )
-
- POST_DeleteImage(
- deleteimagerequest DeleteImageRequest,
- ) (
- response *POST_DeleteImageResponses,
- err error,
- )
-
- POST_DeleteInternetService(
- deleteinternetservicerequest DeleteInternetServiceRequest,
- ) (
- response *POST_DeleteInternetServiceResponses,
- err error,
- )
-
- POST_DeleteKeypair(
- deletekeypairrequest DeleteKeypairRequest,
- ) (
- response *POST_DeleteKeypairResponses,
- err error,
- )
-
- POST_DeleteListenerRule(
- deletelistenerrulerequest DeleteListenerRuleRequest,
- ) (
- response *POST_DeleteListenerRuleResponses,
- err error,
- )
-
- POST_DeleteLoadBalancer(
- deleteloadbalancerrequest DeleteLoadBalancerRequest,
- ) (
- response *POST_DeleteLoadBalancerResponses,
- err error,
- )
-
- POST_DeleteLoadBalancerListeners(
- deleteloadbalancerlistenersrequest DeleteLoadBalancerListenersRequest,
- ) (
- response *POST_DeleteLoadBalancerListenersResponses,
- err error,
- )
-
- POST_DeleteLoadBalancerPolicy(
- deleteloadbalancerpolicyrequest DeleteLoadBalancerPolicyRequest,
- ) (
- response *POST_DeleteLoadBalancerPolicyResponses,
- err error,
- )
-
- POST_DeleteNatService(
- deletenatservicerequest DeleteNatServiceRequest,
- ) (
- response *POST_DeleteNatServiceResponses,
- err error,
- )
-
- POST_DeleteNet(
- deletenetrequest DeleteNetRequest,
- ) (
- response *POST_DeleteNetResponses,
- err error,
- )
-
- POST_DeleteNetAccessPoints(
- deletenetaccesspointsrequest DeleteNetAccessPointsRequest,
- ) (
- response *POST_DeleteNetAccessPointsResponses,
- err error,
- )
-
- POST_DeleteNetPeering(
- deletenetpeeringrequest DeleteNetPeeringRequest,
- ) (
- response *POST_DeleteNetPeeringResponses,
- err error,
- )
-
- POST_DeleteNic(
- deletenicrequest DeleteNicRequest,
- ) (
- response *POST_DeleteNicResponses,
- err error,
- )
-
- POST_DeletePolicy(
- deletepolicyrequest DeletePolicyRequest,
- ) (
- response *POST_DeletePolicyResponses,
- err error,
- )
-
- POST_DeletePublicIp(
- deletepubliciprequest DeletePublicIpRequest,
- ) (
- response *POST_DeletePublicIpResponses,
- err error,
- )
-
- POST_DeleteRoute(
- deleterouterequest DeleteRouteRequest,
- ) (
- response *POST_DeleteRouteResponses,
- err error,
- )
-
- POST_DeleteRouteTable(
- deleteroutetablerequest DeleteRouteTableRequest,
- ) (
- response *POST_DeleteRouteTableResponses,
- err error,
- )
-
- POST_DeleteSecurityGroup(
- deletesecuritygrouprequest DeleteSecurityGroupRequest,
- ) (
- response *POST_DeleteSecurityGroupResponses,
- err error,
- )
-
- POST_DeleteSecurityGroupRule(
- deletesecuritygrouprulerequest DeleteSecurityGroupRuleRequest,
- ) (
- response *POST_DeleteSecurityGroupRuleResponses,
- err error,
- )
-
- POST_DeleteServerCertificate(
- deleteservercertificaterequest DeleteServerCertificateRequest,
- ) (
- response *POST_DeleteServerCertificateResponses,
- err error,
- )
-
- POST_DeleteSnapshot(
- deletesnapshotrequest DeleteSnapshotRequest,
- ) (
- response *POST_DeleteSnapshotResponses,
- err error,
- )
-
- POST_DeleteSubnet(
- deletesubnetrequest DeleteSubnetRequest,
- ) (
- response *POST_DeleteSubnetResponses,
- err error,
- )
-
- POST_DeleteTags(
- deletetagsrequest DeleteTagsRequest,
- ) (
- response *POST_DeleteTagsResponses,
- err error,
- )
-
- POST_DeleteUser(
- deleteuserrequest DeleteUserRequest,
- ) (
- response *POST_DeleteUserResponses,
- err error,
- )
-
- POST_DeleteUserGroup(
- deleteusergrouprequest DeleteUserGroupRequest,
- ) (
- response *POST_DeleteUserGroupResponses,
- err error,
- )
-
- POST_DeleteVirtualGateway(
- deletevirtualgatewayrequest DeleteVirtualGatewayRequest,
- ) (
- response *POST_DeleteVirtualGatewayResponses,
- err error,
- )
-
- POST_DeleteVms(
- deletevmsrequest DeleteVmsRequest,
- ) (
- response *POST_DeleteVmsResponses,
- err error,
- )
-
- POST_DeleteVolume(
- deletevolumerequest DeleteVolumeRequest,
- ) (
- response *POST_DeleteVolumeResponses,
- err error,
- )
-
- POST_DeleteVpnConnection(
- deletevpnconnectionrequest DeleteVpnConnectionRequest,
- ) (
- response *POST_DeleteVpnConnectionResponses,
- err error,
- )
-
- POST_DeleteVpnConnectionRoute(
- deletevpnconnectionrouterequest DeleteVpnConnectionRouteRequest,
- ) (
- response *POST_DeleteVpnConnectionRouteResponses,
- err error,
- )
-
- POST_DeregisterUserInUserGroup(
- deregisteruserinusergrouprequest DeregisterUserInUserGroupRequest,
- ) (
- response *POST_DeregisterUserInUserGroupResponses,
- err error,
- )
-
- POST_DeregisterVmsInLoadBalancer(
- deregistervmsinloadbalancerrequest DeregisterVmsInLoadBalancerRequest,
- ) (
- response *POST_DeregisterVmsInLoadBalancerResponses,
- err error,
- )
-
- POST_LinkInternetService(
- linkinternetservicerequest LinkInternetServiceRequest,
- ) (
- response *POST_LinkInternetServiceResponses,
- err error,
- )
-
- POST_LinkNic(
- linknicrequest LinkNicRequest,
- ) (
- response *POST_LinkNicResponses,
- err error,
- )
-
- POST_LinkPolicy(
- linkpolicyrequest LinkPolicyRequest,
- ) (
- response *POST_LinkPolicyResponses,
- err error,
- )
-
- POST_LinkPrivateIps(
- linkprivateipsrequest LinkPrivateIpsRequest,
- ) (
- response *POST_LinkPrivateIpsResponses,
- err error,
- )
-
- POST_LinkPublicIp(
- linkpubliciprequest LinkPublicIpRequest,
- ) (
- response *POST_LinkPublicIpResponses,
- err error,
- )
-
- POST_LinkRouteTable(
- linkroutetablerequest LinkRouteTableRequest,
- ) (
- response *POST_LinkRouteTableResponses,
- err error,
- )
-
- POST_LinkVirtualGateway(
- linkvirtualgatewayrequest LinkVirtualGatewayRequest,
- ) (
- response *POST_LinkVirtualGatewayResponses,
- err error,
- )
-
- POST_LinkVolume(
- linkvolumerequest LinkVolumeRequest,
- ) (
- response *POST_LinkVolumeResponses,
- err error,
- )
-
- POST_PurchaseReservedVmsOffer(
- purchasereservedvmsofferrequest PurchaseReservedVmsOfferRequest,
- ) (
- response *POST_PurchaseReservedVmsOfferResponses,
- err error,
- )
-
- POST_ReadAccount(
- readaccountrequest ReadAccountRequest,
- ) (
- response *POST_ReadAccountResponses,
- err error,
- )
-
- POST_ReadAccountConsumption(
- readaccountconsumptionrequest ReadAccountConsumptionRequest,
- ) (
- response *POST_ReadAccountConsumptionResponses,
- err error,
- )
-
- POST_ReadAdminPassword(
- readadminpasswordrequest ReadAdminPasswordRequest,
- ) (
- response *POST_ReadAdminPasswordResponses,
- err error,
- )
-
- POST_ReadApiKeys(
- readapikeysrequest ReadApiKeysRequest,
- ) (
- response *POST_ReadApiKeysResponses,
- err error,
- )
-
- POST_ReadApiLogs(
- readapilogsrequest ReadApiLogsRequest,
- ) (
- response *POST_ReadApiLogsResponses,
- err error,
- )
-
- POST_ReadBillableDigest(
- readbillabledigestrequest ReadBillableDigestRequest,
- ) (
- response *POST_ReadBillableDigestResponses,
- err error,
- )
-
- POST_ReadCatalog(
- readcatalogrequest ReadCatalogRequest,
- ) (
- response *POST_ReadCatalogResponses,
- err error,
- )
-
- POST_ReadClientGateways(
- readclientgatewaysrequest ReadClientGatewaysRequest,
- ) (
- response *POST_ReadClientGatewaysResponses,
- err error,
- )
-
- POST_ReadConsoleOutput(
- readconsoleoutputrequest ReadConsoleOutputRequest,
- ) (
- response *POST_ReadConsoleOutputResponses,
- err error,
- )
-
- POST_ReadDhcpOptions(
- readdhcpoptionsrequest ReadDhcpOptionsRequest,
- ) (
- response *POST_ReadDhcpOptionsResponses,
- err error,
- )
-
- POST_ReadDirectLinkInterfaces(
- readdirectlinkinterfacesrequest ReadDirectLinkInterfacesRequest,
- ) (
- response *POST_ReadDirectLinkInterfacesResponses,
- err error,
- )
-
- POST_ReadDirectLinks(
- readdirectlinksrequest ReadDirectLinksRequest,
- ) (
- response *POST_ReadDirectLinksResponses,
- err error,
- )
-
- POST_ReadImageExportTasks(
- readimageexporttasksrequest ReadImageExportTasksRequest,
- ) (
- response *POST_ReadImageExportTasksResponses,
- err error,
- )
-
- POST_ReadImages(
- readimagesrequest ReadImagesRequest,
- ) (
- response *POST_ReadImagesResponses,
- err error,
- )
-
- POST_ReadInternetServices(
- readinternetservicesrequest ReadInternetServicesRequest,
- ) (
- response *POST_ReadInternetServicesResponses,
- err error,
- )
-
- POST_ReadKeypairs(
- readkeypairsrequest ReadKeypairsRequest,
- ) (
- response *POST_ReadKeypairsResponses,
- err error,
- )
-
- POST_ReadListenerRules(
- readlistenerrulesrequest ReadListenerRulesRequest,
- ) (
- response *POST_ReadListenerRulesResponses,
- err error,
- )
-
- POST_ReadLoadBalancers(
- readloadbalancersrequest ReadLoadBalancersRequest,
- ) (
- response *POST_ReadLoadBalancersResponses,
- err error,
- )
-
- POST_ReadLocations(
- readlocationsrequest ReadLocationsRequest,
- ) (
- response *POST_ReadLocationsResponses,
- err error,
- )
-
- POST_ReadNatServices(
- readnatservicesrequest ReadNatServicesRequest,
- ) (
- response *POST_ReadNatServicesResponses,
- err error,
- )
-
- POST_ReadNetAccessPointServices(
- readnetaccesspointservicesrequest ReadNetAccessPointServicesRequest,
- ) (
- response *POST_ReadNetAccessPointServicesResponses,
- err error,
- )
-
- POST_ReadNetAccessPoints(
- readnetaccesspointsrequest ReadNetAccessPointsRequest,
- ) (
- response *POST_ReadNetAccessPointsResponses,
- err error,
- )
-
- POST_ReadNetPeerings(
- readnetpeeringsrequest ReadNetPeeringsRequest,
- ) (
- response *POST_ReadNetPeeringsResponses,
- err error,
- )
-
- POST_ReadNets(
- readnetsrequest ReadNetsRequest,
- ) (
- response *POST_ReadNetsResponses,
- err error,
- )
-
- POST_ReadNics(
- readnicsrequest ReadNicsRequest,
- ) (
- response *POST_ReadNicsResponses,
- err error,
- )
-
- POST_ReadPolicies(
- readpoliciesrequest ReadPoliciesRequest,
- ) (
- response *POST_ReadPoliciesResponses,
- err error,
- )
-
- POST_ReadPrefixLists(
- readprefixlistsrequest ReadPrefixListsRequest,
- ) (
- response *POST_ReadPrefixListsResponses,
- err error,
- )
-
- POST_ReadProductTypes(
- readproducttypesrequest ReadProductTypesRequest,
- ) (
- response *POST_ReadProductTypesResponses,
- err error,
- )
-
- POST_ReadPublicCatalog(
- readpubliccatalogrequest ReadPublicCatalogRequest,
- ) (
- response *POST_ReadPublicCatalogResponses,
- err error,
- )
-
- POST_ReadPublicIpRanges(
- readpubliciprangesrequest ReadPublicIpRangesRequest,
- ) (
- response *POST_ReadPublicIpRangesResponses,
- err error,
- )
-
- POST_ReadPublicIps(
- readpublicipsrequest ReadPublicIpsRequest,
- ) (
- response *POST_ReadPublicIpsResponses,
- err error,
- )
-
- POST_ReadQuotas(
- readquotasrequest ReadQuotasRequest,
- ) (
- response *POST_ReadQuotasResponses,
- err error,
- )
-
- POST_ReadRegionConfig(
- readregionconfigrequest ReadRegionConfigRequest,
- ) (
- response *POST_ReadRegionConfigResponses,
- err error,
- )
-
- POST_ReadRegions(
- readregionsrequest ReadRegionsRequest,
- ) (
- response *POST_ReadRegionsResponses,
- err error,
- )
-
- POST_ReadReservedVmOffers(
- readreservedvmoffersrequest ReadReservedVmOffersRequest,
- ) (
- response *POST_ReadReservedVmOffersResponses,
- err error,
- )
-
- POST_ReadReservedVms(
- readreservedvmsrequest ReadReservedVmsRequest,
- ) (
- response *POST_ReadReservedVmsResponses,
- err error,
- )
-
- POST_ReadRouteTables(
- readroutetablesrequest ReadRouteTablesRequest,
- ) (
- response *POST_ReadRouteTablesResponses,
- err error,
- )
-
- POST_ReadSecurityGroups(
- readsecuritygroupsrequest ReadSecurityGroupsRequest,
- ) (
- response *POST_ReadSecurityGroupsResponses,
- err error,
- )
-
- POST_ReadServerCertificates(
- readservercertificatesrequest ReadServerCertificatesRequest,
- ) (
- response *POST_ReadServerCertificatesResponses,
- err error,
- )
-
- POST_ReadSnapshotExportTasks(
- readsnapshotexporttasksrequest ReadSnapshotExportTasksRequest,
- ) (
- response *POST_ReadSnapshotExportTasksResponses,
- err error,
- )
-
- POST_ReadSnapshots(
- readsnapshotsrequest ReadSnapshotsRequest,
- ) (
- response *POST_ReadSnapshotsResponses,
- err error,
- )
-
- POST_ReadSubnets(
- readsubnetsrequest ReadSubnetsRequest,
- ) (
- response *POST_ReadSubnetsResponses,
- err error,
- )
-
- POST_ReadSubregions(
- readsubregionsrequest ReadSubregionsRequest,
- ) (
- response *POST_ReadSubregionsResponses,
- err error,
- )
-
- POST_ReadTags(
- readtagsrequest ReadTagsRequest,
- ) (
- response *POST_ReadTagsResponses,
- err error,
- )
-
- POST_ReadUserGroups(
- readusergroupsrequest ReadUserGroupsRequest,
- ) (
- response *POST_ReadUserGroupsResponses,
- err error,
- )
-
- POST_ReadUsers(
- readusersrequest ReadUsersRequest,
- ) (
- response *POST_ReadUsersResponses,
- err error,
- )
-
- POST_ReadVirtualGateways(
- readvirtualgatewaysrequest ReadVirtualGatewaysRequest,
- ) (
- response *POST_ReadVirtualGatewaysResponses,
- err error,
-	)
-
-	POST_ReadVmTypes(
- readvmtypesrequest ReadVmTypesRequest,
- ) (
- response *POST_ReadVmTypesResponses,
- err error,
- )
-
- POST_ReadVms(
- readvmsrequest ReadVmsRequest,
- ) (
- response *POST_ReadVmsResponses,
- err error,
- )
-
- POST_ReadVmsHealth(
- readvmshealthrequest ReadVmsHealthRequest,
- ) (
- response *POST_ReadVmsHealthResponses,
- err error,
- )
-
- POST_ReadVmsState(
- readvmsstaterequest ReadVmsStateRequest,
- ) (
- response *POST_ReadVmsStateResponses,
- err error,
- )
-
- POST_ReadVolumes(
- readvolumesrequest ReadVolumesRequest,
- ) (
- response *POST_ReadVolumesResponses,
- err error,
- )
-
- POST_ReadVpnConnections(
- readvpnconnectionsrequest ReadVpnConnectionsRequest,
- ) (
- response *POST_ReadVpnConnectionsResponses,
- err error,
- )
-
- POST_RebootVms(
- rebootvmsrequest RebootVmsRequest,
- ) (
- response *POST_RebootVmsResponses,
- err error,
- )
-
- POST_RegisterUserInUserGroup(
- registeruserinusergrouprequest RegisterUserInUserGroupRequest,
- ) (
- response *POST_RegisterUserInUserGroupResponses,
- err error,
- )
-
- POST_RegisterVmsInLoadBalancer(
- registervmsinloadbalancerrequest RegisterVmsInLoadBalancerRequest,
- ) (
- response *POST_RegisterVmsInLoadBalancerResponses,
- err error,
- )
-
- POST_RejectNetPeering(
- rejectnetpeeringrequest RejectNetPeeringRequest,
- ) (
- response *POST_RejectNetPeeringResponses,
- err error,
- )
-
- POST_ResetAccountPassword(
- resetaccountpasswordrequest ResetAccountPasswordRequest,
- ) (
- response *POST_ResetAccountPasswordResponses,
- err error,
- )
-
- POST_SendResetPasswordEmail(
- sendresetpasswordemailrequest SendResetPasswordEmailRequest,
- ) (
- response *POST_SendResetPasswordEmailResponses,
- err error,
- )
-
- POST_StartVms(
- startvmsrequest StartVmsRequest,
- ) (
- response *POST_StartVmsResponses,
- err error,
- )
-
- POST_StopVms(
- stopvmsrequest StopVmsRequest,
- ) (
- response *POST_StopVmsResponses,
- err error,
- )
-
- POST_UnlinkInternetService(
- unlinkinternetservicerequest UnlinkInternetServiceRequest,
- ) (
- response *POST_UnlinkInternetServiceResponses,
- err error,
- )
-
- POST_UnlinkNic(
- unlinknicrequest UnlinkNicRequest,
- ) (
- response *POST_UnlinkNicResponses,
- err error,
- )
-
- POST_UnlinkPolicy(
- unlinkpolicyrequest UnlinkPolicyRequest,
- ) (
- response *POST_UnlinkPolicyResponses,
- err error,
- )
-
- POST_UnlinkPrivateIps(
- unlinkprivateipsrequest UnlinkPrivateIpsRequest,
- ) (
- response *POST_UnlinkPrivateIpsResponses,
- err error,
- )
-
- POST_UnlinkPublicIp(
- unlinkpubliciprequest UnlinkPublicIpRequest,
- ) (
- response *POST_UnlinkPublicIpResponses,
- err error,
- )
-
- POST_UnlinkRouteTable(
- unlinkroutetablerequest UnlinkRouteTableRequest,
- ) (
- response *POST_UnlinkRouteTableResponses,
- err error,
- )
-
- POST_UnlinkVirtualGateway(
- unlinkvirtualgatewayrequest UnlinkVirtualGatewayRequest,
- ) (
- response *POST_UnlinkVirtualGatewayResponses,
- err error,
- )
-
- POST_UnlinkVolume(
- unlinkvolumerequest UnlinkVolumeRequest,
- ) (
- response *POST_UnlinkVolumeResponses,
- err error,
- )
-
- POST_UpdateAccount(
- updateaccountrequest UpdateAccountRequest,
- ) (
- response *POST_UpdateAccountResponses,
- err error,
- )
-
- POST_UpdateApiKey(
- updateapikeyrequest UpdateApiKeyRequest,
- ) (
- response *POST_UpdateApiKeyResponses,
- err error,
- )
-
- POST_UpdateHealthCheck(
- updatehealthcheckrequest UpdateHealthCheckRequest,
- ) (
- response *POST_UpdateHealthCheckResponses,
- err error,
- )
-
- POST_UpdateImage(
- updateimagerequest UpdateImageRequest,
- ) (
- response *POST_UpdateImageResponses,
- err error,
- )
-
- POST_UpdateKeypair(
- updatekeypairrequest UpdateKeypairRequest,
- ) (
- response *POST_UpdateKeypairResponses,
- err error,
- )
-
- POST_UpdateListenerRule(
- updatelistenerrulerequest UpdateListenerRuleRequest,
- ) (
- response *POST_UpdateListenerRuleResponses,
- err error,
- )
-
- POST_UpdateLoadBalancer(
- updateloadbalancerrequest UpdateLoadBalancerRequest,
- ) (
- response *POST_UpdateLoadBalancerResponses,
- err error,
- )
-
- POST_UpdateNet(
- updatenetrequest UpdateNetRequest,
- ) (
- response *POST_UpdateNetResponses,
- err error,
- )
-
- POST_UpdateNetAccessPoint(
- updatenetaccesspointrequest UpdateNetAccessPointRequest,
- ) (
- response *POST_UpdateNetAccessPointResponses,
- err error,
- )
-
- POST_UpdateNic(
- updatenicrequest UpdateNicRequest,
- ) (
- response *POST_UpdateNicResponses,
- err error,
- )
-
- POST_UpdateRoute(
- updaterouterequest UpdateRouteRequest,
- ) (
- response *POST_UpdateRouteResponses,
- err error,
- )
-
- POST_UpdateRoutePropagation(
- updateroutepropagationrequest UpdateRoutePropagationRequest,
- ) (
- response *POST_UpdateRoutePropagationResponses,
- err error,
- )
-
- POST_UpdateServerCertificate(
- updateservercertificaterequest UpdateServerCertificateRequest,
- ) (
- response *POST_UpdateServerCertificateResponses,
- err error,
- )
-
- POST_UpdateSnapshot(
- updatesnapshotrequest UpdateSnapshotRequest,
- ) (
- response *POST_UpdateSnapshotResponses,
- err error,
- )
-
- POST_UpdateUser(
- updateuserrequest UpdateUserRequest,
- ) (
- response *POST_UpdateUserResponses,
- err error,
- )
-
- POST_UpdateUserGroup(
- updateusergrouprequest UpdateUserGroupRequest,
- ) (
- response *POST_UpdateUserGroupResponses,
- err error,
- )
-
- POST_UpdateVm(
- updatevmrequest UpdateVmRequest,
- ) (
- response *POST_UpdateVmResponses,
- err error,
- )
-}
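Editor's note: the removed interface declares one method per API call, all with the same `(request) (*POST_...Responses, error)` shape, which is what let provider code depend on OAPIClient rather than on the concrete Client. A common way to stub such a wide interface in tests is to embed it in a struct and override only the calls under test; the sketch below assumes that pattern (fakeClient and the package name are hypothetical, while the POST_UpdateVm signature and the OK field are taken from the code removed above).

package example

import "github.com/outscale/osc-go/oapi"

// fakeClient stubs the wide OAPIClient interface by embedding it; only
// the method overridden below is safe to call, any other call on the
// nil embedded interface would panic.
type fakeClient struct {
	oapi.OAPIClient
}

func (f *fakeClient) POST_UpdateVm(
	updatevmrequest oapi.UpdateVmRequest,
) (
	response *oapi.POST_UpdateVmResponses,
	err error,
) {
	// Return a canned success instead of calling the API.
	return &oapi.POST_UpdateVmResponses{OK: &oapi.UpdateVmResponse{}}, nil
}

// The fake still satisfies the interface, mirroring the compile-time
// assertion the removed client used for itself.
var _ oapi.OAPIClient = (*fakeClient)(nil)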
diff --git a/vendor/github.com/outscale/osc-go/oapi/provider.go b/vendor/github.com/outscale/osc-go/oapi/provider.go
deleted file mode 100644
index 0ae1b4d3b..000000000
--- a/vendor/github.com/outscale/osc-go/oapi/provider.go
+++ /dev/null
@@ -1,527 +0,0 @@
-// GENERATED FILE: DO NOT EDIT!
-
-package oapi
-
-// To create a server, first write a class that implements this interface.
-// Then pass an instance of it to Initialize().
-type Provider interface {
-
- //
- POST_AcceptNetPeering(parameters *POST_AcceptNetPeeringParameters, responses *POST_AcceptNetPeeringResponses) (err error)
-
- //
- POST_AuthenticateAccount(parameters *POST_AuthenticateAccountParameters, responses *POST_AuthenticateAccountResponses) (err error)
-
- //
- POST_CheckSignature(parameters *POST_CheckSignatureParameters, responses *POST_CheckSignatureResponses) (err error)
-
- //
- POST_CopyAccount(parameters *POST_CopyAccountParameters, responses *POST_CopyAccountResponses) (err error)
-
- //
- POST_CreateAccount(parameters *POST_CreateAccountParameters, responses *POST_CreateAccountResponses) (err error)
-
- //
- POST_CreateApiKey(parameters *POST_CreateApiKeyParameters, responses *POST_CreateApiKeyResponses) (err error)
-
- //
- POST_CreateClientGateway(parameters *POST_CreateClientGatewayParameters, responses *POST_CreateClientGatewayResponses) (err error)
-
- //
- POST_CreateDhcpOptions(parameters *POST_CreateDhcpOptionsParameters, responses *POST_CreateDhcpOptionsResponses) (err error)
-
- //
- POST_CreateDirectLink(parameters *POST_CreateDirectLinkParameters, responses *POST_CreateDirectLinkResponses) (err error)
-
- //
- POST_CreateDirectLinkInterface(parameters *POST_CreateDirectLinkInterfaceParameters, responses *POST_CreateDirectLinkInterfaceResponses) (err error)
-
- //
- POST_CreateImage(parameters *POST_CreateImageParameters, responses *POST_CreateImageResponses) (err error)
-
- //
- POST_CreateImageExportTask(parameters *POST_CreateImageExportTaskParameters, responses *POST_CreateImageExportTaskResponses) (err error)
-
- //
- POST_CreateInternetService(parameters *POST_CreateInternetServiceParameters, responses *POST_CreateInternetServiceResponses) (err error)
-
- //
- POST_CreateKeypair(parameters *POST_CreateKeypairParameters, responses *POST_CreateKeypairResponses) (err error)
-
- //
- POST_CreateListenerRule(parameters *POST_CreateListenerRuleParameters, responses *POST_CreateListenerRuleResponses) (err error)
-
- //
- POST_CreateLoadBalancer(parameters *POST_CreateLoadBalancerParameters, responses *POST_CreateLoadBalancerResponses) (err error)
-
- //
- POST_CreateLoadBalancerListeners(parameters *POST_CreateLoadBalancerListenersParameters, responses *POST_CreateLoadBalancerListenersResponses) (err error)
-
- //
- POST_CreateLoadBalancerPolicy(parameters *POST_CreateLoadBalancerPolicyParameters, responses *POST_CreateLoadBalancerPolicyResponses) (err error)
-
- //
- POST_CreateNatService(parameters *POST_CreateNatServiceParameters, responses *POST_CreateNatServiceResponses) (err error)
-
- //
- POST_CreateNet(parameters *POST_CreateNetParameters, responses *POST_CreateNetResponses) (err error)
-
- //
- POST_CreateNetAccessPoint(parameters *POST_CreateNetAccessPointParameters, responses *POST_CreateNetAccessPointResponses) (err error)
-
- //
- POST_CreateNetPeering(parameters *POST_CreateNetPeeringParameters, responses *POST_CreateNetPeeringResponses) (err error)
-
- //
- POST_CreateNic(parameters *POST_CreateNicParameters, responses *POST_CreateNicResponses) (err error)
-
- //
- POST_CreatePolicy(parameters *POST_CreatePolicyParameters, responses *POST_CreatePolicyResponses) (err error)
-
- //
- POST_CreatePublicIp(parameters *POST_CreatePublicIpParameters, responses *POST_CreatePublicIpResponses) (err error)
-
- //
- POST_CreateRoute(parameters *POST_CreateRouteParameters, responses *POST_CreateRouteResponses) (err error)
-
- //
- POST_CreateRouteTable(parameters *POST_CreateRouteTableParameters, responses *POST_CreateRouteTableResponses) (err error)
-
- //
- POST_CreateSecurityGroup(parameters *POST_CreateSecurityGroupParameters, responses *POST_CreateSecurityGroupResponses) (err error)
-
- //
- POST_CreateSecurityGroupRule(parameters *POST_CreateSecurityGroupRuleParameters, responses *POST_CreateSecurityGroupRuleResponses) (err error)
-
- //
- POST_CreateServerCertificate(parameters *POST_CreateServerCertificateParameters, responses *POST_CreateServerCertificateResponses) (err error)
-
- //
- POST_CreateSnapshot(parameters *POST_CreateSnapshotParameters, responses *POST_CreateSnapshotResponses) (err error)
-
- //
- POST_CreateSnapshotExportTask(parameters *POST_CreateSnapshotExportTaskParameters, responses *POST_CreateSnapshotExportTaskResponses) (err error)
-
- //
- POST_CreateSubnet(parameters *POST_CreateSubnetParameters, responses *POST_CreateSubnetResponses) (err error)
-
- //
- POST_CreateTags(parameters *POST_CreateTagsParameters, responses *POST_CreateTagsResponses) (err error)
-
- //
- POST_CreateUser(parameters *POST_CreateUserParameters, responses *POST_CreateUserResponses) (err error)
-
- //
- POST_CreateUserGroup(parameters *POST_CreateUserGroupParameters, responses *POST_CreateUserGroupResponses) (err error)
-
- //
- POST_CreateVirtualGateway(parameters *POST_CreateVirtualGatewayParameters, responses *POST_CreateVirtualGatewayResponses) (err error)
-
- //
- POST_CreateVms(parameters *POST_CreateVmsParameters, responses *POST_CreateVmsResponses) (err error)
-
- //
- POST_CreateVolume(parameters *POST_CreateVolumeParameters, responses *POST_CreateVolumeResponses) (err error)
-
- //
- POST_CreateVpnConnection(parameters *POST_CreateVpnConnectionParameters, responses *POST_CreateVpnConnectionResponses) (err error)
-
- //
- POST_CreateVpnConnectionRoute(parameters *POST_CreateVpnConnectionRouteParameters, responses *POST_CreateVpnConnectionRouteResponses) (err error)
-
- //
- POST_DeleteApiKey(parameters *POST_DeleteApiKeyParameters, responses *POST_DeleteApiKeyResponses) (err error)
-
- //
- POST_DeleteClientGateway(parameters *POST_DeleteClientGatewayParameters, responses *POST_DeleteClientGatewayResponses) (err error)
-
- //
- POST_DeleteDhcpOptions(parameters *POST_DeleteDhcpOptionsParameters, responses *POST_DeleteDhcpOptionsResponses) (err error)
-
- //
- POST_DeleteDirectLink(parameters *POST_DeleteDirectLinkParameters, responses *POST_DeleteDirectLinkResponses) (err error)
-
- //
- POST_DeleteDirectLinkInterface(parameters *POST_DeleteDirectLinkInterfaceParameters, responses *POST_DeleteDirectLinkInterfaceResponses) (err error)
-
- //
- POST_DeleteExportTask(parameters *POST_DeleteExportTaskParameters, responses *POST_DeleteExportTaskResponses) (err error)
-
- //
- POST_DeleteImage(parameters *POST_DeleteImageParameters, responses *POST_DeleteImageResponses) (err error)
-
- //
- POST_DeleteInternetService(parameters *POST_DeleteInternetServiceParameters, responses *POST_DeleteInternetServiceResponses) (err error)
-
- //
- POST_DeleteKeypair(parameters *POST_DeleteKeypairParameters, responses *POST_DeleteKeypairResponses) (err error)
-
- //
- POST_DeleteListenerRule(parameters *POST_DeleteListenerRuleParameters, responses *POST_DeleteListenerRuleResponses) (err error)
-
- //
- POST_DeleteLoadBalancer(parameters *POST_DeleteLoadBalancerParameters, responses *POST_DeleteLoadBalancerResponses) (err error)
-
- //
- POST_DeleteLoadBalancerListeners(parameters *POST_DeleteLoadBalancerListenersParameters, responses *POST_DeleteLoadBalancerListenersResponses) (err error)
-
- //
- POST_DeleteLoadBalancerPolicy(parameters *POST_DeleteLoadBalancerPolicyParameters, responses *POST_DeleteLoadBalancerPolicyResponses) (err error)
-
- //
- POST_DeleteNatService(parameters *POST_DeleteNatServiceParameters, responses *POST_DeleteNatServiceResponses) (err error)
-
- //
- POST_DeleteNet(parameters *POST_DeleteNetParameters, responses *POST_DeleteNetResponses) (err error)
-
- //
- POST_DeleteNetAccessPoints(parameters *POST_DeleteNetAccessPointsParameters, responses *POST_DeleteNetAccessPointsResponses) (err error)
-
- //
- POST_DeleteNetPeering(parameters *POST_DeleteNetPeeringParameters, responses *POST_DeleteNetPeeringResponses) (err error)
-
- //
- POST_DeleteNic(parameters *POST_DeleteNicParameters, responses *POST_DeleteNicResponses) (err error)
-
- //
- POST_DeletePolicy(parameters *POST_DeletePolicyParameters, responses *POST_DeletePolicyResponses) (err error)
-
- //
- POST_DeletePublicIp(parameters *POST_DeletePublicIpParameters, responses *POST_DeletePublicIpResponses) (err error)
-
- //
- POST_DeleteRoute(parameters *POST_DeleteRouteParameters, responses *POST_DeleteRouteResponses) (err error)
-
- //
- POST_DeleteRouteTable(parameters *POST_DeleteRouteTableParameters, responses *POST_DeleteRouteTableResponses) (err error)
-
- //
- POST_DeleteSecurityGroup(parameters *POST_DeleteSecurityGroupParameters, responses *POST_DeleteSecurityGroupResponses) (err error)
-
- //
- POST_DeleteSecurityGroupRule(parameters *POST_DeleteSecurityGroupRuleParameters, responses *POST_DeleteSecurityGroupRuleResponses) (err error)
-
- //
- POST_DeleteServerCertificate(parameters *POST_DeleteServerCertificateParameters, responses *POST_DeleteServerCertificateResponses) (err error)
-
- //
- POST_DeleteSnapshot(parameters *POST_DeleteSnapshotParameters, responses *POST_DeleteSnapshotResponses) (err error)
-
- //
- POST_DeleteSubnet(parameters *POST_DeleteSubnetParameters, responses *POST_DeleteSubnetResponses) (err error)
-
- //
- POST_DeleteTags(parameters *POST_DeleteTagsParameters, responses *POST_DeleteTagsResponses) (err error)
-
- //
- POST_DeleteUser(parameters *POST_DeleteUserParameters, responses *POST_DeleteUserResponses) (err error)
-
- //
- POST_DeleteUserGroup(parameters *POST_DeleteUserGroupParameters, responses *POST_DeleteUserGroupResponses) (err error)
-
- //
- POST_DeleteVirtualGateway(parameters *POST_DeleteVirtualGatewayParameters, responses *POST_DeleteVirtualGatewayResponses) (err error)
-
- //
- POST_DeleteVms(parameters *POST_DeleteVmsParameters, responses *POST_DeleteVmsResponses) (err error)
-
- //
- POST_DeleteVolume(parameters *POST_DeleteVolumeParameters, responses *POST_DeleteVolumeResponses) (err error)
-
- //
- POST_DeleteVpnConnection(parameters *POST_DeleteVpnConnectionParameters, responses *POST_DeleteVpnConnectionResponses) (err error)
-
- //
- POST_DeleteVpnConnectionRoute(parameters *POST_DeleteVpnConnectionRouteParameters, responses *POST_DeleteVpnConnectionRouteResponses) (err error)
-
- //
- POST_DeregisterUserInUserGroup(parameters *POST_DeregisterUserInUserGroupParameters, responses *POST_DeregisterUserInUserGroupResponses) (err error)
-
- //
- POST_DeregisterVmsInLoadBalancer(parameters *POST_DeregisterVmsInLoadBalancerParameters, responses *POST_DeregisterVmsInLoadBalancerResponses) (err error)
-
- //
- POST_LinkInternetService(parameters *POST_LinkInternetServiceParameters, responses *POST_LinkInternetServiceResponses) (err error)
-
- //
- POST_LinkNic(parameters *POST_LinkNicParameters, responses *POST_LinkNicResponses) (err error)
-
- //
- POST_LinkPolicy(parameters *POST_LinkPolicyParameters, responses *POST_LinkPolicyResponses) (err error)
-
- //
- POST_LinkPrivateIps(parameters *POST_LinkPrivateIpsParameters, responses *POST_LinkPrivateIpsResponses) (err error)
-
- //
- POST_LinkPublicIp(parameters *POST_LinkPublicIpParameters, responses *POST_LinkPublicIpResponses) (err error)
-
- //
- POST_LinkRouteTable(parameters *POST_LinkRouteTableParameters, responses *POST_LinkRouteTableResponses) (err error)
-
- //
- POST_LinkVirtualGateway(parameters *POST_LinkVirtualGatewayParameters, responses *POST_LinkVirtualGatewayResponses) (err error)
-
- //
- POST_LinkVolume(parameters *POST_LinkVolumeParameters, responses *POST_LinkVolumeResponses) (err error)
-
- //
- POST_PurchaseReservedVmsOffer(parameters *POST_PurchaseReservedVmsOfferParameters, responses *POST_PurchaseReservedVmsOfferResponses) (err error)
-
- //
- POST_ReadAccount(parameters *POST_ReadAccountParameters, responses *POST_ReadAccountResponses) (err error)
-
- //
- POST_ReadAccountConsumption(parameters *POST_ReadAccountConsumptionParameters, responses *POST_ReadAccountConsumptionResponses) (err error)
-
- //
- POST_ReadAdminPassword(parameters *POST_ReadAdminPasswordParameters, responses *POST_ReadAdminPasswordResponses) (err error)
-
- //
- POST_ReadApiKeys(parameters *POST_ReadApiKeysParameters, responses *POST_ReadApiKeysResponses) (err error)
-
- //
- POST_ReadApiLogs(parameters *POST_ReadApiLogsParameters, responses *POST_ReadApiLogsResponses) (err error)
-
- //
- POST_ReadBillableDigest(parameters *POST_ReadBillableDigestParameters, responses *POST_ReadBillableDigestResponses) (err error)
-
- //
- POST_ReadCatalog(parameters *POST_ReadCatalogParameters, responses *POST_ReadCatalogResponses) (err error)
-
- //
- POST_ReadClientGateways(parameters *POST_ReadClientGatewaysParameters, responses *POST_ReadClientGatewaysResponses) (err error)
-
- //
- POST_ReadConsoleOutput(parameters *POST_ReadConsoleOutputParameters, responses *POST_ReadConsoleOutputResponses) (err error)
-
- //
- POST_ReadDhcpOptions(parameters *POST_ReadDhcpOptionsParameters, responses *POST_ReadDhcpOptionsResponses) (err error)
-
- //
- POST_ReadDirectLinkInterfaces(parameters *POST_ReadDirectLinkInterfacesParameters, responses *POST_ReadDirectLinkInterfacesResponses) (err error)
-
- //
- POST_ReadDirectLinks(parameters *POST_ReadDirectLinksParameters, responses *POST_ReadDirectLinksResponses) (err error)
-
- //
- POST_ReadImageExportTasks(parameters *POST_ReadImageExportTasksParameters, responses *POST_ReadImageExportTasksResponses) (err error)
-
- //
- POST_ReadImages(parameters *POST_ReadImagesParameters, responses *POST_ReadImagesResponses) (err error)
-
- //
- POST_ReadInternetServices(parameters *POST_ReadInternetServicesParameters, responses *POST_ReadInternetServicesResponses) (err error)
-
- //
- POST_ReadKeypairs(parameters *POST_ReadKeypairsParameters, responses *POST_ReadKeypairsResponses) (err error)
-
- //
- POST_ReadListenerRules(parameters *POST_ReadListenerRulesParameters, responses *POST_ReadListenerRulesResponses) (err error)
-
- //
- POST_ReadLoadBalancers(parameters *POST_ReadLoadBalancersParameters, responses *POST_ReadLoadBalancersResponses) (err error)
-
- //
- POST_ReadLocations(parameters *POST_ReadLocationsParameters, responses *POST_ReadLocationsResponses) (err error)
-
- //
- POST_ReadNatServices(parameters *POST_ReadNatServicesParameters, responses *POST_ReadNatServicesResponses) (err error)
-
- //
- POST_ReadNetAccessPointServices(parameters *POST_ReadNetAccessPointServicesParameters, responses *POST_ReadNetAccessPointServicesResponses) (err error)
-
- //
- POST_ReadNetAccessPoints(parameters *POST_ReadNetAccessPointsParameters, responses *POST_ReadNetAccessPointsResponses) (err error)
-
- //
- POST_ReadNetPeerings(parameters *POST_ReadNetPeeringsParameters, responses *POST_ReadNetPeeringsResponses) (err error)
-
- //
- POST_ReadNets(parameters *POST_ReadNetsParameters, responses *POST_ReadNetsResponses) (err error)
-
- //
- POST_ReadNics(parameters *POST_ReadNicsParameters, responses *POST_ReadNicsResponses) (err error)
-
- //
- POST_ReadPolicies(parameters *POST_ReadPoliciesParameters, responses *POST_ReadPoliciesResponses) (err error)
-
- //
- POST_ReadPrefixLists(parameters *POST_ReadPrefixListsParameters, responses *POST_ReadPrefixListsResponses) (err error)
-
- //
- POST_ReadProductTypes(parameters *POST_ReadProductTypesParameters, responses *POST_ReadProductTypesResponses) (err error)
-
- //
- POST_ReadPublicCatalog(parameters *POST_ReadPublicCatalogParameters, responses *POST_ReadPublicCatalogResponses) (err error)
-
- //
- POST_ReadPublicIpRanges(parameters *POST_ReadPublicIpRangesParameters, responses *POST_ReadPublicIpRangesResponses) (err error)
-
- //
- POST_ReadPublicIps(parameters *POST_ReadPublicIpsParameters, responses *POST_ReadPublicIpsResponses) (err error)
-
- //
- POST_ReadQuotas(parameters *POST_ReadQuotasParameters, responses *POST_ReadQuotasResponses) (err error)
-
- //
- POST_ReadRegionConfig(parameters *POST_ReadRegionConfigParameters, responses *POST_ReadRegionConfigResponses) (err error)
-
- //
- POST_ReadRegions(parameters *POST_ReadRegionsParameters, responses *POST_ReadRegionsResponses) (err error)
-
- //
- POST_ReadReservedVmOffers(parameters *POST_ReadReservedVmOffersParameters, responses *POST_ReadReservedVmOffersResponses) (err error)
-
- //
- POST_ReadReservedVms(parameters *POST_ReadReservedVmsParameters, responses *POST_ReadReservedVmsResponses) (err error)
-
- //
- POST_ReadRouteTables(parameters *POST_ReadRouteTablesParameters, responses *POST_ReadRouteTablesResponses) (err error)
-
- //
- POST_ReadSecurityGroups(parameters *POST_ReadSecurityGroupsParameters, responses *POST_ReadSecurityGroupsResponses) (err error)
-
- //
- POST_ReadServerCertificates(parameters *POST_ReadServerCertificatesParameters, responses *POST_ReadServerCertificatesResponses) (err error)
-
- //
- POST_ReadSnapshotExportTasks(parameters *POST_ReadSnapshotExportTasksParameters, responses *POST_ReadSnapshotExportTasksResponses) (err error)
-
- //
- POST_ReadSnapshots(parameters *POST_ReadSnapshotsParameters, responses *POST_ReadSnapshotsResponses) (err error)
-
- //
- POST_ReadSubnets(parameters *POST_ReadSubnetsParameters, responses *POST_ReadSubnetsResponses) (err error)
-
- //
- POST_ReadSubregions(parameters *POST_ReadSubregionsParameters, responses *POST_ReadSubregionsResponses) (err error)
-
- //
- POST_ReadTags(parameters *POST_ReadTagsParameters, responses *POST_ReadTagsResponses) (err error)
-
- //
- POST_ReadUserGroups(parameters *POST_ReadUserGroupsParameters, responses *POST_ReadUserGroupsResponses) (err error)
-
- //
- POST_ReadUsers(parameters *POST_ReadUsersParameters, responses *POST_ReadUsersResponses) (err error)
-
- //
- POST_ReadVirtualGateways(parameters *POST_ReadVirtualGatewaysParameters, responses *POST_ReadVirtualGatewaysResponses) (err error)
-
- //
- POST_ReadVmTypes(parameters *POST_ReadVmTypesParameters, responses *POST_ReadVmTypesResponses) (err error)
-
- //
- POST_ReadVms(parameters *POST_ReadVmsParameters, responses *POST_ReadVmsResponses) (err error)
-
- //
- POST_ReadVmsHealth(parameters *POST_ReadVmsHealthParameters, responses *POST_ReadVmsHealthResponses) (err error)
-
- //
- POST_ReadVmsState(parameters *POST_ReadVmsStateParameters, responses *POST_ReadVmsStateResponses) (err error)
-
- //
- POST_ReadVolumes(parameters *POST_ReadVolumesParameters, responses *POST_ReadVolumesResponses) (err error)
-
- //
- POST_ReadVpnConnections(parameters *POST_ReadVpnConnectionsParameters, responses *POST_ReadVpnConnectionsResponses) (err error)
-
- //
- POST_RebootVms(parameters *POST_RebootVmsParameters, responses *POST_RebootVmsResponses) (err error)
-
- //
- POST_RegisterUserInUserGroup(parameters *POST_RegisterUserInUserGroupParameters, responses *POST_RegisterUserInUserGroupResponses) (err error)
-
- //
- POST_RegisterVmsInLoadBalancer(parameters *POST_RegisterVmsInLoadBalancerParameters, responses *POST_RegisterVmsInLoadBalancerResponses) (err error)
-
- //
- POST_RejectNetPeering(parameters *POST_RejectNetPeeringParameters, responses *POST_RejectNetPeeringResponses) (err error)
-
- //
- POST_ResetAccountPassword(parameters *POST_ResetAccountPasswordParameters, responses *POST_ResetAccountPasswordResponses) (err error)
-
- //
- POST_SendResetPasswordEmail(parameters *POST_SendResetPasswordEmailParameters, responses *POST_SendResetPasswordEmailResponses) (err error)
-
- //
- POST_StartVms(parameters *POST_StartVmsParameters, responses *POST_StartVmsResponses) (err error)
-
- //
- POST_StopVms(parameters *POST_StopVmsParameters, responses *POST_StopVmsResponses) (err error)
-
- //
- POST_UnlinkInternetService(parameters *POST_UnlinkInternetServiceParameters, responses *POST_UnlinkInternetServiceResponses) (err error)
-
- //
- POST_UnlinkNic(parameters *POST_UnlinkNicParameters, responses *POST_UnlinkNicResponses) (err error)
-
- //
- POST_UnlinkPolicy(parameters *POST_UnlinkPolicyParameters, responses *POST_UnlinkPolicyResponses) (err error)
-
- //
- POST_UnlinkPrivateIps(parameters *POST_UnlinkPrivateIpsParameters, responses *POST_UnlinkPrivateIpsResponses) (err error)
-
- //
- POST_UnlinkPublicIp(parameters *POST_UnlinkPublicIpParameters, responses *POST_UnlinkPublicIpResponses) (err error)
-
- //
- POST_UnlinkRouteTable(parameters *POST_UnlinkRouteTableParameters, responses *POST_UnlinkRouteTableResponses) (err error)
-
- //
- POST_UnlinkVirtualGateway(parameters *POST_UnlinkVirtualGatewayParameters, responses *POST_UnlinkVirtualGatewayResponses) (err error)
-
- //
- POST_UnlinkVolume(parameters *POST_UnlinkVolumeParameters, responses *POST_UnlinkVolumeResponses) (err error)
-
- //
- POST_UpdateAccount(parameters *POST_UpdateAccountParameters, responses *POST_UpdateAccountResponses) (err error)
-
- //
- POST_UpdateApiKey(parameters *POST_UpdateApiKeyParameters, responses *POST_UpdateApiKeyResponses) (err error)
-
- //
- POST_UpdateHealthCheck(parameters *POST_UpdateHealthCheckParameters, responses *POST_UpdateHealthCheckResponses) (err error)
-
- //
- POST_UpdateImage(parameters *POST_UpdateImageParameters, responses *POST_UpdateImageResponses) (err error)
-
- //
- POST_UpdateKeypair(parameters *POST_UpdateKeypairParameters, responses *POST_UpdateKeypairResponses) (err error)
-
- //
- POST_UpdateListenerRule(parameters *POST_UpdateListenerRuleParameters, responses *POST_UpdateListenerRuleResponses) (err error)
-
- //
- POST_UpdateLoadBalancer(parameters *POST_UpdateLoadBalancerParameters, responses *POST_UpdateLoadBalancerResponses) (err error)
-
- //
- POST_UpdateNet(parameters *POST_UpdateNetParameters, responses *POST_UpdateNetResponses) (err error)
-
- //
- POST_UpdateNetAccessPoint(parameters *POST_UpdateNetAccessPointParameters, responses *POST_UpdateNetAccessPointResponses) (err error)
-
- //
- POST_UpdateNic(parameters *POST_UpdateNicParameters, responses *POST_UpdateNicResponses) (err error)
-
- //
- POST_UpdateRoute(parameters *POST_UpdateRouteParameters, responses *POST_UpdateRouteResponses) (err error)
-
- //
- POST_UpdateRoutePropagation(parameters *POST_UpdateRoutePropagationParameters, responses *POST_UpdateRoutePropagationResponses) (err error)
-
- //
- POST_UpdateServerCertificate(parameters *POST_UpdateServerCertificateParameters, responses *POST_UpdateServerCertificateResponses) (err error)
-
- //
- POST_UpdateSnapshot(parameters *POST_UpdateSnapshotParameters, responses *POST_UpdateSnapshotResponses) (err error)
-
- //
- POST_UpdateUser(parameters *POST_UpdateUserParameters, responses *POST_UpdateUserResponses) (err error)
-
- //
- POST_UpdateUserGroup(parameters *POST_UpdateUserGroupParameters, responses *POST_UpdateUserGroupResponses) (err error)
-
- //
- POST_UpdateVm(parameters *POST_UpdateVmParameters, responses *POST_UpdateVmResponses) (err error)
-}
diff --git a/vendor/github.com/outscale/osc-go/oapi/types.go b/vendor/github.com/outscale/osc-go/oapi/types.go
deleted file mode 100644
index 7edd338a6..000000000
--- a/vendor/github.com/outscale/osc-go/oapi/types.go
+++ /dev/null
@@ -1,5663 +0,0 @@
-// GENERATED FILE: DO NOT EDIT!
-
-package oapi
-
-// Types used by the API.
-// implements the service definition of AcceptNetPeeringRequest
-type AcceptNetPeeringRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NetPeeringId string `json:"NetPeeringId,omitempty"`
-}
-
-// implements the service definition of AcceptNetPeeringResponse
-type AcceptNetPeeringResponse struct {
- NetPeering NetPeering `json:"NetPeering,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of AccepterNet
-type AccepterNet struct {
- AccountId string `json:"AccountId,omitempty"`
- IpRange string `json:"IpRange,omitempty"`
- NetId string `json:"NetId,omitempty"`
-}
-
-// implements the service definition of AccessLog
-type AccessLog struct {
- IsEnabled bool `json:"IsEnabled,omitempty"`
- OsuBucketName string `json:"OsuBucketName,omitempty"`
- OsuBucketPrefix string `json:"OsuBucketPrefix,omitempty"`
- PublicationInterval int64 `json:"PublicationInterval,omitempty"`
-}
-
-// implements the service definition of Account
-type Account struct {
- AccountId string `json:"AccountId,omitempty"`
- City string `json:"City,omitempty"`
- CompanyName string `json:"CompanyName,omitempty"`
- Country string `json:"Country,omitempty"`
- CustomerId string `json:"CustomerId,omitempty"`
- Email string `json:"Email,omitempty"`
- FirstName string `json:"FirstName,omitempty"`
- JobTitle string `json:"JobTitle,omitempty"`
- LastName string `json:"LastName,omitempty"`
- Mobile string `json:"Mobile,omitempty"`
- Phone string `json:"Phone,omitempty"`
- StateProvince string `json:"StateProvince,omitempty"`
- VatNumber string `json:"VatNumber,omitempty"`
- ZipCode string `json:"ZipCode,omitempty"`
-}
-
-// implements the service definition of ApiKey
-type ApiKey struct {
- AccountId string `json:"AccountId,omitempty"`
- ApiKeyId string `json:"ApiKeyId,omitempty"`
- SecretKey string `json:"SecretKey,omitempty"`
- State string `json:"State,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of ApplicationStickyCookiePolicy
-type ApplicationStickyCookiePolicy struct {
- CookieName string `json:"CookieName,omitempty"`
- PolicyName string `json:"PolicyName,omitempty"`
-}
-
-// implements the service definition of Attribute
-type Attribute struct {
- Key string `json:"Key,omitempty"`
- Value string `json:"Value,omitempty"`
-}
-
-// implements the service definition of AuthenticateAccountRequest
-type AuthenticateAccountRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Login string `json:"Login,omitempty"`
- Password string `json:"Password,omitempty"`
-}
-
-// implements the service definition of AuthenticateAccountResponse
-type AuthenticateAccountResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of BackendVmsHealth
-type BackendVmsHealth struct {
- Description string `json:"Description,omitempty"`
- State string `json:"State,omitempty"`
- StateReason string `json:"StateReason,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of BlockDeviceMapping
-type BlockDeviceMapping struct {
- Bsu Bsu `json:"Bsu,omitempty"`
- DeviceName string `json:"DeviceName,omitempty"`
- NoDevice string `json:"NoDevice,omitempty"`
- VirtualDeviceName string `json:"VirtualDeviceName,omitempty"`
-}
-
-// implements the service definition of BlockDeviceMappingCreated
-type BlockDeviceMappingCreated struct {
- Bsu BsuCreated `json:"Bsu,omitempty"`
- DeviceName string `json:"DeviceName,omitempty"`
-}
-
-// implements the service definition of BlockDeviceMappingImage
-type BlockDeviceMappingImage struct {
- Bsu BsuToCreate `json:"Bsu,omitempty"`
- DeviceName string `json:"DeviceName,omitempty"`
- VirtualDeviceName string `json:"VirtualDeviceName,omitempty"`
-}
-
-// implements the service definition of BlockDeviceMappingVmCreation
-type BlockDeviceMappingVmCreation struct {
- Bsu BsuToCreate `json:"Bsu,omitempty"`
- DeviceName string `json:"DeviceName,omitempty"`
- NoDevice string `json:"NoDevice,omitempty"`
- VirtualDeviceName string `json:"VirtualDeviceName,omitempty"`
-}
-
-// implements the service definition of BlockDeviceMappingVmUpdate
-type BlockDeviceMappingVmUpdate struct {
- Bsu BsuToUpdateVm `json:"Bsu,omitempty"`
- DeviceName string `json:"DeviceName,omitempty"`
- NoDevice string `json:"NoDevice,omitempty"`
- VirtualDeviceName string `json:"VirtualDeviceName,omitempty"`
-}
-
-// implements the service definition of Bsu
-type Bsu struct {
- DeleteOnVmDeletion *bool `json:"DeleteOnVmDeletion,omitempty"`
- Iops int64 `json:"Iops,omitempty"`
- LinkDate string `json:"LinkDate,omitempty"`
- SnapshotId string `json:"SnapshotId,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
- VolumeSize int64 `json:"VolumeSize,omitempty"`
- VolumeType string `json:"VolumeType,omitempty"`
-}
-
-// implements the service definition of BsuCreated
-type BsuCreated struct {
- DeleteOnVmDeletion *bool `json:"DeleteOnVmDeletion,omitempty"`
- LinkDate string `json:"LinkDate,omitempty"`
- State string `json:"State,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
-}
-
-// implements the service definition of BsuToCreate
-type BsuToCreate struct {
- DeleteOnVmDeletion *bool `json:"DeleteOnVmDeletion,omitempty"`
- Iops int64 `json:"Iops,omitempty"`
- SnapshotId string `json:"SnapshotId,omitempty"`
- VolumeSize int64 `json:"VolumeSize,omitempty"`
- VolumeType string `json:"VolumeType,omitempty"`
-}
-
-// implements the service definition of BsuToUpdateVm
-type BsuToUpdateVm struct {
- DeleteOnVmDeletion *bool `json:"DeleteOnVmDeletion,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
-}
-
-// implements the service definition of CatalogAttribute
-type CatalogAttribute struct {
- Key string `json:"Key,omitempty"`
- Value string `json:"Value,omitempty"`
-}
-
-// implements the service definition of CatalogEntry
-type CatalogEntry struct {
- CatalogAttributes []CatalogAttribute `json:"CatalogAttributes,omitempty"`
- EntryKey string `json:"EntryKey,omitempty"`
- EntryValue string `json:"EntryValue,omitempty"`
- ShortDescription string `json:"ShortDescription,omitempty"`
-}
-
-// implements the service definition of Catalog_0
-type Catalog_0 struct {
- Domain string `json:"Domain,omitempty"`
- Instance string `json:"Instance,omitempty"`
- SourceRegionName string `json:"SourceRegionName,omitempty"`
- TargetRegionName string `json:"TargetRegionName,omitempty"`
- Version string `json:"Version,omitempty"`
-}
-
-// implements the service definition of Catalog_1
-type Catalog_1 struct {
- CatalogAttributes []CatalogAttribute `json:"CatalogAttributes,omitempty"`
- CatalogEntries []CatalogEntry `json:"CatalogEntries,omitempty"`
-}
-
-// implements the service definition of CheckSignatureRequest
-type CheckSignatureRequest struct {
- ApiKeyId string `json:"ApiKeyId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- RegionName string `json:"RegionName,omitempty"`
- RequestDate string `json:"RequestDate,omitempty"`
- Service string `json:"Service,omitempty"`
- Signature string `json:"Signature,omitempty"`
- SignedContent string `json:"SignedContent,omitempty"`
-}
-
-// implements the service definition of CheckSignatureResponse
-type CheckSignatureResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ClientGateway
-type ClientGateway struct {
- BgpAsn int64 `json:"BgpAsn,omitempty"`
- ClientGatewayId string `json:"ClientGatewayId,omitempty"`
- ConnectionType string `json:"ConnectionType,omitempty"`
- PublicIp string `json:"PublicIp,omitempty"`
- State string `json:"State,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of ConsumptionEntries
-type ConsumptionEntries struct {
- Category string `json:"Category,omitempty"`
- ConsumptionValue string `json:"ConsumptionValue,omitempty"`
- Entry string `json:"Entry,omitempty"`
- ResourceType string `json:"ResourceType,omitempty"`
- Service string `json:"Service,omitempty"`
- ShortDescription string `json:"ShortDescription,omitempty"`
-}
-
-// implements the service definition of CopyAccountRequest
-type CopyAccountRequest struct {
- DestinationRegionName string `json:"DestinationRegionName,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- QuotaProfile string `json:"QuotaProfile,omitempty"`
-}
-
-// implements the service definition of CopyAccountResponse
-type CopyAccountResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateAccountRequest
-type CreateAccountRequest struct {
- AccountId string `json:"AccountId,omitempty"`
- ApiKeys []ApiKey `json:"ApiKeys,omitempty"`
- City string `json:"City,omitempty"`
- CompanyName string `json:"CompanyName,omitempty"`
- Country string `json:"Country,omitempty"`
- CustomerId string `json:"CustomerId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- Email string `json:"Email,omitempty"`
- FirstName string `json:"FirstName,omitempty"`
- JobTitle string `json:"JobTitle,omitempty"`
- LastName string `json:"LastName,omitempty"`
- Mobile string `json:"Mobile,omitempty"`
- Password string `json:"Password,omitempty"`
- Phone string `json:"Phone,omitempty"`
- QuotaProfile string `json:"QuotaProfile,omitempty"`
- StateProvince string `json:"StateProvince,omitempty"`
- VatNumber string `json:"VatNumber,omitempty"`
- ZipCode string `json:"ZipCode,omitempty"`
-}
-
-// implements the service definition of CreateAccountResponse
-type CreateAccountResponse struct {
- Account Account `json:"Account,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateApiKeyRequest
-type CreateApiKeyRequest struct {
- ApiKeyId string `json:"ApiKeyId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- SecretKey string `json:"SecretKey,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of CreateApiKeyResponse
-type CreateApiKeyResponse struct {
- ApiKey ApiKey `json:"ApiKey,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateClientGatewayRequest
-type CreateClientGatewayRequest struct {
- BgpAsn int64 `json:"BgpAsn,omitempty"`
- ConnectionType string `json:"ConnectionType,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- PublicIp string `json:"PublicIp,omitempty"`
-}
-
-// implements the service definition of CreateClientGatewayResponse
-type CreateClientGatewayResponse struct {
- ClientGateway ClientGateway `json:"ClientGateway,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateDhcpOptionsRequest
-type CreateDhcpOptionsRequest struct {
- DomainName string `json:"DomainName,omitempty"`
- DomainNameServers []string `json:"DomainNameServers,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- NtpServers []string `json:"NtpServers,omitempty"`
-}
-
-// implements the service definition of CreateDhcpOptionsResponse
-type CreateDhcpOptionsResponse struct {
- DhcpOptionsSet DhcpOptionsSet `json:"DhcpOptionsSet,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateDirectLinkInterfaceRequest
-type CreateDirectLinkInterfaceRequest struct {
- DirectLinkId string `json:"DirectLinkId,omitempty"`
- DirectLinkInterface DirectLinkInterface `json:"DirectLinkInterface,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of CreateDirectLinkInterfaceResponse
-type CreateDirectLinkInterfaceResponse struct {
- DirectLinkInterface DirectLinkInterfaces `json:"DirectLinkInterface,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateDirectLinkRequest
-type CreateDirectLinkRequest struct {
- Bandwidth string `json:"Bandwidth,omitempty"`
- DirectLinkName string `json:"DirectLinkName,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- Location string `json:"Location,omitempty"`
-}
-
-// implements the service definition of CreateDirectLinkResponse
-type CreateDirectLinkResponse struct {
- DirectLink DirectLink `json:"DirectLink,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateImageExportTaskRequest
-type CreateImageExportTaskRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ImageId string `json:"ImageId,omitempty"`
- OsuExport OsuExport `json:"OsuExport,omitempty"`
-}
-
-// implements the service definition of CreateImageExportTaskResponse
-type CreateImageExportTaskResponse struct {
- ImageExportTask ImageExportTask `json:"ImageExportTask,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateImageRequest
-type CreateImageRequest struct {
- Architecture string `json:"Architecture,omitempty"`
- BlockDeviceMappings []BlockDeviceMappingImage `json:"BlockDeviceMappings,omitempty"`
- Description string `json:"Description,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- FileLocation string `json:"FileLocation,omitempty"`
- ImageName string `json:"ImageName,omitempty"`
- NoReboot bool `json:"NoReboot,omitempty"`
- RootDeviceName string `json:"RootDeviceName,omitempty"`
- SourceImageId string `json:"SourceImageId,omitempty"`
- SourceRegionName string `json:"SourceRegionName,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of CreateImageResponse
-type CreateImageResponse struct {
- Image Image `json:"Image,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateInternetServiceRequest
-type CreateInternetServiceRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of CreateInternetServiceResponse
-type CreateInternetServiceResponse struct {
- InternetService InternetService `json:"InternetService,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateKeypairRequest
-type CreateKeypairRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- KeypairName string `json:"KeypairName,omitempty"`
- PublicKey string `json:"PublicKey,omitempty"`
-}
-
-// implements the service definition of CreateKeypairResponse
-type CreateKeypairResponse struct {
- Keypair KeypairCreated `json:"Keypair,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateListenerRuleRequest
-type CreateListenerRuleRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Listener LoadBalancerLight `json:"Listener,omitempty"`
- ListenerRule ListenerRule `json:"ListenerRule,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
-}
-
-// implements the service definition of CreateListenerRuleResponse
-type CreateListenerRuleResponse struct {
- ListenerId string `json:"ListenerId,omitempty"`
- ListenerRule ListenerRule `json:"ListenerRule,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
-}
-
-// implements the service definition of CreateLoadBalancerListenersRequest
-type CreateLoadBalancerListenersRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Listeners []ListenerForCreation `json:"Listeners,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
-}
-
-// implements the service definition of CreateLoadBalancerListenersResponse
-type CreateLoadBalancerListenersResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateLoadBalancerPolicyRequest
-type CreateLoadBalancerPolicyRequest struct {
- CookieName string `json:"CookieName,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
- PolicyName string `json:"PolicyName,omitempty"`
- PolicyType string `json:"PolicyType,omitempty"`
-}
-
-// implements the service definition of CreateLoadBalancerPolicyResponse
-type CreateLoadBalancerPolicyResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateLoadBalancerRequest
-type CreateLoadBalancerRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Listeners []ListenerForCreation `json:"Listeners,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
- LoadBalancerType string `json:"LoadBalancerType,omitempty"`
- SecurityGroups []string `json:"SecurityGroups,omitempty"`
- Subnets []string `json:"Subnets,omitempty"`
- SubregionNames []string `json:"SubregionNames,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of CreateLoadBalancerResponse
-type CreateLoadBalancerResponse struct {
- LoadBalancer LoadBalancer `json:"LoadBalancer,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateNatServiceRequest
-type CreateNatServiceRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- PublicIpId string `json:"PublicIpId,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
-}
-
-// implements the service definition of CreateNatServiceResponse
-type CreateNatServiceResponse struct {
- NatService NatService `json:"NatService,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateNetAccessPointRequest
-type CreateNetAccessPointRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NetId string `json:"NetId,omitempty"`
- PrefixListName string `json:"PrefixListName,omitempty"`
- RouteTableIds []string `json:"RouteTableIds,omitempty"`
-}
-
-// implements the service definition of CreateNetAccessPointResponse
-type CreateNetAccessPointResponse struct {
- NetAccessPoint NetAccessPoint `json:"NetAccessPoint,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateNetPeeringRequest
-type CreateNetPeeringRequest struct {
- AccepterNetId string `json:"AccepterNetId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- SourceNetId string `json:"SourceNetId,omitempty"`
-}
-
-// implements the service definition of CreateNetPeeringResponse
-type CreateNetPeeringResponse struct {
- NetPeering NetPeering `json:"NetPeering,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateNetRequest
-type CreateNetRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- IpRange string `json:"IpRange,omitempty"`
- Tenancy string `json:"Tenancy,omitempty"`
-}
-
-// implements the service definition of CreateNetResponse
-type CreateNetResponse struct {
- Net Net `json:"Net,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateNicRequest
-type CreateNicRequest struct {
- Description string `json:"Description,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- PrivateIps []PrivateIpLight `json:"PrivateIps,omitempty"`
- SecurityGroupIds []string `json:"SecurityGroupIds,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
-}
-
-// implements the service definition of CreateNicResponse
-type CreateNicResponse struct {
- Nic Nic `json:"Nic,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreatePolicyRequest
-type CreatePolicyRequest struct {
- Description string `json:"Description,omitempty"`
- Document string `json:"Document,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- Path string `json:"Path,omitempty"`
- PolicyName string `json:"PolicyName,omitempty"`
-}
-
-// implements the service definition of CreatePolicyResponse
-type CreatePolicyResponse struct {
- Policy Policy `json:"Policy,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreatePublicIpRequest
-type CreatePublicIpRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of CreatePublicIpResponse
-type CreatePublicIpResponse struct {
- PublicIp PublicIp `json:"PublicIp,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateRouteRequest
-type CreateRouteRequest struct {
- DestinationIpRange string `json:"DestinationIpRange,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- GatewayId string `json:"GatewayId,omitempty"`
- NatServiceId string `json:"NatServiceId,omitempty"`
- NetPeeringId string `json:"NetPeeringId,omitempty"`
- NicId string `json:"NicId,omitempty"`
- RouteTableId string `json:"RouteTableId,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of CreateRouteResponse
-type CreateRouteResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Route Route `json:"Route,omitempty"`
-}
-
-// implements the service definition of CreateRouteTableRequest
-type CreateRouteTableRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NetId string `json:"NetId,omitempty"`
-}
-
-// implements the service definition of CreateRouteTableResponse
-type CreateRouteTableResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- RouteTable RouteTable `json:"RouteTable,omitempty"`
-}
-
-// implements the service definition of CreateSecurityGroupRequest
-type CreateSecurityGroupRequest struct {
- Description string `json:"Description,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- NetId string `json:"NetId,omitempty"`
- SecurityGroupName string `json:"SecurityGroupName,omitempty"`
-}
-
-// implements the service definition of CreateSecurityGroupResponse
-type CreateSecurityGroupResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- SecurityGroup SecurityGroup `json:"SecurityGroup,omitempty"`
-}
-
-// implements the service definition of CreateSecurityGroupRuleRequest
-type CreateSecurityGroupRuleRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Flow string `json:"Flow,omitempty"`
- FromPortRange int64 `json:"FromPortRange"`
- IpProtocol string `json:"IpProtocol,omitempty"`
- IpRange string `json:"IpRange,omitempty"`
- Rules []SecurityGroupRule `json:"Rules,omitempty"`
- SecurityGroupAccountIdToLink string `json:"SecurityGroupAccountIdToLink,omitempty"`
- SecurityGroupId string `json:"SecurityGroupId,omitempty"`
- SecurityGroupNameToLink string `json:"SecurityGroupNameToLink,omitempty"`
- ToPortRange int64 `json:"ToPortRange"`
-}
-
-// implements the service definition of CreateSecurityGroupRuleResponse
-type CreateSecurityGroupRuleResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- SecurityGroupRule SecurityGroupRule `json:"SecurityGroupRule,omitempty"`
-}
-
-// implements the service definition of CreateServerCertificateRequest
-type CreateServerCertificateRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- PrivateKey string `json:"PrivateKey,omitempty"`
- ServerCertificateBody string `json:"ServerCertificateBody,omitempty"`
- ServerCertificateChain string `json:"ServerCertificateChain,omitempty"`
- ServerCertificateName string `json:"ServerCertificateName,omitempty"`
- ServerCertificatePath string `json:"ServerCertificatePath,omitempty"`
-}
-
-// implements the service definition of CreateServerCertificateResponse
-type CreateServerCertificateResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- ServerCertificate ServerCertificate `json:"ServerCertificate,omitempty"`
-}
-
-// implements the service definition of CreateSnapshotExportTaskRequest
-type CreateSnapshotExportTaskRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- OsuExport OsuExport `json:"OsuExport,omitempty"`
- SnapshotId string `json:"SnapshotId,omitempty"`
-}
-
-// implements the service definition of CreateSnapshotExportTaskResponse
-type CreateSnapshotExportTaskResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- SnapshotExportTask SnapshotExportTask `json:"SnapshotExportTask,omitempty"`
-}
-
-// implements the service definition of CreateSnapshotRequest
-type CreateSnapshotRequest struct {
- Description string `json:"Description,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- FileLocation string `json:"FileLocation,omitempty"`
- SnapshotSize int64 `json:"SnapshotSize,omitempty"`
- SourceRegionName string `json:"SourceRegionName,omitempty"`
- SourceSnapshotId string `json:"SourceSnapshotId,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
-}
-
-// implements the service definition of CreateSnapshotResponse
-type CreateSnapshotResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Snapshot Snapshot `json:"Snapshot,omitempty"`
-}
-
-// implements the service definition of CreateSubnetRequest
-type CreateSubnetRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- IpRange string `json:"IpRange,omitempty"`
- NetId string `json:"NetId,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
-}
-
-// implements the service definition of CreateSubnetResponse
-type CreateSubnetResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Subnet Subnet `json:"Subnet,omitempty"`
-}
-
-// implements the service definition of CreateTagsRequest
-type CreateTagsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ResourceIds []string `json:"ResourceIds,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of CreateTagsResponse
-type CreateTagsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of CreateUserGroupRequest
-type CreateUserGroupRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Path string `json:"Path,omitempty"`
- UserGroupName string `json:"UserGroupName,omitempty"`
-}
-
-// implements the service definition of CreateUserGroupResponse
-type CreateUserGroupResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- UserGroup UserGroup `json:"UserGroup,omitempty"`
-}
-
-// implements the service definition of CreateUserRequest
-type CreateUserRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Path string `json:"Path,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of CreateUserResponse
-type CreateUserResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- User User `json:"User,omitempty"`
-}
-
-// implements the service definition of CreateVirtualGatewayRequest
-type CreateVirtualGatewayRequest struct {
- ConnectionType string `json:"ConnectionType,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of CreateVirtualGatewayResponse
-type CreateVirtualGatewayResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- VirtualGateway VirtualGateway `json:"VirtualGateway,omitempty"`
-}
-
-// implements the service definition of CreateVmsRequest
-type CreateVmsRequest struct {
- BlockDeviceMappings []BlockDeviceMappingVmCreation `json:"BlockDeviceMappings,omitempty"`
- BsuOptimized bool `json:"BsuOptimized,omitempty"`
- ClientToken string `json:"ClientToken,omitempty"`
- DeletionProtection *bool `json:"DeletionProtection,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- ImageId string `json:"ImageId,omitempty"`
- KeypairName string `json:"KeypairName,omitempty"`
- MaxVmsCount int64 `json:"MaxVmsCount,omitempty"`
- MinVmsCount int64 `json:"MinVmsCount,omitempty"`
- Nics []NicForVmCreation `json:"Nics,omitempty"`
- Placement Placement `json:"Placement,omitempty"`
- PrivateIps []string `json:"PrivateIps,omitempty"`
- SecurityGroupIds []string `json:"SecurityGroupIds,omitempty"`
- SecurityGroups []string `json:"SecurityGroups,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
- UserData string `json:"UserData,omitempty"`
- VmInitiatedShutdownBehavior string `json:"VmInitiatedShutdownBehavior,omitempty"`
- VmType string `json:"VmType,omitempty"`
-}
-
-// implements the service definition of CreateVmsResponse
-type CreateVmsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Vms []Vm `json:"Vms,omitempty"`
-}
-
-// implements the service definition of CreateVolumeRequest
-type CreateVolumeRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Iops int64 `json:"Iops,omitempty"`
- Size int64 `json:"Size,omitempty"`
- SnapshotId string `json:"SnapshotId,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
- VolumeType string `json:"VolumeType,omitempty"`
-}
-
-// implements the service definition of CreateVolumeResponse
-type CreateVolumeResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Volume Volume `json:"Volume,omitempty"`
-}
-
-// implements the service definition of CreateVpnConnectionRequest
-type CreateVpnConnectionRequest struct {
- ClientGatewayId string `json:"ClientGatewayId,omitempty"`
- ConnectionType string `json:"ConnectionType,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- StaticRoutesOnly bool `json:"StaticRoutesOnly,omitempty"`
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
-}
-
-// implements the service definition of CreateVpnConnectionResponse
-type CreateVpnConnectionResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- VpnConnection VpnConnection `json:"VpnConnection,omitempty"`
-}
-
-// implements the service definition of CreateVpnConnectionRouteRequest
-type CreateVpnConnectionRouteRequest struct {
- DestinationIpRange string `json:"DestinationIpRange,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- VpnConnectionId string `json:"VpnConnectionId,omitempty"`
-}
-
-// implements the service definition of CreateVpnConnectionRouteResponse
-type CreateVpnConnectionRouteResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteApiKeyRequest
-type DeleteApiKeyRequest struct {
- ApiKeyId string `json:"ApiKeyId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of DeleteApiKeyResponse
-type DeleteApiKeyResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteClientGatewayRequest
-type DeleteClientGatewayRequest struct {
- ClientGatewayId string `json:"ClientGatewayId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of DeleteClientGatewayResponse
-type DeleteClientGatewayResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteDhcpOptionsRequest
-type DeleteDhcpOptionsRequest struct {
- DhcpOptionsSetId string `json:"DhcpOptionsSetId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of DeleteDhcpOptionsResponse
-type DeleteDhcpOptionsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteDirectLinkInterfaceRequest
-type DeleteDirectLinkInterfaceRequest struct {
- DirectLinkInterfaceId string `json:"DirectLinkInterfaceId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of DeleteDirectLinkInterfaceResponse
-type DeleteDirectLinkInterfaceResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteDirectLinkRequest
-type DeleteDirectLinkRequest struct {
- DirectLinkId string `json:"DirectLinkId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of DeleteDirectLinkResponse
-type DeleteDirectLinkResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteExportTaskRequest
-type DeleteExportTaskRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ExportTaskId string `json:"ExportTaskId,omitempty"`
-}
-
-// implements the service definition of DeleteExportTaskResponse
-type DeleteExportTaskResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteImageRequest
-type DeleteImageRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ImageId string `json:"ImageId,omitempty"`
-}
-
-// implements the service definition of DeleteImageResponse
-type DeleteImageResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteInternetServiceRequest
-type DeleteInternetServiceRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- InternetServiceId string `json:"InternetServiceId,omitempty"`
-}
-
-// implements the service definition of DeleteInternetServiceResponse
-type DeleteInternetServiceResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteKeypairRequest
-type DeleteKeypairRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- KeypairName string `json:"KeypairName,omitempty"`
-}
-
-// implements the service definition of DeleteKeypairResponse
-type DeleteKeypairResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteListenerRuleRequest
-type DeleteListenerRuleRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ListenerRuleName string `json:"ListenerRuleName,omitempty"`
-}
-
-// implements the service definition of DeleteListenerRuleResponse
-type DeleteListenerRuleResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteLoadBalancerListenersRequest
-type DeleteLoadBalancerListenersRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
- LoadBalancerPorts []int64 `json:"LoadBalancerPorts,omitempty"`
-}
-
-// implements the service definition of DeleteLoadBalancerListenersResponse
-type DeleteLoadBalancerListenersResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteLoadBalancerPolicyRequest
-type DeleteLoadBalancerPolicyRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
- PolicyName string `json:"PolicyName,omitempty"`
-}
-
-// implements the service definition of DeleteLoadBalancerPolicyResponse
-type DeleteLoadBalancerPolicyResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteLoadBalancerRequest
-type DeleteLoadBalancerRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
-}
-
-// implements the service definition of DeleteLoadBalancerResponse
-type DeleteLoadBalancerResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteNatServiceRequest
-type DeleteNatServiceRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NatServiceId string `json:"NatServiceId,omitempty"`
-}
-
-// implements the service definition of DeleteNatServiceResponse
-type DeleteNatServiceResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteNetAccessPointsRequest
-type DeleteNetAccessPointsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NetAccessPointIds []string `json:"NetAccessPointIds,omitempty"`
-}
-
-// implements the service definition of DeleteNetAccessPointsResponse
-type DeleteNetAccessPointsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteNetPeeringRequest
-type DeleteNetPeeringRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NetPeeringId string `json:"NetPeeringId,omitempty"`
-}
-
-// implements the service definition of DeleteNetPeeringResponse
-type DeleteNetPeeringResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteNetRequest
-type DeleteNetRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NetId string `json:"NetId,omitempty"`
-}
-
-// implements the service definition of DeleteNetResponse
-type DeleteNetResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteNicRequest
-type DeleteNicRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NicId string `json:"NicId,omitempty"`
-}
-
-// implements the service definition of DeleteNicResponse
-type DeleteNicResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeletePolicyRequest
-type DeletePolicyRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- PolicyId string `json:"PolicyId,omitempty"`
-}
-
-// implements the service definition of DeletePolicyResponse
-type DeletePolicyResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeletePublicIpRequest
-type DeletePublicIpRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- PublicIp string `json:"PublicIp,omitempty"`
- PublicIpId string `json:"PublicIpId,omitempty"`
-}
-
-// implements the service definition of DeletePublicIpResponse
-type DeletePublicIpResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteRouteRequest
-type DeleteRouteRequest struct {
- DestinationIpRange string `json:"DestinationIpRange,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- RouteTableId string `json:"RouteTableId,omitempty"`
-}
-
-// implements the service definition of DeleteRouteResponse
-type DeleteRouteResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteRouteTableRequest
-type DeleteRouteTableRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- RouteTableId string `json:"RouteTableId,omitempty"`
-}
-
-// implements the service definition of DeleteRouteTableResponse
-type DeleteRouteTableResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteSecurityGroupRequest
-type DeleteSecurityGroupRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- SecurityGroupId string `json:"SecurityGroupId,omitempty"`
- SecurityGroupName string `json:"SecurityGroupName,omitempty"`
-}
-
-// implements the service definition of DeleteSecurityGroupResponse
-type DeleteSecurityGroupResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteSecurityGroupRuleRequest
-type DeleteSecurityGroupRuleRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Flow string `json:"Flow,omitempty"`
- FromPortRange int64 `json:"FromPortRange"`
- IpProtocol string `json:"IpProtocol,omitempty"`
- IpRange string `json:"IpRange,omitempty"`
- Rules []SecurityGroupRule `json:"Rules,omitempty"`
- SecurityGroupAccountIdToUnlink string `json:"SecurityGroupAccountIdToUnlink,omitempty"`
- SecurityGroupId string `json:"SecurityGroupId,omitempty"`
- SecurityGroupNameToUnlink string `json:"SecurityGroupNameToUnlink,omitempty"`
- ToPortRange int64 `json:"ToPortRange"`
-}
-
-// implements the service definition of DeleteSecurityGroupRuleResponse
-type DeleteSecurityGroupRuleResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteServerCertificateRequest
-type DeleteServerCertificateRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ServerCertificateName string `json:"ServerCertificateName,omitempty"`
-}
-
-// implements the service definition of DeleteServerCertificateResponse
-type DeleteServerCertificateResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteSnapshotRequest
-type DeleteSnapshotRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- SnapshotId string `json:"SnapshotId,omitempty"`
-}
-
-// implements the service definition of DeleteSnapshotResponse
-type DeleteSnapshotResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteSubnetRequest
-type DeleteSubnetRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
-}
-
-// implements the service definition of DeleteSubnetResponse
-type DeleteSubnetResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteTagsRequest
-type DeleteTagsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ResourceIds []string `json:"ResourceIds,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of DeleteTagsResponse
-type DeleteTagsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteUserGroupRequest
-type DeleteUserGroupRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- UserGroupName string `json:"UserGroupName,omitempty"`
-}
-
-// implements the service definition of DeleteUserGroupResponse
-type DeleteUserGroupResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteUserRequest
-type DeleteUserRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of DeleteUserResponse
-type DeleteUserResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteVirtualGatewayRequest
-type DeleteVirtualGatewayRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
-}
-
-// implements the service definition of DeleteVirtualGatewayResponse
-type DeleteVirtualGatewayResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteVmsRequest
-type DeleteVmsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
-}
-
-// implements the service definition of DeleteVmsResponse
-type DeleteVmsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Vms []VmState `json:"Vms,omitempty"`
-}
-
-// implements the service definition of DeleteVolumeRequest
-type DeleteVolumeRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
-}
-
-// implements the service definition of DeleteVolumeResponse
-type DeleteVolumeResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteVpnConnectionRequest
-type DeleteVpnConnectionRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- VpnConnectionId string `json:"VpnConnectionId,omitempty"`
-}
-
-// implements the service definition of DeleteVpnConnectionResponse
-type DeleteVpnConnectionResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeleteVpnConnectionRouteRequest
-type DeleteVpnConnectionRouteRequest struct {
- DestinationIpRange string `json:"DestinationIpRange,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- VpnConnectionId string `json:"VpnConnectionId,omitempty"`
-}
-
-// implements the service definition of DeleteVpnConnectionRouteResponse
-type DeleteVpnConnectionRouteResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeregisterUserInUserGroupRequest
-type DeregisterUserInUserGroupRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- UserGroupName string `json:"UserGroupName,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of DeregisterUserInUserGroupResponse
-type DeregisterUserInUserGroupResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DeregisterVmsInLoadBalancerRequest
-type DeregisterVmsInLoadBalancerRequest struct {
- BackendVmsIds []string `json:"BackendVmsIds,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
-}
-
-// implements the service definition of DeregisterVmsInLoadBalancerResponse
-type DeregisterVmsInLoadBalancerResponse struct {
- BackendVmsIds []string `json:"BackendVmsIds,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of DhcpOptionsSet
-type DhcpOptionsSet struct {
- Default bool `json:"Default,omitempty"`
- DhcpOptionsName string `json:"DhcpOptionsName,omitempty"`
- DhcpOptionsSetId string `json:"DhcpOptionsSetId,omitempty"`
- DomainName string `json:"DomainName,omitempty"`
- DomainNameServers []string `json:"DomainNameServers,omitempty"`
- NtpServers []string `json:"NtpServers,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of DirectLink
-type DirectLink struct {
- AccountId string `json:"AccountId,omitempty"`
- Bandwidth string `json:"Bandwidth,omitempty"`
- DirectLinkId string `json:"DirectLinkId,omitempty"`
- DirectLinkName string `json:"DirectLinkName,omitempty"`
- Location string `json:"Location,omitempty"`
- RegionName string `json:"RegionName,omitempty"`
- State string `json:"State,omitempty"`
-}
-
-// implements the service definition of DirectLinkInterface
-type DirectLinkInterface struct {
- BgpAsn int64 `json:"BgpAsn,omitempty"`
- BgpKey string `json:"BgpKey,omitempty"`
- ClientPrivateIp string `json:"ClientPrivateIp,omitempty"`
- DirectLinkInterfaceName string `json:"DirectLinkInterfaceName,omitempty"`
- OutscalePrivateIp string `json:"OutscalePrivateIp,omitempty"`
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
- Vlan int64 `json:"Vlan,omitempty"`
-}
-
-// implements the service definition of DirectLinkInterfaces
-type DirectLinkInterfaces struct {
- AccountId string `json:"AccountId,omitempty"`
- BgpAsn int64 `json:"BgpAsn,omitempty"`
- BgpKey string `json:"BgpKey,omitempty"`
- ClientPrivateIp string `json:"ClientPrivateIp,omitempty"`
- DirectLinkId string `json:"DirectLinkId,omitempty"`
- DirectLinkInterfaceId string `json:"DirectLinkInterfaceId,omitempty"`
- DirectLinkInterfaceName string `json:"DirectLinkInterfaceName,omitempty"`
- InterfaceType string `json:"InterfaceType,omitempty"`
- Location string `json:"Location,omitempty"`
- OutscalePrivateIp string `json:"OutscalePrivateIp,omitempty"`
- State string `json:"State,omitempty"`
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
- Vlan int64 `json:"Vlan,omitempty"`
-}
-
-// implements the service definition of ErrorResponse
-type ErrorResponse struct {
- Errors []Errors `json:"Errors,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of Errors
-type Errors struct {
- Code string `json:"Code,omitempty"`
- Details string `json:"Details,omitempty"`
- Type string `json:"Type,omitempty"`
-}
-
-// implements the service definition of FiltersApiLog
-type FiltersApiLog struct {
- QueryAccessKeys []string `json:"QueryAccessKeys,omitempty"`
- QueryApiNames []string `json:"QueryApiNames,omitempty"`
- QueryCallNames []string `json:"QueryCallNames,omitempty"`
- QueryDateAfter string `json:"QueryDateAfter,omitempty"`
- QueryDateBefore string `json:"QueryDateBefore,omitempty"`
- QueryIpAddresses []string `json:"QueryIpAddresses,omitempty"`
- QueryUserAgents []string `json:"QueryUserAgents,omitempty"`
- ResponseIds []string `json:"ResponseIds,omitempty"`
- ResponseStatusCodes []int64 `json:"ResponseStatusCodes,omitempty"`
-}
-
-// implements the service definition of FiltersDhcpOptions
-type FiltersDhcpOptions struct {
- Defaults []string `json:"Defaults,omitempty"`
- DhcpOptionsSetIds []string `json:"DhcpOptionsSetIds,omitempty"`
- DomainNameServers []string `json:"DomainNameServers,omitempty"`
- DomainNames []string `json:"DomainNames,omitempty"`
- NtpServers []string `json:"NtpServers,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
-}
-
-// implements the service definition of FiltersExportTask
-type FiltersExportTask struct {
- TaskIds []string `json:"TaskIds,omitempty"`
-}
-
-// implements the service definition of FiltersImage
-type FiltersImage struct {
- AccountAliases []string `json:"AccountAliases,omitempty"`
- AccountIds []string `json:"AccountIds,omitempty"`
- Architectures []string `json:"Architectures,omitempty"`
- BlockDeviceMappingDeleteOnVmTermination bool `json:"BlockDeviceMappingDeleteOnVmTermination,omitempty"`
- BlockDeviceMappingDeviceNames []string `json:"BlockDeviceMappingDeviceNames,omitempty"`
- BlockDeviceMappingSnapshotIds []string `json:"BlockDeviceMappingSnapshotIds,omitempty"`
- BlockDeviceMappingVolumeSize []int64 `json:"BlockDeviceMappingVolumeSize,omitempty"`
- BlockDeviceMappingVolumeType []string `json:"BlockDeviceMappingVolumeType,omitempty"`
- Descriptions []string `json:"Descriptions,omitempty"`
- Hypervisors []string `json:"Hypervisors,omitempty"`
- ImageIds []string `json:"ImageIds,omitempty"`
- ImageNames []string `json:"ImageNames,omitempty"`
- ImageTypes []string `json:"ImageTypes,omitempty"`
- KernelIds []string `json:"KernelIds,omitempty"`
- ManifestLocation []string `json:"ManifestLocation,omitempty"`
- PermissionsToLaunchAccountIds []string `json:"PermissionsToLaunchAccountIds,omitempty"`
- PermissionsToLaunchGlobalPermission bool `json:"PermissionsToLaunchGlobalPermission,omitempty"`
- ProductCodes []string `json:"ProductCodes,omitempty"`
- RamDiskIds []string `json:"RamDiskIds,omitempty"`
- RootDeviceNames []string `json:"RootDeviceNames,omitempty"`
- RootDeviceTypes []string `json:"RootDeviceTypes,omitempty"`
- States []string `json:"States,omitempty"`
- System []string `json:"System,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
- VirtualizationTypes []string `json:"VirtualizationTypes,omitempty"`
-}
-
-// implements the service definition of FiltersInternetService
-type FiltersInternetService struct {
- InternetServiceIds []string `json:"InternetServiceIds,omitempty"`
-}
-
-// implements the service definition of FiltersKeypair
-type FiltersKeypair struct {
- KeypairFingerprints []string `json:"KeypairFingerprints,omitempty"`
- KeypairNames []string `json:"KeypairNames,omitempty"`
-}
-
-// implements the service definition of FiltersLoadBalancer
-type FiltersLoadBalancer struct {
- LoadBalancerNames []string `json:"LoadBalancerNames,omitempty"`
-}
-
-// implements the service definition of FiltersNatService
-type FiltersNatService struct {
- NatServiceIds []string `json:"NatServiceIds,omitempty"`
- NetIds []string `json:"NetIds,omitempty"`
- States []string `json:"States,omitempty"`
- SubnetIds []string `json:"SubnetIds,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
-}
-
-// implements the service definition of FiltersNet
-type FiltersNet struct {
- DhcpOptionsSetIds []string `json:"DhcpOptionsSetIds,omitempty"`
- IpRanges []string `json:"IpRanges,omitempty"`
- IsDefault bool `json:"IsDefault,omitempty"`
- NetIds []string `json:"NetIds,omitempty"`
- States []string `json:"States,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
-}
-
-// implements the service definition of FiltersNetPeering
-type FiltersNetPeering struct {
- AccepterNetAccountIds []string `json:"AccepterNetAccountIds,omitempty"`
- AccepterNetIpRanges []string `json:"AccepterNetIpRanges,omitempty"`
- AccepterNetNetIds []string `json:"AccepterNetNetIds,omitempty"`
- NetPeeringIds []string `json:"NetPeeringIds,omitempty"`
- SourceNetAccountIds []string `json:"SourceNetAccountIds,omitempty"`
- SourceNetIpRanges []string `json:"SourceNetIpRanges,omitempty"`
- SourceNetNetIds []string `json:"SourceNetNetIds,omitempty"`
- StateMessages []string `json:"StateMessages,omitempty"`
- StateNames []string `json:"StateNames,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
-}
-
-// implements the service definition of FiltersNic
-type FiltersNic struct {
- AccountIds []string `json:"AccountIds,omitempty"`
- ActivatedChecks []string `json:"ActivatedChecks,omitempty"`
- Descriptions []string `json:"Descriptions,omitempty"`
- LinkNicDeleteOnVmDeletion bool `json:"LinkNicDeleteOnVmDeletion,omitempty"`
- LinkNicLinkDates []string `json:"LinkNicLinkDates,omitempty"`
- LinkNicLinkNicIds []string `json:"LinkNicLinkNicIds,omitempty"`
- LinkNicSortNumbers []int64 `json:"LinkNicSortNumbers,omitempty"`
- LinkNicStates []string `json:"LinkNicStates,omitempty"`
- LinkNicVmAccountIds []string `json:"LinkNicVmAccountIds,omitempty"`
- LinkNicVmIds []string `json:"LinkNicVmIds,omitempty"`
- LinkPublicIpAccountIds []string `json:"LinkPublicIpAccountIds,omitempty"`
- LinkPublicIpLinkPublicIpIds []string `json:"LinkPublicIpLinkPublicIpIds,omitempty"`
- LinkPublicIpPublicIpIds []string `json:"LinkPublicIpPublicIpIds,omitempty"`
- LinkPublicIpPublicIps []string `json:"LinkPublicIpPublicIps,omitempty"`
- MacAddresses []string `json:"MacAddresses,omitempty"`
- NetIds []string `json:"NetIds,omitempty"`
- NicIds []string `json:"NicIds,omitempty"`
- PrivateDnsNames []string `json:"PrivateDnsNames,omitempty"`
- PrivateIpsLinkPublicIpAccountIds []string `json:"PrivateIpsLinkPublicIpAccountIds,omitempty"`
- PrivateIpsLinkPublicIpPublicIps []string `json:"PrivateIpsLinkPublicIpPublicIps,omitempty"`
- PrivateIpsPrimaryIp bool `json:"PrivateIpsPrimaryIp,omitempty"`
- PrivateIpsPrivateIps []string `json:"PrivateIpsPrivateIps,omitempty"`
- SecurityGroupIds []string `json:"SecurityGroupIds,omitempty"`
- SecurityGroupNames []string `json:"SecurityGroupNames,omitempty"`
- States []string `json:"States,omitempty"`
- SubnetIds []string `json:"SubnetIds,omitempty"`
- SubregionNames []string `json:"SubregionNames,omitempty"`
-}
-
-// implements the service definition of FiltersOldFormat
-type FiltersOldFormat struct {
- Name string `json:"Name,omitempty"`
- Values []string `json:"Values,omitempty"`
-}
-
-// implements the service definition of FiltersPublicIp
-type FiltersPublicIp struct {
- LinkPublicIpIds []string `json:"LinkPublicIpIds,omitempty"`
- NicAccountIds []string `json:"NicAccountIds,omitempty"`
- NicIds []string `json:"NicIds,omitempty"`
- Placements []string `json:"Placements,omitempty"`
- PrivateIps []string `json:"PrivateIps,omitempty"`
- PublicIpIds []string `json:"PublicIpIds,omitempty"`
- PublicIps []string `json:"PublicIps,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
-}
-
-// implements the service definition of FiltersRouteTable
-type FiltersRouteTable struct {
- LinkRouteTableIds []string `json:"LinkRouteTableIds,omitempty"`
- LinkRouteTableLinkRouteTableIds []string `json:"LinkRouteTableLinkRouteTableIds,omitempty"`
- LinkRouteTableMain bool `json:"LinkRouteTableMain,omitempty"`
- LinkSubnetIds []string `json:"LinkSubnetIds,omitempty"`
- NetIds []string `json:"NetIds,omitempty"`
- RouteCreationMethods []string `json:"RouteCreationMethods,omitempty"`
- RouteDestinationIpRanges []string `json:"RouteDestinationIpRanges,omitempty"`
- RouteDestinationPrefixListIds []string `json:"RouteDestinationPrefixListIds,omitempty"`
- RouteGatewayIds []string `json:"RouteGatewayIds,omitempty"`
- RouteNatServiceIds []string `json:"RouteNatServiceIds,omitempty"`
- RouteNetPeeringIds []string `json:"RouteNetPeeringIds,omitempty"`
- RouteStates []string `json:"RouteStates,omitempty"`
- RouteTableIds []string `json:"RouteTableIds,omitempty"`
- RouteVmIds []string `json:"RouteVmIds,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
-}
-
-// implements the service definition of FiltersSecurityGroup
-type FiltersSecurityGroup struct {
- AccountIds []string `json:"AccountIds,omitempty"`
- Descriptions []string `json:"Descriptions,omitempty"`
- InboundRuleAccountIds []string `json:"InboundRuleAccountIds,omitempty"`
- InboundRuleFromPortRanges []int64 `json:"InboundRuleFromPortRanges,omitempty"`
- InboundRuleIpRanges []string `json:"InboundRuleIpRanges,omitempty"`
- InboundRuleProtocols []string `json:"InboundRuleProtocols,omitempty"`
- InboundRuleSecurityGroupIds []string `json:"InboundRuleSecurityGroupIds,omitempty"`
- InboundRuleSecurityGroupNames []string `json:"InboundRuleSecurityGroupNames,omitempty"`
- InboundRuleToPortRanges []int64 `json:"InboundRuleToPortRanges,omitempty"`
- NetIds []string `json:"NetIds,omitempty"`
- OutboundRuleAccountIds []string `json:"OutboundRuleAccountIds,omitempty"`
- OutboundRuleFromPortRanges []int64 `json:"OutboundRuleFromPortRanges,omitempty"`
- OutboundRuleIpRanges []string `json:"OutboundRuleIpRanges,omitempty"`
- OutboundRuleProtocols []string `json:"OutboundRuleProtocols,omitempty"`
- OutboundRuleSecurityGroupIds []string `json:"OutboundRuleSecurityGroupIds,omitempty"`
- OutboundRuleSecurityGroupNames []string `json:"OutboundRuleSecurityGroupNames,omitempty"`
- OutboundRuleToPortRanges []int64 `json:"OutboundRuleToPortRanges,omitempty"`
- SecurityGroupIds []string `json:"SecurityGroupIds,omitempty"`
- SecurityGroupNames []string `json:"SecurityGroupNames,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
-}
-
-// implements the service definition of FiltersServices
-type FiltersServices struct {
- Attributes []Attribute `json:"Attributes,omitempty"`
- Endpoint string `json:"Endpoint,omitempty"`
- Schema string `json:"Schema,omitempty"`
- ServiceName string `json:"ServiceName,omitempty"`
-}
-
-// implements the service definition of FiltersSnapshot
-type FiltersSnapshot struct {
- AccountAliases []string `json:"AccountAliases,omitempty"`
- AccountIds []string `json:"AccountIds,omitempty"`
- Descriptions []string `json:"Descriptions,omitempty"`
- PermissionsToCreateVolumeAccountIds []string `json:"PermissionsToCreateVolumeAccountIds,omitempty"`
- PermissionsToCreateVolumeGlobalPermission bool `json:"PermissionsToCreateVolumeGlobalPermission,omitempty"`
- Progresses []int64 `json:"Progresses,omitempty"`
- SnapshotIds []string `json:"SnapshotIds,omitempty"`
- States []string `json:"States,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
- VolumeIds []string `json:"VolumeIds,omitempty"`
- VolumeSizes []int64 `json:"VolumeSizes,omitempty"`
-}
-
-// implements the service definition of FiltersSubnet
-type FiltersSubnet struct {
- AvailableIpsCounts []int64 `json:"AvailableIpsCounts,omitempty"`
- IpRanges []string `json:"IpRanges,omitempty"`
- NetIds []string `json:"NetIds,omitempty"`
- States []string `json:"States,omitempty"`
- SubnetIds []string `json:"SubnetIds,omitempty"`
- SubregionNames []string `json:"SubregionNames,omitempty"`
-}
-
-// implements the service definition of FiltersTag
-type FiltersTag struct {
- Keys []string `json:"Keys,omitempty"`
- ResourceIds []string `json:"ResourceIds,omitempty"`
- ResourceTypes []string `json:"ResourceTypes,omitempty"`
- Values []string `json:"Values,omitempty"`
-}
-
-// implements the service definition of FiltersUserGroup
-type FiltersUserGroup struct {
- Paths []string `json:"Paths,omitempty"`
- UserNames []string `json:"UserNames,omitempty"`
-}
-
-// implements the service definition of FiltersVm
-type FiltersVm struct {
- AccountIds []string `json:"AccountIds,omitempty"`
- ActivatedCheck bool `json:"ActivatedCheck,omitempty"`
- Architectures []string `json:"Architectures,omitempty"`
- BlockDeviceMappingDeleteOnVmDeletion *bool `json:"BlockDeviceMappingDeleteOnVmDeletion,omitempty"`
- BlockDeviceMappingDeviceNames []string `json:"BlockDeviceMappingDeviceNames,omitempty"`
- BlockDeviceMappingLinkDates []string `json:"BlockDeviceMappingLinkDates,omitempty"`
- BlockDeviceMappingStates []string `json:"BlockDeviceMappingStates,omitempty"`
- BlockDeviceMappingVolumeIds []string `json:"BlockDeviceMappingVolumeIds,omitempty"`
- Comments []string `json:"Comments,omitempty"`
- CreationDates []string `json:"CreationDates,omitempty"`
- DnsNames []string `json:"DnsNames,omitempty"`
- Hypervisors []string `json:"Hypervisors,omitempty"`
- ImageIds []string `json:"ImageIds,omitempty"`
- KernelIds []string `json:"KernelIds,omitempty"`
- KeypairNames []string `json:"KeypairNames,omitempty"`
- LaunchSortNumbers []int64 `json:"LaunchSortNumbers,omitempty"`
- LinkNicDeleteOnVmDeletion bool `json:"LinkNicDeleteOnVmDeletion,omitempty"`
- LinkNicLinkDates []string `json:"LinkNicLinkDates,omitempty"`
- LinkNicLinkNicIds []string `json:"LinkNicLinkNicIds,omitempty"`
- LinkNicLinkPublicIpIds []string `json:"LinkNicLinkPublicIpIds,omitempty"`
- LinkNicNicIds []string `json:"LinkNicNicIds,omitempty"`
- LinkNicNicSortNumbers []int64 `json:"LinkNicNicSortNumbers,omitempty"`
- LinkNicPublicIpAccountIds []string `json:"LinkNicPublicIpAccountIds,omitempty"`
- LinkNicPublicIpIds []string `json:"LinkNicPublicIpIds,omitempty"`
- LinkNicPublicIps []string `json:"LinkNicPublicIps,omitempty"`
- LinkNicStates []string `json:"LinkNicStates,omitempty"`
- LinkNicVmAccountIds []string `json:"LinkNicVmAccountIds,omitempty"`
- LinkNicVmIds []string `json:"LinkNicVmIds,omitempty"`
- MonitoringStates []string `json:"MonitoringStates,omitempty"`
- NetIds []string `json:"NetIds,omitempty"`
- NicAccountIds []string `json:"NicAccountIds,omitempty"`
- NicActivatedCheck bool `json:"NicActivatedCheck,omitempty"`
- NicDescriptions []string `json:"NicDescriptions,omitempty"`
- NicMacAddresses []string `json:"NicMacAddresses,omitempty"`
- NicNetIds []string `json:"NicNetIds,omitempty"`
- NicNicIds []string `json:"NicNicIds,omitempty"`
- NicPrivateDnsNames []string `json:"NicPrivateDnsNames,omitempty"`
- NicSecurityGroupIds []string `json:"NicSecurityGroupIds,omitempty"`
- NicSecurityGroupNames []string `json:"NicSecurityGroupNames,omitempty"`
- NicStates []string `json:"NicStates,omitempty"`
- NicSubnetIds []string `json:"NicSubnetIds,omitempty"`
- NicSubregionNames []string `json:"NicSubregionNames,omitempty"`
- PlacementGroups []string `json:"PlacementGroups,omitempty"`
- PrivateDnsNames []string `json:"PrivateDnsNames,omitempty"`
- PrivateIpLinkPrivateIpAccountIds []string `json:"PrivateIpLinkPrivateIpAccountIds,omitempty"`
- PrivateIpLinkPublicIps []string `json:"PrivateIpLinkPublicIps,omitempty"`
- PrivateIpPrimaryIps []string `json:"PrivateIpPrimaryIps,omitempty"`
- PrivateIpPrivateIps []string `json:"PrivateIpPrivateIps,omitempty"`
- PrivateIps []string `json:"PrivateIps,omitempty"`
- ProductCodes []string `json:"ProductCodes,omitempty"`
- PublicIps []string `json:"PublicIps,omitempty"`
- RamDiskIds []string `json:"RamDiskIds,omitempty"`
- RootDeviceNames []string `json:"RootDeviceNames,omitempty"`
- RootDeviceTypes []string `json:"RootDeviceTypes,omitempty"`
- SecurityGroupIds []string `json:"SecurityGroupIds,omitempty"`
- SecurityGroupNames []string `json:"SecurityGroupNames,omitempty"`
- SpotVmRequestIds []string `json:"SpotVmRequestIds,omitempty"`
- SpotVms []string `json:"SpotVms,omitempty"`
- StateComments []string `json:"StateComments,omitempty"`
- SubnetIds []string `json:"SubnetIds,omitempty"`
- SubregionNames []string `json:"SubregionNames,omitempty"`
- Systems []string `json:"Systems,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
- Tenancies []string `json:"Tenancies,omitempty"`
- Tokens []string `json:"Tokens,omitempty"`
- VirtualizationTypes []string `json:"VirtualizationTypes,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
- VmStates []string `json:"VmStates,omitempty"`
- VmTypes []string `json:"VmTypes,omitempty"`
- VmsSecurityGroupIds []string `json:"VmsSecurityGroupIds,omitempty"`
- VmsSecurityGroupNames []string `json:"VmsSecurityGroupNames,omitempty"`
-}
-
-// implements the service definition of FiltersVmsState
-type FiltersVmsState struct {
- MaintenanceEventCodes []string `json:"MaintenanceEventCodes,omitempty"`
- MaintenanceEventDescriptions []string `json:"MaintenanceEventDescriptions,omitempty"`
- MaintenanceEventsNotAfter []string `json:"MaintenanceEventsNotAfter,omitempty"`
- MaintenanceEventsNotBefore []string `json:"MaintenanceEventsNotBefore,omitempty"`
- SubregionNames []string `json:"SubregionNames,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
- VmStates []string `json:"VmStates,omitempty"`
-}
-
-// implements the service definition of FiltersVolume
-type FiltersVolume struct {
- CreationDates []string `json:"CreationDates,omitempty"`
- SnapshotIds []string `json:"SnapshotIds,omitempty"`
- SubregionNames []string `json:"SubregionNames,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
- VolumeIds []string `json:"VolumeIds,omitempty"`
- VolumeSizes []int64 `json:"VolumeSizes,omitempty"`
- VolumeTypes []string `json:"VolumeTypes,omitempty"`
-}
-
-// implements the service definition of FiltersVpnConnection
-type FiltersVpnConnection struct {
- ConnectionTypes []string `json:"ConnectionTypes,omitempty"`
- NetToVirtualGatewayLinkNetIds []string `json:"NetToVirtualGatewayLinkNetIds,omitempty"`
- NetToVirtualGatewayLinkStates []string `json:"NetToVirtualGatewayLinkStates,omitempty"`
- States []string `json:"States,omitempty"`
- TagKeys []string `json:"TagKeys,omitempty"`
- TagValues []string `json:"TagValues,omitempty"`
- Tags []string `json:"Tags,omitempty"`
- VirtualGatewayIds []string `json:"VirtualGatewayIds,omitempty"`
-}
-
-// implements the service definition of HealthCheck
-type HealthCheck struct {
- CheckInterval int64 `json:"CheckInterval,omitempty"`
- HealthyThreshold int64 `json:"HealthyThreshold,omitempty"`
- Path string `json:"Path,omitempty"`
- Port int64 `json:"Port,omitempty"`
- Protocol string `json:"Protocol,omitempty"`
- Timeout int64 `json:"Timeout,omitempty"`
- UnhealthyThreshold int64 `json:"UnhealthyThreshold,omitempty"`
-}
-
-// implements the service definition of Image
-type Image struct {
- AccountAlias string `json:"AccountAlias,omitempty"`
- AccountId string `json:"AccountId,omitempty"`
- Architecture string `json:"Architecture,omitempty"`
- BlockDeviceMappings []BlockDeviceMappingImage `json:"BlockDeviceMappings,omitempty"`
- CreationDate string `json:"CreationDate,omitempty"`
- Description string `json:"Description,omitempty"`
- FileLocation string `json:"FileLocation,omitempty"`
- ImageId string `json:"ImageId,omitempty"`
- ImageName string `json:"ImageName,omitempty"`
- ImageType string `json:"ImageType,omitempty"`
- PermissionsToLaunch PermissionsOnResource `json:"PermissionsToLaunch,omitempty"`
- ProductCodes []string `json:"ProductCodes,omitempty"`
- RootDeviceName string `json:"RootDeviceName,omitempty"`
- RootDeviceType string `json:"RootDeviceType,omitempty"`
- State string `json:"State,omitempty"`
- StateComment StateComment `json:"StateComment,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of ImageExportTask
-type ImageExportTask struct {
- Comment string `json:"Comment,omitempty"`
- ImageId string `json:"ImageId,omitempty"`
- OsuExport OsuExport `json:"OsuExport,omitempty"`
- Progress int64 `json:"Progress,omitempty"`
- State string `json:"State,omitempty"`
- TaskId string `json:"TaskId,omitempty"`
-}
-
-// implements the service definition of InternetService
-type InternetService struct {
- InternetServiceId string `json:"InternetServiceId,omitempty"`
- NetId string `json:"NetId,omitempty"`
- State string `json:"State,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of Item
-type Item struct {
- AccountId string `json:"AccountId,omitempty"`
- Catalog []Catalog_0 `json:"Catalog,omitempty"`
- ComsuptionValue int `json:"ComsuptionValue,omitempty"`
- Entry string `json:"Entry,omitempty"`
- FromDate string `json:"FromDate,omitempty"`
- PayingAccountId string `json:"PayingAccountId,omitempty"`
- Service string `json:"Service,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
- ToDate string `json:"ToDate,omitempty"`
- Type string `json:"Type,omitempty"`
-}
-
-// implements the service definition of Keypair
-type Keypair struct {
- KeypairFingerprint string `json:"KeypairFingerprint,omitempty"`
- KeypairName string `json:"KeypairName,omitempty"`
-}
-
-// implements the service definition of KeypairCreated
-type KeypairCreated struct {
- KeypairFingerprint string `json:"KeypairFingerprint,omitempty"`
- KeypairName string `json:"KeypairName,omitempty"`
- PrivateKey string `json:"PrivateKey,omitempty"`
-}
-
-// implements the service definition of LinkInternetServiceRequest
-type LinkInternetServiceRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- InternetServiceId string `json:"InternetServiceId,omitempty"`
- NetId string `json:"NetId,omitempty"`
-}
-
-// implements the service definition of LinkInternetServiceResponse
-type LinkInternetServiceResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of LinkNic
-type LinkNic struct {
- DeleteOnVmDeletion *bool `json:"DeleteOnVmDeletion,omitempty"`
- DeviceNumber int64 `json:"DeviceNumber,omitempty"`
- LinkNicId string `json:"LinkNicId,omitempty"`
- State string `json:"State,omitempty"`
- VmAccountId string `json:"VmAccountId,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of LinkNicLight
-type LinkNicLight struct {
- DeleteOnVmDeletion *bool `json:"DeleteOnVmDeletion,omitempty"`
- DeviceNumber int64 `json:"DeviceNumber,omitempty"`
- LinkNicId string `json:"LinkNicId,omitempty"`
- State string `json:"State,omitempty"`
-}
-
-// implements the service definition of LinkNicRequest
-type LinkNicRequest struct {
- DeviceNumber int64 `json:"DeviceNumber,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- NicId string `json:"NicId,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of LinkNicResponse
-type LinkNicResponse struct {
- LinkNicId string `json:"LinkNicId,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of LinkNicToUpdate
-type LinkNicToUpdate struct {
- DeleteOnVmDeletion *bool `json:"DeleteOnVmDeletion,omitempty"`
- LinkNicId string `json:"LinkNicId,omitempty"`
-}
-
-// implements the service definition of LinkPolicyRequest
-type LinkPolicyRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- PolicyId string `json:"PolicyId,omitempty"`
- UserGroupName string `json:"UserGroupName,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of LinkPolicyResponse
-type LinkPolicyResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of LinkPrivateIpsRequest
-type LinkPrivateIpsRequest struct {
- AllowRelink bool `json:"AllowRelink,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- NicId string `json:"NicId,omitempty"`
- PrivateIps []string `json:"PrivateIps,omitempty"`
- SecondaryPrivateIpCount int64 `json:"SecondaryPrivateIpCount,omitempty"`
-}
-
-// implements the service definition of LinkPrivateIpsResponse
-type LinkPrivateIpsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of LinkPublicIp
-type LinkPublicIp struct {
- LinkPublicIpId string `json:"LinkPublicIpId,omitempty"`
- PublicDnsName string `json:"PublicDnsName,omitempty"`
- PublicIp string `json:"PublicIp,omitempty"`
- PublicIpAccountId string `json:"PublicIpAccountId,omitempty"`
- PublicIpId string `json:"PublicIpId,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of LinkPublicIpLightForVm
-type LinkPublicIpLightForVm struct {
- PublicDnsName string `json:"PublicDnsName,omitempty"`
- PublicIp string `json:"PublicIp,omitempty"`
- PublicIpAccountId string `json:"PublicIpAccountId,omitempty"`
-}
-
-// implements the service definition of LinkPublicIpRequest
-type LinkPublicIpRequest struct {
- AllowRelink bool `json:"AllowRelink,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- NicId string `json:"NicId,omitempty"`
- PrivateIp string `json:"PrivateIp,omitempty"`
- PublicIp string `json:"PublicIp,omitempty"`
- PublicIpId string `json:"PublicIpId,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of LinkPublicIpResponse
-type LinkPublicIpResponse struct {
- LinkPublicIpId string `json:"LinkPublicIpId,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of LinkRouteTable
-type LinkRouteTable struct {
- LinkRouteTableId string `json:"LinkRouteTableId,omitempty"`
- Main bool `json:"Main,omitempty"`
- RouteTableId string `json:"RouteTableId,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
-}
-
-// implements the service definition of LinkRouteTableRequest
-type LinkRouteTableRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- RouteTableId string `json:"RouteTableId,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
-}
-
-// implements the service definition of LinkRouteTableResponse
-type LinkRouteTableResponse struct {
- LinkRouteTableId string `json:"LinkRouteTableId,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of LinkVirtualGatewayRequest
-type LinkVirtualGatewayRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NetId string `json:"NetId,omitempty"`
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
-}
-
-// implements the service definition of LinkVirtualGatewayResponse
-type LinkVirtualGatewayResponse struct {
- NetToVirtualGatewayLink NetToVirtualGatewayLink `json:"NetToVirtualGatewayLink,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of LinkVolumeRequest
-type LinkVolumeRequest struct {
- DeviceName string `json:"DeviceName,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- VmId string `json:"VmId,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
-}
-
-// implements the service definition of LinkVolumeResponse
-type LinkVolumeResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of LinkedVolume
-type LinkedVolume struct {
- DeleteOnVmDeletion *bool `json:"DeleteOnVmDeletion,omitempty"`
- DeviceName string `json:"DeviceName,omitempty"`
- State string `json:"State,omitempty"`
- VmId string `json:"VmId,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
-}
-
-// implements the service definition of Listener
-type Listener struct {
- BackendPort int64 `json:"BackendPort,omitempty"`
- BackendProtocol string `json:"BackendProtocol,omitempty"`
- LoadBalancerPort int64 `json:"LoadBalancerPort,omitempty"`
- LoadBalancerProtocol string `json:"LoadBalancerProtocol,omitempty"`
- PolicyNames []string `json:"PolicyNames,omitempty"`
- ServerCertificateId string `json:"ServerCertificateId,omitempty"`
-}
-
-// implements the service definition of ListenerForCreation
-type ListenerForCreation struct {
- BackendPort int64 `json:"BackendPort,omitempty"`
- BackendProtocol string `json:"BackendProtocol,omitempty"`
- LoadBalancerPort int64 `json:"LoadBalancerPort,omitempty"`
- LoadBalancerProtocol string `json:"LoadBalancerProtocol,omitempty"`
- ServerCertificateId string `json:"ServerCertificateId,omitempty"`
-}
-
-// implements the service definition of ListenerRule
-type ListenerRule struct {
- Action string `json:"Action,omitempty"`
- HostNamePattern string `json:"HostNamePattern,omitempty"`
- ListenerRuleId string `json:"ListenerRuleId,omitempty"`
- ListenerRuleName string `json:"ListenerRuleName,omitempty"`
- PathPattern string `json:"PathPattern,omitempty"`
- Priority int64 `json:"Priority,omitempty"`
-}
-
-// implements the service definition of ListenerRules
-type ListenerRules struct {
- ListenerId string `json:"ListenerId,omitempty"`
- ListenerRule ListenerRule `json:"ListenerRule,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
-}
-
-// implements the service definition of LoadBalancer
-type LoadBalancer struct {
- AccessLog AccessLog `json:"AccessLog,omitempty"`
- ApplicationStickyCookiePolicies []ApplicationStickyCookiePolicy `json:"ApplicationStickyCookiePolicies,omitempty"`
- BackendVmsIds []string `json:"BackendVmsIds,omitempty"`
- DnsName string `json:"DnsName,omitempty"`
- HealthCheck HealthCheck `json:"HealthCheck,omitempty"`
- Listeners []Listener `json:"Listeners,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
- LoadBalancerStickyCookiePolicies []LoadBalancerStickyCookiePolicy `json:"LoadBalancerStickyCookiePolicies,omitempty"`
- LoadBalancerType string `json:"LoadBalancerType,omitempty"`
- NetId string `json:"NetId,omitempty"`
- SecurityGroups []string `json:"SecurityGroups,omitempty"`
- SourceSecurityGroup SourceSecurityGroup `json:"SourceSecurityGroup,omitempty"`
- Subnets []string `json:"Subnets,omitempty"`
- SubregionNames []string `json:"SubregionNames,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of LoadBalancerLight
-type LoadBalancerLight struct {
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
- LoadBalancerPort int64 `json:"LoadBalancerPort,omitempty"`
-}
-
-// implements the service definition of LoadBalancerStickyCookiePolicy
-type LoadBalancerStickyCookiePolicy struct {
- PolicyName string `json:"PolicyName,omitempty"`
-}
-
-// implements the service definition of Location
-type Location struct {
- Code string `json:"Code,omitempty"`
- Name string `json:"Name,omitempty"`
-}
-
-// implements the service definition of Log
-type Log struct {
- CallDuration int64 `json:"CallDuration,omitempty"`
- QueryAccessKey string `json:"QueryAccessKey,omitempty"`
- QueryApiName string `json:"QueryApiName,omitempty"`
- QueryApiVersion string `json:"QueryApiVersion,omitempty"`
- QueryCallName string `json:"QueryCallName,omitempty"`
- QueryDate string `json:"QueryDate,omitempty"`
- QueryIpAddress string `json:"QueryIpAddress,omitempty"`
- QueryRaw string `json:"QueryRaw,omitempty"`
- QuerySize int64 `json:"QuerySize,omitempty"`
- QueryUserAgent string `json:"QueryUserAgent,omitempty"`
- ResponseId string `json:"ResponseId,omitempty"`
- ResponseSize int64 `json:"ResponseSize,omitempty"`
- ResponseStatusCode int64 `json:"ResponseStatusCode,omitempty"`
-}
-
-// implements the service definition of MaintenanceEvent
-type MaintenanceEvent struct {
- Code string `json:"Code,omitempty"`
- Description string `json:"Description,omitempty"`
- NotAfter string `json:"NotAfter,omitempty"`
- NotBefore string `json:"NotBefore,omitempty"`
-}
-
-// implements the service definition of NatService
-type NatService struct {
- NatServiceId string `json:"NatServiceId,omitempty"`
- NetId string `json:"NetId,omitempty"`
- PublicIps []PublicIpLight `json:"PublicIps,omitempty"`
- State string `json:"State,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of Net
-type Net struct {
- DhcpOptionsSetId string `json:"DhcpOptionsSetId,omitempty"`
- IpRange string `json:"IpRange,omitempty"`
- NetId string `json:"NetId,omitempty"`
- State string `json:"State,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
- Tenancy string `json:"Tenancy,omitempty"`
-}
-
-// implements the service definition of NetAccessPoint
-type NetAccessPoint struct {
- NetAccessPointId string `json:"NetAccessPointId,omitempty"`
- NetId string `json:"NetId,omitempty"`
- PrefixListName string `json:"PrefixListName,omitempty"`
- RouteTableIds []string `json:"RouteTableIds,omitempty"`
- State string `json:"State,omitempty"`
-}
-
-// implements the service definition of NetPeering
-type NetPeering struct {
- AccepterNet AccepterNet `json:"AccepterNet,omitempty"`
- NetPeeringId string `json:"NetPeeringId,omitempty"`
- SourceNet SourceNet `json:"SourceNet,omitempty"`
- State NetPeeringState `json:"State,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of NetPeeringState
-type NetPeeringState struct {
- Message string `json:"Message,omitempty"`
- Name string `json:"Name,omitempty"`
-}
-
-// implements the service definition of NetToVirtualGatewayLink
-type NetToVirtualGatewayLink struct {
- NetId string `json:"NetId,omitempty"`
- State string `json:"State,omitempty"`
-}
-
-// implements the service definition of Nic
-type Nic struct {
- AccountId string `json:"AccountId,omitempty"`
- Description string `json:"Description,omitempty"`
- IsSourceDestChecked bool `json:"IsSourceDestChecked,omitempty"`
- LinkNic LinkNic `json:"LinkNic,omitempty"`
- LinkPublicIp LinkPublicIp `json:"LinkPublicIp,omitempty"`
- MacAddress string `json:"MacAddress,omitempty"`
- NetId string `json:"NetId,omitempty"`
- NicId string `json:"NicId,omitempty"`
- PrivateDnsName string `json:"PrivateDnsName,omitempty"`
- PrivateIps []PrivateIp `json:"PrivateIps,omitempty"`
- SecurityGroups []SecurityGroupLight `json:"SecurityGroups,omitempty"`
- State string `json:"State,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of NicForVmCreation
-type NicForVmCreation struct {
- DeleteOnVmDeletion bool `json:"DeleteOnVmDeletion,omitempty"`
- Description string `json:"Description,omitempty"`
- DeviceNumber int64 `json:"DeviceNumber"`
- NicId string `json:"NicId,omitempty"`
- PrivateIps []PrivateIpLight `json:"PrivateIps,omitempty"`
- SecondaryPrivateIpCount int64 `json:"SecondaryPrivateIpCount,omitempty"`
- SecurityGroupIds []string `json:"SecurityGroupIds,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
-}
-
-// implements the service definition of NicLight
-type NicLight struct {
- AccountId string `json:"AccountId,omitempty"`
- Description string `json:"Description,omitempty"`
- IsSourceDestChecked bool `json:"IsSourceDestChecked,omitempty"`
- LinkNic LinkNicLight `json:"LinkNic,omitempty"`
- LinkPublicIp LinkPublicIpLightForVm `json:"LinkPublicIp,omitempty"`
- MacAddress string `json:"MacAddress,omitempty"`
- NetId string `json:"NetId,omitempty"`
- NicId string `json:"NicId,omitempty"`
- PrivateDnsName string `json:"PrivateDnsName,omitempty"`
- PrivateIps []PrivateIpLightForVm `json:"PrivateIps,omitempty"`
- SecurityGroups []SecurityGroupLight `json:"SecurityGroups,omitempty"`
- State string `json:"State,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
-}
-
-// implements the service definition of OsuApiKey
-type OsuApiKey struct {
- ApiKeyId string `json:"ApiKeyId,omitempty"`
- SecretKey string `json:"SecretKey,omitempty"`
-}
-
-// implements the service definition of OsuExport
-type OsuExport struct {
- DiskImageFormat string `json:"DiskImageFormat,omitempty"`
- OsuApiKey OsuApiKey `json:"OsuApiKey,omitempty"`
- OsuBucket string `json:"OsuBucket,omitempty"`
- OsuManifestUrl string `json:"OsuManifestUrl,omitempty"`
- OsuPrefix string `json:"OsuPrefix,omitempty"`
-}
-
-// implements the service definition of PermissionsOnResource
-type PermissionsOnResource struct {
- AccountIds []string `json:"AccountIds,omitempty"`
- GlobalPermission bool `json:"GlobalPermission,omitempty"`
-}
-
-// implements the service definition of PermissionsOnResourceCreation
-type PermissionsOnResourceCreation struct {
- Additions PermissionsOnResource `json:"Additions,omitempty"`
- Removals PermissionsOnResource `json:"Removals,omitempty"`
-}
-
-// implements the service definition of Placement
-type Placement struct {
- SubregionName string `json:"SubregionName,omitempty"`
- Tenancy string `json:"Tenancy,omitempty"`
-}
-
-// implements the service definition of Policy
-type Policy struct {
- Description string `json:"Description,omitempty"`
- IsLinkable bool `json:"IsLinkable,omitempty"`
- Path string `json:"Path,omitempty"`
- PolicyDefaultVersionId string `json:"PolicyDefaultVersionId,omitempty"`
- PolicyId string `json:"PolicyId,omitempty"`
- PolicyName string `json:"PolicyName,omitempty"`
- ResourcesCount int64 `json:"ResourcesCount,omitempty"`
-}
-
-// implements the service definition of PrefixLists
-type PrefixLists struct {
- IpRanges []string `json:"IpRanges,omitempty"`
- PrefixListId string `json:"PrefixListId,omitempty"`
- PrefixListName string `json:"PrefixListName,omitempty"`
-}
-
-// implements the service definition of PricingDetail
-type PricingDetail struct {
- Count int64 `json:"Count,omitempty"`
-}
-
-// implements the service definition of PrivateIp
-type PrivateIp struct {
- IsPrimary bool `json:"IsPrimary,omitempty"`
- LinkPublicIp LinkPublicIp `json:"LinkPublicIp,omitempty"`
- PrivateDnsName string `json:"PrivateDnsName,omitempty"`
- PrivateIp string `json:"PrivateIp,omitempty"`
-}
-
-// implements the service definition of PrivateIpLight
-type PrivateIpLight struct {
- IsPrimary bool `json:"IsPrimary"`
- PrivateIp string `json:"PrivateIp,omitempty"`
-}
-
-// implements the service definition of PrivateIpLightForVm
-type PrivateIpLightForVm struct {
- IsPrimary bool `json:"IsPrimary,omitempty"`
- LinkPublicIp LinkPublicIpLightForVm `json:"LinkPublicIp,omitempty"`
- PrivateDnsName string `json:"PrivateDnsName,omitempty"`
- PrivateIp string `json:"PrivateIp,omitempty"`
-}
-
-// implements the service definition of ProductType
-type ProductType struct {
- Description string `json:"Description,omitempty"`
- ProductTypeId string `json:"ProductTypeId,omitempty"`
- Vendor string `json:"Vendor,omitempty"`
-}
-
-// implements the service definition of PublicIp
-type PublicIp struct {
- LinkPublicIpId string `json:"LinkPublicIpId,omitempty"`
- NicAccountId string `json:"NicAccountId,omitempty"`
- NicId string `json:"NicId,omitempty"`
- PrivateIp string `json:"PrivateIp,omitempty"`
- PublicIp string `json:"PublicIp,omitempty"`
- PublicIpId string `json:"PublicIpId,omitempty"`
- VmId string `json:"VmId,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of PublicIpLight
-type PublicIpLight struct {
- PublicIp string `json:"PublicIp,omitempty"`
- PublicIpId string `json:"PublicIpId,omitempty"`
-}
-
-// implements the service definition of PurchaseReservedVmsOfferRequest
-type PurchaseReservedVmsOfferRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ReservedVmsOfferId string `json:"ReservedVmsOfferId,omitempty"`
- VmCount int64 `json:"VmCount,omitempty"`
-}
-
-// implements the service definition of PurchaseReservedVmsOfferResponse
-type PurchaseReservedVmsOfferResponse struct {
- ReservedVmsId string `json:"ReservedVmsId,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of Quota
-type Quota struct {
- AccountId string `json:"AccountId,omitempty"`
- Description string `json:"Description,omitempty"`
- MaxValue int64 `json:"MaxValue,omitempty"`
- Name string `json:"Name,omitempty"`
- QuotaCollection string `json:"QuotaCollection,omitempty"`
- ShortDescription string `json:"ShortDescription,omitempty"`
- UsedValue int64 `json:"UsedValue,omitempty"`
-}
-
-// implements the service definition of QuotaTypes
-type QuotaTypes struct {
- QuotaType string `json:"QuotaType,omitempty"`
- Quotas []Quota `json:"Quotas,omitempty"`
-}
-
-// implements the service definition of ReadAccountConsumptionRequest
-type ReadAccountConsumptionRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- FromDate string `json:"FromDate,omitempty"`
- ToDate string `json:"ToDate,omitempty"`
-}
-
-// implements the service definition of ReadAccountConsumptionResponse
-type ReadAccountConsumptionResponse struct {
- ConsumptionEntries ConsumptionEntries `json:"ConsumptionEntries,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadAccountRequest
-type ReadAccountRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of ReadAccountResponse
-type ReadAccountResponse struct {
- Account Account `json:"Account,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadAdminPasswordRequest
-type ReadAdminPasswordRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of ReadAdminPasswordResponse
-type ReadAdminPasswordResponse struct {
- AdminPassword string `json:"AdminPassword,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of ReadApiKeysRequest
-type ReadApiKeysRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of ReadApiKeysResponse
-type ReadApiKeysResponse struct {
- ApiKeys []ApiKey `json:"ApiKeys,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadApiLogsRequest
-type ReadApiLogsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersApiLog `json:"Filters,omitempty"`
- With With `json:"With,omitempty"`
-}
-
-// implements the service definition of ReadApiLogsResponse
-type ReadApiLogsResponse struct {
- Logs []Log `json:"Logs,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadBillableDigestRequest
-type ReadBillableDigestRequest struct {
- AccountId string `json:"AccountId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- FromDate string `json:"FromDate,omitempty"`
- InvoiceState string `json:"InvoiceState,omitempty"`
- IsConsolidated bool `json:"IsConsolidated,omitempty"`
- ToDate string `json:"ToDate,omitempty"`
-}
-
-// implements the service definition of ReadBillableDigestResponse
-type ReadBillableDigestResponse struct {
- Items []Item `json:"Items,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadCatalogRequest
-type ReadCatalogRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of ReadCatalogResponse
-type ReadCatalogResponse struct {
- Catalog Catalog_1 `json:"Catalog,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadClientGatewaysRequest
-type ReadClientGatewaysRequest struct {
- ClientGatewayIds []string `json:"ClientGatewayIds,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadClientGatewaysResponse
-type ReadClientGatewaysResponse struct {
- ClientGateways []ClientGateway `json:"ClientGateways,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadConsoleOutputRequest
-type ReadConsoleOutputRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of ReadConsoleOutputResponse
-type ReadConsoleOutputResponse struct {
- ConsoleOutput string `json:"ConsoleOutput,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of ReadDhcpOptionsRequest
-type ReadDhcpOptionsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersDhcpOptions `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadDhcpOptionsResponse
-type ReadDhcpOptionsResponse struct {
- DhcpOptionsSets []DhcpOptionsSet `json:"DhcpOptionsSets,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadDirectLinkInterfacesRequest
-type ReadDirectLinkInterfacesRequest struct {
- DirectLinkId string `json:"DirectLinkId,omitempty"`
- DirectLinkInterfaceId string `json:"DirectLinkInterfaceId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of ReadDirectLinkInterfacesResponse
-type ReadDirectLinkInterfacesResponse struct {
- DirectLinkInterfaces []DirectLinkInterfaces `json:"DirectLinkInterfaces,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadDirectLinksRequest
-type ReadDirectLinksRequest struct {
- DirectLinkId string `json:"DirectLinkId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of ReadDirectLinksResponse
-type ReadDirectLinksResponse struct {
- DirectLinks []DirectLink `json:"DirectLinks,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadImageExportTasksRequest
-type ReadImageExportTasksRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersExportTask `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadImageExportTasksResponse
-type ReadImageExportTasksResponse struct {
- ImageExportTasks []ImageExportTask `json:"ImageExportTasks,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadImagesRequest
-type ReadImagesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersImage `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadImagesResponse
-type ReadImagesResponse struct {
- Images []Image `json:"Images,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadInternetServicesRequest
-type ReadInternetServicesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersInternetService `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadInternetServicesResponse
-type ReadInternetServicesResponse struct {
- InternetServices []InternetService `json:"InternetServices,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadKeypairsRequest
-type ReadKeypairsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersKeypair `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadKeypairsResponse
-type ReadKeypairsResponse struct {
- Keypairs []Keypair `json:"Keypairs,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadListenerRulesRequest
-type ReadListenerRulesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ListenerRuleNames []string `json:"ListenerRuleNames,omitempty"`
-}
-
-// implements the service definition of ReadListenerRulesResponse
-type ReadListenerRulesResponse struct {
- ListenerRules []ListenerRules `json:"ListenerRules,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadLoadBalancersRequest
-type ReadLoadBalancersRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersLoadBalancer `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadLoadBalancersResponse
-type ReadLoadBalancersResponse struct {
- LoadBalancers []LoadBalancer `json:"LoadBalancers,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadLocationsRequest
-type ReadLocationsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of ReadLocationsResponse
-type ReadLocationsResponse struct {
- Locations []Location `json:"Locations,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadNatServicesRequest
-type ReadNatServicesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersNatService `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadNatServicesResponse
-type ReadNatServicesResponse struct {
- NatServices []NatService `json:"NatServices,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadNetAccessPointServicesRequest
-type ReadNetAccessPointServicesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of ReadNetAccessPointServicesResponse
-type ReadNetAccessPointServicesResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- ServiceNames []string `json:"ServiceNames,omitempty"`
-}
-
-// implements the service definition of ReadNetAccessPointsRequest
-type ReadNetAccessPointsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
- NetAccessPointIds []string `json:"NetAccessPointIds,omitempty"`
-}
-
-// implements the service definition of ReadNetAccessPointsResponse
-type ReadNetAccessPointsResponse struct {
- NetAccessPoints []NetAccessPoint `json:"NetAccessPoints,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadNetPeeringsRequest
-type ReadNetPeeringsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersNetPeering `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadNetPeeringsResponse
-type ReadNetPeeringsResponse struct {
- NetPeerings []NetPeering `json:"NetPeerings,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadNetsRequest
-type ReadNetsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersNet `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadNetsResponse
-type ReadNetsResponse struct {
- Nets []Net `json:"Nets,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadNicsRequest
-type ReadNicsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersNic `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadNicsResponse
-type ReadNicsResponse struct {
- Nics []Nic `json:"Nics,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadPoliciesRequest
-type ReadPoliciesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- IsLinked bool `json:"IsLinked,omitempty"`
- Path string `json:"Path,omitempty"`
- UserGroupName string `json:"UserGroupName,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of ReadPoliciesResponse
-type ReadPoliciesResponse struct {
- Policies []Policy `json:"Policies,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadPrefixListsRequest
-type ReadPrefixListsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
- PrefixListIds []string `json:"PrefixListIds,omitempty"`
-}
-
-// implements the service definition of ReadPrefixListsResponse
-type ReadPrefixListsResponse struct {
- PrefixLists []PrefixLists `json:"PrefixLists,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadProductTypesRequest
-type ReadProductTypesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadProductTypesResponse
-type ReadProductTypesResponse struct {
- ProductTypes []ProductType `json:"ProductTypes,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadPublicCatalogRequest
-type ReadPublicCatalogRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of ReadPublicCatalogResponse
-type ReadPublicCatalogResponse struct {
- Catalog Catalog_1 `json:"Catalog,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadPublicIpRangesRequest
-type ReadPublicIpRangesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
-}
-
-// implements the service definition of ReadPublicIpRangesResponse
-type ReadPublicIpRangesResponse struct {
- PublicIps []string `json:"PublicIps,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadPublicIpsRequest
-type ReadPublicIpsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersPublicIp `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadPublicIpsResponse
-type ReadPublicIpsResponse struct {
- PublicIps []PublicIp `json:"PublicIps,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadQuotasRequest
-type ReadQuotasRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
- QuotaNames []string `json:"QuotaNames,omitempty"`
-}
-
-// implements the service definition of ReadQuotasResponse
-type ReadQuotasResponse struct {
- QuotaTypes []QuotaTypes `json:"QuotaTypes,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadRegionConfigRequest
-type ReadRegionConfigRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- FromDate string `json:"FromDate,omitempty"`
-}
-
-// implements the service definition of ReadRegionConfigResponse
-type ReadRegionConfigResponse struct {
- RegionConfig RegionConfig `json:"RegionConfig,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadRegionsRequest
-type ReadRegionsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
- RegionNames []string `json:"RegionNames,omitempty"`
-}
-
-// implements the service definition of ReadRegionsResponse
-type ReadRegionsResponse struct {
- Regions []Region `json:"Regions,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadReservedVmOffersRequest
-type ReadReservedVmOffersRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
- OfferingType string `json:"OfferingType,omitempty"`
- ProductType string `json:"ProductType,omitempty"`
- ReservedVmsOfferIds []string `json:"ReservedVmsOfferIds,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
- Tenancy string `json:"Tenancy,omitempty"`
- VmType string `json:"VmType,omitempty"`
-}
-
-// implements the service definition of ReadReservedVmOffersResponse
-type ReadReservedVmOffersResponse struct {
- ReservedVmsOffers []ReservedVmsOffer `json:"ReservedVmsOffers,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadReservedVmsRequest
-type ReadReservedVmsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
- OfferingType string `json:"OfferingType,omitempty"`
- ReservedVmsIds []string `json:"ReservedVmsIds,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
-}
-
-// implements the service definition of ReadReservedVmsResponse
-type ReadReservedVmsResponse struct {
- ReservedVms []ReservedVm `json:"ReservedVms,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadRouteTablesRequest
-type ReadRouteTablesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersRouteTable `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadRouteTablesResponse
-type ReadRouteTablesResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- RouteTables []RouteTable `json:"RouteTables,omitempty"`
-}
-
-// implements the service definition of ReadSecurityGroupsRequest
-type ReadSecurityGroupsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersSecurityGroup `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadSecurityGroupsResponse
-type ReadSecurityGroupsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- SecurityGroups []SecurityGroup `json:"SecurityGroups,omitempty"`
-}
-
-// implements the service definition of ReadServerCertificatesRequest
-type ReadServerCertificatesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Path string `json:"Path,omitempty"`
-}
-
-// implements the service definition of ReadServerCertificatesResponse
-type ReadServerCertificatesResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- ServerCertificates []ServerCertificate `json:"ServerCertificates,omitempty"`
-}
-
-// implements the service definition of ReadSnapshotExportTasksRequest
-type ReadSnapshotExportTasksRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- TaskIds []string `json:"TaskIds,omitempty"`
-}
-
-// implements the service definition of ReadSnapshotExportTasksResponse
-type ReadSnapshotExportTasksResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- SnapshotExportTasks []SnapshotExportTask `json:"SnapshotExportTasks,omitempty"`
-}
-
-// implements the service definition of ReadSnapshotsRequest
-type ReadSnapshotsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersSnapshot `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadSnapshotsResponse
-type ReadSnapshotsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Snapshots []Snapshot `json:"Snapshots,omitempty"`
-}
-
-// implements the service definition of ReadSubnetsRequest
-type ReadSubnetsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersSubnet `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadSubnetsResponse
-type ReadSubnetsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Subnets []Subnet `json:"Subnets,omitempty"`
-}
-
-// implements the service definition of ReadSubregionsRequest
-type ReadSubregionsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
- SubregionNames []string `json:"SubregionNames,omitempty"`
-}
-
-// implements the service definition of ReadSubregionsResponse
-type ReadSubregionsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Subregions []Subregion `json:"Subregions,omitempty"`
-}
-
-// implements the service definition of ReadTagsRequest
-type ReadTagsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersTag `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadTagsResponse
-type ReadTagsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Tags []Tag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of ReadUserGroupsRequest
-type ReadUserGroupsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersUserGroup `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadUserGroupsResponse
-type ReadUserGroupsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- UserGroups []UserGroup `json:"UserGroups,omitempty"`
-}
-
-// implements the service definition of ReadUsersRequest
-type ReadUsersRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Path string `json:"Path,omitempty"`
-}
-
-// implements the service definition of ReadUsersResponse
-type ReadUsersResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Users []User `json:"Users,omitempty"`
-}
-
-// implements the service definition of ReadVirtualGatewaysRequest
-type ReadVirtualGatewaysRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadVirtualGatewaysResponse
-type ReadVirtualGatewaysResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- VirtualGateways []VirtualGateway `json:"VirtualGateways,omitempty"`
-}
-
-// implements the service definition of ReadVmTypesRequest
-type ReadVmTypesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters []FiltersOldFormat `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadVmTypesResponse
-type ReadVmTypesResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- VmTypes []VmType `json:"VmTypes,omitempty"`
-}
-
-// implements the service definition of ReadVmsHealthRequest
-type ReadVmsHealthRequest struct {
- BackendVmsIds []string `json:"BackendVmsIds,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
-}
-
-// implements the service definition of ReadVmsHealthResponse
-type ReadVmsHealthResponse struct {
- BackendVmsHealth []BackendVmsHealth `json:"BackendVmsHealth,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReadVmsRequest
-type ReadVmsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersVm `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadVmsResponse
-type ReadVmsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Vms []Vm `json:"Vms,omitempty"`
-}
-
-// implements the service definition of ReadVmsStateRequest
-type ReadVmsStateRequest struct {
- AllVms bool `json:"AllVms,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersVmsState `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadVmsStateResponse
-type ReadVmsStateResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- VmStates []VmStates `json:"VmStates,omitempty"`
-}
-
-// implements the service definition of ReadVolumesRequest
-type ReadVolumesRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersVolume `json:"Filters,omitempty"`
-}
-
-// implements the service definition of ReadVolumesResponse
-type ReadVolumesResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Volumes []Volume `json:"Volumes,omitempty"`
-}
-
-// implements the service definition of ReadVpnConnectionsRequest
-type ReadVpnConnectionsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Filters FiltersVpnConnection `json:"Filters,omitempty"`
- VpnConnectionIds []string `json:"VpnConnectionIds,omitempty"`
-}
-
-// implements the service definition of ReadVpnConnectionsResponse
-type ReadVpnConnectionsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- VpnConnections []VpnConnection `json:"VpnConnections,omitempty"`
-}
-
-// implements the service definition of RebootVmsRequest
-type RebootVmsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
-}
-
-// implements the service definition of RebootVmsResponse
-type RebootVmsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of RecurringCharge
-type RecurringCharge struct {
- Frequency string `json:"Frequency,omitempty"`
-}
-
-// implements the service definition of Region
-type Region struct {
- RegionEndpoint string `json:"RegionEndpoint,omitempty"`
- RegionName string `json:"RegionName,omitempty"`
-}
-
-// implements the service definition of RegionConfig
-type RegionConfig struct {
- FromDate string `json:"FromDate,omitempty"`
- Regions []RegionDescription `json:"Regions,omitempty"`
- TargetRegion TargetRegion `json:"TargetRegion,omitempty"`
-}
-
-// implements the service definition of RegionDescription
-type RegionDescription struct {
- Attributes []Attribute `json:"Attributes,omitempty"`
- Continent string `json:"Continent,omitempty"`
- CurrencyCode string `json:"CurrencyCode,omitempty"`
- Entity string `json:"Entity,omitempty"`
- IsPublic bool `json:"IsPublic,omitempty"`
- IsSynchronized bool `json:"IsSynchronized,omitempty"`
- Permissions []RegionDescriptionPermission `json:"Permissions,omitempty"`
- RegionDomain string `json:"RegionDomain,omitempty"`
- RegionId string `json:"RegionId,omitempty"`
- RegionInstance string `json:"RegionInstance,omitempty"`
- RegionName string `json:"RegionName,omitempty"`
- SerialFactor int64 `json:"SerialFactor,omitempty"`
- Services []Service `json:"Services,omitempty"`
- SubregionNames []string `json:"SubregionNames,omitempty"`
-}
-
-// implements the service definition of RegionDescriptionPermission
-type RegionDescriptionPermission struct {
- Filter string `json:"Filter,omitempty"`
- PermissionType string `json:"PermissionType,omitempty"`
-}
-
-// implements the service definition of RegisterUserInUserGroupRequest
-type RegisterUserInUserGroupRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- UserGroupName string `json:"UserGroupName,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of RegisterUserInUserGroupResponse
-type RegisterUserInUserGroupResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of RegisterVmsInLoadBalancerRequest
-type RegisterVmsInLoadBalancerRequest struct {
- BackendVmsIds []string `json:"BackendVmsIds,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
-}
-
-// implements the service definition of RegisterVmsInLoadBalancerResponse
-type RegisterVmsInLoadBalancerResponse struct {
- BackendVmsIds []string `json:"BackendVmsIds,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of RejectNetPeeringRequest
-type RejectNetPeeringRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NetPeeringId string `json:"NetPeeringId,omitempty"`
-}
-
-// implements the service definition of RejectNetPeeringResponse
-type RejectNetPeeringResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ReservedVm
-type ReservedVm struct {
- CurrencyCode string `json:"CurrencyCode,omitempty"`
- OfferingType string `json:"OfferingType,omitempty"`
- ProductType string `json:"ProductType,omitempty"`
- RecurringCharges []RecurringCharge `json:"RecurringCharges,omitempty"`
- ReservedVmsId string `json:"ReservedVmsId,omitempty"`
- State string `json:"State,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
- Tenancy string `json:"Tenancy,omitempty"`
- VmCount int64 `json:"VmCount,omitempty"`
- VmType string `json:"VmType,omitempty"`
-}
-
-// implements the service definition of ReservedVmsOffer
-type ReservedVmsOffer struct {
- CurrencyCode string `json:"CurrencyCode,omitempty"`
- Duration int64 `json:"Duration,omitempty"`
- FixedPrice int `json:"FixedPrice,omitempty"`
- OfferingType string `json:"OfferingType,omitempty"`
- PricingDetails []PricingDetail `json:"PricingDetails,omitempty"`
- ProductType string `json:"ProductType,omitempty"`
- RecurringCharges []RecurringCharge `json:"RecurringCharges,omitempty"`
- ReservedVmsOfferId string `json:"ReservedVmsOfferId,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
- Tenancy string `json:"Tenancy,omitempty"`
- UsagePrice int `json:"UsagePrice,omitempty"`
- VmType string `json:"VmType,omitempty"`
-}
-
-// implements the service definition of ResetAccountPasswordRequest
-type ResetAccountPasswordRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Password string `json:"Password,omitempty"`
- PasswordToken string `json:"PasswordToken,omitempty"`
-}
-
-// implements the service definition of ResetAccountPasswordResponse
-type ResetAccountPasswordResponse struct {
- Email string `json:"Email,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ResourceTag
-type ResourceTag struct {
- Key string `json:"Key,omitempty"`
- Value string `json:"Value,omitempty"`
-}
-
-// implements the service definition of ResponseContext
-type ResponseContext struct {
- RequestId string `json:"RequestId,omitempty"`
-}
-
-// implements the service definition of Route
-type Route struct {
- CreationMethod string `json:"CreationMethod,omitempty"`
- DestinationIpRange string `json:"DestinationIpRange,omitempty"`
- DestinationServiceId string `json:"DestinationServiceId,omitempty"`
- GatewayId string `json:"GatewayId,omitempty"`
- NetAccessPointId string `json:"NetAccessPointId,omitempty"`
- NetPeeringId string `json:"NetPeeringId,omitempty"`
- NicId string `json:"NicId,omitempty"`
- State string `json:"State,omitempty"`
- VmAccountId string `json:"VmAccountId,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of RouteLight
-type RouteLight struct {
- DestinationIpRange string `json:"DestinationIpRange,omitempty"`
- RouteType string `json:"RouteType,omitempty"`
- State string `json:"State,omitempty"`
-}
-
-// implements the service definition of RoutePropagatingVirtualGateway
-type RoutePropagatingVirtualGateway struct {
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
-}
-
-// implements the service definition of RouteTable
-type RouteTable struct {
- LinkRouteTables []LinkRouteTable `json:"LinkRouteTables,omitempty"`
- NetId string `json:"NetId,omitempty"`
- RoutePropagatingVirtualGateways []RoutePropagatingVirtualGateway `json:"RoutePropagatingVirtualGateways,omitempty"`
- RouteTableId string `json:"RouteTableId,omitempty"`
- Routes []Route `json:"Routes,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of SecurityGroup
-type SecurityGroup struct {
- AccountId string `json:"AccountId,omitempty"`
- Description string `json:"Description,omitempty"`
- InboundRules []SecurityGroupRule `json:"InboundRules,omitempty"`
- NetId string `json:"NetId,omitempty"`
- OutboundRules []SecurityGroupRule `json:"OutboundRules,omitempty"`
- SecurityGroupId string `json:"SecurityGroupId,omitempty"`
- SecurityGroupName string `json:"SecurityGroupName,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of SecurityGroupLight
-type SecurityGroupLight struct {
- SecurityGroupId string `json:"SecurityGroupId,omitempty"`
- SecurityGroupName string `json:"SecurityGroupName,omitempty"`
-}
-
-// implements the service definition of SecurityGroupRule
-type SecurityGroupRule struct {
- FromPortRange int64 `json:"FromPortRange,omitempty"`
- IpProtocol string `json:"IpProtocol,omitempty"`
- IpRanges []string `json:"IpRanges,omitempty"`
- PrefixListIds []string `json:"PrefixListIds,omitempty"`
- SecurityGroupsMembers []SecurityGroupsMember `json:"SecurityGroupsMembers,omitempty"`
- ToPortRange int64 `json:"ToPortRange,omitempty"`
-}
-
-// implements the service definition of SecurityGroupsMember
-type SecurityGroupsMember struct {
- AccountId string `json:"AccountId,omitempty"`
- SecurityGroupId string `json:"SecurityGroupId,omitempty"`
- SecurityGroupName string `json:"SecurityGroupName,omitempty"`
-}
-
-// implements the service definition of SendResetPasswordEmailRequest
-type SendResetPasswordEmailRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Email string `json:"Email,omitempty"`
-}
-
-// implements the service definition of SendResetPasswordEmailResponse
-type SendResetPasswordEmailResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of ServerCertificate
-type ServerCertificate struct {
- Path string `json:"Path,omitempty"`
- ServerCertificateId string `json:"ServerCertificateId,omitempty"`
- ServerCertificateName string `json:"ServerCertificateName,omitempty"`
-}
-
-// implements the service definition of Service
-type Service struct {
- Filters []FiltersServices `json:"Filters,omitempty"`
- ServiceName string `json:"ServiceName,omitempty"`
- ServiceType string `json:"ServiceType,omitempty"`
-}
-
-// implements the service definition of Snapshot
-type Snapshot struct {
- AccountAlias string `json:"AccountAlias,omitempty"`
- AccountId string `json:"AccountId,omitempty"`
- Description string `json:"Description,omitempty"`
- PermissionsToCreateVolume PermissionsOnResource `json:"PermissionsToCreateVolume,omitempty"`
- Progress int64 `json:"Progress,omitempty"`
- SnapshotId string `json:"SnapshotId,omitempty"`
- State string `json:"State,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
- VolumeSize int64 `json:"VolumeSize,omitempty"`
-}
-
-// implements the service definition of SnapshotExportTask
-type SnapshotExportTask struct {
- Comment string `json:"Comment,omitempty"`
- OsuExport OsuExport `json:"OsuExport,omitempty"`
- Progress int64 `json:"Progress,omitempty"`
- SnapshotId string `json:"SnapshotId,omitempty"`
- State string `json:"State,omitempty"`
- TaskId string `json:"TaskId,omitempty"`
-}
-
-// implements the service definition of SourceNet
-type SourceNet struct {
- AccountId string `json:"AccountId,omitempty"`
- IpRange string `json:"IpRange,omitempty"`
- NetId string `json:"NetId,omitempty"`
-}
-
-// implements the service definition of SourceSecurityGroup
-type SourceSecurityGroup struct {
- SecurityGroupAccountId string `json:"SecurityGroupAccountId,omitempty"`
- SecurityGroupName string `json:"SecurityGroupName,omitempty"`
-}
-
-// implements the service definition of StartVmsRequest
-type StartVmsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
-}
-
-// implements the service definition of StartVmsResponse
-type StartVmsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Vms []VmState `json:"Vms,omitempty"`
-}
-
-// implements the service definition of StateComment
-type StateComment struct {
- StateCode string `json:"StateCode,omitempty"`
- StateMessage string `json:"StateMessage,omitempty"`
-}
-
-// implements the service definition of StopVmsRequest
-type StopVmsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ForceStop bool `json:"ForceStop,omitempty"`
- VmIds []string `json:"VmIds,omitempty"`
-}
-
-// implements the service definition of StopVmsResponse
-type StopVmsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Vms []VmState `json:"Vms,omitempty"`
-}
-
-// implements the service definition of Subnet
-type Subnet struct {
- AvailableIpsCount int64 `json:"AvailableIpsCount,omitempty"`
- IpRange string `json:"IpRange,omitempty"`
- NetId string `json:"NetId,omitempty"`
- State string `json:"State,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
-}
-
-// implements the service definition of Subregion
-type Subregion struct {
- RegionName string `json:"RegionName,omitempty"`
- State string `json:"State,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
-}
-
-// implements the service definition of Tag
-type Tag struct {
- Key string `json:"Key,omitempty"`
- ResourceId string `json:"ResourceId,omitempty"`
- ResourceType string `json:"ResourceType,omitempty"`
- Value string `json:"Value,omitempty"`
-}
-
-// implements the service definition of TargetRegion
-type TargetRegion struct {
- RegionDomain string `json:"RegionDomain,omitempty"`
- RegionId string `json:"RegionId,omitempty"`
- RegionName string `json:"RegionName,omitempty"`
-}
-
-// implements the service definition of UnlinkInternetServiceRequest
-type UnlinkInternetServiceRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- InternetServiceId string `json:"InternetServiceId,omitempty"`
- NetId string `json:"NetId,omitempty"`
-}
-
-// implements the service definition of UnlinkInternetServiceResponse
-type UnlinkInternetServiceResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UnlinkNicRequest
-type UnlinkNicRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- LinkNicId string `json:"LinkNicId,omitempty"`
-}
-
-// implements the service definition of UnlinkNicResponse
-type UnlinkNicResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UnlinkPolicyRequest
-type UnlinkPolicyRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- PolicyId string `json:"PolicyId,omitempty"`
- UserGroupName string `json:"UserGroupName,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of UnlinkPolicyResponse
-type UnlinkPolicyResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UnlinkPrivateIpsRequest
-type UnlinkPrivateIpsRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NicId string `json:"NicId,omitempty"`
- PrivateIps []string `json:"PrivateIps,omitempty"`
-}
-
-// implements the service definition of UnlinkPrivateIpsResponse
-type UnlinkPrivateIpsResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UnlinkPublicIpRequest
-type UnlinkPublicIpRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- LinkPublicIpId string `json:"LinkPublicIpId,omitempty"`
- PublicIp string `json:"PublicIp,omitempty"`
-}
-
-// implements the service definition of UnlinkPublicIpResponse
-type UnlinkPublicIpResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UnlinkRouteTableRequest
-type UnlinkRouteTableRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- LinkRouteTableId string `json:"LinkRouteTableId,omitempty"`
-}
-
-// implements the service definition of UnlinkRouteTableResponse
-type UnlinkRouteTableResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UnlinkVirtualGatewayRequest
-type UnlinkVirtualGatewayRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NetId string `json:"NetId,omitempty"`
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
-}
-
-// implements the service definition of UnlinkVirtualGatewayResponse
-type UnlinkVirtualGatewayResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UnlinkVolumeRequest
-type UnlinkVolumeRequest struct {
- DeviceName string `json:"DeviceName,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- ForceUnlink bool `json:"ForceUnlink,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
-}
-
-// implements the service definition of UnlinkVolumeResponse
-type UnlinkVolumeResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateAccountRequest
-type UpdateAccountRequest struct {
- City string `json:"City,omitempty"`
- CompanyName string `json:"CompanyName,omitempty"`
- Country string `json:"Country,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- Email string `json:"Email,omitempty"`
- FirstName string `json:"FirstName,omitempty"`
- JobTitle string `json:"JobTitle,omitempty"`
- LastName string `json:"LastName,omitempty"`
- Mobile string `json:"Mobile,omitempty"`
- Password string `json:"Password,omitempty"`
- Phone string `json:"Phone,omitempty"`
- StateProvince string `json:"StateProvince,omitempty"`
- VatNumber string `json:"VatNumber,omitempty"`
- ZipCode string `json:"ZipCode,omitempty"`
-}
-
-// implements the service definition of UpdateAccountResponse
-type UpdateAccountResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateApiKeyRequest
-type UpdateApiKeyRequest struct {
- ApiKeyId string `json:"ApiKeyId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- State string `json:"State,omitempty"`
-}
-
-// implements the service definition of UpdateApiKeyResponse
-type UpdateApiKeyResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateHealthCheckRequest
-type UpdateHealthCheckRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- HealthCheck HealthCheck `json:"HealthCheck,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
-}
-
-// implements the service definition of UpdateHealthCheckResponse
-type UpdateHealthCheckResponse struct {
- HealthCheck HealthCheck `json:"HealthCheck,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateImageRequest
-type UpdateImageRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- ImageId string `json:"ImageId,omitempty"`
- PermissionsToLaunch PermissionsOnResourceCreation `json:"PermissionsToLaunch,omitempty"`
-}
-
-// implements the service definition of UpdateImageResponse
-type UpdateImageResponse struct {
- Image Image `json:"Image,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateKeypairRequest
-type UpdateKeypairRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- KeypairName string `json:"KeypairName,omitempty"`
- PublicKey string `json:"PublicKey,omitempty"`
-}
-
-// implements the service definition of UpdateKeypairResponse
-type UpdateKeypairResponse struct {
- KeypairFingerprint string `json:"KeypairFingerprint,omitempty"`
- KeypairName string `json:"KeypairName,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateListenerRuleRequest
-type UpdateListenerRuleRequest struct {
- Attribute string `json:"Attribute,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- ListenerRuleName string `json:"ListenerRuleName,omitempty"`
- Value string `json:"Value,omitempty"`
-}
-
-// implements the service definition of UpdateListenerRuleResponse
-type UpdateListenerRuleResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateLoadBalancerRequest
-type UpdateLoadBalancerRequest struct {
- AccessLog AccessLog `json:"AccessLog,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- LoadBalancerName string `json:"LoadBalancerName,omitempty"`
- LoadBalancerPort int64 `json:"LoadBalancerPort,omitempty"`
- PolicyNames []string `json:"PolicyNames,omitempty"`
- ServerCertificateId string `json:"ServerCertificateId,omitempty"`
-}
-
-// implements the service definition of UpdateLoadBalancerResponse
-type UpdateLoadBalancerResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateNetAccessPointRequest
-type UpdateNetAccessPointRequest struct {
- AddRouteTableIds []string `json:"AddRouteTableIds,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- NetAccessPointId string `json:"NetAccessPointId,omitempty"`
- RemoveRouteTableIds []string `json:"RemoveRouteTableIds,omitempty"`
-}
-
-// implements the service definition of UpdateNetAccessPointResponse
-type UpdateNetAccessPointResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateNetRequest
-type UpdateNetRequest struct {
- DhcpOptionsSetId string `json:"DhcpOptionsSetId,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- NetId string `json:"NetId,omitempty"`
-}
-
-// implements the service definition of UpdateNetResponse
-type UpdateNetResponse struct {
- Net Net `json:"Net,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateNicRequest
-type UpdateNicRequest struct {
- Description string `json:"Description,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- LinkNic LinkNicToUpdate `json:"LinkNic,omitempty"`
- NicId string `json:"NicId,omitempty"`
- SecurityGroupIds []string `json:"SecurityGroupIds,omitempty"`
-}
-
-// implements the service definition of UpdateNicResponse
-type UpdateNicResponse struct {
- Nic Nic `json:"Nic,omitempty"`
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateRoutePropagationRequest
-type UpdateRoutePropagationRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- Enable bool `json:"Enable,omitempty"`
- RouteTableId string `json:"RouteTableId,omitempty"`
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
-}
-
-// implements the service definition of UpdateRoutePropagationResponse
-type UpdateRoutePropagationResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- RouteTable RouteTable `json:"RouteTable,omitempty"`
-}
-
-// implements the service definition of UpdateRouteRequest
-type UpdateRouteRequest struct {
- DestinationIpRange string `json:"DestinationIpRange,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- GatewayId string `json:"GatewayId,omitempty"`
- NatServiceId string `json:"NatServiceId,omitempty"`
- NetPeeringId string `json:"NetPeeringId,omitempty"`
- NicId string `json:"NicId,omitempty"`
- RouteTableId string `json:"RouteTableId,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of UpdateRouteResponse
-type UpdateRouteResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Route Route `json:"Route,omitempty"`
-}
-
-// implements the service definition of UpdateServerCertificateRequest
-type UpdateServerCertificateRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NewPath string `json:"NewPath,omitempty"`
- NewServerCertificateName string `json:"NewServerCertificateName,omitempty"`
- ServerCertificateName string `json:"ServerCertificateName,omitempty"`
-}
-
-// implements the service definition of UpdateServerCertificateResponse
-type UpdateServerCertificateResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateSnapshotRequest
-type UpdateSnapshotRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- PermissionsToCreateVolume PermissionsOnResourceCreation `json:"PermissionsToCreateVolume,omitempty"`
- SnapshotId string `json:"SnapshotId,omitempty"`
-}
-
-// implements the service definition of UpdateSnapshotResponse
-type UpdateSnapshotResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Snapshot Snapshot `json:"Snapshot,omitempty"`
-}
-
-// implements the service definition of UpdateUserGroupRequest
-type UpdateUserGroupRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NewPath string `json:"NewPath,omitempty"`
- NewUserGroupName string `json:"NewUserGroupName,omitempty"`
- UserGroupName string `json:"UserGroupName,omitempty"`
-}
-
-// implements the service definition of UpdateUserGroupResponse
-type UpdateUserGroupResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateUserRequest
-type UpdateUserRequest struct {
- DryRun bool `json:"DryRun,omitempty"`
- NewPath string `json:"NewPath,omitempty"`
- NewUserName string `json:"NewUserName,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of UpdateUserResponse
-type UpdateUserResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
-}
-
-// implements the service definition of UpdateVmRequest
-type UpdateVmRequest struct {
- BlockDeviceMappings []BlockDeviceMappingVmUpdate `json:"BlockDeviceMappings,omitempty"`
- BsuOptimized bool `json:"BsuOptimized,omitempty"`
- DeletionProtection *bool `json:"DeletionProtection,omitempty"`
- DryRun bool `json:"DryRun,omitempty"`
- IsSourceDestChecked bool `json:"IsSourceDestChecked,omitempty"`
- KeypairName string `json:"KeypairName,omitempty"`
- SecurityGroupIds []string `json:"SecurityGroupIds,omitempty"`
- UserData string `json:"UserData,omitempty"`
- VmId string `json:"VmId,omitempty"`
- VmInitiatedShutdownBehavior string `json:"VmInitiatedShutdownBehavior,omitempty"`
- VmType string `json:"VmType,omitempty"`
-}
-
-// implements the service definition of UpdateVmResponse
-type UpdateVmResponse struct {
- ResponseContext ResponseContext `json:"ResponseContext,omitempty"`
- Vm Vm `json:"Vm,omitempty"`
-}
-
-// implements the service definition of User
-type User struct {
- Path string `json:"Path,omitempty"`
- UserId string `json:"UserId,omitempty"`
- UserName string `json:"UserName,omitempty"`
-}
-
-// implements the service definition of UserGroup
-type UserGroup struct {
- Path string `json:"Path,omitempty"`
- UserGroupId string `json:"UserGroupId,omitempty"`
- UserGroupName string `json:"UserGroupName,omitempty"`
-}
-
-// implements the service definition of VirtualGateway
-type VirtualGateway struct {
- ConnectionType string `json:"ConnectionType,omitempty"`
- NetToVirtualGatewayLinks []NetToVirtualGatewayLink `json:"NetToVirtualGatewayLinks,omitempty"`
- State string `json:"State,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
-}
-
-// implements the service definition of Vm
-type Vm struct {
- Architecture string `json:"Architecture,omitempty"`
- BlockDeviceMappings []BlockDeviceMappingCreated `json:"BlockDeviceMappings,omitempty"`
- BsuOptimized bool `json:"BsuOptimized,omitempty"`
- ClientToken string `json:"ClientToken,omitempty"`
- DeletionProtection bool `json:"DeletionProtection,omitempty"`
- Hypervisor string `json:"Hypervisor,omitempty"`
- ImageId string `json:"ImageId,omitempty"`
- IsSourceDestChecked bool `json:"IsSourceDestChecked,omitempty"`
- KeypairName string `json:"KeypairName,omitempty"`
- LaunchNumber int64 `json:"LaunchNumber,omitempty"`
- NetId string `json:"NetId,omitempty"`
- Nics []NicLight `json:"Nics,omitempty"`
- OsFamily string `json:"OsFamily,omitempty"`
- Placement Placement `json:"Placement,omitempty"`
- PrivateDnsName string `json:"PrivateDnsName,omitempty"`
- PrivateIp string `json:"PrivateIp,omitempty"`
- ProductCodes []string `json:"ProductCodes,omitempty"`
- PublicDnsName string `json:"PublicDnsName,omitempty"`
- PublicIp string `json:"PublicIp,omitempty"`
- ReservationId string `json:"ReservationId,omitempty"`
- RootDeviceName string `json:"RootDeviceName,omitempty"`
- RootDeviceType string `json:"RootDeviceType,omitempty"`
- SecurityGroups []SecurityGroupLight `json:"SecurityGroups,omitempty"`
- State string `json:"State,omitempty"`
- StateReason string `json:"StateReason,omitempty"`
- SubnetId string `json:"SubnetId,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
- UserData string `json:"UserData,omitempty"`
- VmId string `json:"VmId,omitempty"`
- VmInitiatedShutdownBehavior string `json:"VmInitiatedShutdownBehavior,omitempty"`
- VmType string `json:"VmType,omitempty"`
-}
-
-// implements the service definition of VmState
-type VmState struct {
- CurrentState string `json:"CurrentState,omitempty"`
- PreviousState string `json:"PreviousState,omitempty"`
- VmId string `json:"VmId,omitempty"`
-}
-
-// implements the service definition of VmStates
-type VmStates struct {
- MaintenanceEvents []MaintenanceEvent `json:"MaintenanceEvents,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
- VmId string `json:"VmId,omitempty"`
- VmState string `json:"VmState,omitempty"`
-}
-
-// implements the service definition of VmType
-type VmType struct {
- IsBsuOptimized bool `json:"IsBsuOptimized,omitempty"`
- MaxPrivateIps int64 `json:"MaxPrivateIps,omitempty"`
- MemorySize int64 `json:"MemorySize,omitempty"`
- StorageCount int64 `json:"StorageCount,omitempty"`
- StorageSize int64 `json:"StorageSize,omitempty"`
- VcoreCount int64 `json:"VcoreCount,omitempty"`
- VmTypeName string `json:"VmTypeName,omitempty"`
-}
-
-// implements the service definition of Volume
-type Volume struct {
- Iops int64 `json:"Iops,omitempty"`
- LinkedVolumes []LinkedVolume `json:"LinkedVolumes,omitempty"`
- Size int64 `json:"Size,omitempty"`
- SnapshotId string `json:"SnapshotId,omitempty"`
- State string `json:"State,omitempty"`
- SubregionName string `json:"SubregionName,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
- VolumeId string `json:"VolumeId,omitempty"`
- VolumeType string `json:"VolumeType,omitempty"`
-}
-
-// implements the service definition of VpnConnection
-type VpnConnection struct {
- ClientGatewayConfiguration string `json:"ClientGatewayConfiguration,omitempty"`
- ClientGatewayId string `json:"ClientGatewayId,omitempty"`
- ConnectionType string `json:"ConnectionType,omitempty"`
- Routes []RouteLight `json:"Routes,omitempty"`
- State string `json:"State,omitempty"`
- StaticRoutesOnly bool `json:"StaticRoutesOnly,omitempty"`
- Tags []ResourceTag `json:"Tags,omitempty"`
- VirtualGatewayId string `json:"VirtualGatewayId,omitempty"`
- VpnConnectionId string `json:"VpnConnectionId,omitempty"`
-}
-
-// implements the service definition of With
-type With struct {
- CallDuration bool `json:"CallDuration,omitempty"`
- QueryAccessKey bool `json:"QueryAccessKey,omitempty"`
- QueryApiName bool `json:"QueryApiName,omitempty"`
- QueryApiVersion bool `json:"QueryApiVersion,omitempty"`
- QueryCallName bool `json:"QueryCallName,omitempty"`
- QueryDate bool `json:"QueryDate,omitempty"`
- QueryIpAddress bool `json:"QueryIpAddress,omitempty"`
- QueryRaw bool `json:"QueryRaw,omitempty"`
- QuerySize bool `json:"QuerySize,omitempty"`
- QueryUserAgent bool `json:"QueryUserAgent,omitempty"`
- ResponseId bool `json:"ResponseId,omitempty"`
- ResponseSize bool `json:"ResponseSize,omitempty"`
- ResponseStatusCode bool `json:"ResponseStatusCode,omitempty"`
-}
-
-// POST_AcceptNetPeeringParameters holds parameters to POST_AcceptNetPeering
-type POST_AcceptNetPeeringParameters struct {
- Acceptnetpeeringrequest AcceptNetPeeringRequest `json:"acceptnetpeeringrequest,omitempty"`
-}
-
-// POST_AcceptNetPeeringResponses holds responses of POST_AcceptNetPeering
-type POST_AcceptNetPeeringResponses struct {
- OK *AcceptNetPeeringResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code409 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_AuthenticateAccountParameters holds parameters to POST_AuthenticateAccount
-type POST_AuthenticateAccountParameters struct {
- Authenticateaccountrequest AuthenticateAccountRequest `json:"authenticateaccountrequest,omitempty"`
-}
-
-// POST_AuthenticateAccountResponses holds responses of POST_AuthenticateAccount
-type POST_AuthenticateAccountResponses struct {
- OK *AuthenticateAccountResponse
-}
-
-// POST_CheckSignatureParameters holds parameters to POST_CheckSignature
-type POST_CheckSignatureParameters struct {
- Checksignaturerequest CheckSignatureRequest `json:"checksignaturerequest,omitempty"`
-}
-
-// POST_CheckSignatureResponses holds responses of POST_CheckSignature
-type POST_CheckSignatureResponses struct {
- OK *CheckSignatureResponse
-}
-
-// POST_CopyAccountParameters holds parameters to POST_CopyAccount
-type POST_CopyAccountParameters struct {
- Copyaccountrequest CopyAccountRequest `json:"copyaccountrequest,omitempty"`
-}
-
-// POST_CopyAccountResponses holds responses of POST_CopyAccount
-type POST_CopyAccountResponses struct {
- OK *CopyAccountResponse
-}
-
-// POST_CreateAccountParameters holds parameters to POST_CreateAccount
-type POST_CreateAccountParameters struct {
- Createaccountrequest CreateAccountRequest `json:"createaccountrequest,omitempty"`
-}
-
-// POST_CreateAccountResponses holds responses of POST_CreateAccount
-type POST_CreateAccountResponses struct {
- OK *CreateAccountResponse
-}
-
-// POST_CreateApiKeyParameters holds parameters to POST_CreateApiKey
-type POST_CreateApiKeyParameters struct {
- Createapikeyrequest CreateApiKeyRequest `json:"createapikeyrequest,omitempty"`
-}
-
-// POST_CreateApiKeyResponses holds responses of POST_CreateApiKey
-type POST_CreateApiKeyResponses struct {
- OK *CreateApiKeyResponse
-}
-
-// POST_CreateClientGatewayParameters holds parameters to POST_CreateClientGateway
-type POST_CreateClientGatewayParameters struct {
- Createclientgatewayrequest CreateClientGatewayRequest `json:"createclientgatewayrequest,omitempty"`
-}
-
-// POST_CreateClientGatewayResponses holds responses of POST_CreateClientGateway
-type POST_CreateClientGatewayResponses struct {
- OK *CreateClientGatewayResponse
-}
-
-// POST_CreateDhcpOptionsParameters holds parameters to POST_CreateDhcpOptions
-type POST_CreateDhcpOptionsParameters struct {
- Createdhcpoptionsrequest CreateDhcpOptionsRequest `json:"createdhcpoptionsrequest,omitempty"`
-}
-
-// POST_CreateDhcpOptionsResponses holds responses of POST_CreateDhcpOptions
-type POST_CreateDhcpOptionsResponses struct {
- OK *CreateDhcpOptionsResponse
-}
-
-// POST_CreateDirectLinkParameters holds parameters to POST_CreateDirectLink
-type POST_CreateDirectLinkParameters struct {
- Createdirectlinkrequest CreateDirectLinkRequest `json:"createdirectlinkrequest,omitempty"`
-}
-
-// POST_CreateDirectLinkResponses holds responses of POST_CreateDirectLink
-type POST_CreateDirectLinkResponses struct {
- OK *CreateDirectLinkResponse
-}
-
-// POST_CreateDirectLinkInterfaceParameters holds parameters to POST_CreateDirectLinkInterface
-type POST_CreateDirectLinkInterfaceParameters struct {
- Createdirectlinkinterfacerequest CreateDirectLinkInterfaceRequest `json:"createdirectlinkinterfacerequest,omitempty"`
-}
-
-// POST_CreateDirectLinkInterfaceResponses holds responses of POST_CreateDirectLinkInterface
-type POST_CreateDirectLinkInterfaceResponses struct {
- OK *CreateDirectLinkInterfaceResponse
-}
-
-// POST_CreateImageParameters holds parameters to POST_CreateImage
-type POST_CreateImageParameters struct {
- Createimagerequest CreateImageRequest `json:"createimagerequest,omitempty"`
-}
-
-// POST_CreateImageResponses holds responses of POST_CreateImage
-type POST_CreateImageResponses struct {
- OK *CreateImageResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateImageExportTaskParameters holds parameters to POST_CreateImageExportTask
-type POST_CreateImageExportTaskParameters struct {
- Createimageexporttaskrequest CreateImageExportTaskRequest `json:"createimageexporttaskrequest,omitempty"`
-}
-
-// POST_CreateImageExportTaskResponses holds responses of POST_CreateImageExportTask
-type POST_CreateImageExportTaskResponses struct {
- OK *CreateImageExportTaskResponse
-}
-
-// POST_CreateInternetServiceParameters holds parameters to POST_CreateInternetService
-type POST_CreateInternetServiceParameters struct {
- Createinternetservicerequest CreateInternetServiceRequest `json:"createinternetservicerequest,omitempty"`
-}
-
-// POST_CreateInternetServiceResponses holds responses of POST_CreateInternetService
-type POST_CreateInternetServiceResponses struct {
- OK *CreateInternetServiceResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateKeypairParameters holds parameters to POST_CreateKeypair
-type POST_CreateKeypairParameters struct {
- Createkeypairrequest CreateKeypairRequest `json:"createkeypairrequest,omitempty"`
-}
-
-// POST_CreateKeypairResponses holds responses of POST_CreateKeypair
-type POST_CreateKeypairResponses struct {
- OK *CreateKeypairResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code409 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateListenerRuleParameters holds parameters to POST_CreateListenerRule
-type POST_CreateListenerRuleParameters struct {
- Createlistenerrulerequest CreateListenerRuleRequest `json:"createlistenerrulerequest,omitempty"`
-}
-
-// POST_CreateListenerRuleResponses holds responses of POST_CreateListenerRule
-type POST_CreateListenerRuleResponses struct {
- OK *CreateListenerRuleResponse
-}
-
-// POST_CreateLoadBalancerParameters holds parameters to POST_CreateLoadBalancer
-type POST_CreateLoadBalancerParameters struct {
- Createloadbalancerrequest CreateLoadBalancerRequest `json:"createloadbalancerrequest,omitempty"`
-}
-
-// POST_CreateLoadBalancerResponses holds responses of POST_CreateLoadBalancer
-type POST_CreateLoadBalancerResponses struct {
- OK *CreateLoadBalancerResponse
-}
-
-// POST_CreateLoadBalancerListenersParameters holds parameters to POST_CreateLoadBalancerListeners
-type POST_CreateLoadBalancerListenersParameters struct {
- Createloadbalancerlistenersrequest CreateLoadBalancerListenersRequest `json:"createloadbalancerlistenersrequest,omitempty"`
-}
-
-// POST_CreateLoadBalancerListenersResponses holds responses of POST_CreateLoadBalancerListeners
-type POST_CreateLoadBalancerListenersResponses struct {
- OK *CreateLoadBalancerListenersResponse
-}
-
-// POST_CreateLoadBalancerPolicyParameters holds parameters to POST_CreateLoadBalancerPolicy
-type POST_CreateLoadBalancerPolicyParameters struct {
- Createloadbalancerpolicyrequest CreateLoadBalancerPolicyRequest `json:"createloadbalancerpolicyrequest,omitempty"`
-}
-
-// POST_CreateLoadBalancerPolicyResponses holds responses of POST_CreateLoadBalancerPolicy
-type POST_CreateLoadBalancerPolicyResponses struct {
- OK *CreateLoadBalancerPolicyResponse
-}
-
-// POST_CreateNatServiceParameters holds parameters to POST_CreateNatService
-type POST_CreateNatServiceParameters struct {
- Createnatservicerequest CreateNatServiceRequest `json:"createnatservicerequest,omitempty"`
-}
-
-// POST_CreateNatServiceResponses holds responses of POST_CreateNatService
-type POST_CreateNatServiceResponses struct {
- OK *CreateNatServiceResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateNetParameters holds parameters to POST_CreateNet
-type POST_CreateNetParameters struct {
- Createnetrequest CreateNetRequest `json:"createnetrequest,omitempty"`
-}
-
-// POST_CreateNetResponses holds responses of POST_CreateNet
-type POST_CreateNetResponses struct {
- OK *CreateNetResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code409 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateNetAccessPointParameters holds parameters to POST_CreateNetAccessPoint
-type POST_CreateNetAccessPointParameters struct {
- Createnetaccesspointrequest CreateNetAccessPointRequest `json:"createnetaccesspointrequest,omitempty"`
-}
-
-// POST_CreateNetAccessPointResponses holds responses of POST_CreateNetAccessPoint
-type POST_CreateNetAccessPointResponses struct {
- OK *CreateNetAccessPointResponse
-}
-
-// POST_CreateNetPeeringParameters holds parameters to POST_CreateNetPeering
-type POST_CreateNetPeeringParameters struct {
- Createnetpeeringrequest CreateNetPeeringRequest `json:"createnetpeeringrequest,omitempty"`
-}
-
-// POST_CreateNetPeeringResponses holds responses of POST_CreateNetPeering
-type POST_CreateNetPeeringResponses struct {
- OK *CreateNetPeeringResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateNicParameters holds parameters to POST_CreateNic
-type POST_CreateNicParameters struct {
- Createnicrequest CreateNicRequest `json:"createnicrequest,omitempty"`
-}
-
-// POST_CreateNicResponses holds responses of POST_CreateNic
-type POST_CreateNicResponses struct {
- OK *CreateNicResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreatePolicyParameters holds parameters to POST_CreatePolicy
-type POST_CreatePolicyParameters struct {
- Createpolicyrequest CreatePolicyRequest `json:"createpolicyrequest,omitempty"`
-}
-
-// POST_CreatePolicyResponses holds responses of POST_CreatePolicy
-type POST_CreatePolicyResponses struct {
- OK *CreatePolicyResponse
-}
-
-// POST_CreatePublicIpParameters holds parameters to POST_CreatePublicIp
-type POST_CreatePublicIpParameters struct {
- Createpubliciprequest CreatePublicIpRequest `json:"createpubliciprequest,omitempty"`
-}
-
-// POST_CreatePublicIpResponses holds responses of POST_CreatePublicIp
-type POST_CreatePublicIpResponses struct {
- OK *CreatePublicIpResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateRouteParameters holds parameters to POST_CreateRoute
-type POST_CreateRouteParameters struct {
- Createrouterequest CreateRouteRequest `json:"createrouterequest,omitempty"`
-}
-
-// POST_CreateRouteResponses holds responses of POST_CreateRoute
-type POST_CreateRouteResponses struct {
- OK *CreateRouteResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateRouteTableParameters holds parameters to POST_CreateRouteTable
-type POST_CreateRouteTableParameters struct {
- Createroutetablerequest CreateRouteTableRequest `json:"createroutetablerequest,omitempty"`
-}
-
-// POST_CreateRouteTableResponses holds responses of POST_CreateRouteTable
-type POST_CreateRouteTableResponses struct {
- OK *CreateRouteTableResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateSecurityGroupParameters holds parameters to POST_CreateSecurityGroup
-type POST_CreateSecurityGroupParameters struct {
- Createsecuritygrouprequest CreateSecurityGroupRequest `json:"createsecuritygrouprequest,omitempty"`
-}
-
-// POST_CreateSecurityGroupResponses holds responses of POST_CreateSecurityGroup
-type POST_CreateSecurityGroupResponses struct {
- OK *CreateSecurityGroupResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateSecurityGroupRuleParameters holds parameters to POST_CreateSecurityGroupRule
-type POST_CreateSecurityGroupRuleParameters struct {
- Createsecuritygrouprulerequest CreateSecurityGroupRuleRequest `json:"createsecuritygrouprulerequest,omitempty"`
-}
-
-// POST_CreateSecurityGroupRuleResponses holds responses of POST_CreateSecurityGroupRule
-type POST_CreateSecurityGroupRuleResponses struct {
- OK *CreateSecurityGroupRuleResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateServerCertificateParameters holds parameters to POST_CreateServerCertificate
-type POST_CreateServerCertificateParameters struct {
- Createservercertificaterequest CreateServerCertificateRequest `json:"createservercertificaterequest,omitempty"`
-}
-
-// POST_CreateServerCertificateResponses holds responses of POST_CreateServerCertificate
-type POST_CreateServerCertificateResponses struct {
- OK *CreateServerCertificateResponse
-}
-
-// POST_CreateSnapshotParameters holds parameters to POST_CreateSnapshot
-type POST_CreateSnapshotParameters struct {
- Createsnapshotrequest CreateSnapshotRequest `json:"createsnapshotrequest,omitempty"`
-}
-
-// POST_CreateSnapshotResponses holds responses of POST_CreateSnapshot
-type POST_CreateSnapshotResponses struct {
- OK *CreateSnapshotResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateSnapshotExportTaskParameters holds parameters to POST_CreateSnapshotExportTask
-type POST_CreateSnapshotExportTaskParameters struct {
- Createsnapshotexporttaskrequest CreateSnapshotExportTaskRequest `json:"createsnapshotexporttaskrequest,omitempty"`
-}
-
-// POST_CreateSnapshotExportTaskResponses holds responses of POST_CreateSnapshotExportTask
-type POST_CreateSnapshotExportTaskResponses struct {
- OK *CreateSnapshotExportTaskResponse
-}
-
-// POST_CreateSubnetParameters holds parameters to POST_CreateSubnet
-type POST_CreateSubnetParameters struct {
- Createsubnetrequest CreateSubnetRequest `json:"createsubnetrequest,omitempty"`
-}
-
-// POST_CreateSubnetResponses holds responses of POST_CreateSubnet
-type POST_CreateSubnetResponses struct {
- OK *CreateSubnetResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code409 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateTagsParameters holds parameters to POST_CreateTags
-type POST_CreateTagsParameters struct {
- Createtagsrequest CreateTagsRequest `json:"createtagsrequest,omitempty"`
-}
-
-// POST_CreateTagsResponses holds responses of POST_CreateTags
-type POST_CreateTagsResponses struct {
- OK *CreateTagsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateUserParameters holds parameters to POST_CreateUser
-type POST_CreateUserParameters struct {
- Createuserrequest CreateUserRequest `json:"createuserrequest,omitempty"`
-}
-
-// POST_CreateUserResponses holds responses of POST_CreateUser
-type POST_CreateUserResponses struct {
- OK *CreateUserResponse
-}
-
-// POST_CreateUserGroupParameters holds parameters to POST_CreateUserGroup
-type POST_CreateUserGroupParameters struct {
- Createusergrouprequest CreateUserGroupRequest `json:"createusergrouprequest,omitempty"`
-}
-
-// POST_CreateUserGroupResponses holds responses of POST_CreateUserGroup
-type POST_CreateUserGroupResponses struct {
- OK *CreateUserGroupResponse
-}
-
-// POST_CreateVirtualGatewayParameters holds parameters to POST_CreateVirtualGateway
-type POST_CreateVirtualGatewayParameters struct {
- Createvirtualgatewayrequest CreateVirtualGatewayRequest `json:"createvirtualgatewayrequest,omitempty"`
-}
-
-// POST_CreateVirtualGatewayResponses holds responses of POST_CreateVirtualGateway
-type POST_CreateVirtualGatewayResponses struct {
- OK *CreateVirtualGatewayResponse
-}
-
-// POST_CreateVmsParameters holds parameters to POST_CreateVms
-type POST_CreateVmsParameters struct {
- Createvmsrequest CreateVmsRequest `json:"createvmsrequest,omitempty"`
-}
-
-// POST_CreateVmsResponses holds responses of POST_CreateVms
-type POST_CreateVmsResponses struct {
- OK *CreateVmsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateVolumeParameters holds parameters to POST_CreateVolume
-type POST_CreateVolumeParameters struct {
- Createvolumerequest CreateVolumeRequest `json:"createvolumerequest,omitempty"`
-}
-
-// POST_CreateVolumeResponses holds responses of POST_CreateVolume
-type POST_CreateVolumeResponses struct {
- OK *CreateVolumeResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_CreateVpnConnectionParameters holds parameters to POST_CreateVpnConnection
-type POST_CreateVpnConnectionParameters struct {
- Createvpnconnectionrequest CreateVpnConnectionRequest `json:"createvpnconnectionrequest,omitempty"`
-}
-
-// POST_CreateVpnConnectionResponses holds responses of POST_CreateVpnConnection
-type POST_CreateVpnConnectionResponses struct {
- OK *CreateVpnConnectionResponse
-}
-
-// POST_CreateVpnConnectionRouteParameters holds parameters to POST_CreateVpnConnectionRoute
-type POST_CreateVpnConnectionRouteParameters struct {
- Createvpnconnectionrouterequest CreateVpnConnectionRouteRequest `json:"createvpnconnectionrouterequest,omitempty"`
-}
-
-// POST_CreateVpnConnectionRouteResponses holds responses of POST_CreateVpnConnectionRoute
-type POST_CreateVpnConnectionRouteResponses struct {
- OK *CreateVpnConnectionRouteResponse
-}
-
-// POST_DeleteApiKeyParameters holds parameters to POST_DeleteApiKey
-type POST_DeleteApiKeyParameters struct {
- Deleteapikeyrequest DeleteApiKeyRequest `json:"deleteapikeyrequest,omitempty"`
-}
-
-// POST_DeleteApiKeyResponses holds responses of POST_DeleteApiKey
-type POST_DeleteApiKeyResponses struct {
- OK *DeleteApiKeyResponse
-}
-
-// POST_DeleteClientGatewayParameters holds parameters to POST_DeleteClientGateway
-type POST_DeleteClientGatewayParameters struct {
- Deleteclientgatewayrequest DeleteClientGatewayRequest `json:"deleteclientgatewayrequest,omitempty"`
-}
-
-// POST_DeleteClientGatewayResponses holds responses of POST_DeleteClientGateway
-type POST_DeleteClientGatewayResponses struct {
- OK *DeleteClientGatewayResponse
-}
-
-// POST_DeleteDhcpOptionsParameters holds parameters to POST_DeleteDhcpOptions
-type POST_DeleteDhcpOptionsParameters struct {
- Deletedhcpoptionsrequest DeleteDhcpOptionsRequest `json:"deletedhcpoptionsrequest,omitempty"`
-}
-
-// POST_DeleteDhcpOptionsResponses holds responses of POST_DeleteDhcpOptions
-type POST_DeleteDhcpOptionsResponses struct {
- OK *DeleteDhcpOptionsResponse
-}
-
-// POST_DeleteDirectLinkParameters holds parameters to POST_DeleteDirectLink
-type POST_DeleteDirectLinkParameters struct {
- Deletedirectlinkrequest DeleteDirectLinkRequest `json:"deletedirectlinkrequest,omitempty"`
-}
-
-// POST_DeleteDirectLinkResponses holds responses of POST_DeleteDirectLink
-type POST_DeleteDirectLinkResponses struct {
- OK *DeleteDirectLinkResponse
-}
-
-// POST_DeleteDirectLinkInterfaceParameters holds parameters to POST_DeleteDirectLinkInterface
-type POST_DeleteDirectLinkInterfaceParameters struct {
- Deletedirectlinkinterfacerequest DeleteDirectLinkInterfaceRequest `json:"deletedirectlinkinterfacerequest,omitempty"`
-}
-
-// POST_DeleteDirectLinkInterfaceResponses holds responses of POST_DeleteDirectLinkInterface
-type POST_DeleteDirectLinkInterfaceResponses struct {
- OK *DeleteDirectLinkInterfaceResponse
-}
-
-// POST_DeleteExportTaskParameters holds parameters to POST_DeleteExportTask
-type POST_DeleteExportTaskParameters struct {
- Deleteexporttaskrequest DeleteExportTaskRequest `json:"deleteexporttaskrequest,omitempty"`
-}
-
-// POST_DeleteExportTaskResponses holds responses of POST_DeleteExportTask
-type POST_DeleteExportTaskResponses struct {
- OK *DeleteExportTaskResponse
-}
-
-// POST_DeleteImageParameters holds parameters to POST_DeleteImage
-type POST_DeleteImageParameters struct {
- Deleteimagerequest DeleteImageRequest `json:"deleteimagerequest,omitempty"`
-}
-
-// POST_DeleteImageResponses holds responses of POST_DeleteImage
-type POST_DeleteImageResponses struct {
- OK *DeleteImageResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteInternetServiceParameters holds parameters to POST_DeleteInternetService
-type POST_DeleteInternetServiceParameters struct {
- Deleteinternetservicerequest DeleteInternetServiceRequest `json:"deleteinternetservicerequest,omitempty"`
-}
-
-// POST_DeleteInternetServiceResponses holds responses of POST_DeleteInternetService
-type POST_DeleteInternetServiceResponses struct {
- OK *DeleteInternetServiceResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteKeypairParameters holds parameters to POST_DeleteKeypair
-type POST_DeleteKeypairParameters struct {
- Deletekeypairrequest DeleteKeypairRequest `json:"deletekeypairrequest,omitempty"`
-}
-
-// POST_DeleteKeypairResponses holds responses of POST_DeleteKeypair
-type POST_DeleteKeypairResponses struct {
- OK *DeleteKeypairResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteListenerRuleParameters holds parameters to POST_DeleteListenerRule
-type POST_DeleteListenerRuleParameters struct {
- Deletelistenerrulerequest DeleteListenerRuleRequest `json:"deletelistenerrulerequest,omitempty"`
-}
-
-// POST_DeleteListenerRuleResponses holds responses of POST_DeleteListenerRule
-type POST_DeleteListenerRuleResponses struct {
- OK *DeleteListenerRuleResponse
-}
-
-// POST_DeleteLoadBalancerParameters holds parameters to POST_DeleteLoadBalancer
-type POST_DeleteLoadBalancerParameters struct {
- Deleteloadbalancerrequest DeleteLoadBalancerRequest `json:"deleteloadbalancerrequest,omitempty"`
-}
-
-// POST_DeleteLoadBalancerResponses holds responses of POST_DeleteLoadBalancer
-type POST_DeleteLoadBalancerResponses struct {
- OK *DeleteLoadBalancerResponse
-}
-
-// POST_DeleteLoadBalancerListenersParameters holds parameters to POST_DeleteLoadBalancerListeners
-type POST_DeleteLoadBalancerListenersParameters struct {
- Deleteloadbalancerlistenersrequest DeleteLoadBalancerListenersRequest `json:"deleteloadbalancerlistenersrequest,omitempty"`
-}
-
-// POST_DeleteLoadBalancerListenersResponses holds responses of POST_DeleteLoadBalancerListeners
-type POST_DeleteLoadBalancerListenersResponses struct {
- OK *DeleteLoadBalancerListenersResponse
-}
-
-// POST_DeleteLoadBalancerPolicyParameters holds parameters to POST_DeleteLoadBalancerPolicy
-type POST_DeleteLoadBalancerPolicyParameters struct {
- Deleteloadbalancerpolicyrequest DeleteLoadBalancerPolicyRequest `json:"deleteloadbalancerpolicyrequest,omitempty"`
-}
-
-// POST_DeleteLoadBalancerPolicyResponses holds responses of POST_DeleteLoadBalancerPolicy
-type POST_DeleteLoadBalancerPolicyResponses struct {
- OK *DeleteLoadBalancerPolicyResponse
-}
-
-// POST_DeleteNatServiceParameters holds parameters to POST_DeleteNatService
-type POST_DeleteNatServiceParameters struct {
- Deletenatservicerequest DeleteNatServiceRequest `json:"deletenatservicerequest,omitempty"`
-}
-
-// POST_DeleteNatServiceResponses holds responses of POST_DeleteNatService
-type POST_DeleteNatServiceResponses struct {
- OK *DeleteNatServiceResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteNetParameters holds parameters to POST_DeleteNet
-type POST_DeleteNetParameters struct {
- Deletenetrequest DeleteNetRequest `json:"deletenetrequest,omitempty"`
-}
-
-// POST_DeleteNetResponses holds responses of POST_DeleteNet
-type POST_DeleteNetResponses struct {
- OK *DeleteNetResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteNetAccessPointsParameters holds parameters to POST_DeleteNetAccessPoints
-type POST_DeleteNetAccessPointsParameters struct {
- Deletenetaccesspointsrequest DeleteNetAccessPointsRequest `json:"deletenetaccesspointsrequest,omitempty"`
-}
-
-// POST_DeleteNetAccessPointsResponses holds responses of POST_DeleteNetAccessPoints
-type POST_DeleteNetAccessPointsResponses struct {
- OK *DeleteNetAccessPointsResponse
-}
-
-// POST_DeleteNetPeeringParameters holds parameters to POST_DeleteNetPeering
-type POST_DeleteNetPeeringParameters struct {
- Deletenetpeeringrequest DeleteNetPeeringRequest `json:"deletenetpeeringrequest,omitempty"`
-}
-
-// POST_DeleteNetPeeringResponses holds responses of POST_DeleteNetPeering
-type POST_DeleteNetPeeringResponses struct {
- OK *DeleteNetPeeringResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code409 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteNicParameters holds parameters to POST_DeleteNic
-type POST_DeleteNicParameters struct {
- Deletenicrequest DeleteNicRequest `json:"deletenicrequest,omitempty"`
-}
-
-// POST_DeleteNicResponses holds responses of POST_DeleteNic
-type POST_DeleteNicResponses struct {
- OK *DeleteNicResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeletePolicyParameters holds parameters to POST_DeletePolicy
-type POST_DeletePolicyParameters struct {
- Deletepolicyrequest DeletePolicyRequest `json:"deletepolicyrequest,omitempty"`
-}
-
-// POST_DeletePolicyResponses holds responses of POST_DeletePolicy
-type POST_DeletePolicyResponses struct {
- OK *DeletePolicyResponse
-}
-
-// POST_DeletePublicIpParameters holds parameters to POST_DeletePublicIp
-type POST_DeletePublicIpParameters struct {
- Deletepubliciprequest DeletePublicIpRequest `json:"deletepubliciprequest,omitempty"`
-}
-
-// POST_DeletePublicIpResponses holds responses of POST_DeletePublicIp
-type POST_DeletePublicIpResponses struct {
- OK *DeletePublicIpResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteRouteParameters holds parameters to POST_DeleteRoute
-type POST_DeleteRouteParameters struct {
- Deleterouterequest DeleteRouteRequest `json:"deleterouterequest,omitempty"`
-}
-
-// POST_DeleteRouteResponses holds responses of POST_DeleteRoute
-type POST_DeleteRouteResponses struct {
- OK *DeleteRouteResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteRouteTableParameters holds parameters to POST_DeleteRouteTable
-type POST_DeleteRouteTableParameters struct {
- Deleteroutetablerequest DeleteRouteTableRequest `json:"deleteroutetablerequest,omitempty"`
-}
-
-// POST_DeleteRouteTableResponses holds responses of POST_DeleteRouteTable
-type POST_DeleteRouteTableResponses struct {
- OK *DeleteRouteTableResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteSecurityGroupParameters holds parameters to POST_DeleteSecurityGroup
-type POST_DeleteSecurityGroupParameters struct {
- Deletesecuritygrouprequest DeleteSecurityGroupRequest `json:"deletesecuritygrouprequest,omitempty"`
-}
-
-// POST_DeleteSecurityGroupResponses holds responses of POST_DeleteSecurityGroup
-type POST_DeleteSecurityGroupResponses struct {
- OK *DeleteSecurityGroupResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteSecurityGroupRuleParameters holds parameters to POST_DeleteSecurityGroupRule
-type POST_DeleteSecurityGroupRuleParameters struct {
- Deletesecuritygrouprulerequest DeleteSecurityGroupRuleRequest `json:"deletesecuritygrouprulerequest,omitempty"`
-}
-
-// POST_DeleteSecurityGroupRuleResponses holds responses of POST_DeleteSecurityGroupRule
-type POST_DeleteSecurityGroupRuleResponses struct {
- OK *DeleteSecurityGroupRuleResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteServerCertificateParameters holds parameters to POST_DeleteServerCertificate
-type POST_DeleteServerCertificateParameters struct {
- Deleteservercertificaterequest DeleteServerCertificateRequest `json:"deleteservercertificaterequest,omitempty"`
-}
-
-// POST_DeleteServerCertificateResponses holds responses of POST_DeleteServerCertificate
-type POST_DeleteServerCertificateResponses struct {
- OK *DeleteServerCertificateResponse
-}
-
-// POST_DeleteSnapshotParameters holds parameters to POST_DeleteSnapshot
-type POST_DeleteSnapshotParameters struct {
- Deletesnapshotrequest DeleteSnapshotRequest `json:"deletesnapshotrequest,omitempty"`
-}
-
-// POST_DeleteSnapshotResponses holds responses of POST_DeleteSnapshot
-type POST_DeleteSnapshotResponses struct {
- OK *DeleteSnapshotResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteSubnetParameters holds parameters to POST_DeleteSubnet
-type POST_DeleteSubnetParameters struct {
- Deletesubnetrequest DeleteSubnetRequest `json:"deletesubnetrequest,omitempty"`
-}
-
-// POST_DeleteSubnetResponses holds responses of POST_DeleteSubnet
-type POST_DeleteSubnetResponses struct {
- OK *DeleteSubnetResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteTagsParameters holds parameters to POST_DeleteTags
-type POST_DeleteTagsParameters struct {
- Deletetagsrequest DeleteTagsRequest `json:"deletetagsrequest,omitempty"`
-}
-
-// POST_DeleteTagsResponses holds responses of POST_DeleteTags
-type POST_DeleteTagsResponses struct {
- OK *DeleteTagsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteUserParameters holds parameters to POST_DeleteUser
-type POST_DeleteUserParameters struct {
- Deleteuserrequest DeleteUserRequest `json:"deleteuserrequest,omitempty"`
-}
-
-// POST_DeleteUserResponses holds responses of POST_DeleteUser
-type POST_DeleteUserResponses struct {
- OK *DeleteUserResponse
-}
-
-// POST_DeleteUserGroupParameters holds parameters to POST_DeleteUserGroup
-type POST_DeleteUserGroupParameters struct {
- Deleteusergrouprequest DeleteUserGroupRequest `json:"deleteusergrouprequest,omitempty"`
-}
-
-// POST_DeleteUserGroupResponses holds responses of POST_DeleteUserGroup
-type POST_DeleteUserGroupResponses struct {
- OK *DeleteUserGroupResponse
-}
-
-// POST_DeleteVirtualGatewayParameters holds parameters to POST_DeleteVirtualGateway
-type POST_DeleteVirtualGatewayParameters struct {
- Deletevirtualgatewayrequest DeleteVirtualGatewayRequest `json:"deletevirtualgatewayrequest,omitempty"`
-}
-
-// POST_DeleteVirtualGatewayResponses holds responses of POST_DeleteVirtualGateway
-type POST_DeleteVirtualGatewayResponses struct {
- OK *DeleteVirtualGatewayResponse
-}
-
-// POST_DeleteVmsParameters holds parameters to POST_DeleteVms
-type POST_DeleteVmsParameters struct {
- Deletevmsrequest DeleteVmsRequest `json:"deletevmsrequest,omitempty"`
-}
-
-// POST_DeleteVmsResponses holds responses of POST_DeleteVms
-type POST_DeleteVmsResponses struct {
- OK *DeleteVmsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteVolumeParameters holds parameters to POST_DeleteVolume
-type POST_DeleteVolumeParameters struct {
- Deletevolumerequest DeleteVolumeRequest `json:"deletevolumerequest,omitempty"`
-}
-
-// POST_DeleteVolumeResponses holds responses of POST_DeleteVolume
-type POST_DeleteVolumeResponses struct {
- OK *DeleteVolumeResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_DeleteVpnConnectionParameters holds parameters to POST_DeleteVpnConnection
-type POST_DeleteVpnConnectionParameters struct {
- Deletevpnconnectionrequest DeleteVpnConnectionRequest `json:"deletevpnconnectionrequest,omitempty"`
-}
-
-// POST_DeleteVpnConnectionResponses holds responses of POST_DeleteVpnConnection
-type POST_DeleteVpnConnectionResponses struct {
- OK *DeleteVpnConnectionResponse
-}
-
-// POST_DeleteVpnConnectionRouteParameters holds parameters to POST_DeleteVpnConnectionRoute
-type POST_DeleteVpnConnectionRouteParameters struct {
- Deletevpnconnectionrouterequest DeleteVpnConnectionRouteRequest `json:"deletevpnconnectionrouterequest,omitempty"`
-}
-
-// POST_DeleteVpnConnectionRouteResponses holds responses of POST_DeleteVpnConnectionRoute
-type POST_DeleteVpnConnectionRouteResponses struct {
- OK *DeleteVpnConnectionRouteResponse
-}
-
-// POST_DeregisterUserInUserGroupParameters holds parameters to POST_DeregisterUserInUserGroup
-type POST_DeregisterUserInUserGroupParameters struct {
- Deregisteruserinusergrouprequest DeregisterUserInUserGroupRequest `json:"deregisteruserinusergrouprequest,omitempty"`
-}
-
-// POST_DeregisterUserInUserGroupResponses holds responses of POST_DeregisterUserInUserGroup
-type POST_DeregisterUserInUserGroupResponses struct {
- OK *DeregisterUserInUserGroupResponse
-}
-
-// POST_DeregisterVmsInLoadBalancerParameters holds parameters to POST_DeregisterVmsInLoadBalancer
-type POST_DeregisterVmsInLoadBalancerParameters struct {
- Deregistervmsinloadbalancerrequest DeregisterVmsInLoadBalancerRequest `json:"deregistervmsinloadbalancerrequest,omitempty"`
-}
-
-// POST_DeregisterVmsInLoadBalancerResponses holds responses of POST_DeregisterVmsInLoadBalancer
-type POST_DeregisterVmsInLoadBalancerResponses struct {
- OK *DeregisterVmsInLoadBalancerResponse
-}
-
-// POST_LinkInternetServiceParameters holds parameters to POST_LinkInternetService
-type POST_LinkInternetServiceParameters struct {
- Linkinternetservicerequest LinkInternetServiceRequest `json:"linkinternetservicerequest,omitempty"`
-}
-
-// POST_LinkInternetServiceResponses holds responses of POST_LinkInternetService
-type POST_LinkInternetServiceResponses struct {
- OK *LinkInternetServiceResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_LinkNicParameters holds parameters to POST_LinkNic
-type POST_LinkNicParameters struct {
- Linknicrequest LinkNicRequest `json:"linknicrequest,omitempty"`
-}
-
-// POST_LinkNicResponses holds responses of POST_LinkNic
-type POST_LinkNicResponses struct {
- OK *LinkNicResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_LinkPolicyParameters holds parameters to POST_LinkPolicy
-type POST_LinkPolicyParameters struct {
- Linkpolicyrequest LinkPolicyRequest `json:"linkpolicyrequest,omitempty"`
-}
-
-// POST_LinkPolicyResponses holds responses of POST_LinkPolicy
-type POST_LinkPolicyResponses struct {
- OK *LinkPolicyResponse
-}
-
-// POST_LinkPrivateIpsParameters holds parameters to POST_LinkPrivateIps
-type POST_LinkPrivateIpsParameters struct {
- Linkprivateipsrequest LinkPrivateIpsRequest `json:"linkprivateipsrequest,omitempty"`
-}
-
-// POST_LinkPrivateIpsResponses holds responses of POST_LinkPrivateIps
-type POST_LinkPrivateIpsResponses struct {
- OK *LinkPrivateIpsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_LinkPublicIpParameters holds parameters to POST_LinkPublicIp
-type POST_LinkPublicIpParameters struct {
- Linkpubliciprequest LinkPublicIpRequest `json:"linkpubliciprequest,omitempty"`
-}
-
-// POST_LinkPublicIpResponses holds responses of POST_LinkPublicIp
-type POST_LinkPublicIpResponses struct {
- OK *LinkPublicIpResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_LinkRouteTableParameters holds parameters to POST_LinkRouteTable
-type POST_LinkRouteTableParameters struct {
- Linkroutetablerequest LinkRouteTableRequest `json:"linkroutetablerequest,omitempty"`
-}
-
-// POST_LinkRouteTableResponses holds responses of POST_LinkRouteTable
-type POST_LinkRouteTableResponses struct {
- OK *LinkRouteTableResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_LinkVirtualGatewayParameters holds parameters to POST_LinkVirtualGateway
-type POST_LinkVirtualGatewayParameters struct {
- Linkvirtualgatewayrequest LinkVirtualGatewayRequest `json:"linkvirtualgatewayrequest,omitempty"`
-}
-
-// POST_LinkVirtualGatewayResponses holds responses of POST_LinkVirtualGateway
-type POST_LinkVirtualGatewayResponses struct {
- OK *LinkVirtualGatewayResponse
-}
-
-// POST_LinkVolumeParameters holds parameters to POST_LinkVolume
-type POST_LinkVolumeParameters struct {
- Linkvolumerequest LinkVolumeRequest `json:"linkvolumerequest,omitempty"`
-}
-
-// POST_LinkVolumeResponses holds responses of POST_LinkVolume
-type POST_LinkVolumeResponses struct {
- OK *LinkVolumeResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_PurchaseReservedVmsOfferParameters holds parameters to POST_PurchaseReservedVmsOffer
-type POST_PurchaseReservedVmsOfferParameters struct {
- Purchasereservedvmsofferrequest PurchaseReservedVmsOfferRequest `json:"purchasereservedvmsofferrequest,omitempty"`
-}
-
-// POST_PurchaseReservedVmsOfferResponses holds responses of POST_PurchaseReservedVmsOffer
-type POST_PurchaseReservedVmsOfferResponses struct {
- OK *PurchaseReservedVmsOfferResponse
-}
-
-// POST_ReadAccountParameters holds parameters to POST_ReadAccount
-type POST_ReadAccountParameters struct {
- Readaccountrequest ReadAccountRequest `json:"readaccountrequest,omitempty"`
-}
-
-// POST_ReadAccountResponses holds responses of POST_ReadAccount
-type POST_ReadAccountResponses struct {
- OK *ReadAccountResponse
-}
-
-// POST_ReadAccountConsumptionParameters holds parameters to POST_ReadAccountConsumption
-type POST_ReadAccountConsumptionParameters struct {
- Readaccountconsumptionrequest ReadAccountConsumptionRequest `json:"readaccountconsumptionrequest,omitempty"`
-}
-
-// POST_ReadAccountConsumptionResponses holds responses of POST_ReadAccountConsumption
-type POST_ReadAccountConsumptionResponses struct {
- OK *ReadAccountConsumptionResponse
-}
-
-// POST_ReadAdminPasswordParameters holds parameters to POST_ReadAdminPassword
-type POST_ReadAdminPasswordParameters struct {
- Readadminpasswordrequest ReadAdminPasswordRequest `json:"readadminpasswordrequest,omitempty"`
-}
-
-// POST_ReadAdminPasswordResponses holds responses of POST_ReadAdminPassword
-type POST_ReadAdminPasswordResponses struct {
- OK *ReadAdminPasswordResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadApiKeysParameters holds parameters to POST_ReadApiKeys
-type POST_ReadApiKeysParameters struct {
- Readapikeysrequest ReadApiKeysRequest `json:"readapikeysrequest,omitempty"`
-}
-
-// POST_ReadApiKeysResponses holds responses of POST_ReadApiKeys
-type POST_ReadApiKeysResponses struct {
- OK *ReadApiKeysResponse
-}
-
-// POST_ReadApiLogsParameters holds parameters to POST_ReadApiLogs
-type POST_ReadApiLogsParameters struct {
- Readapilogsrequest ReadApiLogsRequest `json:"readapilogsrequest,omitempty"`
-}
-
-// POST_ReadApiLogsResponses holds responses of POST_ReadApiLogs
-type POST_ReadApiLogsResponses struct {
- OK *ReadApiLogsResponse
-}
-
-// POST_ReadBillableDigestParameters holds parameters to POST_ReadBillableDigest
-type POST_ReadBillableDigestParameters struct {
- Readbillabledigestrequest ReadBillableDigestRequest `json:"readbillabledigestrequest,omitempty"`
-}
-
-// POST_ReadBillableDigestResponses holds responses of POST_ReadBillableDigest
-type POST_ReadBillableDigestResponses struct {
- OK *ReadBillableDigestResponse
-}
-
-// POST_ReadCatalogParameters holds parameters to POST_ReadCatalog
-type POST_ReadCatalogParameters struct {
- Readcatalogrequest ReadCatalogRequest `json:"readcatalogrequest,omitempty"`
-}
-
-// POST_ReadCatalogResponses holds responses of POST_ReadCatalog
-type POST_ReadCatalogResponses struct {
- OK *ReadCatalogResponse
-}
-
-// POST_ReadClientGatewaysParameters holds parameters to POST_ReadClientGateways
-type POST_ReadClientGatewaysParameters struct {
- Readclientgatewaysrequest ReadClientGatewaysRequest `json:"readclientgatewaysrequest,omitempty"`
-}
-
-// POST_ReadClientGatewaysResponses holds responses of POST_ReadClientGateways
-type POST_ReadClientGatewaysResponses struct {
- OK *ReadClientGatewaysResponse
-}
-
-// POST_ReadConsoleOutputParameters holds parameters to POST_ReadConsoleOutput
-type POST_ReadConsoleOutputParameters struct {
- Readconsoleoutputrequest ReadConsoleOutputRequest `json:"readconsoleoutputrequest,omitempty"`
-}
-
-// POST_ReadConsoleOutputResponses holds responses of POST_ReadConsoleOutput
-type POST_ReadConsoleOutputResponses struct {
- OK *ReadConsoleOutputResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadDhcpOptionsParameters holds parameters to POST_ReadDhcpOptions
-type POST_ReadDhcpOptionsParameters struct {
- Readdhcpoptionsrequest ReadDhcpOptionsRequest `json:"readdhcpoptionsrequest,omitempty"`
-}
-
-// POST_ReadDhcpOptionsResponses holds responses of POST_ReadDhcpOptions
-type POST_ReadDhcpOptionsResponses struct {
- OK *ReadDhcpOptionsResponse
-}
-
-// POST_ReadDirectLinkInterfacesParameters holds parameters to POST_ReadDirectLinkInterfaces
-type POST_ReadDirectLinkInterfacesParameters struct {
- Readdirectlinkinterfacesrequest ReadDirectLinkInterfacesRequest `json:"readdirectlinkinterfacesrequest,omitempty"`
-}
-
-// POST_ReadDirectLinkInterfacesResponses holds responses of POST_ReadDirectLinkInterfaces
-type POST_ReadDirectLinkInterfacesResponses struct {
- OK *ReadDirectLinkInterfacesResponse
-}
-
-// POST_ReadDirectLinksParameters holds parameters to POST_ReadDirectLinks
-type POST_ReadDirectLinksParameters struct {
- Readdirectlinksrequest ReadDirectLinksRequest `json:"readdirectlinksrequest,omitempty"`
-}
-
-// POST_ReadDirectLinksResponses holds responses of POST_ReadDirectLinks
-type POST_ReadDirectLinksResponses struct {
- OK *ReadDirectLinksResponse
-}
-
-// POST_ReadImageExportTasksParameters holds parameters to POST_ReadImageExportTasks
-type POST_ReadImageExportTasksParameters struct {
- Readimageexporttasksrequest ReadImageExportTasksRequest `json:"readimageexporttasksrequest,omitempty"`
-}
-
-// POST_ReadImageExportTasksResponses holds responses of POST_ReadImageExportTasks
-type POST_ReadImageExportTasksResponses struct {
- OK *ReadImageExportTasksResponse
-}
-
-// POST_ReadImagesParameters holds parameters to POST_ReadImages
-type POST_ReadImagesParameters struct {
- Readimagesrequest ReadImagesRequest `json:"readimagesrequest,omitempty"`
-}
-
-// POST_ReadImagesResponses holds responses of POST_ReadImages
-type POST_ReadImagesResponses struct {
- OK *ReadImagesResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadInternetServicesParameters holds parameters to POST_ReadInternetServices
-type POST_ReadInternetServicesParameters struct {
- Readinternetservicesrequest ReadInternetServicesRequest `json:"readinternetservicesrequest,omitempty"`
-}
-
-// POST_ReadInternetServicesResponses holds responses of POST_ReadInternetServices
-type POST_ReadInternetServicesResponses struct {
- OK *ReadInternetServicesResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadKeypairsParameters holds parameters to POST_ReadKeypairs
-type POST_ReadKeypairsParameters struct {
- Readkeypairsrequest ReadKeypairsRequest `json:"readkeypairsrequest,omitempty"`
-}
-
-// POST_ReadKeypairsResponses holds responses of POST_ReadKeypairs
-type POST_ReadKeypairsResponses struct {
- OK *ReadKeypairsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadListenerRulesParameters holds parameters to POST_ReadListenerRules
-type POST_ReadListenerRulesParameters struct {
- Readlistenerrulesrequest ReadListenerRulesRequest `json:"readlistenerrulesrequest,omitempty"`
-}
-
-// POST_ReadListenerRulesResponses holds responses of POST_ReadListenerRules
-type POST_ReadListenerRulesResponses struct {
- OK *ReadListenerRulesResponse
-}
-
-// POST_ReadLoadBalancersParameters holds parameters to POST_ReadLoadBalancers
-type POST_ReadLoadBalancersParameters struct {
- Readloadbalancersrequest ReadLoadBalancersRequest `json:"readloadbalancersrequest,omitempty"`
-}
-
-// POST_ReadLoadBalancersResponses holds responses of POST_ReadLoadBalancers
-type POST_ReadLoadBalancersResponses struct {
- OK *ReadLoadBalancersResponse
-}
-
-// POST_ReadLocationsParameters holds parameters to POST_ReadLocations
-type POST_ReadLocationsParameters struct {
- Readlocationsrequest ReadLocationsRequest `json:"readlocationsrequest,omitempty"`
-}
-
-// POST_ReadLocationsResponses holds responses of POST_ReadLocations
-type POST_ReadLocationsResponses struct {
- OK *ReadLocationsResponse
-}
-
-// POST_ReadNatServicesParameters holds parameters to POST_ReadNatServices
-type POST_ReadNatServicesParameters struct {
- Readnatservicesrequest ReadNatServicesRequest `json:"readnatservicesrequest,omitempty"`
-}
-
-// POST_ReadNatServicesResponses holds responses of POST_ReadNatServices
-type POST_ReadNatServicesResponses struct {
- OK *ReadNatServicesResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadNetAccessPointServicesParameters holds parameters to POST_ReadNetAccessPointServices
-type POST_ReadNetAccessPointServicesParameters struct {
- Readnetaccesspointservicesrequest ReadNetAccessPointServicesRequest `json:"readnetaccesspointservicesrequest,omitempty"`
-}
-
-// POST_ReadNetAccessPointServicesResponses holds responses of POST_ReadNetAccessPointServices
-type POST_ReadNetAccessPointServicesResponses struct {
- OK *ReadNetAccessPointServicesResponse
-}
-
-// POST_ReadNetAccessPointsParameters holds parameters to POST_ReadNetAccessPoints
-type POST_ReadNetAccessPointsParameters struct {
- Readnetaccesspointsrequest ReadNetAccessPointsRequest `json:"readnetaccesspointsrequest,omitempty"`
-}
-
-// POST_ReadNetAccessPointsResponses holds responses of POST_ReadNetAccessPoints
-type POST_ReadNetAccessPointsResponses struct {
- OK *ReadNetAccessPointsResponse
-}
-
-// POST_ReadNetPeeringsParameters holds parameters to POST_ReadNetPeerings
-type POST_ReadNetPeeringsParameters struct {
- Readnetpeeringsrequest ReadNetPeeringsRequest `json:"readnetpeeringsrequest,omitempty"`
-}
-
-// POST_ReadNetPeeringsResponses holds responses of POST_ReadNetPeerings
-type POST_ReadNetPeeringsResponses struct {
- OK *ReadNetPeeringsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadNetsParameters holds parameters to POST_ReadNets
-type POST_ReadNetsParameters struct {
- Readnetsrequest ReadNetsRequest `json:"readnetsrequest,omitempty"`
-}
-
-// POST_ReadNetsResponses holds responses of POST_ReadNets
-type POST_ReadNetsResponses struct {
- OK *ReadNetsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadNicsParameters holds parameters to POST_ReadNics
-type POST_ReadNicsParameters struct {
- Readnicsrequest ReadNicsRequest `json:"readnicsrequest,omitempty"`
-}
-
-// POST_ReadNicsResponses holds responses of POST_ReadNics
-type POST_ReadNicsResponses struct {
- OK *ReadNicsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadPoliciesParameters holds parameters to POST_ReadPolicies
-type POST_ReadPoliciesParameters struct {
- Readpoliciesrequest ReadPoliciesRequest `json:"readpoliciesrequest,omitempty"`
-}
-
-// POST_ReadPoliciesResponses holds responses of POST_ReadPolicies
-type POST_ReadPoliciesResponses struct {
- OK *ReadPoliciesResponse
-}
-
-// POST_ReadPrefixListsParameters holds parameters to POST_ReadPrefixLists
-type POST_ReadPrefixListsParameters struct {
- Readprefixlistsrequest ReadPrefixListsRequest `json:"readprefixlistsrequest,omitempty"`
-}
-
-// POST_ReadPrefixListsResponses holds responses of POST_ReadPrefixLists
-type POST_ReadPrefixListsResponses struct {
- OK *ReadPrefixListsResponse
-}
-
-// POST_ReadProductTypesParameters holds parameters to POST_ReadProductTypes
-type POST_ReadProductTypesParameters struct {
- Readproducttypesrequest ReadProductTypesRequest `json:"readproducttypesrequest,omitempty"`
-}
-
-// POST_ReadProductTypesResponses holds responses of POST_ReadProductTypes
-type POST_ReadProductTypesResponses struct {
- OK *ReadProductTypesResponse
-}
-
-// POST_ReadPublicCatalogParameters holds parameters to POST_ReadPublicCatalog
-type POST_ReadPublicCatalogParameters struct {
- Readpubliccatalogrequest ReadPublicCatalogRequest `json:"readpubliccatalogrequest,omitempty"`
-}
-
-// POST_ReadPublicCatalogResponses holds responses of POST_ReadPublicCatalog
-type POST_ReadPublicCatalogResponses struct {
- OK *ReadPublicCatalogResponse
-}
-
-// POST_ReadPublicIpRangesParameters holds parameters to POST_ReadPublicIpRanges
-type POST_ReadPublicIpRangesParameters struct {
- Readpubliciprangesrequest ReadPublicIpRangesRequest `json:"readpubliciprangesrequest,omitempty"`
-}
-
-// POST_ReadPublicIpRangesResponses holds responses of POST_ReadPublicIpRanges
-type POST_ReadPublicIpRangesResponses struct {
- OK *ReadPublicIpRangesResponse
-}
-
-// POST_ReadPublicIpsParameters holds parameters to POST_ReadPublicIps
-type POST_ReadPublicIpsParameters struct {
- Readpublicipsrequest ReadPublicIpsRequest `json:"readpublicipsrequest,omitempty"`
-}
-
-// POST_ReadPublicIpsResponses holds responses of POST_ReadPublicIps
-type POST_ReadPublicIpsResponses struct {
- OK *ReadPublicIpsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadQuotasParameters holds parameters to POST_ReadQuotas
-type POST_ReadQuotasParameters struct {
- Readquotasrequest ReadQuotasRequest `json:"readquotasrequest,omitempty"`
-}
-
-// POST_ReadQuotasResponses holds responses of POST_ReadQuotas
-type POST_ReadQuotasResponses struct {
- OK *ReadQuotasResponse
-}
-
-// POST_ReadRegionConfigParameters holds parameters to POST_ReadRegionConfig
-type POST_ReadRegionConfigParameters struct {
- Readregionconfigrequest ReadRegionConfigRequest `json:"readregionconfigrequest,omitempty"`
-}
-
-// POST_ReadRegionConfigResponses holds responses of POST_ReadRegionConfig
-type POST_ReadRegionConfigResponses struct {
- OK *ReadRegionConfigResponse
-}
-
-// POST_ReadRegionsParameters holds parameters to POST_ReadRegions
-type POST_ReadRegionsParameters struct {
- Readregionsrequest ReadRegionsRequest `json:"readregionsrequest,omitempty"`
-}
-
-// POST_ReadRegionsResponses holds responses of POST_ReadRegions
-type POST_ReadRegionsResponses struct {
- OK *ReadRegionsResponse
-}
-
-// POST_ReadReservedVmOffersParameters holds parameters to POST_ReadReservedVmOffers
-type POST_ReadReservedVmOffersParameters struct {
- Readreservedvmoffersrequest ReadReservedVmOffersRequest `json:"readreservedvmoffersrequest,omitempty"`
-}
-
-// POST_ReadReservedVmOffersResponses holds responses of POST_ReadReservedVmOffers
-type POST_ReadReservedVmOffersResponses struct {
- OK *ReadReservedVmOffersResponse
-}
-
-// POST_ReadReservedVmsParameters holds parameters to POST_ReadReservedVms
-type POST_ReadReservedVmsParameters struct {
- Readreservedvmsrequest ReadReservedVmsRequest `json:"readreservedvmsrequest,omitempty"`
-}
-
-// POST_ReadReservedVmsResponses holds responses of POST_ReadReservedVms
-type POST_ReadReservedVmsResponses struct {
- OK *ReadReservedVmsResponse
-}
-
-// POST_ReadRouteTablesParameters holds parameters to POST_ReadRouteTables
-type POST_ReadRouteTablesParameters struct {
- Readroutetablesrequest ReadRouteTablesRequest `json:"readroutetablesrequest,omitempty"`
-}
-
-// POST_ReadRouteTablesResponses holds responses of POST_ReadRouteTables
-type POST_ReadRouteTablesResponses struct {
- OK *ReadRouteTablesResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadSecurityGroupsParameters holds parameters to POST_ReadSecurityGroups
-type POST_ReadSecurityGroupsParameters struct {
- Readsecuritygroupsrequest ReadSecurityGroupsRequest `json:"readsecuritygroupsrequest,omitempty"`
-}
-
-// POST_ReadSecurityGroupsResponses holds responses of POST_ReadSecurityGroups
-type POST_ReadSecurityGroupsResponses struct {
- OK *ReadSecurityGroupsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadServerCertificatesParameters holds parameters to POST_ReadServerCertificates
-type POST_ReadServerCertificatesParameters struct {
- Readservercertificatesrequest ReadServerCertificatesRequest `json:"readservercertificatesrequest,omitempty"`
-}
-
-// POST_ReadServerCertificatesResponses holds responses of POST_ReadServerCertificates
-type POST_ReadServerCertificatesResponses struct {
- OK *ReadServerCertificatesResponse
-}
-
-// POST_ReadSnapshotExportTasksParameters holds parameters to POST_ReadSnapshotExportTasks
-type POST_ReadSnapshotExportTasksParameters struct {
- Readsnapshotexporttasksrequest ReadSnapshotExportTasksRequest `json:"readsnapshotexporttasksrequest,omitempty"`
-}
-
-// POST_ReadSnapshotExportTasksResponses holds responses of POST_ReadSnapshotExportTasks
-type POST_ReadSnapshotExportTasksResponses struct {
- OK *ReadSnapshotExportTasksResponse
-}
-
-// POST_ReadSnapshotsParameters holds parameters to POST_ReadSnapshots
-type POST_ReadSnapshotsParameters struct {
- Readsnapshotsrequest ReadSnapshotsRequest `json:"readsnapshotsrequest,omitempty"`
-}
-
-// POST_ReadSnapshotsResponses holds responses of POST_ReadSnapshots
-type POST_ReadSnapshotsResponses struct {
- OK *ReadSnapshotsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadSubnetsParameters holds parameters to POST_ReadSubnets
-type POST_ReadSubnetsParameters struct {
- Readsubnetsrequest ReadSubnetsRequest `json:"readsubnetsrequest,omitempty"`
-}
-
-// POST_ReadSubnetsResponses holds responses of POST_ReadSubnets
-type POST_ReadSubnetsResponses struct {
- OK *ReadSubnetsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadSubregionsParameters holds parameters to POST_ReadSubregions
-type POST_ReadSubregionsParameters struct {
- Readsubregionsrequest ReadSubregionsRequest `json:"readsubregionsrequest,omitempty"`
-}
-
-// POST_ReadSubregionsResponses holds responses of POST_ReadSubregions
-type POST_ReadSubregionsResponses struct {
- OK *ReadSubregionsResponse
-}
-
-// POST_ReadTagsParameters holds parameters to POST_ReadTags
-type POST_ReadTagsParameters struct {
- Readtagsrequest ReadTagsRequest `json:"readtagsrequest,omitempty"`
-}
-
-// POST_ReadTagsResponses holds responses of POST_ReadTags
-type POST_ReadTagsResponses struct {
- OK *ReadTagsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadUserGroupsParameters holds parameters to POST_ReadUserGroups
-type POST_ReadUserGroupsParameters struct {
- Readusergroupsrequest ReadUserGroupsRequest `json:"readusergroupsrequest,omitempty"`
-}
-
-// POST_ReadUserGroupsResponses holds responses of POST_ReadUserGroups
-type POST_ReadUserGroupsResponses struct {
- OK *ReadUserGroupsResponse
-}
-
-// POST_ReadUsersParameters holds parameters to POST_ReadUsers
-type POST_ReadUsersParameters struct {
- Readusersrequest ReadUsersRequest `json:"readusersrequest,omitempty"`
-}
-
-// POST_ReadUsersResponses holds responses of POST_ReadUsers
-type POST_ReadUsersResponses struct {
- OK *ReadUsersResponse
-}
-
-// POST_ReadVirtualGatewaysParameters holds parameters to POST_ReadVirtualGateways
-type POST_ReadVirtualGatewaysParameters struct {
- Readvirtualgatewaysrequest ReadVirtualGatewaysRequest `json:"readvirtualgatewaysrequest,omitempty"`
-}
-
-// POST_ReadVirtualGatewaysResponses holds responses of POST_ReadVirtualGateways
-type POST_ReadVirtualGatewaysResponses struct {
- OK *ReadVirtualGatewaysResponse
-}
-
-// POST_ReadVmTypesParameters holds parameters to POST_ReadVmTypes
-type POST_ReadVmTypesParameters struct {
- Readvmtypesrequest ReadVmTypesRequest `json:"readvmtypesrequest,omitempty"`
-}
-
-// POST_ReadVmTypesResponses holds responses of POST_ReadVmTypes
-type POST_ReadVmTypesResponses struct {
- OK *ReadVmTypesResponse
-}
-
-// POST_ReadVmsParameters holds parameters to POST_ReadVms
-type POST_ReadVmsParameters struct {
- Readvmsrequest ReadVmsRequest `json:"readvmsrequest,omitempty"`
-}
-
-// POST_ReadVmsResponses holds responses of POST_ReadVms
-type POST_ReadVmsResponses struct {
- OK *ReadVmsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadVmsHealthParameters holds parameters to POST_ReadVmsHealth
-type POST_ReadVmsHealthParameters struct {
- Readvmshealthrequest ReadVmsHealthRequest `json:"readvmshealthrequest,omitempty"`
-}
-
-// POST_ReadVmsHealthResponses holds responses of POST_ReadVmsHealth
-type POST_ReadVmsHealthResponses struct {
- OK *ReadVmsHealthResponse
-}
-
-// POST_ReadVmsStateParameters holds parameters to POST_ReadVmsState
-type POST_ReadVmsStateParameters struct {
- Readvmsstaterequest ReadVmsStateRequest `json:"readvmsstaterequest,omitempty"`
-}
-
-// POST_ReadVmsStateResponses holds responses of POST_ReadVmsState
-type POST_ReadVmsStateResponses struct {
- OK *ReadVmsStateResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadVolumesParameters holds parameters to POST_ReadVolumes
-type POST_ReadVolumesParameters struct {
- Readvolumesrequest ReadVolumesRequest `json:"readvolumesrequest,omitempty"`
-}
-
-// POST_ReadVolumesResponses holds responses of POST_ReadVolumes
-type POST_ReadVolumesResponses struct {
- OK *ReadVolumesResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ReadVpnConnectionsParameters holds parameters to POST_ReadVpnConnections
-type POST_ReadVpnConnectionsParameters struct {
- Readvpnconnectionsrequest ReadVpnConnectionsRequest `json:"readvpnconnectionsrequest,omitempty"`
-}
-
-// POST_ReadVpnConnectionsResponses holds responses of POST_ReadVpnConnections
-type POST_ReadVpnConnectionsResponses struct {
- OK *ReadVpnConnectionsResponse
-}
-
-// POST_RebootVmsParameters holds parameters to POST_RebootVms
-type POST_RebootVmsParameters struct {
- Rebootvmsrequest RebootVmsRequest `json:"rebootvmsrequest,omitempty"`
-}
-
-// POST_RebootVmsResponses holds responses of POST_RebootVms
-type POST_RebootVmsResponses struct {
- OK *RebootVmsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_RegisterUserInUserGroupParameters holds parameters to POST_RegisterUserInUserGroup
-type POST_RegisterUserInUserGroupParameters struct {
- Registeruserinusergrouprequest RegisterUserInUserGroupRequest `json:"registeruserinusergrouprequest,omitempty"`
-}
-
-// POST_RegisterUserInUserGroupResponses holds responses of POST_RegisterUserInUserGroup
-type POST_RegisterUserInUserGroupResponses struct {
- OK *RegisterUserInUserGroupResponse
-}
-
-// POST_RegisterVmsInLoadBalancerParameters holds parameters to POST_RegisterVmsInLoadBalancer
-type POST_RegisterVmsInLoadBalancerParameters struct {
- Registervmsinloadbalancerrequest RegisterVmsInLoadBalancerRequest `json:"registervmsinloadbalancerrequest,omitempty"`
-}
-
-// POST_RegisterVmsInLoadBalancerResponses holds responses of POST_RegisterVmsInLoadBalancer
-type POST_RegisterVmsInLoadBalancerResponses struct {
- OK *RegisterVmsInLoadBalancerResponse
-}
-
-// POST_RejectNetPeeringParameters holds parameters to POST_RejectNetPeering
-type POST_RejectNetPeeringParameters struct {
- Rejectnetpeeringrequest RejectNetPeeringRequest `json:"rejectnetpeeringrequest,omitempty"`
-}
-
-// POST_RejectNetPeeringResponses holds responses of POST_RejectNetPeering
-type POST_RejectNetPeeringResponses struct {
- OK *RejectNetPeeringResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code409 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_ResetAccountPasswordParameters holds parameters to POST_ResetAccountPassword
-type POST_ResetAccountPasswordParameters struct {
- Resetaccountpasswordrequest ResetAccountPasswordRequest `json:"resetaccountpasswordrequest,omitempty"`
-}
-
-// POST_ResetAccountPasswordResponses holds responses of POST_ResetAccountPassword
-type POST_ResetAccountPasswordResponses struct {
- OK *ResetAccountPasswordResponse
-}
-
-// POST_SendResetPasswordEmailParameters holds parameters to POST_SendResetPasswordEmail
-type POST_SendResetPasswordEmailParameters struct {
- Sendresetpasswordemailrequest SendResetPasswordEmailRequest `json:"sendresetpasswordemailrequest,omitempty"`
-}
-
-// POST_SendResetPasswordEmailResponses holds responses of POST_SendResetPasswordEmail
-type POST_SendResetPasswordEmailResponses struct {
- OK *SendResetPasswordEmailResponse
-}
-
-// POST_StartVmsParameters holds parameters to POST_StartVms
-type POST_StartVmsParameters struct {
- Startvmsrequest StartVmsRequest `json:"startvmsrequest,omitempty"`
-}
-
-// POST_StartVmsResponses holds responses of POST_StartVms
-type POST_StartVmsResponses struct {
- OK *StartVmsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_StopVmsParameters holds parameters to POST_StopVms
-type POST_StopVmsParameters struct {
- Stopvmsrequest StopVmsRequest `json:"stopvmsrequest,omitempty"`
-}
-
-// POST_StopVmsResponses holds responses of POST_StopVms
-type POST_StopVmsResponses struct {
- OK *StopVmsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UnlinkInternetServiceParameters holds parameters to POST_UnlinkInternetService
-type POST_UnlinkInternetServiceParameters struct {
- Unlinkinternetservicerequest UnlinkInternetServiceRequest `json:"unlinkinternetservicerequest,omitempty"`
-}
-
-// POST_UnlinkInternetServiceResponses holds responses of POST_UnlinkInternetService
-type POST_UnlinkInternetServiceResponses struct {
- OK *UnlinkInternetServiceResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UnlinkNicParameters holds parameters to POST_UnlinkNic
-type POST_UnlinkNicParameters struct {
- Unlinknicrequest UnlinkNicRequest `json:"unlinknicrequest,omitempty"`
-}
-
-// POST_UnlinkNicResponses holds responses of POST_UnlinkNic
-type POST_UnlinkNicResponses struct {
- OK *UnlinkNicResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UnlinkPolicyParameters holds parameters to POST_UnlinkPolicy
-type POST_UnlinkPolicyParameters struct {
- Unlinkpolicyrequest UnlinkPolicyRequest `json:"unlinkpolicyrequest,omitempty"`
-}
-
-// POST_UnlinkPolicyResponses holds responses of POST_UnlinkPolicy
-type POST_UnlinkPolicyResponses struct {
- OK *UnlinkPolicyResponse
-}
-
-// POST_UnlinkPrivateIpsParameters holds parameters to POST_UnlinkPrivateIps
-type POST_UnlinkPrivateIpsParameters struct {
- Unlinkprivateipsrequest UnlinkPrivateIpsRequest `json:"unlinkprivateipsrequest,omitempty"`
-}
-
-// POST_UnlinkPrivateIpsResponses holds responses of POST_UnlinkPrivateIps
-type POST_UnlinkPrivateIpsResponses struct {
- OK *UnlinkPrivateIpsResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UnlinkPublicIpParameters holds parameters to POST_UnlinkPublicIp
-type POST_UnlinkPublicIpParameters struct {
- Unlinkpubliciprequest UnlinkPublicIpRequest `json:"unlinkpubliciprequest,omitempty"`
-}
-
-// POST_UnlinkPublicIpResponses holds responses of POST_UnlinkPublicIp
-type POST_UnlinkPublicIpResponses struct {
- OK *UnlinkPublicIpResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UnlinkRouteTableParameters holds parameters to POST_UnlinkRouteTable
-type POST_UnlinkRouteTableParameters struct {
- Unlinkroutetablerequest UnlinkRouteTableRequest `json:"unlinkroutetablerequest,omitempty"`
-}
-
-// POST_UnlinkRouteTableResponses holds responses of POST_UnlinkRouteTable
-type POST_UnlinkRouteTableResponses struct {
- OK *UnlinkRouteTableResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UnlinkVirtualGatewayParameters holds parameters to POST_UnlinkVirtualGateway
-type POST_UnlinkVirtualGatewayParameters struct {
- Unlinkvirtualgatewayrequest UnlinkVirtualGatewayRequest `json:"unlinkvirtualgatewayrequest,omitempty"`
-}
-
-// POST_UnlinkVirtualGatewayResponses holds responses of POST_UnlinkVirtualGateway
-type POST_UnlinkVirtualGatewayResponses struct {
- OK *UnlinkVirtualGatewayResponse
-}
-
-// POST_UnlinkVolumeParameters holds parameters to POST_UnlinkVolume
-type POST_UnlinkVolumeParameters struct {
- Unlinkvolumerequest UnlinkVolumeRequest `json:"unlinkvolumerequest,omitempty"`
-}
-
-// POST_UnlinkVolumeResponses holds responses of POST_UnlinkVolume
-type POST_UnlinkVolumeResponses struct {
- OK *UnlinkVolumeResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UpdateAccountParameters holds parameters to POST_UpdateAccount
-type POST_UpdateAccountParameters struct {
- Updateaccountrequest UpdateAccountRequest `json:"updateaccountrequest,omitempty"`
-}
-
-// POST_UpdateAccountResponses holds responses of POST_UpdateAccount
-type POST_UpdateAccountResponses struct {
- OK *UpdateAccountResponse
-}
-
-// POST_UpdateApiKeyParameters holds parameters to POST_UpdateApiKey
-type POST_UpdateApiKeyParameters struct {
- Updateapikeyrequest UpdateApiKeyRequest `json:"updateapikeyrequest,omitempty"`
-}
-
-// POST_UpdateApiKeyResponses holds responses of POST_UpdateApiKey
-type POST_UpdateApiKeyResponses struct {
- OK *UpdateApiKeyResponse
-}
-
-// POST_UpdateHealthCheckParameters holds parameters to POST_UpdateHealthCheck
-type POST_UpdateHealthCheckParameters struct {
- Updatehealthcheckrequest UpdateHealthCheckRequest `json:"updatehealthcheckrequest,omitempty"`
-}
-
-// POST_UpdateHealthCheckResponses holds responses of POST_UpdateHealthCheck
-type POST_UpdateHealthCheckResponses struct {
- OK *UpdateHealthCheckResponse
-}
-
-// POST_UpdateImageParameters holds parameters to POST_UpdateImage
-type POST_UpdateImageParameters struct {
- Updateimagerequest UpdateImageRequest `json:"updateimagerequest,omitempty"`
-}
-
-// POST_UpdateImageResponses holds responses of POST_UpdateImage
-type POST_UpdateImageResponses struct {
- OK *UpdateImageResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UpdateKeypairParameters holds parameters to POST_UpdateKeypair
-type POST_UpdateKeypairParameters struct {
- Updatekeypairrequest UpdateKeypairRequest `json:"updatekeypairrequest,omitempty"`
-}
-
-// POST_UpdateKeypairResponses holds responses of POST_UpdateKeypair
-type POST_UpdateKeypairResponses struct {
- OK *UpdateKeypairResponse
-}
-
-// POST_UpdateListenerRuleParameters holds parameters to POST_UpdateListenerRule
-type POST_UpdateListenerRuleParameters struct {
- Updatelistenerrulerequest UpdateListenerRuleRequest `json:"updatelistenerrulerequest,omitempty"`
-}
-
-// POST_UpdateListenerRuleResponses holds responses of POST_UpdateListenerRule
-type POST_UpdateListenerRuleResponses struct {
- OK *UpdateListenerRuleResponse
-}
-
-// POST_UpdateLoadBalancerParameters holds parameters to POST_UpdateLoadBalancer
-type POST_UpdateLoadBalancerParameters struct {
- Updateloadbalancerrequest UpdateLoadBalancerRequest `json:"updateloadbalancerrequest,omitempty"`
-}
-
-// POST_UpdateLoadBalancerResponses holds responses of POST_UpdateLoadBalancer
-type POST_UpdateLoadBalancerResponses struct {
- OK *UpdateLoadBalancerResponse
-}
-
-// POST_UpdateNetParameters holds parameters to POST_UpdateNet
-type POST_UpdateNetParameters struct {
- Updatenetrequest UpdateNetRequest `json:"updatenetrequest,omitempty"`
-}
-
-// POST_UpdateNetResponses holds responses of POST_UpdateNet
-type POST_UpdateNetResponses struct {
- OK *UpdateNetResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UpdateNetAccessPointParameters holds parameters to POST_UpdateNetAccessPoint
-type POST_UpdateNetAccessPointParameters struct {
- Updatenetaccesspointrequest UpdateNetAccessPointRequest `json:"updatenetaccesspointrequest,omitempty"`
-}
-
-// POST_UpdateNetAccessPointResponses holds responses of POST_UpdateNetAccessPoint
-type POST_UpdateNetAccessPointResponses struct {
- OK *UpdateNetAccessPointResponse
-}
-
-// POST_UpdateNicParameters holds parameters to POST_UpdateNic
-type POST_UpdateNicParameters struct {
- Updatenicrequest UpdateNicRequest `json:"updatenicrequest,omitempty"`
-}
-
-// POST_UpdateNicResponses holds responses of POST_UpdateNic
-type POST_UpdateNicResponses struct {
- OK *UpdateNicResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UpdateRouteParameters holds parameters to POST_UpdateRoute
-type POST_UpdateRouteParameters struct {
- Updaterouterequest UpdateRouteRequest `json:"updaterouterequest,omitempty"`
-}
-
-// POST_UpdateRouteResponses holds responses of POST_UpdateRoute
-type POST_UpdateRouteResponses struct {
- OK *UpdateRouteResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UpdateRoutePropagationParameters holds parameters to POST_UpdateRoutePropagation
-type POST_UpdateRoutePropagationParameters struct {
- Updateroutepropagationrequest UpdateRoutePropagationRequest `json:"updateroutepropagationrequest,omitempty"`
-}
-
-// POST_UpdateRoutePropagationResponses holds responses of POST_UpdateRoutePropagation
-type POST_UpdateRoutePropagationResponses struct {
- OK *UpdateRoutePropagationResponse
-}
-
-// POST_UpdateServerCertificateParameters holds parameters to POST_UpdateServerCertificate
-type POST_UpdateServerCertificateParameters struct {
- Updateservercertificaterequest UpdateServerCertificateRequest `json:"updateservercertificaterequest,omitempty"`
-}
-
-// POST_UpdateServerCertificateResponses holds responses of POST_UpdateServerCertificate
-type POST_UpdateServerCertificateResponses struct {
- OK *UpdateServerCertificateResponse
-}
-
-// POST_UpdateSnapshotParameters holds parameters to POST_UpdateSnapshot
-type POST_UpdateSnapshotParameters struct {
- Updatesnapshotrequest UpdateSnapshotRequest `json:"updatesnapshotrequest,omitempty"`
-}
-
-// POST_UpdateSnapshotResponses holds responses of POST_UpdateSnapshot
-type POST_UpdateSnapshotResponses struct {
- OK *UpdateSnapshotResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
-
-// POST_UpdateUserParameters holds parameters to POST_UpdateUser
-type POST_UpdateUserParameters struct {
- Updateuserrequest UpdateUserRequest `json:"updateuserrequest,omitempty"`
-}
-
-// POST_UpdateUserResponses holds responses of POST_UpdateUser
-type POST_UpdateUserResponses struct {
- OK *UpdateUserResponse
-}
-
-// POST_UpdateUserGroupParameters holds parameters to POST_UpdateUserGroup
-type POST_UpdateUserGroupParameters struct {
- Updateusergrouprequest UpdateUserGroupRequest `json:"updateusergrouprequest,omitempty"`
-}
-
-// POST_UpdateUserGroupResponses holds responses of POST_UpdateUserGroup
-type POST_UpdateUserGroupResponses struct {
- OK *UpdateUserGroupResponse
-}
-
-// POST_UpdateVmParameters holds parameters to POST_UpdateVm
-type POST_UpdateVmParameters struct {
- Updatevmrequest UpdateVmRequest `json:"updatevmrequest,omitempty"`
-}
-
-// POST_UpdateVmResponses holds responses of POST_UpdateVm
-type POST_UpdateVmResponses struct {
- OK *UpdateVmResponse
- Code400 *ErrorResponse
- Code401 *ErrorResponse
- Code500 *ErrorResponse
-}
diff --git a/vendor/github.com/zclconf/go-cty/cty/capsule.go b/vendor/github.com/zclconf/go-cty/cty/capsule.go
index d273d1483..2fdc15eae 100644
--- a/vendor/github.com/zclconf/go-cty/cty/capsule.go
+++ b/vendor/github.com/zclconf/go-cty/cty/capsule.go
@@ -9,6 +9,7 @@ type capsuleType struct {
typeImplSigil
Name string
GoType reflect.Type
+ Ops *CapsuleOps
}
func (t *capsuleType) Equals(other Type) bool {
@@ -24,10 +25,22 @@ func (t *capsuleType) FriendlyName(mode friendlyTypeNameMode) string {
}
func (t *capsuleType) GoString() string {
- // To get a useful representation of our native type requires some
- // shenanigans.
- victimVal := reflect.Zero(t.GoType)
- return fmt.Sprintf("cty.Capsule(%q, reflect.TypeOf(%#v))", t.Name, victimVal.Interface())
+ impl := t.Ops.TypeGoString
+ if impl == nil {
+ // To get a useful representation of our native type requires some
+ // shenanigans.
+ victimVal := reflect.Zero(t.GoType)
+ if t.Ops == noCapsuleOps {
+ return fmt.Sprintf("cty.Capsule(%q, reflect.TypeOf(%#v))", t.Name, victimVal.Interface())
+ } else {
+ // Including the operations in the output will make this _very_ long,
+ // so in practice any capsule type with ops ought to provide a
+ // TypeGoString function to override this with something more
+ // reasonable.
+ return fmt.Sprintf("cty.CapsuleWithOps(%q, reflect.TypeOf(%#v), %#v)", t.Name, victimVal.Interface(), t.Ops)
+ }
+ }
+ return impl(t.GoType)
}
// Capsule creates a new Capsule type.
@@ -47,8 +60,11 @@ func (t *capsuleType) GoString() string {
// use the same native type.
//
// Each capsule-typed value contains a pointer to a value of the given native
-// type. A capsule-typed value supports no operations except equality, and
-// equality is implemented by pointer identity of the encapsulated pointer.
+// type. A capsule-typed value by default supports no operations except
+// equality, and equality is implemented by pointer identity of the
+// encapsulated pointer. A capsule type can optionally have its own
+// implementations of certain operations if it is created with CapsuleWithOps
+// instead of Capsule.
//
// The given name is used as the new type's "friendly name". This can be any
// string in principle, but will usually be a short, all-lowercase name aimed
@@ -65,6 +81,29 @@ func Capsule(name string, nativeType reflect.Type) Type {
&capsuleType{
Name: name,
GoType: nativeType,
+ Ops: noCapsuleOps,
+ },
+ }
+}
+
+// CapsuleWithOps is like Capsule except the caller may provide an object
+// representing some overloaded operation implementations to associate with
+// the given capsule type.
+//
+// All of the other caveats and restrictions for capsule types still apply, but
+// overloaded operations can potentially help a capsule type participate better
+// in cty operations.
+func CapsuleWithOps(name string, nativeType reflect.Type, ops *CapsuleOps) Type {
+ // Copy the operations to make sure the caller can't modify them after
+ // we're constructed.
+ ourOps := *ops
+ ourOps.assertValid()
+
+ return Type{
+ &capsuleType{
+ Name: name,
+ GoType: nativeType,
+ Ops: &ourOps,
},
}
}
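// A minimal usage sketch (not part of this change set) of the CapsuleWithOps
// constructor added above. The credentials type and the choice of overridden
// operations are illustrative assumptions, not taken from the diff.
package main

import (
	"fmt"
	"reflect"

	"github.com/zclconf/go-cty/cty"
)

// credentials is a hypothetical native Go type to encapsulate.
type credentials struct {
	Token string
}

// credentialsType overrides RawEquals so two capsule values compare by token
// rather than by pointer identity, and TypeGoString to keep GoString output
// short. Equals is left nil, so RawEquals also serves as the Equals fallback.
var credentialsType = cty.CapsuleWithOps("credentials", reflect.TypeOf(credentials{}), &cty.CapsuleOps{
	RawEquals: func(a, b interface{}) bool {
		return a.(*credentials).Token == b.(*credentials).Token
	},
	TypeGoString: func(goTy reflect.Type) string {
		return "credentialsType"
	},
})

func main() {
	v1 := cty.CapsuleVal(credentialsType, &credentials{Token: "abc"})
	v2 := cty.CapsuleVal(credentialsType, &credentials{Token: "abc"})
	fmt.Println(v1.RawEquals(v2)) // true, via the RawEquals override
}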
diff --git a/vendor/github.com/zclconf/go-cty/cty/capsule_ops.go b/vendor/github.com/zclconf/go-cty/cty/capsule_ops.go
new file mode 100644
index 000000000..3ff6855ec
--- /dev/null
+++ b/vendor/github.com/zclconf/go-cty/cty/capsule_ops.go
@@ -0,0 +1,132 @@
+package cty
+
+import (
+ "reflect"
+)
+
+// CapsuleOps represents a set of overloaded operations for a capsule type.
+//
+// Each field is a reference to a function that can either be nil or can be
+// set to an implementation of the corresponding operation. If an operation
+// function is nil then it isn't supported for the given capsule type.
+type CapsuleOps struct {
+ // GoString provides the GoString implementation for values of the
+ // corresponding type. Conventionally this should return a string
+ // representation of an expression that would produce an equivalent
+ // value.
+ GoString func(val interface{}) string
+
+ // TypeGoString provides the GoString implementation for the corresponding
+ // capsule type itself.
+ TypeGoString func(goTy reflect.Type) string
+
+ // Equals provides the implementation of the Equals operation. This is
+ // called only with known, non-null values of the corresponding type,
+ // but if the corresponding type is a compound type then it must be
+ // ready to detect and handle nested unknown or null values, usually
+ // by recursively calling Value.Equals on those nested values.
+ //
+ // The result value must always be of type cty.Bool, or the Equals
+ // operation will panic.
+ //
+ // If RawEquals is set without also setting Equals, the RawEquals
+ // implementation will be used as a fallback implementation. That fallback
+ // is appropriate only for leaf types that do not contain any nested
+ // cty.Value that would need to distinguish Equals vs. RawEquals for their
+ // own equality.
+ //
+ // If RawEquals is nil then Equals must also be nil, selecting the default
+ // pointer-identity comparison instead.
+ Equals func(a, b interface{}) Value
+
+ // RawEquals provides the implementation of the RawEquals operation.
+ // This is called only with known, non-null values of the corresponding
+ // type, but if the corresponding type is a compound type then it must be
+ // ready to detect and handle nested unknown or null values, usually
+ // by recursively calling Value.RawEquals on those nested values.
+ //
+ // If RawEquals is nil, values of the corresponding type are compared by
+ // pointer identity of the encapsulated value.
+ RawEquals func(a, b interface{}) bool
+
+ // ConversionFrom can provide conversions from the corresponding type to
+ // some other type when values of the corresponding type are used with
+ // the "convert" package. (The main cty package does not use this operation.)
+ //
+ // This function itself returns a function, allowing it to switch its
+ // behavior depending on the given source type. Return nil to indicate
+ // that no such conversion is available.
+ ConversionFrom func(src Type) func(interface{}, Path) (Value, error)
+
+ // ConversionTo can provide conversions to the corresponding type from
+ // some other type when values of the corresponding type are used with
+ // the "convert" package. (The main cty package does not use this operation.)
+ //
+ // This function itself returns a function, allowing it to switch its
+ // behavior depending on the given destination type. Return nil to indicate
+ // that no such conversion is available.
+ ConversionTo func(dst Type) func(Value, Path) (interface{}, error)
+
+ // ExtensionData is an extension point for applications that wish to
+ // create their own extension features using capsule types.
+ //
+ // The key argument is any value that can be compared with Go's ==
+ // operator, but should be of a named type in a package belonging to the
+ // application defining the key. An ExtensionData implementation must
+ // check to see if the given key is familiar to it, and if so return a
+ // suitable value for the key.
+ //
+ // If the given key is unrecognized, the ExtensionData function must
+ // return a nil interface. (Importantly, not an interface containing a nil
+ // pointer of some other type.)
+ // The common implementation of ExtensionData is a single switch statement
+ // over "key" which has a default case returning nil.
+ //
+ // The meaning of any given key is entirely up to the application that
+ // defines it. Applications consuming ExtensionData from capsule types
+ // should do so defensively: if the result of ExtensionData is not valid,
+ // prefer to ignore it or gracefully produce an error rather than causing
+ // a panic.
+ ExtensionData func(key interface{}) interface{}
+}
+
+// noCapsuleOps is a pointer to a CapsuleOps with no functions set, which
+// is used as the default operations value when a type is created using
+// the Capsule function.
+var noCapsuleOps = &CapsuleOps{}
+
+func (ops *CapsuleOps) assertValid() {
+ if ops.RawEquals == nil && ops.Equals != nil {
+ panic("Equals cannot be set without RawEquals")
+ }
+}
+
+// CapsuleOps returns a pointer to the CapsuleOps value for a capsule type,
+// or panics if the receiver is not a capsule type.
+//
+// The caller must not modify the CapsuleOps.
+func (ty Type) CapsuleOps() *CapsuleOps {
+ if !ty.IsCapsuleType() {
+ panic("not a capsule-typed value")
+ }
+
+ return ty.typeImpl.(*capsuleType).Ops
+}
+
+// CapsuleExtensionData is a convenience interface to the ExtensionData
+// function that can be optionally implemented for a capsule type. It will
+// check to see if the underlying type implements ExtensionData and call it
+// if so. If not, it will return nil to indicate that the given key is not
+// supported.
+//
+// See the documentation for CapsuleOps.ExtensionData for more information
+// on the purpose of and usage of this mechanism.
+//
+// If CapsuleExtensionData is called on a non-capsule type then it will panic.
+func (ty Type) CapsuleExtensionData(key interface{}) interface{} {
+ ops := ty.CapsuleOps()
+ if ops.ExtensionData == nil {
+ return nil
+ }
+ return ops.ExtensionData(key)
+}
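// A hedged sketch of the single-switch ExtensionData convention described in
// the comments above. The key type, the widget type, and the returned URL are
// hypothetical.
package main

import (
	"fmt"
	"reflect"

	"github.com/zclconf/go-cty/cty"
)

// extKey is an application-defined key type, as the CapsuleOps.ExtensionData
// docs recommend, so that keys are namespaced by the defining package.
type extKey int

const docURLKey extKey = 0

type widget struct{ ID string }

var widgetType = cty.CapsuleWithOps("widget", reflect.TypeOf(widget{}), &cty.CapsuleOps{
	// The conventional implementation: one switch over the key, with a
	// default case returning nil for unrecognized keys.
	ExtensionData: func(key interface{}) interface{} {
		switch key {
		case docURLKey:
			return "https://example.com/docs/widget"
		default:
			return nil
		}
	},
})

func main() {
	// CapsuleExtensionData dispatches to the override and returns nil for
	// keys the type does not recognize.
	fmt.Println(widgetType.CapsuleExtensionData(docURLKey)) // https://example.com/docs/widget
	fmt.Println(widgetType.CapsuleExtensionData("other"))   // <nil>
}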
diff --git a/vendor/github.com/zclconf/go-cty/cty/convert/conversion.go b/vendor/github.com/zclconf/go-cty/cty/convert/conversion.go
index f9aacb4ee..ee35e9de7 100644
--- a/vendor/github.com/zclconf/go-cty/cty/convert/conversion.go
+++ b/vendor/github.com/zclconf/go-cty/cty/convert/conversion.go
@@ -16,7 +16,19 @@ func getConversion(in cty.Type, out cty.Type, unsafe bool) conversion {
// Wrap the conversion in some standard checks that we don't want to
// have to repeat in every conversion function.
- return func(in cty.Value, path cty.Path) (cty.Value, error) {
+ var ret conversion
+ ret = func(in cty.Value, path cty.Path) (cty.Value, error) {
+ if in.IsMarked() {
+ // We must unmark during the conversion and then re-apply the
+ // same marks to the result.
+ in, inMarks := in.Unmark()
+ v, err := ret(in, path)
+ if v != cty.NilVal {
+ v = v.WithMarks(inMarks)
+ }
+ return v, err
+ }
+
if out == cty.DynamicPseudoType {
// Conversion to DynamicPseudoType always just passes through verbatim.
return in, nil
@@ -33,6 +45,8 @@ func getConversion(in cty.Type, out cty.Type, unsafe bool) conversion {
return conv(in, path)
}
+
+ return ret
}
func getConversionKnown(in cty.Type, out cty.Type, unsafe bool) conversion {
@@ -124,6 +138,30 @@ func getConversionKnown(in cty.Type, out cty.Type, unsafe bool) conversion {
outEty := out.ElementType()
return conversionObjectToMap(in, outEty, unsafe)
+ case in.IsCapsuleType() || out.IsCapsuleType():
+ if !unsafe {
+ // Capsule types can only participate in "unsafe" conversions,
+ // because we don't know enough about their conversion behaviors
+ // to be sure that they will always be safe.
+ return nil
+ }
+ if in.Equals(out) {
+ // conversion to self is never allowed
+ return nil
+ }
+ if out.IsCapsuleType() {
+ if fn := out.CapsuleOps().ConversionTo; fn != nil {
+ return conversionToCapsule(in, out, fn)
+ }
+ }
+ if in.IsCapsuleType() {
+ if fn := in.CapsuleOps().ConversionFrom; fn != nil {
+ return conversionFromCapsule(in, out, fn)
+ }
+ }
+ // No conversion operation is available, then.
+ return nil
+
default:
return nil
diff --git a/vendor/github.com/zclconf/go-cty/cty/convert/conversion_capsule.go b/vendor/github.com/zclconf/go-cty/cty/convert/conversion_capsule.go
new file mode 100644
index 000000000..ded4079d4
--- /dev/null
+++ b/vendor/github.com/zclconf/go-cty/cty/convert/conversion_capsule.go
@@ -0,0 +1,31 @@
+package convert
+
+import (
+ "github.com/zclconf/go-cty/cty"
+)
+
+func conversionToCapsule(inTy, outTy cty.Type, fn func(inTy cty.Type) func(cty.Value, cty.Path) (interface{}, error)) conversion {
+ rawConv := fn(inTy)
+ if rawConv == nil {
+ return nil
+ }
+
+ return func(in cty.Value, path cty.Path) (cty.Value, error) {
+ rawV, err := rawConv(in, path)
+ if err != nil {
+ return cty.NilVal, err
+ }
+ return cty.CapsuleVal(outTy, rawV), nil
+ }
+}
+
+func conversionFromCapsule(inTy, outTy cty.Type, fn func(outTy cty.Type) func(interface{}, cty.Path) (cty.Value, error)) conversion {
+ rawConv := fn(outTy)
+ if rawConv == nil {
+ return nil
+ }
+
+ return func(in cty.Value, path cty.Path) (cty.Value, error) {
+ return rawConv(in.EncapsulatedValue(), path)
+ }
+}
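// A hedged sketch of a capsule type opting into conversions through the
// ConversionFrom hook wired up above. The duration type and its string
// rendering are assumptions; note that, per conversionFromCapsule, the hook
// receives the destination type of the requested conversion.
package main

import (
	"fmt"
	"reflect"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

type duration struct{ Seconds int }

// durationType supports conversion from the capsule type to cty.String.
// Returning nil for any other destination type means "no conversion
// available".
var durationType = cty.CapsuleWithOps("duration", reflect.TypeOf(duration{}), &cty.CapsuleOps{
	ConversionFrom: func(dst cty.Type) func(interface{}, cty.Path) (cty.Value, error) {
		if !dst.Equals(cty.String) {
			return nil
		}
		return func(raw interface{}, path cty.Path) (cty.Value, error) {
			d := raw.(*duration)
			return cty.StringVal(fmt.Sprintf("%ds", d.Seconds)), nil
		}
	},
})

func main() {
	v := cty.CapsuleVal(durationType, &duration{Seconds: 90})
	// Capsule conversions are "unsafe", so they are only attempted by
	// callers that allow unsafe conversions, such as convert.Convert.
	s, err := convert.Convert(v, cty.String)
	fmt.Printf("%#v %v\n", s, err) // cty.StringVal("90s") <nil>
}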
diff --git a/vendor/github.com/zclconf/go-cty/cty/element_iterator.go b/vendor/github.com/zclconf/go-cty/cty/element_iterator.go
index 0bf84c774..9e4fff66f 100644
--- a/vendor/github.com/zclconf/go-cty/cty/element_iterator.go
+++ b/vendor/github.com/zclconf/go-cty/cty/element_iterator.go
@@ -23,6 +23,8 @@ type ElementIterator interface {
func canElementIterator(val Value) bool {
switch {
+ case val.IsMarked():
+ return false
case val.ty.IsListType():
return true
case val.ty.IsMapType():
@@ -39,6 +41,7 @@ func canElementIterator(val Value) bool {
}
func elementIterator(val Value) ElementIterator {
+ val.assertUnmarked()
switch {
case val.ty.IsListType():
return &listElementIterator{
diff --git a/vendor/github.com/zclconf/go-cty/cty/function/argument.go b/vendor/github.com/zclconf/go-cty/cty/function/argument.go
index bfd30157e..5a26c275f 100644
--- a/vendor/github.com/zclconf/go-cty/cty/function/argument.go
+++ b/vendor/github.com/zclconf/go-cty/cty/function/argument.go
@@ -47,4 +47,24 @@ type Parameter struct {
// values are not, thus improving the type-check accuracy of derived
// values.
AllowDynamicType bool
+
+ // If AllowMarked is set then marked values may be passed into this
+ // argument's slot in the implementation function. If not set, any
+ // marked value will be unmarked before calling and then the markings
+ // from that value will be applied automatically to the function result,
+ // ensuring that the marks get propagated in a simplistic way even if
+ // a function is unable to handle them.
+ //
+ // For any argument whose parameter has AllowMarked set, it's the
+ // function implementation's responsibility to Unmark the given value
+ // and propagate the marks appropriately to the result in order to
+ // avoid losing the marks. Application-specific functions might use
+ // special rules to selectively propagate particular marks.
+ //
+ // The automatic unmarking of values applies only to the main
+ // implementation function. In an application that uses marked values,
+ // the Type implementation for a function must always be prepared to accept
+ // marked values, which is easy to achieve by consulting only the type
+ // and ignoring the value itself.
+ AllowMarked bool
}
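// A hedged sketch of a function spec that opts into AllowMarked, as described
// in the doc comment above: the implementation then sees the marks and must
// propagate them itself. The function name and behavior are illustrative.
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
)

// redactLenFunc returns a string's byte length. It sets AllowMarked so the
// implementation receives marked values directly rather than relying on the
// automatic unmark/re-mark behavior.
var redactLenFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{
			Name:        "str",
			Type:        cty.String,
			AllowMarked: true,
		},
	},
	Type: function.StaticReturnType(cty.Number),
	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
		// The implementation is responsible for unmarking and for
		// re-applying whatever marks it wants to keep on the result.
		str, marks := args[0].Unmark()
		n := cty.NumberIntVal(int64(len(str.AsString())))
		return n.WithMarks(marks), nil
	},
})

func main() {
	res, err := redactLenFunc.Call([]cty.Value{
		cty.StringVal("hunter2").Mark("sensitive"),
	})
	fmt.Printf("%#v %v\n", res, err) // cty.NumberIntVal(7).Mark("sensitive") <nil>
}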
diff --git a/vendor/github.com/zclconf/go-cty/cty/function/function.go b/vendor/github.com/zclconf/go-cty/cty/function/function.go
index 9e8bf3376..efd882725 100644
--- a/vendor/github.com/zclconf/go-cty/cty/function/function.go
+++ b/vendor/github.com/zclconf/go-cty/cty/function/function.go
@@ -142,6 +142,21 @@ func (f Function) ReturnTypeForValues(args []cty.Value) (ty cty.Type, err error)
for i, spec := range f.spec.Params {
val := posArgs[i]
+ if val.IsMarked() && !spec.AllowMarked {
+ // During type checking we just unmark values and discard their
+ // marks, under the assumption that during actual execution of
+ // the function we'll do similarly and then re-apply the marks
+ // afterwards. Note that this does mean that a function that
+ // inspects values (rather than just types) in its Type
+ // implementation can potentially fail to take into account marks,
+ // unless it specifically opts in to seeing them.
+ unmarked, _ := val.Unmark()
+ newArgs := make([]cty.Value, len(args))
+ copy(newArgs, args)
+ newArgs[i] = unmarked
+ args = newArgs
+ }
+
if val.IsNull() && !spec.AllowNull {
return cty.Type{}, NewArgErrorf(i, "argument must not be null")
}
@@ -168,6 +183,15 @@ func (f Function) ReturnTypeForValues(args []cty.Value) (ty cty.Type, err error)
for i, val := range varArgs {
realI := i + len(posArgs)
+ if val.IsMarked() && !spec.AllowMarked {
+ // See the similar block in the loop above for what's going on here.
+ unmarked, _ := val.Unmark()
+ newArgs := make([]cty.Value, len(args))
+ copy(newArgs, args)
+ newArgs[realI] = unmarked
+ args = newArgs
+ }
+
if val.IsNull() && !spec.AllowNull {
return cty.Type{}, NewArgErrorf(realI, "argument must not be null")
}
@@ -208,9 +232,10 @@ func (f Function) Call(args []cty.Value) (val cty.Value, err error) {
// Type checking already dealt with most situations relating to our
// parameter specification, but we still need to deal with unknown
- // values.
+ // values and marked values.
posArgs := args[:len(f.spec.Params)]
varArgs := args[len(f.spec.Params):]
+ var resultMarks []cty.ValueMarks
for i, spec := range f.spec.Params {
val := posArgs[i]
@@ -218,14 +243,37 @@ func (f Function) Call(args []cty.Value) (val cty.Value, err error) {
if !val.IsKnown() && !spec.AllowUnknown {
return cty.UnknownVal(expectedType), nil
}
+
+ if val.IsMarked() && !spec.AllowMarked {
+ unwrappedVal, marks := val.Unmark()
+ // In order to avoid additional overhead on applications that
+ // are not using marked values, we copy the given args only
+ // if we encounter a marked value we need to unmark. However,
+ // as a consequence we end up doing redundant copying if multiple
+ // marked values need to be unwrapped. That seems okay because
+ // argument lists are generally small.
+ newArgs := make([]cty.Value, len(args))
+ copy(newArgs, args)
+ newArgs[i] = unwrappedVal
+ resultMarks = append(resultMarks, marks)
+ args = newArgs
+ }
}
if f.spec.VarParam != nil {
spec := f.spec.VarParam
- for _, val := range varArgs {
+ for i, val := range varArgs {
if !val.IsKnown() && !spec.AllowUnknown {
return cty.UnknownVal(expectedType), nil
}
+ if val.IsMarked() && !spec.AllowMarked {
+ unwrappedVal, marks := val.Unmark()
+ newArgs := make([]cty.Value, len(args))
+ copy(newArgs, args)
+ newArgs[len(posArgs)+i] = unwrappedVal
+ resultMarks = append(resultMarks, marks)
+ args = newArgs
+ }
}
}
@@ -244,6 +292,9 @@ func (f Function) Call(args []cty.Value) (val cty.Value, err error) {
if err != nil {
return cty.NilVal, err
}
+ if len(resultMarks) > 0 {
+ retVal = retVal.WithMarks(resultMarks...)
+ }
}
// Returned value must conform to what the Type function expected, to
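// A hedged usage sketch of the default behavior implemented above: when a
// parameter does not set AllowMarked, Call unmarks the argument before
// invoking Impl and re-applies the collected marks to the result. The
// doubleFunc definition is illustrative.
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function"
)

var doubleFunc = function.New(&function.Spec{
	Params: []function.Parameter{
		{Name: "num", Type: cty.Number},
	},
	Type: function.StaticReturnType(cty.Number),
	Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
		// args[0] arrives unmarked here even if the caller passed a marked
		// value, because the parameter did not set AllowMarked.
		return args[0].Multiply(cty.NumberIntVal(2)), nil
	},
})

func main() {
	res, _ := doubleFunc.Call([]cty.Value{cty.NumberIntVal(21).Mark("sensitive")})
	fmt.Printf("%#v\n", res) // cty.NumberIntVal(42).Mark("sensitive")
}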
diff --git a/vendor/github.com/zclconf/go-cty/cty/function/stdlib/bool.go b/vendor/github.com/zclconf/go-cty/cty/function/stdlib/bool.go
index a473d0ec3..4f1ecc8d9 100644
--- a/vendor/github.com/zclconf/go-cty/cty/function/stdlib/bool.go
+++ b/vendor/github.com/zclconf/go-cty/cty/function/stdlib/bool.go
@@ -11,6 +11,7 @@ var NotFunc = function.New(&function.Spec{
Name: "val",
Type: cty.Bool,
AllowDynamicType: true,
+ AllowMarked: true,
},
},
Type: function.StaticReturnType(cty.Bool),
@@ -25,11 +26,13 @@ var AndFunc = function.New(&function.Spec{
Name: "a",
Type: cty.Bool,
AllowDynamicType: true,
+ AllowMarked: true,
},
{
Name: "b",
Type: cty.Bool,
AllowDynamicType: true,
+ AllowMarked: true,
},
},
Type: function.StaticReturnType(cty.Bool),
@@ -44,11 +47,13 @@ var OrFunc = function.New(&function.Spec{
Name: "a",
Type: cty.Bool,
AllowDynamicType: true,
+ AllowMarked: true,
},
{
Name: "b",
Type: cty.Bool,
AllowDynamicType: true,
+ AllowMarked: true,
},
},
Type: function.StaticReturnType(cty.Bool),
diff --git a/vendor/github.com/zclconf/go-cty/cty/function/stdlib/number.go b/vendor/github.com/zclconf/go-cty/cty/function/stdlib/number.go
index bd9b2e51b..480305e83 100644
--- a/vendor/github.com/zclconf/go-cty/cty/function/stdlib/number.go
+++ b/vendor/github.com/zclconf/go-cty/cty/function/stdlib/number.go
@@ -14,6 +14,7 @@ var AbsoluteFunc = function.New(&function.Spec{
Name: "num",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
},
Type: function.StaticReturnType(cty.Number),
@@ -196,11 +197,13 @@ var GreaterThanFunc = function.New(&function.Spec{
Name: "a",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
{
Name: "b",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
},
Type: function.StaticReturnType(cty.Bool),
@@ -215,11 +218,13 @@ var GreaterThanOrEqualToFunc = function.New(&function.Spec{
Name: "a",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
{
Name: "b",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
},
Type: function.StaticReturnType(cty.Bool),
@@ -234,11 +239,13 @@ var LessThanFunc = function.New(&function.Spec{
Name: "a",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
{
Name: "b",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
},
Type: function.StaticReturnType(cty.Bool),
@@ -253,11 +260,13 @@ var LessThanOrEqualToFunc = function.New(&function.Spec{
Name: "a",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
{
Name: "b",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
},
Type: function.StaticReturnType(cty.Bool),
@@ -272,6 +281,7 @@ var NegateFunc = function.New(&function.Spec{
Name: "num",
Type: cty.Number,
AllowDynamicType: true,
+ AllowMarked: true,
},
},
Type: function.StaticReturnType(cty.Number),
diff --git a/vendor/github.com/zclconf/go-cty/cty/function/stdlib/string.go b/vendor/github.com/zclconf/go-cty/cty/function/stdlib/string.go
index d7c89fa82..12e9dbc3d 100644
--- a/vendor/github.com/zclconf/go-cty/cty/function/stdlib/string.go
+++ b/vendor/github.com/zclconf/go-cty/cty/function/stdlib/string.go
@@ -3,10 +3,10 @@ package stdlib
import (
"strings"
+ "github.com/apparentlymart/go-textseg/textseg"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/function"
"github.com/zclconf/go-cty/cty/gocty"
- "github.com/apparentlymart/go-textseg/textseg"
)
var UpperFunc = function.New(&function.Spec{
diff --git a/vendor/github.com/zclconf/go-cty/cty/gob.go b/vendor/github.com/zclconf/go-cty/cty/gob.go
index 6c972d7de..a0961b8a0 100644
--- a/vendor/github.com/zclconf/go-cty/cty/gob.go
+++ b/vendor/github.com/zclconf/go-cty/cty/gob.go
@@ -3,6 +3,7 @@ package cty
import (
"bytes"
"encoding/gob"
+ "errors"
"fmt"
"math/big"
@@ -15,6 +16,10 @@ import (
// Currently it is not possible to represent values of capsule types in gob,
// because the types themselves cannot be represented.
func (val Value) GobEncode() ([]byte, error) {
+ if val.IsMarked() {
+ return nil, errors.New("value is marked")
+ }
+
buf := &bytes.Buffer{}
enc := gob.NewEncoder(buf)
diff --git a/vendor/github.com/zclconf/go-cty/cty/json/marshal.go b/vendor/github.com/zclconf/go-cty/cty/json/marshal.go
index f7bea1a2f..75e02577b 100644
--- a/vendor/github.com/zclconf/go-cty/cty/json/marshal.go
+++ b/vendor/github.com/zclconf/go-cty/cty/json/marshal.go
@@ -9,6 +9,10 @@ import (
)
func marshal(val cty.Value, t cty.Type, path cty.Path, b *bytes.Buffer) error {
+ if val.IsMarked() {
+ return path.NewErrorf("value has marks, so it cannot be serialized")
+ }
+
// If we're going to decode as DynamicPseudoType then we need to save
// dynamic type information to recover the real type.
if t == cty.DynamicPseudoType && val.Type() != cty.DynamicPseudoType {
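// A short usage sketch of the guard added above: JSON marshaling rejects
// marked values, so callers must Unmark first and decide how to represent the
// marks themselves. The value and mark below are arbitrary examples.
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	ctyjson "github.com/zclconf/go-cty/cty/json"
)

func main() {
	v := cty.StringVal("secret").Mark("sensitive")

	_, err := ctyjson.Marshal(v, cty.String)
	fmt.Println(err) // value has marks, so it cannot be serialized

	raw, _ := v.Unmark()
	b, _ := ctyjson.Marshal(raw, cty.String)
	fmt.Println(string(b)) // "secret"
}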
diff --git a/vendor/github.com/zclconf/go-cty/cty/marks.go b/vendor/github.com/zclconf/go-cty/cty/marks.go
new file mode 100644
index 000000000..3898e4553
--- /dev/null
+++ b/vendor/github.com/zclconf/go-cty/cty/marks.go
@@ -0,0 +1,296 @@
+package cty
+
+import (
+ "fmt"
+ "strings"
+)
+
+// marker is an internal wrapper type used to add special "marks" to values.
+//
+// A "mark" is an annotation that can be used to represent additional
+// characteristics of values that propagate through operation methods to
+// result values. However, a marked value cannot be used with integration
+// methods normally associated with its type, in order to ensure that
+// calling applications don't inadvertently drop marks as they round-trip
+// values out of cty and back in again.
+//
+// Marked values are created only explicitly by the calling application, so
+// an application that never marks a value does not need to worry about
+// encountering marked values.
+type marker struct {
+ realV interface{}
+ marks ValueMarks
+}
+
+// ValueMarks is a map, representing a set, of "mark" values associated with
+// a Value. See Value.Mark for more information on the usage of mark values.
+type ValueMarks map[interface{}]struct{}
+
+// NewValueMarks constructs a new ValueMarks set with the given mark values.
+func NewValueMarks(marks ...interface{}) ValueMarks {
+ if len(marks) == 0 {
+ return nil
+ }
+ ret := make(ValueMarks, len(marks))
+ for _, v := range marks {
+ ret[v] = struct{}{}
+ }
+ return ret
+}
+
+// Equal returns true if the receiver and the given ValueMarks both contain
+// the same marks.
+func (m ValueMarks) Equal(o ValueMarks) bool {
+ if len(m) != len(o) {
+ return false
+ }
+ for v := range m {
+ if _, ok := o[v]; !ok {
+ return false
+ }
+ }
+ return true
+}
+
+func (m ValueMarks) GoString() string {
+ var s strings.Builder
+ s.WriteString("cty.NewValueMarks(")
+ i := 0
+ for mv := range m {
+ if i != 0 {
+ s.WriteString(", ")
+ }
+ s.WriteString(fmt.Sprintf("%#v", mv))
+ i++
+ }
+ s.WriteString(")")
+ return s.String()
+}
+
+// IsMarked returns true if and only if the receiving value carries at least
+// one mark. A marked value cannot be used directly with integration methods
+// without explicitly unmarking it (and retrieving the markings) first.
+func (val Value) IsMarked() bool {
+ _, ok := val.v.(marker)
+ return ok
+}
+
+// HasMark returns true if and only if the receiving value has the given mark.
+func (val Value) HasMark(mark interface{}) bool {
+ if mr, ok := val.v.(marker); ok {
+ _, ok := mr.marks[mark]
+ return ok
+ }
+ return false
+}
+
+// ContainsMarked returns true if the receiving value or any value within it
+// is marked.
+//
+// This operation is relatively expensive. If you only need a shallow result,
+// use IsMarked instead.
+func (val Value) ContainsMarked() bool {
+ ret := false
+ Walk(val, func(_ Path, v Value) (bool, error) {
+ if v.IsMarked() {
+ ret = true
+ return false, nil
+ }
+ return true, nil
+ })
+ return ret
+}
+
+func (val Value) assertUnmarked() {
+ if val.IsMarked() {
+ panic("value is marked, so must be unmarked first")
+ }
+}
+
+// Marks returns a map (representing a set) of all of the mark values
+// associated with the receiving value, without changing the marks. Returns nil
+// if the value is not marked at all.
+func (val Value) Marks() ValueMarks {
+ if mr, ok := val.v.(marker); ok {
+ // copy so that the caller can't mutate our internals
+ ret := make(ValueMarks, len(mr.marks))
+ for k, v := range mr.marks {
+ ret[k] = v
+ }
+ return ret
+ }
+ return nil
+}
+
+// HasSameMarks returns true if and only if the receiver and the given other
+// value have identical marks.
+func (val Value) HasSameMarks(other Value) bool {
+ vm, vmOK := val.v.(marker)
+ om, omOK := other.v.(marker)
+ if vmOK != omOK {
+ return false
+ }
+ if vmOK {
+ return vm.marks.Equal(om.marks)
+ }
+ return true
+}
+
+// Mark returns a new value that has the same type and underlying value as
+// the receiver but that also carries the given value as a "mark".
+//
+// Marks are used to carry additional application-specific characteristics
+// associated with values. A marked value can be used with operation methods,
+// in which case the marks are propagated to the operation results. A marked
+// value _cannot_ be used with integration methods, so callers of those
+// must derive an unmarked value using Unmark (and thus explicitly handle
+// the markings) before calling the integration methods.
+//
+// The mark value can be any value that would be valid to use as a map key.
+// The mark value should be of a named type in order to use the type itself
+// as a namespace for markings. That type can be unexported if desired, in
+// order to ensure that the mark can only be handled through the defining
+// package's own functions.
+//
+// An application that never calls this method does not need to worry about
+// handling marked values.
+func (val Value) Mark(mark interface{}) Value {
+ var newMarker marker
+ newMarker.realV = val.v
+ if mr, ok := val.v.(marker); ok {
+ // It's already a marker, so we'll retain existing marks.
+ newMarker.marks = make(ValueMarks, len(mr.marks)+1)
+ for k, v := range mr.marks {
+ newMarker.marks[k] = v
+ }
+ } else {
+ // It's not a marker yet, so we're creating the first mark.
+ newMarker.marks = make(ValueMarks, 1)
+ }
+ newMarker.marks[mark] = struct{}{}
+ return Value{
+ ty: val.ty,
+ v: newMarker,
+ }
+}
+
+// Unmark separates the marks of the receiving value from the value itself,
+// returning a new unmarked value and a map (representing a set) of the marks.
+//
+// If the receiver isn't marked, Unmark returns it verbatim along with a nil
+// map of marks.
+func (val Value) Unmark() (Value, ValueMarks) {
+ if !val.IsMarked() {
+ return val, nil
+ }
+ mr := val.v.(marker)
+ marks := val.Marks() // copy so that the caller can't mutate our internals
+ return Value{
+ ty: val.ty,
+ v: mr.realV,
+ }, marks
+}
+
+// UnmarkDeep is similar to Unmark, but it works with an entire nested structure
+// rather than just the given value directly.
+//
+// The result is guaranteed to contain no nested values that are marked, and
+// the returned marks set includes the superset of all of the marks encountered
+// during the operation.
+func (val Value) UnmarkDeep() (Value, ValueMarks) {
+ marks := make(ValueMarks)
+ ret, _ := Transform(val, func(_ Path, v Value) (Value, error) {
+ unmarkedV, valueMarks := v.Unmark()
+ for m, s := range valueMarks {
+ marks[m] = s
+ }
+ return unmarkedV, nil
+ })
+ return ret, marks
+}
+
+func (val Value) unmarkForce() Value {
+ unw, _ := val.Unmark()
+ return unw
+}
+
+// WithMarks returns a new value that has the same type and underlying value
+// as the receiver and also has the marks from the given maps (representing
+// sets).
+func (val Value) WithMarks(marks ...ValueMarks) Value {
+ if len(marks) == 0 {
+ return val
+ }
+ ownMarks := val.Marks()
+ markCount := len(ownMarks)
+ for _, s := range marks {
+ markCount += len(s)
+ }
+ if markCount == 0 {
+ return val
+ }
+ newMarks := make(ValueMarks, markCount)
+ for m := range ownMarks {
+ newMarks[m] = struct{}{}
+ }
+ for _, s := range marks {
+ for m := range s {
+ newMarks[m] = struct{}{}
+ }
+ }
+ v := val.v
+ if mr, ok := v.(marker); ok {
+ v = mr.realV
+ }
+ return Value{
+ ty: val.ty,
+ v: marker{
+ realV: v,
+ marks: newMarks,
+ },
+ }
+}
+
+// WithSameMarks returns a new value that has the same type and underlying
+// value as the receiver and also has the marks from the given source values.
+//
+// Use this if you are implementing your own higher-level operations against
+// cty using the integration methods, to re-introduce the marks from the
+// source values of the operation.
+func (val Value) WithSameMarks(srcs ...Value) Value {
+ if len(srcs) == 0 {
+ return val
+ }
+ ownMarks := val.Marks()
+ markCount := len(ownMarks)
+ for _, sv := range srcs {
+ if mr, ok := sv.v.(marker); ok {
+ markCount += len(mr.marks)
+ }
+ }
+ if markCount == 0 {
+ return val
+ }
+ newMarks := make(ValueMarks, markCount)
+ for m := range ownMarks {
+ newMarks[m] = struct{}{}
+ }
+ for _, sv := range srcs {
+ if mr, ok := sv.v.(marker); ok {
+ for m := range mr.marks {
+ newMarks[m] = struct{}{}
+ }
+ }
+ }
+ v := val.v
+ if mr, ok := v.(marker); ok {
+ v = mr.realV
+ }
+ return Value{
+ ty: val.ty,
+ v: marker{
+ realV: v,
+ marks: newMarks,
+ },
+ }
+}
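// A short usage sketch of the mark lifecycle introduced in this file: mark a
// value, let operation methods propagate the mark, then Unmark before using
// integration methods. The "sensitive" mark is an arbitrary example; the docs
// above recommend a named type for real marks.
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	secret := cty.StringVal("s3cr3t").Mark("sensitive")

	fmt.Println(secret.IsMarked())           // true
	fmt.Println(secret.HasMark("sensitive")) // true

	// Operation methods such as Equals propagate marks to their results.
	eq := secret.Equals(cty.StringVal("s3cr3t"))
	fmt.Println(eq.IsMarked()) // true

	// Integration methods panic on marked values, so unmark first and handle
	// the returned marks explicitly.
	raw, marks := secret.Unmark()
	fmt.Printf("%q %#v\n", raw.AsString(), marks) // "s3cr3t" cty.NewValueMarks("sensitive")
}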
diff --git a/vendor/github.com/zclconf/go-cty/cty/msgpack/marshal.go b/vendor/github.com/zclconf/go-cty/cty/msgpack/marshal.go
index 87b096ca4..51c75aa8d 100644
--- a/vendor/github.com/zclconf/go-cty/cty/msgpack/marshal.go
+++ b/vendor/github.com/zclconf/go-cty/cty/msgpack/marshal.go
@@ -41,6 +41,10 @@ func Marshal(val cty.Value, ty cty.Type) ([]byte, error) {
}
func marshal(val cty.Value, ty cty.Type, path cty.Path, enc *msgpack.Encoder) error {
+ if val.IsMarked() {
+ return path.NewErrorf("value has marks, so it cannot be serialized")
+ }
+
// If we're going to decode as DynamicPseudoType then we need to save
// dynamic type information to recover the real type.
if ty == cty.DynamicPseudoType && val.Type() != cty.DynamicPseudoType {
diff --git a/vendor/github.com/zclconf/go-cty/cty/set_helper.go b/vendor/github.com/zclconf/go-cty/cty/set_helper.go
index a88ddaffb..962bb5295 100644
--- a/vendor/github.com/zclconf/go-cty/cty/set_helper.go
+++ b/vendor/github.com/zclconf/go-cty/cty/set_helper.go
@@ -119,7 +119,13 @@ func (s ValueSet) SymmetricDifference(other ValueSet) ValueSet {
}
// requireElementType panics if the given value is not of the set's element type.
+//
+// It also panics if the given value is marked, because marked values cannot
+// be stored in sets.
func (s ValueSet) requireElementType(v Value) {
+ if v.IsMarked() {
+ panic("cannot store marked value directly in a set (make the set itself unknown instead)")
+ }
if !v.Type().Equals(s.ElementType()) {
panic(fmt.Errorf("attempt to use %#v value with set of %#v", v.Type(), s.ElementType()))
}
diff --git a/vendor/github.com/zclconf/go-cty/cty/set_internals.go b/vendor/github.com/zclconf/go-cty/cty/set_internals.go
index f1ec98556..e7e1d3337 100644
--- a/vendor/github.com/zclconf/go-cty/cty/set_internals.go
+++ b/vendor/github.com/zclconf/go-cty/cty/set_internals.go
@@ -32,7 +32,10 @@ var _ set.OrderedRules = setRules{}
// This function is not safe to use for security-related applications, since
// the hash used is not strong enough.
func (val Value) Hash() int {
- hashBytes := makeSetHashBytes(val)
+ hashBytes, marks := makeSetHashBytes(val)
+ if len(marks) > 0 {
+ panic("can't take hash of value that has marks or has embedded values that have marks")
+ }
return int(crc32.ChecksumIEEE(hashBytes))
}
@@ -110,19 +113,20 @@ func (r setRules) Less(v1, v2 interface{}) bool {
// default consistent-but-undefined ordering then. This situation is
// not considered a compatibility constraint; callers should rely only
// on the ordering rules for primitive values.
- v1h := makeSetHashBytes(v1v)
- v2h := makeSetHashBytes(v2v)
+ v1h, _ := makeSetHashBytes(v1v)
+ v2h, _ := makeSetHashBytes(v2v)
return bytes.Compare(v1h, v2h) < 0
}
}
-func makeSetHashBytes(val Value) []byte {
+func makeSetHashBytes(val Value) ([]byte, ValueMarks) {
var buf bytes.Buffer
- appendSetHashBytes(val, &buf)
- return buf.Bytes()
+ marks := make(ValueMarks)
+ appendSetHashBytes(val, &buf, marks)
+ return buf.Bytes(), marks
}
-func appendSetHashBytes(val Value, buf *bytes.Buffer) {
+func appendSetHashBytes(val Value, buf *bytes.Buffer, marks ValueMarks) {
// Exactly what bytes we generate here don't matter as long as the following
// constraints hold:
// - Unknown and null values all generate distinct strings from
@@ -136,6 +140,19 @@ func appendSetHashBytes(val Value, buf *bytes.Buffer) {
// the Equivalent function will still distinguish values, but set
// performance will be best if we are able to produce a distinct string
// for each distinct value, unknown values notwithstanding.
+
+ // Marks aren't considered part of a value for equality-testing purposes,
+ // so we'll unmark our value before we work with it but we'll remember
+ // the marks in case the caller needs to re-apply them to a derived
+ // value.
+ if val.IsMarked() {
+ unmarkedVal, valMarks := val.Unmark()
+ for m := range valMarks {
+ marks[m] = struct{}{}
+ }
+ val = unmarkedVal
+ }
+
if !val.IsKnown() {
buf.WriteRune('?')
return
@@ -175,9 +192,9 @@ func appendSetHashBytes(val Value, buf *bytes.Buffer) {
if val.ty.IsMapType() {
buf.WriteRune('{')
val.ForEachElement(func(keyVal, elementVal Value) bool {
- appendSetHashBytes(keyVal, buf)
+ appendSetHashBytes(keyVal, buf, marks)
buf.WriteRune(':')
- appendSetHashBytes(elementVal, buf)
+ appendSetHashBytes(elementVal, buf, marks)
buf.WriteRune(';')
return false
})
@@ -188,7 +205,7 @@ func appendSetHashBytes(val Value, buf *bytes.Buffer) {
if val.ty.IsListType() || val.ty.IsSetType() {
buf.WriteRune('[')
val.ForEachElement(func(keyVal, elementVal Value) bool {
- appendSetHashBytes(elementVal, buf)
+ appendSetHashBytes(elementVal, buf, marks)
buf.WriteRune(';')
return false
})
@@ -204,7 +221,7 @@ func appendSetHashBytes(val Value, buf *bytes.Buffer) {
}
sort.Strings(attrNames)
for _, attrName := range attrNames {
- appendSetHashBytes(val.GetAttr(attrName), buf)
+ appendSetHashBytes(val.GetAttr(attrName), buf, marks)
buf.WriteRune(';')
}
buf.WriteRune('>')
@@ -214,7 +231,7 @@ func appendSetHashBytes(val Value, buf *bytes.Buffer) {
if val.ty.IsTupleType() {
buf.WriteRune('<')
val.ForEachElement(func(keyVal, elementVal Value) bool {
- appendSetHashBytes(elementVal, buf)
+ appendSetHashBytes(elementVal, buf, marks)
buf.WriteRune(';')
return false
})
diff --git a/vendor/github.com/zclconf/go-cty/cty/value.go b/vendor/github.com/zclconf/go-cty/cty/value.go
index 80cb8f76f..1025ba82e 100644
--- a/vendor/github.com/zclconf/go-cty/cty/value.go
+++ b/vendor/github.com/zclconf/go-cty/cty/value.go
@@ -45,6 +45,9 @@ func (val Value) Type() Type {
// operating on other unknown values, and so an application that never
// introduces Unknown values can be guaranteed to never receive any either.
func (val Value) IsKnown() bool {
+ if val.IsMarked() {
+ return val.unmarkForce().IsKnown()
+ }
return val.v != unknown
}
@@ -53,6 +56,9 @@ func (val Value) IsKnown() bool {
// produces null, so an application that never introduces Null values can
// be guaranteed to never receive any either.
func (val Value) IsNull() bool {
+ if val.IsMarked() {
+ return val.unmarkForce().IsNull()
+ }
return val.v == nil
}
@@ -74,6 +80,10 @@ var NilVal = Value{
// inside collections and structures to see if there are any nested unknown
// values.
func (val Value) IsWhollyKnown() bool {
+ if val.IsMarked() {
+ return val.unmarkForce().IsWhollyKnown()
+ }
+
if !val.IsKnown() {
return false
}
diff --git a/vendor/github.com/zclconf/go-cty/cty/value_init.go b/vendor/github.com/zclconf/go-cty/cty/value_init.go
index 3deeba3bd..2dafe17ae 100644
--- a/vendor/github.com/zclconf/go-cty/cty/value_init.go
+++ b/vendor/github.com/zclconf/go-cty/cty/value_init.go
@@ -240,8 +240,18 @@ func SetVal(vals []Value) Value {
}
elementType := DynamicPseudoType
rawList := make([]interface{}, len(vals))
+ var markSets []ValueMarks
for i, val := range vals {
+ if unmarkedVal, marks := val.UnmarkDeep(); len(marks) > 0 {
+ val = unmarkedVal
+ markSets = append(markSets, marks)
+ }
+ if val.ContainsMarked() {
+ // FIXME: Allow this, but unmark the values and apply the
+ // marking to the set itself instead.
+ panic("set cannot contain marked values")
+ }
if elementType == DynamicPseudoType {
elementType = val.ty
} else if val.ty != DynamicPseudoType && !elementType.Equals(val.ty) {
@@ -259,7 +269,7 @@ func SetVal(vals []Value) Value {
return Value{
ty: Set(elementType),
v: rawVal,
- }
+ }.WithMarks(markSets...)
}
// SetValFromValueSet returns a Value of set type based on an already-constructed
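// A hedged sketch of the SetVal behavior added above: marks are stripped from
// the element values and applied to the resulting set as a whole.
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	s := cty.SetVal([]cty.Value{
		cty.StringVal("a").Mark("sensitive"),
		cty.StringVal("b"),
	})

	// The element-level mark is lifted onto the set itself.
	fmt.Println(s.IsMarked()) // true

	unmarked, marks := s.Unmark()
	fmt.Println(unmarked.IsMarked()) // false
	fmt.Printf("%#v\n", marks)       // cty.NewValueMarks("sensitive")
}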
diff --git a/vendor/github.com/zclconf/go-cty/cty/value_ops.go b/vendor/github.com/zclconf/go-cty/cty/value_ops.go
index afd621cf4..35a644be4 100644
--- a/vendor/github.com/zclconf/go-cty/cty/value_ops.go
+++ b/vendor/github.com/zclconf/go-cty/cty/value_ops.go
@@ -11,6 +11,18 @@ import (
// GoString is an implementation of fmt.GoStringer that produces concise
// source-like representations of values suitable for use in debug messages.
func (val Value) GoString() string {
+ if val.IsMarked() {
+ unVal, marks := val.Unmark()
+ if len(marks) == 1 {
+ var mark interface{}
+ for m := range marks {
+ mark = m
+ }
+ return fmt.Sprintf("%#v.Mark(%#v)", unVal, mark)
+ }
+ return fmt.Sprintf("%#v.WithMarks(%#v)", unVal, marks)
+ }
+
if val == NilVal {
return "cty.NilVal"
}
@@ -82,7 +94,11 @@ func (val Value) GoString() string {
vals := val.AsValueMap()
return fmt.Sprintf("cty.ObjectVal(%#v)", vals)
case val.ty.IsCapsuleType():
- return fmt.Sprintf("cty.CapsuleVal(%#v, %#v)", val.ty, val.v)
+ impl := val.ty.CapsuleOps().GoString
+ if impl == nil {
+ return fmt.Sprintf("cty.CapsuleVal(%#v, %#v)", val.ty, val.v)
+ }
+ return impl(val.EncapsulatedValue())
}
// Default exposes implementation details, so should actually cover
@@ -101,6 +117,12 @@ func (val Value) GoString() string {
// Use RawEquals to compare if two values are equal *ignoring* the
// short-circuit rules and the exception for null values.
func (val Value) Equals(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.Equals(other).WithMarks(valMarks, otherMarks)
+ }
+
// Start by handling Unknown values before considering types.
// This needs to be done since Null values are always equal regardless of
// type.
@@ -288,10 +310,22 @@ func (val Value) Equals(other Value) Value {
}
}
case ty.IsCapsuleType():
- // A capsule type's encapsulated value is a pointer to a value of its
- // native type, so we can just compare these to get the identity test
- // we need.
- return BoolVal(val.v == other.v)
+ impl := val.ty.CapsuleOps().Equals
+ if impl == nil {
+ impl := val.ty.CapsuleOps().RawEquals
+ if impl == nil {
+ // A capsule type's encapsulated value is a pointer to a value of its
+ // native type, so we can just compare these to get the identity test
+ // we need.
+ return BoolVal(val.v == other.v)
+ }
+ return BoolVal(impl(val.v, other.v))
+ }
+ ret := impl(val.v, other.v)
+ if !ret.Type().Equals(Bool) {
+ panic(fmt.Sprintf("Equals for %#v returned %#v, not cty.Bool", ty, ret.Type()))
+ }
+ return ret
default:
// should never happen
@@ -314,6 +348,7 @@ func (val Value) NotEqual(other Value) Value {
// or null values. For more robust handling with unknown value
// short-circuiting, use val.Equals(cty.True).
func (val Value) True() bool {
+ val.assertUnmarked()
if val.ty != Bool {
panic("not bool")
}
@@ -338,6 +373,13 @@ func (val Value) RawEquals(other Value) bool {
if !val.ty.Equals(other.ty) {
return false
}
+ if !val.HasSameMarks(other) {
+ return false
+ }
+ // Since we've now checked the marks, we'll unmark for the rest of this...
+ val = val.unmarkForce()
+ other = other.unmarkForce()
+
if (!val.IsKnown()) && (!other.IsKnown()) {
return true
}
@@ -448,10 +490,14 @@ func (val Value) RawEquals(other Value) bool {
}
return false
case ty.IsCapsuleType():
- // A capsule type's encapsulated value is a pointer to a value of its
- // native type, so we can just compare these to get the identity test
- // we need.
- return val.v == other.v
+ impl := val.ty.CapsuleOps().RawEquals
+ if impl == nil {
+ // A capsule type's encapsulated value is a pointer to a value of its
+ // native type, so we can just compare these to get the identity test
+ // we need.
+ return val.v == other.v
+ }
+ return impl(val.v, other.v)
default:
// should never happen
@@ -462,6 +508,12 @@ func (val Value) RawEquals(other Value) bool {
// Add returns the sum of the receiver and the given other value. Both values
// must be numbers; this method will panic if not.
func (val Value) Add(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.Add(other).WithMarks(valMarks, otherMarks)
+ }
+
if shortCircuit := mustTypeCheck(Number, Number, val, other); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Number)
return *shortCircuit
@@ -475,6 +527,12 @@ func (val Value) Add(other Value) Value {
// Subtract returns receiver minus the given other value. Both values must be
// numbers; this method will panic if not.
func (val Value) Subtract(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.Subtract(other).WithMarks(valMarks, otherMarks)
+ }
+
if shortCircuit := mustTypeCheck(Number, Number, val, other); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Number)
return *shortCircuit
@@ -486,6 +544,11 @@ func (val Value) Subtract(other Value) Value {
// Negate returns the numeric negative of the receiver, which must be a number.
// This method will panic when given a value of any other type.
func (val Value) Negate() Value {
+ if val.IsMarked() {
+ val, valMarks := val.Unmark()
+ return val.Negate().WithMarks(valMarks)
+ }
+
if shortCircuit := mustTypeCheck(Number, Number, val); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Number)
return *shortCircuit
@@ -498,6 +561,12 @@ func (val Value) Negate() Value {
// Multiply returns the product of the receiver and the given other value.
// Both values must be numbers; this method will panic if not.
func (val Value) Multiply(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.Multiply(other).WithMarks(valMarks, otherMarks)
+ }
+
if shortCircuit := mustTypeCheck(Number, Number, val, other); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Number)
return *shortCircuit
@@ -520,6 +589,12 @@ func (val Value) Multiply(other Value) Value {
// If both values are zero or infinity, this function will panic with
// an instance of big.ErrNaN.
func (val Value) Divide(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.Divide(other).WithMarks(valMarks, otherMarks)
+ }
+
if shortCircuit := mustTypeCheck(Number, Number, val, other); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Number)
return *shortCircuit
@@ -546,6 +621,12 @@ func (val Value) Divide(other Value) Value {
// may wish to disallow such things outright or implement their own modulo
// if they disagree with the interpretation used here.
func (val Value) Modulo(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.Modulo(other).WithMarks(valMarks, otherMarks)
+ }
+
if shortCircuit := mustTypeCheck(Number, Number, val, other); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Number)
return *shortCircuit
@@ -576,6 +657,11 @@ func (val Value) Modulo(other Value) Value {
// Absolute returns the absolute (signless) value of the receiver, which must
// be a number or this method will panic.
func (val Value) Absolute() Value {
+ if val.IsMarked() {
+ val, valMarks := val.Unmark()
+ return val.Absolute().WithMarks(valMarks)
+ }
+
if shortCircuit := mustTypeCheck(Number, Number, val); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Number)
return *shortCircuit
@@ -596,6 +682,11 @@ func (val Value) Absolute() Value {
// This method may be called on a value whose type is DynamicPseudoType,
// in which case the result will also be DynamicVal.
func (val Value) GetAttr(name string) Value {
+ if val.IsMarked() {
+ val, valMarks := val.Unmark()
+ return val.GetAttr(name).WithMarks(valMarks)
+ }
+
if val.ty == DynamicPseudoType {
return DynamicVal
}
@@ -638,6 +729,12 @@ func (val Value) GetAttr(name string) Value {
// This method may be called on a value whose type is DynamicPseudoType,
// in which case the result will also be the DynamicValue.
func (val Value) Index(key Value) Value {
+ if val.IsMarked() || key.IsMarked() {
+ val, valMarks := val.Unmark()
+ key, keyMarks := key.Unmark()
+ return val.Index(key).WithMarks(valMarks, keyMarks)
+ }
+
if val.ty == DynamicPseudoType {
return DynamicVal
}
@@ -733,6 +830,12 @@ func (val Value) Index(key Value) Value {
// This method will panic if the receiver is not indexable, but does not
// impose any panic-causing type constraints on the key.
func (val Value) HasIndex(key Value) Value {
+ if val.IsMarked() || key.IsMarked() {
+ val, valMarks := val.Unmark()
+ key, keyMarks := key.Unmark()
+ return val.HasIndex(key).WithMarks(valMarks, keyMarks)
+ }
+
if val.ty == DynamicPseudoType {
return UnknownVal(Bool)
}
@@ -810,6 +913,12 @@ func (val Value) HasIndex(key Value) Value {
//
// This method will panic if the receiver is not a set, or if it is a null set.
func (val Value) HasElement(elem Value) Value {
+ if val.IsMarked() || elem.IsMarked() {
+ val, valMarks := val.Unmark()
+ elem, elemMarks := elem.Unmark()
+ return val.HasElement(elem).WithMarks(valMarks, elemMarks)
+ }
+
ty := val.Type()
if !ty.IsSetType() {
@@ -841,6 +950,11 @@ func (val Value) HasElement(elem Value) Value {
// of a string, call AsString and take the length of the native Go string
// that is returned.
func (val Value) Length() Value {
+ if val.IsMarked() {
+ val, valMarks := val.Unmark()
+ return val.Length().WithMarks(valMarks)
+ }
+
if val.Type().IsTupleType() {
// For tuples, we can return the length even if the value is not known.
return NumberIntVal(int64(val.Type().Length()))
@@ -859,6 +973,7 @@ func (val Value) Length() Value {
// This is an integration method provided for the convenience of code bridging
// into Go's type system.
func (val Value) LengthInt() int {
+ val.assertUnmarked()
if val.Type().IsTupleType() {
// For tuples, we can return the length even if the value is not known.
return val.Type().Length()
@@ -915,6 +1030,7 @@ func (val Value) LengthInt() int {
// ElementIterator is an integration method, so it cannot handle Unknown
// values. This method will panic if the receiver is Unknown.
func (val Value) ElementIterator() ElementIterator {
+ val.assertUnmarked()
if !val.IsKnown() {
panic("can't use ElementIterator on unknown value")
}
@@ -943,6 +1059,7 @@ func (val Value) CanIterateElements() bool {
// ForEachElement is an integration method, so it cannot handle Unknown
// values. This method will panic if the receiver is Unknown.
func (val Value) ForEachElement(cb ElementCallback) bool {
+ val.assertUnmarked()
it := val.ElementIterator()
for it.Next() {
key, val := it.Element()
@@ -957,6 +1074,11 @@ func (val Value) ForEachElement(cb ElementCallback) bool {
// Not returns the logical inverse of the receiver, which must be of type
// Bool or this method will panic.
func (val Value) Not() Value {
+ if val.IsMarked() {
+ val, valMarks := val.Unmark()
+ return val.Not().WithMarks(valMarks)
+ }
+
if shortCircuit := mustTypeCheck(Bool, Bool, val); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Bool)
return *shortCircuit
@@ -968,6 +1090,12 @@ func (val Value) Not() Value {
// And returns the result of logical AND with the receiver and the other given
// value, which must both be of type Bool or this method will panic.
func (val Value) And(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.And(other).WithMarks(valMarks, otherMarks)
+ }
+
if shortCircuit := mustTypeCheck(Bool, Bool, val, other); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Bool)
return *shortCircuit
@@ -979,6 +1107,12 @@ func (val Value) And(other Value) Value {
// Or returns the result of logical OR with the receiver and the other given
// value, which must both be of type Bool or this method will panic.
func (val Value) Or(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.Or(other).WithMarks(valMarks, otherMarks)
+ }
+
if shortCircuit := mustTypeCheck(Bool, Bool, val, other); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Bool)
return *shortCircuit
@@ -990,6 +1124,12 @@ func (val Value) Or(other Value) Value {
// LessThan returns True if the receiver is less than the other given value,
// which must both be numbers or this method will panic.
func (val Value) LessThan(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.LessThan(other).WithMarks(valMarks, otherMarks)
+ }
+
if shortCircuit := mustTypeCheck(Number, Bool, val, other); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Bool)
return *shortCircuit
@@ -1001,6 +1141,12 @@ func (val Value) LessThan(other Value) Value {
// GreaterThan returns True if the receiver is greater than the other given
// value, which must both be numbers or this method will panic.
func (val Value) GreaterThan(other Value) Value {
+ if val.IsMarked() || other.IsMarked() {
+ val, valMarks := val.Unmark()
+ other, otherMarks := other.Unmark()
+ return val.GreaterThan(other).WithMarks(valMarks, otherMarks)
+ }
+
if shortCircuit := mustTypeCheck(Number, Bool, val, other); shortCircuit != nil {
shortCircuit = forceShortCircuitType(shortCircuit, Bool)
return *shortCircuit
@@ -1022,6 +1168,7 @@ func (val Value) GreaterThanOrEqualTo(other Value) Value {
// AsString returns the native string from a non-null, non-unknown cty.String
// value, or panics if called on any other value.
func (val Value) AsString() string {
+ val.assertUnmarked()
if val.ty != String {
panic("not a string")
}
@@ -1041,6 +1188,7 @@ func (val Value) AsString() string {
// For more convenient conversions to other native numeric types, use the
// "gocty" package.
func (val Value) AsBigFloat() *big.Float {
+ val.assertUnmarked()
if val.ty != Number {
panic("not a number")
}
@@ -1064,6 +1212,7 @@ func (val Value) AsBigFloat() *big.Float {
// For more convenient conversions to slices of more specific types, use
// the "gocty" package.
func (val Value) AsValueSlice() []Value {
+ val.assertUnmarked()
l := val.LengthInt()
if l == 0 {
return nil
@@ -1084,6 +1233,7 @@ func (val Value) AsValueSlice() []Value {
// For more convenient conversions to maps of more specific types, use
// the "gocty" package.
func (val Value) AsValueMap() map[string]Value {
+ val.assertUnmarked()
l := val.LengthInt()
if l == 0 {
return nil
@@ -1108,6 +1258,7 @@ func (val Value) AsValueMap() map[string]Value {
//
// The returned ValueSet can store only values of the receiver's element type.
func (val Value) AsValueSet() ValueSet {
+ val.assertUnmarked()
if !val.Type().IsCollectionType() {
panic("not a collection type")
}
@@ -1130,6 +1281,7 @@ func (val Value) AsValueSet() ValueSet {
// the value. Since cty considers values to be immutable, it is strongly
// recommended to treat the encapsulated value itself as immutable too.
func (val Value) EncapsulatedValue() interface{} {
+ val.assertUnmarked()
if !val.Type().IsCapsuleType() {
panic("not a capsule-typed value")
}
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 5f823221c..8d4701f6c 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -60,10 +60,6 @@ github.com/aws/aws-sdk-go/service/sts/stsiface
github.com/bgentry/go-netrc/netrc
# github.com/bgentry/speakeasy v0.1.0
github.com/bgentry/speakeasy
-# github.com/blang/semver v3.5.1+incompatible
-github.com/blang/semver
-# github.com/bmatcuk/doublestar v1.1.5
-github.com/bmatcuk/doublestar
# github.com/davecgh/go-spew v1.1.1
github.com/davecgh/go-spew/spew
# github.com/fatih/color v1.7.0
@@ -91,7 +87,7 @@ github.com/googleapis/gax-go/v2
github.com/hashicorp/errwrap
# github.com/hashicorp/go-cleanhttp v0.5.1
github.com/hashicorp/go-cleanhttp
-# github.com/hashicorp/go-getter v1.4.0
+# github.com/hashicorp/go-getter v1.4.2-0.20200106182914-9813cbd4eb02
github.com/hashicorp/go-getter
github.com/hashicorp/go-getter/helper/url
# github.com/hashicorp/go-hclog v0.9.2
@@ -117,8 +113,9 @@ github.com/hashicorp/hcl/hcl/token
github.com/hashicorp/hcl/json/parser
github.com/hashicorp/hcl/json/scanner
github.com/hashicorp/hcl/json/token
-# github.com/hashicorp/hcl/v2 v2.0.0
+# github.com/hashicorp/hcl/v2 v2.3.0
github.com/hashicorp/hcl/v2
+github.com/hashicorp/hcl/v2/ext/customdecode
github.com/hashicorp/hcl/v2/ext/dynblock
github.com/hashicorp/hcl/v2/ext/typeexpr
github.com/hashicorp/hcl/v2/gohcl
@@ -128,68 +125,56 @@ github.com/hashicorp/hcl/v2/hclparse
github.com/hashicorp/hcl/v2/hclsyntax
github.com/hashicorp/hcl/v2/hclwrite
github.com/hashicorp/hcl/v2/json
-# github.com/hashicorp/hcl2 v0.0.0-20190821123243-0c888d1241f6
-github.com/hashicorp/hcl2/gohcl
-github.com/hashicorp/hcl2/hcl
-github.com/hashicorp/hcl2/hcl/hclsyntax
-github.com/hashicorp/hcl2/hcl/json
-github.com/hashicorp/hcl2/hclparse
-github.com/hashicorp/hcl2/hclwrite
-# github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590
-github.com/hashicorp/hil
-github.com/hashicorp/hil/ast
-github.com/hashicorp/hil/parser
-github.com/hashicorp/hil/scanner
# github.com/hashicorp/logutils v1.0.0
github.com/hashicorp/logutils
-# github.com/hashicorp/terraform v0.12.17
-github.com/hashicorp/terraform/addrs
-github.com/hashicorp/terraform/command/format
-github.com/hashicorp/terraform/config
-github.com/hashicorp/terraform/configs
-github.com/hashicorp/terraform/configs/configload
-github.com/hashicorp/terraform/configs/configschema
-github.com/hashicorp/terraform/configs/hcl2shim
-github.com/hashicorp/terraform/dag
-github.com/hashicorp/terraform/flatmap
-github.com/hashicorp/terraform/helper/acctest
-github.com/hashicorp/terraform/helper/config
-github.com/hashicorp/terraform/helper/didyoumean
-github.com/hashicorp/terraform/helper/hashcode
-github.com/hashicorp/terraform/helper/hilmapstructure
-github.com/hashicorp/terraform/helper/logging
-github.com/hashicorp/terraform/helper/mutexkv
-github.com/hashicorp/terraform/helper/plugin
-github.com/hashicorp/terraform/helper/resource
-github.com/hashicorp/terraform/helper/schema
-github.com/hashicorp/terraform/helper/structure
-github.com/hashicorp/terraform/helper/validation
-github.com/hashicorp/terraform/httpclient
-github.com/hashicorp/terraform/internal/earlyconfig
-github.com/hashicorp/terraform/internal/initwd
-github.com/hashicorp/terraform/internal/modsdir
-github.com/hashicorp/terraform/internal/tfplugin5
-github.com/hashicorp/terraform/lang
-github.com/hashicorp/terraform/lang/blocktoattr
-github.com/hashicorp/terraform/lang/funcs
-github.com/hashicorp/terraform/moduledeps
-github.com/hashicorp/terraform/plans
-github.com/hashicorp/terraform/plans/objchange
-github.com/hashicorp/terraform/plugin
-github.com/hashicorp/terraform/plugin/convert
-github.com/hashicorp/terraform/plugin/discovery
-github.com/hashicorp/terraform/providers
-github.com/hashicorp/terraform/provisioners
-github.com/hashicorp/terraform/registry
-github.com/hashicorp/terraform/registry/regsrc
-github.com/hashicorp/terraform/registry/response
-github.com/hashicorp/terraform/states
-github.com/hashicorp/terraform/states/statefile
-github.com/hashicorp/terraform/terraform
-github.com/hashicorp/terraform/tfdiags
-github.com/hashicorp/terraform/version
-# github.com/hashicorp/terraform-config-inspect v0.0.0-20190821133035-82a99dc22ef4
+# github.com/hashicorp/terraform-config-inspect v0.0.0-20191212124732-c6ae6269b9d7
github.com/hashicorp/terraform-config-inspect/tfconfig
+# github.com/hashicorp/terraform-plugin-sdk v1.6.0
+github.com/hashicorp/terraform-plugin-sdk/helper/acctest
+github.com/hashicorp/terraform-plugin-sdk/helper/hashcode
+github.com/hashicorp/terraform-plugin-sdk/helper/logging
+github.com/hashicorp/terraform-plugin-sdk/helper/mutexkv
+github.com/hashicorp/terraform-plugin-sdk/helper/resource
+github.com/hashicorp/terraform-plugin-sdk/helper/schema
+github.com/hashicorp/terraform-plugin-sdk/helper/structure
+github.com/hashicorp/terraform-plugin-sdk/helper/validation
+github.com/hashicorp/terraform-plugin-sdk/httpclient
+github.com/hashicorp/terraform-plugin-sdk/internal/addrs
+github.com/hashicorp/terraform-plugin-sdk/internal/command/format
+github.com/hashicorp/terraform-plugin-sdk/internal/configs
+github.com/hashicorp/terraform-plugin-sdk/internal/configs/configload
+github.com/hashicorp/terraform-plugin-sdk/internal/configs/configschema
+github.com/hashicorp/terraform-plugin-sdk/internal/configs/hcl2shim
+github.com/hashicorp/terraform-plugin-sdk/internal/dag
+github.com/hashicorp/terraform-plugin-sdk/internal/earlyconfig
+github.com/hashicorp/terraform-plugin-sdk/internal/flatmap
+github.com/hashicorp/terraform-plugin-sdk/internal/helper/config
+github.com/hashicorp/terraform-plugin-sdk/internal/helper/didyoumean
+github.com/hashicorp/terraform-plugin-sdk/internal/helper/plugin
+github.com/hashicorp/terraform-plugin-sdk/internal/httpclient
+github.com/hashicorp/terraform-plugin-sdk/internal/initwd
+github.com/hashicorp/terraform-plugin-sdk/internal/lang
+github.com/hashicorp/terraform-plugin-sdk/internal/lang/blocktoattr
+github.com/hashicorp/terraform-plugin-sdk/internal/lang/funcs
+github.com/hashicorp/terraform-plugin-sdk/internal/modsdir
+github.com/hashicorp/terraform-plugin-sdk/internal/moduledeps
+github.com/hashicorp/terraform-plugin-sdk/internal/plans
+github.com/hashicorp/terraform-plugin-sdk/internal/plans/objchange
+github.com/hashicorp/terraform-plugin-sdk/internal/plugin/convert
+github.com/hashicorp/terraform-plugin-sdk/internal/plugin/discovery
+github.com/hashicorp/terraform-plugin-sdk/internal/providers
+github.com/hashicorp/terraform-plugin-sdk/internal/provisioners
+github.com/hashicorp/terraform-plugin-sdk/internal/registry
+github.com/hashicorp/terraform-plugin-sdk/internal/registry/regsrc
+github.com/hashicorp/terraform-plugin-sdk/internal/registry/response
+github.com/hashicorp/terraform-plugin-sdk/internal/states
+github.com/hashicorp/terraform-plugin-sdk/internal/states/statefile
+github.com/hashicorp/terraform-plugin-sdk/internal/tfdiags
+github.com/hashicorp/terraform-plugin-sdk/internal/tfplugin5
+github.com/hashicorp/terraform-plugin-sdk/internal/version
+github.com/hashicorp/terraform-plugin-sdk/meta
+github.com/hashicorp/terraform-plugin-sdk/plugin
+github.com/hashicorp/terraform-plugin-sdk/terraform
# github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596
github.com/hashicorp/terraform-svchost
github.com/hashicorp/terraform-svchost/auth
@@ -216,8 +201,6 @@ github.com/mitchellh/go-homedir
github.com/mitchellh/go-testing-interface
# github.com/mitchellh/go-wordwrap v1.0.0
github.com/mitchellh/go-wordwrap
-# github.com/mitchellh/hashstructure v1.0.0
-github.com/mitchellh/hashstructure
# github.com/mitchellh/mapstructure v1.1.2
github.com/mitchellh/mapstructure
# github.com/mitchellh/reflectwalk v1.0.1
@@ -226,8 +209,6 @@ github.com/mitchellh/reflectwalk
github.com/oklog/run
# github.com/openlyinc/pointy v1.1.2
github.com/openlyinc/pointy
-# github.com/outscale/osc-go v0.0.9
-github.com/outscale/osc-go/oapi
# github.com/posener/complete v1.2.3
github.com/posener/complete
github.com/posener/complete/cmd
@@ -245,7 +226,7 @@ github.com/ulikunitz/xz/lzma
# github.com/vmihailenco/msgpack v4.0.1+incompatible
github.com/vmihailenco/msgpack
github.com/vmihailenco/msgpack/codes
-# github.com/zclconf/go-cty v1.1.1
+# github.com/zclconf/go-cty v1.2.1
github.com/zclconf/go-cty/cty
github.com/zclconf/go-cty/cty/convert
github.com/zclconf/go-cty/cty/function
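
The modules.txt change records the move from the monolithic github.com/hashicorp/terraform module to the standalone terraform-plugin-sdk v1.6.0, alongside the go-getter, hcl/v2, terraform-config-inspect and go-cty bumps and the removal of osc-go, hil, hashstructure, semver and doublestar. For provider code this is essentially an import-path rewrite; a hedged sketch of what a typical provider file looks like after the move (the file, function and resource names here are illustrative, not taken from this diff):

```go
// Hypothetical provider source file; the concrete files touched in this
// provider are not shown in this part of the diff.
package outscale

import (
	// Previously vendored as "github.com/hashicorp/terraform/helper/schema".
	"github.com/hashicorp/terraform-plugin-sdk/helper/schema"
)

// Provider builds a *schema.Provider exactly as before the migration;
// only the import path changes, the helper/schema API stays the same.
func Provider() *schema.Provider {
	return &schema.Provider{
		ResourcesMap: map[string]*schema.Resource{},
	}
}
```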
diff --git a/website/docs/d/image.html.markdown b/website/docs/d/image.html.markdown
index e521e5313..b1f1ef76e 100644
--- a/website/docs/d/image.html.markdown
+++ b/website/docs/d/image.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific image.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+OMIs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-image).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-image).
## Example Usage
diff --git a/website/docs/d/images.html.markdown b/website/docs/d/images.html.markdown
index 6b24f650f..b74f9ae81 100644
--- a/website/docs/d/images.html.markdown
+++ b/website/docs/d/images.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about images.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+OMIs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-image).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-image).
## Example Usage
diff --git a/website/docs/d/internet_service.html.markdown b/website/docs/d/internet_service.html.markdown
index 81b7183bc..3e21e6be4 100644
--- a/website/docs/d/internet_service.html.markdown
+++ b/website/docs/d/internet_service.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific Internet service.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Internet+Gateways).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-internetservice).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-internetservice).
## Example Usage
diff --git a/website/docs/d/internet_services.html.markdown b/website/docs/d/internet_services.html.markdown
index e74d949e5..65a99d0df 100644
--- a/website/docs/d/internet_services.html.markdown
+++ b/website/docs/d/internet_services.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about Internet services.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Internet+Gateways).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-internetservice).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-internetservice).
## Example Usage
diff --git a/website/docs/d/keypair.html.markdown b/website/docs/d/keypair.html.markdown
index 62f2cf838..e491ac324 100644
--- a/website/docs/d/keypair.html.markdown
+++ b/website/docs/d/keypair.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific keypair.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Keypairs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-keypair).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-keypair).
## Example Usage
diff --git a/website/docs/d/keypairs.html.markdown b/website/docs/d/keypairs.html.markdown
index 5d3559b5e..c18c84a90 100644
--- a/website/docs/d/keypairs.html.markdown
+++ b/website/docs/d/keypairs.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about keypairs.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Keypairs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-keypair).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-keypair).
## Example Usage
diff --git a/website/docs/d/nat_service.html.markdown b/website/docs/d/nat_service.html.markdown
index 4f99a09d8..4cae0308e 100644
--- a/website/docs/d/nat_service.html.markdown
+++ b/website/docs/d/nat_service.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific NAT service.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+NAT+Devices).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-natservice).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-natservice).
## Example Usage
diff --git a/website/docs/d/nat_services.html.markdown b/website/docs/d/nat_services.html.markdown
index f3fb46a0c..4bc4f57bf 100644
--- a/website/docs/d/nat_services.html.markdown
+++ b/website/docs/d/nat_services.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about NAT services.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+NAT+Devices).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-natservice).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-natservice).
## Example Usage
diff --git a/website/docs/d/net.html.markdown b/website/docs/d/net.html.markdown
index 636331eb2..dfe5328ec 100644
--- a/website/docs/d/net.html.markdown
+++ b/website/docs/d/net.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific Net.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPCs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-net).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-net).
## Example Usage
diff --git a/website/docs/d/net_attributes.html.markdown b/website/docs/d/net_attributes.html.markdown
index 40058544e..cf0c3f8c3 100644
--- a/website/docs/d/net_attributes.html.markdown
+++ b/website/docs/d/net_attributes.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about Net attributes.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPCs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#updatenet).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#updatenet).
## Example Usage
diff --git a/website/docs/d/net_peering.html.markdown b/website/docs/d/net_peering.html.markdown
index 2289c16d8..42bbb487e 100644
--- a/website/docs/d/net_peering.html.markdown
+++ b/website/docs/d/net_peering.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific Net peering.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPC+Peering+Connections).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-netpeering).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-netpeering).
## Example Usage
diff --git a/website/docs/d/net_peerings.html.markdown b/website/docs/d/net_peerings.html.markdown
index 7b889229a..d9785c347 100644
--- a/website/docs/d/net_peerings.html.markdown
+++ b/website/docs/d/net_peerings.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about Net peerings.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPC+Peering+Connections).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-netpeering).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-netpeering).
## Example Usage
diff --git a/website/docs/d/nets.html.markdown b/website/docs/d/nets.html.markdown
index 11b225305..acf9baed1 100644
--- a/website/docs/d/nets.html.markdown
+++ b/website/docs/d/nets.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about Nets.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPCs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-net).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-net).
## Example Usage
diff --git a/website/docs/d/nic.html.markdown b/website/docs/d/nic.html.markdown
index 4c07831b4..3dd566758 100644
--- a/website/docs/d/nic.html.markdown
+++ b/website/docs/d/nic.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific NIC.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+FNIs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-nic).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-nic).
## Example Usage
diff --git a/website/docs/d/nics.html.markdown b/website/docs/d/nics.html.markdown
index a7bc9d883..56aa9c884 100644
--- a/website/docs/d/nics.html.markdown
+++ b/website/docs/d/nics.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about NICs.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+FNIs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-nic).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-nic).
## Example Usage
diff --git a/website/docs/d/public_ip.html.markdown b/website/docs/d/public_ip.html.markdown
index 0eef07008..8ee83b69d 100644
--- a/website/docs/d/public_ip.html.markdown
+++ b/website/docs/d/public_ip.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific public IP.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+EIPs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-publicip).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-publicip).
## Example Usage
diff --git a/website/docs/d/public_ips.html.markdown b/website/docs/d/public_ips.html.markdown
index 147b01243..eaa912254 100644
--- a/website/docs/d/public_ips.html.markdown
+++ b/website/docs/d/public_ips.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about public IPs.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+EIPs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-publicip).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-publicip).
## Example Usage
diff --git a/website/docs/d/route_table.html.markdown b/website/docs/d/route_table.html.markdown
index f2e93d7e1..6780f9671 100644
--- a/website/docs/d/route_table.html.markdown
+++ b/website/docs/d/route_table.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific route table.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Route+Tables).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-routetable).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-routetable).
## Example Usage
diff --git a/website/docs/d/route_tables.html.markdown b/website/docs/d/route_tables.html.markdown
index 55529bd28..1677dcb90 100644
--- a/website/docs/d/route_tables.html.markdown
+++ b/website/docs/d/route_tables.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about route tables.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Route+Tables).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-routetable).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-routetable).
## Example Usage
diff --git a/website/docs/d/security_group.html.markdown b/website/docs/d/security_group.html.markdown
index afde66a8f..137f11e7b 100644
--- a/website/docs/d/security_group.html.markdown
+++ b/website/docs/d/security_group.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific security group.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Security+Groups).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-securitygroup).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-securitygroup).
## Example Usage
@@ -53,7 +53,7 @@ The following attributes are exported:
* `account_id` - The account ID of a user.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
- * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs-beta.outscale.com/#readnetaccesspointservices).
+ * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs.outscale.com/api#readnetaccesspointservices).
* `to_port_range` - The end of the port range for the TCP and UDP protocols, or an ICMP type number.
* `net_id` - The ID of the Net for the security group.
* `outbound_rules` - The outbound rules associated with the security group.
@@ -64,7 +64,7 @@ The following attributes are exported:
* `account_id` - The account ID of a user.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
- * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs-beta.outscale.com/#readnetaccesspointservices).
+ * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs.outscale.com/api#readnetaccesspointservices).
* `to_port_range` - The end of the port range for the TCP and UDP protocols, or an ICMP type number.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
diff --git a/website/docs/d/security_groups.html.markdown b/website/docs/d/security_groups.html.markdown
index 5ecd6ffad..dd30e4163 100644
--- a/website/docs/d/security_groups.html.markdown
+++ b/website/docs/d/security_groups.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about security groups.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Security+Groups).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-securitygroup).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-securitygroup).
## Example Usage
@@ -53,7 +53,7 @@ The following attributes are exported:
* `account_id` - The account ID of a user.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
- * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs-beta.outscale.com/#readnetaccesspointservices).
+ * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs.outscale.com/api#readnetaccesspointservices).
* `to_port_range` - The end of the port range for the TCP and UDP protocols, or an ICMP type number.
* `net_id` - The ID of the Net for the security group.
* `outbound_rules` - The outbound rules associated with the security group.
@@ -64,7 +64,7 @@ The following attributes are exported:
* `account_id` - The account ID of a user.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
- * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs-beta.outscale.com/#readnetaccesspointservices).
+ * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs.outscale.com/api#readnetaccesspointservices).
* `to_port_range` - The end of the port range for the TCP and UDP protocols, or an ICMP type number.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
diff --git a/website/docs/d/snapshot.html.markdown b/website/docs/d/snapshot.html.markdown
index da130808b..3dfe34560 100644
--- a/website/docs/d/snapshot.html.markdown
+++ b/website/docs/d/snapshot.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific snapshot.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Snapshots).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-snapshot).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-snapshot).
## Example Usage
diff --git a/website/docs/d/snapshots.html.markdown b/website/docs/d/snapshots.html.markdown
index fd20d1ad2..0ef628fc0 100644
--- a/website/docs/d/snapshots.html.markdown
+++ b/website/docs/d/snapshots.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about snapshots.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Snapshots).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-snapshot).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-snapshot).
## Example Usage
diff --git a/website/docs/d/subnet.html.markdown b/website/docs/d/subnet.html.markdown
index 20a0a94fc..c1419ebc7 100644
--- a/website/docs/d/subnet.html.markdown
+++ b/website/docs/d/subnet.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific Subnet.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPCs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-subnet).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-subnet).
## Example Usage
diff --git a/website/docs/d/subnets.html.markdown b/website/docs/d/subnets.html.markdown
index 252997408..8a21e536a 100644
--- a/website/docs/d/subnets.html.markdown
+++ b/website/docs/d/subnets.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about Subnets.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPCs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-subnet).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-subnet).
## Example Usage
diff --git a/website/docs/d/vm.html.markdown b/website/docs/d/vm.html.markdown
index 1dba303d8..4e7065ee3 100644
--- a/website/docs/d/vm.html.markdown
+++ b/website/docs/d/vm.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific virtual machine (VM).
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Instances).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-vm).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-vm).
## Example Usage
diff --git a/website/docs/d/vm_state.html.markdown b/website/docs/d/vm_state.html.markdown
index 37380b90a..43dfd87c4 100644
--- a/website/docs/d/vm_state.html.markdown
+++ b/website/docs/d/vm_state.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific VM state.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Instance+Lifecycle).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#readvmsstate).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#readvmsstate).
## Example Usage
diff --git a/website/docs/d/vm_states.html.markdown b/website/docs/d/vm_states.html.markdown
index ebf563d14..e8faa5b23 100644
--- a/website/docs/d/vm_states.html.markdown
+++ b/website/docs/d/vm_states.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about VM states.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Instance+Lifecycle).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#readvmsstate).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#readvmsstate).
## Example Usage
diff --git a/website/docs/d/vms.html.markdown b/website/docs/d/vms.html.markdown
index dd9fe8214..f35076071 100644
--- a/website/docs/d/vms.html.markdown
+++ b/website/docs/d/vms.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about virtual machines (VMs).
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Instances).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-vm).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-vm).
## Example Usage
diff --git a/website/docs/d/volume.html.markdown b/website/docs/d/volume.html.markdown
index 09224b521..b53926c07 100644
--- a/website/docs/d/volume.html.markdown
+++ b/website/docs/d/volume.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about a specific volume.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Volumes).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-volume).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-volume).
## Example Usage
diff --git a/website/docs/d/volumes.html.markdown b/website/docs/d/volumes.html.markdown
index 8963368ff..3e0da1991 100644
--- a/website/docs/d/volumes.html.markdown
+++ b/website/docs/d/volumes.html.markdown
@@ -10,7 +10,7 @@ description: |-
Provides information about volumes.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Volumes).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-volume).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-volume).
## Example Usage
diff --git a/website/docs/index.html.markdown b/website/docs/index.html.markdown
index 82bfd65ed..e4b9cb014 100644
--- a/website/docs/index.html.markdown
+++ b/website/docs/index.html.markdown
@@ -11,7 +11,7 @@ The 3DS OUTSCALE provider is used to manage 3DS OUTSCALE Cloud resources.
Use the navigation to the left to read about the available resources.
For more information on our resources, see the [User Guide](https://wiki.outscale.net/display/EN#).
-The provider is based on our 3DS OUTSCALE API. For more information, see [APIs Reference](https://wiki.outscale.net/display/EN/3DS+OUTSCALE+APIs+Reference) and the [API Documentation](https://docs-beta.outscale.com/#3ds-outscale-api).
+The provider is based on our 3DS OUTSCALE API. For more information, see [APIs Reference](https://wiki.outscale.net/display/EN/3DS+OUTSCALE+APIs+Reference) and the [API Documentation](https://docs.outscale.com/api#3ds-outscale-api).
The provider needs to be configured with the proper credentials before it can be used.
diff --git a/website/docs/r/image.html.markdown b/website/docs/r/image.html.markdown
index 63f0ee2e7..252922a1f 100644
--- a/website/docs/r/image.html.markdown
+++ b/website/docs/r/image.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages an image.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+OMIs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-image).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-image).
## Example Usage
diff --git a/website/docs/r/image_launch_permission.html.markdown b/website/docs/r/image_launch_permission.html.markdown
index 04b1c9ac0..9a8fa6cd8 100644
--- a/website/docs/r/image_launch_permission.html.markdown
+++ b/website/docs/r/image_launch_permission.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages an image launch permission.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+OMIs#AboutOMIs-OMIsPermissions,CopiesandExportstoOSU).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#updateimage).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#updateimage).
## Example Usage
diff --git a/website/docs/r/internet_service.html.markdown b/website/docs/r/internet_service.html.markdown
index 755409064..23cfedd1c 100644
--- a/website/docs/r/internet_service.html.markdown
+++ b/website/docs/r/internet_service.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages an Internet service.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Internet+Gateways).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-internetservice).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-internetservice).
## Example Usage
diff --git a/website/docs/r/internet_service_link.html.markdown b/website/docs/r/internet_service_link.html.markdown
index e42dd164a..5a635dd5a 100644
--- a/website/docs/r/internet_service_link.html.markdown
+++ b/website/docs/r/internet_service_link.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages an Internet service link.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Internet+Gateways).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#linkinternetservice).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#linkinternetservice).
## Example Usage
diff --git a/website/docs/r/keypair.html.markdown b/website/docs/r/keypair.html.markdown
index 176fa8182..5a16b3336 100644
--- a/website/docs/r/keypair.html.markdown
+++ b/website/docs/r/keypair.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a keypair.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Keypairs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-keypair).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-keypair).
## Example Usage
diff --git a/website/docs/r/nat_service.html.markdown b/website/docs/r/nat_service.html.markdown
index 1762bef92..3eea55c1e 100644
--- a/website/docs/r/nat_service.html.markdown
+++ b/website/docs/r/nat_service.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a NAT service.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+NAT+Devices).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-natservice).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-natservice).
## Example Usage
diff --git a/website/docs/r/net.html.markdown b/website/docs/r/net.html.markdown
index 8156493a0..21d4d732a 100644
--- a/website/docs/r/net.html.markdown
+++ b/website/docs/r/net.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a Net.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPCs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-net).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-net).
## Example Usage
diff --git a/website/docs/r/net_attributes.html.markdown b/website/docs/r/net_attributes.html.markdown
index 22f603aae..4a1f24ee0 100644
--- a/website/docs/r/net_attributes.html.markdown
+++ b/website/docs/r/net_attributes.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a Net attribute.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPCs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#updatenet).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#updatenet).
## Example Usage
diff --git a/website/docs/r/net_peering.html.markdown b/website/docs/r/net_peering.html.markdown
index 436222cee..b4b1200bd 100644
--- a/website/docs/r/net_peering.html.markdown
+++ b/website/docs/r/net_peering.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a Net peering.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPC+Peering+Connections).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-netpeering).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-netpeering).
## Example Usage
diff --git a/website/docs/r/net_peering_acceptation.html.markdown b/website/docs/r/net_peering_acceptation.html.markdown
index 6a1f90211..30b885db8 100644
--- a/website/docs/r/net_peering_acceptation.html.markdown
+++ b/website/docs/r/net_peering_acceptation.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a Net peering acceptation.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPC+Peering+Connections).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#acceptnetpeering).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#acceptnetpeering).
## Example Usage
diff --git a/website/docs/r/nic.html.markdown b/website/docs/r/nic.html.markdown
index d2728c8b5..9f5eea6d3 100644
--- a/website/docs/r/nic.html.markdown
+++ b/website/docs/r/nic.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a NIC.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+FNIs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-nic).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-nic).
## Example Usage
diff --git a/website/docs/r/nic_link.html.markdown b/website/docs/r/nic_link.html.markdown
index 4ad6c981f..1c9cdd678 100644
--- a/website/docs/r/nic_link.html.markdown
+++ b/website/docs/r/nic_link.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a NIC link.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+FNIs#AboutFNIs-FNIAttachmentFNIsAttachmenttoInstances).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#linknic).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#linknic).
## Example Usage
diff --git a/website/docs/r/nic_private_ip.html.markdown b/website/docs/r/nic_private_ip.html.markdown
index d656bfc08..8c665d58d 100644
--- a/website/docs/r/nic_private_ip.html.markdown
+++ b/website/docs/r/nic_private_ip.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a NIC private IP.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+FNIs#AboutFNIs-FNIsAttributes).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#linkprivateips).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#linkprivateips).
## Example Usage
diff --git a/website/docs/r/public_ip.html.markdown b/website/docs/r/public_ip.html.markdown
index 7a50698ca..87452d5aa 100644
--- a/website/docs/r/public_ip.html.markdown
+++ b/website/docs/r/public_ip.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a public IP.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+EIPs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-publicip).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-publicip).
## Example Usage
diff --git a/website/docs/r/public_ip_link.html.markdown b/website/docs/r/public_ip_link.html.markdown
index 829be07e5..8d5bd7be3 100644
--- a/website/docs/r/public_ip_link.html.markdown
+++ b/website/docs/r/public_ip_link.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a public IP link.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+EIPs#AboutEIPs-EipAssocationEIPAssociation).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#linkpublicip).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#linkpublicip).
## Example Usage
diff --git a/website/docs/r/route.html.markdown b/website/docs/r/route.html.markdown
index 305dc019c..5f1fa6354 100644
--- a/website/docs/r/route.html.markdown
+++ b/website/docs/r/route.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a route.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Route+Tables#AboutRouteTables-RoutesRoutesandRoutingOptions).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-route).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-route).
## Example Usage
diff --git a/website/docs/r/route_table.html.markdown b/website/docs/r/route_table.html.markdown
index f1c9b6517..f7edd81fb 100644
--- a/website/docs/r/route_table.html.markdown
+++ b/website/docs/r/route_table.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a route table.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Route+Tables).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-routetable).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-routetable).
## Example Usage
diff --git a/website/docs/r/route_table_link.html.markdown b/website/docs/r/route_table_link.html.markdown
index 633336c6d..df0a5a692 100644
--- a/website/docs/r/route_table_link.html.markdown
+++ b/website/docs/r/route_table_link.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a route table link.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Route+Tables).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#linkroutetable).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#linkroutetable).
## Example Usage
diff --git a/website/docs/r/security_group.html.markdown b/website/docs/r/security_group.html.markdown
index aafdeb62f..f15e3440e 100644
--- a/website/docs/r/security_group.html.markdown
+++ b/website/docs/r/security_group.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a security group.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Security+Groups).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-securitygroup).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-securitygroup).
## Example Usage
@@ -56,7 +56,7 @@ The following attributes are exported:
* `account_id` - The account ID of a user.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
- * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs-beta.outscale.com/#readnetaccesspointservices).
+ * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs.outscale.com/api#readnetaccesspointservices).
* `to_port_range` - The end of the port range for the TCP and UDP protocols, or an ICMP type number.
* `net_id` - The ID of the Net for the security group.
* `outbound_rules` - The outbound rules associated with the security group.
@@ -67,7 +67,7 @@ The following attributes are exported:
* `account_id` - The account ID of a user.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
- * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs-beta.outscale.com/#readnetaccesspointservices).
+ * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs.outscale.com/api#readnetaccesspointservices).
* `to_port_range` - The end of the port range for the TCP and UDP protocols, or an ICMP type number.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
diff --git a/website/docs/r/security_group_rule.html.markdown b/website/docs/r/security_group_rule.html.markdown
index 8fbcb1a91..878d734d1 100644
--- a/website/docs/r/security_group_rule.html.markdown
+++ b/website/docs/r/security_group_rule.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a security group rule.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Security+Group+Rules).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-securitygrouprule).
+For more information on this resource actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-securitygrouprule).
## Example Usage
@@ -49,7 +49,7 @@ The following arguments are supported:
* `account_id` - (Optional) The account ID of a user.
* `security_group_id` - (Required) The ID of the security group.
* `security_group_name` - (Optional) (Public Cloud only) The name of the security group.
- * `service_ids` - (Optional) One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs-beta.outscale.com/#readnetaccesspointservices).
+ * `service_ids` - (Optional) One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs.outscale.com/api#readnetaccesspointservices).
* `to_port_range` - (Optional) The end of the port range for the TCP and UDP protocols, or an ICMP type number.
* `security_group_account_id_to_link` - (Optional) The account ID of the owner of the security group for which you want to create a rule.
* `security_group_id` - (Required) The ID of the security group for which you want to create a rule.
@@ -71,7 +71,7 @@ The following attributes are exported:
* `account_id` - The account ID of a user.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
- * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs-beta.outscale.com/#readnetaccesspointservices).
+ * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs.outscale.com/api#readnetaccesspointservices).
* `to_port_range` - The end of the port range for the TCP and UDP protocols, or an ICMP type number.
* `net_id` - The ID of the Net for the security group.
* `outbound_rules` - The outbound rules associated with the security group.
@@ -82,7 +82,7 @@ The following attributes are exported:
* `account_id` - The account ID of a user.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
- * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs-beta.outscale.com/#readnetaccesspointservices).
+ * `service_ids` - One or more service IDs to allow traffic from a Net to access the corresponding 3DS OUTSCALE services. For more information, see [ReadNetAccessPointServices](https://docs.outscale.com/api#readnetaccesspointservices).
* `to_port_range` - The end of the port range for the TCP and UDP protocols, or an ICMP type number.
* `security_group_id` - The ID of the security group.
* `security_group_name` - (Public Cloud only) The name of the security group.
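
The `outscale_security_group_rule` hunks above document the arguments and exported attributes piecemeal; a minimal HCL sketch may help tie them together. It is only a sketch under assumptions: the `flow`, `from_port_range`, `ip_protocol`, and `ip_range` arguments are not shown in the hunks above and are assumed to exist with these names, and all IDs, names, and CIDR ranges below are placeholders.

```hcl
# Minimal sketch, not a verified configuration from this repository.
# flow, from_port_range, ip_protocol and ip_range are assumed argument names;
# the description, name and CIDR are placeholders.
resource "outscale_security_group" "example" {
  description         = "Example security group"
  security_group_name = "terraform-sg-example"
}

resource "outscale_security_group_rule" "example_ssh" {
  flow              = "Inbound"
  security_group_id = outscale_security_group.example.security_group_id
  from_port_range   = 22
  to_port_range     = 22
  ip_protocol       = "tcp"
  ip_range          = "10.0.0.0/16"
}
```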
diff --git a/website/docs/r/snapshot.html.markdown b/website/docs/r/snapshot.html.markdown
index 85f51ae50..22d42239d 100644
--- a/website/docs/r/snapshot.html.markdown
+++ b/website/docs/r/snapshot.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a snapshot.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Snapshots).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-snapshot).
+For more information on this resource's actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-snapshot).
## Example Usage
diff --git a/website/docs/r/snapshot_attributes.html.markdown b/website/docs/r/snapshot_attributes.html.markdown
index 40a94575f..6861bf1df 100644
--- a/website/docs/r/snapshot_attributes.html.markdown
+++ b/website/docs/r/snapshot_attributes.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages snapshot attributes.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Snapshots#AboutSnapshots-SnapshotPermissionsandCopy).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#updatesnapshot).
+For more information on this resource's actions, see the [API documentation](https://docs.outscale.com/api#updatesnapshot).
## Example Usage
diff --git a/website/docs/r/subnet.html.markdown b/website/docs/r/subnet.html.markdown
index 5055da816..6ec211bac 100644
--- a/website/docs/r/subnet.html.markdown
+++ b/website/docs/r/subnet.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a Subnet.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+VPCs).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-subnet).
+For more information on this resource's actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-subnet).
## Example Usage
diff --git a/website/docs/r/vm.html.markdown b/website/docs/r/vm.html.markdown
index b4975655c..3851e604e 100644
--- a/website/docs/r/vm.html.markdown
+++ b/website/docs/r/vm.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a virtual machine (VM).
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Instances).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-vm).
+For more information on this resource's actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-vm).
## Example Usage
@@ -167,7 +167,7 @@ For more information about volume types, see [Volume Types and IOPS](https://wik
* `bsu_optimized` - (Optional) If `true`, the VM is created with optimized BSU I/O.
* `client_token` - (Optional) A unique identifier which enables you to manage the idempotency.
* `deletion_protection` - (Optional) If `true`, you cannot terminate the VM using Cockpit, the CLI or the API. If `false`, you can.
-* `image_id` - (Required) The ID of the OMI used to create the VM. You can find the list of OMIs by calling the [ReadImages](https://docs-beta.outscale.com/#readimages) method.
+* `image_id` - (Required) The ID of the OMI used to create the VM. You can find the list of OMIs by calling the [ReadImages](https://docs.outscale.com/api#readimages) method.
* `keypair_name` - (Optional) The name of the keypair.
* `max_vms_count` - (Optional) The maximum number of VMs you want to create. If all the VMs cannot be created, the largest possible number of VMs above MinVmsCount is created.
* `min_vms_count` - (Optional) The minimum number of VMs you want to create. If this number of VMs cannot be created, no VMs are created.
@@ -187,9 +187,9 @@ For more information about volume types, see [Volume Types and IOPS](https://wik
* `placement_tenancy` - (Optional) The tenancy of the VM (`default` | `dedicated`).
* `private_ips` - (Optional) One or more private IP addresses of the VM.
* `security_group_ids` - (Optional) One or more IDs of security group for the VMs.
-* `security_groups` - (Optional) One or more names of security groups for the VMs.
+* `security_group_names` - (Optional) One or more names of security groups for the VMs.
* `subnet_id` - (Optional) The ID of the Subnet in which you want to create the VM.
-* `user_data` - (Optional) Data or a script used to add a specific configuration to the VM when creating it. If you are not using a command line tool, this must be Base64-encoded.
+* `user_data` - (Optional) Data or a script used to add a specific configuration to the VM. It must be Base64-encoded.
* `vm_initiated_shutdown_behavior` - (Optional) The VM behavior when you stop it. By default or if set to `stop`, the VM stops. If set to `restart`, the VM stops then automatically restarts. If set to `terminate`, the VM stops and is terminated.
* `vm_type` - (Optional) The type of VM (`tinav2.c1r2` by default).
For more information, see [Instance Types](https://wiki.outscale.net/display/EN/Instance+Types).
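
The `outscale_vm` argument changes above (notably the rename to `security_group_names` and the Base64-encoded `user_data`) can be illustrated with a minimal HCL sketch. The OMI ID, keypair name, and security group name below are placeholders, not values from this repository.

```hcl
# Minimal sketch using the arguments documented above; IDs and names are placeholders.
resource "outscale_vm" "example" {
  image_id             = "ami-12345678"        # placeholder OMI ID
  vm_type              = "tinav2.c1r2"          # documented default type
  keypair_name         = "terraform-keypair-example"
  security_group_names = ["terraform-sg-example"]

  # user_data must be Base64-encoded, as noted above.
  user_data = base64encode(<<-EOT
    #!/bin/bash
    echo "bootstrapped by Terraform" > /tmp/provisioned
  EOT
  )
}
```

Using Terraform's built-in `base64encode()` keeps the script readable in the configuration while satisfying the encoding requirement stated above.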
diff --git a/website/docs/r/volume.html.markdown b/website/docs/r/volume.html.markdown
index 7967066fd..7288eff4b 100644
--- a/website/docs/r/volume.html.markdown
+++ b/website/docs/r/volume.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a volume.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Volumes).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#3ds-outscale-api-volume).
+For more information on this resource's actions, see the [API documentation](https://docs.outscale.com/api#3ds-outscale-api-volume).
## Example Usage
diff --git a/website/docs/r/volumes_link.html.markdown b/website/docs/r/volumes_link.html.markdown
index d2543e435..5deacbc60 100644
--- a/website/docs/r/volumes_link.html.markdown
+++ b/website/docs/r/volumes_link.html.markdown
@@ -10,7 +10,7 @@ description: |-
Manages a volume link.
For more information on this resource, see the [User Guide](https://wiki.outscale.net/display/EN/About+Volumes).
-For more information on this resource actions, see the [API documentation](https://docs-beta.outscale.com/#linkvolume).
+For more information on this resource's actions, see the [API documentation](https://docs.outscale.com/api#linkvolume).
## Example Usage