diff --git a/.Makefile.swp b/.Makefile.swp new file mode 100644 index 0000000..5ee136a Binary files /dev/null and b/.Makefile.swp differ diff --git a/.gitignore b/.gitignore index f2ba980..f13c795 100644 --- a/.gitignore +++ b/.gitignore @@ -21,8 +21,5 @@ perun # Local perun configuration: .perun -# This is a temporary file which is being created when FixFunctions is called from intrinsicsolver package. -.preprocessed.yml - # Internal Visual Studio Code config .vscode diff --git a/.travis.yml b/.travis.yml index 2c1a84b..e3bfae0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,9 +1,18 @@ language: go +sudo: required + go: - 1.9.x -install: make config-install get-deps +env: + global: + - secure: "roiMALXQRkKKlPP+cbtac795XeHHnPrKvc6M8HjVa5gzL1SZXHDn/zvcvTc//AAMBFxZXb9+V+USrU3vNyzULIoh5j7nTOutPtik4WYLIfwADVgSxpiEG9jDBYp3DLlXMAMb5uJWRuG4DSSxpUGPFujQr9+aaUa0Lz6tsPUpE2/yaQfR5GmrAVteevkEZ0h18BxiRKsf4cRIGEQpwOSAQ+x66MlaEAn/kkprS4K9imniHeDRnz4TM75SAI7VZh9Fglv6gf8HTZ73EV1sGzsbG8+k1eO53UxMYR7YneB86ihKjRFuXSXzT8K0RdoACY6Muf81kwP5JUP3SLMaNgRTrdGmzOZ+yhJzfuey0b7i2llvGHrfKCmEIUzTYC5KZgvwblqwU97u55baJpAwQJgwPOKBO2RUvyzdK00t39kj+PpGrmyrW9evQ7mGk7o7txn4K1LJfCTn/fll1Y3B1poyiflYfQ4vlj7RgKSwAgIs36+yPhYF/m1K9I+jy8BRHPCZek3JJIMnKaW3OEBzpCRBoxxmMFomN8tq2Ygde5TUVxukAbZNIrlaPPWB5ZB6ZQ1hKol0yddkqOKxivGPQJaZthlt7eHpx4rAG+ZY1e706pG3iqYcTEVJTjPPzCaKdSpbFg3oKGet3ClimF0ycNpJRFQs2eveI3AkhVCN2Z4IWX8=" #Github + +install: + - 'mkdir -p $HOME/.config/perun' + - 'cp defaults/main.yaml $HOME/.config/perun/main.yaml' + - make get-deps script: - make code-analysis @@ -13,12 +22,8 @@ before_deploy: - 'mkdir -p release' - 'GOOS=linux GOARCH=amd64 go build -o release/perun-linux-amd64' - 'GOOS=darwin GOARCH=amd64 go build -o release/perun-darwin-amd64' - - 'GOOS=windows GOARCH=amd64 go build -o release/perun-windows-amd64.exe' - - 'GOOS=windows GOARCH=386 go build -o release/perun-windows-386.exe' - 'tar -C release -czf release/perun-linux-amd64.tar.gz perun-linux-amd64' - 'tar -C release -czf release/perun-darwin-amd64.tar.gz perun-darwin-amd64' - - 'tar -C release -czf release/perun-windows-amd64.tar.gz perun-windows-amd64.exe' - - 'tar -C release -czf release/perun-windows-386.tar.gz perun-windows-386.exe' deploy: provider: releases @@ -27,8 +32,10 @@ deploy: file: - release/perun-linux-amd64.tar.gz - release/perun-darwin-amd64.tar.gz - - release/perun-windows-amd64.tar.gz - - release/perun-windows-386.tar.gz skip_cleanup: true on: tags: true + +after_deploy: + - chmod +x release.sh + - ./release.sh $TRAVIS_TAG $Github \ No newline at end of file diff --git a/LICENSE b/LICENSE index a00a133..59bdc2d 100644 --- a/LICENSE +++ b/LICENSE @@ -186,14 +186,22 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2017 Appliscale + Copyright 2018 Appliscale Maintainers and Contributors: - Piotr Figwer (piotr.figwer@appliscale.io) - - Wojciech Gawroński (wojciech.gawronski@appliscale.io) - Kacper Patro (kacper.patro@appliscale.io) - + - Maksymilian Wojczuk (max.wojczuk@appliscale.io) + - Sylwia Gargula (sylwia.gargula@appliscale.io) + - Mateusz Piwowarczyk (mateusz.piwowarczyk@appliscale.io) + - Jakub Lamparski (jakub.lamparski@appliscale.io) + - Michał Połcik (michal.polcik@appliscale.io) + - Tomasz Raus (tomasz.raus@appliscale.io) + - Aleksander Mamla + - Paweł Pikuła + - Wojciech Gawroński + Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at diff --git a/Makefile b/Makefile index 95b8bf9..7a0118e 100644 --- a/Makefile +++ b/Makefile @@ -1,11 +1,7 @@ -.PHONY: config-install get-deps code-analysis test all +.PHONY: get-deps code-analysis test all all: get-deps code-analysis test -config-install: - mkdir -p "$(HOME)/.config/perun" - cp defaults/main.yaml "$(HOME)/.config/perun/main.yaml" - get-deps: go get -t -v ./... go install ./... @@ -15,5 +11,13 @@ get-deps: code-analysis: get-deps go vet -v ./... -test: get-deps - go test -v -cover ./... +test: get-deps create-mocks + go test -cover ./... + +create-mocks: get-mockgen + `go env GOPATH`/bin/mockgen -source=./awsapi/cloudformation.go -destination=./stack/stack_mocks/mock_aws_api.go -package=stack_mocks CloudFormationAPI + `go env GOPATH`/bin/mockgen -source=./logger/logger.go -destination=./checkingrequiredfiles/mocks/mock_logger.go -package=mocks LoggerInt + +get-mockgen: + go get github.com/golang/mock/gomock + go install github.com/golang/mock/mockgen diff --git a/README.md b/README.md index 6f0e048..b76c1a6 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,57 @@ -# Perun [![Build Status](https://travis-ci.org/Appliscale/perun.svg?branch=master)](https://travis-ci.org/Appliscale/perun) [![GoDoc](https://godoc.org/github.com/Appliscale/perun?status.svg)](https://godoc.org/github.com/Appliscale/perun) +# Perun [![Release](https://img.shields.io/github/release/Appliscale/perun.svg?style=flat-square)](https://github.com/Appliscale/perun/releases/latest) [![Build_Status](https://travis-ci.org/Appliscale/perun.svg?branch=master)](https://travis-ci.org/Appliscale/perun) [![License](https://img.shields.io/badge/License-Apache%202.0-orange.svg)](https://github.com/Appliscale/perun/blob/master/LICENSE.md) [![Go_Report_Card](https://goreportcard.com/badge/github.com/Appliscale/perun?style=flat-square&fuckgithubcache=1)](https://goreportcard.com/report/github.com/Appliscale/perun) [![GoDoc](https://godoc.org/github.com/Appliscale/perun?status.svg)](https://godoc.org/github.com/Appliscale/perun) -A swiss army knife for *AWS CloudFormation* templates - validation, conversion, generators and other various stuff. +

+Perun logo
+

-## Goal
-Perun was created to support work with CloudFormation templates. CloudFormation works in a way that it runs template online in AWS infrastructure and fails after first error - in many cases it is related with particular name length (e.g. maximum length is 64 characters). Instead of doing a round-trip, we would like to detect such cases locally.
+## Goal
+Perun was created to improve the work experience with CloudFormation. The idea came from the team constantly using AWS CloudFormation - it runs a template online in AWS infrastructure and fails after the first error - which in many cases is trivial (e.g. the maximum name length is 64 characters). Instead of doing a round-trip, we would like to detect such cases locally.

## Building and Installation

### OSX
+#### Homebrew:
+```bash
+$ brew install Appliscale/tap/perun
+```
+#### From binaries:
+* Go to Perun’s releases https://github.com/Appliscale/perun/releases
+* Find and download perun-darwin-amd64.tar.gz
+* Unpack the archive
+
+### Debian
+#### Dpkg package manager:
+* Go to https://github.com/Appliscale/perun-dpkg
+* Download perun.deb
+* Install:
+```bash
+$ dpkg -i perun.deb
+```
+#### From binaries:
+* Go to Perun’s releases https://github.com/Appliscale/perun/releases
+* Find and download perun-linux-amd64.tar.gz
+* Unpack:
+```bash
+$ tar xvzf perun-linux-amd64.tar.gz
+```
-If you are using *homebrew* just run:
+### Linux
+#### Rpm package manager:
+* Go to: https://github.com/Appliscale/rpmbuild/tree/master/RPMS/x86_64
+* Download perun-linux-amd64-1.2.0-1.x86_64.rpm
+* Install:
+```bash
+$ rpm -ivh perun-linux-amd64-1.2.0-1.x86_64.rpm
+```
+#### From binaries:
+* Go to Perun’s releases https://github.com/Appliscale/perun/releases
+* Find and download perun-linux-amd64.tar.gz
+* Unpack:
```bash
-brew install Appliscale/tap/perun
+tar xvzf perun-linux-amd64.tar.gz
```

### Building from sources
@@ -28,57 +66,158 @@ $GOPATH $ cd perun
Then build and install configuration for the application inside perun directory by executing:
```bash
-perun $ make config-install
-perun $ make all
+perun $ make
```
-With first command a default configuration file (`defaults/main.yaml`) will be copied to your home directory under the `~/.config/perun/main.yaml` path. After second command application will be compiled as a `perun` binary inside `bin` directory in your `$GOPATH/perun` workspace.
+After this, the application will be compiled as a `perun` binary inside the `bin` directory in your `$GOPATH/perun` workspace.
+

## Working with Perun

### Commands

-To validate your template with AWS API (*online validation*), just type:
+#### Validation
+To validate your template, just type:
```bash
~ $ perun validate <path to template>
```
+Your template will then be validated using both our validation mechanism and the AWS API
+(*aws validation*).

-To validate your template offline (*well*, almost offline :wink: - *AWS CloudFormation Resource Specification* still needs to be downloaded for a fresh installation) use `validate_offline` mode:
+#### Configuration
+To create your own configuration file, use `configure` mode:
```bash
-~ $ perun validate_offline
+~ $ perun configure
```
-To convert your template between JSON and YAML formats you have to type:
+Then type the path and name of the new configuration file.
+
+#### Stack Parameters
+Bored of writing JSON parameter files? Perun allows you to interactively create a parameters file
+for a given template.
+You can either pass the parameters interactively or as a command-line argument.
+##### Command Line Parameter way:
```bash
-~ $ perun convert
-
-
-
+~ $ perun create-parameters <path to template> <output file> --parameter=MyParameter1:<value>
```
-To create your own configuration file use `configure` mode:
-```bash
-~ $ perun configure
+The greatest thing is that you can mix those in any way you want. Perun will validate the
+given parameters from the command line. If everything is OK, it will just create the parameters file.
+If anything is missing or invalid, it will let you know and ask for it interactively.
+
+#### Working with stacks
+
+Perun allows you to create and destroy stacks.
+
+CloudFormation templates can be in JSON or YAML format.
+
+Example JSON template which describes an S3 Bucket:
+
+```json
+{
+  "Resources" : {
+    "HelloPerun" : {
+      "Type" : "AWS::S3::Bucket"
+    }
+  }
+}
```
-Then type path and name of new configuration file.
+
+Before you create a stack, Perun will validate it by default :wink:. You can disable this with the flag `--no-validate`. To create a new stack you have to type:
-``~ $ perun --mode=create-stack
- --template=
- --stack=
-``
+```bash
+~ $ perun create-stack <stack name> <path to template>
+```
To destroy a stack just type:
-``~ $ perun --mode=delete-stack
- --stack=
-``
+```bash
+~ $ perun delete-stack <stack name>
+```
+
+You can use the option ``--progress`` to show the stack creation/deletion progress in the console, but
+note that this requires setting up a remote sink.
+
+##### Remote sink
+
+To set up the remote sink, type:
+
+```bash
+~ $ perun setup-remote-sink
+```
+
+This will create an SNS topic and an SQS queue with permissions for the SNS topic to publish to the SQS
+queue. Using the above services may produce some cost.
+According to the AWS SQS and SNS pricing:
+
+- SNS:
+  - notifications to the SQS queue are free
+- SQS:
+  - The first 1 million monthly requests are free.
+  - After that: $0.40 per million requests after the Free Tier (monthly)
+  - Typical stack creation uses around a hundred requests
+
+More information about pricing can be found [here](https://aws.amazon.com/sqs/pricing/).
+
+To destroy the remote sink just type:
+
+```bash
+~ $ perun destroy-remote-sink
+```
+
+#### Cost estimation
+
+```bash
+~ $ perun estimate-cost <path to template>
+```
+To estimate a template's cost, run the command above with the path to the file. Perun resolves the parameters located in the template and checks whether it is correct. Then you get a URL to the Simple Monthly Calculator, which will be filled with data from the template.
+
+#### Protecting Stack
+
+You can protect your stack by using a Stack Policy file. It's a JSON file where you describe which actions are allowed or denied. This example allows all Update actions.
+
+```json
+{
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Principal": "*",
+      "Action": "Update:*",
+      "Resource": "*"
+    }
+  ]
+}
+```
+
+To apply your Stack Policy file you have to type:
+
+```bash
+~ $ perun set-stack-policy <stack name> <path to stack policy file>
+```
+
+Perun has some default flags:
+
+- ``--block`` - Block all Update actions in the stack.
+
+- ``--unblock`` - Unblock all Update actions in the stack.
+
+- ``--disable-stack-termination`` - Protect the stack from being deleted.
+
+- ``--enable-stack-termination`` - Allow the stack to be destroyed.
+
+You use a flag instead of a template:
+
+```bash
+~ $ perun set-stack-policy <stack name> --block
+```
+
+### Configuration files
-### Configuration file
+Perun will help you set up all the needed configuration files on your first run - no previous setup required. You can find an example configuration file in the main directory of the repository, in the file `defaults/main.yaml`.
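+A minimal `main.yaml` might look like the sketch below. The keys and their default values are taken from the parameter list that follows; the `SpecificationURL` value shown here is only an illustrative placeholder, not a required setting:
+
+```yaml
+# ~/.config/perun/main.yaml - illustrative values only
+DefaultProfile: default
+DefaultRegion: us-east-1
+DefaultDurationForMFA: 3600
+DefaultDecisionForMFA: false
+DefaultVerbosity: INFO
+DefaultTemporaryFilesDirectory: .
+SpecificationURL: https://d1uauaxba7bl26.cloudfront.net
+```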
@@ -97,14 +236,18 @@ SpecificationURL: ...
```
-There are two optional parameters:
+There are 6 other parameters:
-* `Profile` (`default` taken by default, when no value found inside configuration files).
-* `Region` (`us-east-1` taken by default, when no value found inside configuration files).
+* `DefaultProfile` (`default` taken by default, when no value found inside configuration files).
+* `DefaultRegion` (`us-east-1` taken by default, when no value found inside configuration files).
+* `DefaultDurationForMFA`: (`3600` taken by default, when no value found inside configuration files).
+* `DefaultDecisionForMFA`: (`false` taken by default, when no value found inside configuration files).
+* `DefaultVerbosity`: (`INFO` taken by default, when no value found inside configuration files).
+* `DefaultTemporaryFilesDirectory`: (`.` taken by default, when no value found inside configuration files).

### Supporting MFA

-If you account is using *MFA* (which we strongly recommend to enable) you should add `--mfa` flag to the each executed command.
+If your account is using *MFA* (which we strongly recommend enabling) you should add the `--mfa` flag to each executed command or set `DefaultDecisionForMFA` to `true` in the configuration file.

```bash
~ $ perun validate --mfa
```
@@ -121,26 +264,34 @@ aws_secret_access_key =
mfa_serial =
```
-### Working with stacks
+You do not need to use Perun only for validation; you can also use it just to obtain security credentials and use them in the AWS CLI. To do this, type:
-Perun allows to create and destroy stacks.
+```bash
+~ $ perun mfa
+```
-To create stack it uses your template. It can be JSON or YAML format.
+### Capabilities
-Example JSON template which describe S3 Bucket:
+If your template includes resources that can affect permissions in your AWS account,
+you must explicitly acknowledge its capabilities by adding the `--capabilities=CAPABILITY` flag.
-```ini
-{
- "Resources" : {
- "HelloPerun" : {
- "Type" : "AWS::S3::Bucket"
- }
- }
-}
-```
+Valid values are `CAPABILITY_IAM` and `CAPABILITY_NAMED_IAM`.
+You can specify both of them by adding `--capabilities=CAPABILITY_IAM --capabilities=CAPABILITY_NAMED_IAM`.
+
+### Inconsistencies between official documentation and Resource Specification
+
+Perun uses the Resource Specification provided by AWS to determine, for example, whether fields are required. Unfortunately, during development we found inconsistencies between the documentation and the Resource Specification. These variances gave rise to a mechanism that allows patching such exceptions in place via configuration. In short, an inconsistency is a difference between the information we get from these two sources.
+
+To specify inconsistencies, edit the `~/.config/perun/specification_inconsistency.yaml` file.
+
+Example configuration file:
-If you want to destroy stack just type its name.
-Before you create stack you should validate it with perun :wink:.
+```yaml
+  SpecificationInconsistency:
+    AWS::CloudFront::Distribution.DistributionConfig:
+      DefaultCacheBehavior:
+        - Required
+```

## License

@@ -148,15 +299,17 @@ Before you create stack you should validate it with perun :wink:.
## Maintainers +- [Maksymilian Wojczuk](https://github.com/maxiwoj) - [Piotr Figwer](https://github.com/pfigwer) - [Sylwia Gargula](https://github.com/SylwiaGargula) -- [Wojciech Gawroński](https://github.com/afronski) -- [Jakub Lamparski](https://github.com/jlampar) +- [Mateusz Piwowarczyk](https://github.com/piwowarc) ## Contributors +- [Wojciech Gawroński](https://github.com/afronski) (originator) +- [Jakub Lamparski](https://github.com/jlampar) - [Aleksander Mamla](https://github.com/amamla) - [Kacper Patro](https://github.com/morfeush22) - [Paweł Pikuła](https://github.com/ppikula) - [Michał Połcik](https://github.com/mwpolcik) -- [Maksymilian Wojczuk](https://github.com/maxiwoj) +- [Tomasz Raus](https://github.com/rusty-2) diff --git a/awsapi/cloudformation.go b/awsapi/cloudformation.go new file mode 100644 index 0000000..e1c219b --- /dev/null +++ b/awsapi/cloudformation.go @@ -0,0 +1,110 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package awsapi contains interface with all functions which use AWS CloudFormation API. +package awsapi + +import ( + "github.com/aws/aws-sdk-go/service/cloudformation" +) + +// CloudFormationAPI collects all functions which using Cloudformation, e.g creating stack. +type CloudFormationAPI interface { + CreateStack(input *cloudformation.CreateStackInput) (*cloudformation.CreateStackOutput, error) + DeleteStack(input *cloudformation.DeleteStackInput) (*cloudformation.DeleteStackOutput, error) + UpdateStack(input *cloudformation.UpdateStackInput) (*cloudformation.UpdateStackOutput, error) + SetStackPolicy(input *cloudformation.SetStackPolicyInput) (*cloudformation.SetStackPolicyOutput, error) + UpdateTerminationProtection(input *cloudformation.UpdateTerminationProtectionInput) (*cloudformation.UpdateTerminationProtectionOutput, error) + + EstimateTemplateCost(input *cloudformation.EstimateTemplateCostInput) (*cloudformation.EstimateTemplateCostOutput, error) + ValidateTemplate(input *cloudformation.ValidateTemplateInput) (*cloudformation.ValidateTemplateOutput, error) + + CreateChangeSet(input *cloudformation.CreateChangeSetInput) (*cloudformation.CreateChangeSetOutput, error) + DescribeChangeSet(input *cloudformation.DescribeChangeSetInput) (*cloudformation.DescribeChangeSetOutput, error) + WaitUntilChangeSetCreateComplete(input *cloudformation.DescribeChangeSetInput) error + DeleteChangeSet(input *cloudformation.DeleteChangeSetInput) (*cloudformation.DeleteChangeSetOutput, error) +} + +// AWSCloudFormationAPI implements CLoudFormationAPI. +type AWSCloudFormationAPI struct { + api *cloudformation.CloudFormation +} + +// NewAWSCloudFormation creates CloudFormationAPI. +func NewAWSCloudFormation(api *cloudformation.CloudFormation) CloudFormationAPI { + awsCloudFormationAPI := AWSCloudFormationAPI{ + api: api, + } + return &awsCloudFormationAPI +} + +// CreateStack creates stack based on user template. 
+func (cf *AWSCloudFormationAPI) CreateStack(input *cloudformation.CreateStackInput) (*cloudformation.CreateStackOutput, error) { + return cf.api.CreateStack(input) +} + +// DeleteStack destroys template based on stack name. +func (cf *AWSCloudFormationAPI) DeleteStack(input *cloudformation.DeleteStackInput) (*cloudformation.DeleteStackOutput, error) { + return cf.api.DeleteStack(input) +} + +// UpdateStack updates stack template. +func (cf *AWSCloudFormationAPI) UpdateStack(input *cloudformation.UpdateStackInput) (*cloudformation.UpdateStackOutput, error) { + return cf.api.UpdateStack(input) +} + +// SetStackPolicy sets policy based on template or flag. +func (cf *AWSCloudFormationAPI) SetStackPolicy(input *cloudformation.SetStackPolicyInput) (*cloudformation.SetStackPolicyOutput, error) { + return cf.api.SetStackPolicy(input) +} + +// UpdateTerminationProtection allows to change stack protection from termination. +func (cf *AWSCloudFormationAPI) UpdateTerminationProtection(input *cloudformation.UpdateTerminationProtectionInput) (*cloudformation.UpdateTerminationProtectionOutput, error) { + return cf.api.UpdateTerminationProtection(input) +} + +// EstimateTemplateCost shows stack cost. +func (cf *AWSCloudFormationAPI) EstimateTemplateCost(input *cloudformation.EstimateTemplateCostInput) (*cloudformation.EstimateTemplateCostOutput, error) { + return cf.api.EstimateTemplateCost(input) +} + +// ValidateTemplate checks template correctness. +func (cf *AWSCloudFormationAPI) ValidateTemplate(input *cloudformation.ValidateTemplateInput) (*cloudformation.ValidateTemplateOutput, error) { + return cf.api.ValidateTemplate(input) +} + +// CreateChangeSet creates ChangeSet. +func (cf *AWSCloudFormationAPI) CreateChangeSet(input *cloudformation.CreateChangeSetInput) (*cloudformation.CreateChangeSetOutput, error) { + return cf.api.CreateChangeSet(input) +} + +// DescribeChangeSet returns the inputs and a list of changes. +func (cf *AWSCloudFormationAPI) DescribeChangeSet(input *cloudformation.DescribeChangeSetInput) (*cloudformation.DescribeChangeSetOutput, error) { + return cf.api.DescribeChangeSet(input) +} + +// WaitUntilChangeSetCreateComplete uses the AWS CloudFormation API operation +// DescribeChangeSet to wait for a condition to be met before returning. +// If the condition is not met within the max attempt window, an error will +// be returned. +func (cf *AWSCloudFormationAPI) WaitUntilChangeSetCreateComplete(input *cloudformation.DescribeChangeSetInput) error { + return cf.api.WaitUntilChangeSetCreateComplete(input) +} + +// DeleteChangeSet removes ChangeSet. +func (cf *AWSCloudFormationAPI) DeleteChangeSet(input *cloudformation.DeleteChangeSetInput) (*cloudformation.DeleteChangeSetOutput, error) { + return cf.api.DeleteChangeSet(input) +} diff --git a/checkingrequiredfiles/checkingrequiredfiles.go b/checkingrequiredfiles/checkingrequiredfiles.go new file mode 100644 index 0000000..195c72e --- /dev/null +++ b/checkingrequiredfiles/checkingrequiredfiles.go @@ -0,0 +1,301 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package checkingrequiredfiles checks if .aws/config, .aws/credentials main.yaml and other configuration files exist. +package checkingrequiredfiles + +import ( + "bufio" + "github.com/Appliscale/perun/cliparser" + "github.com/Appliscale/perun/configuration" + "github.com/Appliscale/perun/configurator" + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/helpers" + "github.com/Appliscale/perun/logger" + "github.com/Appliscale/perun/myuser" + "github.com/go-ini/ini" + "io" + "net/http" + "os" + "strings" +) + +//CheckingRequiredFiles looks for required and default files and if doesn't find will create these. +func CheckingRequiredFiles(ctx *context.Context) { + myLogger := logger.CreateDefaultLogger() + homePath, pathError := myuser.GetUserHomeDir() + if pathError != nil { + myLogger.Error(pathError.Error()) + } + mainYAMLexists, mainError := isMainYAMLPresent(&myLogger) + if mainError != nil { + myLogger.Error(mainError.Error()) + } + + downloadError := downloadDefaultFiles() + if downloadError != nil { + myLogger.Error(downloadError.Error()) + } + //Checking if perun is running on EC2. + _, isRunningOnEc2, _ := getRegion() + if isRunningOnEc2 { + if !mainYAMLexists { + profile, region, err := workingOnEC2(&myLogger) + if err == nil { + *ctx = createEC2context(profile, homePath, region, ctx, &myLogger) + } else { + myLogger.Error(err.Error()) + } + } + downloadError := downloadDefaultFiles() + if downloadError != nil { + myLogger.Error(downloadError.Error()) + } + return + } + //Checking if Mode is "offline" or "online". + if isOffline() { + *ctx = initOffline(mainYAMLexists, homePath, ctx, &myLogger) + return + } + + configAWSExists, configError := isAWSConfigPresent(&myLogger) + if configError != nil { + myLogger.Error(configError.Error()) + } + + profile := "default" + region := "us-east-1" + + if !mainYAMLexists { + profile, *ctx = configIsPresent(profile, homePath, ctx, &myLogger) + } else { + profile = ctx.Config.DefaultProfile + region = ctx.Config.DefaultRegion + } + if configAWSExists { + createCredentials(profile, homePath, ctx, &myLogger) + } else { //configAWSExists == false + var answer string + myLogger.GetInput("Config doesn't exist, create default - "+profile+" *Y* or new *N*?", &answer) + if strings.ToUpper(answer) == "N" { + profile, region, *ctx = newConfigFile(profile, region, homePath, ctx, &myLogger) + addProfileToCredentials(profile, homePath, ctx, &myLogger) + } + if strings.ToUpper(answer) == "Y" { + configurator.CreateAWSConfigFile(&myLogger, profile, region) + if !isProfileInCredentials(profile, homePath+"/.aws/credentials", &myLogger) { + configurator.CreateAWSCredentialsFile(ctx, profile) + } + if !mainYAMLexists { + *ctx = createNewMainYaml(profile, homePath, ctx, &myLogger) + } + } + addNewProfileFromCredentialsToConfig(profile, homePath, ctx, &myLogger) + } +} + +// Checking if Mode is "online" - needs config and credentials files or "offline" - needs only main.yaml. 
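+// The offline modes are create-parameters, lint and configure.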
+func isOffline() bool { + args, _ := cliparser.ParseCliArguments(os.Args) + offline := [3]string{cliparser.CreateParametersMode, cliparser.LintMode, cliparser.ConfigureMode} + for _, off := range offline { + if *args.Mode == off { + return true + } + } + return false +} + +// Creating only main.yaml and context if Mode is "offline". +func initOffline(mainYAMLexists bool, homePath string, ctx *context.Context, myLogger logger.LoggerInt) context.Context { + if !mainYAMLexists { + var profile string + var region string + var answer string + myLogger.GetInput("You haven't got main.yaml, create default *Y* or new *N*", &answer) + if strings.ToUpper(answer) == "Y" { + profile = "default" + region = "us-east-1" + } else if strings.ToUpper(answer) == "N" { + myLogger.GetInput("Profile", &profile) + myLogger.GetInput("Region", ®ion) + } + con := configurator.CreateMainYaml(myLogger, profile, region) + configuration.SaveToFile(con, homePath+"/.config/perun/main.yaml", myLogger) + var err error + *ctx, err = context.GetContext(cliparser.ParseCliArguments, configuration.GetConfiguration, configuration.ReadInconsistencyConfiguration) + if err != nil { + myLogger.Error(err.Error()) + } + } + return *ctx +} + +// Looking for main.yaml. +func isMainYAMLPresent(myLogger *logger.Logger) (bool, error) { + homePath, pathError := myuser.GetUserHomeDir() + if pathError != nil { + myLogger.Error(pathError.Error()) + return false, pathError + } + _, mainError := os.Open(homePath + "/.config/perun/main.yaml") + if mainError != nil { + _, mainError = os.Open(homePath + "/etc/perun/main.yaml") + if mainError != nil { + return false, pathError + } + return true, pathError + } + return true, pathError +} + +// Looking for .aws/config. +func isAWSConfigPresent(myLogger *logger.Logger) (bool, error) { + homePath, pathError := myuser.GetUserHomeDir() + if pathError != nil { + myLogger.Error(pathError.Error()) + return false, pathError + } + _, credentialsError := os.Open(homePath + "/.aws/config") + if credentialsError != nil { + return false, credentialsError + } + return true, nil + +} + +// Looking for [profiles] in credentials or config and return all. +func getProfilesFromFile(path string) []string { + credentials, credentialsError := os.Open(path) + if credentialsError != nil { + return []string{} + } + defer credentials.Close() + profiles := make([]string, 0) + scanner := bufio.NewScanner(credentials) + for scanner.Scan() { + if strings.Contains(scanner.Text(), "[") { + profile := strings.TrimPrefix(scanner.Text(), "[") + profile = strings.TrimSuffix(profile, "]") + if strings.Contains(profile, "profile ") { + profile = strings.TrimPrefix(profile, "profile ") + } + if strings.Contains(profile, "-long-term") { + profile = strings.TrimSuffix(profile, "-long-term") + } + profiles = append(profiles, profile) + } + } + return profiles +} + +// Looking for user's profile in credentials or config. +func isProfileInCredentials(profile string, path string, mylogger logger.LoggerInt) bool { + credentials, _ := os.Open(path) + defer credentials.Close() + scanner := bufio.NewScanner(credentials) + for scanner.Scan() { + if strings.Contains(scanner.Text(), "["+profile+"]") || strings.Contains(scanner.Text(), "["+profile+"-long-term]") { + return true + } + } + return false + +} + +// Looking for region for profile. 
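+// Handles both `[name]` and `[profile name]` section headers and returns an empty string when no matching section is found.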
+func findRegionForProfile(profile string, path string, mylogger logger.LoggerInt) string { + configuration, loadError := ini.Load(path) + if loadError != nil { + mylogger.Error(loadError.Error()) + } + section, sectionError := configuration.GetSection(profile) + if sectionError != nil { + section, sectionError = configuration.GetSection("profile " + profile) + if sectionError != nil { + mylogger.Error(sectionError.Error()) + return "" + } + } + region := section.Key("region").Value() + return region + +} + +// Getting profiles from credentials and config, if credentials has new profile, add to config. +func findNewProfileInCredentials(credentials []string, config []string) []string { + profiles := make([]string, 0) + for i, cred := range credentials { + if strings.Contains(cred, "-long-term") { + cred = strings.TrimSuffix(cred, "-long-term") + credentials[i] = cred + } + } + for _, cred := range credentials { + isProfileHere := helpers.SliceContains(config, cred) + if !isProfileHere { + profiles = append(profiles, cred) + return profiles + } + } + return []string{} +} + +// Downloading other files. +func downloadDefaultFiles() error { + urls := make(map[string]string) + urls["blocked.json"] = "https://s3.amazonaws.com/perun-default-file/blocked.json" + urls["unblocked.json"] = "https://s3.amazonaws.com/perun-default-file/unblocked.json" + urls["style.yaml"] = "https://s3.amazonaws.com/perun-default-file/style.yaml" + urls["specification_inconsistency.yaml"] = "https://s3.amazonaws.com/perun-default-file/specification_inconsistency.yaml" + + for file, url := range urls { + homePath, _ := myuser.GetUserHomeDir() + homePath += "/.config/perun/" + + if strings.Contains(file, "blocked") { + homePath += "stack-policies/" + } + + _, err := os.Stat(homePath) + if os.IsNotExist(err) { + os.MkdirAll(homePath, 0755) + } + + _, openError := os.Open(homePath + file) //checking if file exists + if openError != nil { + out, creatingFileError := os.Create(homePath + file) + + if creatingFileError != nil { + return creatingFileError + } + defer out.Close() + + resp, httpGetError := http.Get(url) + if httpGetError != nil { + return httpGetError + } + defer resp.Body.Close() + + _, copyError := io.Copy(out, resp.Body) + if copyError != nil { + return copyError + } + } + } + return nil +} diff --git a/checkingrequiredfiles/checkingrequiredfiles_test.go b/checkingrequiredfiles/checkingrequiredfiles_test.go new file mode 100644 index 0000000..a2a90aa --- /dev/null +++ b/checkingrequiredfiles/checkingrequiredfiles_test.go @@ -0,0 +1,31 @@ +package checkingrequiredfiles + +import ( + "github.com/Appliscale/perun/logger" + "github.com/stretchr/testify/assert" + "testing" +) + +var sink logger.Logger + +func TestGetProfilesFromFile(t *testing.T) { + profiles := getProfilesFromFile("test_resources/.aws/config") + assert.NotNilf(t, profiles, "Profiles are nil") +} + +func TestIsProfileInCredentials(t *testing.T) { + answer := isProfileInCredentials("default", "test_resources/.aws/credentials", &sink) + assert.Truef(t, answer, "This profile isn't in credentials") +} + +func TestFindRegionForProfile(t *testing.T) { + region := findRegionForProfile("default", "test_resources/.aws/config", &sink) + assert.NotNilf(t, region, "Region is nil") +} + +func TestFindNewProfileInCredentials(t *testing.T) { + credentials := []string{"default", "test"} + config := []string{"default"} + profiles := findNewProfileInCredentials(credentials, config) + assert.NotNilf(t, profiles, "Profiles are nil") +} diff --git 
a/checkingrequiredfiles/creatingfiles.go b/checkingrequiredfiles/creatingfiles.go new file mode 100644 index 0000000..90616ba --- /dev/null +++ b/checkingrequiredfiles/creatingfiles.go @@ -0,0 +1,186 @@ +package checkingrequiredfiles + +import ( + "github.com/Appliscale/perun/cliparser" + "github.com/Appliscale/perun/configuration" + "github.com/Appliscale/perun/configurator" + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/helpers" + "github.com/Appliscale/perun/logger" + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/awserr" + "github.com/aws/aws-sdk-go/aws/ec2metadata" + "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/ec2" + "os" + "strings" +) + +// Creating main.yaml. +func createNewMainYaml(profile string, homePath string, ctx *context.Context, myLogger logger.LoggerInt) context.Context { + region := findRegionForProfile(profile, homePath+"/.aws/config", myLogger) + con := configurator.CreateMainYaml(myLogger, profile, region) + configuration.SaveToFile(con, homePath+"/.config/perun/main.yaml", myLogger) + *ctx, _ = context.GetContext(cliparser.ParseCliArguments, configuration.GetConfiguration, configuration.ReadInconsistencyConfiguration) + return *ctx +} + +// If config exists, use profile from .aws/config. +func useProfileFromConfig(profilesInConfig []string, profile string, myLogger logger.LoggerInt) string { + myLogger.Always("Available profiles from config:") + for _, prof := range profilesInConfig { + myLogger.Always(prof) + } + myLogger.GetInput("Which profile should perun use as a default?", &profile) + isUserProfile := helpers.SliceContains(profilesInConfig, profile) + for !isUserProfile { + myLogger.GetInput("I cannot find this profile, try again", &profile) + isUserProfile = helpers.SliceContains(profilesInConfig, profile) + } + return profile +} + +// If profile exists in .aws/credentials, but not in aws/config, add profile. +func addNewProfileFromCredentialsToConfig(profile string, homePath string, ctx *context.Context, myLogger logger.LoggerInt) { + profilesInCredentials := getProfilesFromFile(homePath + "/.aws/credentials") + profilesInConfig := getProfilesFromFile(homePath + "/.aws/config") + profiles := findNewProfileInCredentials(profilesInCredentials, profilesInConfig) + if len(profiles) > 0 { + for _, prof := range profiles { + var answer string + myLogger.GetInput("I found profile "+prof+" in credentials, but not in config. \nCreate new profile in config? Y/N", &answer) + if strings.ToUpper(answer) == "Y" { + configurator.ShowRegions(myLogger) + region, _ := configurator.SetRegions(myLogger) + configurator.CreateAWSConfigFile(ctx.Logger, prof, region) + } + } + } +} + +// Checking if profile is in .aws/credentials. +func addProfileToCredentials(profile string, homePath string, ctx *context.Context, myLogger logger.LoggerInt) { + profilesInCredentials := getProfilesFromFile(homePath + "/.aws/credentials") + temp := helpers.SliceContains(profilesInCredentials, profile) + if !temp { + configurator.CreateAWSCredentialsFile(ctx, profile) + } else { + myLogger.Always("Profile " + profile + " has already credentials") + } +} + +// Creating main.yaml based on .aws/config or in configure mode. 
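+// Returns the selected profile and the updated context.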
+func configIsPresent(profile string, homePath string, ctx *context.Context, myLogger logger.LoggerInt) (string, context.Context) { + profilesInConfig := getProfilesFromFile(homePath + "/.aws/config") + isDefaultProfile := helpers.SliceContains(profilesInConfig, profile) + if isDefaultProfile { + var answer string + myLogger.GetInput("Default profile exists, do you want to use it *Y* or create your own *N*?", &answer) + if strings.ToUpper(answer) == "Y" { + *ctx = createNewMainYaml(profile, homePath, ctx, myLogger) + } else if strings.ToUpper(answer) == "N" { + configurator.CreateRequiredFilesInConfigureMode(ctx) + + } + } else { // isDefaultProfile == false + profile = useProfileFromConfig(profilesInConfig, profile, myLogger) + *ctx = createNewMainYaml(profile, homePath, ctx, myLogger) + } + + return profile, *ctx +} + +// Creating new .aws/config and main.yaml for profile. +func newConfigFile(profile string, region string, homePath string, ctx *context.Context, myLogger *logger.Logger) (string, string, context.Context) { + profile, region = configurator.GetRegionAndProfile(myLogger) + configurator.CreateAWSConfigFile(myLogger, profile, region) + *ctx = createNewMainYaml(profile, homePath, ctx, myLogger) + return profile, region, *ctx +} + +// Creating credentials for all present profiles. +func createCredentials(profile string, homePath string, ctx *context.Context, myLogger logger.LoggerInt) { + isProfileInPresent := isProfileInCredentials(profile, homePath+"/.aws/credentials", myLogger) + if !isProfileInPresent { + var answer string + myLogger.GetInput("I found profile "+profile+" in .aws/config without credentials, add? Y/N", &answer) + if strings.ToUpper(answer) == "Y" { + configurator.CreateAWSCredentialsFile(ctx, profile) + } + } +} + +func getIamInstanceProfileAssociations(myLogger logger.LoggerInt, region string) (*ec2.DescribeIamInstanceProfileAssociationsOutput, error) { + // Create a Session with a custom region + sess := session.Must(session.NewSession(&aws.Config{ + Region: ®ion, + })) + svc := ec2.New(sess) + input := &ec2.DescribeIamInstanceProfileAssociationsInput{} + + result, err := svc.DescribeIamInstanceProfileAssociations(input) + if err != nil { + if aerr, ok := err.(awserr.Error); ok { + switch aerr.Code() { + default: + myLogger.Error(aerr.Error()) + } + } else { + // Print the error, cast err to awserr.Error to get the Code and + // Message from an error. + myLogger.Error(err.Error()) + } + return result, err + } + + return result, err +} + +// Get AWS region and check if perun is running on EC2. +func getRegion() (string, bool, error) { + svc := ec2metadata.New(session.New()) + region, err := svc.Region() + if err != nil { + return "", false, err + } + return region, true, nil +} + +// Get IAM Instance profile name to use it as profile name. +func getInstanceProfileName(output *ec2.DescribeIamInstanceProfileAssociationsOutput) string { + arn := output.IamInstanceProfileAssociations[0].IamInstanceProfile.Arn + name := strings.SplitAfter(*arn, "/") + return name[len(name)-1] +} + +// Getting information about EC2 and prepare to run perun there. 
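+// Returns the IAM instance profile name (used as the AWS profile name) and the instance's region.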
+func workingOnEC2(myLogger logger.LoggerInt) (profile string, region string, err error) { + region, _, regionError := getRegion() + myLogger.Info("Running on EC2") + if regionError != nil { + myLogger.Error(regionError.Error()) + return "", "", regionError + } + instanceProfileAssociations, instanceError := getIamInstanceProfileAssociations(myLogger, region) + if instanceError != nil { + myLogger.Error(instanceError.Error()) + return "", "", instanceError + } + instanceProfileName := getInstanceProfileName(instanceProfileAssociations) + return instanceProfileName, region, nil +} + +// Create context and main.yaml if perun is running on EC2. +func createEC2context(profile string, homePath string, region string, ctx *context.Context, myLogger logger.LoggerInt) context.Context { + con := configurator.CreateMainYaml(myLogger, profile, region) + _, err := os.Stat(homePath + "/.config/perun") + if os.IsNotExist(err) { + mkdirError := os.MkdirAll(homePath+"/.config/perun", 0755) + if mkdirError != nil { + myLogger.Error(mkdirError.Error()) + } + } + configuration.SaveToFile(con, homePath+"/.config/perun/main.yaml", myLogger) + *ctx, _ = context.GetContext(cliparser.ParseCliArguments, configuration.GetConfiguration, configuration.ReadInconsistencyConfiguration) + return *ctx +} diff --git a/checkingrequiredfiles/creatingfiles_test.go b/checkingrequiredfiles/creatingfiles_test.go new file mode 100644 index 0000000..bdc6046 --- /dev/null +++ b/checkingrequiredfiles/creatingfiles_test.go @@ -0,0 +1,109 @@ +package checkingrequiredfiles + +import ( + "github.com/stretchr/testify/assert" + "testing" + + "github.com/Appliscale/perun/checkingrequiredfiles/mocks" + "github.com/Appliscale/perun/logger" + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/golang/mock/gomock" +) + +func TestUseProfileFromConfig(t *testing.T) { + mockCtrl := gomock.NewController(t) + mockLogger := mocks.NewMockLoggerInt(mockCtrl) + defer mockCtrl.Finish() + profile := "Test" + profilesInConfig := []string{"Test", "Test1"} + messages := [3]string{ + "Available profiles from config:", + profilesInConfig[0], + profilesInConfig[1], + } + + for _, mes := range messages { + mockLogger.EXPECT().Always(mes).Times(1) + } + mockLogger.EXPECT().GetInput("Which profile should perun use as a default?", &profile).Return(nil).Times(1) + + useProfileFromConfig(profilesInConfig, profile, mockLogger) +} + +func TestAddNewProfileFromCredentialsToConfig(t *testing.T) { + mockCtrl := gomock.NewController(t) + mockLogger := mocks.NewMockLoggerInt(mockCtrl) + defer mockCtrl.Finish() + profile := "perun" + homePath := "./test_resources" + templatePath := "../stack/test_resources/test_template.yaml" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "create-stack", "teststack", templatePath}) + + mockLogger.EXPECT().GetInput("I found profile "+profile+" in credentials, but not in config. \nCreate new profile in config? 
Y/N", gomock.Any()).Return(nil).Times(1) + + addNewProfileFromCredentialsToConfig("default", homePath, ctx, mockLogger) +} + +func TestAddProfileToCredentials(t *testing.T) { + mockCtrl := gomock.NewController(t) + mockLogger := mocks.NewMockLoggerInt(mockCtrl) + defer mockCtrl.Finish() + profile := "perun" + homePath := "./test_resources" + templatePath := "../stack/test_resources/test_template.yaml" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "create-stack", "teststack", templatePath}) + + mockLogger.EXPECT().Always("Profile " + profile + " has already credentials").Times(1) + + addProfileToCredentials(profile, homePath, ctx, mockLogger) +} + +func TestConfigIsPresent(t *testing.T) { + mockCtrl := gomock.NewController(t) + mockLogger := mocks.NewMockLoggerInt(mockCtrl) + defer mockCtrl.Finish() + profile := "default" + homePath := "./test_resources" + templatePath := "../stack/test_resources/test_template.yaml" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "create-stack", "teststack", templatePath}) + + mockLogger.EXPECT().GetInput("Default profile exists, do you want to use it *Y* or create your own *N*?", gomock.Any()).Return(nil).Times(1) + + configIsPresent(profile, homePath, ctx, mockLogger) +} + +func TestCreateCredentials(t *testing.T) { + mockCtrl := gomock.NewController(t) + mockLogger := mocks.NewMockLoggerInt(mockCtrl) + defer mockCtrl.Finish() + profile := "test1" + homePath := "./test_resources" + templatePath := "../stack/test_resources/test_template.yaml" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "create-stack", "teststack", templatePath}) + + mockLogger.EXPECT().GetInput("I found profile "+profile+" in .aws/config without credentials, add? Y/N", gomock.Any()) + + createCredentials(profile, homePath, ctx, mockLogger) + +} + +func TestGetRegion(t *testing.T) { + region, _, _ := getRegion() + assert.Equalf(t, region, "", "Should be nil") +} + +func TestWorkingOnEC2(t *testing.T) { + sink := logger.CreateDefaultLogger() + profile, region, err := workingOnEC2(&sink) + assert.Emptyf(t, profile, "Profile should be empty") + assert.Emptyf(t, region, "Region should be empty") + assert.NotNilf(t, err, "Error should be non-nil") +} + +/*func TestGetIamInstanceProfileAssociations(t *testing.T) { + sink := logger.CreateDefaultLogger() + region := "eu-west-2" + result, err := getIamInstanceProfileAssociations(&sink, region) + assert.Emptyf(t, result, "Result should be empty") + assert.NotNilf(t, err, "Error should be non-nil") +}*/ diff --git a/checkingrequiredfiles/mocks/mock_logger.go b/checkingrequiredfiles/mocks/mock_logger.go new file mode 100644 index 0000000..364854f --- /dev/null +++ b/checkingrequiredfiles/mocks/mock_logger.go @@ -0,0 +1,167 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: ./logger/logger.go + +// Package mocks is a generated GoMock package. 
+package mocks + +import ( + logger "github.com/Appliscale/perun/logger" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockLoggerInt is a mock of LoggerInt interface +type MockLoggerInt struct { + ctrl *gomock.Controller + recorder *MockLoggerIntMockRecorder +} + +// MockLoggerIntMockRecorder is the mock recorder for MockLoggerInt +type MockLoggerIntMockRecorder struct { + mock *MockLoggerInt +} + +// NewMockLoggerInt creates a new mock instance +func NewMockLoggerInt(ctrl *gomock.Controller) *MockLoggerInt { + mock := &MockLoggerInt{ctrl: ctrl} + mock.recorder = &MockLoggerIntMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockLoggerInt) EXPECT() *MockLoggerIntMockRecorder { + return m.recorder +} + +// Always mocks base method +func (m *MockLoggerInt) Always(message string) { + m.ctrl.Call(m, "Always", message) +} + +// Always indicates an expected call of Always +func (mr *MockLoggerIntMockRecorder) Always(message interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Always", reflect.TypeOf((*MockLoggerInt)(nil).Always), message) +} + +// Warning mocks base method +func (m *MockLoggerInt) Warning(warning string) { + m.ctrl.Call(m, "Warning", warning) +} + +// Warning indicates an expected call of Warning +func (mr *MockLoggerIntMockRecorder) Warning(warning interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Warning", reflect.TypeOf((*MockLoggerInt)(nil).Warning), warning) +} + +// Error mocks base method +func (m *MockLoggerInt) Error(err string) { + m.ctrl.Call(m, "Error", err) +} + +// Error indicates an expected call of Error +func (mr *MockLoggerIntMockRecorder) Error(err interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Error", reflect.TypeOf((*MockLoggerInt)(nil).Error), err) +} + +// Info mocks base method +func (m *MockLoggerInt) Info(info string) { + m.ctrl.Call(m, "Info", info) +} + +// Info indicates an expected call of Info +func (mr *MockLoggerIntMockRecorder) Info(info interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Info", reflect.TypeOf((*MockLoggerInt)(nil).Info), info) +} + +// Debug mocks base method +func (m *MockLoggerInt) Debug(debug string) { + m.ctrl.Call(m, "Debug", debug) +} + +// Debug indicates an expected call of Debug +func (mr *MockLoggerIntMockRecorder) Debug(debug interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Debug", reflect.TypeOf((*MockLoggerInt)(nil).Debug), debug) +} + +// Trace mocks base method +func (m *MockLoggerInt) Trace(trace string) { + m.ctrl.Call(m, "Trace", trace) +} + +// Trace indicates an expected call of Trace +func (mr *MockLoggerIntMockRecorder) Trace(trace interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Trace", reflect.TypeOf((*MockLoggerInt)(nil).Trace), trace) +} + +// GetInput mocks base method +func (m *MockLoggerInt) GetInput(message string, v ...interface{}) error { + varargs := []interface{}{message} + for _, a := range v { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "GetInput", varargs...) + ret0, _ := ret[0].(error) + return ret0 +} + +// GetInput indicates an expected call of GetInput +func (mr *MockLoggerIntMockRecorder) GetInput(message interface{}, v ...interface{}) *gomock.Call { + varargs := append([]interface{}{message}, v...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetInput", reflect.TypeOf((*MockLoggerInt)(nil).GetInput), varargs...) +} + +// PrintValidationErrors mocks base method +func (m *MockLoggerInt) PrintValidationErrors() { + m.ctrl.Call(m, "PrintValidationErrors") +} + +// PrintValidationErrors indicates an expected call of PrintValidationErrors +func (mr *MockLoggerIntMockRecorder) PrintValidationErrors() *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "PrintValidationErrors", reflect.TypeOf((*MockLoggerInt)(nil).PrintValidationErrors)) +} + +// HasValidationErrors mocks base method +func (m *MockLoggerInt) HasValidationErrors() bool { + ret := m.ctrl.Call(m, "HasValidationErrors") + ret0, _ := ret[0].(bool) + return ret0 +} + +// HasValidationErrors indicates an expected call of HasValidationErrors +func (mr *MockLoggerIntMockRecorder) HasValidationErrors() *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HasValidationErrors", reflect.TypeOf((*MockLoggerInt)(nil).HasValidationErrors)) +} + +// HasValidationWarnings mocks base method +func (m *MockLoggerInt) HasValidationWarnings() bool { + ret := m.ctrl.Call(m, "HasValidationWarnings") + ret0, _ := ret[0].(bool) + return ret0 +} + +// HasValidationWarnings indicates an expected call of HasValidationWarnings +func (mr *MockLoggerIntMockRecorder) HasValidationWarnings() *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "HasValidationWarnings", reflect.TypeOf((*MockLoggerInt)(nil).HasValidationWarnings)) +} + +// AddResourceForValidation mocks base method +func (m *MockLoggerInt) AddResourceForValidation(resourceName string) *logger.ResourceValidation { + ret := m.ctrl.Call(m, "AddResourceForValidation", resourceName) + ret0, _ := ret[0].(*logger.ResourceValidation) + return ret0 +} + +// AddResourceForValidation indicates an expected call of AddResourceForValidation +func (mr *MockLoggerIntMockRecorder) AddResourceForValidation(resourceName interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AddResourceForValidation", reflect.TypeOf((*MockLoggerInt)(nil).AddResourceForValidation), resourceName) +} + +// SetVerbosity mocks base method +func (m *MockLoggerInt) SetVerbosity(verbosity string) { + m.ctrl.Call(m, "SetVerbosity", verbosity) +} + +// SetVerbosity indicates an expected call of SetVerbosity +func (mr *MockLoggerIntMockRecorder) SetVerbosity(verbosity interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetVerbosity", reflect.TypeOf((*MockLoggerInt)(nil).SetVerbosity), verbosity) +} diff --git a/checkingrequiredfiles/test_resources/.aws/config b/checkingrequiredfiles/test_resources/.aws/config new file mode 100644 index 0000000..bee2eb9 --- /dev/null +++ b/checkingrequiredfiles/test_resources/.aws/config @@ -0,0 +1,3 @@ +[default] +region = us-east-1 +output = json \ No newline at end of file diff --git a/checkingrequiredfiles/test_resources/.aws/credentials b/checkingrequiredfiles/test_resources/.aws/credentials new file mode 100644 index 0000000..c27f7d7 --- /dev/null +++ b/checkingrequiredfiles/test_resources/.aws/credentials @@ -0,0 +1,9 @@ +[default] +aws_access_key_id = TESTTEST +aws_secret_access_key = testtest +mfa_serial = arn:aws:iam: + +[perun-long-term] +aws_access_key_id = TESTTEST +aws_secret_access_key = testtest +mfa_serial = arn:aws:iam: \ No newline at end of file diff --git a/cliparser/cliparser.go b/cliparser/cliparser.go index 316f674..bbdba86 100644 --- a/cliparser/cliparser.go 
+++ b/cliparser/cliparser.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -20,39 +20,66 @@ package cliparser import ( "errors" + "time" + "github.com/Appliscale/perun/logger" "github.com/Appliscale/perun/utilities" "gopkg.in/alecthomas/kingpin.v2" ) var ValidateMode = "validate" -var ConvertMode = "convert" -var OfflineValidateMode = "validate_offline" var ConfigureMode = "configure" var CreateStackMode = "create-stack" var DestroyStackMode = "delete-stack" +var UpdateStackMode = "update-stack" +var MfaMode = "mfa" +var SetupSinkMode = "setup-remote-sink" +var DestroySinkMode = "destroy-remote-sink" +var CreateParametersMode = "create-parameters" +var SetStackPolicyMode = "set-stack-policy" +var CreateChangeSetMode = "create-change-set" +var DeleteChangeSetMode = "delete-change-set" +var LintMode = "lint" +var EstimateCostMode = "estimate-cost" + +var ChangeSetDefaultName string + +const JSON = "json" +const YAML = "yaml" type CliArguments struct { - Mode *string - TemplatePath *string - OutputFilePath *string - ConfigurationPath *string - Quiet *bool - Yes *bool - Verbosity *string - MFA *bool - DurationForMFA *int64 - Profile *string - Region *string - Sandbox *bool - Stack *string - PrettyPrint *bool + Mode *string + TemplatePath *string + Parameters *map[string]string + OutputFilePath *string + ConfigurationPath *string + Quiet *bool + Yes *bool + Verbosity *string + MFA *bool + DurationForMFA *int64 + Profile *string + Region *string + Sandbox *bool + Stack *string + Capabilities *[]string + PrettyPrint *bool + Progress *bool + ParametersFile *string + Block *bool + Unblock *bool + DisableStackTermination *bool + EnableStackTermination *bool + ChangeSet *string + Lint *bool + LinterConfiguration *string + SkipValidation *bool } // Get and validate CLI arguments. Returns error if validation fails. func ParseCliArguments(args []string) (cliArguments CliArguments, err error) { var ( - app = kingpin.New("Perun", "Swiss army knife for AWS CloudFormation templates - validation, conversion, generators and other various stuff.") + app = kingpin.New("Perun", "A command-line validation tool for AWS Cloud Formation that allows to conquer the cloud faster!") quiet = app.Flag("quiet", "No console output, just return code.").Short('q').Bool() yes = app.Flag("yes", "Always say yes.").Short('y').Bool() @@ -63,63 +90,188 @@ func ParseCliArguments(args []string) (cliArguments CliArguments, err error) { region = app.Flag("region", "An AWS region to use.").Short('r').String() sandbox = app.Flag("sandbox", "Do not use configuration files hierarchy.").Bool() configurationPath = app.Flag("config", "A path to the configuration file").Short('c').String() + showProgress = app.Flag("progress", "Show progress of stack creation. 
Option available only after setting up a remote sink").Bool() + noValidate = app.Flag("no-validate", "Disable validation before stack creation/update or creating Change Set.").Bool() - onlineValidate = app.Command(ValidateMode, "Online template Validation") - onlineValidateTemplate = onlineValidate.Arg("template", "A path to the template file.").Required().String() - - offlineValidate = app.Command(OfflineValidateMode, "Offline Template Validation") - offlineValidateTemplate = offlineValidate.Arg("template", "A path to the template file.").Required().String() + validate = app.Command(ValidateMode, "Template Validation") + validateTemplate = validate.Arg("template", "A path to the template file.").Required().String() + validateLint = validate.Flag("lint", "Enable template linting").Bool() + validateLintConfiguration = validate.Flag("lint-configuration", "A path to the configuration file").String() + validateParams = validate.Flag("parameter", "list of parameters").StringMap() + validateParametersFile = validate.Flag("parameters-file", "filename with parameters").String() - convert = app.Command(ConvertMode, "Convertion between JSON and YAML of template files") - convertTemplate = convert.Arg("template", "A path to the template file.").Required().String() - convertOutputFile = convert.Arg("output", "A path where converted file will be saved.").Required().String() - prettyPrint = convert.Flag("pretty-print", "Pretty printing JSON").Bool() + lint = app.Command(LintMode, "Additional validation and template style checks") + lintTemplate = lint.Arg("template", "A path to the template file.").Required().String() + lintConfiguration = lint.Flag("lint-configuration", "A path to the configuration file").String() configure = app.Command(ConfigureMode, "Create your own configuration mode") - createStack = app.Command(CreateStackMode, "Creates a stack on aws") - createStackName = createStack.Arg("stack", "An AWS stack name.").Required().String() - createStackTemplate = createStack.Arg("template", "A path to the template file.").Required().String() + createStack = app.Command(CreateStackMode, "Creates a stack on aws") + createStackName = createStack.Arg("stack", "An AWS stack name.").Required().String() + createStackTemplate = createStack.Arg("template", "A path to the template file.").Required().String() + createStackCapabilities = createStack.Flag("capabilities", "Capabilities: CAPABILITY_IAM | CAPABILITY_NAMED_IAM").Enums("CAPABILITY_IAM", "CAPABILITY_NAMED_IAM") + createStackParams = createStack.Flag("parameter", "list of parameters").StringMap() + createStackParametersFile = createStack.Flag("parameters-file", "filename with parameters").String() + createStackLint = createStack.Flag("lint", "Enable template linting").Bool() + createStackLintConfiguration = createStack.Flag("lint-configuration", "A path to the configuration file").String() + + createChangeSet = app.Command(CreateChangeSetMode, "Creates a changeSet on aws") + changeSetStackName = createChangeSet.Arg("stack", "An AWS stack name").Required().String() + changeSetTemplate = createChangeSet.Arg("template", "A path to the template file").Required().String() + createChangeSetName = createChangeSet.Arg("changeSet", "An AWS Change Set name").String() + createChangeSetParams = createChangeSet.Flag("parameter", "list of parameters").StringMap() + createChangeSetParametersFile = createChangeSet.Flag("parameters-file", "filename with parameters").String() + createChangeSetLint = createChangeSet.Flag("lint", "Enable template linting").Bool() + 
createChangeSetLintConfiguration = createChangeSet.Flag("lint-configuration", "A path to the configuration file").String() + + deleteChangeSet = app.Command(DeleteChangeSetMode, "Deletes a changeSet on aws") + deleteChangeSetStackName = deleteChangeSet.Arg("stack", "An AWS stack Name").Required().String() + deleteChangeSetName = deleteChangeSet.Arg("change-set", "An AWS Change Set name").Required().String() deleteStack = app.Command(DestroyStackMode, "Deletes a stack on aws") deleteStackName = deleteStack.Arg("stack", "An AWS stack name.").Required().String() + + updateStack = app.Command(UpdateStackMode, "Updates a stack on aws") + updateStackName = updateStack.Arg("stack", "An AWS stack name").Required().String() + updateStackTemplate = updateStack.Arg("template", "A path to the template file.").Required().String() + updateStackCapabilities = updateStack.Flag("capabilities", "Capabilities: CAPABILITY_IAM | CAPABILITY_NAMED_IAM").Enums("CAPABILITY_IAM", "CAPABILITY_NAMED_IAM") + updateStackParams = updateStack.Flag("parameter", "list of parameters").StringMap() + updateStackParametersFile = updateStack.Flag("parameters-file", "filename with parameters").String() + updateStackLint = updateStack.Flag("lint", "Enable template linting").Bool() + updateStackLintConfiguration = updateStack.Flag("lint-configuration", "A path to the configuration file").String() + + mfaCommand = app.Command(MfaMode, "Create temporary secure credentials with MFA.") + + setupSink = app.Command(SetupSinkMode, "Sets up resources required for progress report on stack events (SNS Topic, SQS Queue and SQS Queue Policy)") + + destroySink = app.Command(DestroySinkMode, "Destroys resources created with setup-remote-sink") + + createParameters = app.Command(CreateParametersMode, "Creates a JSON parameters configuration suitable for give cloud formation file") + createParametersTemplate = createParameters.Arg("template", "A path to the template file.").Required().String() + createParametersParamsOutputFile = createParameters.Arg("output", "A path to file where parameters will be saved.").Required().String() + createParametersParams = createParameters.Flag("parameter", "list of parameters").StringMap() + createParametersPrettyPrint = createParameters.Flag("pretty-print", "Pretty printing JSON").Bool() + + setStackPolicy = app.Command(SetStackPolicyMode, "Set stack policy using JSON file.") + setStackPolicyName = setStackPolicy.Arg("stack", "An AWS stack name.").Required().String() + setStackPolicyTemplate = setStackPolicy.Arg("template", "A path to the template file.").String() + setDefaultBlockingStackPolicy = setStackPolicy.Flag("block", "Blocking all actions.").Bool() + setDefaultUnblockingStackPolicy = setStackPolicy.Flag("unblock", "Unblocking all actions.").Bool() + setDisableStackTermination = setStackPolicy.Flag("disable-stack-termination", "Allow to delete a stack.").Bool() + setEnableStackTermination = setStackPolicy.Flag("enable-stack-termination", "Protecting a stack from being deleted.").Bool() + + estimateCost = app.Command(EstimateCostMode, "Estimate template's cost.") + estimateCostTemplate = estimateCost.Arg("template", "A path to the template file.").Required().String() + estimateCostParams = estimateCost.Flag("parameter", "list of parameters").StringMap() + estimateCostParametersFile = estimateCost.Flag("parameters-file", "filename with parameters").String() ) + app.HelpFlag.Short('h') app.Version(utilities.VersionStatus()) switch kingpin.MustParse(app.Parse(args[1:])) { - //online validate - case 
onlineValidate.FullCommand(): + // validate + case validate.FullCommand(): cliArguments.Mode = &ValidateMode - cliArguments.TemplatePath = onlineValidateTemplate - - // offline validation - case offlineValidate.FullCommand(): - cliArguments.Mode = &OfflineValidateMode - cliArguments.TemplatePath = offlineValidateTemplate - - // convert - case convert.FullCommand(): - cliArguments.Mode = &ConvertMode - cliArguments.TemplatePath = convertTemplate - cliArguments.OutputFilePath = convertOutputFile - cliArguments.PrettyPrint = prettyPrint + cliArguments.TemplatePath = validateTemplate + cliArguments.Lint = validateLint + cliArguments.LinterConfiguration = validateLintConfiguration + cliArguments.Parameters = validateParams + cliArguments.ParametersFile = validateParametersFile // configure case configure.FullCommand(): cliArguments.Mode = &ConfigureMode + case lint.FullCommand(): + cliArguments.Mode = &LintMode + cliArguments.TemplatePath = lintTemplate + cliArguments.LinterConfiguration = lintConfiguration + // create Stack case createStack.FullCommand(): cliArguments.Mode = &CreateStackMode - cliArguments.TemplatePath = createStackTemplate cliArguments.Stack = createStackName + cliArguments.TemplatePath = createStackTemplate + cliArguments.Capabilities = createStackCapabilities + cliArguments.Parameters = createStackParams + cliArguments.ParametersFile = createStackParametersFile + cliArguments.Lint = createStackLint + cliArguments.LinterConfiguration = createStackLintConfiguration - // delete Stack + // delete Stack case deleteStack.FullCommand(): cliArguments.Mode = &DestroyStackMode cliArguments.Stack = deleteStackName + + // generate MFA token + case mfaCommand.FullCommand(): + cliArguments.Mode = &MfaMode + + // update Stack + case updateStack.FullCommand(): + cliArguments.Mode = &UpdateStackMode + cliArguments.Stack = updateStackName + cliArguments.TemplatePath = updateStackTemplate + cliArguments.Capabilities = updateStackCapabilities + cliArguments.ParametersFile = updateStackParametersFile + cliArguments.Parameters = updateStackParams + cliArguments.Lint = updateStackLint + cliArguments.LinterConfiguration = updateStackLintConfiguration + + // create Parameters + case createParameters.FullCommand(): + cliArguments.Mode = &CreateParametersMode + cliArguments.TemplatePath = createParametersTemplate + cliArguments.OutputFilePath = createParametersParamsOutputFile + cliArguments.Parameters = createParametersParams + cliArguments.PrettyPrint = createParametersPrettyPrint + + // set stack policy + case setStackPolicy.FullCommand(): + cliArguments.Mode = &SetStackPolicyMode + cliArguments.Block = setDefaultBlockingStackPolicy + cliArguments.Unblock = setDefaultUnblockingStackPolicy + cliArguments.Stack = setStackPolicyName + cliArguments.TemplatePath = setStackPolicyTemplate + cliArguments.DisableStackTermination = setDisableStackTermination + cliArguments.EnableStackTermination = setEnableStackTermination + + case createChangeSet.FullCommand(): + cliArguments.Mode = &CreateChangeSetMode + if *createChangeSetName != "" { + cliArguments.ChangeSet = createChangeSetName + } else { + ChangeSetDefaultName = *changeSetStackName + time.Now().Format("-2006-01-02--15-04-05") + cliArguments.ChangeSet = &ChangeSetDefaultName + } + cliArguments.TemplatePath = changeSetTemplate + cliArguments.Stack = changeSetStackName + cliArguments.Parameters = createChangeSetParams + cliArguments.ParametersFile = createChangeSetParametersFile + cliArguments.Lint = createChangeSetLint + 
cliArguments.LinterConfiguration = createChangeSetLintConfiguration + + case deleteChangeSet.FullCommand(): + cliArguments.Mode = &DeleteChangeSetMode + cliArguments.Stack = deleteChangeSetStackName + cliArguments.ChangeSet = deleteChangeSetName + + // set up remote sink + case setupSink.FullCommand(): + cliArguments.Mode = &SetupSinkMode + + // destroy remote sink + case destroySink.FullCommand(): + cliArguments.Mode = &DestroySinkMode + + // estimate template cost + case estimateCost.FullCommand(): + cliArguments.Mode = &EstimateCostMode + cliArguments.TemplatePath = estimateCostTemplate + cliArguments.Parameters = estimateCostParams + cliArguments.ParametersFile = estimateCostParametersFile } // OTHER FLAGS @@ -132,6 +284,8 @@ func ParseCliArguments(args []string) (cliArguments CliArguments, err error) { cliArguments.Region = region cliArguments.Sandbox = sandbox cliArguments.ConfigurationPath = configurationPath + cliArguments.Progress = showProgress + cliArguments.SkipValidation = noValidate if *cliArguments.DurationForMFA < 0 { err = errors.New("You should specify value for duration of MFA token greater than zero") diff --git a/cliparser/cliparser_test.go b/cliparser/cliparser_test.go index ce6afd6..4630576 100644 --- a/cliparser/cliparser_test.go +++ b/cliparser/cliparser_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -37,7 +37,7 @@ func TestTooBigDurationForMFA(t *testing.T) { } func TestValidArgs(t *testing.T) { - assert.Nil(t, parseCliArguments([]string{"cmd", "validate_offline", "some_path"})) + assert.Nil(t, parseCliArguments([]string{"cmd", "validate", "some_path"})) } func parseCliArguments(args []string) error { diff --git a/configuration/configuration.go b/configuration/configuration.go index 8f6751d..84b8683 100644 --- a/configuration/configuration.go +++ b/configuration/configuration.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -19,11 +19,13 @@ package configuration import ( "errors" + "io/ioutil" + "os" + "strings" + "github.com/Appliscale/perun/cliparser" "github.com/Appliscale/perun/logger" "github.com/ghodss/yaml" - "io/ioutil" - "os" ) // Perun configuration. @@ -40,6 +42,8 @@ type Configuration struct { DefaultDurationForMFA int64 // Logger verbosity. DefaultVerbosity string + // Directory for temporary files. + DefaultTemporaryFilesDirectory string } // Return URL to specification file. If there is no specification file for selected region, return error. @@ -51,25 +55,21 @@ func (config Configuration) GetSpecificationFileURLForCurrentRegion() (string, e } // Return perun configuration read from file. 
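As an illustration of the reworked command set above (a minimal sketch, not part of the patch; the stack name, template path and parameter value are made up), a small program like this would exercise ParseCliArguments for the new create-stack flags:

package main

import (
	"fmt"

	"github.com/Appliscale/perun/cliparser"
)

func main() {
	// The slice mirrors os.Args, so the first element is the binary name.
	args, err := cliparser.ParseCliArguments([]string{
		"perun", "create-stack", "MyStack", "template.yaml",
		"--parameter", "InstanceType=t2.micro",
		"--capabilities", "CAPABILITY_IAM",
	})
	if err != nil {
		fmt.Println(err)
		return
	}

	fmt.Println(*args.Mode, *args.Stack, *args.TemplatePath) // create-stack MyStack template.yaml
	fmt.Println((*args.Parameters)["InstanceType"])          // t2.micro
	fmt.Println(*args.Capabilities)                          // [CAPABILITY_IAM]
}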
-func GetConfiguration(cliArguments cliparser.CliArguments, logger *logger.Logger) (config Configuration, err error) { - if getMode(cliArguments) != cliparser.ConfigureMode { - var configPath string - configPath, err = getConfigurationPath(cliArguments, logger) - if err != nil { - return - } - var rawConfiguration []byte - rawConfiguration, err = ioutil.ReadFile(configPath) - if err != nil { - return - } - err = yaml.Unmarshal(rawConfiguration, &config) - if err != nil { - return - } - postProcessing(&config, cliArguments) +func GetConfiguration(cliArguments cliparser.CliArguments, logger logger.LoggerInt) (config Configuration, err error) { + mode := getMode(cliArguments) + + if mode == cliparser.ConfigureMode { + return + } + + config, err = getConfigurationFromFile(cliArguments, logger) + if err != nil && mode != cliparser.MfaMode { return } + + err = nil + postProcessing(&config, cliArguments) + return } @@ -97,15 +97,17 @@ func postProcessing(config *Configuration, cliArguments cliparser.CliArguments) if *cliArguments.Profile != "" { config.DefaultProfile = *cliArguments.Profile } - if *cliArguments.MFA != config.DefaultDecisionForMFA { + if *cliArguments.MFA { config.DefaultDecisionForMFA = *cliArguments.MFA + } else { + *cliArguments.MFA = config.DefaultDecisionForMFA } if *cliArguments.DurationForMFA > 0 { config.DefaultDurationForMFA = *cliArguments.DurationForMFA } } -func getConfigurationPath(cliArguments cliparser.CliArguments, logger *logger.Logger) (configPath string, err error) { +func getConfigurationPath(cliArguments cliparser.CliArguments, logger logger.LoggerInt) (configPath string, err error) { if *cliArguments.Sandbox { return "", errors.New("No configuration file should be used.") } @@ -116,7 +118,7 @@ func getConfigurationPath(cliArguments cliparser.CliArguments, logger *logger.Lo } else if path, ok := getConfigFileFromCurrentWorkingDirectory(os.Stat); ok { notifyUserAboutConfigurationFile(path, logger) return path, nil - } else if path, ok := getUserConfigFile(os.Stat); ok { + } else if path, ok := getUserConfigFile(os.Stat, "main.yaml"); ok { notifyUserAboutConfigurationFile(path, logger) return path, nil } else if path, ok := getGlobalConfigFile(os.Stat); ok { @@ -127,17 +129,44 @@ func getConfigurationPath(cliArguments cliparser.CliArguments, logger *logger.Lo } } -func notifyUserAboutConfigurationFile(configurationFilePath string, logger *logger.Logger) { +func notifyUserAboutConfigurationFile(configurationFilePath string, logger logger.LoggerInt) { logger.Info("Configuration file from the following location will be used: " + configurationFilePath) } -func SaveToFile(config Configuration, path string, logger logger.Logger) { +func SaveToFile(config Configuration, path string, logger logger.LoggerInt) { + wholePath := strings.Split(path, "/") + var newpath string + for i := 0; i < len(wholePath)-1; i++ { + newpath += "/" + wholePath[i] + } + os.MkdirAll(newpath, os.ModePerm) file, err := os.Create(path) defer file.Close() if err != nil { - logger.Error("Could not create file") + logger.Error(err.Error()) return } obj, _ := yaml.Marshal(config) _, err = file.Write(obj) + if err != nil { + logger.Error(err.Error()) + } +} + +func getConfigurationFromFile(cliArguments cliparser.CliArguments, logger logger.LoggerInt) (config Configuration, err error) { + var configPath string + configPath, err = getConfigurationPath(cliArguments, logger) + if err != nil { + return + } + var rawConfiguration []byte + rawConfiguration, err = ioutil.ReadFile(configPath) + if err != nil 
{ + return + } + err = yaml.Unmarshal(rawConfiguration, &config) + if err != nil { + return + } + return } diff --git a/configuration/configuration_test.go b/configuration/configuration_test.go index 8090e00..ca56df0 100644 --- a/configuration/configuration_test.go +++ b/configuration/configuration_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -31,9 +31,9 @@ func setup(args []string) error { return err } - logger := logger.CreateQuietLogger() + log := logger.CreateQuietLogger() - configuration, err = GetConfiguration(cliArgs, &logger) + configuration, err = GetConfiguration(cliArgs, &log) if err != nil { return err @@ -43,13 +43,13 @@ func setup(args []string) error { } func TestSpecificationFileURL(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml"}) url, _ := configuration.GetSpecificationFileURLForCurrentRegion() assert.Equal(t, "https://d1uauaxba7bl26.cloudfront.net/latest/gzip/CloudFormationResourceSpecification.json", url) } func TestNoSpecificationForRegion(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml"}) localConfiguration := Configuration{ DefaultRegion: "someRegion", @@ -60,76 +60,76 @@ func TestNoSpecificationForRegion(t *testing.T) { } func TestGettingMFADecisionFromConfigurationFile(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml"}) assert.Equal(t, false, configuration.DefaultDecisionForMFA) } func TestOverrideForMFADecision(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml", "--mfa"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml", "--mfa"}) assert.Equal(t, true, configuration.DefaultDecisionForMFA) } func TestNoMFADecision(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--sandbox"}) + setup([]string{"cmd", "validate", "some_path", "--sandbox"}) assert.Equal(t, false, configuration.DefaultDecisionForMFA) } func TestGettingDefaultRegionFromConfigurationFile(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml"}) assert.Equal(t, "us-west-2", configuration.DefaultRegion) } func TestCLIOverrideForRegion(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml", "--region=ap-southeast-1"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml", "--region=ap-southeast-1"}) assert.Equal(t, "ap-southeast-1", configuration.DefaultRegion) } func TestGettingDurationForMFAFromConfigurationFile(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml"}) assert.Equal(t, int64(2600), configuration.DefaultDurationForMFA) } func TestCLIOverrideForDurationForMFA(t 
*testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml", "--duration=1600"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml", "--duration=1600"}) assert.Equal(t, int64(1600), configuration.DefaultDurationForMFA) } func TestNoDurationForMFA(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--sandbox"}) + setup([]string{"cmd", "validate", "some_path", "--sandbox"}) assert.Equal(t, int64(0), configuration.DefaultDurationForMFA) } func TestTooBigDurationForMFA(t *testing.T) { - err := setup([]string{"cmd", "validate_offline", "some_path", "--duration=600000000"}) + err := setup([]string{"cmd", "validate", "some_path", "--duration=600000000"}) assert.NotNil(t, err) } func TestTooSmallDurationForMFA(t *testing.T) { - err := setup([]string{"cmd", "validate_offline", "some_path", "--duration=-1"}) + err := setup([]string{"cmd", "validate", "some_path", "--duration=-1"}) assert.NotNil(t, err) } func TestZeroDurationForMFA(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--duration=0", "--sandbox"}) + setup([]string{"cmd", "validate", "some_path", "--duration=0", "--sandbox"}) assert.Equal(t, int64(0), configuration.DefaultDurationForMFA) } func TestGettingProfileFromConfigurationFile(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml"}) assert.Equal(t, "profile", configuration.DefaultProfile) } func TestCLIOverrideForProfile(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml", "--profile=cliProfile"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml", "--profile=cliProfile"}) assert.Equal(t, "cliProfile", configuration.DefaultProfile) } func TestGettingVerbosityFromConfigurationFile(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml"}) assert.Equal(t, "ERROR", configuration.DefaultVerbosity) } func TestCLIOverrideForVerbosity(t *testing.T) { - setup([]string{"cmd", "validate_offline", "some_path", "--config=test_resources/test_config.yaml", "--verbosity=INFO"}) + setup([]string{"cmd", "validate", "some_path", "--config=test_resources/test_config.yaml", "--verbosity=INFO"}) assert.Equal(t, "INFO", configuration.DefaultVerbosity) } diff --git a/configuration/finder_unix.go b/configuration/finder_unix.go index df7d89e..50cfd65 100644 --- a/configuration/finder_unix.go +++ b/configuration/finder_unix.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -14,8 +14,6 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-// +build !windows - package configuration import ( @@ -25,8 +23,8 @@ import ( type myStat func(string) (os.FileInfo, error) -func getUserConfigFile(existenceChecker myStat) (val string, ok bool) { - const relativeUserConfigPath = "/.config/perun/main.yaml" +func getUserConfigFile(existenceChecker myStat, fileName string) (val string, ok bool) { + relativeUserConfigPath := "/.config/perun/" + fileName var err error var usr *user.User diff --git a/configuration/finder_unix_test.go b/configuration/finder_unix_test.go index 067d9c6..3df35ff 100644 --- a/configuration/finder_unix_test.go +++ b/configuration/finder_unix_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -20,10 +20,11 @@ package configuration import ( "errors" - "github.com/stretchr/testify/assert" "os" "os/user" "testing" + + "github.com/stretchr/testify/assert" ) func existStub(string) (os.FileInfo, error) { @@ -36,14 +37,14 @@ func notExistStub(string) (os.FileInfo, error) { func TestGetUserConfigFile(t *testing.T) { t.Run("File exist", func(t *testing.T) { - path, ok := getUserConfigFile(existStub) + path, ok := getUserConfigFile(existStub, "main.yaml") usr, _ := user.Current() assert.Equal(t, usr.HomeDir+"/.config/perun/main.yaml", path, "Should contain user home") assert.True(t, ok, "Should exist") }) t.Run("File does not exist", func(t *testing.T) { - _, ok := getUserConfigFile(notExistStub) + _, ok := getUserConfigFile(notExistStub, "main.yaml") assert.False(t, ok, "Should not exist") }) } diff --git a/configuration/finder_windows.go b/configuration/finder_windows.go deleted file mode 100644 index d3b06a5..0000000 --- a/configuration/finder_windows.go +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright 2017 Appliscale -// -// Maintainers and contributors are listed in README file inside repository. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// +build windows - -package configuration - -import "os" - -type myStat func(string) (os.FileInfo, error) - -func getUserConfigFile(existenceChecker myStat) (val string, ok bool) { - const envVar = "LOCALAPPDATA" - const relativeUserConfigPath = "\\perun\\main.yaml" - - return checkConfigExistence(envVar, relativeUserConfigPath, existenceChecker) -} - -func getGlobalConfigFile(existenceChecker myStat) (val string, ok bool) { - const envVar = "ALLUSERSPROFILE" - const relativeGlobalConfigPath = "\\perun\\main.yaml" - - return checkConfigExistence(envVar, relativeGlobalConfigPath, existenceChecker) -} - -func checkConfigExistence(envVar string, relativeConfigPath string, existenceChecker myStat) (val string, ok bool) { - absoluteConfigPath, ok := buildAbsolutePath(envVar, relativeConfigPath) - if !ok { - return "", false - } - - _, err := existenceChecker(absoluteConfigPath) - if err != nil { - return "", false - } - - return absoluteConfigPath, true -} - -func buildAbsolutePath(envVar string, relativeConfigPath string) (val string, ok bool) { - envVal, ok := os.LookupEnv(envVar) - if !ok { - return "", false - } - - absoluteConfigPath := envVal + relativeConfigPath - - return absoluteConfigPath, true -} - -func getConfigFileFromCurrentWorkingDirectory(existenceChecker myStat) (val string, ok bool) { - return getConfigFileFromCurrentWorkingDirectory_(existenceChecker, "\\.perun") -} diff --git a/configuration/finder_windows_test.go b/configuration/finder_windows_test.go deleted file mode 100644 index 2041313..0000000 --- a/configuration/finder_windows_test.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2017 Appliscale -// -// Maintainers and contributors are listed in README file inside repository. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -// +build windows - -package configuration - -import ( - "errors" - "github.com/stretchr/testify/assert" - "os" - "testing" -) - -func existStub(string) (os.FileInfo, error) { - return nil, nil -} - -func notExistStub(string) (os.FileInfo, error) { - return nil, errors.New("") -} - -func TestGetUserConfigFile(t *testing.T) { - t.Run("File exist", func(t *testing.T) { - path, ok := getUserConfigFile(existStub) - envVal, _ := os.LookupEnv("LOCALAPPDATA") - assert.Equal(t, envVal+"\\perun\\main.yaml", path, "Should contain Local") - assert.True(t, ok, "Should exist") - }) - - t.Run("File does not exist", func(t *testing.T) { - _, ok := getUserConfigFile(notExistStub) - assert.False(t, ok, "Should not exist") - }) -} - -func TestGetGlobalConfigFile(t *testing.T) { - t.Run("File exist", func(t *testing.T) { - path, ok := getGlobalConfigFile(existStub) - envVal, _ := os.LookupEnv("ALLUSERSPROFILE") - assert.Equal(t, envVal+"\\perun\\main.yaml", path, "Should contain ProgramData") - assert.True(t, ok, "Should exist") - }) - - t.Run("File does not exist", func(t *testing.T) { - _, ok := getGlobalConfigFile(notExistStub) - assert.False(t, ok, "Should not exist") - }) -} - -func TestGetConfigFileFromCurrentWorkingDirectory(t *testing.T) { - t.Run("File exist", func(t *testing.T) { - path, ok := getConfigFileFromCurrentWorkingDirectory(existStub) - dir, _ := os.Getwd() - assert.Equal(t, dir+"\\.perun", path, "Should contain current working directory") - assert.True(t, ok, "Should exist") - }) - - t.Run("File does not exist", func(t *testing.T) { - _, ok := getConfigFileFromCurrentWorkingDirectory(notExistStub) - assert.False(t, ok, "Should not exist") - }) -} diff --git a/configuration/linter_configuration.go b/configuration/linter_configuration.go new file mode 100644 index 0000000..5738661 --- /dev/null +++ b/configuration/linter_configuration.go @@ -0,0 +1,51 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package configuration + +import ( + "github.com/Appliscale/perun/logger" + "io/ioutil" + "os" +) + +// GetLinterConfigurationFile reads configuration from file. 
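To show how the linter-configuration lookup defined next is meant to be consumed, here is an illustrative sketch (not part of the patch); it assumes defaults/style.yaml has been installed to ~/.config/perun/style.yaml, which is the fallback used when --lint-configuration is not given:

package main

import (
	"fmt"

	"github.com/Appliscale/perun/configuration"
	"github.com/Appliscale/perun/logger"
)

func main() {
	log := logger.CreateDefaultLogger()

	// An empty value stands in for a missing --lint-configuration flag,
	// which makes the lookup fall back to ~/.config/perun/style.yaml.
	lintPath := ""
	raw := configuration.GetLinterConfigurationFile(&lintPath, &log)

	fmt.Printf("loaded %d bytes of linter configuration\n", len(raw))
}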
+func GetLinterConfigurationFile(linterFile *string, logger logger.LoggerInt) (rawLintConfiguration string) {
+	if *linterFile != "" {
+		bytesConfiguration, err := ioutil.ReadFile(*linterFile)
+		if err != nil {
+			logger.Error("Error reading linter configuration file from " + *linterFile)
+			logger.Error(err.Error())
+		}
+		rawLintConfiguration = string(bytesConfiguration)
+	} else {
+		funcName(logger, "~/.config/perun/")
+		conf, ok := getUserConfigFile(os.Stat, "style.yaml")
+		if !ok {
+			logger.Error("Error getting linter configuration file from ~/.config/perun/")
+		}
+		bytesConfiguration, err := ioutil.ReadFile(conf)
+		if err != nil {
+			logger.Error(err.Error())
+		}
+		rawLintConfiguration = string(bytesConfiguration)
+	}
+	return
+}
+
+func funcName(logger logger.LoggerInt, linterFile string) {
+	logger.Info("Linter Configuration file from the following location will be used: " + linterFile)
+}
diff --git a/configuration/specification_incosistency.go b/configuration/specification_incosistency.go
new file mode 100644
index 0000000..267eb95
--- /dev/null
+++ b/configuration/specification_incosistency.go
@@ -0,0 +1,55 @@
+// Copyright 2018 Appliscale
+//
+// Maintainers and contributors are listed in README file inside repository.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package configuration
+
+import (
+	"io/ioutil"
+	"os"
+
+	"github.com/Appliscale/perun/logger"
+	"github.com/ghodss/yaml"
+)
+
+// InconsistencyConfiguration describes inconsistencies between specification and documentation.
+type InconsistencyConfiguration struct {
+	SpecificationInconsistency map[string]Property
+}
+
+// Property of inconsistency.
+type Property map[string][]string
+
+// ReadInconsistencyConfiguration reads the inconsistency configuration from file; if the file cannot be read, it logs a warning and returns an empty configuration.
+func ReadInconsistencyConfiguration(logger logger.LoggerInt) (config InconsistencyConfiguration) {
+	if path, ok := getUserConfigFile(os.Stat, "specification_inconsistency.yaml"); ok {
+		rawConfig, err := ioutil.ReadFile(path)
+		if err != nil {
+			logger.Warning("Could not read specification inconsistencies configuration file")
+			return
+		}
+
+		err = yaml.Unmarshal(rawConfig, &config)
+		if err != nil {
+			logger.Warning("Specification inconsistencies configuration file format is invalid")
+			return
+		}
+
+		return
+	}
+
+	logger.Warning("Specification inconsistencies configuration file not found")
+	return
+}
diff --git a/configurator/configurator.go b/configurator/configurator.go
index 319fae1..9f83c2f 100644
--- a/configurator/configurator.go
+++ b/configurator/configurator.go
@@ -1,14 +1,35 @@
+// Copyright 2018 Appliscale
+//
+// Maintainers and contributors are listed in README file inside repository.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package configurator allows to create configuration file main.yaml. package configurator import ( + "github.com/Appliscale/perun/cliparser" "github.com/Appliscale/perun/configuration" "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/logger" + "github.com/Appliscale/perun/myuser" "os" - "os/user" "strconv" + "strings" ) -var resourceSpecificationURL = map[string]string{ +// ResourceSpecificationURL contains links to Resource Specification for all regions. +var ResourceSpecificationURL = map[string]string{ "us-east-2": "https://dnwj8swjjbsbt.cloudfront.net", "us-east-1": "https://d1uauaxba7bl26.cloudfront.net", "us-west-1": "https://d68hl49wbnanq.cloudfront.net", @@ -22,113 +43,196 @@ var resourceSpecificationURL = map[string]string{ "eu-central-1": "https://d1mta8qj7i28i2.cloudfront.net", "eu-west-1": "https://d3teyb21fexa9r.cloudfront.net", "eu-west-2": "https://d1742qcu2c1ncx.cloudfront.net", + "eu-west-3": "https://d2d0mfegowb3wk.cloudfront.net", "sa-east-1": "https://d3c9jyj3w509b0.cloudfront.net", } -func FileName(context *context.Context) { - context.Logger.Always("Configure file could be in \n " + makeUserPath() + "\n /etc/perun") +// List of available regions. +var Regions = getAllRegions() + +// CreateRequiredFilesInConfigureMode creates main.yaml and .aws/credentials in configure mode. +func CreateRequiredFilesInConfigureMode(ctx *context.Context) { + homePath, pathError := myuser.GetUserHomeDir() + if pathError != nil { + ctx.Logger.Error(pathError.Error()) + } + myLogger := logger.CreateDefaultLogger() + homePath += "/.config/perun" + ctx.Logger.Always("Configure file could be in \n " + homePath + "\n /etc/perun") var yourPath string var yourName string - context.Logger.GetInput("Your path ", &yourPath) - context.Logger.GetInput("Filename ", &yourName) - findFile(yourPath+"/"+yourName, context) + ctx.Logger.GetInput("Your path", &yourPath) + ctx.Logger.GetInput("Filename", &yourName) + myProfile, myRegion := GetRegionAndProfile(&myLogger) + createConfigurationFile(yourPath+"/"+yourName, ctx.Logger, myProfile, myRegion) + *ctx, _ = context.GetContext(cliparser.ParseCliArguments, configuration.GetConfiguration, configuration.ReadInconsistencyConfiguration) + var answer string + ctx.Logger.GetInput("Do you want to create .aws/credentials for this profile? Y/N", &answer) + if strings.ToUpper(answer) == "Y" { + CreateAWSCredentialsFile(ctx, myProfile) + } } -func findFile(path string, context *context.Context) { - context.Logger.Always("File will be created in " + path) +// Creating main.yaml in user's path. 
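The configure flow introduced here can also be driven directly from code. The sketch below is illustrative only and prompts on stdin just like `perun configure`; it chains GetRegionAndProfile, CreateMainYaml and configuration.SaveToFile the same way createConfigurationFile does:

package main

import (
	"github.com/Appliscale/perun/configuration"
	"github.com/Appliscale/perun/configurator"
	"github.com/Appliscale/perun/logger"
	"github.com/Appliscale/perun/myuser"
)

func main() {
	log := logger.CreateDefaultLogger()

	// Prompts for a profile name and a region index from the printed list,
	// exactly as the configure mode does.
	profile, region := configurator.GetRegionAndProfile(&log)

	// CreateMainYaml additionally prompts for the temporary-files directory;
	// SaveToFile creates ~/.config/perun on demand and writes main.yaml.
	cfg := configurator.CreateMainYaml(&log, profile, region)

	home, err := myuser.GetUserHomeDir()
	if err != nil {
		log.Error(err.Error())
		return
	}
	configuration.SaveToFile(cfg, home+"/.config/perun/main.yaml", &log)
}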
+func createConfigurationFile(path string, myLogger logger.LoggerInt, myProfile string, myRegion string) { + myLogger.Always("File will be created in " + path) _, err := os.Stat(path) if os.IsNotExist(err) { - showRegions(context) - con := createConfig(context) - configuration.SaveToFile(con, path, *context.Logger) + con := CreateMainYaml(myLogger, myProfile, myRegion) + configuration.SaveToFile(con, path, myLogger) } else { - context.Logger.Always("File already exists in this path") - } -} + var answer string + myLogger.GetInput("File already exists in this path. Do you want to overwrite this file? Y/N", &answer) + if strings.ToUpper(answer) == "Y" { + con := CreateMainYaml(myLogger, myProfile, myRegion) + configuration.SaveToFile(con, path, myLogger) -func makeUserPath() (path string) { - usr, _ := user.Current() - path = usr.HomeDir - path = path + "/.config/perun" - return + } + } } -func showRegions(context *context.Context) { - regions := makeArrayRegions() - context.Logger.Always("Regions:") - for i := 0; i < len(regions); i++ { +//List of all available regions. +func ShowRegions(myLogger logger.LoggerInt) { + myLogger.Always("Regions:") + for i := 0; i < len(Regions); i++ { pom := strconv.Itoa(i) - context.Logger.Always("Number " + pom + " region " + regions[i]) + myLogger.Always("Number " + pom + " region " + Regions[i]) } } -func setRegions(context *context.Context) (region string, err bool) { +// Choosing one region. +func SetRegions(myLogger logger.LoggerInt) (region string, err bool) { var numberRegion int - context.Logger.GetInput("Choose region", &numberRegion) - regions := makeArrayRegions() - if numberRegion >= 0 && numberRegion < 14 { - region = regions[numberRegion] - context.Logger.Always("Your region is: " + region) + myLogger.GetInput("Choose region", &numberRegion) + if numberRegion >= 0 && numberRegion < len(ResourceSpecificationURL) { + region = Regions[numberRegion] + myLogger.Always("Your region is: " + region) err = true } else { - context.Logger.Error("Invalid region") + myLogger.Error("Invalid region") err = false } return } -func setProfile(context *context.Context) (profile string, err bool) { - context.Logger.GetInput("Input name of profile", &profile) +// Choosing one profile. +func setProfile(myLogger logger.LoggerInt) (profile string, err bool) { + myLogger.GetInput("Input name of profile", &profile) if profile != "" { - context.Logger.Always("Your profile is: " + profile) + myLogger.Always("Your profile is: " + profile) err = true } else { - context.Logger.Error("Invalid profile") + myLogger.Error("Invalid profile") err = false } return } -func createConfig(context *context.Context) configuration.Configuration { - myRegion, err := setRegions(context) +//GetRegionAndProfile gets region and profile from user. +func GetRegionAndProfile(myLogger logger.LoggerInt) (string, string) { + profile, err := setProfile(myLogger) for !err { - context.Logger.Always("Try again, invalid region") - myRegion, err = setRegions(context) + myLogger.Always("Try again, invalid profile") + profile, err = setProfile(myLogger) } - myProfile, err1 := setProfile(context) + ShowRegions(myLogger) + region, err1 := SetRegions(myLogger) for !err1 { - context.Logger.Always("Try again, invalid profile") - myProfile, err1 = setProfile(context) + myLogger.Always("Try again, invalid region") + region, err1 = SetRegions(myLogger) } - myResourceSpecificationURL := resourceSpecificationURL + return profile, region +} + +// Setting directory for temporary files. 
+func setTemporaryFilesDirectory(myLogger logger.LoggerInt) (path string) { + myLogger.GetInput("Directory for temporary files", &path) + myLogger.Always("Your temporary files directory is: " + path) + return path +} + +// CreateMainYaml creates new configuration file. +func CreateMainYaml(myLogger logger.LoggerInt, myProfile string, myRegion string) configuration.Configuration { + myTemporaryFilesDirectory := setTemporaryFilesDirectory(myLogger) + myResourceSpecificationURL := ResourceSpecificationURL myConfig := configuration.Configuration{ - DefaultProfile: myProfile, - DefaultRegion: myRegion, - SpecificationURL: myResourceSpecificationURL, - DefaultDecisionForMFA: false, - DefaultDurationForMFA: 3600, - DefaultVerbosity: "INFO"} + DefaultProfile: myProfile, + DefaultRegion: myRegion, + SpecificationURL: myResourceSpecificationURL, + DefaultDecisionForMFA: false, + DefaultDurationForMFA: 3600, + DefaultVerbosity: "INFO", + DefaultTemporaryFilesDirectory: myTemporaryFilesDirectory, + } return myConfig } -func makeArrayRegions() [14]string { - var regions [14]string - regions[0] = "us-east-1" - regions[1] = "us-east-2" - regions[2] = "us-west-1" - regions[3] = "us-west-2" - regions[4] = "ca-central-1" - regions[5] = "ca-central-1" - regions[6] = "eu-west-1" - regions[7] = "eu-west-2" - regions[8] = "ap-northeast-1" - regions[9] = "ap-northeast-2" - regions[10] = "ap-southeast-1" - regions[11] = "ap-southeast-2" - regions[12] = "ap-south-1" - regions[13] = "sa-east-1" - +// Array of regions. +func getAllRegions() []string { + var regions = []string{} + for region := range ResourceSpecificationURL { + regions = append(regions, region) + } return regions } + +// CreateAWSCredentialsFile creates .aws/credentials file based on information from user. The file contains access key and MFA serial. +func CreateAWSCredentialsFile(ctx *context.Context, profile string) { + if profile != "" { + ctx.Logger.Always("You haven't got .aws/credentials file for profile " + profile) + var awsAccessKeyID string + var awsSecretAccessKey string + var mfaSerial string + + ctx.Logger.GetInput("awsAccessKeyID", &awsAccessKeyID) + ctx.Logger.GetInput("awsSecretAccessKey", &awsSecretAccessKey) + ctx.Logger.GetInput("mfaSerial", &mfaSerial) + + homePath, pathError := myuser.GetUserHomeDir() + if pathError != nil { + ctx.Logger.Error(pathError.Error()) + } + path := homePath + "/.aws/credentials" + line := "[" + profile + "-long-term" + "]\n" + AppendStringToFile(path, line) + line = "aws_access_key_id" + " = " + awsAccessKeyID + "\n" + AppendStringToFile(path, line) + line = "aws_secret_access_key" + " = " + awsSecretAccessKey + "\n" + AppendStringToFile(path, line) + line = "mfa_serial" + " = " + mfaSerial + "\n" + AppendStringToFile(path, line) + } +} + +// CreateAWSConfigFile creates .aws/config file based on information from user. The file contains profile name, region and type of output. 
+func CreateAWSConfigFile(myLogger logger.LoggerInt, profile string, region string) { + var output string + myLogger.GetInput("Output", &output) + homePath, pathError := myuser.GetUserHomeDir() + if pathError != nil { + myLogger.Error(pathError.Error()) + } + path := homePath + "/.aws/config" + line := "[" + profile + "]\n" + AppendStringToFile(path, line) + line = "region" + " = " + region + "\n" + AppendStringToFile(path, line) + line = "output" + " = " + output + "\n" + AppendStringToFile(path, line) +} + +func AppendStringToFile(path, text string) error { + f, err := os.OpenFile(path, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600) + if err != nil { + return err + } + defer f.Close() + + _, err = f.WriteString(text) + if err != nil { + return err + } + return nil +} diff --git a/configurator/configurator_test.go b/configurator/configurator_test.go index 2013b77..13c5aee 100644 --- a/configurator/configurator_test.go +++ b/configurator/configurator_test.go @@ -1,3 +1,19 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package configurator import ( @@ -17,12 +33,6 @@ func TestConfigurePath(t *testing.T) { subproc.Wait() } -func TestMakeUserPath(t *testing.T) { - path := makeUserPath() - a := "/.config/perun" - assert.Containsf(t, path, a, "Inccorect path") -} - func TestSetProfile(t *testing.T) { subproc := exec.Command("cmd") input := "" @@ -36,7 +46,7 @@ func TestCreateConfig(t *testing.T) { myconfig := configuration.Configuration{ DefaultProfile: "profile", DefaultRegion: "region", - SpecificationURL: resourceSpecificationURL, + SpecificationURL: ResourceSpecificationURL, DefaultDecisionForMFA: false, DefaultDurationForMFA: 3600, DefaultVerbosity: "INFO"} @@ -46,8 +56,8 @@ func TestCreateConfig(t *testing.T) { } -func TestMakeArrayRegions(t *testing.T) { - region := makeArrayRegions() +func TestGetAllRegions(t *testing.T) { + region := getAllRegions() for i := 0; i < len(region); i++ { assert.NotEmptyf(t, region[i], "Incorrect region and URL") } diff --git a/context/context.go b/context/context.go index f02e4c7..477e6d1 100644 --- a/context/context.go +++ b/context/context.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -18,52 +18,70 @@ package context import ( + "os" + + "github.com/Appliscale/perun/awsapi" "github.com/Appliscale/perun/cliparser" "github.com/Appliscale/perun/configuration" "github.com/Appliscale/perun/logger" - "os" + "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/cloudformation" ) +// Context contains perun's logger, configuration, information about inconsistency +// between specification and documentation, and session. 
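For orientation, a minimal sketch (not part of the patch) of how a command wires up the three-reader GetContext and the AWS API initialisation introduced below; error handling is reduced to an early exit:

package main

import (
	"os"

	"github.com/Appliscale/perun/cliparser"
	"github.com/Appliscale/perun/configuration"
	"github.com/Appliscale/perun/context"
)

func main() {
	// GetContext parses os.Args itself, so this program would be started like
	// perun, e.g. `<binary> validate template.yaml`. The three readers are
	// injected as functions, which is what keeps the package testable with
	// the stubs used in context_test.go.
	ctx, err := context.GetContext(
		cliparser.ParseCliArguments,
		configuration.GetConfiguration,
		configuration.ReadInconsistencyConfiguration,
	)
	if err != nil {
		os.Exit(1)
	}

	// Only commands that reach AWS need this step: it refreshes the MFA
	// session token when required and plugs a CloudFormation client into
	// ctx.CloudFormation.
	ctx.InitializeAwsAPI()

	ctx.Logger.Info("region in use: " + ctx.Config.DefaultRegion)
}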
type Context struct { - CliArguments cliparser.CliArguments - Logger *logger.Logger - Config configuration.Configuration + CliArguments cliparser.CliArguments + Logger logger.LoggerInt + Config configuration.Configuration + InconsistencyConfig configuration.InconsistencyConfiguration + CloudFormation awsapi.CloudFormationAPI + CurrentSession *session.Session } type cliArgumentsParser func(args []string) (cliparser.CliArguments, error) -type configurationReader func(cliparser.CliArguments, *logger.Logger) (configuration.Configuration, error) +type configurationReader func(cliparser.CliArguments, logger.LoggerInt) (configuration.Configuration, error) +type inconsistenciesReader func(logger.LoggerInt) configuration.InconsistencyConfiguration -// Create CLI context. -func GetContext(cliArgParser cliArgumentsParser, confReader configurationReader) (context Context, err error) { - logger := logger.CreateDefaultLogger() +// GetContext creates CLI context. Creating logger and config and checking inconsistency. +func GetContext(cliArgParser cliArgumentsParser, confReader configurationReader, inconsistReader inconsistenciesReader) (context Context, err error) { + myLogger := logger.CreateDefaultLogger() cliArguments, err := cliArgParser(os.Args) if err != nil { - logger.Error(err.Error()) + myLogger.Error(err.Error()) return } if cliArguments.Quiet != nil { - logger.Quiet = *cliArguments.Quiet + myLogger.Quiet = *cliArguments.Quiet } if cliArguments.Yes != nil { - logger.Yes = *cliArguments.Yes + myLogger.Yes = *cliArguments.Yes } - config, err := confReader(cliArguments, &logger) + config, err := confReader(cliArguments, &myLogger) if err != nil { - logger.Error(err.Error()) + myLogger.Error(err.Error()) return } - logger.SetVerbosity(config.DefaultVerbosity) + myLogger.SetVerbosity(config.DefaultVerbosity) + + iconsistenciesConfig := inconsistReader(&myLogger) context = Context{ - CliArguments: cliArguments, - Logger: &logger, - Config: config, + CliArguments: cliArguments, + Logger: &myLogger, + Config: config, + InconsistencyConfig: iconsistenciesConfig, } - return } + +// InitializeAwsAPI creates session. +func (context *Context) InitializeAwsAPI() { + context.CurrentSession = InitializeSession(context) + context.CloudFormation = awsapi.NewAWSCloudFormation(cloudformation.New(context.CurrentSession)) +} diff --git a/context/context_test.go b/context/context_test.go index 422451c..d6a3910 100644 --- a/context/context_test.go +++ b/context/context_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. 
// @@ -33,16 +33,22 @@ func parseCliArgumentsValidStub(cliArguments cliparser.CliArguments) cliArgument } func getConfigurationValidStub(config configuration.Configuration) configurationReader { - return func(cliparser.CliArguments, *logger.Logger) (configuration.Configuration, error) { + return func(cliparser.CliArguments, logger.LoggerInt) (configuration.Configuration, error) { return config, nil } } +func getInconsistencyConfigurationValidStub(config configuration.InconsistencyConfiguration) inconsistenciesReader { + return func(logger.LoggerInt) configuration.InconsistencyConfiguration { + return config + } +} + func parseCliArgumentsErroneous(args []string) (cliparser.CliArguments, error) { return cliparser.CliArguments{}, errors.New("") } -func getConfigurationErroneous(cliparser.CliArguments, *logger.Logger) (configuration.Configuration, error) { +func getConfigurationErroneous(cliparser.CliArguments, logger.LoggerInt) (configuration.Configuration, error) { return configuration.Configuration{}, errors.New("") } @@ -50,18 +56,26 @@ func TestCheckContextBody(t *testing.T) { t.Run("CLI arguments returned from cliArgumentsParser are the same as the ones contained in context", func(t *testing.T) { cliArguments := cliparser.CliArguments{} config := configuration.Configuration{} + inconsistencyConfig := configuration.InconsistencyConfiguration{} + cliArgParserStub := parseCliArgumentsValidStub(cliArguments) confReaderStub := getConfigurationValidStub(config) - context, _ := GetContext(cliArgParserStub, confReaderStub) + inconsistencyConfReaderStub := getInconsistencyConfigurationValidStub(inconsistencyConfig) + + context, _ := GetContext(cliArgParserStub, confReaderStub, inconsistencyConfReaderStub) assert.Equal(t, cliArguments, context.CliArguments) }) t.Run("Config returned from configurationReader is the same as the one contained in context", func(t *testing.T) { cliArguments := cliparser.CliArguments{} config := configuration.Configuration{} + inconsistencyConfig := configuration.InconsistencyConfiguration{} + cliArgsParserStub := parseCliArgumentsValidStub(cliArguments) confReaderStub := getConfigurationValidStub(config) - context, _ := GetContext(cliArgsParserStub, confReaderStub) + inconsistencyConfReaderStub := getInconsistencyConfigurationValidStub(inconsistencyConfig) + + context, _ := GetContext(cliArgsParserStub, confReaderStub, inconsistencyConfReaderStub) assert.Equal(t, config, context.Config) }) } @@ -69,15 +83,23 @@ func TestCheckContextBody(t *testing.T) { func TestCheckErroneousDependencyReturn(t *testing.T) { t.Run("Should return an error if cliArgumentsParser returns the error", func(t *testing.T) { config := configuration.Configuration{} + inconsistencyConfig := configuration.InconsistencyConfiguration{} + confReaderStub := getConfigurationValidStub(config) - _, err := GetContext(parseCliArgumentsErroneous, confReaderStub) + inconsistencyConfReaderStub := getInconsistencyConfigurationValidStub(inconsistencyConfig) + + _, err := GetContext(parseCliArgumentsErroneous, confReaderStub, inconsistencyConfReaderStub) assert.NotNil(t, err) }) t.Run("Should return an error if configurationReader returns the error", func(t *testing.T) { cliArguments := cliparser.CliArguments{} + inconsistencyConfig := configuration.InconsistencyConfiguration{} + cliArgParserStub := parseCliArgumentsValidStub(cliArguments) - _, err := GetContext(cliArgParserStub, getConfigurationErroneous) + inconsistencyConfReaderStub := getInconsistencyConfigurationValidStub(inconsistencyConfig) + + _, err := 
GetContext(cliArgParserStub, getConfigurationErroneous, inconsistencyConfReaderStub) assert.NotNil(t, err) }) } diff --git a/context/mysession.go b/context/mysession.go new file mode 100644 index 0000000..bc3d766 --- /dev/null +++ b/context/mysession.go @@ -0,0 +1,157 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package context + +import ( + "errors" + "github.com/Appliscale/perun/cliparser" + "os" + "os/user" + "time" + + "github.com/Appliscale/perun/utilities" + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/aws/session" + "github.com/aws/aws-sdk-go/service/sts" + "github.com/go-ini/ini" +) + +const dateFormat = "2006-01-02 15:04:05" + +// InitializeSession creates session and updates session token. +func InitializeSession(context *Context) *session.Session { + tokenError := UpdateSessionToken(context.Config.DefaultProfile, context.Config.DefaultRegion, context.Config.DefaultDurationForMFA, context) + if tokenError != nil { + context.Logger.Error(tokenError.Error()) + os.Exit(1) + } + currentSession, sessionError := CreateSession(context, context.Config.DefaultProfile, &context.Config.DefaultRegion) + if sessionError != nil { + context.Logger.Error(sessionError.Error()) + os.Exit(1) + } + return currentSession +} + +// CreateSession creates session based on profile and region. +func CreateSession(context *Context, profile string, region *string) (*session.Session, error) { + context.Logger.Info("Creating new session. Profile: " + profile + " Region: " + *region) + + currentSession, sessionWithOptionError := session.NewSessionWithOptions( + session.Options{ + Config: aws.Config{ + Region: region, + }, + Profile: profile, + }) + + if sessionWithOptionError != nil { + return nil, sessionWithOptionError + } + + return currentSession, nil +} + +// UpdateSessionToken updates session token for profile in .aws/credentials. 
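The UpdateSessionToken function that follows relies on a specific ~/.aws/credentials layout: long-lived keys and the MFA device live in a "<profile>-long-term" section, while the temporary STS credentials are written back under "<profile>". A small sketch using the same go-ini calls (the profile name "default" is only an example) shows that layout being inspected:

package main

import (
	"fmt"
	"os/user"

	"github.com/go-ini/ini"
)

func main() {
	usr, err := user.Current()
	if err != nil {
		panic(err)
	}

	cfg, err := ini.Load(usr.HomeDir + "/.aws/credentials")
	if err != nil {
		panic(err)
	}

	// Long-lived keys and the MFA device are expected in "<profile>-long-term"...
	longTerm, err := cfg.GetSection("default-long-term")
	if err != nil {
		panic(err) // without this section perun cannot obtain a session token
	}
	fmt.Println("mfa_serial:", longTerm.Key("mfa_serial").Value())

	// ...while the temporary credentials and their expiration date are
	// written back into "<profile>".
	if section, err := cfg.GetSection("default"); err == nil {
		fmt.Println("expiration:", section.Key("expiration").Value())
	}
}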
+func UpdateSessionToken(profile string, region string, defaultDuration int64, context *Context) error { + if *context.CliArguments.MFA || *context.CliArguments.Mode == cliparser.MfaMode { + currentUser, userError := user.Current() + if userError != nil { + return userError + } + + credentialsFilePath := currentUser.HomeDir + "/.aws/credentials" + configuration, loadCredentialsError := ini.Load(credentialsFilePath) + if loadCredentialsError != nil { + return loadCredentialsError + } + + section, sectionError := configuration.GetSection(profile) + if sectionError != nil { + section, sectionError = configuration.NewSection(profile) + if sectionError != nil { + return sectionError + } + } + + profileLongTerm := profile + "-long-term" + sectionLongTerm, profileLongTermError := configuration.GetSection(profileLongTerm) + if profileLongTermError != nil { + return profileLongTermError + } + + sessionToken := section.Key("aws_session_token") + expiration := section.Key("expiration") + + expirationDate, dataError := time.Parse(dateFormat, section.Key("expiration").Value()) + if dataError == nil { + context.Logger.Info("Session token will expire in " + utilities.TruncateDuration(time.Since(expirationDate)).String() + " (" + expirationDate.Format(dateFormat) + ")") + } + + mfaDevice := sectionLongTerm.Key("mfa_serial").Value() + if mfaDevice == "" { + return errors.New("There is no mfa_serial for the profile " + profileLongTerm + ". If you haven't used --mfa option you can change the default decision for MFA in the configuration file") + } + + if sessionToken.Value() == "" || expiration.Value() == "" || time.Since(expirationDate).Nanoseconds() > 0 { + currentSession, sessionError := session.NewSessionWithOptions( + session.Options{ + Config: aws.Config{ + Region: ®ion, + }, + Profile: profileLongTerm, + }) + if sessionError != nil { + return sessionError + } + + var tokenCode string + sessionError = context.Logger.GetInput("MFA token code", &tokenCode) + if sessionError != nil { + return sessionError + } + + var duration int64 + if defaultDuration == 0 { + sessionError = context.Logger.GetInput("Duration", &duration) + if sessionError != nil { + return sessionError + } + } else { + duration = defaultDuration + } + + stsSession := sts.New(currentSession) + newToken, tokenError := stsSession.GetSessionToken(&sts.GetSessionTokenInput{ + DurationSeconds: &duration, + SerialNumber: aws.String(mfaDevice), + TokenCode: &tokenCode, + }) + if tokenError != nil { + return tokenError + } + + section.Key("aws_access_key_id").SetValue(*newToken.Credentials.AccessKeyId) + section.Key("aws_secret_access_key").SetValue(*newToken.Credentials.SecretAccessKey) + sessionToken.SetValue(*newToken.Credentials.SessionToken) + section.Key("expiration").SetValue(newToken.Credentials.Expiration.Format(dateFormat)) + + configuration.SaveTo(credentialsFilePath) + } + } + return nil +} diff --git a/converter/converter.go b/converter/converter.go deleted file mode 100644 index b8ba90a..0000000 --- a/converter/converter.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2017 Appliscale -// -// Maintainers and contributors are listed in README file inside repository. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package converter provides tools for JSON/YAML CloudFormation templates -// conversion. -package converter - -import ( - "encoding/json" - "errors" - "github.com/Appliscale/perun/context" - "github.com/Appliscale/perun/intrinsicsolver" - "github.com/Appliscale/perun/logger" - "github.com/asaskevich/govalidator" - "github.com/ghodss/yaml" - "io/ioutil" - "os" -) - -// Read template from the file, convert it and check if it has valid structure. -// Then save converted template to file. -func Convert(context *context.Context) error { - rawTemplate, err := ioutil.ReadFile(*context.CliArguments.TemplatePath) - if err != nil { - return err - } - format := detectFormatFromContent(rawTemplate) - var outputTemplate []byte - - // If input type file is JSON convert to YAML. - if format == "JSON" { - outputTemplate, err = jsonToYaml(rawTemplate) - if err != nil { - return err - } - saveToFile(outputTemplate, *context.CliArguments.OutputFilePath, context.Logger) - - // If input type file is YAML, check all functions and create JSON (with or not --pretty-print flag). - } else if format == "YAML" { - preprocessed, preprocessingError := intrinsicsolver.FixFunctions(rawTemplate, context.Logger, "multiline", "elongate", "correctlong") - if preprocessingError != nil { - context.Logger.Error(preprocessingError.Error()) - } - if *context.CliArguments.PrettyPrint == false { - outputTemplate, err = yamlToJson(preprocessed) - } else if *context.CliArguments.PrettyPrint == true { - outputTemplate, err = yamlToPrettyJson(preprocessed) - } - if err != nil { - return err - } - err = saveToFile(outputTemplate, *context.CliArguments.OutputFilePath, context.Logger) - if err != nil { - return err - } - } else { - context.Logger.Always(format) - return nil - } - - return nil -} - -func jsonToYaml(jsonTemplate []byte) ([]byte, error) { - if !govalidator.IsJSON(string(jsonTemplate)) { - return nil, errors.New("This is not a valid JSON file") - } - - yamlTemplate, error := yaml.JSONToYAML(jsonTemplate) - - return yamlTemplate, error -} - -func yamlToJson(yamlTemplate []byte) ([]byte, error) { - jsonTemplate, error := yaml.YAMLToJSON(yamlTemplate) - return jsonTemplate, error -} - -func yamlToPrettyJson(yamlTemplate []byte) ([]byte, error) { - var YAMLObj interface{} - templateError := yaml.Unmarshal(yamlTemplate, &YAMLObj) - - jsonTemplate, templateError := json.MarshalIndent(YAMLObj, "", " ") - - return jsonTemplate, templateError - -} - -func saveToFile(template []byte, path string, logger *logger.Logger) error { - outputFile, err := os.Create(path) - if err != nil { - return err - } - - defer outputFile.Close() - - _, err = outputFile.Write(template) - if err != nil { - return err - } - - return nil -} - -func detectFormatFromContent(rawTemplate []byte) (format string) { - _, errorYAML := jsonToYaml(rawTemplate) - _, errorJSON := yamlToJson(rawTemplate) - - if errorYAML == nil { - return "JSON" - } else if errorJSON == nil { - return "YAML" - } - return "Unsupported file format. The input file must be either a valid JSON or YAML file." 
-} diff --git a/defaults/blocked.json b/defaults/blocked.json new file mode 100644 index 0000000..ca713b6 --- /dev/null +++ b/defaults/blocked.json @@ -0,0 +1,10 @@ +{ + "Statement": [ + { + "Effect": "Deny", + "Principal": "*", + "Action": "Update:*", + "Resource": "*" + } + ] +} \ No newline at end of file diff --git a/defaults/main.yaml b/defaults/main.yaml index eb47418..697fc46 100644 --- a/defaults/main.yaml +++ b/defaults/main.yaml @@ -1,8 +1,9 @@ DefaultProfile: default DefaultRegion: us-east-1 DefaultDurationForMFA: 3600 -DefaultDecisionForMFA: true +DefaultDecisionForMFA: false DefaultVerbosity: INFO +DefaultTemporaryFilesDirectory: SpecificationURL: us-east-2: "https://dnwj8swjjbsbt.cloudfront.net" us-east-1: "https://d1uauaxba7bl26.cloudfront.net" diff --git a/defaults/specification_inconsistency.yaml b/defaults/specification_inconsistency.yaml new file mode 100644 index 0000000..040ead6 --- /dev/null +++ b/defaults/specification_inconsistency.yaml @@ -0,0 +1,4 @@ +SpecificationInconsistency: + AWS::CloudFront::Distribution.DistributionConfig: + DefaultCacheBehavior: + - Required \ No newline at end of file diff --git a/defaults/style.yaml b/defaults/style.yaml new file mode 100644 index 0000000..97d833c --- /dev/null +++ b/defaults/style.yaml @@ -0,0 +1,33 @@ +yaml: + allowedQuotes: + double: true + single: true + noquotes: true + allowedLists: + inline: true + dash: true + continuationIndent: # checked only when indent is required + required: true + value: 2 + +json: + spaces: + after: [':'] + before: [] + +global: + lineLength: + required: true + value: 100 + indent: + required: true + value: 2 + blankLinesAllowed: true + +# AWS Specific + requiredFields: + templateDescription: true + parametersDescription: true + + namingConventions: + logicalNames: ".+" #regex diff --git a/defaults/unblocked.json b/defaults/unblocked.json new file mode 100644 index 0000000..faa774e --- /dev/null +++ b/defaults/unblocked.json @@ -0,0 +1,10 @@ +{ + "Statement": [ + { + "Effect": "Allow", + "Principal": "*", + "Action": "Update:*", + "Resource": "*" + } + ] +} \ No newline at end of file diff --git a/estimatecost/estimatecost.go b/estimatecost/estimatecost.go new file mode 100644 index 0000000..439ac70 --- /dev/null +++ b/estimatecost/estimatecost.go @@ -0,0 +1,57 @@ +// Copyright 2017 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package estimatecost + +import ( + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/parameters" + "github.com/aws/aws-sdk-go/service/cloudformation" + "io/ioutil" +) + +func estimateCosts(context *context.Context, template *string) (err error) { + templateParameters, err := parameters.ResolveParameters(context) + if err != nil { + context.Logger.Error(err.Error()) + return + } + templateCostInput := cloudformation.EstimateTemplateCostInput{ + TemplateBody: template, + Parameters: templateParameters, + } + output, err := context.CloudFormation.EstimateTemplateCost(&templateCostInput) + if err != nil { + context.Logger.Error(err.Error()) + return + } + context.Logger.Info("Costs estimation: " + *output.Url) + return +} + +func EstimateCosts(ctx *context.Context) { + templatePath := *ctx.CliArguments.TemplatePath + rawTemplate, err := ioutil.ReadFile(templatePath) + if err != nil { + ctx.Logger.Error(err.Error()) + return + } + templateBody := string(rawTemplate) + estimateError := estimateCosts(ctx, &templateBody) + if estimateError != nil { + ctx.Logger.Error(estimateError.Error()) + } +} diff --git a/estimatecost/estimatecost_test.go b/estimatecost/estimatecost_test.go new file mode 100644 index 0000000..8abb15c --- /dev/null +++ b/estimatecost/estimatecost_test.go @@ -0,0 +1,39 @@ +package estimatecost + +import ( + "github.com/Appliscale/perun/parameters" + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/aws/aws-sdk-go/service/cloudformation" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/assert" + "io/ioutil" + "testing" +) + +func TestEstimateCosts(t *testing.T) { + templatePath := "../validator/test_resources/test_template.yaml" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "estimate-cost", templatePath}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + ctx.CloudFormation = mockAWSPI + + templateBodyBytes, err := ioutil.ReadFile(templatePath) + assert.NoError(t, err) + templateBody := string(templateBodyBytes) + templateParameters, err := parameters.ResolveParameters(ctx) + assert.NoError(t, err) + + url := "url" + mockAWSPI. + EXPECT(). + EstimateTemplateCost(&cloudformation.EstimateTemplateCostInput{ + TemplateBody: &templateBody, + Parameters: templateParameters, + }). + Times(1). 
+ Return(&cloudformation.EstimateTemplateCostOutput{Url: &url}, nil) + + EstimateCosts(ctx) +} diff --git a/formula.sh b/formula.sh new file mode 100644 index 0000000..4848061 --- /dev/null +++ b/formula.sh @@ -0,0 +1,32 @@ +#!/bin/bash +wget https://github.com/Appliscale/perun/archive/$1.tar.gz +sha=$(sha256sum $1.tar.gz | awk '{print $1}') +rm $1.tar.gz +cat < Formula/perun.rb +class Perun < Formula + desc "Swiss army knife for AWS CloudFormation templates" + homepage "https://github.com/Appliscale/perun" + url "https://github.com/Appliscale/perun/archive/$1.tar.gz" + sha256 "$sha" + + depends_on "go" => :build + + def install + ENV["GOPATH"] = buildpath + ENV.prepend_create_path "PATH", buildpath/"bin" + + dir = buildpath/"src/github.com/Appliscale/perun" + dir.install buildpath.children - [buildpath/".brew_home"] + + cd dir do + system "make", "all" + end + + bin.install "bin/perun" + end + + test do + system "#{bin}/perun", "--help" + end +end +EOT diff --git a/helpers/formathelpers.go b/helpers/formathelpers.go new file mode 100644 index 0000000..cb8218d --- /dev/null +++ b/helpers/formathelpers.go @@ -0,0 +1,240 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package helpers has some useful functions to choose parser and ease scan maps and slices. +package helpers + +import ( + "encoding/json" + "errors" + "path" + "strconv" + "strings" + + "github.com/Appliscale/perun/intrinsicsolver" + "github.com/Appliscale/perun/logger" + "github.com/Appliscale/perun/validator/template" + "github.com/awslabs/goformation" + "github.com/awslabs/goformation/cloudformation" + "github.com/ghodss/yaml" + "regexp" +) + +// GetParser chooses parser based on file extension. +func GetParser(filename string) (func([]byte, template.Template, logger.LoggerInt) (cloudformation.Template, error), error) { + templateFileExtension := path.Ext(filename) + if templateFileExtension == ".json" { + return ParseJSON, nil + } else if templateFileExtension == ".yaml" || templateFileExtension == ".yml" { + return ParseYAML, nil + } else { + return nil, errors.New("Invalid template file format.") + } +} + +// ParseJSON parses JSON template file to cloudformation template. 
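As a quick illustration of how GetParser above might be wired up, here is a minimal, illustrative sketch. The template path and the zero-value Logger are assumptions made for the example (not code from this change), and it assumes *logger.Logger satisfies LoggerInt, as the rest of this diff suggests.

package main

import (
	"fmt"
	"io/ioutil"

	"github.com/Appliscale/perun/helpers"
	"github.com/Appliscale/perun/logger"
	"github.com/Appliscale/perun/validator/template"
)

func main() {
	templatePath := "template.yaml" // hypothetical path
	raw, err := ioutil.ReadFile(templatePath)
	if err != nil {
		fmt.Println(err)
		return
	}

	// GetParser picks ParseJSON or ParseYAML based on the file extension.
	parse, err := helpers.GetParser(templatePath)
	if err != nil {
		fmt.Println(err)
		return
	}

	// A zero-value Logger is enough for a sketch; perun normally passes the logger from its Context.
	myLogger := logger.Logger{}
	parsed, err := parse(raw, template.Template{}, &myLogger)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("resources parsed:", len(parsed.Resources))
}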
+func ParseJSON(templateFile []byte, refTemplate template.Template, logger logger.LoggerInt) (template cloudformation.Template, err error) { + err = json.Unmarshal(templateFile, &refTemplate) + if err != nil { + if syntaxError, isSyntaxError := err.(*json.SyntaxError); isSyntaxError { + syntaxOffset := int(syntaxError.Offset) + line, character := lineAndCharacter(string(templateFile), syntaxOffset) + logger.Error("Syntax error at line " + strconv.Itoa(line) + ", column " + strconv.Itoa(character)) + } else if typeError, isTypeError := err.(*json.UnmarshalTypeError); isTypeError { + typeOffset := int(typeError.Offset) + line, character := lineAndCharacter(string(templateFile), typeOffset) + logger.Error("Type error at line " + strconv.Itoa(line) + ", column " + strconv.Itoa(character)) + } + return template, err + } + + tempJSON, err := goformation.ParseJSON(templateFile) + if err != nil { + logger.Error(err.Error()) + } + + returnTemplate := *tempJSON + + return returnTemplate, nil +} + +// ParseYAML parses YAML template file to cloudformation template. +func ParseYAML(templateFile []byte, refTemplate template.Template, logger logger.LoggerInt) (template cloudformation.Template, err error) { + err = yaml.Unmarshal(templateFile, &refTemplate) + if err != nil { + return template, err + } + for resource := range refTemplate.Resources { + var validDeletionPolicy = regexp.MustCompile("(^$)|(Delete)$|(Retain)$|(Snapshot)$") + if !validDeletionPolicy.MatchString(refTemplate.Resources[resource].DeletionPolicy) { + err = errors.New("Deletion Policy in resource: " + resource + " has to be a string literal, cannot be parametrized") + } + } + preprocessed, preprocessingError := intrinsicsolver.FixFunctions(templateFile, logger, "multiline", "elongate", "correctlong") + if preprocessingError != nil { + logger.Error(preprocessingError.Error()) + } + tempYAML, parseError := goformation.ParseYAML(preprocessed) + if parseError != nil { + return *cloudformation.NewTemplate(), parseError + } + findFnImportValue(preprocessed, tempYAML) + returnTemplate := *tempYAML + + return returnTemplate, err +} + +// Looking for ImportValue in a template. 
+func findFnImportValue(templateFile []byte, tempYAML *cloudformation.Template) error { + refTemplate := template.Template{} + yaml.Unmarshal(templateFile, &refTemplate) + resources := refTemplate.Resources + for resourceName, resourceValue := range resources { + var path []string + startPath := []string{resourceName, "Properties"} + path = startPath + properties := resourceValue.Properties + for name, propertyValue := range properties { + switch propertyValue.(type) { + case []interface{}: + { + for _, value := range propertyValue.([]interface{}) { + switch value.(type) { + case map[string]interface{}: + for key, val := range value.(map[string]interface{}) { + if strings.Contains(key, "ImportValue") { + addToPathAndReplace(path, name, val.(string), tempYAML, startPath) + } + } + case interface{}: + { + if _, ok := value.([]interface{}); ok { + for _, val := range value.([]interface{}) { + if strings.Contains(val.(string), "ImportValue") { + addToPathAndReplace(path, name, val.(string), tempYAML, startPath) + } + } + + } else if _, ok := value.(string); ok { + if strings.Contains(value.(string), "ImportValue") { + addToPathAndReplace(path, name, value.(string), tempYAML, startPath) + } + } + } + default: + return errors.New("Unsupported type") + } + + } + + } + case string: + { + if strings.Contains(propertyValue.(string), "ImportValue") { + addToPathAndReplace(path, name, propertyValue.(string), tempYAML, startPath) + } + } + case map[string]interface{}: + { + for key, val := range propertyValue.(map[string]interface{}) { + if strings.Contains(key, "ImportValue") { + addToPathAndReplace(path, name, val.(string), tempYAML, startPath) + } + } + } + default: + return errors.New("Unsupported type") + } + } + } + return nil +} + +// Replace nil with correct value. +func replaceImportValue(path []string, cfTemplate *cloudformation.Template) error { + len := len(path) + if len > 2 { + resource := cfTemplate.Resources[path[0]] + resourceValue, ok := resource.(map[string]interface{}) + if !ok { + return errors.New("Error during replacing") + } + name := resourceValue[path[1]] + value, ok1 := name.(map[string]interface{}) + if !ok1 { + return errors.New("Error during replacing") + } + array := []string{path[3]} + value[path[2]] = array + + } + return nil +} + +// Path helps to find where is nil instead of correct value. +func addToPathAndReplace(path []string, name string, value string, tempYAML *cloudformation.Template, startPath []string) error { + path = append(path, name) + path = append(path, value) + err := replaceImportValue(path, tempYAML) + if err != nil { + return err + } + path = startPath + return nil +} + +// PrettyPrintJSON prepares JSON file with indent to ease reading it. +func PrettyPrintJSON(toPrint interface{}) ([]byte, error) { + return json.MarshalIndent(toPrint, "", " ") +} + +func lineAndCharacter(input string, offset int) (line int, character int) { + lf := rune(0x0A) + + if offset > len(input) || offset < 0 { + return 0, 0 + } + + line = 1 + + for i, b := range input { + if b == lf { + if i < offset { + line++ + character = 0 + } + } else { + character++ + } + if i == offset { + break + } + } + return line, character +} + +// CountLeadingSpaces counts leading spaces. It's used in checkYamlIndentation() to find indentation error in template. 
+func CountLeadingSpaces(line string) int { + i := 0 + for _, runeValue := range line { + if runeValue == ' ' { + i++ + } else { + break + } + } + return i +} diff --git a/helpers/formathelpers_test.go b/helpers/formathelpers_test.go new file mode 100644 index 0000000..b2e876e --- /dev/null +++ b/helpers/formathelpers_test.go @@ -0,0 +1,72 @@ +package helpers + +import ( + "github.com/awslabs/goformation/cloudformation" + "github.com/stretchr/testify/assert" + "reflect" + "testing" +) + +func TestGetParser(t *testing.T) { + parser, err := GetParser("myfile.json") + assert.Equal(t, reflect.ValueOf(ParseJSON), reflect.ValueOf(parser)) + assert.Nil(t, err) + + parser, err = GetParser("myfile.yaml") + assert.Equal(t, reflect.ValueOf(ParseYAML), reflect.ValueOf(parser)) + assert.Nil(t, err) + + parser, err = GetParser("myfile.yml") + assert.Equal(t, reflect.ValueOf(ParseYAML), reflect.ValueOf(parser)) + assert.Nil(t, err) + + parser, err = GetParser("myfile.alamakota") + assert.NotEqual(t, reflect.ValueOf(ParseYAML), reflect.ValueOf(parser)) + assert.NotEqual(t, reflect.ValueOf(ParseJSON), reflect.ValueOf(parser)) + assert.NotNil(t, err) +} + +func TestCountLeadingSpaces(t *testing.T) { + assert.Equal(t, 4, CountLeadingSpaces(" dskjhfmasjkd")) + assert.Equal(t, 0, CountLeadingSpaces("ajksdasd")) + assert.Equal(t, 0, CountLeadingSpaces("a sdkajsd")) + assert.Equal(t, 0, CountLeadingSpaces(", ")) +} + +func TestLineAndCharacter(t *testing.T) { + line, character := lineAndCharacter("0123456789asd", 8) + assert.Equal(t, 1, line) + assert.Equal(t, 9, character) + + line, character = lineAndCharacter("0123456789\nasd", 11) + assert.Equal(t, 2, line) + assert.Equal(t, 1, character) +} + +func TestFindFnImportValue(t *testing.T) { + var templateFile = make([]byte, 0) + tempYAML := cloudformation.Template{} + err := findFnImportValue(templateFile, &tempYAML) + assert.Nilf(t, err, "Error should be nil") +} + +func TestReplaceImportValue(t *testing.T) { + path := []string{"Resorce", "Property", "Name", "Value"} + cfTemplate := cloudformation.Template{} + + err := replaceImportValue(path, &cfTemplate) + assert.NotNilf(t, err, "Error should not be nil") + +} + +func TestAddToPathAndReplace(t *testing.T) { + path := []string{} + name := "name" + value := "value" + tempYAML := cloudformation.Template{} + startPath := []string{} + + err := addToPathAndReplace(path, name, value, &tempYAML, startPath) + assert.Nilf(t, err, "Error should be nil") + +} diff --git a/helpers/listhelpers.go b/helpers/listhelpers.go new file mode 100644 index 0000000..ea4bbf9 --- /dev/null +++ b/helpers/listhelpers.go @@ -0,0 +1,110 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package helpers + +// SliceContains checks if slice contains given string. 
+func SliceContains(list []string, a string) bool { + for _, b := range list { + if b == a { + return true + } + } + return false +} + +// IsPlainMap checks if map is plain. Plain map means that it's non-nil and it doesn't contain nested maps. +//It's used in checkWhereIsNil(). +func IsPlainMap(mp map[string]interface{}) bool { + // First we check is it more complex. If so - it is worth investigating and we should stop checking. + for _, m := range mp { + if _, ok := m.(map[string]interface{}); ok { + return false + } else if _, ok := m.([]interface{}); ok { + return false + } + } + // Ok, it isn't. So is there any ? + if mapContainsNil(mp) { // Yes, it is - so it is a map worth investigating. This is not the map we're looking for. + return false + } + + return true // There is no and no complexity - it is a plain, non-nil map. +} + +// IsPlainSlice checks if slice is plain. Slice is plain if it's non-nil and doesn't contain nested maps. +//It's used in checkWhereIsNil(). +func IsPlainSlice(slc []interface{}) bool { + // The same flow as in `isPlainMap` function. + for _, s := range slc { + if _, ok := s.(map[string]interface{}); ok { + return false + } else if _, ok := s.([]interface{}); ok { + return false + } + } + + if sliceContainsNil(slc) { + return false + } + + return true +} + +// Discard looks for elements which are not the same and return only unique. +func Discard(slice []interface{}, n interface{}) []interface{} { + result := []interface{}{} + for _, s := range slice { + if s != n { + result = append(result, s) + } + } + return result +} + +// IsNonStringFloatBool checks if the element is non-string, non-float64, non-boolean. Then it is another node or . There is no other option. +func IsNonStringFloatBool(v interface{}) bool { + var isString, isFloat, isBool bool + if _, ok := v.(string); ok { + isString = true + } else if _, ok := v.(float64); ok { + isFloat = true + } else if _, ok := v.(bool); ok { + isBool = true + } + if !isString && !isFloat && !isBool { + return true + } + return false +} + +func mapContainsNil(mp map[string]interface{}) bool { + for _, m := range mp { + if m == nil { + return true + } + } + return false +} + +func sliceContainsNil(slice []interface{}) bool { + for _, s := range slice { + if s == nil { + return true + } + } + return false +} diff --git a/helpers/listhelpers_test.go b/helpers/listhelpers_test.go new file mode 100644 index 0000000..1cf6226 --- /dev/null +++ b/helpers/listhelpers_test.go @@ -0,0 +1,57 @@ +package helpers + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestSliceContains(t *testing.T) { + assert.True(t, SliceContains([]string{"asd", "zxc", "qwe"}, "asd")) + assert.False(t, SliceContains([]string{"asd", "qwe", "zxc"}, "kjg")) + assert.True(t, SliceContains([]string{"a", "b", "c"}, "a")) + assert.False(t, SliceContains([]string{"asd", "bnm", "cvb"}, "a")) +} + +func TestIsPlainMap(t *testing.T) { + assert.True(t, IsPlainMap(map[string]interface{}{"asd": 1, "qwe": "a", "zxc": 1.2})) + assert.False(t, IsPlainMap(map[string]interface{}{"asd": 1, "qwe": map[string]interface{}{"xyz": 2}, "zxc": 1.2})) +} + +func TestIsPlainSlice(t *testing.T) { + assert.True(t, IsPlainSlice([]interface{}{"asd", 1, 2.3})) + assert.False(t, IsPlainSlice([]interface{}{"asd", 1, map[string]interface{}{"asd": 12, "aqwe": "asddfds"}})) +} + +func TestIsNonStringFloatBool(t *testing.T) { + assert.True(t, IsNonStringFloatBool([]interface{}{"asd", 2})) + assert.True(t, IsNonStringFloatBool([]string{"asd", "2"})) + assert.True(t, 
IsNonStringFloatBool(2)) + assert.False(t, IsNonStringFloatBool(2.3)) + assert.False(t, IsNonStringFloatBool("asd")) + assert.False(t, IsNonStringFloatBool(true)) +} + +func TestDiscard(t *testing.T) { + var newSlice = Discard([]interface{}{1, 2, 3}, 1) + assert.NotEmpty(t, newSlice) + assert.True(t, newSlice[0] == 2) + assert.True(t, newSlice[1] == 3) + newSlice = Discard([]interface{}{1, 2, 3}, 2) + assert.NotEmpty(t, newSlice) + assert.True(t, newSlice[0] == 1) + assert.True(t, newSlice[1] == 3) + newSlice = Discard([]interface{}{1, 2, 3}, 3) + assert.NotEmpty(t, newSlice) + assert.True(t, newSlice[0] == 1) + assert.True(t, newSlice[1] == 2) +} + +func TestMapContainsNil(t *testing.T) { + assert.True(t, mapContainsNil(map[string]interface{}{"asd": 1, "qwe": nil})) + assert.False(t, mapContainsNil(map[string]interface{}{"asd": 1, "qwe": 34.3, "ewrt": "qwer"})) +} + +func TestSliceContainsNil(t *testing.T) { + assert.True(t, sliceContainsNil([]interface{}{1, "asdfs", nil, 2.3})) + assert.False(t, sliceContainsNil([]interface{}{1, "asdfs", 2.3})) +} diff --git a/intrinsicsolver/checkIndent.go b/intrinsicsolver/checkIndent.go deleted file mode 100644 index 8eeeb15..0000000 --- a/intrinsicsolver/checkIndent.go +++ /dev/null @@ -1,10 +0,0 @@ -package intrinsicsolver - -// Function indentations checks how much an element is indented by counting all the spaces encountered in searching for the first non-space character in line. -func indentations(line string) int { - var i int - for string(line[i]) == " " { - i++ - } - return i -} diff --git a/intrinsicsolver/elongateForms.go b/intrinsicsolver/elongateForms.go index 80489b0..c80e527 100644 --- a/intrinsicsolver/elongateForms.go +++ b/intrinsicsolver/elongateForms.go @@ -1,6 +1,23 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package intrinsicsolver import ( + "regexp" "strings" ) @@ -15,26 +32,91 @@ func elongateForms(line *string, lines *[]string, idx int, name string) { full := fullForm(long) split := strings.Split(*line, short) if idx+1 < len(pLines) { - if strings.Contains(*line, name) && strings.Contains(pLines[idx+1], "-") && (len(split) != 2) { + if strings.Contains(*line, name) && strings.Contains(pLines[idx+1], "- ") && (len(split) != 2) { // If so - we don't have to surround it with quotes. 
if strings.Contains(*line, short) && !strings.Contains(*line, "|") { *line = strings.Replace(*line, short, full, -1) } else if strings.Contains(*line, short) && strings.Contains(*line, "|") { - *line = strings.Replace(*line, (short + " |"), full, -1) + *line = strings.Replace(*line, (short + " |"), full+" |", -1) } } else if strings.Contains(*line, name) { + + line = addQuotes(short, split, line) + + newFunctionForm := "\"" + long + "\":" + newFunctionForm = SplitLinesIfNestedFunction(split, line, newFunctionForm, lines, idx) + if strings.Contains(*line, short) && !strings.Contains(*line, "|") { - *line = strings.Replace(*line, short, ("\"" + long + "\":"), -1) + *line = strings.Replace(*line, short, newFunctionForm, -1) } else if strings.Contains(*line, short) && strings.Contains(*line, "|") { - *line = strings.Replace(*line, (short + " |"), ("\"" + long + "\":"), -1) + *line = strings.Replace(*line, (short + " |"), (newFunctionForm + " |"), -1) } else if strings.Contains(*line, full) && !strings.Contains(*line, "|") { - *line = strings.Replace(*line, full, ("\"" + long + "\":"), -1) + *line = strings.Replace(*line, full, newFunctionForm, -1) } else if strings.Contains(*line, full) && strings.Contains(*line, "|") { - *line = strings.Replace(*line, (full + " |"), ("\"" + long + "\":"), -1) + *line = strings.Replace(*line, (full + " |"), (newFunctionForm + " |"), -1) } } } currentFunctions++ } +} + +func adjustIndentForNestedFunctionBody(nestedFunctionLineNum int, line string, lines *[]string) { + if strings.Contains(line, "|") { + nextLineNum := nestedFunctionLineNum + 1 + if len(*lines) == nextLineNum { + return + } + functionIndent := countLeadingSpaces(line) + bodyLineIndent := countLeadingSpaces((*lines)[nextLineNum]) + for nextLineNum < len(*lines) && bodyLineIndent > functionIndent && + (bodyLineIndent <= countLeadingSpaces((*lines)[nextLineNum]) || len(strings.TrimSpace((*lines)[nextLineNum])) == 0) { + if len(strings.TrimSpace((*lines)[nextLineNum])) != 0 { + (*lines)[nextLineNum] = " " + (*lines)[nextLineNum] + } + nextLineNum += 1 + } + } +} +// SplitLinesIfNestedFunction parses functions to form which CloudFormation parser can read properly +// - adding indent and moving next function to new line. 
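For a concrete sense of what this short-form rewriting achieves end to end, the sketch below runs FixFunctions (shown later in this diff) over a made-up snippet. The input template and the zero-value Logger are assumptions for illustration, assuming *logger.Logger satisfies LoggerInt as the rest of this change indicates.

package main

import (
	"fmt"

	"github.com/Appliscale/perun/intrinsicsolver"
	"github.com/Appliscale/perun/logger"
)

func main() {
	input := []byte(`Resources:
  Bucket:
    Type: AWS::S3::Bucket
    Properties:
      BucketName: !Ref BucketNameParameter
      AccessControl: Private
`)

	myLogger := logger.Logger{} // zero-value logger, enough for a sketch
	fixed, err := intrinsicsolver.FixFunctions(input, &myLogger, "multiline", "elongate", "correctlong")
	if err != nil {
		fmt.Println(err)
		return
	}
	// Short forms such as !Ref are expanded into their long equivalents, per the
	// modes documented on FixFunctions, so the preprocessed template can later be
	// parsed and converted without the short-form tags getting in the way.
	fmt.Println(string(fixed))
}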
+func SplitLinesIfNestedFunction(split []string, line *string, newFunctionForm string, lines *[]string, idx int) string { + //if this function is nested in the same line + if len(split) > 1 && strings.Contains(split[0], "Fn::") { + indent := 2 // can be anything > + leadingSpaces := indent + countLeadingSpaces(*line) + i := 0 + spaces := "" + for i < leadingSpaces { + spaces += " " + i++ + } + newFunctionForm = "\n" + spaces + newFunctionForm + adjustIndentForNestedFunctionBody(idx, *line, lines) + } + return newFunctionForm +} + +func addQuotes(short string, split []string, line *string) *string { + // Function !Sub in its short form can take only a string - It has to be marked as string with quotes + if short == "!Sub" { + whiteSpaceTrimmed := strings.TrimSpace(split[1]) + if !regexp.MustCompile(`".*"`).MatchString(whiteSpaceTrimmed) && !strings.Contains(*line, "|") { + *line = strings.Replace(*line, whiteSpaceTrimmed, ("\"" + whiteSpaceTrimmed + "\""), -1) + } + } + return line +} + +func countLeadingSpaces(line string) int { + i := 0 + for _, runeValue := range line { + if runeValue == ' ' { + i++ + } else { + break + } + } + return i } diff --git a/intrinsicsolver/fixFunctions.go b/intrinsicsolver/fixFunctions.go index 41f19dd..d2ede46 100644 --- a/intrinsicsolver/fixFunctions.go +++ b/intrinsicsolver/fixFunctions.go @@ -1,3 +1,19 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package intrinsicsolver import ( @@ -10,22 +26,21 @@ import ( "github.com/Appliscale/perun/logger" ) -var functions = []string{"Base64", "GetAtt", "GetAZs", "ImportValue", "Ref", "FindInMap", "Join", "Select", "Split", "Sub", "And", "Equals", "If", "Not", "Or"} -var mapNature = functions[5:] +var Functions = []string{"Base64", "GetAtt", "GetAZs", "ImportValue", "Ref", "FindInMap", "Join", "Select", "Split", "Sub", "And", "Equals", "If", "Not", "Or"} +var mapNature = Functions[5:] /* FixFunctions : takes []byte file and firstly converts all single quotation marks to double ones (anything between single ones is treated as the rune in GoLang), -then deconstructs file into lines, checks for intrinsic functions. The FixFunctions has modes: `multiline`, `elongate`, `correctlong` and `temp`. +then deconstructs file into lines, checks for intrinsic functions. The FixFunctions has modes: `multiline`, `elongate` and `correctlong`. Mode `multiline` looks for functions of a map nature where the function name is located in one line and it's body (map elements) are located in the following lines (if this would be not fixed an error would be thrown: `json: unsupported type: map[interface {}]interface {}`). The function changes the notation by putting function name in the next line with proper indentation. Mode `elongate` exchanges the short function names into their proper, long equivalent. Mode `correctlong` prepares the file for conversion into JSON. 
If the file is a YAML with every line being solicitously indented, there is no problem and the `elongate` mode is all we need. But if there is any mixed notation (e.g. indented maps along with one-line maps, functions in one line with the key), parsing must be preceded with some additional operations. -Mode `temp` allows the user to save the result to a temporary file `.preprocessed.yml`. The result is returned as a []byte array. */ -func FixFunctions(template []byte, logger *logger.Logger, mode ...string) ([]byte, error) { +func FixFunctions(template []byte, logger logger.LoggerInt, mode ...string) ([]byte, error) { var quotationProcessed, temporaryResult []string preLines, err := parseFileIntoLines(template, logger) if err != nil { @@ -58,7 +73,7 @@ func FixFunctions(template []byte, logger *logger.Logger, mode ...string) ([]byt } } if m == "elongate" { - for _, function := range functions { + for _, function := range Functions { elongateForms(&d, &lines, idx, function) } } @@ -73,16 +88,6 @@ func FixFunctions(template []byte, logger *logger.Logger, mode ...string) ([]byt stringStream := strings.Join(temporaryResult, "\n") output := []byte(stringStream) - for _, m := range mode { - if m == "temp" { - if err := writeLines(temporaryResult, ".preprocessed.yml"); err != nil { - logger.Error(err.Error()) - return nil, err - } - logger.Info("Created temporary file of a preprocessed template `.preprocessed.yml`") - } - } - return output, nil } @@ -109,7 +114,7 @@ func shortForm(name string) string { } // Function parseFileIntoLines is reading the []byte file and returns it line by line as []string slice. -func parseFileIntoLines(template []byte, logger *logger.Logger) ([]string, error) { +func parseFileIntoLines(template []byte, logger logger.LoggerInt) ([]string, error) { bytesReader := bytes.NewReader(template) lines := make([]string, 0) scanner := bufio.NewScanner(bytesReader) diff --git a/intrinsicsolver/fixLongFormCorrectness.go b/intrinsicsolver/fixLongFormCorrectness.go index 4e327bc..dad3a15 100644 --- a/intrinsicsolver/fixLongFormCorrectness.go +++ b/intrinsicsolver/fixLongFormCorrectness.go @@ -1,3 +1,19 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package intrinsicsolver import ( diff --git a/intrinsicsolver/fixMultiLineMap.go b/intrinsicsolver/fixMultiLineMap.go index fe30572..1ead233 100644 --- a/intrinsicsolver/fixMultiLineMap.go +++ b/intrinsicsolver/fixMultiLineMap.go @@ -1,3 +1,19 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package intrinsicsolver import ( @@ -21,7 +37,7 @@ func fixMultiLineMap(line *string, lines *[]string, idx int, name string) { // If so - we have multiple-level function with a body created of a map elements as the hyphen-noted structures. if strings.Contains(*line, ":") { // If so - we have key and a function name in one line. We have to relocate the function name into the next line, indent it and change it to the long form. - nextLineIndents := indentations(pLines[idx+1]) + nextLineIndents := countLeadingSpaces(pLines[idx+1]) fullIndents := strings.Repeat(" ", nextLineIndents) replacement := "\n" + fullIndents + full *line = strings.Replace(*line, short, replacement, -1) diff --git a/intrinsicsolver/intrinsicsolver_test.go b/intrinsicsolver/intrinsicsolver_test.go index 0f309da..495536c 100644 --- a/intrinsicsolver/intrinsicsolver_test.go +++ b/intrinsicsolver/intrinsicsolver_test.go @@ -39,7 +39,7 @@ func TestMain(m *testing.M) { func TestIndentations(t *testing.T) { line := " Key: Value " - lineIndent := indentations(line) + lineIndent := countLeadingSpaces(line) firstLetter := string(line[lineIndent]) assert.Equal(t, 16, lineIndent, "MSG") assert.Equal(t, "K", firstLetter, "MSG") @@ -86,3 +86,13 @@ func TestCorrectLong(t *testing.T) { assert.Equal(t, expected, actual, "MSG") } + +func TestAdjustIndentForNestedFunctionBody(t *testing.T) { + lines := []string{"ASD:", " BCA: |", " firstLine", " secondLine with spaces", "", " fourth line # with comment", "just: anotherYaml"} + adjustIndentForNestedFunctionBody(1, lines[1], &lines) + assert.Equal(t, " firstLine", lines[2]) + assert.Equal(t, " secondLine with spaces", lines[3]) + assert.Equal(t, "", lines[4]) + assert.Equal(t, " fourth line # with comment", lines[5]) + assert.Equal(t, "just: anotherYaml", lines[6]) +} diff --git a/linter/lint_configuration.go b/linter/lint_configuration.go new file mode 100644 index 0000000..3eb3c7a --- /dev/null +++ b/linter/lint_configuration.go @@ -0,0 +1,117 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package linter provides revision of templates. Linter configuration is in default/style.yaml. +package linter + +import ( + "encoding/json" + "github.com/Appliscale/perun/configuration" + "github.com/Appliscale/perun/context" + "github.com/ghodss/yaml" + "path" + "regexp" +) + +// LinterConfiguration contains configuration for two types - Yaml and JSON, and global. 
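To show how a style file such as defaults/style.yaml maps onto the LinterConfiguration type defined just below, here is a small, illustrative sketch. The YAML fragment is a trimmed-down, made-up variant of that file, and it relies on ghodss/yaml matching the field names the same way GetLinterConfiguration does.

package main

import (
	"fmt"

	"github.com/Appliscale/perun/linter"
	"github.com/ghodss/yaml"
)

func main() {
	// A trimmed-down style configuration, mirroring the shape of defaults/style.yaml.
	raw := []byte(`
global:
  lineLength:
    required: true
    value: 100
  namingConventions:
    logicalNames: "Test.+"
`)

	var conf linter.LinterConfiguration
	if err := yaml.Unmarshal(raw, &conf); err != nil {
		fmt.Println(err)
		return
	}

	fmt.Println(conf.Global.LineLength.Required, conf.Global.LineLength.Value) // true 100
	fmt.Println(conf.CheckLogicalName("TestS3"))                               // true - matches "Test.+"
	fmt.Println(conf.CheckLogicalName("S3"))                                   // false
}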
+type LinterConfiguration struct {
+    Yaml   YamlLinterConfiguration   `yaml:"yaml"`
+    Json   JsonLinterConfiguration   `yaml:"json"`
+    Global GlobalLinterConfiguration `yaml:"global"`
+}
+
+// GlobalLinterConfiguration describes the global configuration. It is one of the sections of LinterConfiguration.
+type GlobalLinterConfiguration struct {
+    LineLength        Check             `yaml:"lineLength"`
+    Indent            Check             `yaml:"indent"`
+    RequiredFields    RequiredFields    `yaml:"requiredFields"`
+    NamingConventions NamingConventions `yaml:"namingConventions"`
+    BlankLinesAllowed bool              `yaml:"blankLinesAllowed"`
+}
+
+// Check stores whether something is required and its value, e.g. the indent size.
+type Check struct {
+    Required bool        `yaml:"required"`
+    Value    interface{} `yaml:"value"`
+}
+
+// NamingConventions describes how names should look.
+type NamingConventions struct {
+    LogicalNames string `yaml:"logicalNames"`
+}
+
+// RequiredFields says which elements of the template are required.
+type RequiredFields struct {
+    TemplateDescription   bool `yaml:"templateDescription"`
+    ParametersDescription bool `yaml:"parametersDescription"`
+}
+
+// JsonLinterConfiguration describes where spaces should be placed.
+type JsonLinterConfiguration struct {
+    Spaces SpacesConfiguration `yaml:"spaces"`
+}
+
+// SpacesConfiguration stores the characters that require a space before or after them.
+type SpacesConfiguration struct {
+    After  []string `yaml:"after"`
+    Before []string `yaml:"before"`
+}
+
+// YamlLinterConfiguration describes the configuration for YAML - which types of quotes, lists and indentation are used.
+type YamlLinterConfiguration struct {
+    AllowedQuotes      Quotes       `yaml:"allowedQuotes"`
+    AllowedLists       AllowedLists `yaml:"allowedLists"`
+    ContinuationIndent Check        `yaml:"continuationIndent"`
+}
+
+// Quotes describes which types of quotes are allowed.
+type Quotes struct {
+    Single   bool `yaml:"single"`
+    Double   bool `yaml:"double"`
+    Noquotes bool `yaml:"noquotes"`
+}
+
+// AllowedLists describes which types of lists are correct.
+type AllowedLists struct {
+    Inline bool `yaml:"inline"`
+    Dash   bool `yaml:"dash"`
+}
+
+// CheckLogicalName checks a logical name against the NamingConventions regex.
+func (this LinterConfiguration) CheckLogicalName(name string) bool {
+    return regexp.MustCompile(this.Global.NamingConventions.LogicalNames).MatchString(name)
+}
+
+// GetLinterConfiguration reads the linter configuration from a file.
+func GetLinterConfiguration(ctx *context.Context) (err error, lintConf LinterConfiguration) {
+
+    linterConfigurationFilename := ctx.CliArguments.LinterConfiguration
+    rawLintConfiguration := configuration.GetLinterConfigurationFile(linterConfigurationFilename, ctx.Logger)
+
+    if path.Ext(*linterConfigurationFilename) == ".json" {
+        err = json.Unmarshal([]byte(rawLintConfiguration), &lintConf)
+        if err != nil {
+            ctx.Logger.Error(err.Error())
+        }
+    } else {
+
+        err = yaml.Unmarshal([]byte(rawLintConfiguration), &lintConf)
+        if err != nil {
+            ctx.Logger.Error(err.Error())
+        }
+    }
+    return
+}
diff --git a/linter/stylechecker.go b/linter/stylechecker.go
new file mode 100644
index 0000000..72b2f97
--- /dev/null
+++ b/linter/stylechecker.go
@@ -0,0 +1,219 @@
+// Copyright 2018 Appliscale
+//
+// Maintainers and contributors are listed in README file inside repository.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package linter + +import ( + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/helpers" + "github.com/Appliscale/perun/validator/template" + "github.com/awslabs/goformation/cloudformation" + "io/ioutil" + "path" + "regexp" + "strconv" + "strings" +) + +// Parameter describes all element which parameter in template should have. +type Parameter struct { + Type string `json:"Type"` + Default string `json:"Default"` + AllowedValues []string `json:"AllowedValues"` + Description string `json:"Description"` +} + +// CheckStyle gets linter configuration and run checking. +func CheckStyle(ctx *context.Context) (err error) { + + err, lintConf := GetLinterConfiguration(ctx) + if err != nil { + return + } + + templateExtension := path.Ext(*ctx.CliArguments.TemplatePath) + templateBytes, err := ioutil.ReadFile(*ctx.CliArguments.TemplatePath) + if err != nil { + ctx.Logger.Error(err.Error()) + return + } + rawTemplate := string(templateBytes) + + lines := strings.Split(rawTemplate, "\n") + + checkAWSCFSpecificStuff(ctx, rawTemplate, lintConf) + checkBlankLines(lintConf, rawTemplate, ctx) + checkLineLengths(lines, lintConf, ctx) + + if templateExtension == ".json" { + checkJsonIndentation(ctx, lintConf, lines) + checkJsonSpaces(ctx, lintConf, lines) + } else if templateExtension == ".yaml" { + checkYamlIndentation(ctx, lintConf, lines) + checkYamlQuotes(ctx, lintConf, lines) + checkYamlLists(ctx, lintConf, rawTemplate) + } + + return +} + +func checkLineLengths(lines []string, lintConf LinterConfiguration, ctx *context.Context) { + for line := range lines { + if lintConf.Global.LineLength.Required && len(lines[line]) > int(lintConf.Global.LineLength.Value.(float64)) { + ctx.Logger.Warning("line " + strconv.Itoa(line+1) + ": maximum line lenght exceeded") + } + } +} + +func checkBlankLines(lintConf LinterConfiguration, rawTemplate string, ctx *context.Context) { + if !lintConf.Global.BlankLinesAllowed && regexp.MustCompile("\n\n").MatchString(rawTemplate) { + ctx.Logger.Warning("Blank lines are not allowed in current lint configuration") + } +} +func checkAWSCFSpecificStuff(ctx *context.Context, rawTemplate string, lintConf LinterConfiguration) { + var perunTemplate template.Template + parser, err := helpers.GetParser(*ctx.CliArguments.TemplatePath) + if err != nil { + ctx.Logger.Error(err.Error()) + return + } + var goFormationTemplate cloudformation.Template + goFormationTemplate, err = parser([]byte(rawTemplate), perunTemplate, ctx.Logger) + if err != nil { + ctx.Logger.Error(err.Error()) + return + } + + if lintConf.Global.RequiredFields.TemplateDescription && goFormationTemplate.Description == "" { + ctx.Logger.Warning("The template has no description") + } + + if lintConf.Global.RequiredFields.ParametersDescription { + for parameterName, parameterValue := range goFormationTemplate.Parameters { + if parameterValue.(map[string]interface{})["Description"] == nil { + ctx.Logger.Warning("No description provided for parameter " + parameterName) + } + } + } + + for resourceName := range goFormationTemplate.Resources { + if !lintConf.CheckLogicalName(resourceName) { + 
ctx.Logger.Warning("Resource '" + resourceName + "' does not meet the given logical Name regex: " + lintConf.Global.NamingConventions.LogicalNames) + } + } +} + +func checkJsonSpaces(ctx *context.Context, lintConf LinterConfiguration, lines []string) { + reg := regexp.MustCompile(`"([^"]*)"`) + for line := range lines { + for sign := range lintConf.Json.Spaces.After { + if strings.Count(reg.ReplaceAllString(lines[line], "\"*\""), lintConf.Json.Spaces.After[sign]) != strings.Count(reg.ReplaceAllString(lines[line], "\"*\""), lintConf.Json.Spaces.After[sign]+" ") { + ctx.Logger.Warning("line " + strconv.Itoa(line+1) + ": no space after '" + string(lintConf.Json.Spaces.After[sign]) + "'") + } + } + for sign := range lintConf.Json.Spaces.Before { + if strings.Count(reg.ReplaceAllString(lines[line], "\"*\""), lintConf.Json.Spaces.Before[sign]) != strings.Count(reg.ReplaceAllString(lines[line], "\"*\""), " "+lintConf.Json.Spaces.Before[sign]) { + ctx.Logger.Warning("line " + strconv.Itoa(line+1) + ": no space before '" + string(lintConf.Json.Spaces.Before[sign]) + "'") + } + } + } +} + +func checkYamlLists(ctx *context.Context, lintConf LinterConfiguration, template string) { + preprocessed := regexp.MustCompile("#.*\n").ReplaceAllString(template, "\n") + dashListRegex := regexp.MustCompile(".*- .*") + inlineListRegex := regexp.MustCompile(`.*: \[.*].*`) + if !lintConf.Yaml.AllowedLists.Dash && dashListRegex.MatchString(preprocessed) { + ctx.Logger.Warning("dash lists are not allowed in current lint configuration") + } + if !lintConf.Yaml.AllowedLists.Inline && inlineListRegex.MatchString(preprocessed) { + ctx.Logger.Warning("inline lists are not allowed in current lint configuration") + } +} + +func checkYamlQuotes(ctx *context.Context, lintConf LinterConfiguration, lines []string) { + for line := range lines { + if !lintConf.Yaml.AllowedQuotes.Double && strings.Contains(lines[line], "\"") { + ctx.Logger.Warning("line " + strconv.Itoa(line+1) + ": double quotes not allowed") + } + if !lintConf.Yaml.AllowedQuotes.Single && strings.Contains(lines[line], "'") { + ctx.Logger.Warning("line " + strconv.Itoa(line+1) + ": single quotes not allowed") + } + noQuotesRegex := regexp.MustCompile(".*: [^\"']*") + if !lintConf.Yaml.AllowedQuotes.Noquotes && noQuotesRegex.MatchString(lines[line]) { + ctx.Logger.Warning("line " + strconv.Itoa(line+1) + ": quotes required") + } + } +} + +func checkYamlIndentation(ctx *context.Context, lintConf LinterConfiguration, lines []string) { + indent := int(lintConf.Global.Indent.Value.(float64)) + last_spaces := 0 + for line := range lines { + if strings.HasPrefix(strings.TrimSpace(lines[line]), "#") { + continue + } + curr_spaces := helpers.CountLeadingSpaces(lines[line]) + if lintConf.Global.Indent.Required { + if curr_spaces%indent != 0 || (last_spaces < curr_spaces && last_spaces+indent != curr_spaces) { + ctx.Logger.Error("line " + strconv.Itoa(line+1) + ": indentation error") + } + } + + if last_spaces < curr_spaces { + if wrongYAMLContinuationIndent(lintConf, lines, line, last_spaces, curr_spaces) { + ctx.Logger.Error("line " + strconv.Itoa(line+1) + ": continuation indent error") + } + } + last_spaces = curr_spaces + } +} + +func wrongYAMLContinuationIndent(lintConf LinterConfiguration, lines []string, line int, last_spaces int, curr_spaces int) bool { + return lintConf.Yaml.ContinuationIndent.Required && !strings.Contains(lines[line], ": ") && !strings.Contains(lines[line], "- ") && + !strings.HasSuffix(lines[line], ":") && 
last_spaces+int(lintConf.Yaml.ContinuationIndent.Value.(float64)) != curr_spaces +} + +func checkJsonIndentation(ctx *context.Context, lintConf LinterConfiguration, lines []string) { + var last_spaces = 0 + if lintConf.Global.Indent.Required { + indent := int(lintConf.Global.Indent.Value.(float64)) + jsonIndentations := map[string]int{ + ",": 0, + "{": indent, + "}": -indent, + "[": indent, + "]": -indent, + "\"": -indent, + } + last_spaces = 0 + for line := range lines { + if line == 0 || len(lines[line]) == 0 { + continue + } + prevLine := lines[line-1] + indentation, found := jsonIndentations[string(prevLine[len(prevLine)-1])] + if !found { + indentation = -indentation + } + curr_spaces := helpers.CountLeadingSpaces(lines[line]) + if curr_spaces-last_spaces != indentation { + ctx.Logger.Error("line " + strconv.Itoa(line+1) + ": indentation error") + } + last_spaces = curr_spaces + } + } +} diff --git a/linter/stylechecker_test.go b/linter/stylechecker_test.go new file mode 100644 index 0000000..7877cab --- /dev/null +++ b/linter/stylechecker_test.go @@ -0,0 +1,130 @@ +package linter + +import ( + "github.com/Appliscale/perun/checkingrequiredfiles/mocks" + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/assert" + "strconv" + "strings" + "testing" +) + +func setupTestEnv(t *testing.T, filename string, styleConfiguration string) (*context.Context, *gomock.Controller, *mocks.MockLoggerInt, LinterConfiguration) { + ctx := stack_mocks.SetupContext(t, []string{"cmd", "lint", filename, "--lint-configuration=" + styleConfiguration}) + mockCtrl := gomock.NewController(t) + mockLogger := mocks.NewMockLoggerInt(mockCtrl) + ctx.Logger = mockLogger + err, linterConf := GetLinterConfiguration(ctx) + assert.Nil(t, err) + return ctx, mockCtrl, mockLogger, linterConf +} + +func TestCheckLineLengths(t *testing.T) { + ctx, mockCtrl, mockLogger, linterConf := setupTestEnv(t, "./test_resources/blanklines_testtemplate.yaml", "test_resources/test_style.yaml") + defer mockCtrl.Finish() + + mockLogger.EXPECT().Warning("line " + strconv.Itoa(1) + ": maximum line lenght exceeded").Times(1) + + checkLineLengths([]string{"asdasdasdasdasd"}, linterConf, ctx) +} + +func TestCheckBlankLines(t *testing.T) { + ctx, mockCtrl, mockLogger, linterConf := setupTestEnv(t, "./test_resources/blanklines_testtemplate.yaml", "test_resources/test_style.yaml") + defer mockCtrl.Finish() + + mockLogger.EXPECT().Warning("Blank lines are not allowed in current lint configuration").Times(1) + + checkBlankLines(linterConf, stack_mocks.ReadFile(t, "./test_resources/blanklines_testtemplate.yaml"), ctx) + checkBlankLines(linterConf, stack_mocks.ReadFile(t, "./test_resources/noblanklines_testtemplate.yaml"), ctx) +} + +func TestCheckAWSSpecificStuff(t *testing.T) { + ctx, mockCtrl, mockLogger, linterConf := setupTestEnv(t, "./test_resources/nodescription_testtemplate.yaml", "test_resources/test_style.yaml") + defer mockCtrl.Finish() + + mockLogger.EXPECT().Warning("The template has no description").Times(1) + mockLogger.EXPECT().Warning("No description provided for parameter TestParameter2") + mockLogger.EXPECT().Warning("Resource 'S3' does not meet the given logical Name regex: Test.+") + + checkAWSCFSpecificStuff(ctx, stack_mocks.ReadFile(t, "./test_resources/nodescription_testtemplate.yaml"), linterConf) +} + +func TestTestCheckAWSSpecificStuffOk(t *testing.T) { + ctx, mockCtrl, _, linterConf := setupTestEnv(t, 
"./test_resources/described_testtemplate.yaml", "test_resources/test_style.yaml") + defer mockCtrl.Finish() + + checkAWSCFSpecificStuff(ctx, stack_mocks.ReadFile(t, "./test_resources/described_testtemplate.yaml"), linterConf) +} + +func TestCheckJSONSpaces(t *testing.T) { + ctx, mockCtrl, mockLogger, linterConf := setupTestEnv(t, "./test_resources/spacesjson_testtemplate.json", "test_resources/test_style.yaml") + defer mockCtrl.Finish() + + mockLogger.EXPECT().Warning("line 3: no space after ':'") + mockLogger.EXPECT().Warning("line 2: no space before ':'") + + checkJsonSpaces(ctx, linterConf, strings.Split(stack_mocks.ReadFile(t, "./test_resources/spacesjson_testtemplate.json"), "\n")) +} + +func TestCheckYamlDashLists(t *testing.T) { + ctx, mockCtrl, mockLogger, linterConf := setupTestEnv(t, "./test_resources/nodescription_testtemplate.yaml", "test_resources/test_style.yaml") + defer mockCtrl.Finish() + + mockLogger.EXPECT().Warning("dash lists are not allowed in current lint configuration").Times(1) + checkYamlLists(ctx, linterConf, stack_mocks.ReadFile(t, "./test_resources/nodescription_testtemplate.yaml")) +} + +func TestCheckYamlInlineLists(t *testing.T) { + ctx, mockCtrl, mockLogger, linterConf := setupTestEnv(t, "./test_resources/nodescription_testtemplate.yaml", "test_resources/test_styleDash.yaml") + defer mockCtrl.Finish() + + mockLogger.EXPECT().Warning("inline lists are not allowed in current lint configuration").Times(1) + checkYamlLists(ctx, linterConf, stack_mocks.ReadFile(t, "./test_resources/inlinelist_testtemplate.yaml")) +} + +func TestCheckYamlQuotesNoSingleDouble(t *testing.T) { + ctx, mockCtrl, mockLogger, linterConf := setupTestEnv(t, "./test_resources/nodescription_testtemplate.yaml", "test_resources/test_styleDash.yaml") + defer mockCtrl.Finish() + + mockLogger.EXPECT().Warning("line 1: double quotes not allowed").Times(1) + mockLogger.EXPECT().Warning("line 2: double quotes not allowed").Times(1) + + checkYamlQuotes(ctx, linterConf, []string{"ala: \"makota\"", "asd: \"qwe\""}) + + mockLogger.EXPECT().Warning("line 2: single quotes not allowed").Times(1) + mockLogger.EXPECT().Warning("line 3: single quotes not allowed").Times(1) + + checkYamlQuotes(ctx, linterConf, []string{"asd: asd", "qwe: 'qwe'", "zxc: 'zxc'"}) +} + +func TestCheckYamlQuotesNoQuotes(t *testing.T) { + ctx, mockCtrl, mockLogger, linterConf := setupTestEnv(t, "./test_resources/nodescription_testtemplate.yaml", "test_resources/test_style.yaml") + defer mockCtrl.Finish() + + mockLogger.EXPECT().Warning("line 1: quotes required").Times(1) + mockLogger.EXPECT().Warning("line 2: quotes required").Times(1) + + checkYamlQuotes(ctx, linterConf, []string{"asd: asd", "qwe: qwe"}) +} + +func TestCheckYamlIndentation(t *testing.T) { + ctx, mockCtrl, mockLogger, linterConf := setupTestEnv(t, "./test_resources/nodescription_testtemplate.yaml", "test_resources/test_style.yaml") + defer mockCtrl.Finish() + + checkYamlIndentation(ctx, linterConf, strings.Split(stack_mocks.ReadFile(t, "./test_resources/blanklines_testtemplate.yaml"), "\n")) + + mockLogger.EXPECT().Error("line 6: indentation error") + mockLogger.EXPECT().Error("line 8: indentation error") + + checkYamlIndentation(ctx, linterConf, strings.Split(stack_mocks.ReadFile(t, "./test_resources/indenterror_testtemplate.yaml"), "\n")) + +} + +func TestCheckJSONIndentation(t *testing.T) { + ctx, mockCtrl, _, linterConf := setupTestEnv(t, "./test_resources/nodescription_testtemplate.yaml", "test_resources/test_style.yaml") + defer mockCtrl.Finish() + + 
checkJsonIndentation(ctx, linterConf, strings.Split(stack_mocks.ReadFile(t, "./test_resources/spacesjson_testtemplate.json"), "\n")) +} diff --git a/linter/test_resources/blanklines_testtemplate.yaml b/linter/test_resources/blanklines_testtemplate.yaml new file mode 100644 index 0000000..39675cb --- /dev/null +++ b/linter/test_resources/blanklines_testtemplate.yaml @@ -0,0 +1,21 @@ +AWSTemplateFormatVersion: "2010-09-09" + +Resources: + TestS3: + Type: AWS::S3::Bucket + Properties: + BucketName: peruntestbucketnameisatestbucketname + + + + TestEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "my-ami" + InstanceType: t2.small + KeyName: "my-key" + + SecondTestS3: + Type: AWS::S3::Bucket + Properties: + BucketName: peruntestbucketnameisatestbucketname2 diff --git a/linter/test_resources/described_testtemplate.yaml b/linter/test_resources/described_testtemplate.yaml new file mode 100644 index 0000000..4ee1ccd --- /dev/null +++ b/linter/test_resources/described_testtemplate.yaml @@ -0,0 +1,15 @@ +AWSTemplateFormatVersion: "2010-09-09" +Description: "Simple Description" +Parameters: + TestParameter1: + Description: "Prefix of the bucket name - environment" + Type: String + AllowedValues: + - prod + - staging + +Resources: + TestS3: + Type: AWS::S3::Bucket + Properties: + BucketName: !Ref TestParameter1 diff --git a/linter/test_resources/indenterror_testtemplate.yaml b/linter/test_resources/indenterror_testtemplate.yaml new file mode 100644 index 0000000..fd195c9 --- /dev/null +++ b/linter/test_resources/indenterror_testtemplate.yaml @@ -0,0 +1,16 @@ +AWSTemplateFormatVersion: "2010-09-09" +Resources: + TestS3: + Type: AWS::S3::Bucket + Properties: + BucketName: peruntestbucketnameisatestbucketname + TestEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "my-ami" + InstanceType: t2.small + KeyName: "my-key" + SecondTestS3: + Type: AWS::S3::Bucket + Properties: + BucketName: peruntestbucketnameisatestbucketname2 diff --git a/linter/test_resources/inlinelist_testtemplate.yaml b/linter/test_resources/inlinelist_testtemplate.yaml new file mode 100644 index 0000000..399c67b --- /dev/null +++ b/linter/test_resources/inlinelist_testtemplate.yaml @@ -0,0 +1,13 @@ +AWSTemplateFormatVersion: "2010-09-09" +Description: "Simple Description" +Parameters: + TestParameter1: + Description: "Prefix of the bucket name - environment" + Type: String + AllowedValues: [prod, staging] + +Resources: + TestS3: + Type: AWS::S3::Bucket + Properties: + BucketName: !Ref TestParameter1 diff --git a/linter/test_resources/noblanklines_testtemplate.yaml b/linter/test_resources/noblanklines_testtemplate.yaml new file mode 100644 index 0000000..c2e67ac --- /dev/null +++ b/linter/test_resources/noblanklines_testtemplate.yaml @@ -0,0 +1,16 @@ +AWSTemplateFormatVersion: "2010-09-09" +Resources: + TestS3: + Type: AWS::S3::Bucket + Properties: + BucketName: peruntestbucketnameisatestbucketname + TestEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "my-ami" + InstanceType: t2.small + KeyName: "my-key" + SecondTestS3: + Type: AWS::S3::Bucket + Properties: + BucketName: peruntestbucketnameisatestbucketname2 diff --git a/linter/test_resources/nodescription_testtemplate.yaml b/linter/test_resources/nodescription_testtemplate.yaml new file mode 100644 index 0000000..7063950 --- /dev/null +++ b/linter/test_resources/nodescription_testtemplate.yaml @@ -0,0 +1,17 @@ +AWSTemplateFormatVersion: "2010-09-09" +Parameters: + TestParameter1: + Description: "Prefix of the bucket name - environment" + 
Type: String + AllowedValues: + - prod + - staging + TestParameter2: + Type: String + AllowedPattern: "[a-z]+" + +Resources: + S3: + Type: AWS::S3::Bucket + Properties: + BucketName: !Join [ "-", [!Ref TestParameter1, !Ref TestParameter2]] diff --git a/linter/test_resources/spacesjson_testtemplate.json b/linter/test_resources/spacesjson_testtemplate.json new file mode 100644 index 0000000..77d9630 --- /dev/null +++ b/linter/test_resources/spacesjson_testtemplate.json @@ -0,0 +1,25 @@ +{ + "AWSTemplateFormatVersion": "2010-09-09", + "Resources" :{ + "TestS3" : { + "Type" : "AWS::S3::Bucket", + "Properties" : { + "BucketName" : "peruntestbucketnameisatestbucketname" + } + }, + "TestEC2Instance" : { + "Type" : "AWS::EC2::Instance", + "Properties" : { + "ImageId" : "my-ami", + "InstanceType" : "t2.small", + "KeyName" : "my-key" + } + }, + "SecondTestS3" : { + "Type" : "AWS::S3::Bucket", + "Properties" : { + "BucketName" : "peruntestbucketnameisatestbucketname2" + } + } + } +} diff --git a/linter/test_resources/test_style.yaml b/linter/test_resources/test_style.yaml new file mode 100644 index 0000000..32e5f90 --- /dev/null +++ b/linter/test_resources/test_style.yaml @@ -0,0 +1,33 @@ +yaml: + allowedQuotes: + double: true + single: true + noquotes: false + allowedLists: + inline: true + dash: false + continuationIndent: # checked only when indent is required + required: true + value: 4 + +json: + spaces: + after: [':'] + before: [':'] + +global: + lineLength: + required: true + value: 10 + indent: + required: true + value: 2 + blankLinesAllowed: false + +# AWS Specific + requiredFields: + templateDescription: true + parametersDescription: true + + namingConventions: + logicalNames: "Test.+" #regex diff --git a/linter/test_resources/test_styleDash.yaml b/linter/test_resources/test_styleDash.yaml new file mode 100644 index 0000000..4815533 --- /dev/null +++ b/linter/test_resources/test_styleDash.yaml @@ -0,0 +1,33 @@ +yaml: + allowedQuotes: + double: false + single: false + noquotes: true + allowedLists: + inline: false + dash: true + continuationIndent: # checked only when indent is required + required: true + value: 4 + +json: + spaces: + after: [':'] + before: [':'] + +global: + lineLength: + required: true + value: 10 + indent: + required: true + value: 2 + blankLinesAllowed: false + +# AWS Specific + requiredFields: + templateDescription: true + parametersDescription: true + + namingConventions: + logicalNames: "Test.+" #regex diff --git a/logger/logger.go b/logger/logger.go index 4f46d9e..9b5bd01 100644 --- a/logger/logger.go +++ b/logger/logger.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -23,6 +23,21 @@ import ( "strings" ) +// Logger contains information type of logger tool. +type LoggerInt interface { + Always(message string) + Warning(warning string) + Error(err string) + Info(info string) + Debug(debug string) + Trace(trace string) + GetInput(message string, v ...interface{}) error + PrintValidationErrors() + HasValidationErrors() bool + HasValidationWarnings() bool + AddResourceForValidation(resourceName string) *ResourceValidation + SetVerbosity(verbosity string) +} type Logger struct { Quiet bool Yes bool @@ -30,11 +45,14 @@ type Logger struct { resourceValidation []*ResourceValidation } +// ResourceValidation contains name of resource and errors. 
 type ResourceValidation struct {
 	ResourceName string
 	Errors       []string
+	Warnings     []string
 }
+// Verbosity - type of logger.
 type Verbosity int
 const (
@@ -48,7 +66,7 @@ const (
 var verboseModes = [...]string{
 	"TRACE",
 	"DEBUG",
-	" INFO",
+	"INFO",
 	"ERROR",
 	"WARNING",
 }
@@ -110,6 +128,11 @@ func (resourceValidation *ResourceValidation) AddValidationError(error string) {
 	resourceValidation.Errors = append(resourceValidation.Errors, error)
 }
+// Log validation warning.
+func (resourceValidation *ResourceValidation) AddValidationWarning(warning string) {
+	resourceValidation.Warnings = append(resourceValidation.Warnings, warning)
+}
+
 // Get input from command line.
 func (logger *Logger) GetInput(message string, v ...interface{}) error {
 	fmt.Printf("%s: ", message)
@@ -119,7 +142,6 @@ func (logger *Logger) GetInput(message string, v ...interface{}) error {
 	}
 	return nil
 }
-
 func (logger *Logger) log(verbosity Verbosity, message string) {
 	if !logger.Quiet && verbosity >= logger.Verbosity {
 		fmt.Println(verbosity.String() + ": " + message)
@@ -130,18 +152,20 @@ func (logger *Logger) PrintValidationErrors() {
 	if !logger.Quiet {
 		for _, resourceValidation := range logger.resourceValidation {
-			if len(resourceValidation.Errors) != 0 {
+			if len(resourceValidation.Errors) != 0 || len(resourceValidation.Warnings) != 0 {
 				fmt.Println(resourceValidation.ResourceName)
 				for _, err := range resourceValidation.Errors {
 					fmt.Println(" ", err)
 				}
-			} else {
-				fmt.Println(resourceValidation.ResourceName, " has no validation errors")
+				for _, warning := range resourceValidation.Warnings {
+					fmt.Println(" ", warning)
+				}
 			}
 		}
 	}
 }
+// HasValidationErrors checks if any resource has validation errors. It's used in validateResources().
 func (logger *Logger) HasValidationErrors() bool {
 	for _, resourceValidation := range logger.resourceValidation {
 		if len(resourceValidation.Errors) > 0 {
@@ -151,7 +175,16 @@
 	return false
 }
-// AddResourceForValidation : Adds resource for validation
+func (logger *Logger) HasValidationWarnings() bool {
+	for _, resourceValidation := range logger.resourceValidation {
+		if len(resourceValidation.Warnings) > 0 {
+			return true
+		}
+	}
+	return false
+}
+
+// AddResourceForValidation : Adds resource for validation. It's used in validateResources().
 func (logger *Logger) AddResourceForValidation(resourceName string) *ResourceValidation {
 	resourceValidation := &ResourceValidation{
 		ResourceName: resourceName,
@@ -170,6 +203,7 @@ func (logger *Logger) SetVerbosity(verbosity string) {
 	}
 }
+// IsVerbosityValid checks if verbosity is one of the supported levels.
func IsVerbosityValid(verbosity string) bool { switch verbosity { case diff --git a/logger/logger_test.go b/logger/logger_test.go new file mode 100644 index 0000000..faf34b7 --- /dev/null +++ b/logger/logger_test.go @@ -0,0 +1,56 @@ +package logger + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestResourceValidation_AddValidationError(t *testing.T) { + resourceValidation := ResourceValidation{ResourceName: "Name", Errors: []string{}} + resourceValidation.AddValidationError("Error") + assert.NotEmpty(t, resourceValidation.Errors) + assert.Equal(t, "Error", resourceValidation.Errors[0]) +} + +func TestLogger_HasValidationErrors(t *testing.T) { + logger := CreateQuietLogger() + assert.False(t, logger.HasValidationErrors()) + + resourceValidation := logger.AddResourceForValidation("Name") + assert.False(t, logger.HasValidationErrors()) + + resourceValidation.AddValidationError("Error") + assert.True(t, logger.HasValidationErrors()) +} + +func TestLogger_AddResourceForValidation(t *testing.T) { + logger := CreateQuietLogger() + assert.Empty(t, logger.resourceValidation) + logger.AddResourceForValidation("Name") + assert.NotEmpty(t, logger.resourceValidation) +} + +func TestLogger_SetVerbosity(t *testing.T) { + logger := CreateQuietLogger() + logger.SetVerbosity("error") + assert.Equal(t, ERROR, logger.Verbosity) + + logger.SetVerbosity("Trolololo") + assert.Equal(t, ERROR, logger.Verbosity) + + logger.SetVerbosity("Warning") + assert.Equal(t, WARNING, logger.Verbosity) +} + +func TestIsVerbosityValid(t *testing.T) { + assert.True(t, IsVerbosityValid("INFO")) + assert.True(t, IsVerbosityValid("ERROR")) + assert.True(t, IsVerbosityValid("WARNING")) + assert.True(t, IsVerbosityValid("TRACE")) + assert.True(t, IsVerbosityValid("DEBUG")) + + assert.False(t, IsVerbosityValid("error")) + assert.False(t, IsVerbosityValid("debug")) + assert.False(t, IsVerbosityValid("VERBOSE")) + +} diff --git a/main.go b/main.go index bfd83a1..92dfec0 100644 --- a/main.go +++ b/main.go @@ -14,67 +14,138 @@ // See the License for the specific language governing permissions and // limitations under the License. -// A tool for CloudFormation template validation and conversion. +// A tool for CloudFormation template validation. 
package main import ( + "github.com/Appliscale/perun/checkingrequiredfiles" "github.com/Appliscale/perun/cliparser" "github.com/Appliscale/perun/configuration" "github.com/Appliscale/perun/configurator" "github.com/Appliscale/perun/context" - "github.com/Appliscale/perun/converter" - "github.com/Appliscale/perun/offlinevalidator" - "github.com/Appliscale/perun/onlinevalidator" + "github.com/Appliscale/perun/estimatecost" + "github.com/Appliscale/perun/linter" + "github.com/Appliscale/perun/parameters" + "github.com/Appliscale/perun/progress" "github.com/Appliscale/perun/stack" + "github.com/Appliscale/perun/utilities" + "github.com/Appliscale/perun/validator" + "github.com/Appliscale/perun/validator/validators" "os" ) func main() { - context, err := context.GetContext(cliparser.ParseCliArguments, configuration.GetConfiguration) - if err != nil { - os.Exit(1) + ctx, err := context.GetContext(cliparser.ParseCliArguments, configuration.GetConfiguration, configuration.ReadInconsistencyConfiguration) + checkingrequiredfiles.CheckingRequiredFiles(&ctx) + + if ctx.CliArguments.Lint != nil && *ctx.CliArguments.Lint { + err = linter.CheckStyle(&ctx) + if err != nil { + os.Exit(1) + } } - if *context.CliArguments.Mode == cliparser.ValidateMode { - valid := onlinevalidator.ValidateAndEstimateCosts(&context) - if valid { - os.Exit(0) - } else { + if *ctx.CliArguments.Mode == cliparser.ValidateMode { + ctx.InitializeAwsAPI() + utilities.CheckFlagAndExit(validator.Validate(&ctx)) + } + + if *ctx.CliArguments.Mode == cliparser.ConfigureMode { + configurator.CreateRequiredFilesInConfigureMode(&ctx) + os.Exit(0) + } + + if *ctx.CliArguments.Mode == cliparser.LintMode { + err = linter.CheckStyle(&ctx) + if err != nil { os.Exit(1) } + os.Exit(0) + } + + validationUnsuccessfullMsg := "To skip the validation part use the --no-validate flag" + if *ctx.CliArguments.Mode == cliparser.CreateStackMode { + ctx.InitializeAwsAPI() + if *ctx.CliArguments.SkipValidation || (validator.Validate(&ctx) && validators.UserDecideGeneralRule(&ctx)) { + utilities.CheckErrorCodeAndExit(stack.NewStack(&ctx)) + } else { + ctx.Logger.Info(validationUnsuccessfullMsg) + } + } + + if *ctx.CliArguments.Mode == cliparser.DestroyStackMode { + ctx.InitializeAwsAPI() + utilities.CheckErrorCodeAndExit(stack.DestroyStack(&ctx)) + } - if *context.CliArguments.Mode == cliparser.ConvertMode { - err := converter.Convert(&context) + if *ctx.CliArguments.Mode == cliparser.MfaMode { + err := context.UpdateSessionToken(ctx.Config.DefaultProfile, ctx.Config.DefaultRegion, ctx.Config.DefaultDurationForMFA, &ctx) if err == nil { os.Exit(0) } else { - context.Logger.Error(err.Error()) + ctx.Logger.Error(err.Error()) os.Exit(1) } } - if *context.CliArguments.Mode == cliparser.OfflineValidateMode { - valid := offlinevalidator.Validate(&context) - if valid { - os.Exit(0) + if *ctx.CliArguments.Mode == cliparser.CreateChangeSetMode { + ctx.InitializeAwsAPI() + if *ctx.CliArguments.SkipValidation || (validator.Validate(&ctx) && validators.UserDecideGeneralRule(&ctx)) { + err := stack.NewChangeSet(&ctx) + if err != nil { + ctx.Logger.Error(err.Error()) + } } else { - os.Exit(1) + ctx.Logger.Info(validationUnsuccessfullMsg) } } - if *context.CliArguments.Mode == cliparser.ConfigureMode { - configurator.FileName(&context) + if *ctx.CliArguments.Mode == cliparser.DeleteChangeSetMode { + ctx.InitializeAwsAPI() + utilities.CheckErrorCodeAndExit(stack.DeleteChangeSet(&ctx)) + } + + if *ctx.CliArguments.Mode == cliparser.UpdateStackMode { + ctx.InitializeAwsAPI() + 
if *ctx.CliArguments.SkipValidation || (validator.Validate(&ctx) && validators.UserDecideGeneralRule(&ctx)) { + utilities.CheckErrorCodeAndExit(stack.UpdateStack(&ctx)) + } else { + ctx.Logger.Info(validationUnsuccessfullMsg) + } + + } + + if *ctx.CliArguments.Mode == cliparser.SetupSinkMode { + progress.ConfigureRemoteSink(&ctx) os.Exit(0) } - if *context.CliArguments.Mode == cliparser.CreateStackMode { - stack.NewStack(&context) + if *ctx.CliArguments.Mode == cliparser.DestroySinkMode { + progress.DestroyRemoteSink(&ctx) os.Exit(0) } - if *context.CliArguments.Mode == cliparser.DestroyStackMode { - stack.DestroyStack(&context) + if *ctx.CliArguments.Mode == cliparser.CreateParametersMode { + parameters.ConfigureParameters(&ctx) os.Exit(0) } + + if *ctx.CliArguments.Mode == cliparser.SetStackPolicyMode { + ctx.InitializeAwsAPI() + if *ctx.CliArguments.DisableStackTermination || *ctx.CliArguments.EnableStackTermination { + utilities.CheckErrorCodeAndExit(stack.SetTerminationProtection(&ctx)) + } else { + utilities.CheckErrorCodeAndExit(stack.ApplyStackPolicy(&ctx)) + } + } + + if *ctx.CliArguments.Mode == cliparser.EstimateCostMode { + ctx.InitializeAwsAPI() + if *ctx.CliArguments.SkipValidation || validator.Validate(&ctx) { + estimatecost.EstimateCosts(&ctx) + } else { + ctx.Logger.Info(validationUnsuccessfullMsg) + } + } } diff --git a/mysession/mysession.go b/mysession/mysession.go deleted file mode 100644 index 3032763..0000000 --- a/mysession/mysession.go +++ /dev/null @@ -1,122 +0,0 @@ -package mysession - -import ( - "errors" - "github.com/Appliscale/perun/context" - "github.com/Appliscale/perun/utilities" - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/sts" - "github.com/go-ini/ini" - "os/user" - "time" -) - -const dateFormat = "2006-01-02 15:04:05 MST" - -func CreateSession(context *context.Context, profile string, region *string) (*session.Session, error) { - context.Logger.Info("Profile: " + profile) - context.Logger.Info("Region: " + *region) - - session, sessionWithOptionError := session.NewSessionWithOptions( - session.Options{ - Config: aws.Config{ - Region: region, - }, - Profile: profile, - }) - - if sessionWithOptionError != nil { - return nil, sessionWithOptionError - } - - return session, nil -} - -func UpdateSessionToken(profile string, region string, defaultDuration int64, context *context.Context) error { - user, userError := user.Current() - if userError != nil { - return userError - } - - credentialsFilePath := user.HomeDir + "/.aws/credentials" - configuration, loadCredentialsError := ini.Load(credentialsFilePath) - if loadCredentialsError != nil { - return loadCredentialsError - } - - section, sectionError := configuration.GetSection(profile) - if sectionError != nil { - section, sectionError = configuration.NewSection(profile) - if sectionError != nil { - return sectionError - } - } - - profileLongTerm := profile + "-long-term" - sectionLongTerm, profileLongTermError := configuration.GetSection(profileLongTerm) - if profileLongTermError != nil { - return profileLongTermError - } - - sessionToken := section.Key("aws_session_token") - expiration := section.Key("expiration") - - expirationDate, dataError := time.Parse(dateFormat, section.Key("expiration").Value()) - if dataError == nil { - context.Logger.Info("Session token will expire in " + utilities.TruncateDuration(time.Since(expirationDate)).String() + " (" + expirationDate.Format(dateFormat) + ")") - } - - mfaDevice := 
sectionLongTerm.Key("mfa_serial").Value() - if mfaDevice == "" { - return errors.New("There is no mfa_serial for the profile " + profileLongTerm) - } - - if sessionToken.Value() == "" || expiration.Value() == "" || time.Since(expirationDate).Nanoseconds() > 0 { - session, sessionError := session.NewSessionWithOptions( - session.Options{ - Config: aws.Config{ - Region: ®ion, - }, - Profile: profileLongTerm, - }) - if sessionError != nil { - return sessionError - } - - var tokenCode string - sessionError = context.Logger.GetInput("MFA token code", &tokenCode) - if sessionError != nil { - return sessionError - } - - var duration int64 - if defaultDuration == 0 { - sessionError = context.Logger.GetInput("Duration", &duration) - if sessionError != nil { - return sessionError - } - } else { - duration = defaultDuration - } - - stsSession := sts.New(session) - newToken, tokenError := stsSession.GetSessionToken(&sts.GetSessionTokenInput{ - DurationSeconds: &duration, - SerialNumber: aws.String(mfaDevice), - TokenCode: &tokenCode, - }) - if tokenError != nil { - return tokenError - } - - section.Key("aws_access_key_id").SetValue(*newToken.Credentials.AccessKeyId) - section.Key("aws_secret_access_key").SetValue(*newToken.Credentials.SecretAccessKey) - sessionToken.SetValue(*newToken.Credentials.SessionToken) - section.Key("expiration").SetValue(newToken.Credentials.Expiration.Format(dateFormat)) - - configuration.SaveTo(credentialsFilePath) - } - - return nil -} diff --git a/myuser/myuser.go b/myuser/myuser.go new file mode 100644 index 0000000..28eee8f --- /dev/null +++ b/myuser/myuser.go @@ -0,0 +1,33 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package myuser provides function which ease work with user information e.g path to home directory. +package myuser + +import ( + "os/user" +) + +// GetUserHomeDir gets path to user's home directory. It's used when perun checks if configuration files exists. +func GetUserHomeDir() (string, error) { + user, userError := user.Current() + if userError != nil { + return "", userError + } + path := user.HomeDir + + return path, nil +} diff --git a/offlinevalidator/.DS_Store b/offlinevalidator/.DS_Store deleted file mode 100644 index db28c9d..0000000 Binary files a/offlinevalidator/.DS_Store and /dev/null differ diff --git a/onlinevalidator/onlinevalidator.go b/onlinevalidator/onlinevalidator.go deleted file mode 100644 index bed855f..0000000 --- a/onlinevalidator/onlinevalidator.go +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2017 Appliscale -// -// Maintainers and contributors are listed in README file inside repository. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package onlinevalidator provides tools for online CloudFormation template -// validation using AWS API. -package onlinevalidator - -import ( - "github.com/Appliscale/perun/context" - "github.com/Appliscale/perun/logger" - "github.com/Appliscale/perun/mysession" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/cloudformation" - "io/ioutil" -) - -// Validate template and get URL for cost estimation. -func ValidateAndEstimateCosts(context *context.Context) bool { - valid := false - defer printResult(&valid, context.Logger) - - if context.Config.DefaultDecisionForMFA { - err := mysession.UpdateSessionToken(context.Config.DefaultProfile, context.Config.DefaultRegion, context.Config.DefaultDurationForMFA, context) - if err != nil { - context.Logger.Error(err.Error()) - return false - } - } - - session, err := mysession.CreateSession(context, context.Config.DefaultProfile, &context.Config.DefaultRegion) - if err != nil { - context.Logger.Error(err.Error()) - return false - } - - rawTemplate, err := ioutil.ReadFile(*context.CliArguments.TemplatePath) - if err != nil { - context.Logger.Error(err.Error()) - return false - } - - template := string(rawTemplate) - valid, err = isTemplateValid(session, &template) - if err != nil { - context.Logger.Error(err.Error()) - return false - } - - estimateCosts(session, &template, context.Logger) - - return valid -} - -func isTemplateValid(session *session.Session, template *string) (bool, error) { - api := cloudformation.New(session) - templateStruct := cloudformation.ValidateTemplateInput{ - TemplateBody: template, - } - _, error := api.ValidateTemplate(&templateStruct) - if error != nil { - return false, error - } - - return true, nil -} - -func estimateCosts(session *session.Session, template *string, logger *logger.Logger) { - api := cloudformation.New(session) - templateCostInput := cloudformation.EstimateTemplateCostInput{ - TemplateBody: template, - } - output, err := api.EstimateTemplateCost(&templateCostInput) - - if err != nil { - logger.Error(err.Error()) - return - } - - logger.Info("Costs estimation: " + *output.Url) -} - -func printResult(valid *bool, logger *logger.Logger) { - if !*valid { - logger.Error("Template is invalid!") - } else { - logger.Info("Template is valid!") - } -} diff --git a/parameters/parameters.go b/parameters/parameters.go new file mode 100644 index 0000000..359fc79 --- /dev/null +++ b/parameters/parameters.go @@ -0,0 +1,210 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Package parameters provides tools for interactive creation of parameters file for aws +// cloud formation. +package parameters + +import ( + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "os" + "regexp" + "strings" + + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/helpers" + "github.com/Appliscale/perun/validator/template" + cloudformation2 "github.com/aws/aws-sdk-go/service/cloudformation" + "github.com/awslabs/goformation/cloudformation" +) + +// Parameter contains Key and Value. It describes how looks each parameter. +type Parameter struct { + ParameterKey string + ParameterValue string +} + +// GetJSONParameters gets parameters from context.CliArguments.TemplatePath. +func GetJSONParameters(context *context.Context) (resultString []byte, err error) { + var parameters []*Parameter + parameters, err = GetParameters(context) + if err != nil { + context.Logger.Error(err.Error()) + return + } + + if *context.CliArguments.PrettyPrint { + resultString, err = helpers.PrettyPrintJSON(parameters) + } else { + resultString, err = json.Marshal(parameters) + } + return +} + +// ConfigureParameters allows to create file with parameters. +func ConfigureParameters(context *context.Context) error { + resultString, err := GetJSONParameters(context) + if err != nil { + return err + } + if *context.CliArguments.OutputFilePath != "" { + context.Logger.Info("Writing parameters configuration to file: " + *context.CliArguments.OutputFilePath) + + _, err = os.Stat(*context.CliArguments.OutputFilePath) + if err == nil { + context.Logger.Warning("File " + *context.CliArguments.OutputFilePath + " would be overriten by this action. Do you want to continue? [Y/N]") + var answer string + for answer != "n" && answer != "y" { + fmt.Scanf("%s", &answer) + answer = strings.ToLower(answer) + } + if answer == "n" { + context.Logger.Info("Aborting..") + return errors.New("user aborted") + } + } + err = ioutil.WriteFile(*context.CliArguments.OutputFilePath, resultString, 0666) + if err != nil { + context.Logger.Error(err.Error()) + } + } else { + println(string(resultString)) + } + return nil +} + +// GetAwsParameters gets parameters from context and parses to AWS parameters. +func GetAwsParameters(context *context.Context) (parameters []*cloudformation2.Parameter, err error) { + var params []*Parameter + params, err = GetParameters(context) + if err != nil { + return + } + parameters = ParseParameterToAwsCompatible(params) + return +} + +// ParseParameterToAwsCompatible converts parameters from file to compatible with AWS. +func ParseParameterToAwsCompatible(params []*Parameter) (parameters []*cloudformation2.Parameter) { + for paramnum := range params { + parameters = append(parameters, + &cloudformation2.Parameter{ + ParameterValue: ¶ms[paramnum].ParameterValue, + ParameterKey: ¶ms[paramnum].ParameterKey}) + } + return +} + +// Get the parameters - if parameters file provided - from file, else - interactively from user. 
+func ResolveParameters(context *context.Context) (params []*cloudformation2.Parameter, err error) { + if *context.CliArguments.ParametersFile == "" { + params, err = GetAwsParameters(context) + } else { + var parametersData []byte + var readParameters []*Parameter + parametersData, err = ioutil.ReadFile(*context.CliArguments.ParametersFile) + if err != nil { + return + } + err = json.Unmarshal(parametersData, &readParameters) + if err != nil { + return + } + params = ParseParameterToAwsCompatible(readParameters) + } + return +} + +// GetParameters gets parameters from file, checks correctness and adds to Parameters. +func GetParameters(context *context.Context) (parameters []*Parameter, err error) { + templateFile, err := parseTemplate(context) + if err != nil { + context.Logger.Error(err.Error()) + return nil, err + } + for parameterName, parameterSpec := range templateFile.Parameters { + var parameterValid bool + var parameterValue string + if context.CliArguments.Parameters != nil { + var exists bool + parameterValue, exists = (*context.CliArguments.Parameters)[parameterName] + if exists { + parameterValid, err = checkParameterValid(parameterName, parameterSpec.(map[string]interface{}), parameterValue, context) + } + } else { + parameterValid = false + } + for !parameterValid { + print(parameterName, ": ") + fmt.Scanf("%s", ¶meterValue) + parameterValid, err = checkParameterValid(parameterName, parameterSpec.(map[string]interface{}), parameterValue, context) + if err != nil { + context.Logger.Error(err.Error()) + return + } + } + parameters = append(parameters, &Parameter{ParameterKey: parameterName, ParameterValue: parameterValue}) + } + return +} + +func checkParameterValid(parameterName string, parameterArgument map[string]interface{}, parameterValue string, context *context.Context) (bool, error) { + if parameterArgument["AllowedValues"] != nil { + allowedValues := getAllowedValues(parameterArgument) + if !helpers.SliceContains(allowedValues, parameterValue) { + context.Logger.Error("Value '" + parameterValue + "' is not allowed for Parameter " + parameterName + ". 
Value must be one of following: [" + strings.Join(allowedValues, ", ") + "]") + return false, nil + } + } + + if parameterArgument["AllowedPattern"] != nil { + allowedPattern := parameterArgument["AllowedPattern"].(string) + matches, err := regexp.Match(fmt.Sprintf("^%s$", allowedPattern), []byte(parameterValue)) + if err != nil { + return false, err + } + if !matches { + context.Logger.Error("Value '" + parameterValue + "' does not match the required pattern: " + allowedPattern + " for Parameter " + parameterName) + return false, nil + } + } + return true, nil +} + +func getAllowedValues(parameterArgument map[string]interface{}) (res []string) { + list := parameterArgument["AllowedValues"].([]interface{}) + for _, val := range list { + res = append(res, val.(string)) + } + return +} + +func parseTemplate(context *context.Context) (res cloudformation.Template, err error) { + rawTemplate, err := ioutil.ReadFile(*context.CliArguments.TemplatePath) + if err != nil { + return + } + myTemplate := template.Template{} + parser, err := helpers.GetParser(*context.CliArguments.TemplatePath) + if err != nil { + return + } + res, err = parser(rawTemplate, myTemplate, context.Logger) + return +} diff --git a/perun_logo.png b/perun_logo.png new file mode 100644 index 0000000..40beec5 Binary files /dev/null and b/perun_logo.png differ diff --git a/progress/parsewriter.go b/progress/parsewriter.go new file mode 100644 index 0000000..f1b830d --- /dev/null +++ b/progress/parsewriter.go @@ -0,0 +1,120 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package progress + +import ( + "fmt" + "github.com/fatih/color" + "strings" +) + +const createCompleteStatus = "CREATE_COMPLETE" +const createInProgressStatus = "CREATE_IN_PROGRESS" +const createFailedStatus = "CREATE_FAILED" +const deleteCompleteStatus = "DELETE_COMPLETE" +const deleteFailedStatus = "DELETE_FAILED" +const deleteInProgressStatus = "DELETE_IN_PROGRESS" +const reviewInProgressStatus = "REVIEW_IN_PROGRESS" +const rollbackCompleteStatus = "ROLLBACK_COMPLETE" +const rollbackFailedStatus = "ROLLBACK_FAILED" +const rollbackInProgressStatus = "ROLLBACK_IN_PROGRESS" +const updateCompleteStatus = "UPDATE_COMPLETE" +const updateCompleteCleanupInProgressStatus = "UPDATE_COMPLETE_CLEANUP_IN_PROGRESS" +const updateInProgressStatus = "UPDATE_IN_PROGRESS" +const updateRollbackCompleteStatus = "UPDATE_ROLLBACK_COMPLETE" +const updateRollbackCompleteCleanupInProgressStatus = "UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS" +const updateRollbackFailedStatus = "UPDATE_ROLLBACK_FAILED" +const updateRollbackInProgressStatus = "UPDATE_ROLLBACK_IN_PROGRESS" + +const add = "Add" +const remove = "Remove" +const modify = "Modify" + +// ParseWriter structure describes functions to stain text. 
+type ParseWriter struct { + linesPrinted int + bgRed func(a ...interface{}) string + fgRed func(a ...interface{}) string + fgOrange func(a ...interface{}) string + bgOrange func(a ...interface{}) string + grey func(a ...interface{}) string + bgGreen func(a ...interface{}) string + fgGreen func(a ...interface{}) string + cyan func(a ...interface{}) string + statusColorMap map[string]func(a ...interface{}) string +} + +// NewParseWriter is used to show progress in colorful table. +func NewParseWriter() (pw *ParseWriter) { + pw = &ParseWriter{} + pw.linesPrinted = 0 + pw.bgRed = color.New(color.BgHiRed).SprintFunc() + pw.fgRed = color.New(color.FgRed).SprintFunc() + pw.fgOrange = color.New(color.FgHiYellow).SprintFunc() + pw.bgOrange = color.New(color.BgHiYellow).SprintFunc() + pw.grey = color.New(color.FgHiWhite).SprintFunc() + pw.bgGreen = color.New(color.BgGreen).SprintFunc() + pw.fgGreen = color.New(color.FgHiGreen).SprintFunc() + pw.cyan = color.New(color.FgCyan).SprintFunc() + + pw.statusColorMap = map[string]func(a ...interface{}) string{ + createFailedStatus: pw.bgRed, + rollbackFailedStatus: pw.bgRed, + rollbackCompleteStatus: pw.fgRed, + updateRollbackCompleteStatus: pw.fgRed, + updateRollbackInProgressStatus: pw.fgRed, + rollbackInProgressStatus: pw.fgRed, + deleteFailedStatus: pw.bgRed, + updateRollbackFailedStatus: pw.bgRed, + deleteCompleteStatus: pw.grey, + createInProgressStatus: pw.fgOrange, + updateRollbackCompleteCleanupInProgressStatus: pw.bgOrange, + deleteInProgressStatus: pw.fgOrange, + updateCompleteCleanupInProgressStatus: pw.fgOrange, + updateInProgressStatus: pw.fgOrange, + createCompleteStatus: pw.bgGreen, + updateCompleteStatus: pw.bgGreen, + reviewInProgressStatus: pw.cyan, + add: pw.bgGreen, + remove: pw.bgRed, + modify: pw.fgOrange, + } + return +} + +func (pw *ParseWriter) Write(p []byte) (n int, err error) { + var newString = pw.colorStatuses(string(p)) + fmt.Print(newString) + pw.linesPrinted += strings.Count(newString, "\n") - strings.Count(newString, "\033[A") + return len(p), nil +} +func (pw *ParseWriter) colorStatuses(s string) string { + for status, colorizeFun := range pw.statusColorMap { + if strings.Contains(s, status) { + s = strings.Replace(s, status, colorizeFun(status), -1) + } + } + return s +} + +func (pw *ParseWriter) returnWritten() { + for i := 0; i < pw.linesPrinted; i++ { + fmt.Print("\033[A") + } + pw.linesPrinted = 0 + return +} diff --git a/progress/progress.go b/progress/progress.go new file mode 100644 index 0000000..1f30cdd --- /dev/null +++ b/progress/progress.go @@ -0,0 +1,428 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package progress provides displaying of progress e.g during stack creation. 
+package progress + +import ( + "encoding/json" + "errors" + "os/user" + "strings" + "time" + + "github.com/Appliscale/perun/context" + "github.com/aws/aws-sdk-go/service/sns" + "github.com/aws/aws-sdk-go/service/sqs" + "github.com/olekukonko/tablewriter" +) + +// Connection contains elements need to get connection. +type Connection struct { + context *context.Context + + SqsClient *sqs.SQS + sqsQueueOutput *sqs.CreateQueueOutput + sqsQueueAttributes *sqs.GetQueueAttributesOutput + + snsClient *sns.SNS + TopicArn *string +} + +var sinkName = "perun-sink-" + +const awsTimestampLayout = "2006-01-02T15:04:05.000Z" + +// Configure AWS Resources needed for progress monitoring. +func ConfigureRemoteSink(context *context.Context) (err error) { + conn := initRemoteConnection(context) + snsTopicExists, sqsQueueExists, err := conn.verifyRemoteSinkConfigured() + + if snsTopicExists && sqsQueueExists { + context.Logger.Info("Remote sink has already been configured") + return + } else { + shouldSNSTopicBeRemoved := snsTopicExists && !sqsQueueExists + err = conn.deleteRemainingSinkResources(shouldSNSTopicBeRemoved, false) + if err != nil { + context.Logger.Error("error deleting up remote sink: " + err.Error()) + return + } + + if !sqsQueueExists { + err = conn.setUpSQSQueue() + if err != nil { + context.Logger.Error("Error creating sqs queue: " + err.Error()) + return + } + } + + if shouldSNSTopicBeRemoved || !snsTopicExists { // SNS Topic has been removed or does not exist + err = conn.setUpSNSNotification() + if err != nil { + context.Logger.Error("Error creating sqs queue: " + err.Error()) + return + } + } + + if err == nil { + context.Logger.Info("Remote sink configuration successful") + context.Logger.Warning("It's configuration may take up to a minute, wait before using Perun with flag --progress") + } + return + } +} + +// Remove all AWS Resources created for stack monitoring. +func DestroyRemoteSink(context *context.Context) (conn Connection, err error) { + conn = initRemoteConnection(context) + snsTopicExists, sqsQueueExists, err := conn.verifyRemoteSinkConfigured() + if err != nil { + context.Logger.Error("error verifying: " + err.Error()) + } + + if !(snsTopicExists && sqsQueueExists) { + err = errors.New("remote sink has not been configured or has already been deleted") + return + } else { + err = conn.deleteRemainingSinkResources(snsTopicExists, sqsQueueExists) + if err != nil { + return + } + context.Logger.Info("Remote sink deconstruction successful.") + return + } +} + +// Get configuration of created AWS Resources. +func GetRemoteSink(context *context.Context) (conn Connection, err error) { + conn = initMessageService(context) + snsTopicExists, sqsQueueExists, err := conn.verifyRemoteSinkConfigured() + if !(snsTopicExists && sqsQueueExists) { + err = errors.New("remote sink has not been configured, run 'perun setup-remote-sink' first. 
If You done it already, wait for aws sink configuration") + return + } + return +} + +func initRemoteConnection(context *context.Context) Connection { + context.InitializeAwsAPI() + return initMessageService(context) + +} +func initMessageService(context *context.Context) (conn Connection) { + currentUser, userError := user.Current() + if userError != nil { + context.Logger.Error("error reading currentUser") + } + sinkName += currentUser.Username + "-" + currentUser.Uid + conn.context = context + return +} + +func (conn *Connection) verifyRemoteSinkConfigured() (snsTopicExists bool, sqsQueueExists bool, err error) { + snsTopicExists, err = conn.getSnsTopicAttributes() + if err != nil { + conn.context.Logger.Error("Error getting sns topic configuration: " + err.Error()) + } + sqsQueueExists, err = conn.getSqsQueueAttributes() + if err != nil { + conn.context.Logger.Error("Error getting sqs queue configuration: " + err.Error()) + } + return +} + +// Message - struct with elements of message. +type Message struct { + Type string + MessageId string + TopicArn string + Subject string + Message string + Timestamp string + SignatureVersion string + Signature string + SigningCertURL string + UnsubscribeURL string +} + +// Monitor queue, that delivers messages sent by cloud formation stack progress. +func (conn *Connection) MonitorStackQueue() { + waitTimeSeconds := int64(3) + receiveMessageInput := sqs.ReceiveMessageInput{ + QueueUrl: conn.sqsQueueOutput.QueueUrl, + WaitTimeSeconds: &waitTimeSeconds, + } + + pw, table := initStackTableWriter() + + tolerance, err := time.ParseDuration("1s") + if err != nil { + conn.context.Logger.Error(err.Error()) + } + startReadingMessagesTime := time.Now().Add(-tolerance) + + AnyMessagesLeft := true + for AnyMessagesLeft { + receivedMessages, err := conn.SqsClient.ReceiveMessage(&receiveMessageInput) + if err != nil { + conn.context.Logger.Error("Error reading messages: " + err.Error()) + } + for e := range receivedMessages.Messages { + v := Message{} + jsonBlob := []byte(*receivedMessages.Messages[e].Body) + err = json.Unmarshal(jsonBlob, &v) + if err != nil { + conn.context.Logger.Error("error reading json message" + err.Error()) + } + + // DELETE READ MESSAGE (to prevent reading the same message multiple times) + conn.SqsClient.DeleteMessage(&sqs.DeleteMessageInput{ + QueueUrl: conn.sqsQueueOutput.QueueUrl, + ReceiptHandle: receivedMessages.Messages[e].ReceiptHandle, + }) + + // Parse property message + splittedMessage := strings.FieldsFunc(v.Message, func(r rune) bool { return r == '\n' }) + messageMap := map[string]string{} + for messageNum := range splittedMessage { + messages := strings.FieldsFunc(splittedMessage[messageNum], func(r rune) bool { return r == '=' }) + messageMap[messages[0]] = messages[1] + } + // Parse timestamp of message + messageArrivedTime, err := time.Parse(awsTimestampLayout, v.Timestamp) + if err != nil { + conn.context.Logger.Error(err.Error()) + } + + if startReadingMessagesTime.Before(messageArrivedTime) { + table.Append([]string{v.Timestamp, messageMap["ResourceStatus"], messageMap["ResourceType"], messageMap["LogicalResourceId"], messageMap["ResourceStatusReason"]}) + pw.returnWritten() + table.Render() + // Check if the message has been the last one (status COMPLETE for current stack resource) + if strings.Contains(messageMap["LogicalResourceId"], *conn.context.CliArguments.Stack) && + strings.Contains(messageMap["ResourceStatus"], "COMPLETE") { + AnyMessagesLeft = false + } + } + } + } +} +func initStackTableWriter() 
(*ParseWriter, *tablewriter.Table) { + pw := NewParseWriter() + table := tablewriter.NewWriter(pw) + table.SetHeader([]string{"Time", "Status", "Type", "LogicalID", "Status Reason"}) + table.SetBorder(false) + // Set Border to false + table.SetColumnColor(tablewriter.Colors{tablewriter.FgWhiteColor}, + tablewriter.Colors{tablewriter.Bold}, + tablewriter.Colors{tablewriter.FgWhiteColor}, + tablewriter.Colors{tablewriter.Bold, tablewriter.FgCyanColor}, + tablewriter.Colors{tablewriter.FgWhiteColor}) + return pw, table +} + +func (conn *Connection) setUpSNSNotification() (err error) { + //CREATE SNS TOPIC + conn.snsClient = sns.New(conn.context.CurrentSession) + topicInput := sns.CreateTopicInput{ + Name: &sinkName, + } + topicOutput, _ := conn.snsClient.CreateTopic(&topicInput) + conn.TopicArn = topicOutput.TopicArn + + //SET UP POLICY + err = conn.setUpSqsPolicy() + + protocolSQS := "sqs" + subscribeInput := sns.SubscribeInput{ + Endpoint: conn.sqsQueueAttributes.Attributes[sqs.QueueAttributeNameQueueArn], + Protocol: &protocolSQS, + TopicArn: conn.TopicArn, + } + conn.snsClient.Subscribe(&subscribeInput) + + conn.context.Logger.Info("Set up SNS Notification topic: " + sinkName) + return +} +func (conn *Connection) setUpSQSQueue() (err error) { + conn.SqsClient = sqs.New(conn.context.CurrentSession) + + sixtySec := "60" + sqsInput := sqs.CreateQueueInput{ + QueueName: &sinkName, + Attributes: map[string]*string{ + "MessageRetentionPeriod": &sixtySec, + }, + } + conn.sqsQueueOutput, err = conn.SqsClient.CreateQueue(&sqsInput) + if err != nil { + return + } + + arnAttribute := sqs.QueueAttributeNameAll + queueAttributesInput := sqs.GetQueueAttributesInput{ + AttributeNames: []*string{&arnAttribute}, + QueueUrl: conn.sqsQueueOutput.QueueUrl, + } + conn.sqsQueueAttributes, err = conn.SqsClient.GetQueueAttributes(&queueAttributesInput) + if err != nil { + return + } + conn.context.Logger.Info("Set up SQS Notification Queue: " + sinkName) + return +} + +func (conn *Connection) setUpSqsPolicy() (err error) { + jsonStringPolicy, err := conn.createJsonPolicy() + if err != nil { + conn.context.Logger.Error("error creating json: " + err.Error()) + } + + queueAttributes := sqs.SetQueueAttributesInput{ + QueueUrl: conn.sqsQueueOutput.QueueUrl, + Attributes: map[string]*string{ + sqs.QueueAttributeNamePolicy: &jsonStringPolicy, + }, + } + conn.SqsClient.SetQueueAttributes(&queueAttributes) + + conn.context.Logger.Info("Created SQS access policy for SNS Topic: " + sinkName) + return +} + +type PolicyDocument struct { + Version string + Statement []StatementEntry +} +type StatementEntry struct { + Sid string + Effect string + Action []string + Resource string + Condition Condition + Principal string +} +type Condition struct { + StringEquals map[string]string +} + +func (conn *Connection) createJsonPolicy() (jsonStringPolicy string, err error) { + policy := PolicyDocument{ + Version: "2012-10-17", + Statement: []StatementEntry{ + { + Effect: "Allow", + Action: []string{ + "SQS:*", + }, + Resource: *conn.sqsQueueAttributes.Attributes[sqs.QueueAttributeNameQueueArn], + Condition: Condition{ + StringEquals: map[string]string{"aws:SourceArn": *conn.TopicArn}, + }, + Principal: "*", + }, + }, + } + + jsonPolicy, err := json.Marshal(policy) + jsonStringPolicy = string(jsonPolicy) + return +} +func (conn *Connection) getSnsTopicAttributes() (topicExists bool, err error) { + conn.snsClient = sns.New(conn.context.CurrentSession) + + topicExists = false + listTopicsInput := sns.ListTopicsInput{} + err = 
conn.snsClient.ListTopicsPages(&listTopicsInput, + func(output *sns.ListTopicsOutput, lastPage bool) bool { + for topicNum := range output.Topics { + if strings.Contains(*output.Topics[topicNum].TopicArn, sinkName) { + topicExists = true + conn.TopicArn = output.Topics[topicNum].TopicArn + return false + } + } + return true + }) + return +} +func (conn *Connection) getSqsQueueAttributes() (queueExists bool, err error) { + conn.SqsClient = sqs.New(conn.context.CurrentSession) + + queueExists = false + listQueuesInput := sqs.ListQueuesInput{ + QueueNamePrefix: &sinkName, + } + + listQueuesOutput, err := conn.SqsClient.ListQueues(&listQueuesInput) + + for queueNum := range listQueuesOutput.QueueUrls { + if strings.Contains(*listQueuesOutput.QueueUrls[queueNum], sinkName) { + queueExists = true + conn.sqsQueueOutput = &sqs.CreateQueueOutput{ + QueueUrl: listQueuesOutput.QueueUrls[queueNum], + } + + arnAttribute := sqs.QueueAttributeNameQueueArn + queueAttributesInput := sqs.GetQueueAttributesInput{ + AttributeNames: []*string{&arnAttribute}, + QueueUrl: conn.sqsQueueOutput.QueueUrl, + } + conn.sqsQueueAttributes, err = conn.SqsClient.GetQueueAttributes(&queueAttributesInput) + if err != nil { + return + } + return + } + } + return +} + +func (conn *Connection) deleteSnsTopic() (err error) { + deleteTopicInput := sns.DeleteTopicInput{ + TopicArn: conn.TopicArn, + } + _, err = conn.snsClient.DeleteTopic(&deleteTopicInput) + return +} +func (conn *Connection) deleteSqsQueue() (err error) { + deleteQueueInput := sqs.DeleteQueueInput{ + QueueUrl: conn.sqsQueueOutput.QueueUrl, + } + _, err = conn.SqsClient.DeleteQueue(&deleteQueueInput) + return +} + +func (conn *Connection) deleteRemainingSinkResources(deleteSnsTopic bool, deleteSqsQueue bool) (err error) { + if deleteSnsTopic { + err = conn.deleteSnsTopic() + if err != nil { + conn.context.Logger.Error("Error deleting sns Topic: " + err.Error()) + return + } + conn.context.Logger.Info("Deleting SNS Topic") + } + if deleteSqsQueue { + err = conn.deleteSqsQueue() + if err != nil { + conn.context.Logger.Error("Error deleting sqs Queue: " + err.Error()) + return + } + conn.context.Logger.Info("Deleting SQS Queue") + } + return +} diff --git a/release.sh b/release.sh new file mode 100644 index 0000000..50291cf --- /dev/null +++ b/release.sh @@ -0,0 +1,61 @@ +#!/bin/sh + +user="Travis CI" +email="travis@travis-ci.org" +perun="perun-linux-amd64" +github="https://$2@github.com/Appliscale" +release="https://github.com/Appliscale/perun/releases/download/$1/$perun.tar.gz" +files="https://raw.githubusercontent.com/Appliscale/perun/master" + +cd ~ +sudo apt-get install rpm +git clone https://github.com/Appliscale/rpmbuild.git +cd rpmbuild/SOURCES +rm $perun.tar.gz +wget $release +wget $files/LICENSE +tar xvzf $perun.tar.gz +rm $perun.tar.gz +tar cvzf $perun.tar.gz $perun LICENSE +rm LICENSE $perun +cd .. +rpmbuild -ba SPECS/$perun.spec +git remote +git config user.email $email +git config user.name $user +git add . +git commit -m "[AUTO] Update RPM by Travis CI. Perun $1" +git push $github/rpmbuild.git master + +cd ~ +git clone https://github.com/Appliscale/perun-dpkg.git +cd perun-dpkg +chmod +x control.sh +./control.sh $1 +cd /perun/usr/local/bin +rm $perun +wget $release +tar xvzf $perun.tar.gz +rm $perun.tar.gz +cd ~/perun-dpkg +dpkg-deb --build perun +git remote +git config user.email $email +git config user.name $user +git add . +git commit -m "[AUTO] Update DPKG by Travis CI. 
Perun $1" +git push $github/perun-dpkg.git master + +cd ~ +git clone https://github.com/Appliscale/homebrew-tap.git +cd homebrew-tap +wget https://github.com/Appliscale/perun/blob/master/formula.sh +chmod +x formula.sh +./formula.sh $1 +rm formula.sh +git remote +git config user.email $email +git config user.name $user +git add . +git commit -m "[AUTO] Update Homebrew by Travis CI. Perun $1" +git push $github/homebrew-tap.git master diff --git a/sampleTemplate.yaml b/sampleTemplate.yaml new file mode 100644 index 0000000..903bb81 --- /dev/null +++ b/sampleTemplate.yaml @@ -0,0 +1,14 @@ +AWSTemplateFormatVersion: "2010-09-09" + +Resources: + MyCompanyTestS3Bucket: + Type: AWS::S3::Bucket + Properties: + BucketName: peruntestbucketname + + TestEC2Instance: + Type: AWS::EC2::Instance + Properties: + ImageId: "ami-0ff8a91507f77f867" + InstanceType: t2.micro + KeyName: "key-name" diff --git a/specification/specification.go b/specification/specification.go index da8fb95..6a358a4 100644 --- a/specification/specification.go +++ b/specification/specification.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -30,17 +30,20 @@ import ( "github.com/Appliscale/perun/context" ) +// Specification contains information about specification - type and version. type Specification struct { PropertyTypes map[string]PropertyType ResourceSpecificationVersion string ResourceTypes map[string]Resource } +// PropertyType contains Documentation and map of Properties. type PropertyType struct { Documentation string Properties map[string]Property } +// Property describes what should be in structure. type Property struct { Documentation string DuplicatesAllowed bool @@ -52,12 +55,14 @@ type Property struct { UpdateType string } +// Resource in specification contains documentation, map of attributes and properties. type Resource struct { Documentation string Attributes map[string]Attribute Properties map[string]Property } +// Attribute of item. type Attribute struct { ItemType string PrimitiveItemType string @@ -65,7 +70,7 @@ type Attribute struct { Type string } -// IsSubproperty : Checks if it is subproperty +// IsSubproperty : Checks if it is subproperty. func (property *Property) IsSubproperty() bool { if property.Type != "List" && property.Type != "Map" && len(property.Type) > 0 { return true @@ -83,7 +88,7 @@ func GetSpecification(context *context.Context) (specification Specification, er return GetSpecificationFromFile(filePath) } -// Get specification from file. +// Get specification from file. It's used in GetSpecification(). func GetSpecificationFromFile(specificationFilePath string) (specification Specification, err error) { specificationFile, err := ioutil.ReadFile(specificationFilePath) if err != nil { diff --git a/specification/specification_test.go b/specification/specification_test.go index f461fd4..b6140a4 100644 --- a/specification/specification_test.go +++ b/specification/specification_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // diff --git a/stack/applyingstackpolicy.go b/stack/applyingstackpolicy.go new file mode 100644 index 0000000..2301275 --- /dev/null +++ b/stack/applyingstackpolicy.go @@ -0,0 +1,51 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package stack provides methods to manage AWS CloudFormation stacks. +package stack + +import ( + "github.com/Appliscale/perun/context" + "github.com/aws/aws-sdk-go/service/cloudformation" +) + +// ApplyStackPolicy creates StackPolicy from JSON file. +func ApplyStackPolicy(context *context.Context) error { + template, stackName, incorrectPath := getTemplateFromFile(context) + if incorrectPath != nil { + context.Logger.Error(incorrectPath.Error()) + return incorrectPath + } + templateStruct := createStackPolicyInput(&template, &stackName) + + _, creationError := context.CloudFormation.SetStackPolicy(&templateStruct) + if creationError != nil { + context.Logger.Error("Error creating stack policy: " + creationError.Error()) + return creationError + } + + context.Logger.Info("Stack Policy Change request successful") + return nil +} + +// This function gets template and name of stack. It creates "CreateStackInput" structure. +func createStackPolicyInput(template *string, stackName *string) cloudformation.SetStackPolicyInput { + templateStruct := cloudformation.SetStackPolicyInput{ + StackPolicyBody: template, + StackName: stackName, + } + return templateStruct +} diff --git a/stack/applyingtackpolicy_test.go b/stack/applyingtackpolicy_test.go new file mode 100644 index 0000000..f78aeec --- /dev/null +++ b/stack/applyingtackpolicy_test.go @@ -0,0 +1,50 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package stack + +import ( + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestApplyStackPolicy(t *testing.T) { + stackName := "StackName" + policyPath := "./test_resources/test_stackpolicy.json" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "set-stack-policy", stackName, policyPath}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + ctx.CloudFormation = mockAWSPI + + template := stack_mocks.ReadFile(t, policyPath) + + input := createStackPolicyInput(&template, &stackName) + mockAWSPI.EXPECT().SetStackPolicy(&input).Return(nil, nil).Times(1) + + ApplyStackPolicy(ctx) +} + +func TestCreateStackPolicyInput(t *testing.T) { + stackName := "StackName" + templateBody := "TestTemplate" + returnedValue := createStackPolicyInput(&templateBody, &stackName) + assert.Equal(t, *returnedValue.StackName, stackName) + assert.Equal(t, *returnedValue.StackPolicyBody, templateBody) +} diff --git a/stack/changeset.go b/stack/changeset.go new file mode 100644 index 0000000..11c54c3 --- /dev/null +++ b/stack/changeset.go @@ -0,0 +1,165 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stack + +import ( + "fmt" + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/parameters" + "github.com/Appliscale/perun/progress" + "github.com/aws/aws-sdk-go/service/cloudformation" + "github.com/olekukonko/tablewriter" + "strings" +) + +func createChangeSetInput(template *string, stackName *string, params []*cloudformation.Parameter, context *context.Context) (cloudformation.CreateChangeSetInput, error) { + + templateStruct := cloudformation.CreateChangeSetInput{ + Parameters: params, + ChangeSetName: context.CliArguments.ChangeSet, + TemplateBody: template, + StackName: stackName, + } + return templateStruct, nil +} + +func createDeleteChangeSetInput(ctx *context.Context) cloudformation.DeleteChangeSetInput { + return cloudformation.DeleteChangeSetInput{ + ChangeSetName: ctx.CliArguments.ChangeSet, + StackName: ctx.CliArguments.Stack, + } +} + +// DeleteChangeSet deletes change set. +func DeleteChangeSet(ctx *context.Context) (err error) { + templateStruct := createDeleteChangeSetInput(ctx) + _, err = ctx.CloudFormation.DeleteChangeSet(&templateStruct) + if err != nil { + ctx.Logger.Error(err.Error()) + return + } + ctx.Logger.Info("Deletion of Change Set " + *ctx.CliArguments.ChangeSet + " request successful") + return +} + +// NewChangeSet create change set and gets parameters. 
+func NewChangeSet(context *context.Context) (err error) { + template, stackName, err := getTemplateFromFile(context) + if err != nil { + return + } + + params, err := parameters.ResolveParameters(context) + if err != nil { + context.Logger.Error(err.Error()) + return + } + + templateStruct, templateError := createChangeSetInput(&template, &stackName, params, context) + if templateError != nil { + return + } + + _, err = context.CloudFormation.CreateChangeSet(&templateStruct) + + if err != nil { + context.Logger.Error(err.Error()) + return + } + + describeChangeSet(context) + + if shouldExecuteChangeSet() { + templateStruct := cloudformation.UpdateStackInput{ + Parameters: params, + TemplateBody: &template, + StackName: &stackName, + } + doUpdateStack(context, templateStruct) + } + return +} + +func shouldExecuteChangeSet() bool { + println("Do You want to execute the change set? (Y/N) ") + for true { + var executeChangeSet string + fmt.Scanf("%s", &executeChangeSet) + if strings.ToLower(executeChangeSet) == "n" { + return false + } else if strings.ToLower(executeChangeSet) == "y" { + return true + } + } + return false +} + +func describeChangeSet(context *context.Context) error { + context.Logger.Info("Waiting for change set creation...") + describeChangeSetInput := cloudformation.DescribeChangeSetInput{ + ChangeSetName: context.CliArguments.ChangeSet, + StackName: context.CliArguments.Stack, + } + + err := context.CloudFormation.WaitUntilChangeSetCreateComplete(&describeChangeSetInput) + if err != nil { + context.Logger.Error(err.Error()) + return err + } + + describeChangeSetOutput, err := context.CloudFormation.DescribeChangeSet(&describeChangeSetInput) + if err != nil { + context.Logger.Error(err.Error()) + return err + } + + _, table := initStackTableWriter() + for rowNum := range describeChangeSetOutput.Changes { + currRow := describeChangeSetOutput.Changes[rowNum] + var physicalResourceId string = "" + var replacement string = "" + if currRow.ResourceChange.PhysicalResourceId != nil { + physicalResourceId = *currRow.ResourceChange.PhysicalResourceId + } + if currRow.ResourceChange.Replacement != nil { + replacement = *currRow.ResourceChange.Replacement + } + table.Append([]string{ + *currRow.ResourceChange.Action, + *currRow.ResourceChange.LogicalResourceId, + physicalResourceId, + *currRow.ResourceChange.ResourceType, + replacement, + }) + } + table.Render() + return nil +} + +func initStackTableWriter() (*progress.ParseWriter, *tablewriter.Table) { + pw := progress.NewParseWriter() + table := tablewriter.NewWriter(pw) + table.SetHeader([]string{"Action", "Logical ID", "Physical ID", "Resource Type", "Replacement"}) + table.SetBorder(false) + table.SetColumnColor( + tablewriter.Colors{tablewriter.Bold}, + tablewriter.Colors{tablewriter.Bold, tablewriter.FgCyanColor}, + tablewriter.Colors{tablewriter.FgWhiteColor}, + tablewriter.Colors{tablewriter.FgWhiteColor}, + tablewriter.Colors{tablewriter.FgWhiteColor, tablewriter.Bold}) + return pw, table +} diff --git a/stack/changeset_test.go b/stack/changeset_test.go new file mode 100644 index 0000000..9e1ffec --- /dev/null +++ b/stack/changeset_test.go @@ -0,0 +1,150 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stack + +import ( + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/aws/aws-sdk-go/service/cloudformation" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/assert" + "io/ioutil" + "log" + "os" + "testing" +) + +func TestNewChangeSet(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + changeSetName := "ChangeSetName" + template := stack_mocks.ReadFile(t, templatePath) + ctx := stack_mocks.SetupContext(t, []string{"cmd", "create-change-set", stackName, templatePath, changeSetName}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + ctx.CloudFormation = mockAWSPI + + describeChangeSetInput := cloudformation.DescribeChangeSetInput{ + ChangeSetName: &changeSetName, + StackName: &stackName, + } + + mockAWSPI.EXPECT().WaitUntilChangeSetCreateComplete(&describeChangeSetInput).Return(nil).Times(2) + output := cloudformation.DescribeChangeSetOutput{ + Changes: []*cloudformation.Change{}, + } + mockAWSPI.EXPECT().DescribeChangeSet(&describeChangeSetInput).Return(&output, nil).Times(2) + changeSetInput, err := createChangeSetInput(&template, &stackName, nil, ctx) + assert.Empty(t, err) + mockAWSPI.EXPECT().CreateChangeSet(&changeSetInput).Return(nil, nil).Times(2) + updateStackInput := cloudformation.UpdateStackInput{ + TemplateBody: &template, + StackName: &stackName, + } + mockAWSPI.EXPECT().UpdateStack(&updateStackInput).Return(nil, nil).Times(1) //This shouldn't be called when user input is no + + testChangeSetCreationWithUserInput("y", NewChangeSet, ctx) + testChangeSetCreationWithUserInput("n", NewChangeSet, ctx) + +} + +func TestCreateChangeSetInput(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + changeSetName := "ChangeSetName" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "create-change-set", stackName, templatePath, changeSetName}) + template := stack_mocks.ReadFile(t, templatePath) + + returnedInput, err := createChangeSetInput(&template, &stackName, []*cloudformation.Parameter{}, ctx) + assert.Empty(t, err) + assert.Equal(t, *returnedInput.StackName, stackName) + assert.Equal(t, *returnedInput.TemplateBody, template) + assert.Equal(t, returnedInput.Parameters, []*cloudformation.Parameter{}) + assert.Equal(t, *returnedInput.ChangeSetName, changeSetName) +} + +func TestDescribeChangeSet(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + changeSetName := "ChangeSetName" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "create-change-set", stackName, templatePath, changeSetName}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + ctx.CloudFormation = mockAWSPI + + describeChangeSetInput := cloudformation.DescribeChangeSetInput{ + ChangeSetName: &changeSetName, + StackName: &stackName, + } + + 
mockAWSPI.EXPECT().WaitUntilChangeSetCreateComplete(&describeChangeSetInput).Return(nil).Times(1) + output := cloudformation.DescribeChangeSetOutput{ + Changes: []*cloudformation.Change{}, + } + mockAWSPI.EXPECT().DescribeChangeSet(&describeChangeSetInput).Return(&output, nil).Times(1) + describeChangeSet(ctx) + +} + +func TestShouldExecuteChangeSet(t *testing.T) { + assert.True(t, testCheckUserInput("Y", shouldExecuteChangeSet)) + assert.False(t, testCheckUserInput("N", shouldExecuteChangeSet)) + assert.True(t, testCheckUserInput("y", shouldExecuteChangeSet)) + assert.False(t, testCheckUserInput("n", shouldExecuteChangeSet)) +} + +type checkFunction func() bool +type newChangeSetFunction func(*context.Context) error + +func testCheckUserInput(userInput string, function checkFunction) bool { + tmpfile, oldStdin := supportStdInputReplacement(userInput) + defer os.Remove(tmpfile.Name()) // clean up + defer func() { os.Stdin = oldStdin }() // Restore original Stdin + defer tmpfile.Close() + + return function() +} + +func testChangeSetCreationWithUserInput(userInput string, function newChangeSetFunction, context *context.Context) error { + tmpfile, oldStdin := supportStdInputReplacement(userInput) + defer os.Remove(tmpfile.Name()) // clean up + defer func() { os.Stdin = oldStdin }() // Restore original Stdin + defer tmpfile.Close() + + return function(context) +} + +func supportStdInputReplacement(userInput string) (*os.File, *os.File) { + content := []byte(userInput) + tmpfile, err := ioutil.TempFile("", "example") + if err != nil { + log.Fatal(err) + } + if _, err := tmpfile.Write(content); err != nil { + log.Fatal(err) + } + if _, err := tmpfile.Seek(0, 0); err != nil { + log.Fatal(err) + } + oldStdin := os.Stdin + os.Stdin = tmpfile + return tmpfile, oldStdin +} diff --git a/stack/creatingstack.go b/stack/creatingstack.go new file mode 100644 index 0000000..c09186a --- /dev/null +++ b/stack/creatingstack.go @@ -0,0 +1,78 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stack + +import ( + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/parameters" + "github.com/Appliscale/perun/progress" + "github.com/aws/aws-sdk-go/service/cloudformation" +) + +// This function gets template and name of stack. It creates "CreateStackInput" structure. +func createStackInput(template *string, stackName *string, context *context.Context) (cloudformation.CreateStackInput, error) { + params, err := parameters.ResolveParameters(context) + if err != nil { + context.Logger.Error(err.Error()) + return cloudformation.CreateStackInput{}, err + } + + templateStruct := cloudformation.CreateStackInput{ + Parameters: params, + TemplateBody: template, + StackName: stackName, + } + return templateStruct, nil +} + +// NewStack create Stack. It's get template from context.CliArguments.TemplatePath. 
+func NewStack(context *context.Context) error { + template, stackName, incorrectPath := getTemplateFromFile(context) + if incorrectPath != nil { + context.Logger.Error(incorrectPath.Error()) + return incorrectPath + } + templateStruct, templateError := createStackInput(&template, &stackName, context) + if templateError != nil { + context.Logger.Error(templateError.Error()) + return templateError + } + + if *context.CliArguments.Progress { + conn, remoteSinkError := progress.GetRemoteSink(context) + if remoteSinkError != nil { + context.Logger.Error("Error getting remote sink configuration: " + remoteSinkError.Error()) + return remoteSinkError + } + templateStruct.NotificationARNs = []*string{conn.TopicArn} + _, creationError := context.CloudFormation.CreateStack(&templateStruct) + if creationError != nil { + context.Logger.Error("Error creating stack: " + creationError.Error()) + return creationError + } + conn.MonitorStackQueue() + } else { + _, creationError := context.CloudFormation.CreateStack(&templateStruct) + if creationError != nil { + context.Logger.Error("Error creating stack: " + creationError.Error()) + return creationError + } + context.Logger.Info("Stack creation request successful") + } + + return nil +} diff --git a/stack/creatingstack_test.go b/stack/creatingstack_test.go new file mode 100644 index 0000000..7511401 --- /dev/null +++ b/stack/creatingstack_test.go @@ -0,0 +1,58 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
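
Stripped of perun's context and remote-sink plumbing, the --progress branch of NewStack above boils down to attaching an SNS topic to the stack via NotificationARNs, so CloudFormation publishes stack events that MonitorStackQueue then drains. A rough sketch of that request is below; the stack name, topic ARN and inline template body (taken from the shape of test_resources/test_template.yaml) are placeholders, not perun's actual values:

    package main

    import (
    	"log"

    	"github.com/aws/aws-sdk-go/aws"
    	"github.com/aws/aws-sdk-go/aws/session"
    	"github.com/aws/aws-sdk-go/service/cloudformation"
    )

    func main() {
    	api := cloudformation.New(session.Must(session.NewSession()))

    	input := &cloudformation.CreateStackInput{
    		StackName:    aws.String("my-stack"),
    		TemplateBody: aws.String("AWSTemplateFormatVersion: \"2010-09-09\"\nResources:\n  S3:\n    Type: AWS::S3::Bucket"),
    		// With --progress, NewStack additionally sets NotificationARNs to the
    		// remote sink's SNS topic so stack events can be monitored.
    		NotificationARNs: []*string{aws.String("arn:aws:sns:eu-west-1:123456789012:perun-progress")},
    	}
    	if _, err := api.CreateStack(input); err != nil {
    		log.Fatal("stack creation failed: ", err)
    	}
    }
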
+package stack + +import ( + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestNewStack(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "create-stack", stackName, templatePath}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + ctx.CloudFormation = mockAWSPI + + template := stack_mocks.ReadFile(t, templatePath) + + input, err := createStackInput(&template, &stackName, ctx) + if err != nil { + t.Error(err.Error()) + } + mockAWSPI.EXPECT().CreateStack(&input).Return(nil, nil).Times(1) + + NewStack(ctx) +} + +func TestCreateStackInput(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + ctx := stack_mocks.SetupContext(t, []string{"cmd", "create-stack", stackName, templatePath}) + templateBody := stack_mocks.ReadFile(t, templatePath) + returnedValue, err := createStackInput(&templateBody, &stackName, ctx) + if err != nil { + t.Error(err.Error()) + } + + assert.Equal(t, *returnedValue.StackName, stackName) + assert.Equal(t, *returnedValue.TemplateBody, templateBody) +} diff --git a/stack/destroyingstack.go b/stack/destroyingstack.go new file mode 100644 index 0000000..db646d8 --- /dev/null +++ b/stack/destroyingstack.go @@ -0,0 +1,60 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stack + +import ( + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/progress" + "github.com/aws/aws-sdk-go/service/cloudformation" +) + +// DestroyStack bases on "DeleteStackInput" structure and destroys stack. It uses "StackName" to choose which stack will be destroy. Before that it creates session. +func DestroyStack(context *context.Context) error { + delStackInput := deleteStackInput(context) + + var err error = nil + if *context.CliArguments.Progress { + conn, err := progress.GetRemoteSink(context) + if err != nil { + context.Logger.Error("Error getting remote sink configuration: " + err.Error()) + return err + } + _, err = context.CloudFormation.DeleteStack(&delStackInput) + if err != nil { + context.Logger.Error(err.Error()) + return err + } + conn.MonitorStackQueue() + } else { + _, err = context.CloudFormation.DeleteStack(&delStackInput) + if err != nil { + context.Logger.Error(err.Error()) + return err + } + context.Logger.Info("Stack deletion request successful") + } + return nil +} + +// This function gets "StackName" from Stack in CliArguments and creates "DeleteStackInput" structure. 
+func deleteStackInput(context *context.Context) cloudformation.DeleteStackInput { + name := *context.CliArguments.Stack + templateStruct := cloudformation.DeleteStackInput{ + StackName: &name, + } + return templateStruct +} diff --git a/stack/destroyingstack_test.go b/stack/destroyingstack_test.go new file mode 100644 index 0000000..4b3121d --- /dev/null +++ b/stack/destroyingstack_test.go @@ -0,0 +1,49 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stack + +import ( + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/aws/aws-sdk-go/service/cloudformation" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestDestroyStack(t *testing.T) { + stackName := "StackName" + context := stack_mocks.SetupContext(t, []string{"cmd", "delete-stack", stackName}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + context.CloudFormation = mockAWSPI + + templateStruct := cloudformation.DeleteStackInput{ + StackName: &stackName, + } + mockAWSPI.EXPECT().DeleteStack(&templateStruct).Return(nil, nil).Times(1) + + DestroyStack(context) +} + +func TestDeleteStackInput(t *testing.T) { + stackName := "StackName" + context := stack_mocks.SetupContext(t, []string{"cmd", "delete-stack", stackName}) + returnedValue := deleteStackInput(context) + assert.Equal(t, returnedValue.StackName, &stackName) +} diff --git a/stack/protectionfromtermination.go b/stack/protectionfromtermination.go new file mode 100644 index 0000000..13623dc --- /dev/null +++ b/stack/protectionfromtermination.go @@ -0,0 +1,67 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package stack + +import ( + "errors" + "github.com/Appliscale/perun/context" + "github.com/aws/aws-sdk-go/service/cloudformation" +) + +// Getting stackName and flag which describes stack termination protection to create struct. +func createUpdateTerminationProtectionInput(stackName string, isProtectionEnable bool) cloudformation.UpdateTerminationProtectionInput { + templateStruct := cloudformation.UpdateTerminationProtectionInput{ + EnableTerminationProtection: &isProtectionEnable, + StackName: &stackName, + } + + return templateStruct +} + +// Checking flag and settting protection. 
+func isProtectionEnable(context *context.Context) (bool, error) {
+	if *context.CliArguments.DisableStackTermination {
+		return true, nil
+	} else if *context.CliArguments.EnableStackTermination {
+		return false, nil
+	}
+
+	return false, errors.New("Incorrect StackTerminationProtection flag")
+}
+
+// SetTerminationProtection turns termination protection on or off, controlling whether the stack can be deleted.
+func SetTerminationProtection(context *context.Context) error {
+	stackName := context.CliArguments.Stack
+	isProtectionEnable, stackTerminationError := isProtectionEnable(context)
+	if stackTerminationError != nil {
+		return stackTerminationError
+	}
+	templateStruct := createUpdateTerminationProtectionInput(*stackName, isProtectionEnable)
+	_, apiError := context.CloudFormation.UpdateTerminationProtection(&templateStruct)
+	if apiError != nil {
+		context.Logger.Error("Error setting stack termination protection: " + apiError.Error())
+		return apiError
+	}
+
+	if isProtectionEnable {
+		context.Logger.Info("Termination Protection Enabled successfully")
+	} else {
+		context.Logger.Info("Termination Protection Disabled successfully")
+	}
+
+	return nil
+}
diff --git a/stack/protectionfromtermination_test.go b/stack/protectionfromtermination_test.go
new file mode 100644
index 0000000..1b02973
--- /dev/null
+++ b/stack/protectionfromtermination_test.go
@@ -0,0 +1,72 @@
+// Copyright 2018 Appliscale
+//
+// Maintainers and contributors are listed in README file inside repository.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
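
The flag-to-API mapping in isProtectionEnable is easy to misread: --disable-stack-termination turns termination protection on (the stack cannot be deleted), while --enable-stack-termination turns it off — which is exactly what the tests below assert. A compact, hypothetical restatement of that decision, independent of perun's CLI types (protectionFromFlags is an illustrative name, not part of the codebase):

    package main

    import (
    	"errors"
    	"fmt"
    )

    // protectionFromFlags mirrors isProtectionEnable above using plain booleans
    // instead of parsed CLI arguments.
    func protectionFromFlags(disableTermination, enableTermination bool) (bool, error) {
    	switch {
    	case disableTermination:
    		return true, nil // EnableTerminationProtection = true: deletion is blocked
    	case enableTermination:
    		return false, nil // EnableTerminationProtection = false: deletion is allowed
    	default:
    		return false, errors.New("incorrect StackTerminationProtection flag")
    	}
    }

    func main() {
    	fmt.Println(protectionFromFlags(true, false)) // true <nil>  (--disable-stack-termination)
    	fmt.Println(protectionFromFlags(false, true)) // false <nil> (--enable-stack-termination)
    }
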
+package stack + +import ( + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestCreateUpdateTerminationProtectionInput(t *testing.T) { + stackName := "StackName" + + returnedTemplateStruct := createUpdateTerminationProtectionInput(stackName, true) + + assert.Equal(t, stackName, *returnedTemplateStruct.StackName) + assert.Equal(t, true, *returnedTemplateStruct.EnableTerminationProtection) + + returnedTemplateStruct = createUpdateTerminationProtectionInput(stackName, false) + + assert.Equal(t, stackName, *returnedTemplateStruct.StackName) + assert.Equal(t, false, *returnedTemplateStruct.EnableTerminationProtection) +} + +func TestIsProtectionEnabled(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + + context := stack_mocks.SetupContext(t, []string{"cmd", "set-stack-policy", stackName, templatePath, "--enable-stack-termination"}) + enabled, err := isProtectionEnable(context) + assert.False(t, enabled) + assert.Equal(t, err, nil) + + context = stack_mocks.SetupContext(t, []string{"cmd", "set-stack-policy", stackName, templatePath, "--disable-stack-termination"}) + enabled, err = isProtectionEnable(context) + assert.Equal(t, err, nil) + assert.True(t, enabled) + + context = stack_mocks.SetupContext(t, []string{"cmd", "set-stack-policy", stackName, templatePath}) + _, err = isProtectionEnable(context) + assert.NotEmpty(t, err) +} + +func TestSetTerminationProtection(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + context := stack_mocks.SetupContext(t, []string{"cmd", "set-stack-policy", stackName, templatePath, "--enable-stack-termination"}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + context.CloudFormation = mockAWSPI + + templateStruct := createUpdateTerminationProtectionInput(stackName, false) + + mockAWSPI.EXPECT().UpdateTerminationProtection(&templateStruct).Return(nil, nil).Times(1) + SetTerminationProtection(context) +} diff --git a/stack/stack.go b/stack/stack.go index 0f49f2c..ce77b64 100644 --- a/stack/stack.go +++ b/stack/stack.go @@ -1,73 +1,82 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + package stack import ( - "github.com/Appliscale/perun/context" - "github.com/Appliscale/perun/mysession" - "github.com/aws/aws-sdk-go/aws/session" - "github.com/aws/aws-sdk-go/service/cloudformation" + "errors" + "path" + "io/ioutil" -) -// This function gets template and name of stack. It creates "CreateStackInput" structure. 
-func createStackInput(context *context.Context, template *string, stackName *string) cloudformation.CreateStackInput { - templateStruct := cloudformation.CreateStackInput{ - TemplateBody: template, - StackName: stackName, - } - return templateStruct -} + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/myuser" +) // This function reads "StackName" from Stack in CliArguments and file from TemplatePath in CliArguments. It converts these to type string. -func getTemplateFromFile(context *context.Context) (string, string) { +func getTemplateFromFile(context *context.Context) (string, string, error) { + var rawTemplate []byte + var readFileError error + templatePath, pathError := getPath(context) + if pathError != nil { + return "", "", pathError + } - rawTemplate, readFileError := ioutil.ReadFile(*context.CliArguments.TemplatePath) + rawTemplate, readFileError = ioutil.ReadFile(templatePath) if readFileError != nil { context.Logger.Error(readFileError.Error()) + return "", "", readFileError } rawStackName := *context.CliArguments.Stack template := string(rawTemplate) stackName := rawStackName - return template, stackName -} - -// This function uses CreateStackInput variable to create Stack. -func createStack(templateStruct cloudformation.CreateStackInput, session *session.Session) { - api := cloudformation.New(session) - api.CreateStack(&templateStruct) + return template, stackName, nil } -// This function uses all functions above and session to create Stack. -func NewStack(context *context.Context) { - template, stackName := getTemplateFromFile(context) - templateStruct := createStackInput(context, &template, &stackName) - tokenError := mysession.UpdateSessionToken(context.Config.DefaultProfile, context.Config.DefaultRegion, context.Config.DefaultDurationForMFA, context) - if tokenError != nil { - context.Logger.Error(tokenError.Error()) +// Looking for path to user/default template. +func getPath(context *context.Context) (path string, err error) { + homePath, pathError := myuser.GetUserHomeDir() + if pathError != nil { + context.Logger.Error(pathError.Error()) + return "", pathError } - session, createSessionError := mysession.CreateSession(context, context.Config.DefaultProfile, &context.Config.DefaultRegion) - if createSessionError != nil { - context.Logger.Error(createSessionError.Error()) - } - createStack(templateStruct, session) -} -// This function bases on "DeleteStackInput" structure and destroys stack. It uses "StackName" to choose which stack will be destroy. Before that it creates session. 
-func DestroyStack(context *context.Context) { - delStackInput := deleteStackInput(context) - session, sessionError := mysession.CreateSession(context, context.Config.DefaultProfile, &context.Config.DefaultRegion) - if sessionError != nil { - context.Logger.Error(sessionError.Error()) + if *context.CliArguments.Mode == "set-stack-policy" { + if *context.CliArguments.Unblock { + path = homePath + "/.config/perun/stack-policies/unblocked.json" + } else if *context.CliArguments.Block { + path = homePath + "/.config/perun/stack-policies/blocked.json" + } else if len(*context.CliArguments.TemplatePath) > 0 && isStackPolicyFileJSON(*context.CliArguments.TemplatePath) { + path = *context.CliArguments.TemplatePath + } else { + return "", errors.New("Incorrect path") + } + } else if len(*context.CliArguments.TemplatePath) > 0 { + path = *context.CliArguments.TemplatePath } - api := cloudformation.New(session) - api.DeleteStack(&delStackInput) + return } -// This function gets "StackName" from Stack in CliArguments and creates "DeleteStackInput" structure. -func deleteStackInput(context *context.Context) cloudformation.DeleteStackInput { - name := *context.CliArguments.Stack - templateStruct := cloudformation.DeleteStackInput{ - StackName: &name, +// Checking is file type JSON. +func isStackPolicyFileJSON(filename string) bool { + templateFileExtension := path.Ext(filename) + if templateFileExtension == ".json" { + return true } - return templateStruct + + return false } diff --git a/stack/stack_mocks/mock_aws_api.go b/stack/stack_mocks/mock_aws_api.go new file mode 100644 index 0000000..e4680f4 --- /dev/null +++ b/stack/stack_mocks/mock_aws_api.go @@ -0,0 +1,176 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: ./awsapi/cloudformation.go + +// Package stack_mocks is a generated GoMock package. 
+package stack_mocks + +import ( + cloudformation "github.com/aws/aws-sdk-go/service/cloudformation" + gomock "github.com/golang/mock/gomock" + reflect "reflect" +) + +// MockCloudFormationAPI is a mock of CloudFormationAPI interface +type MockCloudFormationAPI struct { + ctrl *gomock.Controller + recorder *MockCloudFormationAPIMockRecorder +} + +// MockCloudFormationAPIMockRecorder is the mock recorder for MockCloudFormationAPI +type MockCloudFormationAPIMockRecorder struct { + mock *MockCloudFormationAPI +} + +// NewMockCloudFormationAPI creates a new mock instance +func NewMockCloudFormationAPI(ctrl *gomock.Controller) *MockCloudFormationAPI { + mock := &MockCloudFormationAPI{ctrl: ctrl} + mock.recorder = &MockCloudFormationAPIMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use +func (m *MockCloudFormationAPI) EXPECT() *MockCloudFormationAPIMockRecorder { + return m.recorder +} + +// CreateStack mocks base method +func (m *MockCloudFormationAPI) CreateStack(input *cloudformation.CreateStackInput) (*cloudformation.CreateStackOutput, error) { + ret := m.ctrl.Call(m, "CreateStack", input) + ret0, _ := ret[0].(*cloudformation.CreateStackOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateStack indicates an expected call of CreateStack +func (mr *MockCloudFormationAPIMockRecorder) CreateStack(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateStack", reflect.TypeOf((*MockCloudFormationAPI)(nil).CreateStack), input) +} + +// DeleteStack mocks base method +func (m *MockCloudFormationAPI) DeleteStack(input *cloudformation.DeleteStackInput) (*cloudformation.DeleteStackOutput, error) { + ret := m.ctrl.Call(m, "DeleteStack", input) + ret0, _ := ret[0].(*cloudformation.DeleteStackOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DeleteStack indicates an expected call of DeleteStack +func (mr *MockCloudFormationAPIMockRecorder) DeleteStack(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteStack", reflect.TypeOf((*MockCloudFormationAPI)(nil).DeleteStack), input) +} + +// UpdateStack mocks base method +func (m *MockCloudFormationAPI) UpdateStack(input *cloudformation.UpdateStackInput) (*cloudformation.UpdateStackOutput, error) { + ret := m.ctrl.Call(m, "UpdateStack", input) + ret0, _ := ret[0].(*cloudformation.UpdateStackOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateStack indicates an expected call of UpdateStack +func (mr *MockCloudFormationAPIMockRecorder) UpdateStack(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateStack", reflect.TypeOf((*MockCloudFormationAPI)(nil).UpdateStack), input) +} + +// SetStackPolicy mocks base method +func (m *MockCloudFormationAPI) SetStackPolicy(input *cloudformation.SetStackPolicyInput) (*cloudformation.SetStackPolicyOutput, error) { + ret := m.ctrl.Call(m, "SetStackPolicy", input) + ret0, _ := ret[0].(*cloudformation.SetStackPolicyOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SetStackPolicy indicates an expected call of SetStackPolicy +func (mr *MockCloudFormationAPIMockRecorder) SetStackPolicy(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SetStackPolicy", reflect.TypeOf((*MockCloudFormationAPI)(nil).SetStackPolicy), input) +} + +// UpdateTerminationProtection mocks base method +func (m *MockCloudFormationAPI) UpdateTerminationProtection(input 
*cloudformation.UpdateTerminationProtectionInput) (*cloudformation.UpdateTerminationProtectionOutput, error) { + ret := m.ctrl.Call(m, "UpdateTerminationProtection", input) + ret0, _ := ret[0].(*cloudformation.UpdateTerminationProtectionOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateTerminationProtection indicates an expected call of UpdateTerminationProtection +func (mr *MockCloudFormationAPIMockRecorder) UpdateTerminationProtection(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateTerminationProtection", reflect.TypeOf((*MockCloudFormationAPI)(nil).UpdateTerminationProtection), input) +} + +// EstimateTemplateCost mocks base method +func (m *MockCloudFormationAPI) EstimateTemplateCost(input *cloudformation.EstimateTemplateCostInput) (*cloudformation.EstimateTemplateCostOutput, error) { + ret := m.ctrl.Call(m, "EstimateTemplateCost", input) + ret0, _ := ret[0].(*cloudformation.EstimateTemplateCostOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// EstimateTemplateCost indicates an expected call of EstimateTemplateCost +func (mr *MockCloudFormationAPIMockRecorder) EstimateTemplateCost(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "EstimateTemplateCost", reflect.TypeOf((*MockCloudFormationAPI)(nil).EstimateTemplateCost), input) +} + +// ValidateTemplate mocks base method +func (m *MockCloudFormationAPI) ValidateTemplate(input *cloudformation.ValidateTemplateInput) (*cloudformation.ValidateTemplateOutput, error) { + ret := m.ctrl.Call(m, "ValidateTemplate", input) + ret0, _ := ret[0].(*cloudformation.ValidateTemplateOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// ValidateTemplate indicates an expected call of ValidateTemplate +func (mr *MockCloudFormationAPIMockRecorder) ValidateTemplate(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ValidateTemplate", reflect.TypeOf((*MockCloudFormationAPI)(nil).ValidateTemplate), input) +} + +// CreateChangeSet mocks base method +func (m *MockCloudFormationAPI) CreateChangeSet(input *cloudformation.CreateChangeSetInput) (*cloudformation.CreateChangeSetOutput, error) { + ret := m.ctrl.Call(m, "CreateChangeSet", input) + ret0, _ := ret[0].(*cloudformation.CreateChangeSetOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CreateChangeSet indicates an expected call of CreateChangeSet +func (mr *MockCloudFormationAPIMockRecorder) CreateChangeSet(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CreateChangeSet", reflect.TypeOf((*MockCloudFormationAPI)(nil).CreateChangeSet), input) +} + +// DescribeChangeSet mocks base method +func (m *MockCloudFormationAPI) DescribeChangeSet(input *cloudformation.DescribeChangeSetInput) (*cloudformation.DescribeChangeSetOutput, error) { + ret := m.ctrl.Call(m, "DescribeChangeSet", input) + ret0, _ := ret[0].(*cloudformation.DescribeChangeSetOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DescribeChangeSet indicates an expected call of DescribeChangeSet +func (mr *MockCloudFormationAPIMockRecorder) DescribeChangeSet(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DescribeChangeSet", reflect.TypeOf((*MockCloudFormationAPI)(nil).DescribeChangeSet), input) +} + +// WaitUntilChangeSetCreateComplete mocks base method +func (m *MockCloudFormationAPI) WaitUntilChangeSetCreateComplete(input *cloudformation.DescribeChangeSetInput) error { + 
ret := m.ctrl.Call(m, "WaitUntilChangeSetCreateComplete", input) + ret0, _ := ret[0].(error) + return ret0 +} + +// WaitUntilChangeSetCreateComplete indicates an expected call of WaitUntilChangeSetCreateComplete +func (mr *MockCloudFormationAPIMockRecorder) WaitUntilChangeSetCreateComplete(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "WaitUntilChangeSetCreateComplete", reflect.TypeOf((*MockCloudFormationAPI)(nil).WaitUntilChangeSetCreateComplete), input) +} + +// DeleteChangeSet mocks base method +func (m *MockCloudFormationAPI) DeleteChangeSet(input *cloudformation.DeleteChangeSetInput) (*cloudformation.DeleteChangeSetOutput, error) { + ret := m.ctrl.Call(m, "DeleteChangeSet", input) + ret0, _ := ret[0].(*cloudformation.DeleteChangeSetOutput) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// DeleteChangeSet indicates an expected call of DeleteChangeSet +func (mr *MockCloudFormationAPIMockRecorder) DeleteChangeSet(input interface{}) *gomock.Call { + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteChangeSet", reflect.TypeOf((*MockCloudFormationAPI)(nil).DeleteChangeSet), input) +} diff --git a/stack/stack_mocks/mock_context.go b/stack/stack_mocks/mock_context.go new file mode 100644 index 0000000..6fcb341 --- /dev/null +++ b/stack/stack_mocks/mock_context.go @@ -0,0 +1,53 @@ +package stack_mocks + +import ( + "github.com/Appliscale/perun/cliparser" + "github.com/Appliscale/perun/configuration" + "github.com/Appliscale/perun/configurator" + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/logger" + "io/ioutil" + "testing" +) + +func SetupContext(t *testing.T, args []string) *context.Context { + myLogger := logger.CreateDefaultLogger() + myLogger.SetVerbosity("ERROR") + + cliArguments, err := cliparser.ParseCliArguments(args) + if err != nil { + t.Error(err.Error()) + return &context.Context{} + } + + config := createDefaultConfiguration() + iconsistenciesConfig := configuration.ReadInconsistencyConfiguration(&myLogger) + + ctx := context.Context{ + CliArguments: cliArguments, + Logger: &myLogger, + Config: config, + InconsistencyConfig: iconsistenciesConfig, + } + + return &ctx +} + +func ReadFile(t *testing.T, filePath string) string { + rawTemplate, readFileError := ioutil.ReadFile(filePath) + if readFileError != nil { + t.Error(readFileError.Error()) + } + template := string(rawTemplate) + return template +} + +func createDefaultConfiguration() configuration.Configuration { + return configuration.Configuration{ + DefaultProfile: "default", + DefaultRegion: "region", + SpecificationURL: configurator.ResourceSpecificationURL, + DefaultDecisionForMFA: false, + DefaultDurationForMFA: 3600, + DefaultVerbosity: "INFO"} +} diff --git a/stack/stack_test.go b/stack/stack_test.go new file mode 100644 index 0000000..eb9eeda --- /dev/null +++ b/stack/stack_test.go @@ -0,0 +1,72 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +package stack + +import ( + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/stretchr/testify/assert" + "strings" + "testing" +) + +func TestGetTemplateFromFile(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + context := stack_mocks.SetupContext(t, []string{"cmd", "create-stack", stackName, templatePath}) + + templateBody := stack_mocks.ReadFile(t, templatePath) + + returnedTemplate, returnedStackName, err := getTemplateFromFile(context) + if err != nil { + t.Error(err.Error()) + } + + assert.Equal(t, returnedTemplate, templateBody) + assert.Equal(t, returnedStackName, stackName) +} + +func TestGetPath(t *testing.T) { + templatePath := "./test_resources/test_template.yaml" + stackName := "StackName" + + path := getPathForMode(t, []string{"cmd", "create-stack", stackName, templatePath}) + assert.Equal(t, path, templatePath) + path = getPathForMode(t, []string{"cmd", "update-stack", stackName, templatePath}) + assert.Equal(t, path, templatePath) + path = getPathForMode(t, []string{"cmd", "create-change-set", stackName, templatePath}) + assert.Equal(t, path, templatePath) + + path = getPathForMode(t, []string{"cmd", "set-stack-policy", stackName, templatePath, "--block"}) + assert.True(t, strings.HasSuffix(path, "/.config/perun/stack-policies/blocked.json")) + + path = getPathForMode(t, []string{"cmd", "set-stack-policy", stackName, templatePath, "--unblock"}) + assert.True(t, strings.HasSuffix(path, "/.config/perun/stack-policies/unblocked.json")) +} + +func TestIsStackPolicyFileJSON(t *testing.T) { + assert.False(t, isStackPolicyFileJSON("policyjson")) + assert.True(t, isStackPolicyFileJSON("policy.json")) + assert.False(t, isStackPolicyFileJSON("asd.yaml")) +} + +func getPathForMode(t *testing.T, args []string) string { + context := stack_mocks.SetupContext(t, args) + path, err := getPath(context) + if err != nil { + t.Error(err.Error()) + } + return path +} diff --git a/stack/test_resources/test_stackpolicy.json b/stack/test_resources/test_stackpolicy.json new file mode 100644 index 0000000..51014bf --- /dev/null +++ b/stack/test_resources/test_stackpolicy.json @@ -0,0 +1,16 @@ +{ + "Statement" : [ + { + "Effect" : "Allow", + "Action" : "Update:*", + "Principal": "*", + "Resource" : "*" + }, + { + "Effect" : "Deny", + "Action" : "Update:*", + "Principal": "*", + "Resource" : "LogicalResourceId/ProductionDatabase" + } + ] +} diff --git a/stack/test_resources/test_template.yaml b/stack/test_resources/test_template.yaml new file mode 100644 index 0000000..97ab605 --- /dev/null +++ b/stack/test_resources/test_template.yaml @@ -0,0 +1,8 @@ +AWSTemplateFormatVersion: "2010-09-09" + +Resources: + S3: + Type: AWS::S3::Bucket + Properties: + BucketName: peruntestbucket + diff --git a/stack/updatingstack.go b/stack/updatingstack.go new file mode 100644 index 0000000..657edf1 --- /dev/null +++ b/stack/updatingstack.go @@ -0,0 +1,82 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package stack
+
+import (
+	"github.com/Appliscale/perun/context"
+	"github.com/Appliscale/perun/parameters"
+	"github.com/Appliscale/perun/progress"
+	"github.com/aws/aws-sdk-go/service/cloudformation"
+)
+
+// UpdateStack prepares the UpdateStackInput and updates the stack.
+func UpdateStack(context *context.Context) (err error) {
+	template, stackName, err := getTemplateFromFile(context)
+	if err != nil {
+		return
+	}
+	templateStruct := updateStackInput(context, &template, &stackName)
+	err = doUpdateStack(context, templateStruct)
+	return
+}
+
+// doUpdateStack sends the update request; with --progress it also monitors stack events.
+func doUpdateStack(context *context.Context, updateStackInput cloudformation.UpdateStackInput) error {
+	if *context.CliArguments.Progress {
+		conn, remoteSinkError := progress.GetRemoteSink(context)
+		if remoteSinkError != nil {
+			context.Logger.Error("Error getting remote sink configuration: " + remoteSinkError.Error())
+			return remoteSinkError
+		}
+		updateStackInput.NotificationARNs = []*string{conn.TopicArn}
+		_, updateError := context.CloudFormation.UpdateStack(&updateStackInput)
+		if updateError != nil {
+			context.Logger.Error("Error updating stack: " + updateError.Error())
+			return updateError
+		}
+		conn.MonitorStackQueue()
+	} else {
+		_, updateError := context.CloudFormation.UpdateStack(&updateStackInput)
+		if updateError != nil {
+			context.Logger.Error("Error updating stack: " + updateError.Error())
+			return updateError
+		}
+		context.Logger.Info("Stack update request successful")
+	}
+	return nil
+}
+
+// This function gets the template and the stack name and creates an "UpdateStackInput" structure.
+func updateStackInput(context *context.Context, template *string, stackName *string) cloudformation.UpdateStackInput {
+	params, err := parameters.ResolveParameters(context)
+	if err != nil {
+		context.Logger.Error(err.Error())
+		return cloudformation.UpdateStackInput{}
+	}
+	rawCapabilities := *context.CliArguments.Capabilities
+	capabilities := make([]*string, len(rawCapabilities))
+	for i := range rawCapabilities {
+		capabilities[i] = &rawCapabilities[i] // take the address of the slice element, not of a shared loop variable
+	}
+	templateStruct := cloudformation.UpdateStackInput{
+		Parameters:   params,
+		TemplateBody: template,
+		StackName:    stackName,
+		Capabilities: capabilities,
+	}
+	return templateStruct
+}
diff --git a/stack/updatingstack_test.go b/stack/updatingstack_test.go
new file mode 100644
index 0000000..fe88913
--- /dev/null
+++ b/stack/updatingstack_test.go
@@ -0,0 +1,70 @@
+// Copyright 2018 Appliscale
+//
+// Maintainers and contributors are listed in README file inside repository.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
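
updateStackInput above is the only place the --capabilities flag is forwarded; CloudFormation rejects updates of templates that create IAM resources unless CAPABILITY_IAM or CAPABILITY_NAMED_IAM is acknowledged in that field. A minimal sketch of building the same request with the SDK helpers follows — the stack name and inline template body are placeholders, and aws.StringSlice is simply a drop-in for the hand-rolled capabilities loop above:

    package main

    import (
    	"log"

    	"github.com/aws/aws-sdk-go/aws"
    	"github.com/aws/aws-sdk-go/aws/session"
    	"github.com/aws/aws-sdk-go/service/cloudformation"
    )

    func main() {
    	api := cloudformation.New(session.Must(session.NewSession()))

    	input := &cloudformation.UpdateStackInput{
    		StackName:    aws.String("my-stack"),
    		TemplateBody: aws.String("AWSTemplateFormatVersion: \"2010-09-09\"\nResources:\n  S3:\n    Type: AWS::S3::Bucket"),
    		// aws.StringSlice copies each element into its own *string.
    		Capabilities: aws.StringSlice([]string{cloudformation.CapabilityCapabilityNamedIam}),
    	}
    	if _, err := api.UpdateStack(input); err != nil {
    		log.Fatal("stack update failed: ", err)
    	}
    }
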
+package stack + +import ( + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/golang/mock/gomock" + "github.com/stretchr/testify/assert" + "testing" +) + +func TestUpdateStack(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + context := stack_mocks.SetupContext(t, []string{"cmd", "update-stack", stackName, templatePath}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + context.CloudFormation = mockAWSPI + + template := stack_mocks.ReadFile(t, templatePath) + templateStruct := updateStackInput(context, &template, &stackName) + + mockAWSPI.EXPECT().UpdateStack(&templateStruct).Return(nil, nil).Times(1) + UpdateStack(context) +} + +func TestDoUpdateStack(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + context := stack_mocks.SetupContext(t, []string{"cmd", "update-stack", stackName, templatePath}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + context.CloudFormation = mockAWSPI + + template := stack_mocks.ReadFile(t, templatePath) + templateStruct := updateStackInput(context, &template, &stackName) + + mockAWSPI.EXPECT().UpdateStack(&templateStruct).Return(nil, nil).Times(1) + UpdateStack(context) +} + +func TestUpdateStackInput(t *testing.T) { + stackName := "StackName" + templatePath := "./test_resources/test_template.yaml" + context := stack_mocks.SetupContext(t, []string{"cmd", "update-stack", stackName, templatePath}) + + template := stack_mocks.ReadFile(t, templatePath) + + returnedTemplateStruct := updateStackInput(context, &template, &stackName) + + assert.Equal(t, *returnedTemplateStruct.TemplateBody, template) + assert.Equal(t, *returnedTemplateStruct.StackName, stackName) +} diff --git a/utilities/utilities.go b/utilities/utilities.go index 6445cf1..e37c1be 100644 --- a/utilities/utilities.go +++ b/utilities/utilities.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -20,18 +20,43 @@ package utilities import ( "fmt" + "os" "time" ) +// Motto - perun's motto. const Motto = "Swiss army knife for AWS CloudFormation templates" -const ReleaseName = "Nimbostratus" -const VersionNumber = "1.1.0-beta" +// ReleaseName - name of the release. +const ReleaseName = "Altostratus" +// VersionNumber - number of the release. +const VersionNumber = "1.3.1" + +// VersionStatus shows perun's release. func VersionStatus() string { return fmt.Sprintf("perun %s (%s release) - %s", VersionNumber, ReleaseName, Motto) } +// TruncateDuration prepares shorter message with duration. func TruncateDuration(d time.Duration) time.Duration { return -(d - d%(time.Duration(1)*time.Second)) } + +// CheckErrorCodeAndExit checks if error exists and exits with 1 or 0 code. +func CheckErrorCodeAndExit(err error) { + if err != nil { + os.Exit(1) + } else { + os.Exit(0) + } +} + +// CheckFlagAndExit checks error flag exits with 1 or 0 code. +func CheckFlagAndExit(valid bool) { + if valid { + os.Exit(0) + } else { + os.Exit(1) + } +} diff --git a/validator/awsvalidator.go b/validator/awsvalidator.go new file mode 100644 index 0000000..939fbee --- /dev/null +++ b/validator/awsvalidator.go @@ -0,0 +1,49 @@ +// Copyright 2018 Appliscale +// +// Maintainers and contributors are listed in README file inside repository. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package awsvalidator provides tools for online CloudFormation template +// validation using AWS API. +package validator + +import ( + "github.com/Appliscale/perun/context" + "github.com/aws/aws-sdk-go/service/cloudformation" + "strings" +) + +func awsValidate(ctx *context.Context, templateBody *string) bool { + valid, err := isTemplateValid(ctx, templateBody) + if err != nil { + ctx.Logger.Error(err.Error()) + return false + } + return valid +} + +func isTemplateValid(context *context.Context, template *string) (bool, error) { + templateStruct := cloudformation.ValidateTemplateInput{ + TemplateBody: template, + } + _, err := context.CloudFormation.ValidateTemplate(&templateStruct) + if err != nil { + if strings.Contains(err.Error(), "ExpiredToken:") { + context.Logger.Error(err.Error()) + return true, nil + } + return false, err + } + return true, nil +} diff --git a/validator/awsvalidator_test.go b/validator/awsvalidator_test.go new file mode 100644 index 0000000..94a46a6 --- /dev/null +++ b/validator/awsvalidator_test.go @@ -0,0 +1,27 @@ +package validator + +import ( + "github.com/Appliscale/perun/stack/stack_mocks" + "github.com/aws/aws-sdk-go/service/cloudformation" + "github.com/golang/mock/gomock" + "testing" +) + +func TestIsTemplateValid(t *testing.T) { + ctx := stack_mocks.SetupContext(t, []string{"cmd", "validate", "templatePath"}) + + mockCtrl := gomock.NewController(t) + defer mockCtrl.Finish() + mockAWSPI := stack_mocks.NewMockCloudFormationAPI(mockCtrl) + ctx.CloudFormation = mockAWSPI + + templateBody := "templateBody" + mockAWSPI. + EXPECT(). + ValidateTemplate(&cloudformation.ValidateTemplateInput{TemplateBody: &templateBody}). + Times(2). + Return(nil, nil) + + isTemplateValid(ctx, &templateBody) + awsValidate(ctx, &templateBody) +} diff --git a/offlinevalidator/offlinevalidator.go b/validator/localvalidator.go similarity index 61% rename from offlinevalidator/offlinevalidator.go rename to validator/localvalidator.go index 23e683d..aacb9af 100644 --- a/offlinevalidator/offlinevalidator.go +++ b/validator/localvalidator.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -14,28 +14,34 @@ // See the License for the specific language governing permissions and // limitations under the License. -// Package offlinevalidator provides tools for offline CloudFormation template +// Package validator provides tools for local CloudFormation template // validation. 
-package offlinevalidator +package validator import ( "encoding/json" - "errors" + "fmt" + "io" "io/ioutil" - "path" + "os" "reflect" "strconv" "strings" + "errors" + + "github.com/Appliscale/perun/configuration" "github.com/Appliscale/perun/context" - "github.com/Appliscale/perun/intrinsicsolver" + "github.com/Appliscale/perun/helpers" "github.com/Appliscale/perun/logger" - "github.com/Appliscale/perun/offlinevalidator/template" - "github.com/Appliscale/perun/offlinevalidator/validators" "github.com/Appliscale/perun/specification" + "github.com/Appliscale/perun/validator/template" + "github.com/Appliscale/perun/validator/validators" + "github.com/aws/aws-sdk-go/aws" + "github.com/aws/aws-sdk-go/service/s3" + "github.com/aws/aws-sdk-go/service/s3/s3manager" "github.com/awslabs/goformation" "github.com/awslabs/goformation/cloudformation" - "github.com/ghodss/yaml" "github.com/mitchellh/mapstructure" ) @@ -43,69 +49,118 @@ var validatorsMap = map[string]interface{}{ "AWS::EC2::VPC": validators.IsVpcValid, } -func printResult(valid *bool, logger *logger.Logger) { +func printResult(templateName string, valid *bool, logger logger.LoggerInt) { logger.PrintValidationErrors() if !*valid { - logger.Error("Template is invalid!") + logger.Error(fmt.Sprintf("Template %s is invalid!", templateName)) + } else if logger.HasValidationWarnings() { + logger.Warning(fmt.Sprintf("Template %s may not be valid", templateName)) } else { - logger.Info("Template is valid!") + logger.Info(fmt.Sprintf("Template %s is valid!", templateName)) } } // Validate CloudFormation template. -func Validate(context *context.Context) bool { - valid := false - defer printResult(&valid, context.Logger) +func Validate(ctx *context.Context) bool { + return validateTemplateFile(*ctx.CliArguments.TemplatePath, *ctx.CliArguments.TemplatePath, ctx) +} - specification, err := specification.GetSpecification(context) +func validateTemplateFile(templatePath string, templateName string, context *context.Context) (valid bool) { + valid = false + defer printResult(templateName, &valid, context.Logger) + + resourceSpecification, err := specification.GetSpecification(context) if err != nil { context.Logger.Error(err.Error()) - return false + return } - rawTemplate, err := ioutil.ReadFile(*context.CliArguments.TemplatePath) + rawTemplate, err := ioutil.ReadFile(templatePath) if err != nil { context.Logger.Error(err.Error()) - return false + return } var perunTemplate template.Template var goFormationTemplate cloudformation.Template - templateFileExtension := path.Ext(*context.CliArguments.TemplatePath) - if templateFileExtension == ".json" { - goFormationTemplate, err = parseJSON(rawTemplate, perunTemplate, context.Logger) - } else if templateFileExtension == ".yaml" || templateFileExtension == ".yml" { - goFormationTemplate, err = parseYAML(rawTemplate, perunTemplate, context.Logger) - } else { - err = errors.New("Invalid template file format.") + parser, err := helpers.GetParser(*context.CliArguments.TemplatePath) + if err != nil { + context.Logger.Error(err.Error()) + return } - + goFormationTemplate, err = parser(rawTemplate, perunTemplate, context.Logger) if err != nil { context.Logger.Error(err.Error()) - return false + return } deNilizedTemplate, _ := nilNeutralize(goFormationTemplate, context.Logger) resources := obtainResources(deNilizedTemplate, perunTemplate, context.Logger) deadResources := getNilResources(resources) deadProperties := getNilProperties(resources) + if hasAllowedValuesParametersValid(goFormationTemplate.Parameters) { + 
valid = true + } else { + valid = false + context.Logger.AddResourceForValidation("Parameters").AddValidationError("Allowed Values supports only Type String") + } + + specInconsistency := context.InconsistencyConfig.SpecificationInconsistency + + templateBody := string(rawTemplate) + valid = validateResources(resources, &resourceSpecification, deadProperties, deadResources, specInconsistency, context) && valid + valid = awsValidate(context, &templateBody) && valid - valid = validateResources(resources, &specification, context.Logger, deadProperties, deadResources) return valid } -func validateResources(resources map[string]template.Resource, specification *specification.Specification, sink *logger.Logger, deadProp []string, deadRes []string) bool { +// Looking for AllowedValues and checking what Type is it. If it finds Type other than String then it will return false. +func hasAllowedValuesParametersValid(parameters template.Parameters) bool { + isType := false + isAllovedValues := false + for _, value := range parameters { + valueof := reflect.ValueOf(value) + isAllovedValues = false + isType = false + + for _, key := range valueof.MapKeys() { + + keyValue := valueof.MapIndex(key) + textType := "Type" + keyString := key.Interface().(string) + textValues := "AllowedValues" + + if textType == keyString { + textString := "String" + keyValueString := keyValue.Interface().(string) + if textString != keyValueString { + isType = true + } + } else if textValues == keyString { + isAllovedValues = true + } + + if isAllovedValues && isType { + return false + } + } + } + return true +} +func validateResources(resources map[string]template.Resource, specification *specification.Specification, deadProp []string, deadRes []string, specInconsistency map[string]configuration.Property, ctx *context.Context) bool { + sink := ctx.Logger for resourceName, resourceValue := range resources { - if deadResource := sliceContains(deadRes, resourceName); !deadResource { + if deadResource := helpers.SliceContains(deadRes, resourceName); !deadResource { resourceValidation := sink.AddResourceForValidation(resourceName) - + processNestedTemplates(resourceValue.Properties, ctx) + validators.GeneralValidateResourceByName(resourceValue, resourceValidation, ctx) if resourceSpecification, ok := specification.ResourceTypes[resourceValue.Type]; ok { for propertyName, propertyValue := range resourceSpecification.Properties { - if deadProperty := sliceContains(deadProp, propertyName); !deadProperty { - validateProperties(specification, resourceValue, propertyName, propertyValue, resourceValidation) + if deadProperty := helpers.SliceContains(deadProp, propertyName); !deadProperty { + validateProperties(specification, resourceValue, propertyName, propertyValue, resourceValidation, specInconsistency, sink) } } } else { @@ -125,28 +180,44 @@ func validateProperties( resourceValue template.Resource, propertyName string, propertyValue specification.Property, - resourceValidation *logger.ResourceValidation) { + resourceValidation *logger.ResourceValidation, + specInconsistency map[string]configuration.Property, + logger logger.LoggerInt) { + warnAboutSpecificationInconsistencies(propertyName, specInconsistency[resourceValue.Type], logger) if _, ok := resourceValue.Properties[propertyName]; !ok { if propertyValue.Required { resourceValidation.AddValidationError("Property " + propertyName + " is required") } } else if len(propertyValue.Type) > 0 { if propertyValue.Type != "List" && propertyValue.Type != "Map" { - 
checkNestedProperties(specification, resourceValue.Properties, resourceValue.Type, propertyName, propertyValue.Type, resourceValidation) + checkNestedProperties(specification, resourceValue.Properties, resourceValue.Type, propertyName, propertyValue.Type, resourceValidation, specInconsistency, logger) } else if propertyValue.Type == "List" { - checkListProperties(specification, resourceValue.Properties, resourceValue.Type, propertyName, propertyValue.ItemType, resourceValidation) + checkListProperties(specification, resourceValue.Properties, resourceValue.Type, propertyName, propertyValue.ItemType, resourceValidation, specInconsistency, logger) } else if propertyValue.Type == "Map" { checkMapProperties(resourceValue.Properties, propertyName, resourceValidation) } } } +// The inconsistency warning has to be emitted before the validation itself; otherwise a user who added a property only because the documentation marked it as required would never be notified about the documentation/specification inconsistency. +func warnAboutSpecificationInconsistencies(subpropertyName string, specInconsistentProperty configuration.Property, logger logger.LoggerInt) { + if specInconsistentProperty[subpropertyName] != nil { + for _, inconsistentPropertyName := range specInconsistentProperty[subpropertyName] { + if inconsistentPropertyName == "Required" { + logger.Warning(subpropertyName + "->" + inconsistentPropertyName + " in documentation is not consistent with specification") + } + } + } +} + func checkListProperties( spec *specification.Specification, resourceProperties map[string]interface{}, resourceValueType, propertyName, listItemType string, - resourceValidation *logger.ResourceValidation) { + resourceValidation *logger.ResourceValidation, + specInconsistency map[string]configuration.Property, + logger logger.LoggerInt) { if listItemType == "" { resourceSubproperties := toStringList(resourceProperties, propertyName) @@ -154,17 +225,17 @@ func checkListProperties( resourceValidation.AddValidationError(propertyName + " must be a List") } } else if propertySpec, hasSpec := spec.PropertyTypes[resourceValueType+"."+listItemType]; hasSpec { - resourceSubproperties := toMapList(resourceProperties, propertyName) for subpropertyName, subpropertyValue := range propertySpec.Properties { for _, listItem := range resourceSubproperties { + warnAboutSpecificationInconsistencies(subpropertyName, specInconsistency[resourceValueType+"."+listItemType], logger) if _, isPresent := listItem[subpropertyName]; !isPresent { if subpropertyValue.Required { resourceValidation.AddValidationError("Property " + subpropertyName + " is required in " + listItemType) } } else if isPresent { if subpropertyValue.IsSubproperty() { - checkNestedProperties(spec, listItem, resourceValueType, subpropertyName, subpropertyValue.Type, resourceValidation) + checkNestedProperties(spec, listItem, resourceValueType, subpropertyName, subpropertyValue.Type, resourceValidation, specInconsistency, logger) } else if subpropertyValue.Type == "Map" { checkMapProperties(listItem, propertyName, resourceValidation) } @@ -178,20 +249,23 @@ func checkNestedProperties( spec *specification.Specification, resourceProperties map[string]interface{}, resourceValueType, propertyName, propertyType string, - resourceValidation *logger.ResourceValidation) { + resourceValidation *logger.ResourceValidation, + specInconsistency map[string]configuration.Property, + logger logger.LoggerInt) { if propertySpec, hasSpec := spec.PropertyTypes[resourceValueType+"."+propertyType]; hasSpec { resourceSubproperties, _ := toMap(resourceProperties, propertyName) for 
subpropertyName, subpropertyValue := range propertySpec.Properties { + warnAboutSpecificationInconsistencies(subpropertyName, specInconsistency[resourceValueType+"."+propertyName], logger) if _, isPresent := resourceSubproperties[subpropertyName]; !isPresent { if subpropertyValue.Required { - resourceValidation.AddValidationError("Property " + subpropertyName + " is required" + "in " + propertyName) + resourceValidation.AddValidationError("Property " + subpropertyName + " is required " + "in " + propertyName) } } else if isPresent { if subpropertyValue.IsSubproperty() { - checkNestedProperties(spec, resourceSubproperties, resourceValueType, subpropertyName, subpropertyValue.Type, resourceValidation) + checkNestedProperties(spec, resourceSubproperties, resourceValueType, subpropertyName, subpropertyValue.Type, resourceValidation, specInconsistency, logger) } else if subpropertyValue.Type == "List" { - checkListProperties(spec, resourceSubproperties, resourceValueType, subpropertyName, subpropertyValue.ItemType, resourceValidation) + checkListProperties(spec, resourceSubproperties, resourceValueType, subpropertyName, subpropertyValue.ItemType, resourceValidation, specInconsistency, logger) } else if subpropertyValue.Type == "Map" { checkMapProperties(resourceSubproperties, subpropertyName, resourceValidation) } @@ -200,65 +274,86 @@ func checkNestedProperties( } } -func checkMapProperties( - resourceProperties map[string]interface{}, - propertyName string, - resourceValidation *logger.ResourceValidation) { - - _, err := toMap(resourceProperties, propertyName) - if err != nil { - resourceValidation.AddValidationError(err.Error()) +func processNestedTemplates(properties map[string]interface{}, ctx *context.Context) { + if rawTemplateURL, ok := properties["TemplateURL"]; ok { + if templateURL, ok := rawTemplateURL.(string); ok { + err := validateNestedTemplate(templateURL, ctx) + if err != nil { + ctx.Logger.Error(err.Error()) + os.Exit(1) + } + } } } -func parseJSON(templateFile []byte, refTemplate template.Template, logger *logger.Logger) (template cloudformation.Template, err error) { - - err = json.Unmarshal(templateFile, &refTemplate) +func validateNestedTemplate(templateURL string, ctx *context.Context) error { + err := context.UpdateSessionToken(ctx.Config.DefaultProfile, ctx.Config.DefaultRegion, ctx.Config.DefaultDurationForMFA, ctx) if err != nil { - if syntaxError, isSyntaxError := err.(*json.SyntaxError); isSyntaxError { - syntaxOffset := int(syntaxError.Offset) - line, character := lineAndCharacter(string(templateFile), syntaxOffset) - logger.Error("Syntax error at line " + strconv.Itoa(line) + ", column " + strconv.Itoa(character)) - } else if typeError, isTypeError := err.(*json.UnmarshalTypeError); isTypeError { - typeOffset := int(typeError.Offset) - line, character := lineAndCharacter(string(templateFile), typeOffset) - logger.Error("Type error at line " + strconv.Itoa(line) + ", column " + strconv.Itoa(character)) - } - return template, err + return err } - tempJSON, err := goformation.ParseJSON(templateFile) + tempfile, err := ioutil.TempFile(ctx.Config.DefaultTemporaryFilesDirectory, "") if err != nil { - logger.Error(err.Error()) + return err } + defer os.Remove(tempfile.Name()) - returnTemplate := *tempJSON + if err := downloadTemplateFromBucket(templateURL, tempfile, ctx); err != nil { + return err + } - return returnTemplate, nil + validateTemplateFile(tempfile.Name(), templateURL, ctx) + + if err = tempfile.Close(); err != nil { + return err + } + + return nil } -func 
parseYAML(templateFile []byte, refTemplate template.Template, logger *logger.Logger) (template cloudformation.Template, err error) { +func downloadTemplateFromBucket(templateURL string, file io.WriterAt, ctx *context.Context) error { + region, bucket, key := fetchBucketDataFromURL(templateURL) - err = yaml.Unmarshal(templateFile, &refTemplate) + session, err := context.CreateSession(ctx, ctx.Config.DefaultProfile, ®ion) if err != nil { - return template, err + return err } - preprocessed, preprocessingError := intrinsicsolver.FixFunctions(templateFile, logger, "multiline", "elongate", "correctlong") - if preprocessingError != nil { - logger.Error(preprocessingError.Error()) - } - tempYAML, err := goformation.ParseYAML(preprocessed) + downloader := s3manager.NewDownloader(session) + + _, err = downloader.Download(file, &s3.GetObjectInput{ + Bucket: aws.String(bucket), + Key: aws.String(key), + }) if err != nil { - logger.Error(err.Error()) + return err } - returnTemplate := *tempYAML + return nil +} - return returnTemplate, nil +func fetchBucketDataFromURL(url string) (region string, bucket string, key string) { + path := strings.SplitN(url, "/", 5) + host := strings.Split(path[2], ".") + + region = host[1] + bucket = path[3] + key = path[4] + return } -func obtainResources(goformationTemplate cloudformation.Template, perunTemplate template.Template, logger *logger.Logger) map[string]template.Resource { +func checkMapProperties( + resourceProperties map[string]interface{}, + propertyName string, + resourceValidation *logger.ResourceValidation) { + + _, err := toMap(resourceProperties, propertyName) + if err != nil { + resourceValidation.AddValidationError(err.Error()) + } +} + +func obtainResources(goformationTemplate cloudformation.Template, perunTemplate template.Template, logger logger.LoggerInt) map[string]template.Resource { perunResources := perunTemplate.Resources goformationResources := goformationTemplate.Resources @@ -319,7 +414,7 @@ func toMap(resourceProperties map[string]interface{}, propertyName string) (map[ // There is a possibility that a hash map inside the template would have one of it's element's being an intrinsic function designed to output `key : value` pair. // If this function would be unresolved, it would output a standalone of type interface{}. It would be an alien element in a hash map. // To prevent the parser from breaking, we wipe out the entire, expected hash map element. -func nilNeutralize(template cloudformation.Template, logger *logger.Logger) (output cloudformation.Template, err error) { +func nilNeutralize(template cloudformation.Template, logger logger.LoggerInt) (output cloudformation.Template, err error) { bytes, initErr := json.Marshal(template) if initErr != nil { logger.Error(err.Error()) @@ -403,119 +498,7 @@ func getNilResources(resources map[string]template.Resource) []string { return list } -func sliceContains(slice []string, match string) bool { - for _, s := range slice { - if s == match { - return true - } - } - return false -} - -func mapContainsNil(mp map[string]interface{}) bool { - for _, m := range mp { - if m == nil { - return true - } - } - return false -} - -func sliceContainsNil(slice []interface{}) bool { - for _, s := range slice { - if s == nil { - return true - } - } - return false -} - -// We check if the element is non-string, non-float64, non-boolean. Then it is another node or . There is no other option. 
-func isNonStringFloatBool(v interface{}) bool { - var isString, isFloat, isBool bool - if _, ok := v.(string); ok { - isString = true - } else if _, ok := v.(float64); ok { - isFloat = true - } else if _, ok := v.(bool); ok { - isBool = true - } - if !isString && !isFloat && !isBool { - return true - } - return false -} - -func isPlainMap(mp map[string]interface{}) bool { - // First we check is it more complex. If so - it is worth investigating and we should stop checking. - for _, m := range mp { - if _, ok := m.(map[string]interface{}); ok { - return false - } else if _, ok := m.([]interface{}); ok { - return false - } - } - // Ok, it isn't. So is there any ? - if mapContainsNil(mp) { // Yes, it is - so it is a map worth investigating. This is not the map we're looking for. - return false - } - - return true // There is no and no complexity - it is a plain, non-nil map. -} - -func isPlainSlice(slc []interface{}) bool { - // The same flow as in `isPlainMap` function. - for _, s := range slc { - if _, ok := s.(map[string]interface{}); ok { - return false - } else if _, ok := s.([]interface{}); ok { - return false - } - } - - if sliceContainsNil(slc) { - return false - } - - return true -} - -func discard(slice []interface{}, n interface{}) []interface{} { - result := []interface{}{} - for _, s := range slice { - if s != n { - result = append(result, s) - } - } - return result -} - -func lineAndCharacter(input string, offset int) (line int, character int) { - lf := rune(0x0A) - - if offset > len(input) || offset < 0 { - return 0, 0 - } - - line = 1 - - for i, b := range input { - if b == lf { - if i < offset { - line++ - character = 0 - } - } else { - character++ - } - if i == offset { - break - } - } - return line, character -} - -func checkWhereIsNil(n interface{}, v interface{}, baseLevel string, logger *logger.Logger, fullPath []interface{}, dsc *interface{}) { +func checkWhereIsNil(n interface{}, v interface{}, baseLevel string, logger logger.LoggerInt, fullPath []interface{}, dsc *interface{}) { if v == nil { // Value we encountered is nil - this is the end of investigation. where := "" for _, element := range fullPath { @@ -531,26 +514,26 @@ func checkWhereIsNil(n interface{}, v interface{}, baseLevel string, logger *log } logger.Warning(baseLevel + ": " + where + " <--- is nil.") } else if mp, ok := v.(map[string]interface{}); ok { // Value we encountered is a map. - if isPlainMap(mp) { // Check is it plain, non-nil map. + if helpers.IsPlainMap(mp) { // Check is it plain, non-nil map. // It is - we shouldn't dive into. *dsc = n // The name is stored in the `discarded` container as the name of the blind alley. } else { for kmp, vmp := range mp { - if isNonStringFloatBool(vmp) { + if helpers.IsNonStringFloatBool(vmp) { fullPath = append(fullPath, kmp) - fullPath = discard(fullPath, *dsc) // If the output path would be different, it seems that we've encountered some node which is not on the way to the . It will be discarded from the path. Otherwise the paths are the same and we hit the point. + fullPath = helpers.Discard(fullPath, *dsc) // If the output path would be different, it seems that we've encountered some node which is not on the way to the . It will be discarded from the path. Otherwise the paths are the same and we hit the point. checkWhereIsNil(kmp, vmp, baseLevel, logger, fullPath, dsc) } } } } else if slc, ok := v.([]interface{}); ok { // The same flow as above. 
- if isPlainSlice(slc) { + if helpers.IsPlainSlice(slc) { *dsc = n } else { for islc, vslc := range slc { - if isNonStringFloatBool(vslc) { + if helpers.IsNonStringFloatBool(vslc) { fullPath = append(fullPath, islc) - fullPath = discard(fullPath, *dsc) + fullPath = helpers.Discard(fullPath, *dsc) checkWhereIsNil(islc, vslc, baseLevel, logger, fullPath, dsc) } } diff --git a/offlinevalidator/offlinevalidator_test.go b/validator/localvalidator_test.go similarity index 63% rename from offlinevalidator/offlinevalidator_test.go rename to validator/localvalidator_test.go index 8309665..08b5033 100644 --- a/offlinevalidator/offlinevalidator_test.go +++ b/validator/localvalidator_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -14,24 +14,28 @@ // See the License for the specific language governing permissions and // limitations under the License. -package offlinevalidator +package validator import ( "os" "testing" + "github.com/Appliscale/perun/configuration" + "github.com/Appliscale/perun/context" "github.com/Appliscale/perun/logger" - "github.com/Appliscale/perun/offlinevalidator/template" "github.com/Appliscale/perun/specification" + "github.com/Appliscale/perun/validator/template" "github.com/stretchr/testify/assert" ) var spec specification.Specification -var sink logger.Logger +//var _ logger.Logger var deadProp = make([]string, 0) var deadRes = make([]string, 0) +var specInconsistency map[string]configuration.Property +var mockContext = context.Context{} func setup() { var err error @@ -50,62 +54,63 @@ func TestMain(m *testing.M) { } func TestValidResource(t *testing.T) { - sink = logger.Logger{} + + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) resources["ExampleResource"] = createResourceWithOneProperty("ExampleResourceType", "ExampleProperty", "Property value") - assert.True(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.True(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } func TestInvalidResourceType(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) resources["ExampleResource"] = createResourceWithOneProperty("InvalidType", "ExampleProperty", "Property value") - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be invalid, it has invalid resource type") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be invalid, it has invalid resource type") } func TestLackOfRequiredPropertyInResource(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) resources["ExampleResource"] = createResourceWithOneProperty("ExampleResourceType", "SomeProperty", "Property value") - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should not be valid, it does not have required property") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should not be valid, it does not have required property") } func TestLackOfSubpropertyWithSpecification(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} 
resources := make(map[string]template.Resource) properties := map[string]interface{}{ "Ec2KeyName": "SomeValue", } resources["cluster"] = createResourceWithNestedProperties("AWS::Nested3::Cluster", "SomeProperty", properties) - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should not be valid, it does not have property with specification") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should not be valid, it does not have property with specification") } func TestValidPrimitiveTypeInProperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ "Ec2KeyName": "SomeValue", } resources["cluster"] = createResourceWithNestedProperties("AWS::Nested3::Cluster", "Instances", properties) - assert.True(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.True(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } func TestLackOfPrimitiveTypeInProperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ "SomeProperty": "SomeValue", } resources["cluster"] = createResourceWithNestedProperties("AWS::Nested3::Cluster", "Instances", properties) - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource shouldn't be valid") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource shouldn't be valid") } func TestLackOfPrimitiveTypeInPropertyNestedInProperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ "CoreInstanceGroup": map[string]interface{}{ @@ -115,10 +120,10 @@ func TestLackOfPrimitiveTypeInPropertyNestedInProperty(t *testing.T) { } resources["cluster"] = createResourceWithNestedProperties("AWS::Nested1::Cluster", "Instances", properties) - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource shouldn't be valid, it lacks required property") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource shouldn't be valid, it lacks required property") } func TestLackOfRequiredSubproperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ "DummySubproperty": map[string]interface{}{ @@ -127,21 +132,21 @@ func TestLackOfRequiredSubproperty(t *testing.T) { } resources["cluster"] = createResourceWithNestedProperties("AWS::Nested1::Cluster", "Instances", properties) - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource shouldn't be valid, required subproperty is missing") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource shouldn't be valid, required subproperty is missing") } func TestLackOfRequiredPrimitiveTypeInNonrequiredSubproperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := 
map[string]interface{}{ "ETag": "SomeEtagValue", } resources["ApiGatewayResource"] = createResourceWithNestedProperties("AWS::Nested2::RestApi", "BodyS3Location", properties) - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource shouldn't be valid, required primitive property in nonrequired subproperty is missing") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource shouldn't be valid, required primitive property in nonrequired subproperty is missing") } func TestLackOfRequiredPropertyInNonRequiredProperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ @@ -151,11 +156,11 @@ func TestLackOfRequiredPropertyInNonRequiredProperty(t *testing.T) { } resources["ExampleResource"] = createResourceWithNestedProperties("AWS::Nested4::Method", "Definition", properties) - assert.True(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.True(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } func TestLackOfRequiredNestedPrimitivePropertyInListItem(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := []interface{}{ 0: map[string]interface{}{ @@ -171,11 +176,11 @@ func TestLackOfRequiredNestedPrimitivePropertyInListItem(t *testing.T) { resource.Properties["BootstrapActions"] = properties resources["ExampleResource"] = resource - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should not be valid, List is empty") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should not be valid, List is empty") } func TestLackOfRequiredListItemSubpropertyInList(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ "RoutingRules": []interface{}{ @@ -188,11 +193,11 @@ func TestLackOfRequiredListItemSubpropertyInList(t *testing.T) { } resources["ExampleResource"] = createResourceWithNestedProperties("AWS::List2::Bucket", "WebsiteConfiguration", properties) - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should not be valid, It must contain RedirectRule property") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should not be valid, It must contain RedirectRule property") } func TestLackOfRequiredPrimitiveTypeListItemInList(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ "RoutingRules": []interface{}{ @@ -212,11 +217,11 @@ func TestLackOfRequiredPrimitiveTypeListItemInList(t *testing.T) { } resources["ExampleResource"] = createResourceWithNestedProperties("AWS::List2::Bucket", "WebsiteConfiguration", properties) - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should not be valid, RedirectRule must contain HostName and HttpRedirectCode") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should not be 
valid, RedirectRule must contain HostName and HttpRedirectCode") } func TestValidRequiredPrimitiveTypeListItemInList(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ "RoutingRules": []interface{}{ @@ -230,11 +235,11 @@ func TestValidRequiredPrimitiveTypeListItemInList(t *testing.T) { } resources["ExampleResource"] = createResourceWithNestedProperties("AWS::List2::Bucket", "WebsiteConfiguration", properties) - assert.True(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.True(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } func TestLackOfNonRequiredNestedListItemProperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ @@ -248,22 +253,22 @@ func TestLackOfNonRequiredNestedListItemProperty(t *testing.T) { } resources["ExampleResource"] = createResourceWithNestedProperties("AWS::List3::Bucket", "LifecycleConfiguration", properties) - assert.True(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.True(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } func TestInvalidList(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := "DummyValue" resources["ExampleResource"] = createResourceWithOneProperty("AWS::List4::DBSubnetGroup", "SubnetIds", properties) - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } func TestValidList(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) @@ -275,11 +280,11 @@ func TestValidList(t *testing.T) { } resources["ExampleResource"] = resource - assert.True(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.True(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } func TestValidIfMapInNestedPropertyIsMap(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ @@ -289,11 +294,11 @@ func TestValidIfMapInNestedPropertyIsMap(t *testing.T) { } resources["ExampleResource"] = createResourceWithNestedProperties("AWS::Map2::Thing", "AttributePayload", properties) - assert.True(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.True(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } func TestInvalidNestedNonMapProperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) properties := map[string]interface{}{ @@ -301,11 +306,11 @@ func TestInvalidNestedNonMapProperty(t *testing.T) { } resources["ExampleResource"] = createResourceWithNestedProperties("AWS::Map2::Thing", 
"AttributePayload", properties) - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource shouldn't be valid - Attributes should be a Map") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource shouldn't be valid - Attributes should be a Map") } func TestValidMapProperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) resource := template.Resource{} @@ -319,11 +324,11 @@ func TestValidMapProperty(t *testing.T) { resource.Properties["Family"] = "mysql5.6" resources["ExampleResource"] = resource - assert.True(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.True(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } func TestInvalidMapProperty(t *testing.T) { - sink = logger.Logger{} + mockContext.Logger = &logger.Logger{} resources := make(map[string]template.Resource) resource := template.Resource{} @@ -333,11 +338,32 @@ func TestInvalidMapProperty(t *testing.T) { resource.Properties["Family"] = "mysql5.6" resources["ExampleResource"] = resource - assert.False(t, validateResources(resources, &spec, &sink, deadProp, deadRes), "This resource should be valid") + assert.False(t, validateResources(resources, &spec, deadProp, deadRes, specInconsistency, &mockContext), "This resource should be valid") } -func createResourceWithNestedProperties(resourceType string, propertyName string, nestedPropertyValue map[string]interface{}) template.Resource { +func TestHasAllowedValuesParametersValid(t *testing.T) { + _ = logger.Logger{} + data := make(map[string]interface{}) + + data["AllowedValues"] = "" + data["Type"] = "String" + parameters := createParameters("Correct", data) + + assert.True(t, hasAllowedValuesParametersValid(parameters), "This template has AllowedValues with Type String") +} + +func TestHasAllowedValuesParametersInvalid(t *testing.T) { + _ = logger.Logger{} + data := make(map[string]interface{}) + + data["AllowedValues"] = "" + data["Type"] = "AWS::EC2::VPC::Id" + parameters := createParameters("Incorrect", data) + assert.False(t, hasAllowedValuesParametersValid(parameters), "This template has AllowedValues with Type other than String") +} + +func createResourceWithNestedProperties(resourceType string, propertyName string, nestedPropertyValue map[string]interface{}) template.Resource { resource := template.Resource{} resource.Type = resourceType resource.Properties = make(map[string]interface{}) @@ -354,3 +380,10 @@ func createResourceWithOneProperty(resourceType string, propertyName string, pro return resource } + +func createParameters(name string, value map[string]interface{}) map[string]interface{} { + parameters := make(map[string]interface{}) + parameters[name] = value + + return parameters +} diff --git a/offlinevalidator/parsers/jsonparser.go b/validator/parsers/jsonparser.go similarity index 97% rename from offlinevalidator/parsers/jsonparser.go rename to validator/parsers/jsonparser.go index 9d62200..5591cc2 100644 --- a/offlinevalidator/parsers/jsonparser.go +++ b/validator/parsers/jsonparser.go @@ -14,14 +14,16 @@ // See the License for the specific language governing permissions and // limitations under the License. +// Package parsers provides some parsers to prepare Template. 
package parsers import ( "github.com/Appliscale/jsonparser" - "github.com/Appliscale/perun/offlinevalidator/template" + "github.com/Appliscale/perun/validator/template" "strconv" ) +// ParseJson parses byte to TemplateWithDetails. func ParseJson(fileContents []byte, tmpl *template.TemplateWithDetails) error { elements, err := parse(fileContents) if err != nil { diff --git a/offlinevalidator/parsers/jsonparser_test.go b/validator/parsers/jsonparser_test.go similarity index 97% rename from offlinevalidator/parsers/jsonparser_test.go rename to validator/parsers/jsonparser_test.go index dd55542..ff6044d 100644 --- a/offlinevalidator/parsers/jsonparser_test.go +++ b/validator/parsers/jsonparser_test.go @@ -18,7 +18,7 @@ package parsers import ( "fmt" - template "github.com/Appliscale/perun/offlinevalidator/template" + template "github.com/Appliscale/perun/validator/template" "github.com/stretchr/testify/assert" "io/ioutil" "os" diff --git a/offlinevalidator/parsers/test_resources/sample_template.json b/validator/parsers/test_resources/sample_template.json similarity index 100% rename from offlinevalidator/parsers/test_resources/sample_template.json rename to validator/parsers/test_resources/sample_template.json diff --git a/offlinevalidator/template/template.go b/validator/template/template.go similarity index 74% rename from offlinevalidator/template/template.go rename to validator/template/template.go index 6dc82a2..7fcebfc 100644 --- a/offlinevalidator/template/template.go +++ b/validator/template/template.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -14,8 +14,10 @@ // See the License for the specific language governing permissions and // limitations under the License. +// Package template provides struct which describes AWS Template. package template +// Template describes AWS Template. type Template struct { AWSTemplateFormatVersion string `yaml:"AWSTemplateFormatVersion"` Description string `yaml:"Description"` @@ -28,7 +30,12 @@ type Template struct { Outputs map[string]interface{} `yaml:"Outputs"` } +// Resource describes structure of Resources in Template. type Resource struct { - Type string `yaml:"Type"` - Properties map[string]interface{} `yaml:"Properties"` + Type string `yaml:"Type"` + Properties map[string]interface{} `yaml:"Properties"` + DeletionPolicy string `yaml:"DeletionPolicy"` } + +// Parameters describes structure of Parameters in Template. +type Parameters map[string]interface{} diff --git a/offlinevalidator/template/templateWithDetails.go b/validator/template/templateWithDetails.go similarity index 88% rename from offlinevalidator/template/templateWithDetails.go rename to validator/template/templateWithDetails.go index 0d9d07d..03739ec 100644 --- a/offlinevalidator/template/templateWithDetails.go +++ b/validator/template/templateWithDetails.go @@ -16,6 +16,7 @@ package template +// TemplateWithDetails - more detailed template. type TemplateWithDetails struct { AWSTemplateFormatVersion *TemplateElement Description *TemplateElement @@ -28,6 +29,7 @@ type TemplateWithDetails struct { Outputs *TemplateElement } +// TemplateElement describe each Element. type TemplateElement struct { Name string Value interface{} @@ -37,6 +39,7 @@ type TemplateElement struct { Column int } +// TemplateElementValueType describe type of the element value. type TemplateElementValueType int const ( @@ -50,14 +53,17 @@ const ( Unknown ) +// GetChildrenMap - get map in Element. 
func (te *TemplateElement) GetChildrenMap() map[string]*TemplateElement { return te.Children.(map[string]*TemplateElement) } +// GetChildrenSlice - get slice in Element. func (te *TemplateElement) GetChildrenSlice() []*TemplateElement { return *te.Children.(*[]*TemplateElement) } +// Traverse - get all maps and slices. func (te *TemplateElement) Traverse(iterator func(element *TemplateElement, parent *TemplateElement, depth int)) { if te != nil { te.traverse(iterator, nil, 0) @@ -77,6 +83,7 @@ func (te *TemplateElement) traverse(iterator func(element *TemplateElement, pare } } +// Traverse - get all maps and slices for each element. func (twd TemplateWithDetails) Traverse(iterator func(element *TemplateElement, parent *TemplateElement, depth int)) { twd.AWSTemplateFormatVersion.Traverse(iterator) twd.Description.Traverse(iterator) diff --git a/offlinevalidator/test_resources/test_specification.json b/validator/test_resources/test_specification.json similarity index 100% rename from offlinevalidator/test_resources/test_specification.json rename to validator/test_resources/test_specification.json diff --git a/validator/test_resources/test_template.yaml b/validator/test_resources/test_template.yaml new file mode 100644 index 0000000..97ab605 --- /dev/null +++ b/validator/test_resources/test_template.yaml @@ -0,0 +1,8 @@ +AWSTemplateFormatVersion: "2010-09-09" + +Resources: + S3: + Type: AWS::S3::Bucket + Properties: + BucketName: peruntestbucket + diff --git a/validator/validators/generalvalidator.go b/validator/validators/generalvalidator.go new file mode 100644 index 0000000..538aecb --- /dev/null +++ b/validator/validators/generalvalidator.go @@ -0,0 +1,85 @@ +package validators + +import ( + "github.com/Appliscale/perun/context" + "github.com/Appliscale/perun/logger" + "github.com/Appliscale/perun/validator/template" + "strconv" + "strings" +) + +type Restrictor func(string) (bool, string) + +var defaultRestrictor Restrictor = func(propertyName string) (valid bool, msg string) { return true, "Should pass" } + +func GetRestrictor(key string, ctx *context.Context) Restrictor { + return defaultRestrictor +} + +func GeneralValidateResourceByName(resourceValue template.Resource, resourceValidation *logger.ResourceValidation, ctx *context.Context) { + for propertyName, propertyValue := range resourceValue.Properties { + analyzeObject(propertyName, propertyValue, resourceValidation, propertyName+" ", ctx) + } +} + +func analyzeObject(propertyName string, propertyValue interface{}, resourceValidation *logger.ResourceValidation, preMessage string, ctx *context.Context) { + var propertyRestrictor Restrictor + switch propertyValue.(type) { + case string: + propertyRestrictor = GetRestrictor(propertyName, ctx) + if valid, msg := propertyRestrictor(propertyValue.(string)); !valid { + resourceValidation.AddValidationWarning(preMessage + ": " + msg + ", but the value is: \"" + propertyValue.(string) + "\"") + } + break + case []interface{}: + if isStringList(propertyValue.([]interface{})) { + propertyRestrictor = GetRestrictor(propertyName, ctx) + for index, value := range propertyValue.([]interface{}) { + if valid, msg := propertyRestrictor(value.(string)); !valid { + resourceValidation.AddValidationWarning(preMessage + " -> [" + strconv.Itoa(index) + "]: " + msg + ", but the value is: \"" + value.(string) + "\"") + } + } + } else { + for index, value := range propertyValue.([]interface{}) { + analyzeObject(propertyName, value, resourceValidation, preMessage+" -> ["+strconv.Itoa(index)+"]", ctx) + } + 
} + break + case map[string]interface{}: + for k, v := range propertyValue.(map[string]interface{}) { + analyzeObject(k, v, resourceValidation, preMessage+" -> "+k, ctx) + } + break + default: + //Do nothing - preparser is not ideal and skips some properties with intristic functions + } +} + +func isStringList(list []interface{}) bool { + for _, v := range list { + switch v.(type) { + case string: + break + default: + return false + } + } + return true +} + +func UserDecideGeneralRule(ctx *context.Context) bool { + if ctx.Logger.HasValidationErrors() { + return false + } else if ctx.Logger.HasValidationWarnings() { + var ans string + for true { + ctx.Logger.GetInput("Template found some possible validation errors. Do you want to force the operation? [Y/n]", &ans) + if strings.ToLower(ans) == "y" || ans == "" { + return true + } else if strings.ToLower(ans) == "n" { + return false + } + } + } + return true +} diff --git a/offlinevalidator/validators/tags.go b/validator/validators/tags.go similarity index 85% rename from offlinevalidator/validators/tags.go rename to validator/validators/tags.go index ae442cf..5a88367 100644 --- a/offlinevalidator/validators/tags.go +++ b/validator/validators/tags.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -14,8 +14,10 @@ // See the License for the specific language governing permissions and // limitations under the License. +// Package validators contains additional validators. package validators +// Tag - structure of Tag in Template. type Tag struct { Key string Value string diff --git a/offlinevalidator/validators/vpc.go b/validator/validators/vpc.go similarity index 88% rename from offlinevalidator/validators/vpc.go rename to validator/validators/vpc.go index 0504b9a..a044e52 100644 --- a/offlinevalidator/validators/vpc.go +++ b/validator/validators/vpc.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -18,11 +18,12 @@ package validators import ( "github.com/Appliscale/perun/logger" - "github.com/Appliscale/perun/offlinevalidator/template" + "github.com/Appliscale/perun/validator/template" "github.com/asaskevich/govalidator" "github.com/mitchellh/mapstructure" ) +// VpcProperties describes structure of Vpc. type VpcProperties struct { CidrBlock string EnableDnsSupport bool @@ -31,7 +32,7 @@ type VpcProperties struct { Tags []Tag } -// IsVpcValid : Checks if CIDR block is valid +// IsVpcValid : Checks if CIDR block is valid. func IsVpcValid(vpc template.Resource, resourceValidation *logger.ResourceValidation) bool { valid := true var properties VpcProperties diff --git a/offlinevalidator/validators/vpc_test.go b/validator/validators/vpc_test.go similarity index 94% rename from offlinevalidator/validators/vpc_test.go rename to validator/validators/vpc_test.go index c64a081..210d5bd 100644 --- a/offlinevalidator/validators/vpc_test.go +++ b/validator/validators/vpc_test.go @@ -1,4 +1,4 @@ -// Copyright 2017 Appliscale +// Copyright 2018 Appliscale // // Maintainers and contributors are listed in README file inside repository. // @@ -21,7 +21,7 @@ import ( "testing" "github.com/Appliscale/perun/logger" - "github.com/Appliscale/perun/offlinevalidator/template" + "github.com/Appliscale/perun/validator/template" "github.com/stretchr/testify/assert" )
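
For reviewers, a minimal self-contained sketch of how the new fetchBucketDataFromURL helper (added in validator/validator.go above) derives region, bucket and key from a nested stack's TemplateURL. It assumes a path-style S3 URL of the form https://s3.<region>.amazonaws.com/<bucket>/<key>; the concrete URL, bucket and key below are made up for illustration.

package main

import (
	"fmt"
	"strings"
)

// fetchBucketDataFromURL mirrors the helper added in validator/validator.go:
// it splits a path-style S3 URL into its segments and reads the region out of
// the host name and the bucket and object key out of the path.
func fetchBucketDataFromURL(url string) (region string, bucket string, key string) {
	path := strings.SplitN(url, "/", 5) // ["https:", "", host, bucket, key]
	host := strings.Split(path[2], ".") // ["s3", region, "amazonaws", "com"]

	region = host[1]
	bucket = path[3]
	key = path[4]
	return
}

func main() {
	// Hypothetical nested-stack TemplateURL, for illustration only.
	region, bucket, key := fetchBucketDataFromURL("https://s3.eu-west-1.amazonaws.com/example-bucket/nested/child.template.yaml")
	fmt.Println(region, bucket, key) // eu-west-1 example-bucket nested/child.template.yaml
}

With these three values the validator can open a session in the right region, download the object, and hand the temporary file to validateTemplateFile, as validateNestedTemplate does in the hunk above.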