Handle URL resolution errors properly
Do not panic right away when a URL fails to resolve. Capture the error and make it visible in the statistics. The change also handles the fail CLI option properly, so the program only fails after all links have been processed, regardless of their individual outcomes.
bmuschko committed Dec 28, 2018
1 parent c57b89f commit 17d5b7b
Showing 6 changed files with 70 additions and 15 deletions.
7 changes: 7 additions & 0 deletions file/file_test.go
@@ -108,6 +108,13 @@ func TestReadFile(t *testing.T) {
deleteFile(path1)
}

func TestPanicWhenReadingNonExistentFile(t *testing.T) {
path := filepath.Join("1.adoc")
Panics(t, func() {
ReadFile(path)
})
}

func createDir(path string) {
err := os.MkdirAll(path, 0755)

13 changes: 8 additions & 5 deletions http/http.go
@@ -16,24 +16,26 @@ func SetTimeout(timeout int) {
// Get emits a HTTP GET request for a given URL. Captures the status code, status and outcome of the call.
// Returns with information about the response.
func Get(link string) HttpResponse {
result := HttpResponse{Url: link, Success: true}
result := HttpResponse{Url: link}
url, err := url.ParseRequestURI(link)

if err != nil {
panic(err)
result.Error = err
return result
}

resp, err := client.Get(url.String())

if err != nil {
panic(err)
result.Error = err
return result
}

result.StatusCode = resp.StatusCode
result.Status = resp.Status

if resp.StatusCode != 200 {
result.Success = false
if resp.StatusCode == 200 {
result.Success = true
}

resp.Body.Close()
@@ -46,4 +48,5 @@ type HttpResponse struct {
Success bool
StatusCode int
Status string
Error error
}
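
For illustration only, a minimal sketch of how a caller might consume the reworked Get; the import path below is a placeholder assumption, not taken from this commit:

package main

import (
	"fmt"

	"example.com/link-verifier/http" // placeholder import path for the http package shown above
)

func main() {
	response := http.Get("123://www.invalid.com/")

	// An unresolvable URL no longer panics; the error travels back on the response.
	if response.Error != nil {
		fmt.Printf("[ERROR] %s (%s)\n", response.Url, response.Error.Error())
		return
	}

	if response.Success {
		fmt.Printf("[OK] %s (%d)\n", response.Url, response.StatusCode)
	} else {
		fmt.Printf("[FAILED] %s (%s)\n", response.Url, response.Status)
	}
}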
29 changes: 23 additions & 6 deletions http/http_test.go
@@ -6,27 +6,44 @@ import (
"testing"
)

func TestSetTimeout(t *testing.T) {
SetTimeout(20)
}

func TestGetValidUrl(t *testing.T) {
url := "http://www.google.com/"
result := Get(url)

Equal(t, url, result.Url)
True(t, result.Success)
Nil(t, result.Error)
Equal(t, 200, result.StatusCode)
}

func TestGetUrlForBadRequest(t *testing.T) {
url := "https://www.googleapis.com/urlshortener/v1/url"
result := Get(url)

Equal(t, url, result.Url)
False(t, result.Success)
Nil(t, result.Error)
Equal(t, 400, result.StatusCode)
}

func TestGetNonExistentUrl(t *testing.T) {
url := "http://www.unknown1x.com/"
Panics(t, func() {
Get(url)
})
result := Get(url)

Equal(t, url, result.Url)
False(t, result.Success)
NotNil(t, result.Error)
}

func TestGetUrlForBadRequest(t *testing.T) {
url := "https://www.googleapis.com/urlshortener/v1/url"
func TestGetInvalidUrl(t *testing.T) {
url := "123://www.invalid.com/"
result := Get(url)

Equal(t, url, result.Url)
False(t, result.Success)
Equal(t, 400, result.StatusCode)
NotNil(t, result.Error)
}
11 changes: 11 additions & 0 deletions stat/stat.go
@@ -20,6 +20,16 @@ func SumFailures(aggregateSummary []Summary) int {
return sum(failures)
}

// SumErrors sums up all errors.
// Returns the overall number of errors.
func SumErrors(aggregateSummary []Summary) int {
errors := collect(aggregateSummary, func(summary Summary) int {
return summary.Errored
})

return sum(errors)
}

func collect(list []Summary, f convert) []int {
result := make([]int, len(list))

@@ -46,4 +56,5 @@ type convert func(Summary) int
type Summary struct {
Successful int
Failed int
Errored int
}
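
As a rough usage sketch (again with a placeholder import path), the new Errored field and SumErrors slot in next to the existing counters like this:

package main

import (
	"fmt"

	"example.com/link-verifier/stat" // placeholder import path for the stat package shown above
)

func main() {
	aggregateSummary := []stat.Summary{
		{Successful: 12, Failed: 1},
		{Successful: 3, Failed: 0, Errored: 2},
	}

	fmt.Println("SUCCESSFUL:", stat.SumSuccesses(aggregateSummary)) // 15
	fmt.Println("FAILED:", stat.SumFailures(aggregateSummary))      // 1
	fmt.Println("ERRORED:", stat.SumErrors(aggregateSummary))       // 2
}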
15 changes: 14 additions & 1 deletion stat/stat_test.go
@@ -15,7 +15,7 @@ func TestSumSuccessesForEmptySlice(t *testing.T) {
func TestSumSuccessesForPopulatedSlice(t *testing.T) {
aggregateSummary := summaries()
sum := SumSuccesses(aggregateSummary)
Equal(t, 29, sum)
Equal(t, 41, sum)
}

func TestSumFailuresForEmptySlice(t *testing.T) {
@@ -30,10 +30,23 @@ func TestSumFailuresForPopulatedSlice(t *testing.T) {
Equal(t, 70, sum)
}

func TestSumErrorsForEmptySlice(t *testing.T) {
aggregateSummary := []Summary{}
sum := SumErrors(aggregateSummary)
Equal(t, 0, sum)
}

func TestSumErrorsForPopulatedSlice(t *testing.T) {
aggregateSummary := summaries()
sum := SumErrors(aggregateSummary)
Equal(t, 2, sum)
}

func summaries() []Summary {
summaries := []Summary{}
summaries = append(summaries, Summary{Successful: 20, Failed: 3})
summaries = append(summaries, Summary{Successful: 7, Failed: 67})
summaries = append(summaries, Summary{Successful: 2, Failed: 0})
summaries = append(summaries, Summary{Successful: 12, Failed: 0, Errored: 2})
return summaries
}
10 changes: 7 additions & 3 deletions verify/verify.go
@@ -49,12 +49,13 @@ func Process(files []string, fail bool) {
if len(aggregateSummary) > 0 {
successCount := stat.SumSuccesses(aggregateSummary)
failureCount := stat.SumFailures(aggregateSummary)
stats := fmt.Sprintf("SUCCESSFUL: %s, FAILED: %s", strconv.Itoa(successCount), strconv.Itoa(failureCount))
errorCount := stat.SumErrors(aggregateSummary)
stats := fmt.Sprintf("SUCCESSFUL: %s, FAILED: %s, ERRORED: %s", strconv.Itoa(successCount), strconv.Itoa(failureCount), strconv.Itoa(errorCount))
fmt.Println()
fmt.Println(calculateSeparator(stats))
fmt.Println(stats)

if failureCount > 0 && !fail {
if (failureCount > 0 || errorCount > 0) && !fail {
os.Exit(1)
}
}
@@ -84,7 +85,10 @@ func parseLinks(content string) stat.Summary {
func validateLink(link string, summary *stat.Summary, ch chan<- string) {
response := http.Get(link)

if response.Success {
if response.Error != nil {
summary.Errored++
ch <- fmt.Sprintf("[ERROR] %s (%s)", link, response.Error.Error())
} else if response.Success {
summary.Successful++
ch <- fmt.Sprintf("[OK] %s", link)
} else {
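
To show how the pieces fit together inside the verify package, here is a hypothetical, package-internal driver that is not part of this commit; the helper name and the sample links are made up:

// reportLinks is a hypothetical helper, sketched only to illustrate how
// validateLink now routes errors, failures, and successes into the summary
// and onto the reporting channel.
func reportLinks() {
	links := []string{"http://www.google.com/", "123://www.invalid.com/"} // made-up sample links
	summary := stat.Summary{}
	ch := make(chan string, len(links))

	for _, link := range links {
		validateLink(link, &summary, ch)
	}

	for range links {
		fmt.Println(<-ch) // e.g. "[OK] ..." or "[ERROR] ... (...)"
	}

	fmt.Printf("SUCCESSFUL: %d, FAILED: %d, ERRORED: %d\n", summary.Successful, summary.Failed, summary.Errored)
}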
