commit 0ef15167d5
28 changed files with 2789 additions and 0 deletions
pkg/reporter/reporter.go (Normal file, 192 lines)
@@ -0,0 +1,192 @@
package reporter

import (
	"encoding/csv"
	"encoding/json"
	"fmt"
	"io"
	"strings"
	"time"

	"git.nakama.town/fmartingr/dharma/pkg/scraper"
	"github.com/fatih/color"
)

// Reporter is an interface for report generators
type Reporter interface {
	Generate(results *scraper.Results, writer io.Writer) error
}

// New creates a new reporter based on the format
func New(format string) (Reporter, error) {
	switch strings.ToLower(format) {
	case "pretty":
		return &PrettyReporter{}, nil
	case "json":
		return &JSONReporter{}, nil
	case "csv":
		return &CSVReporter{}, nil
	default:
		return nil, fmt.Errorf("unsupported format: %s", format)
	}
}

// PrettyReporter generates a human-readable report for terminal
type PrettyReporter struct{}

// Generate generates a pretty report
func (r *PrettyReporter) Generate(results *scraper.Results, writer io.Writer) error {
	red := color.New(color.FgRed).SprintFunc()
	green := color.New(color.FgGreen).SprintFunc()
	yellow := color.New(color.FgYellow).SprintFunc()
	blue := color.New(color.FgBlue).SprintFunc()
	cyan := color.New(color.FgCyan).SprintFunc()

	// Count internal vs external links
	countInternalSuccess := 0
	countInternalErrors := 0
	countExternalSuccess := 0
	countExternalErrors := 0

	for _, result := range results.Successes {
		if result.IsExternal {
			countExternalSuccess++
		} else {
			countInternalSuccess++
		}
	}

	for _, result := range results.Errors {
		if result.IsExternal {
			countExternalErrors++
		} else {
			countInternalErrors++
		}
	}

	fmt.Fprintf(writer, "Website scan report for: %s\n", blue(results.BaseURL))
	fmt.Fprintf(writer, "Scanned at: %s\n", time.Now().Format(time.RFC1123))
	fmt.Fprintf(writer, "Total resources checked: %d\n", results.Total)
	fmt.Fprintf(writer, "Success: %s, Errors: %s\n",
		green(len(results.Successes)),
		red(len(results.Errors)))
	fmt.Fprintf(writer, "Internal links: %s success, %s errors\n",
		green(countInternalSuccess),
		red(countInternalErrors))
	fmt.Fprintf(writer, "External links: %s success, %s errors\n\n",
		green(countExternalSuccess),
		red(countExternalErrors))

	if len(results.Errors) == 0 {
		fmt.Fprintf(writer, "%s No errors found!\n", green("✓"))
		return nil
	}

	// Group errors by internal/external
	internalErrors := []scraper.Result{}
	externalErrors := []scraper.Result{}

	for _, result := range results.Errors {
		if result.IsExternal {
			externalErrors = append(externalErrors, result)
		} else {
			internalErrors = append(internalErrors, result)
		}
	}

	// Print internal errors first if we have any
	if len(internalErrors) > 0 {
		fmt.Fprintln(writer, "Errors found:")

		for _, result := range internalErrors {
			status := fmt.Sprintf("%d", result.Status)
			if result.Status == 0 {
				status = "ERR"
			}

			fmt.Fprintf(writer, "%-6s (%-10s) %s [from: %s]\n",
				red(status),
				yellow(result.Type),
				result.URL,
				result.SourceURL,
			)
		}
	}

	// Print external errors if we have any
	if len(externalErrors) > 0 {
		if len(internalErrors) > 0 {
			fmt.Fprintln(writer, "")
		}
		fmt.Fprintln(writer, "External Errors:")
		fmt.Fprintln(writer, strings.Repeat("-", 80))
		fmt.Fprintf(writer, "%-6s | %-10s | %s | %s\n", "Status", "Type", "URL", "Source")
		fmt.Fprintln(writer, strings.Repeat("-", 80))

		for _, result := range externalErrors {
			status := fmt.Sprintf("%d", result.Status)
			if result.Status == 0 {
				status = "ERR"
			}

			fmt.Fprintf(writer, "%-6s | %-10s | %s | %s\n",
				red(status),
				cyan(result.Type),
				result.URL,
				result.SourceURL,
			)
		}
	}

	return nil
}

// JSONReporter generates a JSON report
type JSONReporter struct{}

// Generate generates a JSON report
func (r *JSONReporter) Generate(results *scraper.Results, writer io.Writer) error {
	return json.NewEncoder(writer).Encode(results)
}

// CSVReporter generates a CSV report
type CSVReporter struct{}

// Generate generates a CSV report
func (r *CSVReporter) Generate(results *scraper.Results, writer io.Writer) error {
	csvWriter := csv.NewWriter(writer)
	defer csvWriter.Flush()

	// Write header
	if err := csvWriter.Write([]string{"Status", "Type", "URL", "Source URL", "Error"}); err != nil {
		return err
	}

	// Write errors
	for _, result := range results.Errors {
		status := fmt.Sprintf("%d", result.Status)
		if result.Status == 0 {
			status = "ERROR"
		}

		if err := csvWriter.Write([]string{
			status,
			result.Type,
			result.URL,
			result.SourceURL,
			result.Error,
		}); err != nil {
			return err
		}
	}

	return nil
}

// Helper function to truncate strings
func truncate(s string, maxLen int) string {
	if len(s) <= maxLen {
		return s
	}
	return s[:maxLen-3] + "..."
}
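For orientation, here is a minimal sketch of how a caller might wire this package up. The snippet is not part of the commit: the hand-built scraper.Results value and the main package stand in for whatever the rest of the tool actually produces, while reporter.New, Generate, and the scraper.Result fields are taken from the diff above.

package main

import (
	"fmt"
	"os"

	"git.nakama.town/fmartingr/dharma/pkg/reporter"
	"git.nakama.town/fmartingr/dharma/pkg/scraper"
)

func main() {
	// Assumed example data; in the real tool these results would come from a scrape run.
	results := &scraper.Results{
		BaseURL: "https://example.com",
		Total:   1,
		Errors: []scraper.Result{
			{
				URL:       "https://example.com/missing",
				SourceURL: "https://example.com",
				Status:    404,
				Type:      "link",
				Error:     "HTTP Error: 404 Not Found",
			},
		},
	}

	// Pick a reporter by format name ("pretty", "json" or "csv") and write to stdout.
	r, err := reporter.New("pretty")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if err := r.Generate(results, os.Stdout); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}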
pkg/reporter/reporter_test.go (Normal file, 206 lines)
@@ -0,0 +1,206 @@
package reporter

import (
	"bytes"
	"encoding/json"
	"strings"
	"testing"

	"git.nakama.town/fmartingr/dharma/pkg/scraper"
)

func TestNew(t *testing.T) {
	tests := []struct {
		name    string
		format  string
		wantErr bool
	}{
		{
			name:    "Pretty format",
			format:  "pretty",
			wantErr: false,
		},
		{
			name:    "JSON format",
			format:  "json",
			wantErr: false,
		},
		{
			name:    "CSV format",
			format:  "csv",
			wantErr: false,
		},
		{
			name:    "Unsupported format",
			format:  "xml",
			wantErr: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := New(tt.format)
			if (err != nil) != tt.wantErr {
				t.Errorf("New() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !tt.wantErr && got == nil {
				t.Errorf("New() = nil, want non-nil")
			}
		})
	}
}

func TestJSONReporter_Generate(t *testing.T) {
	// Create test results
	results := &scraper.Results{
		BaseURL: "https://example.com",
		Errors: []scraper.Result{
			{
				URL:       "https://example.com/error",
				SourceURL: "https://example.com",
				Status:    404,
				Error:     "HTTP Error: 404 Not Found",
				Type:      "link",
			},
		},
		Successes: []scraper.Result{
			{
				URL:       "https://example.com/success",
				SourceURL: "https://example.com",
				Status:    200,
				Type:      "link",
			},
		},
		Total: 2,
	}

	// Create reporter and buffer
	reporter := &JSONReporter{}
	buf := &bytes.Buffer{}

	// Generate report
	if err := reporter.Generate(results, buf); err != nil {
		t.Fatalf("Generate() error = %v", err)
	}

	// Parse output
	var output scraper.Results
	if err := json.Unmarshal(buf.Bytes(), &output); err != nil {
		t.Fatalf("Failed to parse JSON: %v", err)
	}

	// Verify output
	if output.BaseURL != results.BaseURL {
		t.Errorf("BaseURL = %v, want %v", output.BaseURL, results.BaseURL)
	}
	if len(output.Errors) != len(results.Errors) {
		t.Errorf("Errors count = %v, want %v", len(output.Errors), len(results.Errors))
	}
	if len(output.Successes) != len(results.Successes) {
		t.Errorf("Successes count = %v, want %v", len(output.Successes), len(results.Successes))
	}
	if output.Total != results.Total {
		t.Errorf("Total = %v, want %v", output.Total, results.Total)
	}
}

func TestCSVReporter_Generate(t *testing.T) {
	// Create test results
	results := &scraper.Results{
		BaseURL: "https://example.com",
		Errors: []scraper.Result{
			{
				URL:       "https://example.com/error",
				SourceURL: "https://example.com",
				Status:    404,
				Error:     "HTTP Error: 404 Not Found",
				Type:      "link",
			},
		},
		Successes: []scraper.Result{},
		Total:     1,
	}

	// Create reporter and buffer
	reporter := &CSVReporter{}
	buf := &bytes.Buffer{}

	// Generate report
	if err := reporter.Generate(results, buf); err != nil {
		t.Fatalf("Generate() error = %v", err)
	}

	// Verify output
	lines := strings.Split(strings.TrimSpace(buf.String()), "\n")
	if len(lines) != 2 { // Header + 1 error
		t.Errorf("Expected 2 lines, got %d", len(lines))
	}

	// Check header
	expectedHeader := "Status,Type,URL,Source URL,Error"
	if lines[0] != expectedHeader {
		t.Errorf("Header = %v, want %v", lines[0], expectedHeader)
	}
}

func TestPrettyReporter_Generate(t *testing.T) {
	// Test with errors
	results := &scraper.Results{
		BaseURL: "https://example.com",
		Errors: []scraper.Result{
			{
				URL:       "https://example.com/error",
				SourceURL: "https://example.com",
				Status:    404,
				Error:     "HTTP Error: 404 Not Found",
				Type:      "link",
			},
		},
		Successes: []scraper.Result{
			{
				URL:       "https://example.com/success",
				SourceURL: "https://example.com",
				Status:    200,
				Type:      "link",
			},
		},
		Total: 2,
	}

	// Create reporter and buffer
	reporter := &PrettyReporter{}
	buf := &bytes.Buffer{}

	// Generate report
	if err := reporter.Generate(results, buf); err != nil {
		t.Fatalf("Generate() error = %v", err)
	}

	// Check that output contains key sections
	output := buf.String()
	if !strings.Contains(output, "Website scan report for") {
		t.Error("Output doesn't contain report title")
	}
	if !strings.Contains(output, "Errors found:") {
		t.Error("Output doesn't contain errors section")
	}

	// Test with no errors
	results = &scraper.Results{
		BaseURL:   "https://example.com",
		Errors:    []scraper.Result{},
		Successes: []scraper.Result{},
		Total:     0,
	}

	buf = &bytes.Buffer{}
	if err := reporter.Generate(results, buf); err != nil {
		t.Fatalf("Generate() error = %v", err)
	}

	output = buf.String()
	if !strings.Contains(output, "No errors found") {
		t.Error("Output doesn't contain 'No errors found' message")
	}
}
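Assuming the module layout implied by the import path above, these tests run with the standard Go tooling from the repository root:

	go test ./pkg/reporter/...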