Skip to content

Commit

Permalink
Merge pull request #17 from austin1237/execution-logs
Browse files Browse the repository at this point in the history
Each site's execution time is now logged
  • Loading branch information
austin1237 authored Mar 31, 2024
2 parents ecf91c7 + 4131068 commit f70f398
Showing 1 changed file with 17 additions and 3 deletions.
20 changes: 17 additions & 3 deletions scraper/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import (
"scraper/sited"
"scraper/sitee"
"scraper/sitef"
"time"

"github.com/aws/aws-lambda-go/lambda"
)
Expand All @@ -24,6 +25,11 @@ type Site struct {
BaseURL string
}

// Result holds the outcome of scraping a single site: how long the
// scrape took and which site it was for.
type Result struct {
	// Elapsed is the wall-clock time the site's scrape took,
	// measured with time.Since from the start of the goroutine.
	Elapsed time.Duration
	// URL is the site's BaseURL, used to identify the site in logs.
	URL string
}

var (
proxyURL string
scraperWebhook string
Expand Down Expand Up @@ -102,23 +108,31 @@ func lookForNewJobs() {
// Add more sites here
}

doneChannel := make(chan bool, len(sites))
results := make([]Result, 0, len(sites))
doneChannel := make(chan Result, len(sites))
for _, site := range sites {
go func(site Site) {
start := time.Now()
uncachedJobs, interestingJobs := site.ScanNewJobs(site.BaseURL, proxyURL, cache)
errs := discord.SendJobsToDiscord(interestingJobs, scraperWebhook)
if len(errs) == 0 {
cache.WriteCompaniesToCache(uncachedJobs)
} else {
log.Println("Error sending to discord", errs)
}
doneChannel <- true
elapsed := time.Since(start)
doneChannel <- Result{Elapsed: elapsed, URL: site.BaseURL}
}(site)
}

// Wait for all goroutines to finish
for range sites {
<-doneChannel
result := <-doneChannel
results = append(results, result)
}

for _, result := range results {
log.Printf("Execution took %s for %s \n", result.Elapsed, result.URL)
}

}
Expand Down

0 comments on commit f70f398

Please sign in to comment.