diff --git a/scraper/sitea/sitea.go b/scraper/sitea/sitea.go
index 8b6e0de..9b859fb 100644
--- a/scraper/sitea/sitea.go
+++ b/scraper/sitea/sitea.go
@@ -81,8 +81,8 @@ func ScanNewJobs(siteABaseUrl string, proxyUrl string) []job.Job {
 		page++
 	}
 
-	log.Println("siteA total jobs found", len(possibleJobs))
+	log.Println(siteABaseUrl+" total jobs found", len(possibleJobs))
 	interestingJobs := interest.FilterInterest(proxyUrl, possibleJobs, GetSiteAJobInfo)
-	log.Println("siteA interesting jobs found", len(interestingJobs))
+	log.Println(siteABaseUrl+" interesting jobs found", len(interestingJobs))
 	return interestingJobs
 }
diff --git a/scraper/siteb/siteb.go b/scraper/siteb/siteb.go
index 8b38abf..ecd895b 100644
--- a/scraper/siteb/siteb.go
+++ b/scraper/siteb/siteb.go
@@ -62,8 +62,8 @@ func getSiteBJobInfo(jobUrl string, proxyUrl string) (string, error) {
 
 func ScanNewJobs(sitebBaseUrl string, proxyUrl string) []job.Job {
 	jobs := job.GetNewJobs(sitebBaseUrl+"/jobs", proxyUrl, siteBJobListParser)
-	log.Println("siteB total jobs found", len(jobs))
+	log.Println(sitebBaseUrl+" total jobs found", len(jobs))
 	interestingJobs := interest.FilterInterest(proxyUrl, jobs, getSiteBJobInfo)
-	log.Println("siteB interesting jobs", len(interestingJobs))
+	log.Println(sitebBaseUrl+" interesting jobs", len(interestingJobs))
 	return interestingJobs
 }
diff --git a/scraper/sitec/sitec.go b/scraper/sitec/sitec.go
index b5f74f2..2941f39 100644
--- a/scraper/sitec/sitec.go
+++ b/scraper/sitec/sitec.go
@@ -66,8 +66,8 @@ func ScanNewJobs(sitecBaseUrl string, proxyUrl string) []job.Job {
 	}
 	jobs = job.DeduplicatedLinks(jobs)
 
-	log.Println("siteC total jobs found", len(jobs))
+	log.Println(sitecBaseUrl+" total jobs found", len(jobs))
 	interestingJobs := interest.FilterInterest(proxyUrl, jobs, getSiteCJobInfo)
-	log.Println("siteC interesting jobs", len(interestingJobs))
+	log.Println(sitecBaseUrl+" interesting jobs", len(interestingJobs))
 	return interestingJobs
 }
diff --git a/scraper/sited/sited.go b/scraper/sited/sited.go
index 67cad22..bc65782 100644
--- a/scraper/sited/sited.go
+++ b/scraper/sited/sited.go
@@ -44,8 +44,8 @@ func getSiteDJobInfo(jobUrl string, proxyUrl string) (string, error) {
 
 func ScanNewJobs(siteDBaseUrl string, proxyUrl string) []job.Job {
 	jobs := job.GetNewJobs(siteDBaseUrl+"/remote-jobs/developer/", proxyUrl, siteDJobListParser)
-	log.Println("siteD total jobs found", len(jobs))
+	log.Println(siteDBaseUrl+" total jobs found", len(jobs))
 	interestingJobs := interest.FilterInterest(proxyUrl, jobs, getSiteDJobInfo)
-	log.Println("siteD interesting jobs", len(interestingJobs))
+	log.Println(siteDBaseUrl+" interesting jobs", len(interestingJobs))
 	return interestingJobs
 }