package scrape

import (
	"github.com/opencost/opencost/core/pkg/log"
	"github.com/opencost/opencost/modules/collector-source/pkg/metric"
	"github.com/opencost/opencost/modules/collector-source/pkg/scrape/parser"
	"github.com/opencost/opencost/modules/collector-source/pkg/scrape/target"
)
  8. type TargetScraper struct {
  9. targetProvider target.TargetProvider
  10. metricNames map[string]struct{} // filter for which metrics will be processed
  11. includeMetrics bool // toggle to make metrics an include or exclude list
  12. }
  13. func newTargetScrapper(provider target.TargetProvider, metricNames []string, includeMetrics bool) *TargetScraper {
  14. metricSet := make(map[string]struct{})
  15. for _, metricName := range metricNames {
  16. metricSet[metricName] = struct{}{}
  17. }
  18. return &TargetScraper{
  19. targetProvider: provider,
  20. metricNames: metricSet,
  21. includeMetrics: includeMetrics,
  22. }
  23. }
  24. func (s *TargetScraper) Scrape() []metric.Update {
  25. targets := s.targetProvider.GetTargets()
  26. var scrapeFuncs []ScrapeFunc
  27. for i := range targets {
  28. target := targets[i]
  29. fn := func() []metric.Update {
  30. var scrapeResults []metric.Update
  31. f, err := target.Load()
  32. if err != nil {
  33. log.Errorf("failed to scrape target: %s", err.Error())
  34. return scrapeResults
  35. }
  36. results, err := parser.Parse(f)
  37. if err != nil {
  38. log.Errorf("failed to parse target: %s", err.Error())
  39. return scrapeResults
  40. }
  41. for _, result := range results {
  42. // filter metrics to be processed by name
  43. if _, ok := s.metricNames[result.Name]; ok != s.includeMetrics {
  44. continue
  45. }
  46. scrapeResults = append(scrapeResults, metric.Update{
  47. Name: result.Name,
  48. Labels: result.Labels,
  49. Value: result.Value,
  50. })
  51. }
  52. return scrapeResults
  53. }
  54. scrapeFuncs = append(scrapeFuncs, fn)
  55. }
  56. return concurrentScrape(scrapeFuncs...)
  57. }