// Package crawler contains site-specific scrapers for USD/DOP exchange-rate sources.
package crawler

import (
	"context"
	"fmt"
	"log/slog"
	"time"

	"github.com/maximotejeda/us_dop_scrapper/config"
	"github.com/maximotejeda/us_dop_scrapper/helpers"
	"github.com/maximotejeda/us_dop_scrapper/internal/application/core/domain"
	"github.com/maximotejeda/us_dop_scrapper/internal/ports"
	"github.com/playwright-community/playwright-go"
)

// bdr scrapes USD buy/sell rates from the Banreservas (BDR) website.
type bdr struct{}

// NewBDR returns a Banreservas scraper implementing ports.APIPorts.
func NewBDR() ports.APIPorts {
	return &bdr{}
}

// Scrape navigates to the Banreservas page, waits for the currency widget,
// and extracts the USD compra (buy) and venta (sell) rates into a single
// domain.History record. It returns an error when navigation fails, either
// rate cannot be read from the DOM, or either parsed rate normalizes to zero.
func (bd bdr) Scrape(ctx context.Context, page playwright.Page, log *slog.Logger) (insts []*domain.History, err error) {
	// Navigation timeout in milliseconds (playwright options take *float64).
	tout := 120000.0
	log = log.With("scrapper", "bdr")

	if _, err := page.Goto(config.GetBDRURL(), playwright.PageGotoOptions{
		Timeout:   &tout,
		WaitUntil: playwright.WaitUntilStateLoad,
	}); err != nil {
		log.Error("could not get info", "error", err)
		return nil, err
	}
	if err = page.WaitForLoadState(); err != nil {
		log.Error("waiting for page state", "err", err)
		return nil, err
	}

	// Wait for the currency section and BOTH rate spans before reading text.
	// Previously the WaitFor errors were discarded and the venta span was
	// never waited for, which made reads flaky on slow page loads.
	if err = page.Locator("section#divisas").WaitFor(); err != nil {
		log.Error("waiting for divisas section", "err", err)
		return nil, err
	}
	compraLocator := page.Locator("span#compraUS")
	if err = compraLocator.WaitFor(); err != nil {
		log.Error("waiting for compra locator", "err", err)
		return nil, err
	}
	ventaLocator := page.Locator("span#ventaUS")
	if err = ventaLocator.WaitFor(); err != nil {
		log.Error("waiting for venta locator", "err", err)
		return nil, err
	}

	compraSTR, err := compraLocator.TextContent()
	if err != nil {
		log.Error("parsing compra", "err", err)
		return nil, err
	}
	ventaSTR, err := ventaLocator.TextContent()
	if err != nil {
		// Fixed: this branch previously logged "parsing compra" (copy-paste).
		log.Error("parsing venta", "err", err)
		return nil, err
	}

	inst := &domain.History{
		Name: "banreservas",
		// NOTE(review): "brd" looks like a typo for "bdr", but stored records
		// may be queried by this value — confirm downstream before changing.
		Parser: "brd",
		Parsed: time.Now().Unix(), // scrape timestamp, Unix seconds
	}
	inst.Compra = helpers.Normalize(compraSTR)
	inst.Venta = helpers.Normalize(ventaSTR)

	// A zero rate means the page markup changed or normalization failed.
	if inst.Compra == 0 || inst.Venta == 0 {
		return nil, fmt.Errorf("brd: institution not parsed")
	}
	insts = append(insts, inst)
	return insts, nil
}