// Package crawler contains the site-specific rate scrapers.
package crawler

import (
	"context"
	"fmt"
	"log/slog"
	"os"
	"time"

	"github.com/maximotejeda/us_dop_scrapper/helpers"
	"github.com/maximotejeda/us_dop_scrapper/internal/application/core/domain"
	"github.com/maximotejeda/us_dop_scrapper/internal/ports"
	"github.com/playwright-community/playwright-go"
)

// uri is the APAP exchange-rates page, read from the APA environment
// variable at package initialization.
var uri = os.Getenv("APA")

// Apap scrapes USD buy/sell rates from Asociación Popular de Ahorros y
// Préstamos (APAP).
type Apap struct{}

// NewApap returns an Apap scraper behind the APIPorts interface.
func NewApap() ports.APIPorts {
	return &Apap{}
}

// Scrape navigates to the APAP site, opens the exchange-rates widget, and
// extracts the USD buy (compra) and sell (venta) rates.
func (a Apap) Scrape(ctx context.Context, page playwright.Page, log *slog.Logger) (insts []*domain.History, err error) {
	tout := 120000.0 // navigation timeout, in milliseconds
	log = log.With("scrapper", "apap")

	if _, err := page.Goto(uri, playwright.PageGotoOptions{
		Timeout:   &tout,
		WaitUntil: playwright.WaitUntilStateLoad,
	}); err != nil {
		log.Error("could not get info", "error", err)
		return nil, err
	}

	// The rates are hidden behind a toggle; wait for it, then click it.
	button := page.Locator("#exchangesRates")
	if err := button.WaitFor(); err != nil {
		log.Error("waiting for rates button", "error", err)
		return nil, err
	}
	if err := button.Click(); err != nil {
		log.Error("clicking rates button", "error", err)
		return nil, err
	}

	compraLocator := page.Locator("#currency-buy-USD")
	ventaLocator := page.Locator("#currency-sell-USD")

	compraSTR, err := compraLocator.TextContent()
	if err != nil {
		log.Error("could not get compra string", "error", err)
		return nil, err
	}
	ventaSTR, err := ventaLocator.TextContent()
	if err != nil {
		log.Error("could not get venta string", "error", err)
		return nil, err
	}

	inst := &domain.History{
		Name:   "asociacion popular de ahorros y prestamos",
		Parser: "apap",
		Parsed: time.Now().Unix(),
	}
	inst.Venta = helpers.Normalize(ventaSTR)
	inst.Compra = helpers.Normalize(compraSTR)

	// A zero rate means the scraped text did not parse into a number.
	if inst.Compra == 0 || inst.Venta == 0 {
		return nil, fmt.Errorf("apap: institution not parsed: %v", inst)
	}
	log.Info("parsed", "value", inst)

	return []*domain.History{inst}, nil
}

// ExecParser opens a mobile-sized page on the given browser, runs Scrape
// under a 6-minute deadline, and returns any error it produced.
func (a Apap) ExecParser(ctx context.Context, browser *playwright.Browser, log *slog.Logger) (err error) {
	t := true
	ua := helpers.NewMobileUA()
	b := *browser
	page, err := b.NewPage(playwright.BrowserNewPageOptions{
		UserAgent: &ua,
		// IsMobile: &t,
		HasTouch: &t,
		Viewport: &playwright.Size{Width: 412, Height: 915},
		Screen:   &playwright.Size{Width: 412, Height: 915},
	})
	if err != nil {
		log.Error("creating page", "error", err)
		// Return the error instead of os.Exit(1) so callers can handle it.
		return err
	}

	ctx, cancel := context.WithTimeout(ctx, 6*time.Minute)
	defer page.Close()
	defer cancel()

	// here we execute db operations
	_, err = a.Scrape(ctx, page, log)
	return err
}
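
// The sketch below shows one way a caller might wire this scraper to a
// Playwright browser. It is a hypothetical usage example, not part of this
// package: playwright.Run, Chromium.Launch, and slog.Default are real
// playwright-go / stdlib APIs, but the surrounding run() function and its
// error handling are assumptions about the caller's bootstrap code.
//
//	func run() error {
//		pw, err := playwright.Run()
//		if err != nil {
//			return err
//		}
//		defer pw.Stop()
//
//		browser, err := pw.Chromium.Launch()
//		if err != nil {
//			return err
//		}
//		defer browser.Close()
//
//		// ExecParser takes a *playwright.Browser, so pass the address
//		// of the launched browser interface value.
//		a := Apap{}
//		return a.ExecParser(context.Background(), &browser, slog.Default())
//	}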