crawler

package
v0.1.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Mar 22, 2026 License: MIT Imports: 13 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type Crawler

type Crawler struct {
	// contains filtered or unexported fields
}

func New

func New(fetcher *Fetcher, store store.Store, src source.Source, workers int) *Crawler

func (*Crawler) Run

func (c *Crawler) Run(ctx context.Context, opts RunOptions) error

type Fetcher

type Fetcher struct {
	// contains filtered or unexported fields
}

func NewFetcher

func NewFetcher(requestsPerSecond int, maxRetry int, userAgent string) *Fetcher

func (*Fetcher) Close

func (f *Fetcher) Close()

func (*Fetcher) Fetch

func (f *Fetcher) Fetch(ctx context.Context, url string) ([]byte, error)

Fetch downloads a URL with rate limiting, retries, and exponential backoff.

type RunOptions

type RunOptions struct {
	Resume      bool
	RetryFailed bool
}

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL