node

package
v0.0.0-...-a95d079 Latest Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 16, 2015 License: MIT Imports: 7 Imported by: 0

Documentation

Index

Constants

View Source
const (
	CLUSTER_STATUS_INIT = iota
	CLUSTER_STATUS_JOIN
	CLUSTER_STATUS_ELECTION
	CLUSTER_STATUS_READY
)

Cluster status: * init: everything has been initialized. * join: try to connect to other nodes; if none respond, make this node the master, otherwise obtain the master from another node. * election (optional): once the ring is built, start electing a master. * ready: ready to start crawling.

Variables

This section is empty.

Functions

This section is empty.

Types

type Cluster

type Cluster struct {
	ClusterInfo   *ClusterInfo
	RequestStatus *RequestStatus
	// contains filtered or unexported fields
}

func NewCluster

func NewCluster(settings *util.Settings, localNode *NodeInfo) *Cluster

func (*Cluster) AddNode

func (this *Cluster) AddNode(nodeInfo *NodeInfo)

add a node to the cluster node list

func (*Cluster) AddRequest

func (this *Cluster) AddRequest(request *http.Request)

add a request to the queue

func (*Cluster) AddToCrawlingQuene

func (this *Cluster) AddToCrawlingQuene(request *http.Request)

record a distributed request job

func (*Cluster) CrawlStatus

func (this *Cluster) CrawlStatus() *crawler.CrawlerStatus

get crawl status

func (*Cluster) Crawled

func (this *Cluster) Crawled(scrapyResult *crawler.ScrapeResult)

a request job is done: delete it from the crawling queue and increment the crawled count

func (*Cluster) DeleteDeadNode

func (this *Cluster) DeleteDeadNode(nodeName string)

func (*Cluster) ElectMaster

func (this *Cluster) ElectMaster() *NodeInfo

choose a new master node

func (*Cluster) GetMasterName

func (this *Cluster) GetMasterName() string

get master node name

func (*Cluster) GetMasterNode

func (this *Cluster) GetMasterNode() *NodeInfo

get master node

func (*Cluster) HasNode

func (this *Cluster) HasNode(nodeName string) bool

func (*Cluster) IsMasterNode

func (this *Cluster) IsMasterNode() bool

reports whether the local node is the master node

func (*Cluster) IsReady

func (this *Cluster) IsReady() bool

is cluster ready for crawl

func (*Cluster) IsSpiderRunning

func (this *Cluster) IsSpiderRunning(spiderName string) bool

is the spider running

func (*Cluster) IsStop

func (this *Cluster) IsStop() bool

reports whether all loops have stopped

func (*Cluster) Join

func (this *Cluster) Join()

func (*Cluster) MakeMasterNode

func (this *Cluster) MakeMasterNode(nodeName string)

make master node by node name

func (*Cluster) PopRequest

func (this *Cluster) PopRequest() *http.Request

pop a request from the waiting queue and add it to the crawling queue

func (*Cluster) Ready

func (this *Cluster) Ready()

func (*Cluster) StartSpider

func (this *Cluster) StartSpider(spiderName string)

when a spider is started, the cluster should record it

type ClusterInfo

type ClusterInfo struct {
	Status     int
	Name       string
	NodeList   []*NodeInfo
	LocalNode  *NodeInfo
	MasterNode *NodeInfo
}

basic cluster information

type Node

type Node struct {
	NodeInfo *NodeInfo
	Settings *util.Settings
	Cluster  *Cluster
	Crawler  *crawler.Crawler
}

func NewNode

func NewNode(settings *util.Settings, resultQuene *crawler.ResultQuene) *Node

func (*Node) AcceptRequest

func (this *Node) AcceptRequest(request *http.Request)

receive a distributed request; if the node is not running, start it

func (*Node) AcceptResult

func (this *Node) AcceptResult(scrapyResult *crawler.ScrapeResult)

handle the result of a crawled request: add the scraped requests to the cluster and tell the cluster the request is done

func (*Node) AddMasterNode

func (this *Node) AddMasterNode(masterNodeInfo *NodeInfo)

a slave node receives the master node info and changes its master node accordingly

func (*Node) AddNodeToCluster

func (this *Node) AddNodeToCluster(nodeInfo *NodeInfo)

add a node to the cluster; if this is the master node, elect a new master node and send it to the other nodes

func (*Node) AddToCrawlingQuene

func (this *Node) AddToCrawlingQuene(request *http.Request)

func (*Node) DeleteDeadNode

func (this *Node) DeleteDeadNode(nodeName string)

(master node only) pause the crawler, remove the dead node from the cluster, then unpause the crawler

func (*Node) DistributeRequest

func (this *Node) DistributeRequest(request *http.Request)

distribute a request to a chosen node and tell the cluster where the request went

func (*Node) GetAllNode

func (this *Node) GetAllNode() []*NodeInfo

get all node info

func (*Node) GetMasterName

func (this *Node) GetMasterName() string

get master name of cluster

func (*Node) GetMasterNode

func (this *Node) GetMasterNode() *NodeInfo

get master node of cluster

func (*Node) IsMasterNode

func (this *Node) IsMasterNode() bool

if this is the master node

func (*Node) IsMe

func (this *Node) IsMe(nodeName string) bool

reports whether the named node is this node itself

func (*Node) IsStop

func (this *Node) IsStop() bool

returns true if there are no requests left

func (*Node) Join

func (this *Node) Join()

func (*Node) MakeMasterNode

func (this *Node) MakeMasterNode(nodeName string)

make master node

func (*Node) PauseCrawl

func (this *Node) PauseCrawl()

pause crawl

func (*Node) Ready

func (this *Node) Ready()

func (*Node) ReportToMaster

func (this *Node) ReportToMaster(result *crawler.ScrapeResult)

report result of request to master

func (*Node) StartCrawl

func (this *Node) StartCrawl()

start the main loop for all jobs

func (*Node) StartSpider

func (this *Node) StartSpider(spiderName string) (bool, string)

if the spider is already running, return false; otherwise tell the cluster to start the spider, get its start requests, push them to the cluster, and try to start the crawler

func (*Node) StopCrawl

func (this *Node) StopCrawl()

stop all crawl job

func (*Node) UnpauseCrawl

func (this *Node) UnpauseCrawl()

unpause crawl

type NodeInfo

type NodeInfo struct {
	Name     string
	Ip       string
	Port     int
	Settings *util.Settings
}

type RequestStatus

type RequestStatus struct {
	CrawledMap   map[string]int // node + num
	CrawlingMap  map[string]map[string]*http.Request
	WaitingQuene *crawler.RequestQuene
}

receives basic requests and records crawled requests

func NewRequestStatus

func NewRequestStatus() *RequestStatus

func (*RequestStatus) Crawled

func (this *RequestStatus) Crawled(scrapyResult *crawler.ScrapeResult)

delete the request from CrawlingMap and increment its count in CrawledMap

func (*RequestStatus) DeleteDeadNode

func (this *RequestStatus) DeleteDeadNode(nodeName string)

remove the dead node's requests from CrawlingMap and add them back to the waiting queue

func (*RequestStatus) IsStop

func (this *RequestStatus) IsStop() bool

reports whether all loops have stopped

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL