Documentation
¶
Index ¶
- Constants
- Variables
- func FindAuxiliaryNonce(auxId []types.Hash, maxNonce uint32) (nonce uint32, ok bool)
- func GetAuxiliarySlot(id types.Hash, nonce, numberAuxiliaryChains uint32) (auxiliarySlot uint32)
- type AuxiliaryJob
- type AuxiliaryJobDonation
- func (j *AuxiliaryJobDonation) AppendBinary(preAllocatedBuf []byte) (data []byte, err error)
- func (j *AuxiliaryJobDonation) BufferLength() int
- func (j *AuxiliaryJobDonation) FromReader(reader utils.ReaderAndByteReader) (err error)
- func (j *AuxiliaryJobDonation) MarshalBinary() (data []byte, err error)
- func (j *AuxiliaryJobDonation) Verify(now time.Time) (ok bool, err error)
- type AuxiliaryJobDonationDataEntry
- type Client
- type GenericClient
- func (c *GenericClient) GetChainId() (id types.Hash, err error)
- func (c *GenericClient) GetJob(chainAddress string, auxiliaryHash types.Hash, height uint64, ...) (job AuxiliaryJob, same bool, err error)
- func (c *GenericClient) SubmitSolution(job AuxiliaryJob, blob []byte, proof crypto.MerkleProof, proofPath uint32, ...) (status string, err error)
- type MergeMiningGetChainIdResult
- type MergeMiningGetJobJSON
- type MergeMiningGetJobResult
- type MergeMiningSubmitSolutionJSON
- type MergeMiningSubmitSolutionResult
- type RPCJSON
- type Tag
Constants ¶
View Source
const MaxChains = 256
View Source
const MaxChainsLog2 = 8
Variables ¶
View Source
var AuxiliaryJobDonationMasterPublicKey = ed25519.PublicKey{
51, 175, 37, 73, 203, 241, 188, 115,
195, 255, 123, 53, 218, 120, 90, 74,
186, 240, 82, 178, 67, 139, 124, 91,
180, 106, 188, 181, 187, 51, 236, 10,
}
AuxiliaryJobDonationMasterPublicKey is the master public key controlled by p2pool admins. See https://github.com/SChernykh/p2pool/blob/5aea5768a7f328dbe5ba684cecab79d12fdc91cd/src/util.cpp#L63
Functions ¶
func FindAuxiliaryNonce ¶
Types ¶
type AuxiliaryJob ¶
type AuxiliaryJobDonation ¶
type AuxiliaryJobDonation struct {
SecondaryPublicKey [ed25519.PublicKeySize]byte
SecondaryPublicKeyExpiration int64
SecondarySignature [ed25519.SignatureSize]byte
Timestamp int64
Entries []AuxiliaryJobDonationDataEntry
DataSignature [ed25519.SignatureSize]byte
}
func (*AuxiliaryJobDonation) AppendBinary ¶
func (j *AuxiliaryJobDonation) AppendBinary(preAllocatedBuf []byte) (data []byte, err error)
func (*AuxiliaryJobDonation) BufferLength ¶
func (j *AuxiliaryJobDonation) BufferLength() int
func (*AuxiliaryJobDonation) FromReader ¶
func (j *AuxiliaryJobDonation) FromReader(reader utils.ReaderAndByteReader) (err error)
func (*AuxiliaryJobDonation) MarshalBinary ¶
func (j *AuxiliaryJobDonation) MarshalBinary() (data []byte, err error)
type AuxiliaryJobDonationDataEntry ¶
type AuxiliaryJobDonationDataEntry struct {
AuxId types.Hash
AuxHash types.Hash
AuxDifficulty types.Difficulty
}
func (*AuxiliaryJobDonationDataEntry) AppendBinary ¶
func (e *AuxiliaryJobDonationDataEntry) AppendBinary(preAllocatedBuf []byte) (data []byte, err error)
func (*AuxiliaryJobDonationDataEntry) BufferLength ¶
func (e *AuxiliaryJobDonationDataEntry) BufferLength() int
func (*AuxiliaryJobDonationDataEntry) MarshalBinary ¶
func (e *AuxiliaryJobDonationDataEntry) MarshalBinary() (data []byte, err error)
type Client ¶
type Client interface {
GetChainId() (id types.Hash, err error)
GetJob(chainAddress string, auxiliaryHash types.Hash, height uint64, prevId types.Hash) (job AuxiliaryJob, same bool, err error)
SubmitSolution(job AuxiliaryJob, blob []byte, proof crypto.MerkleProof, proofPath uint32, seedHash types.Hash) (status string, err error)
}
type GenericClient ¶
GenericClient implements the proposed merge-mining RPC API. See https://github.com/SChernykh/p2pool/blob/master/docs/MERGE_MINING.MD#proposed-rpc-api
func NewGenericClient ¶
func NewGenericClient(address string, client *http.Client) (*GenericClient, error)
func (*GenericClient) GetChainId ¶
func (c *GenericClient) GetChainId() (id types.Hash, err error)
func (*GenericClient) GetJob ¶
func (c *GenericClient) GetJob(chainAddress string, auxiliaryHash types.Hash, height uint64, prevId types.Hash) (job AuxiliaryJob, same bool, err error)
func (*GenericClient) SubmitSolution ¶
func (c *GenericClient) SubmitSolution(job AuxiliaryJob, blob []byte, proof crypto.MerkleProof, proofPath uint32, seedHash types.Hash) (status string, err error)
type MergeMiningGetJobJSON ¶
type MergeMiningGetJobJSON struct {
// Address A wallet address on the merge mined chain
Address string `json:"address"`
// AuxiliaryHash Merge mining job that is currently being used
AuxiliaryHash types.Hash `json:"aux_hash"`
// Height Monero height
Height uint64 `json:"height"`
// PreviousId Hash of the previous Monero block
PreviousId types.Hash `json:"prev_id"`
}
type MergeMiningGetJobResult ¶
type MergeMiningGetJobResult struct {
Result AuxiliaryJob `json:"result"`
Error string `json:"error"`
}
type MergeMiningSubmitSolutionJSON ¶
type MergeMiningSubmitSolutionJSON struct {
// AuxiliaryBlob blob of data returned by merge_mining_get_job.
AuxiliaryBlob types.Bytes `json:"aux_blob"`
// AuxiliaryHash A 32-byte hex-encoded hash of the aux_blob - the same value that was returned by merge_mining_get_job.
AuxiliaryHash types.Hash `json:"aux_hash"`
// Blob Monero block template that has enough PoW to satisfy difficulty returned by merge_mining_get_job.
// It also must have a merge mining tag in tx_extra of the coinbase transaction.
Blob types.Bytes `json:"blob"`
// MerkleProof A proof that aux_hash was included when calculating Merkle root hash from the merge mining tag
MerkleProof crypto.MerkleProof `json:"merkle_proof"`
// Path bitmap (32-bit unsigned integer) that complements MerkleProof
Path uint32 `json:"path"`
// SeedHash Key that is used to initialize RandomX dataset
SeedHash types.Hash `json:"seed_hash"`
}
type Tag ¶
func (*Tag) FromReader ¶
func (t *Tag) FromReader(reader utils.ReaderAndByteReader) error
FromReader decodes the merge mining tag located in the coinbase transaction. The format follows https://github.com/SChernykh/p2pool/blob/e6b8292d5b59692921af23613456674ccab4958b/docs/MERGE_MINING.MD
func (*Tag) MarshalBinary ¶
func (*Tag) MarshalTreeData ¶
Click to show internal directories.
Click to hide internal directories.