New Research: Supply Chain Attack on Axios Pulls Malicious Dependency from npm.Details →
Socket
Book a DemoSign in
Socket

plotman

Package Overview
Dependencies
Maintainers
1
Versions
9
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

plotman - pypi Package Compare versions

Comparing version
0.5.1
to
0.5.2
src/plotman/_tests/plotters/__init__.py
+77
import importlib.resources
import pendulum
import plotman.job
import plotman.plotters.bladebit
import plotman._tests.resources
def test_byte_by_byte_full_load() -> None:
    """Stream the full bladebit sample log one byte at a time and check the result.

    Single-byte chunks exercise the parser's incremental buffering: after the
    whole log has been fed, the parsed info must match the values transcribed
    from the bladebit.plot.log test resource.
    """
    read_bytes = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="bladebit.plot.log",
    )

    parser = plotman.plotters.bladebit.Plotter()

    # Feed one byte per update() call to stress partial-line handling.
    for byte in (bytes([byte]) for byte in read_bytes):
        parser.update(chunk=byte)

    # Expected values transcribed from the sample log resource.
    assert parser.info == plotman.plotters.bladebit.SpecificInfo(
        phase=plotman.job.Phase(major=5, minor=1),
        started_at=pendulum.datetime(2021, 8, 29, 22, 22, 0, tz=None),
        plot_id="1fc7b57baae24da78e3bea44d58ab51f162a3ed4d242bab2fbcc24f6577d88b3",
        threads=88,
        plot_size=32,
        dst_dir="/mnt/tmp/01/manual-transfer/",
        phase1_duration_raw=313.98,
        phase2_duration_raw=44.60,
        phase3_duration_raw=203.26,
        phase4_duration_raw=1.11,
        total_time_raw=582.91,
        filename="plot-k32-2021-08-29-22-22-1fc7b57baae24da78e3bea44d58ab51f162a3ed4d242bab2fbcc24f6577d88b3.plot",
        plot_name="plot-k32-2021-08-29-22-22-1fc7b57baae24da78e3bea44d58ab51f162a3ed4d242bab2fbcc24f6577d88b3",
    )
def test_log_phases() -> None:
    """Check that the bladebit parser reports the expected phase after each line.

    The .marked resource prefixes every raw log line with "major:minor," —
    the phase the parser should be in once that line has been consumed.
    """
    # TODO: CAMPid 0978413087474699698142013249869897439887
    marked = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="bladebit.marked",
    )

    plotter = plotman.plotters.bladebit.Plotter()
    mismatches = []

    for raw_line in marked.splitlines(keepends=True):
        expected_bytes, _, payload = raw_line.partition(b",")
        major_text, _, minor_text = expected_bytes.decode("utf-8").partition(":")
        expected_phase = plotman.job.Phase(
            major=int(major_text), minor=int(minor_text)
        )

        plotter.update(chunk=payload)

        if plotter.info.phase != expected_phase:  # pragma: nocov
            mismatches.append(
                [plotter.info.phase, expected_phase, payload.decode("utf-8")]
            )

    assert mismatches == []
def test_marked_log_matches() -> None:
    """Every marked-log line must be the raw log line plus a phase prefix.

    This keeps the .marked fixture from silently drifting away from the
    .plot.log fixture it annotates.
    """
    # TODO: CAMPid 909831931987460871349879878609830987138931700871340870
    marked = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="bladebit.marked",
    )
    raw = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="bladebit.plot.log",
    )

    marked_lines = marked.splitlines(keepends=True)
    raw_lines = raw.splitlines(keepends=True)

    for with_marker, expected in zip(marked_lines, raw_lines):
        _, _, stripped = with_marker.partition(b",")
        assert stripped == expected
import contextlib
import datetime
import importlib.resources
import locale
import pathlib
import typing
import click
import pendulum
import pytest
import _pytest.fixtures
import plotman.job
import plotman.plotters.chianetwork
import plotman._tests.resources
# A default-constructed SpecificInfo — presumably the "nothing parsed yet"
# baseline for comparisons in tests not visible here; verify against callers.
clean_specific_info = plotman.plotters.chianetwork.SpecificInfo()
@pytest.fixture(name="with_a_locale", params=["C", "en_US.UTF-8", "de_DE.UTF-8"])
def with_a_locale_fixture(
request: _pytest.fixtures.SubRequest,
) -> typing.Iterator[None]:
with set_locale(request.param):
yield
def test_byte_by_byte_full_load(with_a_locale: None) -> None:
    """Stream the full chianetwork sample log one byte at a time and check the result.

    Runs once per locale (via the with_a_locale fixture) to catch
    locale-dependent parsing issues; the end state must match the values
    transcribed from the chianetwork.plot.log test resource.
    """
    read_bytes = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="chianetwork.plot.log",
    )

    parser = plotman.plotters.chianetwork.Plotter()

    # Feed one byte per update() call to stress partial-line handling.
    for byte in (bytes([byte]) for byte in read_bytes):
        parser.update(chunk=byte)

    # Expected values transcribed from the sample log resource.
    assert parser.info == plotman.plotters.chianetwork.SpecificInfo(
        process_id=None,
        phase=plotman.job.Phase(major=5, minor=3),
        started_at=pendulum.datetime(2021, 7, 14, 22, 33, 24, tz=None),
        plot_id="d2540dcfcffddbfbd7e60b4aca4d54fb937db71991298fabc253f020a87ff7d4",
        buckets=128,
        threads=4,
        buffer=5000,
        plot_size=32,
        tmp_dir1="/farm/yards/902",
        tmp_dir2="/farm/yards/902/fake_tmp2",
        phase1_duration_raw=8134.66,
        phase2_duration_raw=3304.86,
        phase3_duration_raw=6515.266,
        phase4_duration_raw=425.637,
        total_time_raw=18380.426,
        copy_time_raw=178.438,
        filename="/farm/yards/902/fake_dst/plot-k32-2021-07-14-22-33-d2540dcfcffddbfbd7e60b4aca4d54fb937db71991298fabc253f020a87ff7d4.plot",
    )
@contextlib.contextmanager
def set_locale(name: str) -> typing.Generator[str, None, None]:
    """Temporarily switch the process-wide locale to *name*.

    Yields the value returned by ``locale.setlocale`` for the new locale and
    restores the previous locale on exit.  This mutates global interpreter
    state and is not thread safe.
    """
    saved = locale.setlocale(locale.LC_ALL)
    try:
        yield locale.setlocale(locale.LC_ALL, name)
    finally:
        locale.setlocale(locale.LC_ALL, saved)
def test_log_phases() -> None:
    """Check that the chianetwork parser reports the expected phase after each line.

    The .marked resource prefixes every raw log line with "major:minor," —
    the phase the parser should be in once that line has been consumed.
    """
    # TODO: CAMPid 0978413087474699698142013249869897439887
    marked = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="chianetwork.marked",
    )

    plotter = plotman.plotters.chianetwork.Plotter()
    mismatches = []

    for raw_line in marked.splitlines(keepends=True):
        expected_bytes, _, payload = raw_line.partition(b",")
        major_text, _, minor_text = expected_bytes.decode("utf-8").partition(":")
        expected_phase = plotman.job.Phase(
            major=int(major_text), minor=int(minor_text)
        )

        plotter.update(chunk=payload)

        if plotter.info.phase != expected_phase:  # pragma: nocov
            mismatches.append(
                [plotter.info.phase, expected_phase, payload.decode("utf-8")]
            )

    assert mismatches == []
def test_marked_log_matches() -> None:
    """Every marked-log line must be the raw log line plus a phase prefix.

    This keeps the .marked fixture from silently drifting away from the
    .plot.log fixture it annotates.
    """
    # TODO: CAMPid 909831931987460871349879878609830987138931700871340870
    marked = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="chianetwork.marked",
    )
    raw = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="chianetwork.plot.log",
    )

    marked_lines = marked.splitlines(keepends=True)
    raw_lines = raw.splitlines(keepends=True)

    for with_marker, expected in zip(marked_lines, raw_lines):
        _, _, stripped = with_marker.partition(b",")
        assert stripped == expected
import click
import pytest
import plotman.plotters.core
def test_command_version_already_registered_raises() -> None:
    """Registering two commands under the same version tuple must fail."""
    commands = plotman.plotters.core.Commands()
    version = (1, 2, 3)

    @commands.register(version=version)
    @click.command  # type: ignore[misc]
    def f() -> None:
        pass

    # A second registration of the identical version must be rejected.
    with pytest.raises(Exception, match=r"Version already registered:"):

        @commands.register(version=version)
        @click.command  # type: ignore[misc]
        def g() -> None:
            pass
def test_command_version_not_a_tuple_raises() -> None:
    """Versions must be tuples; registering with a plain string is rejected."""
    commands = plotman.plotters.core.Commands()
    with pytest.raises(Exception, match=r"Version must be a tuple:"):
        commands.register(version="1.2.3")  # type: ignore[arg-type]
def test_command_getitem_works() -> None:
    """A registered command is retrievable by indexing with its version tuple."""
    commands = plotman.plotters.core.Commands()
    version = (1, 2, 3)

    @commands.register(version=version)
    @click.command  # type: ignore[misc]
    def f() -> None:
        pass

    assert commands[version] == f
import importlib.resources
import pathlib
import typing
import attr
import click
import pytest
import _pytest
import plotman.errors
import plotman.job
import plotman.plotters
import plotman.plotters.bladebit
import plotman.plotters.chianetwork
import plotman.plotters.madmax
import plotman._tests.resources
@pytest.fixture(name="line_decoder")
def line_decoder_fixture() -> typing.Iterator[plotman.plotters.LineDecoder]:
decoder = plotman.plotters.LineDecoder()
yield decoder
# assert decoder.buffer == ""
def test_decoder_single_chunk(line_decoder: plotman.plotters.LineDecoder) -> None:
    """One chunk holding several complete UTF-8 lines decodes in a single call."""
    result = line_decoder.update(b"abc\n123\n\xc3\xa4\xc3\xab\xc3\xaf\n")
    assert result == ["abc", "123", "äëï"]
def test_decoder_individual_byte_chunks(
    line_decoder: plotman.plotters.LineDecoder,
) -> None:
    """Feeding the identical data one byte at a time yields the same lines.

    This covers multi-byte UTF-8 characters split across chunk boundaries.
    """
    data = b"abc\n123\n\xc3\xa4\xc3\xab\xc3\xaf\n"
    collected: typing.List[str] = []
    for index in range(len(data)):
        collected.extend(line_decoder.update(data[index : index + 1]))
    assert collected == ["abc", "123", "äëï"]
def test_decoder_partial_line_with_final(
    line_decoder: plotman.plotters.LineDecoder,
) -> None:
    """A trailing line without a newline is flushed when final=True is passed."""
    collected = list(line_decoder.update(b"abc\n123\n\xc3\xa4\xc3\xab"))
    collected += line_decoder.update(b"\xc3\xaf", final=True)
    assert collected == ["abc", "123", "äëï"]
def test_decoder_partial_line_without_final(
    line_decoder: plotman.plotters.LineDecoder,
) -> None:
    """Without final=True the trailing partial line stays buffered, not emitted."""
    collected = list(line_decoder.update(b"abc\n123\n\xc3\xa4\xc3\xab"))
    collected += line_decoder.update(b"\xc3\xaf")
    assert collected == ["abc", "123"]
@pytest.mark.parametrize(
    argnames=["resource_name", "correct_plotter"],
    argvalues=[
        ["chianetwork.plot.log", plotman.plotters.chianetwork.Plotter],
        ["madmax.plot.log", plotman.plotters.madmax.Plotter],
    ],
)
def test_plotter_identifies_log(
    resource_name: str,
    correct_plotter: typing.Type[plotman.plotters.Plotter],
) -> None:
    """get_plotter_from_log() picks the right plotter class for each sample log."""
    with importlib.resources.open_text(
        package=plotman._tests.resources,
        resource=resource_name,
        encoding="utf-8",
    ) as f:
        # The open file object is itself an iterable of lines.
        plotter = plotman.plotters.get_plotter_from_log(lines=f)

    assert plotter == correct_plotter
def test_plotter_not_identified() -> None:
    """Log lines matching no known plotter raise UnableToIdentifyPlotterFromLogError."""
    unrecognizable = ["a", "b"]
    with pytest.raises(plotman.errors.UnableToIdentifyPlotterFromLogError):
        plotman.plotters.get_plotter_from_log(lines=unrecognizable)
@attr.frozen
class CommandLineExample:
    """A sample plotter command line plus the expected identification/parse results."""

    # argv-style command line under test.
    line: typing.List[str]
    # Plotter class the line should map to; None when it must not match any.
    plotter: typing.Optional[typing.Type[plotman.plotters.Plotter]]
    # Expected result of parsing the line, where parsing is exercised.
    parsed: typing.Optional[plotman.job.ParsedChiaPlotsCreateCommand] = None
    # Working directory used to resolve relative paths during parsing.
    cwd: str = ""
# Parsed-argument values produced by a bare `bladebit` invocation; examples
# below overlay their specific overrides onto this baseline.  Sorted by key
# for a deterministic comparison order.
default_bladebit_arguments = dict(
    sorted(
        {
            "threads": None,
            "count": 1,
            "farmer_key": None,
            "pool_key": None,
            "pool_contract": None,
            "warm_start": False,
            "plot_id": None,
            "memo": None,
            "show_memo": False,
            "verbose": False,
            "no_numa": False,
            "no_cpu_affinity": False,
            "out_dir": pathlib.PosixPath("."),
        }.items()
    )
)
# Parsed-argument values produced by a bare `chia plots create` invocation;
# examples below overlay their specific overrides onto this baseline.  Sorted
# by key for a deterministic comparison order.
default_chia_network_arguments = dict(
    sorted(
        {
            "size": 32,
            "override_k": False,
            "num": 1,
            "buffer": 3389,
            "num_threads": 2,
            "buckets": 128,
            "alt_fingerprint": None,
            "pool_contract_address": None,
            "farmer_public_key": None,
            "pool_public_key": None,
            "tmp_dir": ".",
            "tmp2_dir": None,
            "final_dir": ".",
            "plotid": None,
            "memo": None,
            "nobitfield": False,
            "exclude_final_dir": False,
        }.items()
    )
)
# Parsed-argument values produced by a bare `chia_plot` (madmax) invocation;
# examples below overlay their specific overrides onto this baseline.  Sorted
# by key for a deterministic comparison order.
default_madmax_arguments = dict(
    sorted(
        {
            "size": 32,
            "count": 1,
            "threads": 4,
            "buckets": 256,
            "buckets3": 256,
            "tmpdir": pathlib.PosixPath("."),
            "tmpdir2": None,
            "finaldir": pathlib.PosixPath("."),
            "waitforcopy": False,
            "poolkey": None,
            "contract": None,
            "farmerkey": None,
            "tmptoggle": None,
            "rmulti2": 1,
        }.items()
    )
)
# bladebit command lines and the exact parse results they must produce:
# bare invocation, help flags, an unknown option, key/contract overrides,
# a path-prefixed executable, relative-path resolution against cwd, and
# round-trips through plotman's own create_command_line().
bladebit_command_line_examples: typing.List[CommandLineExample] = [
    CommandLineExample(
        line=["bladebit"],
        plotter=plotman.plotters.bladebit.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_bladebit_arguments},
        ),
    ),
    CommandLineExample(
        line=["bladebit", "-h"],
        plotter=plotman.plotters.bladebit.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=True,
            parameters={**default_bladebit_arguments},
        ),
    ),
    CommandLineExample(
        line=["bladebit", "--help"],
        plotter=plotman.plotters.bladebit.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=True,
            parameters={**default_bladebit_arguments},
        ),
    ),
    CommandLineExample(
        line=["bladebit", "--invalid-option"],
        plotter=plotman.plotters.bladebit.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=click.NoSuchOption("--invalid-option"),
            help=False,
            parameters={},
        ),
    ),
    CommandLineExample(
        line=["bladebit", "--pool-contract", "xch123abc", "--farmer-key", "abc123"],
        plotter=plotman.plotters.bladebit.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_bladebit_arguments,
                "pool_contract": "xch123abc",
                "farmer_key": "abc123",
            },
        ),
    ),
    CommandLineExample(
        line=["here/there/bladebit"],
        plotter=plotman.plotters.bladebit.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_bladebit_arguments},
        ),
    ),
    CommandLineExample(
        line=[
            "bladebit",
            "final/dir",
        ],
        cwd="/cwd",
        plotter=plotman.plotters.bladebit.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_bladebit_arguments,
                # Relative output directory is resolved against cwd.
                "out_dir": pathlib.Path("/", "cwd", "final", "dir"),
            },
        ),
    ),
    CommandLineExample(
        line=plotman.plotters.bladebit.create_command_line(
            options=plotman.plotters.bladebit.Options(),
            tmpdir="",
            tmp2dir=None,
            dstdir="/farm/dst/dir",
            farmer_public_key=None,
            pool_public_key=None,
            pool_contract_address=None,
        ),
        plotter=plotman.plotters.bladebit.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_bladebit_arguments,
                "verbose": True,
                "out_dir": pathlib.Path("/farm/dst/dir"),
            },
        ),
    ),
    CommandLineExample(
        line=plotman.plotters.bladebit.create_command_line(
            options=plotman.plotters.bladebit.Options(),
            tmpdir="/farm/tmp/dir",
            tmp2dir="/farm/tmp2/dir",
            dstdir="/farm/dst/dir",
            farmer_public_key="farmerpublickey",
            pool_public_key="poolpublickey",
            pool_contract_address="poolcontractaddress",
        ),
        plotter=plotman.plotters.bladebit.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_bladebit_arguments,
                "farmer_key": "farmerpublickey",
                "pool_key": "poolpublickey",
                "pool_contract": "poolcontractaddress",
                "verbose": True,
                "out_dir": pathlib.Path("/farm/dst/dir"),
            },
        ),
    ),
]
# chia-network command lines and the exact parse results they must produce:
# several spellings of -k/--size, help flags, an unknown option, underscore
# option names, the macOS "Python" and binary-installer launch variants,
# relative-path resolution against cwd, and round-trips through plotman's own
# create_command_line().
chianetwork_command_line_examples: typing.List[CommandLineExample] = [
    CommandLineExample(
        line=["python", "chia", "plots", "create"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_chia_network_arguments},
        ),
    ),
    CommandLineExample(
        line=["python", "chia", "plots", "create", "-k", "32"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_chia_network_arguments, "size": 32},
        ),
    ),
    CommandLineExample(
        line=["python", "chia", "plots", "create", "-k32"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_chia_network_arguments, "size": 32},
        ),
    ),
    CommandLineExample(
        line=["python", "chia", "plots", "create", "--size", "32"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_chia_network_arguments, "size": 32},
        ),
    ),
    CommandLineExample(
        line=["python", "chia", "plots", "create", "--size=32"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_chia_network_arguments, "size": 32},
        ),
    ),
    CommandLineExample(
        # "--size32" (no separator) is not a valid option spelling.
        line=["python", "chia", "plots", "create", "--size32"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=click.NoSuchOption("--size32"),
            help=False,
            parameters={},
        ),
    ),
    CommandLineExample(
        line=["python", "chia", "plots", "create", "-h"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=True,
            parameters={**default_chia_network_arguments},
        ),
    ),
    CommandLineExample(
        line=["python", "chia", "plots", "create", "--help"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=True,
            parameters={**default_chia_network_arguments},
        ),
    ),
    CommandLineExample(
        line=["python", "chia", "plots", "create", "-k", "32", "--help"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=True,
            parameters={**default_chia_network_arguments},
        ),
    ),
    CommandLineExample(
        line=["python", "chia", "plots", "create", "--invalid-option"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=click.NoSuchOption("--invalid-option"),
            help=False,
            parameters={},
        ),
    ),
    CommandLineExample(
        line=[
            "python",
            "chia",
            "plots",
            "create",
            "--pool_contract_address",
            "xch123abc",
            "--farmer_public_key",
            "abc123",
        ],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_chia_network_arguments,
                "pool_contract_address": "xch123abc",
                "farmer_public_key": "abc123",
            },
        ),
    ),
    # macOS system python
    CommandLineExample(
        line=["Python", "chia", "plots", "create"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_chia_network_arguments},
        ),
    ),
    # binary installer
    CommandLineExample(
        line=["chia", "plots", "create", "--final_dir", "/blue/red"],
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_chia_network_arguments,
                "final_dir": "/blue/red",
            },
        ),
    ),
    CommandLineExample(
        line=[
            "python",
            "chia",
            "plots",
            "create",
            "--final_dir",
            "final/dir",
            "--tmp_dir",
            "tmp/dir",
            "--tmp2_dir",
            "tmp2/dir",
        ],
        cwd="/cwd",
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_chia_network_arguments,
                # Relative directories are resolved against cwd.
                "final_dir": "/cwd/final/dir",
                "tmp_dir": "/cwd/tmp/dir",
                "tmp2_dir": "/cwd/tmp2/dir",
            },
        ),
    ),
    CommandLineExample(
        line=plotman.plotters.chianetwork.create_command_line(
            options=plotman.plotters.chianetwork.Options(),
            tmpdir="/farm/tmp/dir",
            tmp2dir=None,
            dstdir="/farm/dst/dir",
            farmer_public_key=None,
            pool_public_key=None,
            pool_contract_address=None,
        ),
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_chia_network_arguments,
                "final_dir": "/farm/dst/dir",
                "tmp_dir": "/farm/tmp/dir",
            },
        ),
    ),
    CommandLineExample(
        line=plotman.plotters.chianetwork.create_command_line(
            options=plotman.plotters.chianetwork.Options(
                e=True,
                x=True,
            ),
            tmpdir="/farm/tmp/dir",
            tmp2dir="/farm/tmp2/dir",
            dstdir="/farm/dst/dir",
            farmer_public_key="farmerpublickey",
            pool_public_key="poolpublickey",
            pool_contract_address="poolcontractaddress",
        ),
        plotter=plotman.plotters.chianetwork.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_chia_network_arguments,
                "exclude_final_dir": True,
                "nobitfield": True,
                "farmer_public_key": "farmerpublickey",
                "pool_public_key": "poolpublickey",
                "pool_contract_address": "poolcontractaddress",
                "final_dir": "/farm/dst/dir",
                "tmp_dir": "/farm/tmp/dir",
                "tmp2_dir": "/farm/tmp2/dir",
            },
        ),
    ),
]
# madmax (`chia_plot`) command lines and the exact parse results they must
# produce: bare invocation, help flags, an unknown option, key/contract
# overrides, a path-prefixed executable, relative-path resolution against
# cwd, and round-trips through plotman's own create_command_line().
madmax_command_line_examples: typing.List[CommandLineExample] = [
    CommandLineExample(
        line=["chia_plot"],
        plotter=plotman.plotters.madmax.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_madmax_arguments},
        ),
    ),
    CommandLineExample(
        line=["chia_plot", "-h"],
        plotter=plotman.plotters.madmax.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=True,
            parameters={**default_madmax_arguments},
        ),
    ),
    CommandLineExample(
        line=["chia_plot", "--help"],
        plotter=plotman.plotters.madmax.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=True,
            parameters={**default_madmax_arguments},
        ),
    ),
    CommandLineExample(
        line=["chia_plot", "--invalid-option"],
        plotter=plotman.plotters.madmax.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=click.NoSuchOption("--invalid-option"),
            help=False,
            parameters={},
        ),
    ),
    CommandLineExample(
        line=["chia_plot", "--contract", "xch123abc", "--farmerkey", "abc123"],
        plotter=plotman.plotters.madmax.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_madmax_arguments,
                "contract": "xch123abc",
                "farmerkey": "abc123",
            },
        ),
    ),
    CommandLineExample(
        line=["here/there/chia_plot"],
        plotter=plotman.plotters.madmax.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={**default_madmax_arguments},
        ),
    ),
    CommandLineExample(
        line=[
            "chia_plot",
            "--finaldir",
            "final/dir",
            "--tmpdir",
            "tmp/dir",
            "--tmpdir2",
            "tmp/dir2",
        ],
        cwd="/cwd",
        plotter=plotman.plotters.madmax.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_madmax_arguments,
                # Relative directories are resolved against cwd.
                "finaldir": pathlib.Path("/", "cwd", "final", "dir"),
                "tmpdir": pathlib.Path("/", "cwd", "tmp", "dir"),
                "tmpdir2": pathlib.Path("/", "cwd", "tmp", "dir2"),
            },
        ),
    ),
    CommandLineExample(
        line=plotman.plotters.madmax.create_command_line(
            options=plotman.plotters.madmax.Options(),
            tmpdir="/farm/tmp/dir",
            tmp2dir=None,
            dstdir="/farm/dst/dir",
            farmer_public_key=None,
            pool_public_key=None,
            pool_contract_address=None,
        ),
        plotter=plotman.plotters.madmax.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_madmax_arguments,
                "finaldir": pathlib.Path("/farm/dst/dir"),
                "tmpdir": pathlib.Path("/farm/tmp/dir"),
            },
        ),
    ),
    CommandLineExample(
        line=plotman.plotters.madmax.create_command_line(
            options=plotman.plotters.madmax.Options(),
            tmpdir="/farm/tmp/dir",
            tmp2dir="/farm/tmp2/dir",
            dstdir="/farm/dst/dir",
            farmer_public_key="farmerpublickey",
            pool_public_key="poolpublickey",
            pool_contract_address="poolcontractaddress",
        ),
        plotter=plotman.plotters.madmax.Plotter,
        parsed=plotman.job.ParsedChiaPlotsCreateCommand(
            error=None,
            help=False,
            parameters={
                **default_madmax_arguments,
                "farmerkey": "farmerpublickey",
                "poolkey": "poolpublickey",
                "contract": "poolcontractaddress",
                "finaldir": pathlib.Path("/farm/dst/dir"),
                "tmpdir": pathlib.Path("/farm/tmp/dir"),
                "tmpdir2": pathlib.Path("/farm/tmp2/dir"),
            },
        ),
    ),
]
# Every example that should be recognized and parsed as a plotting command line.
command_line_examples: typing.List[CommandLineExample] = (
    bladebit_command_line_examples
    + chianetwork_command_line_examples
    + madmax_command_line_examples
)
# Command lines that must NOT be identified as plotting invocations,
# including near-misses containing "chia" or "chia_plot" as substrings.
not_command_line_examples: typing.List[CommandLineExample] = [
    CommandLineExample(line=["something/else"], plotter=None),
    CommandLineExample(line=["another"], plotter=None),
    CommandLineExample(line=["some/chia/not"], plotter=None),
    CommandLineExample(line=["chia", "other"], plotter=None),
    CommandLineExample(line=["chia_plot/blue"], plotter=None),
    CommandLineExample(line=[], plotter=None, parsed=None),
]
@pytest.fixture(
    name="command_line_example",
    params=command_line_examples,
    ids=lambda param: repr(param.line),
)
def command_line_example_fixture(
    request: _pytest.fixtures.SubRequest,
) -> CommandLineExample:
    """Parametrize tests over every valid plotter command line example."""
    # This fixture returns (rather than yields), so the return annotation is
    # the value type itself, not an Iterator.
    return request.param  # type: ignore[no-any-return]
@pytest.fixture(
    name="not_command_line_example",
    params=not_command_line_examples,
    ids=lambda param: repr(param.line),
)
def not_command_line_example_fixture(
    request: _pytest.fixtures.SubRequest,
) -> CommandLineExample:
    """Parametrize tests over every non-plotting command line example."""
    # This fixture returns (rather than yields), so the return annotation is
    # the value type itself, not an Iterator.
    return request.param  # type: ignore[no-any-return]
def test_plotter_identifies_command_line(
    command_line_example: CommandLineExample,
) -> None:
    """Each plotting example's command line maps to its expected plotter class."""
    identified = plotman.plotters.get_plotter_from_command_line(
        command_line=command_line_example.line,
    )
    assert identified == command_line_example.plotter
def test_plotter_fails_to_identify_command_line(
    not_command_line_example: CommandLineExample,
) -> None:
    """Non-plotting command lines raise instead of matching any plotter."""
    command_line = not_command_line_example.line
    with pytest.raises(plotman.plotters.UnableToIdentifyCommandLineError):
        plotman.plotters.get_plotter_from_command_line(command_line=command_line)
def test_is_plotting_command_line(command_line_example: CommandLineExample) -> None:
    """Every plotting example is recognized by is_plotting_command_line()."""
    recognized = plotman.plotters.is_plotting_command_line(
        command_line=command_line_example.line,
    )
    assert recognized
def test_is_not_plotting_command_line(
    not_command_line_example: CommandLineExample,
) -> None:
    """Non-plotting examples are rejected by is_plotting_command_line()."""
    recognized = plotman.plotters.is_plotting_command_line(
        command_line=not_command_line_example.line,
    )
    assert not recognized
def test_command_line_parsed_correctly(
    command_line_example: CommandLineExample,
) -> None:
    """Parsing an example command line reproduces the expected parsed form."""
    assert command_line_example.plotter is not None

    instance = command_line_example.plotter()
    instance.parse_command_line(
        command_line=command_line_example.line,
        cwd=command_line_example.cwd,
    )

    assert instance.parsed_command_line == command_line_example.parsed
import importlib.resources
import pathlib
import typing
import pendulum
import plotman.job
import plotman.plotters.madmax
import plotman._tests.resources
def test_byte_by_byte_full_load() -> None:
    """Stream the full madmax sample log one byte at a time and check the result.

    Single-byte chunks exercise the parser's incremental buffering: after the
    whole log has been fed, the parsed info must match the values transcribed
    from the madmax.plot.log test resource.
    """
    read_bytes = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="madmax.plot.log",
    )

    parser = plotman.plotters.madmax.Plotter()

    # Feed one byte per update() call to stress partial-line handling.
    for byte in (bytes([byte]) for byte in read_bytes):
        parser.update(chunk=byte)

    # Expected values transcribed from the sample log resource.
    assert parser.info == plotman.plotters.madmax.SpecificInfo(
        phase=plotman.job.Phase(major=5, minor=2),
        started_at=pendulum.datetime(2021, 7, 14, 21, 56, 0, tz=None),
        plot_id="522acbd6308af7e229281352f746449134126482cfabd51d38e0f89745d21698",
        p1_buckets=256,
        p34_buckets=256,
        threads=8,
        plot_size=32,
        tmp_dir="/farm/yards/902/",
        tmp2_dir="/farm/yards/902/fake_tmp2/",
        dst_dir="/farm/yards/902/fake_dst/",
        phase1_duration_raw=2197.52,
        phase2_duration_raw=1363.42,
        phase3_duration_raw=1320.47,
        phase4_duration_raw=86.9555,
        total_time_raw=4968.41,
        filename="",
        plot_name="plot-k32-2021-07-14-21-56-522acbd6308af7e229281352f746449134126482cfabd51d38e0f89745d21698",
    )
def test_log_phases() -> None:
    """Check that the madmax parser reports the expected phase after each line.

    The .marked resource prefixes every raw log line with "major:minor," —
    the phase the parser should be in once that line has been consumed.
    """
    # TODO: CAMPid 0978413087474699698142013249869897439887
    marked = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="madmax.marked",
    )

    plotter = plotman.plotters.madmax.Plotter()
    mismatches = []

    for raw_line in marked.splitlines(keepends=True):
        expected_bytes, _, payload = raw_line.partition(b",")
        major_text, _, minor_text = expected_bytes.decode("utf-8").partition(":")
        expected_phase = plotman.job.Phase(
            major=int(major_text), minor=int(minor_text)
        )

        plotter.update(chunk=payload)

        if plotter.info.phase != expected_phase:  # pragma: nocov
            mismatches.append(
                [plotter.info.phase, expected_phase, payload.decode("utf-8")]
            )

    assert mismatches == []
def test_marked_log_matches() -> None:
    """Every marked-log line must be the raw log line plus a phase prefix.

    This keeps the .marked fixture from silently drifting away from the
    .plot.log fixture it annotates.
    """
    # TODO: CAMPid 909831931987460871349879878609830987138931700871340870
    marked = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="madmax.marked",
    )
    raw = importlib.resources.read_binary(
        package=plotman._tests.resources,
        resource="madmax.plot.log",
    )

    marked_lines = marked.splitlines(keepends=True)
    raw_lines = raw.splitlines(keepends=True)

    for with_marker, expected in zip(marked_lines, raw_lines):
        _, _, stripped = with_marker.partition(b",")
        assert stripped == expected

Sorry, the diff of this file is not supported yet

Creating 1 plots:
Output path : /mnt/tmp/01/manual-transfer/
Thread count : 88
Warm start enabled : false
Farmer public key : b0a374845f4f4d6eab62fc4c5e17965d82ad7eee105818e5bd0cfcb46275a16acc4cd30955779bec841a716473416b21
Pool contract address : xch1u8ll2ztwhseej45d6u2zp9j4mlnzhwseccr0axqws9fl2tyj5u0svdy04y
System Memory: 348/503 GiB.
Memory required: 416 GiB.
Warning: Not enough memory available. Buffer allocation may fail.
Allocating buffers.
Generating plot 1 / 1: 1fc7b57baae24da78e3bea44d58ab51f162a3ed4d242bab2fbcc24f6577d88b3
Running Phase 1
Generating F1...
Finished F1 generation in 6.93 seconds.
Sorting F1...
Finished F1 sort in 18.23 seconds.
Forward propagating to table 2...
Pairing L/R groups...
Finished pairing L/R groups in 13.2870 seconds. Created 4294962218 pairs.
Average of 236.1403 pairs per group.
Computing Fx...
Finished computing Fx in 9.0360 seconds.
Sorting entries...
Finished sorting in 33.83 seconds.
Finished forward propagating table 2 in 56.61 seconds.
Forward propagating to table 3...
Pairing L/R groups...
Finished pairing L/R groups in 10.9170 seconds. Created 4294967296 pairs.
Average of 236.1406 pairs per group.
Computing Fx...
Finished computing Fx in 8.6420 seconds.
Sorting entries...
Finished sorting in 33.37 seconds.
Finished forward propagating table 3 in 53.39 seconds.
Forward propagating to table 4...
Pairing L/R groups...
Finished pairing L/R groups in 10.9450 seconds. Created 4294947733 pairs.
Average of 236.1396 pairs per group.
Computing Fx...
Finished computing Fx in 9.3490 seconds.
Sorting entries...
Finished sorting in 32.60 seconds.
Finished forward propagating table 4 in 53.35 seconds.
Forward propagating to table 5...
Pairing L/R groups...
Finished pairing L/R groups in 10.8420 seconds. Created 4294889963 pairs.
Average of 236.1364 pairs per group.
Computing Fx...
Finished computing Fx in 9.5180 seconds.
Sorting entries...
Finished sorting in 32.60 seconds.
Finished forward propagating table 5 in 53.42 seconds.
Forward propagating to table 6...
Pairing L/R groups...
Finished pairing L/R groups in 10.9870 seconds. Created 4294907255 pairs.
Average of 236.1373 pairs per group.
Computing Fx...
Finished computing Fx in 8.5110 seconds.
Sorting entries...
Finished sorting in 31.58 seconds.
Finished forward propagating table 6 in 51.54 seconds.
Forward propagating to table 7...
Pairing L/R groups...
Finished pairing L/R groups in 11.0050 seconds. Created 4294773122 pairs.
Average of 236.1300 pairs per group.
Computing Fx...
Finished computing Fx in 9.0510 seconds.
Finished forward propagating table 7 in 20.51 seconds.
Finished Phase 1 in 313.98 seconds.
Running Phase 2
Prunning table 6...
Finished prunning table 6 in 0.59 seconds.
Prunning table 5...
Finished prunning table 5 in 11.53 seconds.
Prunning table 4...
Finished prunning table 4 in 10.86 seconds.
Prunning table 3...
Finished prunning table 3 in 10.57 seconds.
Prunning table 2...
Finished prunning table 2 in 10.60 seconds.
Finished Phase 2 in 44.60 seconds.
Running Phase 3
Compressing tables 1 and 2...
Finished compressing tables 1 and 2 in 31.20 seconds
Table 1 now has 3429423491 / 4294962218 entries ( 79.85% ).
Compressing tables 2 and 3...
Finished compressing tables 2 and 3 in 35.05 seconds
Table 2 now has 3439923954 / 4294967296 entries ( 80.09% ).
Compressing tables 3 and 4...
Finished compressing tables 3 and 4 in 32.41 seconds
Table 3 now has 3466101892 / 4294947733 entries ( 80.70% ).
Compressing tables 4 and 5...
Finished compressing tables 4 and 5 in 33.40 seconds
Table 4 now has 3532981230 / 4294889963 entries ( 82.26% ).
Compressing tables 5 and 6...
Finished compressing tables 5 and 6 in 34.78 seconds
Table 5 now has 3713621551 / 4294907255 entries ( 86.47% ).
Compressing tables 6 and 7...
Finished compressing tables 6 and 7 in 36.41 seconds
Table 6 now has 4294773122 / 4294773122 entries ( 100.00% ).
Finished Phase 3 in 203.26 seconds.
Running Phase 4
Writing P7.
Finished writing P7 in 0.71 seconds.
Writing C1 table.
Finished writing C1 table in 0.00 seconds.
Writing C2 table.
Finished writing C2 table in 0.00 seconds.
Writing C3 table.
Finished writing C3 table in 0.40 seconds.
Finished Phase 4 in 1.11 seconds.
Writing final plot tables to disk
Plot /mnt/tmp/01/manual-transfer/plot-k32-2021-08-29-22-22-1fc7b57baae24da78e3bea44d58ab51f162a3ed4d242bab2fbcc24f6577d88b3.plot finished writing to disk:
Table 1 pointer : 4096 ( 0x0000000000001000 )
Table 2 pointer : 14839635968 ( 0x000000037482e000 )
Table 3 pointer : 28822732800 ( 0x00000006b5f80000 )
Table 4 pointer : 42912239616 ( 0x00000009fdc4d000 )
Table 5 pointer : 57273606144 ( 0x0000000d55c5e000 )
Table 6 pointer : 72369262592 ( 0x00000010d98b5000 )
Table 7 pointer : 89827270656 ( 0x00000014ea1f6000 )
C1 table pointer : 107543220224 ( 0x000000190a135000 )
C2 table pointer : 107544940544 ( 0x000000190a2d9000 )
C3 table pointer : 107544944640 ( 0x000000190a2da000 )
Finished writing tables to disk in 19.96 seconds.
Finished plotting in 582.91 seconds (9.72 minutes).

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Multi-threaded pipelined Chia k32 plotter - 974d6e5
(Sponsored by Flexpool.io - Check them out if you're looking for a secure and scalable Chia pool)
Final Directory: /farm/yards/902/fake_dst/
Number of Plots: 1
Crafting plot 1 out of 1
Process ID: 190552
Number of Threads: 8
Number of Buckets P1: 2^8 (256)
Number of Buckets P3+P4: 2^8 (256)
Pool Puzzle Hash: e1fff5096ebc3399568dd714209655dfe62bba19c606fe980e8153f52c92a71f
Farmer Public Key: b0a374845f4f4d6eab62fc4c5e17965d82ad7eee105818e5bd0cfcb46275a16acc4cd30955779bec841a716473416b21
Working Directory: /farm/yards/902/
Working Directory 2: /farm/yards/902/fake_tmp2/
Plot Name: plot-k32-2021-07-14-21-56-522acbd6308af7e229281352f746449134126482cfabd51d38e0f89745d21698
[P1] Table 1 took 24.6839 sec
[P1] Table 2 took 300.748 sec, found 4295036171 matches
[P1] Table 3 took 345.724 sec, found 4295004796 matches
[P1] Table 4 took 426.064 sec, found 4295077044 matches
[P1] Table 5 took 424.572 sec, found 4295076857 matches
[P1] Table 6 took 389.573 sec, found 4295015692 matches
[P1] Table 7 took 286.065 sec, found 4294961988 matches
Phase 1 took 2197.52 sec
[P2] max_table_size = 4295077044
[P2] Table 7 scan took 39.6625 sec
[P2] Table 7 rewrite took 95.6021 sec, dropped 0 entries (0 %)
[P2] Table 6 scan took 105.963 sec
[P2] Table 6 rewrite took 142.273 sec, dropped 581280956 entries (13.5338 %)
[P2] Table 5 scan took 106.798 sec
[P2] Table 5 rewrite took 132.968 sec, dropped 762014431 entries (17.7416 %)
[P2] Table 4 scan took 99.019 sec
[P2] Table 4 rewrite took 129.953 sec, dropped 828921602 entries (19.2993 %)
[P2] Table 3 scan took 87.3119 sec
[P2] Table 3 rewrite took 151.927 sec, dropped 855040283 entries (19.9078 %)
[P2] Table 2 scan took 103.825 sec
[P2] Table 2 rewrite took 159.486 sec, dropped 865588810 entries (20.1532 %)
Phase 2 took 1363.42 sec
Wrote plot header with 252 bytes
[P3-1] Table 2 took 134.927 sec, wrote 3429447361 right entries
[P3-2] Table 2 took 101.677 sec, wrote 3429447361 left entries, 3429447361 final
[P3-1] Table 3 took 149.549 sec, wrote 3439964513 right entries
[P3-2] Table 3 took 81.7904 sec, wrote 3439964513 left entries, 3439964513 final
[P3-1] Table 4 took 120.011 sec, wrote 3466155442 right entries
[P3-2] Table 4 took 78.9534 sec, wrote 3466155442 left entries, 3466155442 final
[P3-1] Table 5 took 124.946 sec, wrote 3533062426 right entries
[P3-2] Table 5 took 82.0401 sec, wrote 3533062426 left entries, 3533062426 final
[P3-1] Table 6 took 129.488 sec, wrote 3713734736 right entries
[P3-2] Table 6 took 85.6095 sec, wrote 3713734736 left entries, 3713734736 final
[P3-1] Table 7 took 125.34 sec, wrote 4294961988 right entries
[P3-2] Table 7 took 104.142 sec, wrote 4294961988 left entries, 4294961988 final
Phase 3 took 1320.47 sec, wrote 21877326466 entries to final plot
[P4] Starting to write C1 and C3 tables
[P4] Finished writing C1 and C3 tables
[P4] Writing C2 table
[P4] Finished writing C2 table
Phase 4 took 86.9555 sec, final plot size is 108836238815 bytes
Total plot creation time was 4968.41 sec (82.8069 min)
Started copy to /farm/yards/902/fake_dst/plot-k32-2021-07-14-21-56-522acbd6308af7e229281352f746449134126482cfabd51d38e0f89745d21698.plot
Renamed final plot to /farm/yards/902/fake_dst/plot-k32-2021-07-14-21-56-522acbd6308af7e229281352f746449134126482cfabd51d38e0f89745d21698.plot
class PlotmanError(Exception):
    """Base class for all plotman exceptions.

    Exists only as a common ancestor for the package's error hierarchy; it
    is never raised directly.
    """
class UnableToIdentifyPlotterFromLogError(PlotmanError):
    """Raised when no plotter definition recognizes a log file."""

    def __init__(self) -> None:
        message = "Failed to identify the plotter definition for parsing log"
        super().__init__(message)
import codecs
import collections
import functools
import pathlib
import re
import typing
import attr
import click
import pendulum
import typing_extensions
import plotman.job
import plotman.plotters.core
T = typing.TypeVar("T")
class UnableToIdentifyCommandLineError(Exception):
    """Raised when a process command line matches no known plotter."""
@attr.mutable
class LineDecoder:
    """Incrementally decode UTF-8 byte chunks into complete text lines.

    Bytes are buffered until at least one newline (or the final chunk)
    arrives, so multi-byte characters and partial lines split across chunks
    are handled correctly.
    """

    decoder: codecs.IncrementalDecoder = attr.ib(
        factory=lambda: codecs.getincrementaldecoder(encoding="utf-8")(),
    )
    buffer: str = ""

    def update(self, chunk: bytes, final: bool = False) -> typing.List[str]:
        """Feed ``chunk``; return any lines completed so far.

        When ``final`` is true the remaining buffer is flushed as a last
        (possibly newline-less) line.
        """
        self.buffer += self.decoder.decode(input=chunk, final=final)

        if final:
            cut = len(self.buffer)
        else:
            last_newline = self.buffer.rfind("\n")
            if last_newline == -1:
                # no complete line yet; keep buffering
                return []
            cut = last_newline + 1

        complete, self.buffer = self.buffer[:cut], self.buffer[cut:]
        return complete.splitlines()
# from https://github.com/altendky/qtrio/blob/e891874bae70a8671b969a4f9de25ea160bdf211/qtrio/_util.py#L17-L42
class ProtocolChecker(typing.Generic[T]):
"""Instances of this class can be used as decorators that will result in type hint
checks to verifying that other classes implement a given protocol. Generally you
would create a single instance where you define each protocol and then use that
instance as the decorator. Note that this usage is, at least in part, due to
Python not supporting type parameter specification in the ``@`` decorator
expression.
.. code-block:: python
import typing
class MyProtocol(typing.Protocol):
def a_method(self): ...
check_my_protocol = qtrio._util.ProtocolChecker[MyProtocol]()
@check_my_protocol
class AClass:
def a_method(self):
return 42092
"""
def __call__(self, cls: typing.Type[T]) -> typing.Type[T]:
return cls
# TODO: should use pendulum without these helpers
def duration_to_minutes(duration: float) -> int:
    """Convert a duration in seconds to whole minutes (Python rounding)."""
    minutes = duration / 60
    return round(minutes)
# TODO: should use pendulum without these helpers
def duration_to_hours(duration: float) -> float:
    """Convert a duration in seconds to hours, rounded to two decimals."""
    hours = duration / 60 / 60
    return round(hours, 2)
@attr.frozen
class CommonInfo:
    """Plotter-agnostic summary of a single plotting job.

    Each plotter module's SpecificInfo converts itself to this shape (via
    ``SpecificInfo.common()``) so the rest of plotman can treat all
    plotters uniformly.  All ``*_raw`` durations are in seconds as reported
    by the plotter's log; the derived properties below convert them.
    """

    # plotter type name, e.g. "bladebit" or "chia"
    type: str
    phase: plotman.job.Phase
    tmpdir: str
    tmp2dir: str
    dstdir: str
    buckets: int
    threads: int
    filename: str
    # presumably the chia -b job buffer size; not set by all plotters — confirm
    buffer: typing.Optional[int] = None
    plot_size: int = 0
    phase1_duration_raw: float = 0
    phase2_duration_raw: float = 0
    phase3_duration_raw: float = 0
    phase4_duration_raw: float = 0
    total_time_raw: float = 0
    copy_time_raw: float = 0
    started_at: typing.Optional[pendulum.DateTime] = None
    tmp_files: typing.List[pathlib.Path] = attr.ib(factory=list)
    plot_id: typing.Optional[str] = None
    process_id: typing.Optional[int] = None
    completed: bool = False
    # Phase 1 duration
    @property
    def phase1_duration(self) -> int:
        return round(self.phase1_duration_raw)
    @property
    def phase1_duration_minutes(self) -> int:
        return duration_to_minutes(self.phase1_duration_raw)
    @property
    def phase1_duration_hours(self) -> float:
        return duration_to_hours(self.phase1_duration_raw)
    # Phase 2 duration
    @property
    def phase2_duration(self) -> int:
        return round(self.phase2_duration_raw)
    @property
    def phase2_duration_minutes(self) -> int:
        return duration_to_minutes(self.phase2_duration_raw)
    @property
    def phase2_duration_hours(self) -> float:
        return duration_to_hours(self.phase2_duration_raw)
    # Phase 3 duration
    @property
    def phase3_duration(self) -> int:
        return round(self.phase3_duration_raw)
    @property
    def phase3_duration_minutes(self) -> int:
        return duration_to_minutes(self.phase3_duration_raw)
    @property
    def phase3_duration_hours(self) -> float:
        return duration_to_hours(self.phase3_duration_raw)
    # Phase 4 duration
    @property
    def phase4_duration(self) -> int:
        return round(self.phase4_duration_raw)
    @property
    def phase4_duration_minutes(self) -> int:
        return duration_to_minutes(self.phase4_duration_raw)
    @property
    def phase4_duration_hours(self) -> float:
        return duration_to_hours(self.phase4_duration_raw)
    # Total time
    @property
    def total_time(self) -> int:
        return round(self.total_time_raw)
    @property
    def total_time_minutes(self) -> int:
        return duration_to_minutes(self.total_time_raw)
    @property
    def total_time_hours(self) -> float:
        return duration_to_hours(self.total_time_raw)
    # Copy time
    @property
    def copy_time(self) -> int:
        return round(self.copy_time_raw)
    @property
    def copy_time_minutes(self) -> int:
        return duration_to_minutes(self.copy_time_raw)
    @property
    def copy_time_hours(self) -> float:
        return duration_to_hours(self.copy_time_raw)
class SpecificInfo(typing_extensions.Protocol):
    """Structural type for per-plotter parsed-state classes."""

    def common(self) -> CommonInfo:
        """Return this info converted to the plotter-agnostic CommonInfo."""
        ...
# Decorator instance used to have the type checker verify implementations.
check_SpecificInfo = ProtocolChecker[SpecificInfo]()
class LineHandler(typing_extensions.Protocol, typing.Generic[T]):
    """Structural type for log-line handler callbacks.

    A handler receives the regex match for a line plus the current info
    object and returns a new (possibly updated) info object.
    """

    def __call__(self, match: typing.Match[str], info: T) -> T:
        ...
@attr.mutable
class RegexLineHandlers(typing.Generic[T]):
    """Registry mapping compiled regular expressions to handler callbacks."""

    mapping: typing.Dict[typing.Pattern[str], typing.List[LineHandler[T]]] = attr.ib(
        factory=lambda: collections.defaultdict(list),
    )

    def register(
        self, expression: str
    ) -> typing.Callable[[LineHandler[T]], LineHandler[T]]:
        """Return a decorator that registers a handler for ``expression``."""

        def decorator(handler: LineHandler[T]) -> LineHandler[T]:
            self.mapping[re.compile(expression)].append(handler)
            return handler

        return decorator
class Plotter(typing_extensions.Protocol):
    """Structural type implemented by each plotter module's Plotter class."""

    # result of parse_command_line(), once it has been called
    parsed_command_line: typing.Optional[plotman.job.ParsedChiaPlotsCreateCommand]

    def __init__(self) -> None:
        ...

    def common_info(self) -> CommonInfo:
        """Return the plotter-agnostic view of the parsed state."""
        ...

    @classmethod
    def identify_log(cls, line: str) -> bool:
        """Return True when the log line identifies this plotter."""
        ...

    @classmethod
    def identify_process(cls, command_line: typing.List[str]) -> bool:
        """Return True when the command line belongs to this plotter."""
        ...

    def parse_command_line(self, command_line: typing.List[str], cwd: str) -> None:
        """Record the parsed command line, resolving paths against ``cwd``."""
        ...

    def update(self, chunk: bytes) -> SpecificInfo:
        """Feed raw log bytes and return the updated specific info."""
        ...
# Decorator instance used to have the type checker verify implementations.
check_Plotter = ProtocolChecker[Plotter]()
def all_plotters() -> typing.List[typing.Type[Plotter]]:
    """Return every supported plotter definition class."""
    # TODO: maybe avoid the import loop some other way
    import plotman.plotters.bladebit
    import plotman.plotters.chianetwork
    import plotman.plotters.madmax

    modules = [
        plotman.plotters.bladebit,
        plotman.plotters.chianetwork,
        plotman.plotters.madmax,
    ]
    return [module.Plotter for module in modules]
def get_plotter_from_log(lines: typing.Iterable[str]) -> typing.Type[Plotter]:
    """Return the plotter class whose log format matches one of ``lines``.

    Raises plotman.errors.UnableToIdentifyPlotterFromLogError when no line
    is recognized by any plotter.
    """
    # Local import (matching all_plotters()'s style) to avoid an import loop
    # and to guarantee the module is loaded before the raise below; the
    # previous code referenced plotman.errors without importing it anywhere
    # in this module.  The unused chianetwork/madmax imports were removed —
    # all_plotters() performs its own imports.
    import plotman.errors

    plotters = all_plotters()
    for line in lines:
        for plotter in plotters:
            if plotter.identify_log(line=line):
                return plotter
    raise plotman.errors.UnableToIdentifyPlotterFromLogError()
def get_plotter_from_command_line(
    command_line: typing.List[str],
) -> typing.Type[Plotter]:
    """Return the plotter class whose process signature matches.

    Raises UnableToIdentifyCommandLineError when nothing matches.
    """
    for candidate in all_plotters():
        if candidate.identify_process(command_line=command_line):
            return candidate

    raise UnableToIdentifyCommandLineError(
        "Failed to identify the plotter definition for parsing the command line",
    )
def parse_command_line_with_click(
    command: "plotman.plotters.core.CommandProtocol",
    arguments: typing.List[str],
) -> plotman.job.ParsedChiaPlotsCreateCommand:
    """Parse plotter arguments with a click command, tolerating errors.

    Help flags are stripped before parsing; their presence is reported via
    the returned object's ``help`` attribute.  Click parse errors are
    captured rather than raised.
    """
    # nice idea, but this doesn't include -h
    # help_option_names = command.get_help_option_names(ctx=context)
    help_option_names = {"--help", "-h"}
    command_arguments = [
        argument for argument in arguments if argument not in help_option_names
    ]

    error: typing.Optional[click.ClickException] = None
    params: typing.Dict[str, typing.Any] = {}
    try:
        context = command.make_context(info_name="", args=list(command_arguments))
    except click.ClickException as e:
        error = e
    else:
        params = context.params

    return plotman.job.ParsedChiaPlotsCreateCommand(
        error=error,
        help=len(arguments) > len(command_arguments),
        parameters=dict(sorted(params.items())),
    )
def is_plotting_command_line(command_line: typing.List[str]) -> bool:
    """Return True when ``command_line`` matches any known plotter."""
    try:
        get_plotter_from_command_line(command_line=command_line)
    except UnableToIdentifyCommandLineError:
        return False
    else:
        return True
# mypy: allow_untyped_decorators
import collections
import os
import pathlib
import subprocess
import typing
import attr
import click
import packaging.version
import pendulum
import plotman.job
import plotman.plotters
@attr.frozen
class Options:
    """User-configurable options for the BladeBit plotter."""

    # path or name of the bladebit binary
    executable: str = "bladebit"
    # -t / --threads; None lets bladebit pick (its default is all threads)
    threads: typing.Optional[int] = None
    # -m / --no-numa flag
    no_numa: bool = False
def check_configuration(
    options: Options, pool_contract_address: typing.Optional[str]
) -> None:
    """Verify the installed BladeBit binary supports the configuration.

    Raises when the binary is older than 1.1.0 (needed for log monitoring)
    or, if a pool contract address is configured, when it lacks the
    ``--pool-contract`` option.
    """
    version_process = subprocess.run(
        args=[options.executable, "--version"],
        capture_output=True,
        check=True,
        encoding="utf-8",
    )
    found = packaging.version.Version(version_process.stdout)
    minimum = packaging.version.Version("1.1.0")
    if found < minimum:
        raise Exception(
            f"BladeBit version {minimum} required for monitoring logs but"
            f" found: {found}"
        )

    if pool_contract_address is None:
        return

    help_process = subprocess.run(
        args=[options.executable, "--help"],
        capture_output=True,
        check=True,
        encoding="utf-8",
    )
    # TODO: report upstream
    supports_pools = (
        "--pool-contract" in help_process.stdout
        or "--pool-contract" in help_process.stderr
    )
    if not supports_pools:
        print(help_process.stdout)
        raise Exception(
            f"found BladeBit version does not support the `--pool-contract`"
            f" option for pools."
        )
def create_command_line(
    options: Options,
    tmpdir: str,
    tmp2dir: typing.Optional[str],
    dstdir: str,
    farmer_public_key: typing.Optional[str],
    pool_public_key: typing.Optional[str],
    pool_contract_address: typing.Optional[str],
) -> typing.List[str]:
    """Build the bladebit invocation for creating a single plot.

    NOTE(review): tmpdir and tmp2dir are accepted for signature parity with
    the other plotters but are not used here — presumably because bladebit
    plots in RAM; confirm before relying on them.
    """
    args = [
        options.executable,
        "-v",
        "-n",
        "1",
    ]
    if options.threads is not None:
        args.extend(["-t", str(options.threads)])
    if farmer_public_key is not None:
        args.extend(["-f", farmer_public_key])
    if pool_public_key is not None:
        args.extend(["-p", pool_public_key])
    if pool_contract_address is not None:
        args.extend(["-c", pool_contract_address])
    args.append(dstdir)
    return args
@plotman.plotters.check_SpecificInfo
@attr.frozen
class SpecificInfo:
    """Parsed state specific to the BladeBit plotter log."""

    process_id: typing.Optional[int] = None
    phase: plotman.job.Phase = plotman.job.Phase(known=False)
    started_at: typing.Optional[pendulum.DateTime] = None
    plot_id: str = ""
    threads: int = 0
    # buffer: int = 0
    plot_size: int = 32
    dst_dir: str = ""
    # durations in seconds as reported by the log
    phase1_duration_raw: float = 0
    phase2_duration_raw: float = 0
    phase3_duration_raw: float = 0
    phase4_duration_raw: float = 0
    total_time_raw: float = 0
    # copy_time_raw: float = 0
    filename: str = ""
    plot_name: str = ""

    def common(self) -> plotman.plotters.CommonInfo:
        """Translate to the plotter-agnostic CommonInfo.

        tmpdir/tmp2dir are left empty; bladebit does not report them.
        """
        return plotman.plotters.CommonInfo(
            type="bladebit",
            dstdir=self.dst_dir,
            phase=self.phase,
            tmpdir="",
            tmp2dir="",
            started_at=self.started_at,
            plot_id=self.plot_id,
            plot_size=self.plot_size,
            buckets=0,
            threads=self.threads,
            phase1_duration_raw=self.phase1_duration_raw,
            phase2_duration_raw=self.phase2_duration_raw,
            phase3_duration_raw=self.phase3_duration_raw,
            phase4_duration_raw=self.phase4_duration_raw,
            total_time_raw=self.total_time_raw,
            filename=self.filename,
        )
@plotman.plotters.check_Plotter
@attr.mutable
class Plotter:
    """Incremental parser for BladeBit plotting logs and command lines."""

    # incremental UTF-8 → lines decoder fed by update()
    decoder: plotman.plotters.LineDecoder = attr.ib(
        factory=plotman.plotters.LineDecoder
    )
    # latest parsed state; replaced (not mutated) whenever a line is handled
    info: SpecificInfo = attr.ib(factory=SpecificInfo)
    parsed_command_line: typing.Optional[
        plotman.job.ParsedChiaPlotsCreateCommand
    ] = None

    @classmethod
    def identify_log(cls, line: str) -> bool:
        """Return True when the line contains bladebit's warm-start marker."""
        return "Warm start enabled" in line

    @classmethod
    def identify_process(cls, command_line: typing.List[str]) -> bool:
        """Return True when the executable's basename is `bladebit`."""
        if len(command_line) == 0:
            return False
        return "bladebit" == os.path.basename(command_line[0]).lower()

    def common_info(self) -> plotman.plotters.CommonInfo:
        """Return the plotter-agnostic view of the current state."""
        return self.info.common()

    def parse_command_line(self, command_line: typing.List[str], cwd: str) -> None:
        """Parse a bladebit command line, resolving out_dir against ``cwd``."""
        # drop the bladebit
        arguments = command_line[1:]
        # TODO: We could at some point do version detection and pick the
        # associated command. For now we'll just use the latest one we have
        # copied.
        command = commands.latest_command()
        self.parsed_command_line = plotman.plotters.parse_command_line_with_click(
            command=command,
            arguments=arguments,
        )
        for key in ["out_dir"]:
            original: os.PathLike[str] = self.parsed_command_line.parameters.get(key) # type: ignore[assignment]
            if original is not None:
                # make relative paths absolute with respect to the process cwd
                self.parsed_command_line.parameters[key] = pathlib.Path(cwd).joinpath(
                    original
                )

    def update(self, chunk: bytes) -> SpecificInfo:
        """Feed raw log bytes; parse any newly completed lines.

        Returns the (possibly updated) SpecificInfo.
        """
        new_lines = self.decoder.update(chunk=chunk)
        for line in new_lines:
            if not self.info.phase.known:
                # first output line: move from 'unknown' to phase 0:0
                self.info = attr.evolve(
                    self.info, phase=plotman.job.Phase(major=0, minor=0)
                )
            for pattern, handler_functions in handlers.mapping.items():
                match = pattern.search(line)
                if match is None:
                    continue
                for handler_function in handler_functions:
                    self.info = handler_function(match=match, info=self.info)
                # at most one pattern is allowed to handle a given line
                break
        return self.info
handlers = plotman.plotters.RegexLineHandlers[SpecificInfo]()
@handlers.register(expression=r"^Running Phase (?P<phase>\d+)")
def running_phase(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Running Phase 1": enter phase N:0."""
    major_phase = int(match.group("phase"))
    new_phase = plotman.job.Phase(major=major_phase, minor=0)
    return attr.evolve(info, phase=new_phase)
@handlers.register(
    expression=r"^Finished Phase (?P<phase>\d+) in (?P<duration>[^ ]+) seconds."
)
def phase_finished(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Finished Phase 1 in 313.98 seconds.".

    Records the finished phase's duration and advances to the next major
    phase.
    """
    completed = int(match.group("phase"))
    elapsed = float(match.group("duration"))
    updates = {
        "phase": plotman.job.Phase(major=completed + 1, minor=0),
        f"phase{completed}_duration_raw": elapsed,
    }
    return attr.evolve(info, **updates)
@handlers.register(expression=r"^Allocating buffers\.$")
def allocating_buffers(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle "Allocating buffers.": mark startup sub-phase 0:1."""
    startup_phase = plotman.job.Phase(major=0, minor=1)
    return attr.evolve(info, phase=startup_phase)
@handlers.register(expression=r"^Finished F1 generation in")
def finished_f1(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Finished F1 generation in 6.93 seconds.": phase 1:1."""
    after_f1 = plotman.job.Phase(major=1, minor=1)
    return attr.evolve(info, phase=after_f1)
@handlers.register(expression=r"^Forward propagating to table (?P<table>\d+)")
def forward_propagating(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Forward propagating to table 2...": phase 1:table."""
    table_number = int(match.group("table"))
    return attr.evolve(info, phase=plotman.job.Phase(major=1, minor=table_number))
@handlers.register(expression=r"^ *Prunn?ing table (?P<table>\d+)")
def pruning_table(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Prunning table 6..." (sic, bladebit's spelling).

    Tables are pruned from 6 down to 2, so the table number is inverted to
    produce a monotonically rising minor phase.
    """
    table_number = int(match.group("table"))
    return attr.evolve(info, phase=plotman.job.Phase(major=2, minor=7 - table_number))
@handlers.register(expression=r"^ *Compressing tables (?P<table>\d+)")
def compressing_tables(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Compressing tables 1 and 2...": phase 3:table."""
    first_table = int(match.group("table"))
    return attr.evolve(info, phase=plotman.job.Phase(major=3, minor=first_table))
@handlers.register(expression=r"^ *Writing (?P<tag>(P7|C1|C2|C3))")
def phase_4_writing(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Writing P7.": map the table tag to a phase-4 minor."""
    phase_4_subphases = {"P7": 1, "C1": 2, "C2": 3, "C3": 4}
    subphase = phase_4_subphases[match.group("tag")]
    return attr.evolve(info, phase=plotman.job.Phase(major=4, minor=subphase))
@handlers.register(expression=r"^Generating plot .*: (?P<plot_id>[^ ]+)")
def generating_plot(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Generating plot 1 / 1: 1fc7b57b...": record the plot id."""
    startup_phase = plotman.job.Phase(major=0, minor=2)
    new_plot_id = match.group("plot_id")
    return attr.evolve(info, phase=startup_phase, plot_id=new_plot_id)
@handlers.register(expression=r"^Writing final plot tables to disk$")
def writing_final(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle "Writing final plot tables to disk": phase 5:1."""
    final_write_phase = plotman.job.Phase(major=5, minor=1)
    return attr.evolve(info, phase=final_write_phase)
@handlers.register(expression=r"^Finished plotting in (?P<duration>[^ ]+) seconds")
def total_duration(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Finished plotting in 582.91 seconds (9.72 minutes)."."""
    total_seconds = float(match.group("duration"))
    return attr.evolve(info, total_time_raw=total_seconds)
@handlers.register(expression=r"^ *Output path *: *(.+)")
def dst_dir(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Output path : /mnt/tmp/01/manual-transfer/"."""
    output_path = match.group(1)
    return attr.evolve(info, dst_dir=output_path)
@handlers.register(
    expression=r"^Plot .*/(?P<filename>(?P<name>plot-k(?P<size>\d+)-(?P<year>\d+)-(?P<month>\d+)-(?P<day>\d+)-(?P<hour>\d+)-(?P<minute>\d+)-(?P<plot_id>\w+)).plot) .*"
)
def plot_name_line(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle the finished-plot line, extracting name, size, id, and time.

    Example: "Plot /mnt/tmp/01/manual-transfer/plot-k32-2021-08-29-22-22-
    1fc7b57b....plot finished writing to disk:".
    """
    groups = match.groupdict()
    start_time = pendulum.datetime(
        year=int(groups["year"]),
        month=int(groups["month"]),
        day=int(groups["day"]),
        hour=int(groups["hour"]),
        minute=int(groups["minute"]),
        tz=None,
    )
    return attr.evolve(
        info,
        plot_size=int(groups["size"]),
        plot_name=groups["name"],
        started_at=start_time,
        filename=groups["filename"],
        plot_id=groups["plot_id"],
    )
@handlers.register(expression=r"^ *Thread count *: *(\d+)")
def threads(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Thread count : 88"."""
    thread_count = int(match.group(1))
    return attr.evolve(info, threads=thread_count)
commands = plotman.plotters.core.Commands()
# BladeBit Git on 2021-08-29 -> https://github.com/harold-b/bladebit/commit/f3fbfff43ce493ec9e02db6f72c3b44f656ef137
@commands.register(version=(0,))
@click.command()
# https://github.com/harold-b/bladebit/blob/f3fbfff43ce493ec9e02db6f72c3b44f656ef137/LICENSE
# https://github.com/harold-b/bladebit/blob/f7cf06fa685c9b1811465ecd47129402bb7548a0/src/main.cpp#L75-L108
@click.option(
    "-t",
    "--threads",
    help=(
        "Maximum number of threads to use."
        " For best performance, use all available threads (default behavior)."
        " Values below 2 are not recommended."
    ),
    type=int,
    show_default=True,
)
@click.option(
    "-n",
    "--count",
    help="Number of plots to create. Default = 1.",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-f",
    "--farmer-key",
    help="Farmer public key, specified in hexadecimal format.",
    type=str,
)
@click.option(
    "-p",
    "--pool-key",
    help=(
        "Pool public key, specified in hexadecimal format."
        " Either a pool public key or a pool contract address must be specified."
    ),
    type=str,
)
@click.option(
    "-c",
    "--pool-contract",
    help=(
        "Pool contract address, specified in hexadecimal format."
        " Address where the pool reward will be sent to."
        " Only used if pool public key is not specified."
    ),
    type=str,
)
@click.option(
    "-w",
    "--warm-start",
    help="Touch all pages of buffer allocations before starting to plot.",
    is_flag=True,
    type=bool,
    default=False,
)
@click.option(
    "-i",
    "--plot-id",
    help="Specify a plot id for debugging.",
    type=str,
)
@click.option(
    "-v",
    "--verbose",
    help="Enable verbose output.",
    is_flag=True,
    type=bool,
    default=False,
)
@click.option(
    "-m",
    "--no-numa",
    help=(
        "Disable automatic NUMA aware memory binding."
        " If you set this parameter in a NUMA system you will likely get degraded performance."
    ),
    is_flag=True,
    type=bool,
    default=False,
)
@click.argument(
    "out_dir",
    # help=(
    # "Output directory in which to output the plots." " This directory must exist."
    # ),
    type=click.Path(),
    default=pathlib.Path("."),
    # show_default=True,
)
# Placeholder command body: never executed; the decorator stack above only
# exists so bladebit command lines can be parsed with click.
def _cli_f3fbfff43ce493ec9e02db6f72c3b44f656ef137() -> None:
    """Mirror of bladebit f3fbfff's CLI options, used only for parsing."""
    pass
# BladeBit Git on 2021-08-29 -> https://github.com/harold-b/bladebit/commit/b48f262336362acd6f23c5ca9a43cfd6d244cb88
@commands.register(version=(1, 1, 0))
@click.command()
# https://github.com/harold-b/bladebit/blob/b48f262336362acd6f23c5ca9a43cfd6d244cb88/LICENSE
# https://github.com/harold-b/bladebit/blob/b48f262336362acd6f23c5ca9a43cfd6d244cb88/src/main.cpp#L77-L119
@click.option(
    "-t",
    "--threads",
    help=(
        "Maximum number of threads to use."
        " For best performance, use all available threads (default behavior)."
        " Values below 2 are not recommended."
    ),
    type=int,
    show_default=True,
)
@click.option(
    "-n",
    "--count",
    help="Number of plots to create. Default = 1.",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-f",
    "--farmer-key",
    help="Farmer public key, specified in hexadecimal format.",
    type=str,
)
@click.option(
    "-p",
    "--pool-key",
    help=(
        "Pool public key, specified in hexadecimal format."
        " Either a pool public key or a pool contract address must be specified."
    ),
    type=str,
)
@click.option(
    "-c",
    "--pool-contract",
    help=(
        "Pool contract address, specified in hexadecimal format."
        " Address where the pool reward will be sent to."
        " Only used if pool public key is not specified."
    ),
    type=str,
)
@click.option(
    "-w",
    "--warm-start",
    help="Touch all pages of buffer allocations before starting to plot.",
    is_flag=True,
    type=bool,
    default=False,
)
@click.option(
    "-i",
    "--plot-id",
    help="Specify a plot id for debugging.",
    type=str,
)
@click.option(
    "--memo",
    help="Specify a plot memo for debugging.",
    type=str,
)
@click.option(
    "--show-memo",
    # fixed typo in the user-facing help text: "the be plotted" -> "to be plotted"
    help="Output the memo of the next plot to be plotted.",
    is_flag=True,
    type=bool,
    default=False,
)
@click.option(
    "-v",
    "--verbose",
    help="Enable verbose output.",
    is_flag=True,
    type=bool,
    default=False,
)
@click.option(
    "-m",
    "--no-numa",
    help=(
        "Disable automatic NUMA aware memory binding."
        " If you set this parameter in a NUMA system you will likely get degraded performance."
    ),
    is_flag=True,
    type=bool,
    default=False,
)
@click.option(
    "--no-cpu-affinity",
    help=(
        "Disable assigning automatic thread affinity."
        " This is useful when running multiple simultaneous instances of bladebit as you can manually assign thread affinity yourself when launching bladebit."
    ),
    is_flag=True,
    type=bool,
    default=False,
)
@click.argument(
    "out_dir",
    # help=(
    # "Output directory in which to output the plots." " This directory must exist."
    # ),
    type=click.Path(),
    default=pathlib.Path("."),
    # show_default=True,
)
# Placeholder command body: never executed; the decorator stack above only
# exists so bladebit command lines can be parsed with click.
def _cli_b48f262336362acd6f23c5ca9a43cfd6d244cb88() -> None:
    """Mirror of bladebit b48f262's CLI options, used only for parsing."""
    pass
# mypy: allow_untyped_decorators
import collections
import os
import pathlib
import subprocess
import typing
import attr
import click
import packaging.version
import pendulum
import plotman.job
import plotman.plotters
def parse_chia_plot_time(s: str) -> pendulum.DateTime:
    """Parse a chia log timestamp such as "Wed Jul 14 22:33:24 2021".

    Returns a naive (tz=None) pendulum DateTime.
    """
    # This will grow to try ISO8601 as well for when Chia logs that way
    # TODO: unignore once fixed upstream
    # https://github.com/sdispater/pendulum/pull/548
    return pendulum.from_format(s, "ddd MMM DD HH:mm:ss YYYY", locale="en", tz=None) # type: ignore[arg-type]
@attr.frozen
class Options:
    """User-configurable options for the chia reference plotter."""

    # path or name of the chia binary
    executable: str = "chia"
    # -r thread count
    n_threads: int = 2
    # -u bucket count
    n_buckets: int = 128
    # -k plot size
    k: typing.Optional[int] = 32
    # -e: disable bitfield plotting when True
    e: typing.Optional[bool] = False
    # -b job buffer — presumably MiB; confirm against chia docs
    job_buffer: typing.Optional[int] = 3389
    # -x: skip adding the plot directory to the farmer when True
    x: bool = False
def check_configuration(
    options: Options, pool_contract_address: typing.Optional[str]
) -> None:
    """Verify the installed chia supports the requested configuration.

    Pool plots (a configured pool contract address) require chia >= 1.2;
    raises when the installed version is older.
    """
    if pool_contract_address is None:
        return

    version_process = subprocess.run(
        args=[options.executable, "version"],
        capture_output=True,
        check=True,
        encoding="utf-8",
    )
    found = packaging.version.Version(version_process.stdout)
    minimum = packaging.version.Version("1.2")
    if found < minimum:
        raise Exception(
            f"Chia version {minimum} required for creating pool"
            f" plots but found: {found}"
        )
def create_command_line(
    options: Options,
    tmpdir: str,
    tmp2dir: typing.Optional[str],
    dstdir: str,
    farmer_public_key: typing.Optional[str],
    pool_public_key: typing.Optional[str],
    pool_contract_address: typing.Optional[str],
) -> typing.List[str]:
    """Build the `chia plots create` invocation for the given options."""
    args = [
        options.executable,
        "plots",
        "create",
        "-k",
        str(options.k),
        "-r",
        str(options.n_threads),
        "-u",
        str(options.n_buckets),
        "-b",
        str(options.job_buffer),
        "-t",
        tmpdir,
        "-d",
        dstdir,
    ]
    if options.e:
        args.append("-e")
    if options.x:
        args.append("-x")
    if tmp2dir is not None:
        args.extend(["-2", tmp2dir])
    if farmer_public_key is not None:
        args.extend(["-f", farmer_public_key])
    if pool_public_key is not None:
        args.extend(["-p", pool_public_key])
    if pool_contract_address is not None:
        args.extend(["-c", pool_contract_address])
    return args
@plotman.plotters.check_SpecificInfo
@attr.frozen
class SpecificInfo:
    """Parsed state specific to the chia reference plotter log."""

    process_id: typing.Optional[int] = None
    phase: plotman.job.Phase = plotman.job.Phase(known=False)
    started_at: typing.Optional[pendulum.DateTime] = None
    plot_id: str = ""
    buckets: int = 0
    threads: int = 0
    buffer: int = 0
    plot_size: int = 0
    dst_dir: str = ""
    tmp_dir1: str = ""
    tmp_dir2: str = ""
    # durations in seconds as reported by the log
    phase1_duration_raw: float = 0
    phase2_duration_raw: float = 0
    phase3_duration_raw: float = 0
    phase4_duration_raw: float = 0
    total_time_raw: float = 0
    copy_time_raw: float = 0
    filename: str = ""

    def common(self) -> plotman.plotters.CommonInfo:
        """Translate to the plotter-agnostic CommonInfo."""
        return plotman.plotters.CommonInfo(
            type="chia",
            dstdir=self.dst_dir,
            phase=self.phase,
            tmpdir=self.tmp_dir1,
            tmp2dir=self.tmp_dir2,
            # a nonzero total time means the log reported completion
            completed=self.total_time_raw > 0,
            started_at=self.started_at,
            plot_id=self.plot_id,
            plot_size=self.plot_size,
            buffer=self.buffer,
            buckets=self.buckets,
            threads=self.threads,
            phase1_duration_raw=self.phase1_duration_raw,
            phase2_duration_raw=self.phase2_duration_raw,
            phase3_duration_raw=self.phase3_duration_raw,
            phase4_duration_raw=self.phase4_duration_raw,
            total_time_raw=self.total_time_raw,
            copy_time_raw=self.copy_time_raw,
            filename=self.filename,
        )
@plotman.plotters.check_Plotter
@attr.mutable
class Plotter:
    """Incremental parser for chia reference plotter logs and command lines."""

    # incremental UTF-8 → lines decoder fed by update()
    decoder: plotman.plotters.LineDecoder = attr.ib(
        factory=plotman.plotters.LineDecoder
    )
    # latest parsed state; replaced (not mutated) whenever a line is handled
    info: SpecificInfo = attr.ib(factory=SpecificInfo)
    parsed_command_line: typing.Optional[
        plotman.job.ParsedChiaPlotsCreateCommand
    ] = None

    @classmethod
    def identify_log(cls, line: str) -> bool:
        """Return True when the line mentions a chia plot-creation module."""
        segments = [
            "chia.plotting.create_plots",
            "src.plotting.create_plots",
        ]
        return any(segment in line for segment in segments)

    @classmethod
    def identify_process(cls, command_line: typing.List[str]) -> bool:
        """Return True for a `chia plots create` command line."""
        if len(command_line) == 0:
            return False
        if "python" == os.path.basename(command_line[0]).lower():
            # drop a leading python interpreter
            command_line = command_line[1:]
        return (
            len(command_line) >= 3
            and "chia" in command_line[0]
            and "plots" == command_line[1]
            and "create" == command_line[2]
        )

    def common_info(self) -> plotman.plotters.CommonInfo:
        """Return the plotter-agnostic view of the current state."""
        return self.info.common()

    def parse_command_line(self, command_line: typing.List[str], cwd: str) -> None:
        """Parse a `chia plots create` command line, resolving paths vs cwd."""
        if "python" in os.path.basename(command_line[0]).casefold():
            # drop the python
            command_line = command_line[1:]
        # drop the chia plots create
        arguments = command_line[3:]
        # TODO: We could at some point do chia version detection and pick the
        # associated command. For now we'll just use the latest one we have
        # copied.
        command = commands.latest_command()
        self.parsed_command_line = plotman.plotters.parse_command_line_with_click(
            command=command,
            arguments=arguments,
        )
        for key in ["tmp_dir", "tmp2_dir", "final_dir"]:
            original: os.PathLike[str] = self.parsed_command_line.parameters.get(key) # type: ignore[assignment]
            if original is not None:
                # make relative paths absolute with respect to the process cwd
                self.parsed_command_line.parameters[key] = os.path.join(cwd, original)
        if self.parsed_command_line.error is None and not self.parsed_command_line.help:
            self.info = attr.evolve(
                self.info,
                dst_dir=self.parsed_command_line.parameters["final_dir"],
            )

    def update(self, chunk: bytes) -> SpecificInfo:
        """Feed raw log bytes; parse any newly completed lines.

        Returns the (possibly updated) SpecificInfo.
        """
        new_lines = self.decoder.update(chunk=chunk)
        for line in new_lines:
            if not self.info.phase.known:
                # first output line: move from 'unknown' to phase 0:0
                self.info = attr.evolve(
                    self.info, phase=plotman.job.Phase(major=0, minor=0)
                )
            for pattern, handler_functions in handlers.mapping.items():
                match = pattern.search(line)
                if match is None:
                    continue
                for handler_function in handler_functions:
                    self.info = handler_function(match=match, info=self.info)
                # at most one pattern is allowed to handle a given line
                break
        return self.info
handlers = plotman.plotters.RegexLineHandlers[SpecificInfo]()
@handlers.register(expression=r"^\tBucket")
def ignore_line(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Swallow noisy per-bucket sort lines without changing any state.

    Example: "\\tBucket 0 uniform sort. Ram: 3.250GiB, u_sort min:
    0.563GiB, qs min: 0.281GiB.".
    """
    return info
@handlers.register(expression=r"^ID: (.+)$")
def plot_id(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "ID: 3eb8a379...": record the plot id."""
    new_plot_id = match.group(1)
    return attr.evolve(info, plot_id=new_plot_id)
@handlers.register(
    expression=r"^Starting phase (\d+)/4: (Forward Propagation into tmp files\.\.\. (.+))?"
)
def phase_major(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Starting phase 1/4: Forward Propagation into tmp
    files... Wed Jul 14 22:33:24 2021".

    Only the phase-1 line carries the start timestamp; other phases just
    advance the major phase.
    """
    major = int(match.group(1))
    with_new_phase = attr.evolve(info, phase=plotman.job.Phase(major=major, minor=0))
    timestamp = match.group(3)
    if timestamp is None:
        return with_new_phase
    return attr.evolve(with_new_phase, started_at=parse_chia_plot_time(s=timestamp))
@handlers.register(expression=r"^Computing table (\d+)$")
def subphase_1(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Computing table 1": set the phase-1 minor."""
    table_number = int(match.group(1))
    return attr.evolve(info, phase=attr.evolve(info.phase, minor=table_number))
@handlers.register(expression=r"^Backpropagating on table (\d+)$")
def subphase_2(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Backpropagating on table 7".

    Backpropagation runs from table 7 down to 1, so the table number is
    inverted to produce a monotonically rising minor phase.
    """
    table_number = int(match.group(1))
    return attr.evolve(info, phase=attr.evolve(info.phase, minor=8 - table_number))
@handlers.register(expression=r"^Compressing tables (\d+) and")
def subphase_3(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Compressing tables 1 and 2": set the phase-3 minor."""
    first_table = int(match.group(1))
    return attr.evolve(info, phase=attr.evolve(info.phase, minor=first_table))
@handlers.register(expression=r"^table 1 new size: ")
def phase2_7(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "table 1 new size: 3425157261": end of phase 2 (minor 7)."""
    final_phase2 = attr.evolve(info.phase, minor=7)
    return attr.evolve(info, phase=final_phase2)
@handlers.register(expression=r"^\tStarting to write C1 and C3 tables$")
def phase4_1(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle "\\tStarting to write C1 and C3 tables": phase-4 minor 1."""
    next_phase = attr.evolve(info.phase, minor=1)
    return attr.evolve(info, phase=next_phase)
@handlers.register(expression=r"^\tWriting C2 table$")
def phase4_2(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle "\\tWriting C2 table": phase-4 minor 2."""
    next_phase = attr.evolve(info.phase, minor=2)
    return attr.evolve(info, phase=next_phase)
@handlers.register(expression=r"^\tFinal table pointers:$")
def phase4_3(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle "\\tFinal table pointers:": phase-4 minor 3."""
    next_phase = attr.evolve(info.phase, minor=3)
    return attr.evolve(info, phase=next_phase)
@handlers.register(expression=r"^Approximate working space used")
def phase5(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Approximate working space used (without final file):
    269.297 GiB": enter the copy phase 5:0."""
    copy_phase = plotman.job.Phase(major=5, minor=0)
    return attr.evolve(info, phase=copy_phase)
@handlers.register(expression=r"^Copied final file from ")
def phase5_1(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle "Copied final file from \"...\" to \"...\"": phase-5 minor 1."""
    next_phase = attr.evolve(info.phase, minor=1)
    return attr.evolve(info, phase=next_phase)
# @handlers.register(expression=r"^Copy time = (\d+\.\d+) seconds")
# def phase5_2(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
# # Copy time = 178.438 seconds. CPU (41.390%) Thu Jul 15 03:42:44 2021
# phase = attr.evolve(info.phase, minor=2)
# return attr.evolve(info, phase=phase, copy_time_raw=float(match.group(1)))
@handlers.register(expression=r"^Removed temp2 file ")
def phase5_2(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle e.g. "Removed temp2 file \"...plot.2.tmp\"? 1": phase-5 minor 2."""
    next_phase = attr.evolve(info.phase, minor=2)
    return attr.evolve(info, phase=next_phase)
@handlers.register(expression=r'^Renamed final file from ".+" to "(.+)"')
def phase5_3(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Handle the final rename line: phase-5 minor 3 plus the final filename."""
    final_filename = match.group(1)
    next_phase = attr.evolve(info.phase, minor=3)
    return attr.evolve(info, phase=next_phase, filename=final_filename)
@handlers.register(expression=r"^Time for phase 1 = (\d+\.\d+) seconds")
def phase1_duration(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Time for phase 1 = 8134.660 seconds. CPU (194.060%) Thu Jul 15 00:48:59 2021"
    seconds = float(match.group(1))
    return attr.evolve(info, phase1_duration_raw=seconds)
@handlers.register(expression=r"^Time for phase 2 = (\d+\.\d+) seconds")
def phase2_duration(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Time for phase 2 = 6911.621 seconds. CPU (71.780%) Mon Apr 5 01:48:54 2021"
    seconds = float(match.group(1))
    return attr.evolve(info, phase2_duration_raw=seconds)
@handlers.register(expression=r"^Time for phase 3 = (\d+\.\d+) seconds")
def phase3_duration(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Time for phase 3 = 14537.188 seconds. CPU (82.730%) Mon Apr 5 05:51:11 2021"
    seconds = float(match.group(1))
    return attr.evolve(info, phase3_duration_raw=seconds)
@handlers.register(expression=r"^Time for phase 4 = (\d+\.\d+) seconds")
def phase4_duration(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Time for phase 4 = 924.288 seconds. CPU (86.810%) Mon Apr 5 06:06:35 2021"
    seconds = float(match.group(1))
    return attr.evolve(info, phase4_duration_raw=seconds)
@handlers.register(expression=r"^Total time = (\d+\.\d+) seconds")
def total_time(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Total time = 39945.080 seconds. CPU (123.100%) Mon Apr 5 06:06:35 2021"
    seconds = float(match.group(1))
    return attr.evolve(info, total_time_raw=seconds)
@handlers.register(expression=r"^Copy time = (\d+\.\d+) seconds")
def copy_time(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Copy time = 501.696 seconds. CPU (23.860%) Sun May 9 22:52:41 2021"
    seconds = float(match.group(1))
    return attr.evolve(info, copy_time_raw=seconds)
@handlers.register(
    expression=r"^Starting plotting progress into temporary dirs: (.+) and (.+)$"
)
def plot_dirs(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Starting plotting progress into temporary dirs: /farm/yards/901 and /farm/yards/901"
    first, second = match.group(1), match.group(2)
    return attr.evolve(info, tmp_dir1=first, tmp_dir2=second)
@handlers.register(expression=r"^Using (\d+) threads of stripe size (\d+)")
def threads(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Using 4 threads of stripe size 65536" (the stripe size is not recorded)
    count = int(match.group(1))
    return attr.evolve(info, threads=count)
@handlers.register(expression=r"^Using (\d+) buckets")
def buckets(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Using 128 buckets"
    count = int(match.group(1))
    return attr.evolve(info, buckets=count)
@handlers.register(expression=r"^Buffer size is: (\d+)MiB")
def buffer_size(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Buffer size is: 4000MiB"
    mebibytes = int(match.group(1))
    return attr.evolve(info, buffer=mebibytes)
@handlers.register(expression=r"^Plot size is: (\d+)")
def plot_size(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "Plot size is: 32"
    k = int(match.group(1))
    return attr.evolve(info, plot_size=k)
commands = plotman.plotters.core.Commands()
# click replica of the chia 1.1.2 `plots create` options, used only to parse
# plotter command lines; the decorated callback is intentionally empty.  The
# copied region must stay verbatim so it can be diffed against upstream.
@commands.register(version=(1, 1, 2))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.2/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.2/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option(
    "--override-k",
    help="Force size smaller than 32",
    default=False,
    show_default=True,
    is_flag=True,
)
@click.option(
    "-n",
    "--num",
    help="Number of plots or challenges",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-b",
    "--buffer",
    help="Megabytes for sort/plot buffer",
    type=int,
    default=4608,
    show_default=True,
)
@click.option(
    "-r",
    "--num_threads",
    help="Number of threads to use",
    type=int,
    default=2,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets",
    type=int,
    default=128,
    show_default=True,
)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option(
    "-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None
)
@click.option(
    "-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None
)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmp2_dir",
    help="Second temporary directory for plotting files",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-i",
    "--plotid",
    help="PlotID in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-m",
    "--memo",
    help="Memo in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True
)
@click.option(
    "-x",
    "--exclude_final_dir",
    help="Skips adding [final dir] to harvester for farming",
    default=False,
    is_flag=True,
)
# end copied code
def _cli_1_1_2() -> None:
    """Argument-parsing stub for chia 1.1.2 `plots create`; never invoked."""
    pass
# click replica of the chia 1.1.3 `plots create` options, used only to parse
# plotter command lines; the decorated callback is intentionally empty.  The
# copied region must stay verbatim so it can be diffed against upstream.
@commands.register(version=(1, 1, 3))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.3/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.3/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option(
    "--override-k",
    help="Force size smaller than 32",
    default=False,
    show_default=True,
    is_flag=True,
)
@click.option(
    "-n",
    "--num",
    help="Number of plots or challenges",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-b",
    "--buffer",
    help="Megabytes for sort/plot buffer",
    type=int,
    default=4608,
    show_default=True,
)
@click.option(
    "-r",
    "--num_threads",
    help="Number of threads to use",
    type=int,
    default=2,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets",
    type=int,
    default=128,
    show_default=True,
)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option(
    "-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None
)
@click.option(
    "-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None
)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmp2_dir",
    help="Second temporary directory for plotting files",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-i",
    "--plotid",
    help="PlotID in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-m",
    "--memo",
    help="Memo in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True
)
@click.option(
    "-x",
    "--exclude_final_dir",
    help="Skips adding [final dir] to harvester for farming",
    default=False,
    is_flag=True,
)
# end copied code
def _cli_1_1_3() -> None:
    """Argument-parsing stub for chia 1.1.3 `plots create`; never invoked."""
    pass
# click replica of the chia 1.1.4 `plots create` options, used only to parse
# plotter command lines; the decorated callback is intentionally empty.  Note
# the default buffer dropped from 4608 to 3389 in this release.
@commands.register(version=(1, 1, 4))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.4/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.4/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option(
    "--override-k",
    help="Force size smaller than 32",
    default=False,
    show_default=True,
    is_flag=True,
)
@click.option(
    "-n",
    "--num",
    help="Number of plots or challenges",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-b",
    "--buffer",
    help="Megabytes for sort/plot buffer",
    type=int,
    default=3389,
    show_default=True,
)
@click.option(
    "-r",
    "--num_threads",
    help="Number of threads to use",
    type=int,
    default=2,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets",
    type=int,
    default=128,
    show_default=True,
)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option(
    "-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None
)
@click.option(
    "-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None
)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmp2_dir",
    help="Second temporary directory for plotting files",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-i",
    "--plotid",
    help="PlotID in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-m",
    "--memo",
    help="Memo in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True
)
@click.option(
    "-x",
    "--exclude_final_dir",
    help="Skips adding [final dir] to harvester for farming",
    default=False,
    is_flag=True,
)
# end copied code
def _cli_1_1_4() -> None:
    """Argument-parsing stub for chia 1.1.4 `plots create`; never invoked."""
    pass
# click replica of the chia 1.1.5 `plots create` options, used only to parse
# plotter command lines; the decorated callback is intentionally empty.
@commands.register(version=(1, 1, 5))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.5/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.5/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option(
    "--override-k",
    help="Force size smaller than 32",
    default=False,
    show_default=True,
    is_flag=True,
)
@click.option(
    "-n",
    "--num",
    help="Number of plots or challenges",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-b",
    "--buffer",
    help="Megabytes for sort/plot buffer",
    type=int,
    default=3389,
    show_default=True,
)
@click.option(
    "-r",
    "--num_threads",
    help="Number of threads to use",
    type=int,
    default=2,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets",
    type=int,
    default=128,
    show_default=True,
)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option(
    "-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None
)
@click.option(
    "-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None
)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmp2_dir",
    help="Second temporary directory for plotting files",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-i",
    "--plotid",
    help="PlotID in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-m",
    "--memo",
    help="Memo in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True
)
@click.option(
    "-x",
    "--exclude_final_dir",
    help="Skips adding [final dir] to harvester for farming",
    default=False,
    is_flag=True,
)
# end copied code
def _cli_1_1_5() -> None:
    """Argument-parsing stub for chia 1.1.5 `plots create`; never invoked."""
    pass
# click replica of the chia 1.1.6 `plots create` options, used only to parse
# plotter command lines; the decorated callback is intentionally empty.
@commands.register(version=(1, 1, 6))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.6/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.6/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option(
    "--override-k",
    help="Force size smaller than 32",
    default=False,
    show_default=True,
    is_flag=True,
)
@click.option(
    "-n",
    "--num",
    help="Number of plots or challenges",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-b",
    "--buffer",
    help="Megabytes for sort/plot buffer",
    type=int,
    default=3389,
    show_default=True,
)
@click.option(
    "-r",
    "--num_threads",
    help="Number of threads to use",
    type=int,
    default=2,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets",
    type=int,
    default=128,
    show_default=True,
)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option(
    "-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None
)
@click.option(
    "-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None
)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmp2_dir",
    help="Second temporary directory for plotting files",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-i",
    "--plotid",
    help="PlotID in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-m",
    "--memo",
    help="Memo in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True
)
@click.option(
    "-x",
    "--exclude_final_dir",
    help="Skips adding [final dir] to harvester for farming",
    default=False,
    is_flag=True,
)
# end copied code
def _cli_1_1_6() -> None:
    """Argument-parsing stub for chia 1.1.6 `plots create`; never invoked."""
    pass
# click replica of the chia 1.1.7 `plots create` options, used only to parse
# plotter command lines; the decorated callback is intentionally empty.
@commands.register(version=(1, 1, 7))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.7/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.7/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option(
    "--override-k",
    help="Force size smaller than 32",
    default=False,
    show_default=True,
    is_flag=True,
)
@click.option(
    "-n",
    "--num",
    help="Number of plots or challenges",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-b",
    "--buffer",
    help="Megabytes for sort/plot buffer",
    type=int,
    default=3389,
    show_default=True,
)
@click.option(
    "-r",
    "--num_threads",
    help="Number of threads to use",
    type=int,
    default=2,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets",
    type=int,
    default=128,
    show_default=True,
)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option(
    "-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None
)
@click.option(
    "-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None
)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmp2_dir",
    help="Second temporary directory for plotting files",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-i",
    "--plotid",
    help="PlotID in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-m",
    "--memo",
    help="Memo in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True
)
@click.option(
    "-x",
    "--exclude_final_dir",
    help="Skips adding [final dir] to harvester for farming",
    default=False,
    is_flag=True,
)
# end copied code
def _cli_1_1_7() -> None:
    """Argument-parsing stub for chia 1.1.7 `plots create`; never invoked."""
    pass
# click replica of the chia 1.2.0 `plots create` options, used only to parse
# plotter command lines; the decorated callback is intentionally empty.
@commands.register(version=(1, 2, 0))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.2.0/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.2.0/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option(
    "--override-k",
    help="Force size smaller than 32",
    default=False,
    show_default=True,
    is_flag=True,
)
@click.option(
    "-n",
    "--num",
    help="Number of plots or challenges",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-b",
    "--buffer",
    help="Megabytes for sort/plot buffer",
    type=int,
    default=3389,
    show_default=True,
)
@click.option(
    "-r",
    "--num_threads",
    help="Number of threads to use",
    type=int,
    default=2,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets",
    type=int,
    default=128,
    show_default=True,
)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option(
    "-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None
)
@click.option(
    "-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None
)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmp2_dir",
    help="Second temporary directory for plotting files",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-i",
    "--plotid",
    help="PlotID in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-m",
    "--memo",
    help="Memo in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True
)
@click.option(
    "-x",
    "--exclude_final_dir",
    help="Skips adding [final dir] to harvester for farming",
    default=False,
    is_flag=True,
)
# end copied code
def _cli_1_2_0() -> None:
    """Argument-parsing stub for chia 1.2.0 `plots create`; never invoked."""
    pass
# click replica of the chia 1.2.1 `plots create` options, used only to parse
# plotter command lines; the decorated callback is intentionally empty.
@commands.register(version=(1, 2, 1))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.2.1/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.2.1/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option(
    "--override-k",
    help="Force size smaller than 32",
    default=False,
    show_default=True,
    is_flag=True,
)
@click.option(
    "-n",
    "--num",
    help="Number of plots or challenges",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-b",
    "--buffer",
    help="Megabytes for sort/plot buffer",
    type=int,
    default=3389,
    show_default=True,
)
@click.option(
    "-r",
    "--num_threads",
    help="Number of threads to use",
    type=int,
    default=2,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets",
    type=int,
    default=128,
    show_default=True,
)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option(
    "-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None
)
@click.option(
    "-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None
)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmp2_dir",
    help="Second temporary directory for plotting files",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-i",
    "--plotid",
    help="PlotID in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-m",
    "--memo",
    help="Memo in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True
)
@click.option(
    "-x",
    "--exclude_final_dir",
    help="Skips adding [final dir] to harvester for farming",
    default=False,
    is_flag=True,
)
# end copied code
def _cli_1_2_1() -> None:
    """Argument-parsing stub for chia 1.2.1 `plots create`; never invoked."""
    pass
# click replica of the chia 1.2.2 `plots create` options, used only to parse
# plotter command lines; the decorated callback is intentionally empty.
@commands.register(version=(1, 2, 2))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.2.2/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.2.2/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option(
    "--override-k",
    help="Force size smaller than 32",
    default=False,
    show_default=True,
    is_flag=True,
)
@click.option(
    "-n",
    "--num",
    help="Number of plots or challenges",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-b",
    "--buffer",
    help="Megabytes for sort/plot buffer",
    type=int,
    default=3389,
    show_default=True,
)
@click.option(
    "-r",
    "--num_threads",
    help="Number of threads to use",
    type=int,
    default=2,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets",
    type=int,
    default=128,
    show_default=True,
)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option(
    "-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None
)
@click.option(
    "-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None
)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmp2_dir",
    help="Second temporary directory for plotting files",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-i",
    "--plotid",
    help="PlotID in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-m",
    "--memo",
    help="Memo in hex for reproducing plots (debugging only)",
    type=str,
    default=None,
)
@click.option(
    "-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True
)
@click.option(
    "-x",
    "--exclude_final_dir",
    help="Skips adding [final dir] to harvester for farming",
    default=False,
    is_flag=True,
)
# end copied code
def _cli_1_2_2() -> None:
    """Argument-parsing stub for chia 1.2.2 `plots create`; never invoked."""
    pass
import functools
import typing
import click
import typing_extensions
class CommandProtocol(typing_extensions.Protocol):
    """Structural type for the click commands stored in a Commands registry."""

    def make_context(self, info_name: str, args: typing.List[str]) -> click.Context:
        # Provided by click.Command; used to parse an argument list.
        ...

    def __call__(self) -> None:
        # Commands are invocable, though the registered stubs do nothing.
        ...
class Commands:
    """Registry mapping chia version tuples to their copied click commands."""

    def __init__(self) -> None:
        self.by_version: typing.Dict[typing.Sequence[int], CommandProtocol] = {}

    def register(
        self, version: typing.Sequence[int]
    ) -> typing.Callable[[CommandProtocol], CommandProtocol]:
        """Return a decorator that files the decorated command under ``version``.

        Raises:
            Exception: if ``version`` is not a tuple or is already registered.
        """
        # Validate the type first: an unhashable version (e.g. a list) would
        # otherwise raise a bare TypeError from the membership test below
        # instead of the intended diagnostic message.
        if not isinstance(version, tuple):
            raise Exception(f"Version must be a tuple: {version!r}")
        if version in self.by_version:
            raise Exception(f"Version already registered: {version!r}")
        return functools.partial(self._decorator, version=version)

    def _decorator(
        self, command: CommandProtocol, *, version: typing.Sequence[int]
    ) -> CommandProtocol:
        # Record and return the command unchanged so this works as a decorator.
        self.by_version[version] = command
        return command

    def __getitem__(self, item: typing.Sequence[int]) -> typing.Callable[[], None]:
        return self.by_version[item]

    def latest_command(self) -> CommandProtocol:
        """Return the command registered under the highest version tuple.

        Compares only the version keys; the previous implementation compared
        (version, command) pairs, which is never needed since keys are unique.
        """
        return self.by_version[max(self.by_version)]
# mypy: allow_untyped_decorators
import collections
import os.path
import pathlib
import subprocess
import typing
import attr
import click
import pendulum
import plotman.job
import plotman.plotters
@attr.frozen
class Options:
    """User-configurable plotman settings for the madMAx plotter."""

    # Name or path of the madMAx binary.
    executable: str = "chia_plot"
    n_threads: int = 4  # -r
    n_buckets: int = 256  # -u
    n_buckets3: int = 256  # -v, buckets for phases 3 and 4
    n_rmulti2: int = 1  # -K, phase 2 thread multiplier
def check_configuration(
    options: Options, pool_contract_address: typing.Optional[str]
) -> None:
    """Verify the configured madMAx executable supports the requested features.

    Only checks anything when a pool contract address is configured: runs
    ``<executable> --help`` and requires the ``--contract`` option to appear
    in its output.

    Raises:
        Exception: if the installed madMAx build does not list ``--contract``.
        subprocess.CalledProcessError: if the executable exits non-zero.
    """
    # Nothing to verify without a pool contract address.
    if pool_contract_address is None:
        return

    completed_process = subprocess.run(
        args=[options.executable, "--help"],
        capture_output=True,
        check=True,
        encoding="utf-8",
    )
    if "--contract" not in completed_process.stdout:
        # Fixed: these were f-strings with no placeholders (ruff F541).
        raise Exception(
            "found madMAx version does not support the `--contract`"
            " option for pools."
        )
def create_command_line(
    options: Options,
    tmpdir: str,
    tmp2dir: typing.Optional[str],
    dstdir: str,
    farmer_public_key: typing.Optional[str],
    pool_public_key: typing.Optional[str],
    pool_contract_address: typing.Optional[str],
) -> typing.List[str]:
    """Build the madMAx ``chia_plot`` argument vector from plotman options."""

    def slashed(path: str) -> str:
        # madMAx expects directory arguments to end with a separator.
        return path if path.endswith("/") else path + "/"

    args = [
        options.executable,
        "-n",
        str(1),
        "-r",
        str(options.n_threads),
        "-u",
        str(options.n_buckets),
        "-t",
        slashed(tmpdir),
        "-d",
        slashed(dstdir),
    ]
    if tmp2dir is not None:
        args.extend(["-2", slashed(tmp2dir)])
    if options.n_buckets3 is not None:
        args.extend(["-v", str(options.n_buckets3)])
    if options.n_rmulti2 is not None:
        args.extend(["-K", str(options.n_rmulti2)])
    if farmer_public_key is not None:
        args.extend(["-f", farmer_public_key])
    if pool_public_key is not None:
        args.extend(["-p", pool_public_key])
    if pool_contract_address is not None:
        args.extend(["-c", pool_contract_address])
    return args
# @plotman.plotters.ProtocolChecker[plotman.plotters.SpecificInfo]()
@plotman.plotters.check_SpecificInfo
@attr.frozen
class SpecificInfo:
    """Immutable snapshot of everything parsed so far from a madMAx log."""

    process_id: typing.Optional[int] = None
    # Starts as "unknown" until the first log line is seen.
    phase: plotman.job.Phase = plotman.job.Phase(known=False)
    started_at: typing.Optional[pendulum.DateTime] = None
    plot_id: str = ""
    # madMAx reports separate bucket counts for phase 1 (-u) and phases 3/4 (-v).
    p1_buckets: int = 0
    p34_buckets: int = 0
    threads: int = 0
    # buffer: int = 0
    plot_size: int = 0
    tmp_dir: str = ""
    tmp2_dir: str = ""
    dst_dir: str = ""
    # Raw per-phase durations in seconds, as printed by the plotter.
    phase1_duration_raw: float = 0
    phase2_duration_raw: float = 0
    phase3_duration_raw: float = 0
    phase4_duration_raw: float = 0
    total_time_raw: float = 0
    # copy_time_raw: float = 0
    filename: str = ""
    plot_name: str = ""

    def common(self) -> plotman.plotters.CommonInfo:
        """Translate this madMAx-specific state to the plotter-agnostic form."""
        return plotman.plotters.CommonInfo(
            type="madmax",
            dstdir=self.dst_dir,
            phase=self.phase,
            tmpdir=self.tmp_dir,
            tmp2dir=self.tmp2_dir,
            started_at=self.started_at,
            plot_id=self.plot_id,
            plot_size=self.plot_size,
            # TODO: handle p34_buckets as well somehow
            buckets=self.p1_buckets,
            threads=self.threads,
            phase1_duration_raw=self.phase1_duration_raw,
            phase2_duration_raw=self.phase2_duration_raw,
            phase3_duration_raw=self.phase3_duration_raw,
            phase4_duration_raw=self.phase4_duration_raw,
            total_time_raw=self.total_time_raw,
            filename=self.filename,
        )
@plotman.plotters.check_Plotter
@attr.mutable
class Plotter:
    """Incremental parser for madMAx ``chia_plot`` processes and their logs."""

    # Buffers raw bytes and yields complete decoded log lines.
    decoder: plotman.plotters.LineDecoder = attr.ib(
        factory=plotman.plotters.LineDecoder
    )
    # Parsed state accumulated so far.
    info: SpecificInfo = attr.ib(factory=SpecificInfo)
    parsed_command_line: typing.Optional[
        plotman.job.ParsedChiaPlotsCreateCommand
    ] = None

    @classmethod
    def identify_log(cls, line: str) -> bool:
        # The madMAx banner line contains this phrase.
        return "Multi-threaded pipelined Chia" in line

    @classmethod
    def identify_process(cls, command_line: typing.List[str]) -> bool:
        # Match on the executable's basename, case-insensitively.
        if len(command_line) == 0:
            return False
        return "chia_plot" == os.path.basename(command_line[0]).lower()

    def common_info(self) -> plotman.plotters.CommonInfo:
        """Return the plotter-agnostic view of the parsed state."""
        return self.info.common()

    def parse_command_line(self, command_line: typing.List[str], cwd: str) -> None:
        """Parse a ``chia_plot`` argv, anchoring directory options at ``cwd``."""
        # drop the chia_plot
        arguments = command_line[1:]
        # TODO: We could at some point do chia version detection and pick the
        # associated command. For now we'll just use the latest one we have
        # copied.
        command = commands.latest_command()
        self.parsed_command_line = plotman.plotters.parse_command_line_with_click(
            command=command,
            arguments=arguments,
        )
        # Resolve any relative directory arguments against the process's
        # working directory.
        for key in ["tmpdir", "tmpdir2", "finaldir"]:
            original: os.PathLike[str] = self.parsed_command_line.parameters.get(key)  # type: ignore[assignment]
            if original is not None:
                self.parsed_command_line.parameters[key] = pathlib.Path(cwd).joinpath(
                    original
                )

    def update(self, chunk: bytes) -> SpecificInfo:
        """Feed raw log bytes; returns the info updated from any new lines."""
        new_lines = self.decoder.update(chunk=chunk)
        for line in new_lines:
            # The first line seen moves the phase from "unknown" to 0:0.
            if not self.info.phase.known:
                self.info = attr.evolve(
                    self.info, phase=plotman.job.Phase(major=0, minor=0)
                )
            # The first pattern that matches wins; all of its registered
            # handlers run in order, then remaining patterns are skipped.
            for pattern, handler_functions in handlers.mapping.items():
                match = pattern.search(line)
                if match is None:
                    continue
                for handler_function in handler_functions:
                    self.info = handler_function(match=match, info=self.info)
                break
        return self.info
handlers = plotman.plotters.RegexLineHandlers[SpecificInfo]()
@handlers.register(expression=r"^\[P1\] Table ([1-6])")
def phase_1(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "[P1] Table 2 took 211.248 sec, found 4294987039 matches"
    # Completing table N moves phase 1 to step N + 1.
    table = int(match.group(1))
    return attr.evolve(info, phase=plotman.job.Phase(major=1, minor=table + 1))
@handlers.register(expression=r"^\[P2\] max_table_size")
def phase_2_start(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "[P2] max_table_size = 4295422716"
    begin = plotman.job.Phase(major=2, minor=1)
    return attr.evolve(info, phase=begin)
@handlers.register(expression=r"^\[P2\] Table ([2-7]) rewrite")
def phase_2(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    # e.g. "[P2] Table 7 rewrite took 60.7659 sec, dropped 0 entries (0 %)"
    # Phase 2 walks tables 7 down to 2, so table 7 is step 2 and table 2 is
    # step 7: minor = 9 - table (equivalent to the former 8 - table + 1).
    table = int(match.group(1))
    return attr.evolve(info, phase=plotman.job.Phase(major=2, minor=9 - table))
@handlers.register(expression=r"^Phase 2 took (\d+(?:\.\d+)?) sec")
def phase3_0(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Record the phase 2 duration and mark the transition to phase 3.

    Matches lines such as:
        Phase 2 took 1344.24 sec

    The fractional part of the duration is now optional (and
    non-capturing), so a whole-second line like "Phase 2 took 1344 sec"
    is still recognized; the previous pattern required a decimal point.
    """
    return attr.evolve(
        info,
        phase=plotman.job.Phase(major=3, minor=0),
        phase2_duration_raw=float(match.group(1)),
    )
@handlers.register(expression=r"^Wrote plot header")
def phase_3_start(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Mark entry into phase 3.

    Matches lines such as:
        Wrote plot header with 252 bytes
    """
    entering_phase_3 = plotman.job.Phase(major=3, minor=1)
    return attr.evolve(info, phase=entering_phase_3)
@handlers.register(expression=r"^\[P3-2\] Table ([2-6]) took")
def phase_3(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Advance the phase 3 minor counter as each table's second pass ends.

    Only the "[P3-2]" lines drive progress, e.g.:
        [P3-2] Table 2 took 69.0526 sec, wrote 3429403335 left entries, 3429403335 final
        [P3-2] Table 6 took 60.371 sec, wrote 3713819178 left entries, 3713819178 final
    """
    # The table number itself serves as the minor phase value.
    table_number = int(match.group(1))
    return attr.evolve(
        info, phase=plotman.job.Phase(major=3, minor=table_number)
    )
@handlers.register(expression=r"^Phase 3 took (\d+(?:\.\d+)?) sec")
def phase4(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Record the phase 3 duration and mark the transition to phase 4.

    Matches lines such as:
        Phase 3 took 1002.89 sec, wrote 21877315926 entries to final plot

    The fractional part of the duration is now optional (and
    non-capturing), so a whole-second duration is still recognized; the
    previous pattern required a decimal point.
    """
    return attr.evolve(
        info,
        phase=plotman.job.Phase(major=4, minor=0),
        phase3_duration_raw=float(match.group(1)),
    )
@handlers.register(expression=r"^\[P4\] Starting")
def phase_4_1(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Mark the start of the phase 4 C1/C3 table writes.

    Matches lines such as:
        [P4] Starting to write C1 and C3 tables
    """
    writing_c1_c3 = plotman.job.Phase(major=4, minor=1)
    return attr.evolve(info, phase=writing_c1_c3)
@handlers.register(expression=r"^\[P4\] Writing C2 table")
def phase_4_2(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Mark the start of the phase 4 C2 table write.

    Matches lines such as:
        [P4] Writing C2 table
    """
    writing_c2 = plotman.job.Phase(major=4, minor=2)
    return attr.evolve(info, phase=writing_c2)
@handlers.register(expression=r"^Phase 4 took (\d+(?:\.\d+)?) sec")
def phase5(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Record the phase 4 duration and mark the transition to phase 5.

    Matches lines such as:
        Phase 4 took 77.9891 sec, final plot size is 108836186159 bytes

    The fractional part of the duration is now optional (and
    non-capturing), so a whole-second duration is still recognized; the
    previous pattern required a decimal point.
    """
    return attr.evolve(
        info,
        phase=plotman.job.Phase(major=5, minor=0),
        phase4_duration_raw=float(match.group(1)),
    )
@handlers.register(expression=r"^Started copy to ")
def phase_5_1(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Note that the finished plot has begun copying to its destination.

    Matches lines such as:
        Started copy to /farm/yards/902/fake_dst/plot-k32-2021-07-14-21-56-522acbd6308af7e229281352f746449134126482cfabd51d38e0f89745d21698.plot
    """
    copying = plotman.job.Phase(major=5, minor=1)
    return attr.evolve(info, phase=copying)
@handlers.register(expression=r"^Renamed final plot to ")
def phase_5_2(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Note that the copied plot has been renamed to its final name.

    Matches lines such as:
        Renamed final plot to /farm/yards/902/fake_dst/plot-k32-2021-07-14-21-56-522acbd6308af7e229281352f746449134126482cfabd51d38e0f89745d21698.plot
    """
    renamed = plotman.job.Phase(major=5, minor=2)
    return attr.evolve(info, phase=renamed)
@handlers.register(expression=r"^Final Directory:\s*(.+)")
def dst_dir(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Capture the destination (final) directory.

    Matches lines such as:
        Final Directory: /farm/yards/907/
    """
    directory = match.group(1)
    return attr.evolve(info, dst_dir=directory)
@handlers.register(expression=r"^Working Directory:\s*(.+)")
def tmp_dir(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Capture the primary temporary (working) directory.

    Matches lines such as:
        Working Directory: /farm/yards/907/
    """
    directory = match.group(1)
    return attr.evolve(info, tmp_dir=directory)
@handlers.register(expression=r"^Working Directory 2:\s*(.+)")
def tmp2_dir(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Capture the secondary temporary (working) directory.

    Matches lines such as:
        Working Directory 2: /farm/yards/907/
    """
    directory = match.group(1)
    return attr.evolve(info, tmp2_dir=directory)
@handlers.register(
    expression=r"^Plot Name: (?P<name>plot-k(?P<size>\d+)-(?P<year>\d+)-(?P<month>\d+)-(?P<day>\d+)-(?P<hour>\d+)-(?P<minute>\d+)-(?P<plot_id>\w+))$"
)
def plot_name_line(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Parse the plot name line: k size, plot id, and start timestamp.

    Matches lines such as:
        Plot Name: plot-k32-2021-07-11-16-52-3a3872f5a124497a17fb917dfe027802aa1867f8b0a8cbac558ed12aa5b697b2

    Also resets the phase to 1:1 since this line appears at plot start.
    """
    timestamp_fields = {
        field: int(match.group(field))
        for field in ["year", "month", "day", "hour", "minute"]
    }
    started = pendulum.datetime(tz=None, **timestamp_fields)
    return attr.evolve(
        info,
        plot_size=int(match.group("size")),
        plot_name=match.group("name"),
        started_at=started,
        plot_id=match.group("plot_id"),
        phase=plotman.job.Phase(major=1, minor=1),
    )
@handlers.register(expression=r"^Number of Threads:\s*(\d+)")
def threads(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Capture the configured thread count.

    Matches lines such as:
        Number of Threads: 9
    """
    thread_count = int(match.group(1))
    return attr.evolve(info, threads=thread_count)
@handlers.register(expression=r"^Number of Buckets P1:.*\((\d+)\)")
def p1_buckets(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Capture the phase 1 bucket count (the expanded value in parentheses).

    Matches lines such as:
        Number of Buckets P1: 2^8 (256)
    """
    bucket_count = int(match.group(1))
    return attr.evolve(info, p1_buckets=bucket_count)
@handlers.register(expression=r"^Number of Buckets P3\+P4:.*\((\d+)\)")
def p34_buckets(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Capture the phase 3+4 bucket count (the expanded value in parentheses).

    Matches lines such as:
        Number of Buckets P3+P4: 2^8 (256)
    """
    bucket_count = int(match.group(1))
    return attr.evolve(info, p34_buckets=bucket_count)
@handlers.register(expression=r"^Phase 1 took (\d+(?:\.\d+)?) sec")
def phase1_duration_raw(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Record the phase 1 duration in seconds.

    Matches lines such as:
        Phase 1 took 1851.12 sec

    The fractional part of the duration is now optional (and
    non-capturing), so a whole-second duration is still recognized; the
    previous pattern required a decimal point.
    """
    return attr.evolve(info, phase1_duration_raw=float(match.group(1)))
@handlers.register(expression=r"^Total plot creation time was (\d+(?:\.\d+)?) sec")
def total_time(match: typing.Match[str], info: SpecificInfo) -> SpecificInfo:
    """Record the total plot creation time in seconds.

    Matches lines such as:
        Total plot creation time was 4276.32 sec (71.272 min)

    The fractional part of the duration is now optional (and
    non-capturing), so a whole-second duration is still recognized; the
    previous pattern required a decimal point.
    """
    return attr.evolve(info, total_time_raw=float(match.group(1)))
# Registry of known madMAx CLI versions; each registered click command below
# describes one upstream commit's argument set.
commands = plotman.plotters.core.Commands()
# Madmax Git on 2021-06-19 -> https://github.com/madMAx43v3r/chia-plotter/commit/c8121b987186c42c895b49818e6c13acecc51332
# Mirror of the chia_plot CLI at the commit above, registered as version (0,).
# The function body is intentionally empty: only the option declarations
# matter, presumably so the registry can parse this version's command lines
# — confirm against plotman.plotters.core.Commands usage.
@commands.register(version=(0,))
@click.command()
# https://github.com/madMAx43v3r/chia-plotter/blob/c8121b987186c42c895b49818e6c13acecc51332/LICENSE
# https://github.com/madMAx43v3r/chia-plotter/blob/c8121b987186c42c895b49818e6c13acecc51332/src/chia_plot.cpp#L177-L188
@click.option(
    "-n",
    "--count",
    help="Number of plots to create (default = 1, -1 = infinite)",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-r",
    "--threads",
    help="Number of threads (default = 4)",
    type=int,
    default=4,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets (default = 256)",
    type=int,
    default=256,
    show_default=True,
)
@click.option(
    "-v",
    "--buckets3",
    help="Number of buckets for phase 3+4 (default = buckets)",
    type=int,
    default=256,
)
@click.option(
    "-t",
    "--tmpdir",
    help="Temporary directory, needs ~220 GiB (default = $PWD)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmpdir2",
    help="Temporary directory 2, needs ~110 GiB [RAM] (default = <tmpdir>)",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--finaldir",
    help="Final directory (default = <tmpdir>)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-p", "--poolkey", help="Pool Public Key (48 bytes)", type=str, default=None
)
@click.option(
    "-f", "--farmerkey", help="Farmer Public Key (48 bytes)", type=str, default=None
)
@click.option(
    "-G", "--tmptoggle", help="Alternate tmpdir/tmpdir2", type=str, default=None
)
def _cli_c8121b987186c42c895b49818e6c13acecc51332() -> None:
    pass
# Madmax Git on 2021-07-12 -> https://github.com/madMAx43v3r/chia-plotter/commit/974d6e5f1440f68c48492122ca33828a98864dfc
# Mirror of the chia_plot CLI at the commit above, registered as version (1,).
# Adds -w/--waitforcopy, -c/--contract, and -K/--rmulti2 relative to the
# version (0,) command.  The function body is intentionally empty: only the
# option declarations matter.
@commands.register(version=(1,))
@click.command()
# https://github.com/madMAx43v3r/chia-plotter/blob/974d6e5f1440f68c48492122ca33828a98864dfc/LICENSE
# https://github.com/madMAx43v3r/chia-plotter/blob/974d6e5f1440f68c48492122ca33828a98864dfc/src/chia_plot.cpp#L235-L249
@click.option(
    "-n",
    "--count",
    help="Number of plots to create (default = 1, -1 = infinite)",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-r",
    "--threads",
    help="Number of threads (default = 4)",
    type=int,
    default=4,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets (default = 256)",
    type=int,
    default=256,
    show_default=True,
)
@click.option(
    "-v",
    "--buckets3",
    help="Number of buckets for phase 3+4 (default = buckets)",
    type=int,
    default=256,
)
@click.option(
    "-t",
    "--tmpdir",
    help="Temporary directory, needs ~220 GiB (default = $PWD)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmpdir2",
    help="Temporary directory 2, needs ~110 GiB [RAM] (default = <tmpdir>)",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--finaldir",
    help="Final directory (default = <tmpdir>)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-w",
    "--waitforcopy",
    help="Wait for copy to start next plot",
    type=bool,
    default=False,
    show_default=True,
)
@click.option(
    "-p", "--poolkey", help="Pool Public Key (48 bytes)", type=str, default=None
)
@click.option(
    "-c", "--contract", help="Pool Contract Address (62 chars)", type=str, default=None
)
@click.option(
    "-f", "--farmerkey", help="Farmer Public Key (48 bytes)", type=str, default=None
)
@click.option(
    "-G", "--tmptoggle", help="Alternate tmpdir/tmpdir2", type=str, default=None
)
@click.option(
    "-K",
    "--rmulti2",
    help="Thread multiplier for P2 (default = 1)",
    type=int,
    default=1,
)
def _cli_974d6e5f1440f68c48492122ca33828a98864dfc() -> None:
    pass
# Madmax Git on 2021-08-22 -> https://github.com/madMAx43v3r/chia-plotter/commit/aaa3214d4abbd49bb99c2ec087e27c765424cd65
# Mirror of the chia_plot CLI at the commit above, registered as version (2,).
# Adds -k/--size relative to the version (1,) command.  The function body is
# intentionally empty: only the option declarations matter.
@commands.register(version=(2,))
@click.command()
# https://github.com/madMAx43v3r/chia-plotter/blob/aaa3214d4abbd49bb99c2ec087e27c765424cd65/LICENSE
# https://github.com/madMAx43v3r/chia-plotter/blob/aaa3214d4abbd49bb99c2ec087e27c765424cd65/src/chia_plot.cpp#L238-L253
@click.option(
    "-k",
    "--size",
    help="K size (default = 32, k <= 32)",
    type=int,
    default=32,
    show_default=True,
)
@click.option(
    "-n",
    "--count",
    help="Number of plots to create (default = 1, -1 = infinite)",
    type=int,
    default=1,
    show_default=True,
)
@click.option(
    "-r",
    "--threads",
    help="Number of threads (default = 4)",
    type=int,
    default=4,
    show_default=True,
)
@click.option(
    "-u",
    "--buckets",
    help="Number of buckets (default = 256)",
    type=int,
    default=256,
    show_default=True,
)
@click.option(
    "-v",
    "--buckets3",
    help="Number of buckets for phase 3+4 (default = buckets)",
    type=int,
    default=256,
)
@click.option(
    "-t",
    "--tmpdir",
    help="Temporary directory, needs ~220 GiB (default = $PWD)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-2",
    "--tmpdir2",
    help="Temporary directory 2, needs ~110 GiB [RAM] (default = <tmpdir>)",
    type=click.Path(),
    default=None,
)
@click.option(
    "-d",
    "--finaldir",
    help="Final directory (default = <tmpdir>)",
    type=click.Path(),
    default=pathlib.Path("."),
    show_default=True,
)
@click.option(
    "-w",
    "--waitforcopy",
    help="Wait for copy to start next plot",
    type=bool,
    default=False,
    show_default=True,
)
@click.option(
    "-p", "--poolkey", help="Pool Public Key (48 bytes)", type=str, default=None
)
@click.option(
    "-c", "--contract", help="Pool Contract Address (62 chars)", type=str, default=None
)
@click.option(
    "-f", "--farmerkey", help="Farmer Public Key (48 bytes)", type=str, default=None
)
@click.option(
    "-G", "--tmptoggle", help="Alternate tmpdir/tmpdir2", type=str, default=None
)
@click.option(
    "-K",
    "--rmulti2",
    help="Thread multiplier for P2 (default = 1)",
    type=int,
    default=1,
)
def _cli_aaa3214d4abbd49bb99c2ec087e27c765424cd65() -> None:
    pass
+19
-0

@@ -8,2 +8,21 @@ # Change Log

## [0.5.2] - 2021-09-12
### Fixed
- Temp files are correctly identified for cleanup.
([#913](https://github.com/ericaltendorf/plotman/pull/913))
- Correct where trailing `/` on dst directories resulted in them being considered unused.
([#920](https://github.com/ericaltendorf/plotman/pull/920))
### Added
- `-v`/`--buckets3` and `-K`/`--rmulti2` are configurable for madMAx.
([#869](https://github.com/ericaltendorf/plotman/pull/869))
- A combined major/minor value for Prometheus status output.
([#885](https://github.com/ericaltendorf/plotman/pull/885))
- `supervisord` now used in Docker image.
([#898](https://github.com/ericaltendorf/plotman/pull/898))
- Output same entries to plotman.log from 'plotman interactive' and 'plotman plot/archive' "daemons".
([#878](https://github.com/ericaltendorf/plotman/pull/878))
- [BladeBit](https://github.com/harold-b/bladebit) support.
Requires BladeBit v1.1.0 for proper log monitoring.
([#916](https://github.com/ericaltendorf/plotman/pull/916))
## [0.5.1] - 2021-07-15

@@ -10,0 +29,0 @@ ### Fixed

+1
-1
Metadata-Version: 2.1
Name: plotman
Version: 0.5.1
Version: 0.5.2
Summary: Chia plotting manager

@@ -5,0 +5,0 @@ Home-page: https://github.com/ericaltendorf/plotman

[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
[tool.black]
target-version = ['py36', 'py37', 'py38', 'py39']
include = '''
^/(
setup.py
| src/plotman/.*\.pyi?
)$
'''

@@ -71,2 +71,3 @@ [metadata]

checks =
black == 21.7b0
check-manifest ~= 0.46

@@ -73,0 +74,0 @@ mypy == 0.902

Metadata-Version: 2.1
Name: plotman
Version: 0.5.1
Version: 0.5.2
Summary: Chia plotting manager

@@ -5,0 +5,0 @@ Home-page: https://github.com/ericaltendorf/plotman

@@ -14,2 +14,3 @@ appdirs~=1.4

[checks]
black==21.7b0
check-manifest~=0.46

@@ -16,0 +17,0 @@ mypy==0.902

@@ -18,13 +18,10 @@ .coveragerc

src/plotman/archive.py
src/plotman/chia.py
src/plotman/chiapos.py
src/plotman/configuration.py
src/plotman/csv_exporter.py
src/plotman/errors.py
src/plotman/interactive.py
src/plotman/job.py
src/plotman/log_parser.py
src/plotman/madmax.py
src/plotman/manager.py
src/plotman/plot_util.py
src/plotman/plotinfo.py
src/plotman/plotman.py

@@ -41,10 +38,25 @@ src/plotman/reporting.py

src/plotman/_tests/configuration_test.py
src/plotman/_tests/job_test.py
src/plotman/_tests/log_parser_test.py
src/plotman/_tests/manager_test.py
src/plotman/_tests/plot_util_test.py
src/plotman/_tests/reporting_test.py
src/plotman/_tests/resources/2021-04-04T19_00_47.681088-0400.log
src/plotman/_tests/resources/2021-04-04T19_00_47.681088-0400.notes
src/plotman/_tests/plotters/__init__.py
src/plotman/_tests/plotters/test_bladebit.py
src/plotman/_tests/plotters/test_chianetwork.py
src/plotman/_tests/plotters/test_core.py
src/plotman/_tests/plotters/test_init.py
src/plotman/_tests/plotters/test_madmax.py
src/plotman/_tests/resources/__init__.py
src/plotman/_tests/resources/bladebit.marked
src/plotman/_tests/resources/bladebit.plot.log
src/plotman/_tests/resources/chianetwork.marked
src/plotman/_tests/resources/chianetwork.notes
src/plotman/_tests/resources/chianetwork.plot.log
src/plotman/_tests/resources/madmax.marked
src/plotman/_tests/resources/madmax.notes
src/plotman/_tests/resources/madmax.plot.log
src/plotman/plotters/__init__.py
src/plotman/plotters/bladebit.py
src/plotman/plotters/chianetwork.py
src/plotman/plotters/core.py
src/plotman/plotters/madmax.py
src/plotman/resources/__init__.py

@@ -51,0 +63,0 @@ src/plotman/resources/plotman.yaml

@@ -5,3 +5,4 @@ from plotman import archive, job

def test_compute_priority() -> None:
assert (archive.compute_priority( job.Phase(major=3, minor=1), 1000, 10) >
archive.compute_priority( job.Phase(major=3, minor=6), 1000, 10) )
assert archive.compute_priority(
job.Phase(major=3, minor=1), 1000, 10
) > archive.compute_priority(job.Phase(major=3, minor=6), 1000, 10)

@@ -11,3 +11,3 @@ """Tests for plotman/configuration.py"""

@pytest.fixture(name='config_text')
@pytest.fixture(name="config_text")
def config_text_fixture() -> str:

@@ -17,15 +17,21 @@ return importlib.resources.read_text(plotman_resources, "plotman.yaml")

@pytest.fixture(name='target_definitions_text')
@pytest.fixture(name="target_definitions_text")
def target_definitions_text_fixture() -> str:
return importlib.resources.read_text(
plotman_resources, "target_definitions.yaml",
plotman_resources,
"target_definitions.yaml",
)
def test_get_validated_configs__default(config_text: str, target_definitions_text: str) -> None:
def test_get_validated_configs__default(
config_text: str, target_definitions_text: str
) -> None:
"""Check that get_validated_configs() works with default/example plotman.yaml file."""
res = configuration.get_validated_configs(config_text, '', target_definitions_text)
res = configuration.get_validated_configs(config_text, "", target_definitions_text)
assert isinstance(res, configuration.PlotmanConfig)
def test_get_validated_configs__malformed(config_text: str, target_definitions_text: str) -> None:
def test_get_validated_configs__malformed(
config_text: str, target_definitions_text: str
) -> None:
"""Check that get_validated_configs() raises exception with invalid plotman.yaml contents."""

@@ -39,3 +45,5 @@ loaded_yaml = yaml.load(config_text, Loader=yaml.SafeLoader)

with pytest.raises(configuration.ConfigurationException) as exc_info:
configuration.get_validated_configs(malformed_config_text, '/the_path', target_definitions_text)
configuration.get_validated_configs(
malformed_config_text, "/the_path", target_definitions_text
)

@@ -48,3 +56,3 @@ assert exc_info.value.args[0] == f"Config file at: '/the_path' is malformed"

with pytest.raises(configuration.ConfigurationException) as exc_info:
configuration.read_configuration_text('/invalid_path')
configuration.read_configuration_text("/invalid_path")

@@ -57,3 +65,5 @@ assert exc_info.value.args[0] == (

def test_loads_without_user_interface(config_text: str, target_definitions_text: str) -> None:
def test_loads_without_user_interface(
config_text: str, target_definitions_text: str
) -> None:
loaded_yaml = yaml.load(config_text, Loader=yaml.SafeLoader)

@@ -65,3 +75,5 @@

reloaded_yaml = configuration.get_validated_configs(stripped_config_text, '', target_definitions_text)
reloaded_yaml = configuration.get_validated_configs(
stripped_config_text, "", target_definitions_text
)

@@ -71,3 +83,5 @@ assert reloaded_yaml.user_interface == configuration.UserInterface()

def test_loads_without_user_archiving(config_text: str, target_definitions_text: str) -> None:
def test_loads_without_user_archiving(
config_text: str, target_definitions_text: str
) -> None:
loaded_yaml = yaml.load(config_text, Loader=yaml.SafeLoader)

@@ -79,3 +93,5 @@

reloaded_yaml = configuration.get_validated_configs(stripped_config_text, '', target_definitions_text)
reloaded_yaml = configuration.get_validated_configs(
stripped_config_text, "", target_definitions_text
)

@@ -86,4 +102,4 @@ assert reloaded_yaml.archiving is None

def test_get_dst_directories_gets_dst() -> None:
tmp = ['/tmp']
dst = ['/dst0', '/dst1']
tmp = ["/tmp"]
dst = ["/dst0", "/dst1"]
directories = configuration.Directories(tmp=tmp, dst=dst)

@@ -95,3 +111,3 @@

def test_get_dst_directories_gets_tmp() -> None:
tmp = ['/tmp']
tmp = ["/tmp"]
directories = configuration.Directories(tmp=tmp)

@@ -103,4 +119,4 @@

def test_dst_is_dst() -> None:
tmp = ['/tmp']
dst = ['/dst0', '/dst1']
tmp = ["/tmp"]
dst = ["/dst0", "/dst1"]
directories = configuration.Directories(tmp=tmp, dst=dst)

@@ -112,5 +128,5 @@

def test_dst_is_tmp() -> None:
tmp = ['/tmp']
tmp = ["/tmp"]
directories = configuration.Directories(tmp=tmp)
assert directories.dst_is_tmp()
import typing
# TODO: migrate away from unittest patch

@@ -19,83 +20,98 @@ from unittest.mock import patch

tmpdir_max_jobs=3,
tmp_overrides={"/mnt/tmp/04": configuration.TmpOverrides(tmpdir_max_jobs=4)}
tmp_overrides={"/mnt/tmp/04": configuration.TmpOverrides(tmpdir_max_jobs=4)},
)
@pytest.fixture
def dir_cfg() -> configuration.Directories:
return configuration.Directories(
tmp=["/var/tmp", "/tmp"],
dst=["/mnt/dst/00", "/mnt/dst/01", "/mnt/dst/03"]
tmp=["/var/tmp", "/tmp"], dst=["/mnt/dst/00", "/mnt/dst/01", "/mnt/dst/03"]
)
def test_permit_new_job_post_milestone(sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories) -> None:
phases = job.Phase.list_from_tuples([ (3, 8), (4, 1) ])
assert manager.phases_permit_new_job(
phases, '/mnt/tmp/00', sched_cfg, dir_cfg)
def test_permit_new_job_pre_milestone(sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories) -> None:
phases = job.Phase.list_from_tuples([ (2, 3), (4, 1) ])
assert not manager.phases_permit_new_job(
phases, '/mnt/tmp/00', sched_cfg, dir_cfg)
def test_permit_new_job_post_milestone(
sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories
) -> None:
phases = job.Phase.list_from_tuples([(3, 8), (4, 1)])
assert manager.phases_permit_new_job(phases, "/mnt/tmp/00", sched_cfg, dir_cfg)
def test_permit_new_job_too_many_jobs(sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories) -> None:
phases = job.Phase.list_from_tuples([ (3, 1), (3, 2), (3, 3) ])
assert not manager.phases_permit_new_job(
phases, '/mnt/tmp/00', sched_cfg, dir_cfg)
def test_permit_new_job_too_many_jobs_zerophase(sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories) -> None:
phases = job.Phase.list_from_tuples([ (3, 0), (3, 1), (3, 3) ])
assert not manager.phases_permit_new_job(
phases, '/mnt/tmp/00', sched_cfg, dir_cfg)
def test_permit_new_job_pre_milestone(
sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories
) -> None:
phases = job.Phase.list_from_tuples([(2, 3), (4, 1)])
assert not manager.phases_permit_new_job(phases, "/mnt/tmp/00", sched_cfg, dir_cfg)
def test_permit_new_job_too_many_jobs_nonephase(sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories) -> None:
phases = job.Phase.list_from_tuples([ (None, None), (3, 1), (3, 3) ])
assert manager.phases_permit_new_job(
phases, '/mnt/tmp/00', sched_cfg, dir_cfg)
def test_permit_new_job_override_tmp_dir(sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories) -> None:
phases = job.Phase.list_from_tuples([ (3, 1), (3, 2), (3, 3) ])
assert manager.phases_permit_new_job(
phases, '/mnt/tmp/04', sched_cfg, dir_cfg)
phases = job.Phase.list_from_tuples([ (3, 1), (3, 2), (3, 3), (3, 6) ])
assert not manager.phases_permit_new_job(
phases, '/mnt/tmp/04', sched_cfg,
dir_cfg)
def test_permit_new_job_too_many_jobs(
sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories
) -> None:
phases = job.Phase.list_from_tuples([(3, 1), (3, 2), (3, 3)])
assert not manager.phases_permit_new_job(phases, "/mnt/tmp/00", sched_cfg, dir_cfg)
@patch('plotman.job.Job')
def job_w_tmpdir_phase(tmpdir: str, phase: job.Phase, MockJob: typing.Any) -> typing.Any:
j = MockJob()
j.progress.return_value = phase
j.tmpdir = tmpdir
return j
@patch('plotman.job.Job')
def job_w_dstdir_phase(dstdir: str, phase: job.Phase, MockJob: typing.Any) -> typing.Any:
def test_permit_new_job_too_many_jobs_zerophase(
sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories
) -> None:
phases = job.Phase.list_from_tuples([(3, 0), (3, 1), (3, 3)])
assert not manager.phases_permit_new_job(phases, "/mnt/tmp/00", sched_cfg, dir_cfg)
def test_permit_new_job_too_many_jobs_nonephase(
sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories
) -> None:
phases = job.Phase.list_from_tuples([(None, None), (3, 1), (3, 3)])
assert manager.phases_permit_new_job(phases, "/mnt/tmp/00", sched_cfg, dir_cfg)
def test_permit_new_job_override_tmp_dir(
sched_cfg: configuration.Scheduling, dir_cfg: configuration.Directories
) -> None:
phases = job.Phase.list_from_tuples([(3, 1), (3, 2), (3, 3)])
assert manager.phases_permit_new_job(phases, "/mnt/tmp/04", sched_cfg, dir_cfg)
phases = job.Phase.list_from_tuples([(3, 1), (3, 2), (3, 3), (3, 6)])
assert not manager.phases_permit_new_job(phases, "/mnt/tmp/04", sched_cfg, dir_cfg)
@patch("plotman.job.Job")
def job_w_dstdir_phase(
dstdir: str, phase: job.Phase, MockJob: typing.Any
) -> typing.Any:
j = MockJob()
j.progress.return_value = phase
j.dstdir = dstdir
i = MockJob()
j.plotter.common_info.return_value = i
i.dstdir = dstdir
return j
def test_dstdirs_to_furthest_phase() -> None:
all_jobs = [ job_w_dstdir_phase('/plots1', job.Phase(1, 5)),
job_w_dstdir_phase('/plots2', job.Phase(1, 1)),
job_w_dstdir_phase('/plots2', job.Phase(3, 1)),
job_w_dstdir_phase('/plots2', job.Phase(2, 1)),
job_w_dstdir_phase('/plots3', job.Phase(4, 1)) ]
all_jobs = [
job_w_dstdir_phase("/plots1", job.Phase(1, 5)),
job_w_dstdir_phase("/plots2", job.Phase(1, 1)),
job_w_dstdir_phase("/plots2", job.Phase(3, 1)),
job_w_dstdir_phase("/plots2", job.Phase(2, 1)),
job_w_dstdir_phase("/plots3", job.Phase(4, 1)),
]
assert (manager.dstdirs_to_furthest_phase(all_jobs) ==
{ '/plots1' : job.Phase(1, 5),
'/plots2' : job.Phase(3, 1),
'/plots3' : job.Phase(4, 1) } )
assert manager.dstdirs_to_furthest_phase(all_jobs) == {
"/plots1": job.Phase(1, 5),
"/plots2": job.Phase(3, 1),
"/plots3": job.Phase(4, 1),
}
def test_dstdirs_to_youngest_phase() -> None:
all_jobs = [ job_w_dstdir_phase('/plots1', job.Phase(1, 5)),
job_w_dstdir_phase('/plots2', job.Phase(1, 1)),
job_w_dstdir_phase('/plots2', job.Phase(3, 1)),
job_w_dstdir_phase('/plots2', job.Phase(2, 1)),
job_w_dstdir_phase('/plots3', job.Phase(4, 1)) ]
all_jobs = [
job_w_dstdir_phase("/plots1", job.Phase(1, 5)),
job_w_dstdir_phase("/plots2", job.Phase(1, 1)),
job_w_dstdir_phase("/plots2", job.Phase(3, 1)),
job_w_dstdir_phase("/plots2", job.Phase(2, 1)),
job_w_dstdir_phase("/plots3", job.Phase(4, 1)),
]
assert (manager.dstdirs_to_youngest_phase(all_jobs) ==
{ '/plots1' : job.Phase(1, 5),
'/plots2' : job.Phase(1, 1),
'/plots3' : job.Phase(4, 1) } )
assert manager.dstdirs_to_youngest_phase(all_jobs) == {
"/plots1": job.Phase(1, 5),
"/plots2": job.Phase(1, 1),
"/plots3": job.Phase(4, 1),
}

@@ -10,62 +10,68 @@ import os

def test_human_format() -> None:
assert (plot_util.human_format(3442000000, 0) == '3G')
assert (plot_util.human_format(3542000, 2) == '3.54M')
assert (plot_util.human_format(354, 0) == '354')
assert (plot_util.human_format(354, 0, True) == '354')
assert (plot_util.human_format(354, 2) == '354.00')
assert (plot_util.human_format(422399296143, 2) == '422.40G')
assert (plot_util.human_format(422399296143, 2, True) == '393.39Gi')
assert plot_util.human_format(3442000000, 0) == "3G"
assert plot_util.human_format(3542000, 2) == "3.54M"
assert plot_util.human_format(354, 0) == "354"
assert plot_util.human_format(354, 0, True) == "354"
assert plot_util.human_format(354, 2) == "354.00"
assert plot_util.human_format(422399296143, 2) == "422.40G"
assert plot_util.human_format(422399296143, 2, True) == "393.39Gi"
def test_time_format() -> None:
assert (plot_util.time_format(34) == '34s')
assert (plot_util.time_format(59) == '59s')
assert (plot_util.time_format(60) == '0:01')
assert (plot_util.time_format(119) == '0:01')
assert (plot_util.time_format(120) == '0:02')
assert (plot_util.time_format(3694) == '1:01')
assert plot_util.time_format(34) == "34s"
assert plot_util.time_format(59) == "59s"
assert plot_util.time_format(60) == "0:01"
assert plot_util.time_format(119) == "0:01"
assert plot_util.time_format(120) == "0:02"
assert plot_util.time_format(3694) == "1:01"
def test_split_path_prefix() -> None:
assert (plot_util.split_path_prefix( [] ) ==
('', []) )
assert (plot_util.split_path_prefix([ '/a/0', '/b/1', '/c/2' ]) ==
('', ['/a/0', '/b/1', '/c/2']) )
assert ( plot_util.split_path_prefix([ '/a/b/0', '/a/b/1', '/a/b/2' ]) ==
('/a/b', ['0', '1', '2']) )
assert plot_util.split_path_prefix([]) == ("", [])
assert plot_util.split_path_prefix(["/a/0", "/b/1", "/c/2"]) == (
"",
["/a/0", "/b/1", "/c/2"],
)
assert plot_util.split_path_prefix(["/a/b/0", "/a/b/1", "/a/b/2"]) == (
"/a/b",
["0", "1", "2"],
)
def test_columns() -> None:
assert (plot_util.column_wrap(list(range(8)), 3, filler='--') ==
[ [ 0, 3, 6 ],
[ 1, 4, 7 ],
[ 2, 5, '--'] ] )
assert (plot_util.column_wrap(list(range(9)), 3, filler='--') ==
[ [ 0, 3, 6 ],
[ 1, 4, 7 ],
[ 2, 5, 8 ] ] )
assert (plot_util.column_wrap(list(range(3)), 1, filler='--') ==
[ [ 0 ],
[ 1 ],
[ 2 ] ] )
assert plot_util.column_wrap(list(range(8)), 3, filler="--") == [
[0, 3, 6],
[1, 4, 7],
[2, 5, "--"],
]
assert plot_util.column_wrap(list(range(9)), 3, filler="--") == [
[0, 3, 6],
[1, 4, 7],
[2, 5, 8],
]
assert plot_util.column_wrap(list(range(3)), 1, filler="--") == [[0], [1], [2]]
def test_list_plots(fs: pyfakefs.fake_filesystem.FakeFilesystem) -> None:
fs.create_file('/t/plot-k32-0.plot', st_size=108 * GB)
fs.create_file('/t/plot-k32-1.plot', st_size=108 * GB)
fs.create_file('/t/.plot-k32-2.plot', st_size=108 * GB)
fs.create_file('/t/plot-k32-3.plot.2.tmp', st_size=108 * GB)
fs.create_file('/t/plot-k32-4.plot', st_size=100 * GB)
fs.create_file('/t/plot-k32-5.plot', st_size=108 * GB)
fs.create_file("/t/plot-k32-0.plot", st_size=108 * GB)
fs.create_file("/t/plot-k32-1.plot", st_size=108 * GB)
fs.create_file("/t/.plot-k32-2.plot", st_size=108 * GB)
fs.create_file("/t/plot-k32-3.plot.2.tmp", st_size=108 * GB)
fs.create_file("/t/plot-k32-4.plot", st_size=100 * GB)
fs.create_file("/t/plot-k32-5.plot", st_size=108 * GB)
fs.create_file('/t/plot-k33-6.plot', st_size=108 * GB)
fs.create_file('/t/plot-k33-7.plot', st_size=216 * GB)
fs.create_file("/t/plot-k33-6.plot", st_size=108 * GB)
fs.create_file("/t/plot-k33-7.plot", st_size=216 * GB)
assert (plot_util.list_plots('/t/') ==
[ '/t/plot-k32-0.plot',
'/t/plot-k32-1.plot',
'/t/plot-k32-5.plot',
'/t/plot-k33-7.plot' ] )
assert plot_util.list_plots("/t/") == [
"/t/plot-k32-0.plot",
"/t/plot-k32-1.plot",
"/t/plot-k32-5.plot",
"/t/plot-k33-7.plot",
]
def test_get_plotsize() -> None:
assert (
[659272492, 107287518791, 221143636517, 455373353413, 936816632588]
== [plot_util.get_plotsize(n) for n in [25, 32, 33, 34, 35]]
)
assert [659272492, 107287518791, 221143636517, 455373353413, 936816632588] == [
plot_util.get_plotsize(n) for n in [25, 32, 33, 34, 35]
]

@@ -11,22 +11,29 @@ # TODO: migrate away from unittest patch

def test_phases_str_basic() -> None:
phases = job.Phase.list_from_tuples([(1,2), (2,3), (3,4), (4,0)])
assert reporting.phases_str(phases) == '1:2 2:3 3:4 4:0'
phases = job.Phase.list_from_tuples([(1, 2), (2, 3), (3, 4), (4, 0)])
assert reporting.phases_str(phases) == "1:2 2:3 3:4 4:0"
def test_phases_str_elipsis_1() -> None:
phases = job.Phase.list_from_tuples([(1,2), (2,3), (3,4), (4,0)])
assert reporting.phases_str(phases, 3) == '1:2 [+1] 3:4 4:0'
phases = job.Phase.list_from_tuples([(1, 2), (2, 3), (3, 4), (4, 0)])
assert reporting.phases_str(phases, 3) == "1:2 [+1] 3:4 4:0"
def test_phases_str_elipsis_2() -> None:
phases = job.Phase.list_from_tuples([(1,2), (2,3), (3,4), (4,0)])
assert reporting.phases_str(phases, 2) == '1:2 [+2] 4:0'
phases = job.Phase.list_from_tuples([(1, 2), (2, 3), (3, 4), (4, 0)])
assert reporting.phases_str(phases, 2) == "1:2 [+2] 4:0"
def test_phases_str_none() -> None:
phases = job.Phase.list_from_tuples([(None, None), (3, 0)])
assert reporting.phases_str(phases) == '?:? 3:0'
assert reporting.phases_str(phases) == "?:? 3:0"
def test_job_viz_empty() -> None:
assert(reporting.job_viz([]) == '1 2 3 4 ')
assert reporting.job_viz([]) == "1 2 3 4 "
@patch('plotman.job.Job')
def job_w_phase(ph: typing.Tuple[typing.Optional[int], typing.Optional[int]], MockJob: Mock) -> Mock:
@patch("plotman.job.Job")
def job_w_phase(
ph: typing.Tuple[typing.Optional[int], typing.Optional[int]], MockJob: Mock
) -> Mock:
j = MockJob()

@@ -36,48 +43,58 @@ j.progress.return_value = job.Phase.from_tuple(ph)

def test_job_viz_positions() -> None:
jobs = [job_w_phase((1, 1)),
job_w_phase((2, 0)),
job_w_phase((2, 4)),
job_w_phase((2, 7)),
job_w_phase((4, 0))]
jobs = [
job_w_phase((1, 1)),
job_w_phase((2, 0)),
job_w_phase((2, 4)),
job_w_phase((2, 7)),
job_w_phase((4, 0)),
]
assert(reporting.job_viz(jobs) == '1 . 2. . .3 4.') # type: ignore[arg-type]
assert reporting.job_viz(jobs) == "1 . 2. . .3 4." # type: ignore[arg-type]
def test_job_viz_counts() -> None:
jobs = [job_w_phase((2, 2)),
job_w_phase((2, 3)),
job_w_phase((2, 3)),
job_w_phase((2, 4)),
job_w_phase((2, 4)),
job_w_phase((2, 4)),
job_w_phase((2, 5)),
job_w_phase((2, 5)),
job_w_phase((2, 5)),
job_w_phase((2, 5)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
]
jobs = [
job_w_phase((2, 2)),
job_w_phase((2, 3)),
job_w_phase((2, 3)),
job_w_phase((2, 4)),
job_w_phase((2, 4)),
job_w_phase((2, 4)),
job_w_phase((2, 5)),
job_w_phase((2, 5)),
job_w_phase((2, 5)),
job_w_phase((2, 5)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
job_w_phase((3, 1)),
]
assert(reporting.job_viz(jobs) == '1 2 .:;! 3 ! 4 ') # type: ignore[arg-type]
assert reporting.job_viz(jobs) == "1 2 .:;! 3 ! 4 " # type: ignore[arg-type]
def test_to_prometheus_format() -> None:
prom_stati = [
('foo="bar",baz="2"', {'metric1': 1, 'metric2': 2}),
('foo="blubb",baz="3"', {'metric1': 2, 'metric2': 3})
('foo="bar",baz="2"', {"metric1": 1, "metric2": 2}),
('foo="blubb",baz="3"', {"metric1": 2, "metric2": 3}),
]
metrics = {'metric1': 'This is foo', 'metric2': 'In a parallel universe this is foo'}
metrics = {
"metric1": "This is foo",
"metric2": "In a parallel universe this is foo",
}
expected = [
'# HELP metric1 This is foo.',
'# TYPE metric1 gauge',
"# HELP metric1 This is foo.",
"# TYPE metric1 gauge",
'metric1{foo="bar",baz="2"} 1',
'metric1{foo="blubb",baz="3"} 2',
'# HELP metric2 In a parallel universe this is foo.',
'# TYPE metric2 gauge',
'metric2{foo="bar",baz="2"} 2','metric2{foo="blubb",baz="3"} 3'
"# HELP metric2 In a parallel universe this is foo.",
"# TYPE metric2 gauge",
'metric2{foo="bar",baz="2"} 2',
'metric2{foo="blubb",baz="3"} 3',
]
result = reporting.to_prometheus_format(metrics, prom_stati)
assert(result == expected)
assert result == expected

@@ -12,8 +12,10 @@ import os

def analyze(logfilenames: typing.List[str], clipterminals: bool, bytmp: bool, bybitfield: bool) -> None:
def analyze(
logfilenames: typing.List[str], clipterminals: bool, bytmp: bool, bybitfield: bool
) -> None:
data: typing.Dict[str, typing.Dict[str, typing.List[float]]] = {}
for logfilename in logfilenames:
with open(logfilename, 'r') as f:
with open(logfilename, "r") as f:
# Record of slicing and data associated with the slice
sl = 'x' # Slice key
sl = "x" # Slice key
phase_time: typing.Dict[str, float] = {} # Map from phase index to time

@@ -30,6 +32,6 @@ n_sorts = 0

# 2021-04-08T13:33:43.542 chia.plotting.create_plots : INFO Starting plot 1/5
m = re.search(r'Starting plot (\d*)/(\d*)', line)
m = re.search(r"Starting plot (\d*)/(\d*)", line)
if m:
# (re)-initialize data structures
sl = 'x' # Slice key
sl = "x" # Slice key
phase_time = {} # Map from phase index to time

@@ -45,3 +47,3 @@ n_sorts = 0

# Starting plotting progress into temporary dirs: /mnt/tmp/01 and /mnt/tmp/a
m = re.search(r'^Starting plotting.*dirs: (.*) and (.*)', line)
m = re.search(r"^Starting plotting.*dirs: (.*) and (.*)", line)
if m:

@@ -51,3 +53,3 @@ # Record tmpdir, if slicing by it

tmpdir = m.group(1)
sl += '-' + tmpdir
sl += "-" + tmpdir

@@ -58,19 +60,21 @@ # Bitfield marker. Sample log line(s):

# Starting phase 2/4: Backpropagation into tmp files... Fri Apr 2 03:17:32 2021
m = re.search(r'^Starting phase 2/4: Backpropagation', line)
m = re.search(r"^Starting phase 2/4: Backpropagation", line)
if bybitfield and m:
if 'without bitfield' in line:
sl += '-nobitfield'
if "without bitfield" in line:
sl += "-nobitfield"
else:
sl += '-bitfield'
sl += "-bitfield"
# CHIA: Phase timing. Sample log line:
# Time for phase 1 = 22796.7 seconds. CPU (98%) Tue Sep 29 17:57:19 2020
for phase in ['1', '2', '3', '4']:
m = re.search(r'^Time for phase ' + phase + ' = (\d+.\d+) seconds..*', line)
for phase in ["1", "2", "3", "4"]:
m = re.search(
r"^Time for phase " + phase + " = (\d+.\d+) seconds..*", line
)
if m:
phase_time[phase] = float(m.group(1))
# MADMAX: Phase timing. Sample log line: "Phase 2 took 2193.37 sec"
for phase in ['1', '2', '3', '4']:
m = re.search(r'^Phase ' + phase + ' took (\d+.\d+) sec.*', line)
for phase in ["1", "2", "3", "4"]:
m = re.search(r"^Phase " + phase + " took (\d+.\d+) sec.*", line)
if m:

@@ -85,12 +89,12 @@ phase_time[phase] = float(m.group(1))

# Bucket 511 QS. Ram: 0.920GiB, u_sort min: 0.375GiB, qs min: 0.094GiB. force_qs: 1
m = re.search(r'Bucket \d+ ([^\.]+)\..*', line)
if m and not 'force_qs' in line:
m = re.search(r"Bucket \d+ ([^\.]+)\..*", line)
if m and not "force_qs" in line:
sorter = m.group(1)
n_sorts += 1
if sorter == 'uniform sort':
if sorter == "uniform sort":
n_uniform += 1
elif sorter == 'QS':
elif sorter == "QS":
pass
else:
print ('Warning: unrecognized sort ' + sorter)
print("Warning: unrecognized sort " + sorter)

@@ -100,3 +104,3 @@ # CHIA: Job completion. Record total time in sliced data store.

# Total time = 49487.1 seconds. CPU (97.26%) Wed Sep 30 01:22:10 2020
m = re.search(r'^Total time = (\d+.\d+) seconds.*', line)
m = re.search(r"^Total time = (\d+.\d+) seconds.*", line)
if m:

@@ -106,22 +110,34 @@ if clipterminals and is_first_last:

else:
data.setdefault(sl, {}).setdefault('total time', []).append(float(m.group(1)))
for phase in ['1', '2', '3', '4']:
data.setdefault(sl, {}).setdefault('phase ' + phase, []).append(phase_time[phase])
data.setdefault(sl, {}).setdefault('%usort', []).append(100 * n_uniform // n_sorts)
data.setdefault(sl, {}).setdefault("total time", []).append(
float(m.group(1))
)
for phase in ["1", "2", "3", "4"]:
data.setdefault(sl, {}).setdefault(
"phase " + phase, []
).append(phase_time[phase])
data.setdefault(sl, {}).setdefault("%usort", []).append(
100 * n_uniform // n_sorts
)
# MADMAX: Job completion. Record total time in sliced data store.
# Sample log line: "Total plot creation time was 2530.76 sec"
m = re.search(r'^Total plot creation time was (\d+.\d+) sec.*', line)
m = re.search(r"^Total plot creation time was (\d+.\d+) sec.*", line)
if m:
data.setdefault(sl, {}).setdefault('total time', []).append(float(m.group(1)))
for phase in ['1', '2', '3', '4']:
data.setdefault(sl, {}).setdefault('phase ' + phase, []).append(phase_time[phase])
data.setdefault(sl, {}).setdefault('%usort', []).append(0) # Not available for MADMAX
data.setdefault(sl, {}).setdefault("total time", []).append(
float(m.group(1))
)
for phase in ["1", "2", "3", "4"]:
data.setdefault(sl, {}).setdefault("phase " + phase, []).append(
phase_time[phase]
)
data.setdefault(sl, {}).setdefault("%usort", []).append(
0
) # Not available for MADMAX
# Prepare report
tab = tt.Texttable()
all_measures = ['%usort', 'phase 1', 'phase 2', 'phase 3', 'phase 4', 'total time']
headings = ['Slice', 'n'] + all_measures
all_measures = ["%usort", "phase 1", "phase 2", "phase 3", "phase 4", "total time"]
headings = ["Slice", "n"] + all_measures
tab.header(headings)
for sl in data.keys():

@@ -138,5 +154,5 @@ row = [sl]

if sample_size_lower_bound == sample_size_upper_bound:
row.append('%d' % sample_size_lower_bound)
row.append("%d" % sample_size_lower_bound)
else:
row.append('%d-%d' % (sample_size_lower_bound, sample_size_upper_bound))
row.append("%d-%d" % (sample_size_lower_bound, sample_size_upper_bound))

@@ -146,19 +162,20 @@ # Phase timings

values = data.get(sl, {}).get(measure, [])
if(len(values) > 1):
row.append('μ=%s σ=%s' % (
plot_util.human_format(statistics.mean(values), 1),
plot_util.human_format(statistics.stdev(values), 0)
))
elif(len(values) == 1):
if len(values) > 1:
row.append(
"μ=%s σ=%s"
% (
plot_util.human_format(statistics.mean(values), 1),
plot_util.human_format(statistics.stdev(values), 0),
)
)
elif len(values) == 1:
row.append(plot_util.human_format(values[0], 1))
else:
row.append('N/A')
row.append("N/A")
tab.add_row(row)
(rows, columns) = os.popen('stty size', 'r').read().split()
(rows, columns) = os.popen("stty size", "r").read().split()
tab.set_max_width(int(columns))
s = tab.draw()
print(s)

@@ -21,12 +21,18 @@ import argparse

logger = logging.getLogger(__name__)
disk_space_logger = logging.getLogger("disk_space")
_WINDOWS = sys.platform == 'win32'
_WINDOWS = sys.platform == "win32"
# TODO : write-protect and delete-protect archived plots
def spawn_archive_process(dir_cfg: configuration.Directories, arch_cfg: configuration.Archiving, log_cfg: configuration.Logging, all_jobs: typing.List[job.Job]) -> typing.Tuple[typing.Union[bool, str, typing.Dict[str, object]], typing.List[str]]:
'''Spawns a new archive process using the command created
in the archive() function. Returns archiving status and a log message to print.'''
def spawn_archive_process(
dir_cfg: configuration.Directories,
arch_cfg: configuration.Archiving,
log_cfg: configuration.Logging,
all_jobs: typing.List[job.Job],
) -> typing.Tuple[typing.Union[bool, str, typing.Dict[str, object]], typing.List[str]]:
"""Spawns a new archive process using the command created
in the archive() function. Returns archiving status and a log message to print."""
log_messages = []

@@ -37,6 +43,10 @@ archiving_status = None

# even though the scheduler should only run one at a time.
arch_jobs: typing.List[typing.Union[int, str]] = [*get_running_archive_jobs(arch_cfg)]
arch_jobs: typing.List[typing.Union[int, str]] = [
*get_running_archive_jobs(arch_cfg)
]
if not arch_jobs:
(should_start, status_or_cmd, archive_log_messages) = archive(dir_cfg, arch_cfg, all_jobs)
(should_start, status_or_cmd, archive_log_messages) = archive(
dir_cfg, arch_cfg, all_jobs
)
log_messages.extend(archive_log_messages)

@@ -50,10 +60,12 @@ if not should_start:

log_messages.append(f'Starting archive: {args["args"]} ; logging to {log_file_path}')
log_messages.append(
f'Starting archive: {args["args"]} ; logging to {log_file_path}'
)
# TODO: CAMPid 09840103109429840981397487498131
try:
open_log_file = open(log_file_path, 'x')
open_log_file = open(log_file_path, "x")
except FileExistsError:
log_messages.append(
f'Archiving log file already exists, skipping attempt to start a'
f' new archive transfer: {log_file_path!r}'
f"Archiving log file already exists, skipping attempt to start a"
f" new archive transfer: {log_file_path!r}"
)

@@ -63,4 +75,4 @@ return (False, log_messages)

message = (
f'Unable to open log file. Verify that the directory exists'
f' and has proper write permissions: {log_file_path!r}'
f"Unable to open log file. Verify that the directory exists"
f" and has proper write permissions: {log_file_path!r}"
)

@@ -75,3 +87,3 @@ raise Exception(message) from e

if sys.platform == 'win32':
if sys.platform == "win32":
creationflags = subprocess.CREATE_NO_WINDOW

@@ -83,3 +95,4 @@ else:

# start_new_sessions to make the job independent of this controlling tty.
p = subprocess.Popen(**args, # type: ignore[call-overload]
p = subprocess.Popen( # type: ignore[call-overload]
**args,
shell=True,

@@ -89,3 +102,4 @@ stdout=open_log_file,

start_new_session=True,
creationflags=creationflags)
creationflags=creationflags,
)
# At least for now it seems that even if we get a new running

@@ -97,9 +111,10 @@ # archive jobs list it doesn't contain the new rsync process.

# will get filled on the next cycle.
arch_jobs.append('<pending>')
arch_jobs.append("<pending>")
if archiving_status is None:
archiving_status = 'pid: ' + ', '.join(map(str, arch_jobs))
archiving_status = "pid: " + ", ".join(map(str, arch_jobs))
return archiving_status, log_messages
def compute_priority(phase: job.Phase, gb_free: float, n_plots: int) -> int:

@@ -115,16 +130,16 @@ # All these values are designed around dst buffer dirs of about

# ignore.
if (phase.known):
if (phase == job.Phase(3, 4)):
if phase.known:
if phase == job.Phase(3, 4):
priority -= 4
elif (phase == job.Phase(3, 5)):
elif phase == job.Phase(3, 5):
priority -= 8
elif (phase == job.Phase(3, 6)):
elif phase == job.Phase(3, 6):
priority -= 16
elif (phase >= job.Phase(3, 7)):
elif phase >= job.Phase(3, 7):
priority -= 32
# If a drive is getting full, we should prioritize it
if (gb_free < 1000):
if gb_free < 1000:
priority += 1 + int((1000 - gb_free) / 100)
if (gb_free < 500):
if gb_free < 500:
priority += 1 + int((500 - gb_free) / 100)

@@ -138,3 +153,6 @@

def get_archdir_freebytes(arch_cfg: configuration.Archiving) -> typing.Tuple[typing.Dict[str, int], typing.List[str]]:
def get_archdir_freebytes(
arch_cfg: configuration.Archiving,
) -> typing.Tuple[typing.Dict[str, int], typing.List[str]]:
log_messages = []

@@ -154,19 +172,19 @@ target = arch_cfg.target_definition()

except subprocess.TimeoutExpired as e:
log_messages.append(f'Disk space check timed out in {timeout} seconds')
log_messages.append(f"Disk space check timed out in {timeout} seconds")
if e.stdout is None:
stdout = ''
stdout = ""
else:
stdout = e.stdout.decode('utf-8', errors='ignore').strip()
stdout = e.stdout.decode("utf-8", errors="ignore").strip()
if e.stderr is None:
stderr = ''
stderr = ""
else:
stderr = e.stderr.decode('utf-8', errors='ignore').strip()
stderr = e.stderr.decode("utf-8", errors="ignore").strip()
else:
stdout = completed_process.stdout.decode('utf-8', errors='ignore').strip()
stderr = completed_process.stderr.decode('utf-8', errors='ignore').strip()
stdout = completed_process.stdout.decode("utf-8", errors="ignore").strip()
stderr = completed_process.stderr.decode("utf-8", errors="ignore").strip()
for line in stdout.splitlines():
line = line.strip()
split = line.split(':')
split = line.split(":")
if len(split) != 2:
log_messages.append(f'Unable to parse disk script line: {line!r}')
log_messages.append(f"Unable to parse disk script line: {line!r}")
continue

@@ -178,18 +196,19 @@ archdir, space = split

for line in log_messages:
logger.info(line)
disk_space_logger.info(line)
logger.info('stdout from disk space script:')
disk_space_logger.info("stdout from disk space script:")
for line in stdout.splitlines():
logger.info(f' {line}')
disk_space_logger.info(f" {line}")
logger.info('stderr from disk space script:')
disk_space_logger.info("stderr from disk space script:")
for line in stderr.splitlines():
logger.info(f' {line}')
disk_space_logger.info(f" {line}")
return archdir_freebytes, log_messages
# TODO: maybe consolidate with similar code in job.py?
def get_running_archive_jobs(arch_cfg: configuration.Archiving) -> typing.List[int]:
'''Look for running rsync jobs that seem to match the pattern we use for archiving
them. Return a list of PIDs of matching jobs.'''
"""Look for running rsync jobs that seem to match the pattern we use for archiving
them. Return a list of PIDs of matching jobs."""
jobs = []

@@ -210,7 +229,14 @@ target = arch_cfg.target_definition()

def archive(dir_cfg: configuration.Directories, arch_cfg: configuration.Archiving, all_jobs: typing.List[job.Job]) -> typing.Tuple[bool, typing.Optional[typing.Union[typing.Dict[str, object], str]], typing.List[str]]:
'''Configure one archive job. Needs to know all jobs so it can avoid IO
def archive(
dir_cfg: configuration.Directories,
arch_cfg: configuration.Archiving,
all_jobs: typing.List[job.Job],
) -> typing.Tuple[
bool, typing.Optional[typing.Union[typing.Dict[str, object], str]], typing.List[str]
]:
"""Configure one archive job. Needs to know all jobs so it can avoid IO
contention on the plotting dstdir drives. Returns either (False, <reason>)
if we should not execute an archive job or (True, <cmd>) with the archive
command if we should.'''
command if we should."""
log_messages: typing.List[str] = []

@@ -235,3 +261,3 @@ if arch_cfg is None:

if not chosen_plot:
return (False, 'No plots found', log_messages)
return (False, "No plots found", log_messages)

@@ -247,5 +273,5 @@ # TODO: sanity check that archive machine is available

if not archdir_freebytes:
return(False, 'No free archive dirs found.', log_messages)
return (False, "No free archive dirs found.", log_messages)
archdir = ''
archdir = ""
chosen_plot_size = os.stat(chosen_plot).st_size

@@ -255,4 +281,7 @@ # 10MB is big enough to outsize filesystem block sizes hopefully, but small

free_space_margin = 10_000_000
available = [(d, space) for (d, space) in archdir_freebytes.items() if
space > (chosen_plot_size + free_space_margin)]
available = [
(d, space)
for (d, space) in archdir_freebytes.items()
if space > (chosen_plot_size + free_space_margin)
]
if len(available) > 0:

@@ -263,3 +292,7 @@ index = arch_cfg.index % len(available)

if not archdir:
return(False, 'No archive directories found with enough free space', log_messages)
return (
False,
"No archive directories found with enough free space",
log_messages,
)

@@ -271,6 +304,6 @@ env = arch_cfg.environment(

subprocess_arguments: typing.Dict[str, object] = {
'args': arch_cfg.target_definition().transfer_path,
'env': {**os.environ, **env}
"args": arch_cfg.target_definition().transfer_path,
"env": {**os.environ, **env},
}
return (True, subprocess_arguments, log_messages)

@@ -6,25 +6,25 @@ # version = 1.0.2

# Unique plot id which will be used as a ChaCha8 key, and determines the PoSpace.
kIdLen = 32;
kIdLen = 32
# Distance between matching entries is stored in the offset
kOffsetSize = 10;
kOffsetSize = 10
# Max matches a single entry can have, used for hardcoded memory allocation
kMaxMatchesSingleEntry = 30;
kMinBuckets = 16;
kMaxBuckets = 128;
kMaxMatchesSingleEntry = 30
kMinBuckets = 16
kMaxBuckets = 128
# During backprop and compress, the write pointer is ahead of the read pointer
# Note that the large the offset, the higher these values must be
kReadMinusWrite = 1 << kOffsetSize;
kCachedPositionsSize = kReadMinusWrite * 4;
kReadMinusWrite = 1 << kOffsetSize
kCachedPositionsSize = kReadMinusWrite * 4
# Must be set high enough to prevent attacks of fast plotting
kMinPlotSize = 18;
kMinPlotSize = 18
# Set to 50 since k + kExtraBits + k*4 must not exceed 256 (BLAKE3 output size)
kMaxPlotSize = 50;
kMaxPlotSize = 50
# The amount of spare space used for sort on disk (multiplied time memory buffer size)
kSpareMultiplier = 5;
kSpareMultiplier = 5

@@ -34,27 +34,27 @@ # The proportion of memory to allocate to the Sort Manager for reading in buckets and sorting them

# number also allows a higher proportion for writing, which reduces seeks for HDD.
kMemSortProportion = 0.75;
kMemSortProportionLinePoint = 0.85;
kMemSortProportion = 0.75
kMemSortProportionLinePoint = 0.85
# How many f7s per C1 entry, and how many C1 entries per C2 entry
kCheckpoint1Interval = 10000;
kCheckpoint2Interval = 10000;
kCheckpoint1Interval = 10000
kCheckpoint2Interval = 10000
# F1 evaluations are done in batches of 2^kBatchSizes
kBatchSizes = 8;
kBatchSizes = 8
# EPP for the final file, the higher this is, the less variability, and lower delta
# Note: if this is increased, ParkVector size must increase
kEntriesPerPark = 2048;
kEntriesPerPark = 2048
# To store deltas for EPP entries, the average delta must be less than this number of bits
kMaxAverageDeltaTable1 = 5.6;
kMaxAverageDelta = 3.5;
kMaxAverageDeltaTable1 = 5.6
kMaxAverageDelta = 3.5
# C3 entries contain deltas for f7 values, the max average size is the following
kC3BitsPerEntry = 2.4;
kC3BitsPerEntry = 2.4
# The number of bits in the stub is k minus this value
kStubMinusBits = 3;
kStubMinusBits = 3
#end ported code
# end ported code

@@ -66,3 +66,5 @@ # version = 1.0.2

def ByteAlign(num_bits: float) -> float:
return (num_bits + (8 - ((num_bits) % 8)) % 8)
return num_bits + (8 - ((num_bits) % 8)) % 8
# end ported code

@@ -77,15 +79,23 @@

# This is the full size of the deltas section in a park. However, it will not be fully filled
def CalculateMaxDeltasSize(k: int, table_index: int) -> float:
if (table_index == 1):
if table_index == 1:
return ByteAlign((kEntriesPerPark - 1) * kMaxAverageDeltaTable1) / 8
return ByteAlign((kEntriesPerPark - 1) * kMaxAverageDelta) / 8
def CalculateStubsSize(k: int) -> float:
return ByteAlign((kEntriesPerPark - 1) * (k - kStubMinusBits)) / 8
def CalculateParkSize(k: int, table_index: int) -> float:
return CalculateLinePointSize(k) + CalculateStubsSize(k) + CalculateMaxDeltasSize(k, table_index);
return (
CalculateLinePointSize(k)
+ CalculateStubsSize(k)
+ CalculateMaxDeltasSize(k, table_index)
)
# end ported code

@@ -5,3 +5,2 @@ import contextlib

import stat
import subprocess
import tempfile

@@ -14,2 +13,3 @@ import textwrap

import desert
# TODO: should be a desert.ib() but mypy doesn't understand it then, see below

@@ -20,3 +20,2 @@ import desert._make

import marshmallow.validate
import packaging.version
import pendulum

@@ -26,2 +25,5 @@ import yaml

from plotman import resources as plotman_resources
import plotman.plotters.bladebit
import plotman.plotters.chianetwork
import plotman.plotters.madmax

@@ -50,3 +52,5 @@

def get_validated_configs(config_text: str, config_path: str, preset_target_definitions_text: str) -> "PlotmanConfig":
def get_validated_configs(
config_text: str, config_path: str, preset_target_definitions_text: str
) -> "PlotmanConfig":
"""Return a validated instance of PlotmanConfig with data from plotman.yaml

@@ -59,3 +63,3 @@

version = config_objects.get('version', (0,))
version = config_objects.get("version", (0,))

@@ -65,6 +69,8 @@ expected_major_version = 2

if version[0] != expected_major_version:
message = textwrap.dedent(f"""\
message = textwrap.dedent(
f"""\
Expected major version {expected_major_version}, found version {version}
See https://github.com/ericaltendorf/plotman/wiki/Configuration#versions
""")
"""
)

@@ -81,3 +87,24 @@ raise Exception(message)

if loaded.plotting.type == "chia":
if loaded.plotting.type == "bladebit":
if loaded.plotting.bladebit is None:
# TODO: fix all the `TODO: use the configured executable` so this is not
# needed.
raise ConfigurationException(
"BladeBit selected as plotter but plotting: bladebit: was not specified in the config",
)
if (
loaded.plotting.pool_pk is not None
and loaded.plotting.pool_contract_address is not None
):
raise ConfigurationException(
"BladeBit plotter accepts up to one of plotting: pool_pk: and pool_contract_address: but both are specified",
)
executable_name = os.path.basename(loaded.plotting.bladebit.executable)
if executable_name != "bladebit":
raise ConfigurationException(
"plotting: bladebit: executable: must refer to an executable named bladebit"
)
elif loaded.plotting.type == "chia":
if loaded.plotting.chia is None:

@@ -90,3 +117,6 @@ # TODO: fix all the `TODO: use the configured executable` so this is not

if loaded.plotting.pool_pk is not None and loaded.plotting.pool_contract_address is not None:
if (
loaded.plotting.pool_pk is not None
and loaded.plotting.pool_contract_address is not None
):
raise ConfigurationException(

@@ -112,7 +142,13 @@ "Chia Network plotter accepts up to one of plotting: pool_pk: and pool_contract_address: but both are specified",

if loaded.plotting.pool_pk is None and loaded.plotting.pool_contract_address is None:
if (
loaded.plotting.pool_pk is None
and loaded.plotting.pool_contract_address is None
):
raise ConfigurationException(
"madMAx plotter requires one of plotting: pool_pk: or pool_contract_address: to be specified but neither is",
)
elif loaded.plotting.pool_pk is not None and loaded.plotting.pool_contract_address is not None:
elif (
loaded.plotting.pool_pk is not None
and loaded.plotting.pool_contract_address is not None
):
raise ConfigurationException(

@@ -142,4 +178,11 @@ "madMAx plotter accepts only one of plotting: pool_pk: and pool_contract_address: but both are specified",

class CustomStringField(marshmallow.fields.String):
def _deserialize(self, value: object, attr: Optional[str], data: Optional[Mapping[str, object]], **kwargs: Dict[str, object]) -> str:
def _deserialize(
self,
value: object,
attr: Optional[str],
data: Optional[Mapping[str, object]],
**kwargs: Dict[str, object],
) -> str:
if isinstance(value, int):

@@ -150,2 +193,3 @@ value = str(value)

# Data models used to deserializing/formatting plotman.yaml files.

@@ -164,3 +208,3 @@

desert._make._DESERT_SENTINEL: {
'marshmallow_field': marshmallow.fields.Dict(
"marshmallow_field": marshmallow.fields.Dict(
keys=marshmallow.fields.String(),

@@ -177,2 +221,3 @@ values=CustomStringField(allow_none=True),

@attr.frozen

@@ -182,2 +227,3 @@ class PresetTargetDefinitions:

# TODO: bah, mutable? bah.

@@ -193,3 +239,3 @@ @attr.mutable

desert._make._DESERT_SENTINEL: {
'marshmallow_field': marshmallow.fields.Dict(
"marshmallow_field": marshmallow.fields.Dict(
keys=marshmallow.fields.String(),

@@ -208,5 +254,5 @@ values=CustomStringField(),

def environment(
self,
source: Optional[str] = None,
destination: Optional[str] = None,
self,
source: Optional[str] = None,
destination: Optional[str] = None,
) -> Dict[str, str]:

@@ -217,5 +263,3 @@ target = self.target_definition()

complete = {
key: value
for key, value in maybe_complete.items()
if value is not None
key: value for key, value in maybe_complete.items() if value is not None
}

@@ -226,14 +270,16 @@

target_repr = repr(self.target)
missing = ', '.join(repr(key) for key in missing_mandatory_keys)
message = f'Missing env options for archival target {target_repr}: {missing}'
missing = ", ".join(repr(key) for key in missing_mandatory_keys)
message = (
f"Missing env options for archival target {target_repr}: {missing}"
)
raise Exception(message)
variables = {**os.environ, **complete}
complete['process_name'] = target.transfer_process_name.format(**variables)
complete["process_name"] = target.transfer_process_name.format(**variables)
if source is not None:
complete['source'] = source
complete["source"] = source
if destination is not None:
complete['destination'] = destination
complete["destination"] = destination

@@ -248,8 +294,10 @@ return complete

if target.disk_space_script is None:
raise Exception(f"One of `disk_space_path` or `disk_space_script` must be specified. Using target {self.target!r}")
raise Exception(
f"One of `disk_space_path` or `disk_space_script` must be specified. Using target {self.target!r}"
)
with tempfile.NamedTemporaryFile(
mode='w',
encoding='utf-8',
prefix='plotman-disk-space-script',
mode="w",
encoding="utf-8",
prefix="plotman-disk-space-script",
delete=False,

@@ -265,8 +313,10 @@ dir=temp,

if target.transfer_script is None:
raise Exception(f"One of `transfer_path` or `transfer_script` must be specified. Using target {self.target!r}")
raise Exception(
f"One of `transfer_path` or `transfer_script` must be specified. Using target {self.target!r}"
)
with tempfile.NamedTemporaryFile(
mode='w',
encoding='utf-8',
prefix='plotman-transfer-script',
mode="w",
encoding="utf-8",
prefix="plotman-transfer-script",
delete=False,

@@ -280,2 +330,3 @@ dir=temp,

@attr.frozen

@@ -288,7 +339,11 @@ class TmpOverrides:

@attr.frozen
class Logging:
plots: str = os.path.join(appdirs.user_data_dir("plotman"), 'plots')
transfers: str = os.path.join(appdirs.user_data_dir("plotman"), 'transfers')
application: str = os.path.join(appdirs.user_log_dir("plotman"), 'plotman.log')
plots: str = os.path.join(appdirs.user_data_dir("plotman"), "plots")
transfers: str = os.path.join(appdirs.user_data_dir("plotman"), "transfers")
application: str = os.path.join(appdirs.user_log_dir("plotman"), "plotman.log")
disk_spaces: str = os.path.join(
appdirs.user_log_dir("plotman"), "plotman-disk_spaces.log"
)

@@ -304,3 +359,3 @@ def setup(self) -> None:

directory=self.plots,
group='plot',
group="plot",
)

@@ -312,9 +367,19 @@

directory=self.transfers,
group='transfer',
group="transfer",
)
def _create_log_path(self, time: pendulum.DateTime, directory: str, group: str) -> str:
timestamp = time.isoformat(timespec='microseconds').replace(':', '_')
return os.path.join(directory, f'{timestamp}.{group}.log')
def create_tdisk_space_log_path(self, time: pendulum.DateTime) -> str:
return self._create_log_path(
time=time,
directory=self.disk_spaces,
group="disk_space",
)
def _create_log_path(
self, time: pendulum.DateTime, directory: str, group: str
) -> str:
timestamp = time.isoformat(timespec="microseconds").replace(":", "_")
return os.path.join(directory, f"{timestamp}.{group}.log")
@attr.frozen

@@ -343,2 +408,3 @@ class Directories:

@attr.frozen

@@ -352,22 +418,9 @@ class Scheduling:

tmpdir_stagger_phase_minor: int
tmpdir_stagger_phase_limit: int = 1 # If not explicit, "tmpdir_stagger_phase_limit" will default to 1
tmpdir_stagger_phase_limit: int = (
1 # If not explicit, "tmpdir_stagger_phase_limit" will default to 1
)
tmp_overrides: Optional[Dict[str, TmpOverrides]] = None
@attr.frozen
class ChiaPlotterOptions:
executable: str = "chia"
n_threads: int = 2
n_buckets: int = 128
k: Optional[int] = 32
e: Optional[bool] = False
job_buffer: Optional[int] = 3389
x: bool = False
@attr.frozen
class MadmaxPlotterOptions:
executable: str = "chia_plot"
n_threads: int = 4
n_buckets: int = 256
@attr.frozen
class Plotting:

@@ -381,4 +434,6 @@ farmer_pk: Optional[str] = None

desert._make._DESERT_SENTINEL: {
'marshmallow_field': marshmallow.fields.String(
validate=marshmallow.validate.OneOf(choices=["chia", "madmax"]),
"marshmallow_field": marshmallow.fields.String(
validate=marshmallow.validate.OneOf(
choices=["bladebit", "chia", "madmax"]
),
),

@@ -388,5 +443,7 @@ },

)
chia: Optional[ChiaPlotterOptions] = None
madmax: Optional[MadmaxPlotterOptions] = None
bladebit: Optional[plotman.plotters.bladebit.Options] = None
chia: Optional[plotman.plotters.chianetwork.Options] = None
madmax: Optional[plotman.plotters.madmax.Options] = None
@attr.frozen

@@ -396,2 +453,3 @@ class UserInterface:

@attr.frozen

@@ -402,2 +460,3 @@ class Interactive:

@attr.frozen

@@ -407,2 +466,3 @@ class Commands:

@attr.frozen

@@ -421,3 +481,3 @@ class PlotmanConfig:

def setup(self) -> Generator[None, None, None]:
if self.plotting.type == 'chia':
if self.plotting.type == "chia":
if self.plotting.chia is None:

@@ -429,17 +489,8 @@ message = (

raise Exception(message)
if self.plotting.pool_contract_address is not None:
completed_process = subprocess.run(
args=[self.plotting.chia.executable, 'version'],
capture_output=True,
check=True,
encoding='utf-8',
)
version = packaging.version.Version(completed_process.stdout)
required_version = packaging.version.Version('1.2')
if version < required_version:
raise Exception(
f'Chia version {required_version} required for creating pool'
f' plots but found: {version}'
)
elif self.plotting.type == 'madmax':
plotman.plotters.chianetwork.check_configuration(
options=self.plotting.chia,
pool_contract_address=self.plotting.pool_contract_address,
)
elif self.plotting.type == "madmax":
if self.plotting.madmax is None:

@@ -452,17 +503,21 @@ message = (

if self.plotting.pool_contract_address is not None:
completed_process = subprocess.run(
args=[self.plotting.madmax.executable, '--help'],
capture_output=True,
check=True,
encoding='utf-8',
plotman.plotters.madmax.check_configuration(
options=self.plotting.madmax,
pool_contract_address=self.plotting.pool_contract_address,
)
elif self.plotting.type == "bladebit":
if self.plotting.bladebit is None:
message = (
"internal plotman error, please report the full traceback and your"
+ " full configuration file"
)
if '--contract' not in completed_process.stdout:
raise Exception(
f'found madMAx version does not support the `--contract`'
f' option for pools.'
)
raise Exception(message)
prefix = f'plotman-pid_{os.getpid()}-'
plotman.plotters.bladebit.check_configuration(
options=self.plotting.bladebit,
pool_contract_address=self.plotting.pool_contract_address,
)
prefix = f"plotman-pid_{os.getpid()}-"
self.logging.setup()

@@ -469,0 +524,0 @@

@@ -9,4 +9,3 @@ import csv

from plotman.log_parser import PlotLogParser
import plotman.plotinfo
import plotman.plotters

@@ -16,45 +15,85 @@

class Row:
plot_id: str = attr.ib(converter=str, metadata={'name': 'Plot ID'})
started_at: str = attr.ib(converter=str, metadata={'name': 'Started at'})
date: str = attr.ib(converter=str, metadata={'name': 'Date'})
size: str = attr.ib(converter=str, metadata={'name': 'Size'})
buffer: str = attr.ib(converter=str, metadata={'name': 'Buffer'})
buckets: str = attr.ib(converter=str, metadata={'name': 'Buckets'})
threads: str = attr.ib(converter=str, metadata={'name': 'Threads'})
tmp_dir_1: str = attr.ib(converter=str, metadata={'name': 'Tmp dir 1'})
tmp_dir_2: str = attr.ib(converter=str, metadata={'name': 'Tmp dir 2'})
phase_1_duration_raw: str = attr.ib(converter=str, metadata={'name': 'Phase 1 duration (raw)'})
phase_1_duration: str = attr.ib(converter=str, metadata={'name': 'Phase 1 duration'})
phase_1_duration_minutes: str = attr.ib(converter=str, metadata={'name': 'Phase 1 duration (minutes)'})
phase_1_duration_hours: str = attr.ib(converter=str, metadata={'name': 'Phase 1 duration (hours)'})
phase_2_duration_raw: str = attr.ib(converter=str, metadata={'name': 'Phase 2 duration (raw)'})
phase_2_duration: str = attr.ib(converter=str, metadata={'name': 'Phase 2 duration'})
phase_2_duration_minutes: str = attr.ib(converter=str, metadata={'name': 'Phase 2 duration (minutes)'})
phase_2_duration_hours: str = attr.ib(converter=str, metadata={'name': 'Phase 2 duration (hours)'})
phase_3_duration_raw: str = attr.ib(converter=str, metadata={'name': 'Phase 3 duration (raw)'})
phase_3_duration: str = attr.ib(converter=str, metadata={'name': 'Phase 3 duration'})
phase_3_duration_minutes: str = attr.ib(converter=str, metadata={'name': 'Phase 3 duration (minutes)'})
phase_3_duration_hours: str = attr.ib(converter=str, metadata={'name': 'Phase 3 duration (hours)'})
phase_4_duration_raw: str = attr.ib(converter=str, metadata={'name': 'Phase 4 duration (raw)'})
phase_4_duration: str = attr.ib(converter=str, metadata={'name': 'Phase 4 duration'})
phase_4_duration_minutes: str = attr.ib(converter=str, metadata={'name': 'Phase 4 duration (minutes)'})
phase_4_duration_hours: str = attr.ib(converter=str, metadata={'name': 'Phase 4 duration (hours)'})
total_time_raw: str = attr.ib(converter=str, metadata={'name': 'Total time (raw)'})
total_time: str = attr.ib(converter=str, metadata={'name': 'Total time'})
total_time_minutes: str = attr.ib(converter=str, metadata={'name': 'Total time (minutes)'})
total_time_hours: str = attr.ib(converter=str, metadata={'name': 'Total time (hours)'})
copy_time_raw: str = attr.ib(converter=str, metadata={'name': 'Copy time (raw)'})
copy_time: str = attr.ib(converter=str, metadata={'name': 'Copy time'})
copy_time_minutes: str = attr.ib(converter=str, metadata={'name': 'Copy time (minutes)'})
copy_time_hours: str = attr.ib(converter=str, metadata={'name': 'Copy time (hours)'})
filename: str = attr.ib(converter=str, metadata={'name': 'Filename'})
plot_id: str = attr.ib(converter=str, metadata={"name": "Plot ID"})
started_at: str = attr.ib(converter=str, metadata={"name": "Started at"})
date: str = attr.ib(converter=str, metadata={"name": "Date"})
size: str = attr.ib(converter=str, metadata={"name": "Size"})
buffer: str = attr.ib(converter=str, metadata={"name": "Buffer"})
buckets: str = attr.ib(converter=str, metadata={"name": "Buckets"})
threads: str = attr.ib(converter=str, metadata={"name": "Threads"})
tmp_dir_1: str = attr.ib(converter=str, metadata={"name": "Tmp dir 1"})
tmp_dir_2: str = attr.ib(converter=str, metadata={"name": "Tmp dir 2"})
phase_1_duration_raw: str = attr.ib(
converter=str, metadata={"name": "Phase 1 duration (raw)"}
)
phase_1_duration: str = attr.ib(
converter=str, metadata={"name": "Phase 1 duration"}
)
phase_1_duration_minutes: str = attr.ib(
converter=str, metadata={"name": "Phase 1 duration (minutes)"}
)
phase_1_duration_hours: str = attr.ib(
converter=str, metadata={"name": "Phase 1 duration (hours)"}
)
phase_2_duration_raw: str = attr.ib(
converter=str, metadata={"name": "Phase 2 duration (raw)"}
)
phase_2_duration: str = attr.ib(
converter=str, metadata={"name": "Phase 2 duration"}
)
phase_2_duration_minutes: str = attr.ib(
converter=str, metadata={"name": "Phase 2 duration (minutes)"}
)
phase_2_duration_hours: str = attr.ib(
converter=str, metadata={"name": "Phase 2 duration (hours)"}
)
phase_3_duration_raw: str = attr.ib(
converter=str, metadata={"name": "Phase 3 duration (raw)"}
)
phase_3_duration: str = attr.ib(
converter=str, metadata={"name": "Phase 3 duration"}
)
phase_3_duration_minutes: str = attr.ib(
converter=str, metadata={"name": "Phase 3 duration (minutes)"}
)
phase_3_duration_hours: str = attr.ib(
converter=str, metadata={"name": "Phase 3 duration (hours)"}
)
phase_4_duration_raw: str = attr.ib(
converter=str, metadata={"name": "Phase 4 duration (raw)"}
)
phase_4_duration: str = attr.ib(
converter=str, metadata={"name": "Phase 4 duration"}
)
phase_4_duration_minutes: str = attr.ib(
converter=str, metadata={"name": "Phase 4 duration (minutes)"}
)
phase_4_duration_hours: str = attr.ib(
converter=str, metadata={"name": "Phase 4 duration (hours)"}
)
total_time_raw: str = attr.ib(converter=str, metadata={"name": "Total time (raw)"})
total_time: str = attr.ib(converter=str, metadata={"name": "Total time"})
total_time_minutes: str = attr.ib(
converter=str, metadata={"name": "Total time (minutes)"}
)
total_time_hours: str = attr.ib(
converter=str, metadata={"name": "Total time (hours)"}
)
copy_time_raw: str = attr.ib(converter=str, metadata={"name": "Copy time (raw)"})
copy_time: str = attr.ib(converter=str, metadata={"name": "Copy time"})
copy_time_minutes: str = attr.ib(
converter=str, metadata={"name": "Copy time (minutes)"}
)
copy_time_hours: str = attr.ib(
converter=str, metadata={"name": "Copy time (hours)"}
)
filename: str = attr.ib(converter=str, metadata={"name": "Filename"})
@classmethod
def names(cls) -> typing.List[str]:
return [field.metadata['name'] for field in attr.fields(cls)]
return [field.metadata["name"] for field in attr.fields(cls)]
@classmethod
def from_info(cls, info: plotman.plotinfo.PlotInfo) -> "Row":
def from_info(cls, info: plotman.plotters.CommonInfo) -> "Row":
if info.started_at is None:
raise Exception(f'Unexpected None start time for file: {info.filename}')
raise Exception(f"Unexpected None start time for file: {info.filename}")

@@ -69,4 +108,4 @@ return cls(

threads=info.threads,
tmp_dir_1=info.tmp_dir1,
tmp_dir_2=info.tmp_dir2,
tmp_dir_1=info.tmpdir,
tmp_dir_2=info.tmp2dir,
phase_1_duration_raw=info.phase1_duration_raw,

@@ -101,3 +140,3 @@ phase_1_duration=info.phase1_duration,

return {
field.metadata['name']: value
field.metadata["name"]: value
for field, value in zip(attr.fields(type(self)), attr.astuple(self))

@@ -107,3 +146,5 @@ }

def key_on_plot_info_started_at(element: plotman.plotinfo.PlotInfo) -> pendulum.DateTime:
def key_on_plot_info_started_at(
element: plotman.plotters.CommonInfo,
) -> pendulum.DateTime:
if element.started_at is None:

@@ -115,4 +156,5 @@ return pendulum.now().add(years=9999)

def parse_logs(logfilenames: typing.Sequence[str]) -> typing.List[plotman.plotinfo.PlotInfo]:
parser = PlotLogParser()
def parse_logs(
logfilenames: typing.Sequence[str],
) -> typing.List[plotman.plotters.CommonInfo]:
result = []

@@ -122,5 +164,16 @@

with open(filename) as file:
info = parser.parse(file)
try:
plotter_type = plotman.plotters.get_plotter_from_log(lines=file)
except plotman.errors.UnableToIdentifyPlotterFromLogError:
continue
if not info.in_progress():
parser = plotter_type()
with open(filename, "rb") as binary_file:
read_bytes = binary_file.read()
parser.update(chunk=read_bytes)
info = parser.common_info()
if info.completed:
result.append(info)

@@ -127,0 +180,0 @@

@@ -9,2 +9,3 @@ import curses

import typing
import logging

@@ -14,6 +15,9 @@ from plotman import archive, configuration, manager, reporting

root_logger = logging.getLogger()
class TerminalTooSmallError(Exception):
pass
class Log:

@@ -29,13 +33,13 @@ entries: typing.List[str]

def log(self, msg: str) -> None:
'''Log the message and scroll to the end of the log'''
ts = datetime.datetime.now().strftime('%m-%d %H:%M:%S')
self.entries.append(ts + ' ' + msg)
"""Log the message and scroll to the end of the log"""
ts = datetime.datetime.now().strftime("%m-%d %H:%M:%S")
self.entries.append(ts + " " + msg)
self.cur_pos = len(self.entries)
def tail(self, num_entries: int) -> typing.List[str]:
'''Return the entries at the end of the log. Consider cur_slice() instead.'''
"""Return the entries at the end of the log. Consider cur_slice() instead."""
return self.entries[-num_entries:]
def shift_slice(self, offset: int) -> None:
'''Positive shifts towards end, negative shifts towards beginning'''
"""Positive shifts towards end, negative shifts towards beginning"""
self.cur_pos = max(0, min(len(self.entries), self.cur_pos + offset))

@@ -50,27 +54,35 @@

def cur_slice(self, num_entries: int) -> typing.List[str]:
'''Return num_entries log entries up to the current slice position'''
"""Return num_entries log entries up to the current slice position"""
return self.entries[max(0, self.cur_pos - num_entries) : self.cur_pos]
def fill_log(self) -> None:
'''Add a bunch of stuff to the log. Useful for testing.'''
"""Add a bunch of stuff to the log. Useful for testing."""
for i in range(100):
self.log('Log line %d' % i)
self.log("Log line %d" % i)
def plotting_status_msg(active: bool, status: str) -> str:
if active:
return '(active) ' + status
return "(active) " + status
else:
return '(inactive) ' + status
return "(inactive) " + status
def archiving_status_msg(configured: bool, active: bool, status: str) -> str:
if configured:
if active:
return '(active) ' + status
return "(active) " + status
else:
return '(inactive) ' + status
return "(inactive) " + status
else:
return '(not configured)'
return "(not configured)"
# cmd_autostart_plotting is the (optional) argument passed from the command line. May be None
def curses_main(stdscr: typing.Any, cmd_autostart_plotting: typing.Optional[bool], cmd_autostart_archiving: typing.Optional[bool], cfg: configuration.PlotmanConfig) -> None:
def curses_main(
stdscr: typing.Any,
cmd_autostart_plotting: typing.Optional[bool],
cmd_autostart_archiving: typing.Optional[bool],
cfg: configuration.PlotmanConfig,
) -> None:
log = Log()

@@ -92,4 +104,4 @@

plotting_status = '<startup>' # todo rename these msg?
archiving_status: typing.Union[bool, str, typing.Dict[str, object]] = '<startup>'
plotting_status = "<startup>" # todo rename these msg?
archiving_status: typing.Union[bool, str, typing.Dict[str, object]] = "<startup>"

@@ -108,3 +120,3 @@ stdscr.nodelay(True) # make getch() non-blocking

pressed_key = '' # For debugging
pressed_key = "" # For debugging

@@ -120,3 +132,3 @@ archdir_freebytes = None

do_full_refresh = False
elapsed = 0 # Time since last refresh, or zero if no prev. refresh
elapsed = 0 # Time since last refresh, or zero if no prev. refresh
if last_refresh is None:

@@ -139,3 +151,3 @@ do_full_refresh = True

)
if (started):
if started:
if aging_reason is not None:

@@ -145,3 +157,3 @@ log.log(aging_reason)

log.log(msg)
plotting_status = '<just started job>'
plotting_status = "<just started job>"
jobs = Job.get_running_jobs(cfg.logging.plots, cached_jobs=jobs)

@@ -153,14 +165,22 @@ else:

plotting_status = msg
root_logger.info("[plot] %s", msg)
if cfg.archiving is not None:
if archiving_active:
archiving_status, log_messages = archive.spawn_archive_process(cfg.directories, cfg.archiving, cfg.logging, jobs)
for log_message in log_messages:
log.log(log_message)
archiving_status, log_messages = archive.spawn_archive_process(
cfg.directories, cfg.archiving, cfg.logging, jobs
)
if log_messages:
for log_message in log_messages:
log.log(log_message)
root_logger.info("[archive] %s", log_message)
else:
root_logger.info("[archive] %s", archiving_status)
archdir_freebytes, log_messages = archive.get_archdir_freebytes(cfg.archiving)
archdir_freebytes, log_messages = archive.get_archdir_freebytes(
cfg.archiving
)
for log_message in log_messages:
log.log(log_message)
# Get terminal size. Recommended method is stdscr.getmaxyx(), but this

@@ -178,3 +198,3 @@ # does not seem to work on some systems. It may be a bug in Python

completed_process = subprocess.run(
['stty', 'size'], check=True, encoding='utf-8', stdout=subprocess.PIPE
["stty", "size"], check=True, encoding="utf-8", stdout=subprocess.PIPE
)

@@ -201,3 +221,3 @@ elements = completed_process.stdout.split()

if len(archive_directories) == 0:
arch_prefix = ''
arch_prefix = ""
else:

@@ -210,11 +230,13 @@ arch_prefix = os.path.commonpath(archive_directories)

tmp_report = reporting.tmp_dir_report(
jobs, cfg.directories, cfg.scheduling, n_cols, 0, n_tmpdirs, tmp_prefix)
dst_report = reporting.dst_dir_report(
jobs, dst_dir, n_cols, dst_prefix)
jobs, cfg.directories, cfg.scheduling, n_cols, 0, n_tmpdirs, tmp_prefix
)
dst_report = reporting.dst_dir_report(jobs, dst_dir, n_cols, dst_prefix)
if archdir_freebytes is not None:
arch_report = reporting.arch_dir_report(archdir_freebytes, n_cols, arch_prefix)
arch_report = reporting.arch_dir_report(
archdir_freebytes, n_cols, arch_prefix
)
if not arch_report:
arch_report = '<no archive dir info>'
arch_report = "<no archive dir info>"
else:
arch_report = '<archiving not configured>'
arch_report = "<archiving not configured>"

@@ -253,4 +275,5 @@ #

except Exception:
raise Exception('Failed to initialize curses windows, try a larger '
'terminal window.')
raise Exception(
"Failed to initialize curses windows, try a larger " "terminal window."
)

@@ -262,17 +285,27 @@ #

# Header
header_win.addnstr(0, 0, 'Plotman', linecap, curses.A_BOLD)
header_win.addnstr(0, 0, "Plotman", linecap, curses.A_BOLD)
timestamp = datetime.datetime.now().strftime("%H:%M:%S")
refresh_msg = "now" if do_full_refresh else f"{int(elapsed)}s/{cfg.scheduling.polling_time_s}"
refresh_msg = (
"now"
if do_full_refresh
else f"{int(elapsed)}s/{cfg.scheduling.polling_time_s}"
)
header_win.addnstr(f" {timestamp} (refresh {refresh_msg})", linecap)
header_win.addnstr(' | <P>lotting: ', linecap, curses.A_BOLD)
header_win.addnstr(" | <P>lotting: ", linecap, curses.A_BOLD)
header_win.addnstr(
plotting_status_msg(plotting_active, plotting_status), linecap)
header_win.addnstr(' <A>rchival: ', linecap, curses.A_BOLD)
plotting_status_msg(plotting_active, plotting_status), linecap
)
header_win.addnstr(" <A>rchival: ", linecap, curses.A_BOLD)
header_win.addnstr(
archiving_status_msg(archiving_configured,
archiving_active, archiving_status), linecap) # type: ignore[arg-type]
archiving_status_msg(
archiving_configured,
archiving_active,
archiving_status, # type: ignore[arg-type]
),
linecap,
)
# Oneliner progress display
header_win.addnstr(1, 0, 'Jobs (%d): ' % len(jobs), linecap)
header_win.addnstr('[' + reporting.job_viz(jobs) + ']', linecap)
header_win.addnstr(1, 0, "Jobs (%d): " % len(jobs), linecap)
header_win.addnstr("[" + reporting.job_viz(jobs) + "]", linecap)

@@ -282,17 +315,17 @@ # These are useful for debugging.

# if pressed_key:
# header_win.addnstr(' (keypress %s)' % str(pressed_key), linecap)
header_win.addnstr(2, 0, 'Prefixes:', linecap, curses.A_BOLD)
header_win.addnstr(' tmp=', linecap, curses.A_BOLD)
# header_win.addnstr(' (keypress %s)' % str(pressed_key), linecap)
header_win.addnstr(2, 0, "Prefixes:", linecap, curses.A_BOLD)
header_win.addnstr(" tmp=", linecap, curses.A_BOLD)
header_win.addnstr(tmp_prefix, linecap)
header_win.addnstr(' dst=', linecap, curses.A_BOLD)
header_win.addnstr(" dst=", linecap, curses.A_BOLD)
header_win.addnstr(dst_prefix, linecap)
if archiving_configured:
header_win.addnstr(' archive=', linecap, curses.A_BOLD)
header_win.addnstr(" archive=", linecap, curses.A_BOLD)
header_win.addnstr(arch_prefix, linecap)
header_win.addnstr(' (remote)', linecap)
header_win.addnstr(" (remote)", linecap)
# Jobs
jobs_win.addstr(0, 0, reporting.status_report(jobs, n_cols, jobs_h,
tmp_prefix, dst_prefix))
jobs_win.addstr(
0, 0, reporting.status_report(jobs, n_cols, jobs_h, tmp_prefix, dst_prefix)
)
jobs_win.chgat(0, 0, curses.A_REVERSE)

@@ -305,5 +338,3 @@

tmpwin = curses.newwin(
tmp_h, tmp_w,
dirs_pos + int(maxtd_h - tmp_h), 0)
tmpwin = curses.newwin(tmp_h, tmp_w, dirs_pos + int(maxtd_h - tmp_h), 0)
tmpwin.addstr(tmp_report)

@@ -313,4 +344,7 @@ tmpwin.chgat(0, 0, curses.A_REVERSE)

dstwin = curses.newwin(
dst_h, dst_w,
dirs_pos + int((maxtd_h - dst_h) / 2), tmp_w + tmpwin_dstwin_gutter)
dst_h,
dst_w,
dirs_pos + int((maxtd_h - dst_h) / 2),
tmp_w + tmpwin_dstwin_gutter,
)
dstwin.addstr(dst_report)

@@ -320,3 +354,3 @@ dstwin.chgat(0, 0, curses.A_REVERSE)

archwin = curses.newwin(arch_h, arch_w, dirs_pos + maxtd_h, 0)
archwin.addstr(0, 0, 'Archive dirs free space', curses.A_REVERSE)
archwin.addstr(0, 0, "Archive dirs free space", curses.A_REVERSE)
archwin.addstr(1, 0, arch_report)

@@ -326,4 +360,9 @@

# this seems easier.
log_win.addnstr(0, 0, ('Log: %d (<up>/<down>/<end> to scroll)\n' % log.get_cur_pos() ),
linecap, curses.A_REVERSE)
log_win.addnstr(
0,
0,
("Log: %d (<up>/<down>/<end> to scroll)\n" % log.get_cur_pos()),
linecap,
curses.A_REVERSE,
)
for i, logline in enumerate(log.cur_slice(logs_h - 1)):

@@ -344,20 +383,20 @@ log_win.addnstr(i + 1, 0, logline, linecap)

except KeyboardInterrupt:
key = ord('q')
key = ord("q")
if key == curses.KEY_UP:
log.shift_slice(-1)
pressed_key = 'up'
pressed_key = "up"
elif key == curses.KEY_DOWN:
log.shift_slice(1)
pressed_key = 'dwn'
pressed_key = "dwn"
elif key == curses.KEY_END:
log.shift_slice_to_end()
pressed_key = 'end'
elif key == ord('p'):
pressed_key = "end"
elif key == ord("p"):
plotting_active = not plotting_active
pressed_key = 'p'
elif key == ord('a'):
pressed_key = "p"
elif key == ord("a"):
archiving_active = not archiving_active
pressed_key = 'a'
elif key == ord('q'):
pressed_key = "a"
elif key == ord("q"):
break

@@ -367,4 +406,9 @@ else:

def run_interactive(cfg: configuration.PlotmanConfig, autostart_plotting: typing.Optional[bool] = None, autostart_archiving: typing.Optional[bool] = None) -> None:
locale.setlocale(locale.LC_ALL, '')
def run_interactive(
cfg: configuration.PlotmanConfig,
autostart_plotting: typing.Optional[bool] = None,
autostart_archiving: typing.Optional[bool] = None,
) -> None:
locale.setlocale(locale.LC_ALL, "")
code = locale.getpreferredencoding()

@@ -371,0 +415,0 @@ # Then use code as the encoding for str.encode() calls.

@@ -1,15 +0,7 @@

# TODO do we use all these?
import argparse
import contextlib
import functools
import logging
import os
import random
import re
import sys
import glob
import time
from datetime import datetime
from enum import Enum, auto
from subprocess import call
import typing

@@ -19,100 +11,50 @@

import click
import pendulum
import psutil
from plotman import chia, madmax
import plotman.errors
if typing.TYPE_CHECKING:
import plotman.errors
def job_phases_for_tmpdir(d: str, all_jobs: typing.List["Job"]) -> typing.List["Phase"]:
'''Return phase 2-tuples for jobs running on tmpdir d'''
return sorted([j.progress() for j in all_jobs if os.path.normpath(j.tmpdir) == os.path.normpath(d)])
"""Return phase 2-tuples for jobs running on tmpdir d"""
return sorted(
[
j.progress()
for j in all_jobs
if os.path.normpath(j.plotter.common_info().tmpdir) == os.path.normpath(d)
]
)
def job_phases_for_dstdir(d: str, all_jobs: typing.List["Job"]) -> typing.List["Phase"]:
'''Return phase 2-tuples for jobs outputting to dstdir d'''
return sorted([j.progress() for j in all_jobs if os.path.normpath(j.dstdir) == os.path.normpath(d)])
"""Return phase 2-tuples for jobs outputting to dstdir d"""
return sorted(
[
j.progress()
for j in all_jobs
if os.path.normpath(j.plotter.common_info().dstdir) == os.path.normpath(d)
]
)
def is_plotting_cmdline(cmdline: typing.List[str]) -> bool:
if len(cmdline) == 0:
return False
if 'chia_plot' == os.path.basename(cmdline[0].lower()): # Madmax plotter
# TODO: use the configured executable
return True
else:
if 'python' in cmdline[0].lower(): # Stock Chia plotter
cmdline = cmdline[1:]
@attr.frozen
class ParsedChiaPlotsCreateCommand:
error: typing.Optional[click.ClickException]
help: bool
parameters: typing.Dict[str, object]
def __eq__(self, other: object) -> bool:
if not isinstance(other, type(self)):
return False
return (
len(cmdline) >= 3
# TODO: use the configured executable
and 'chia' in cmdline[0]
and 'plots' == cmdline[1]
and 'create' == cmdline[2]
type(self.error) == type(other.error)
and str(self.error) == str(other.error)
and self.help == other.help
and self.parameters == other.parameters
)
def parse_chia_plot_time(s: str) -> pendulum.DateTime:
# This will grow to try ISO8601 as well for when Chia logs that way
# TODO: unignore once fixed upstream
# https://github.com/sdispater/pendulum/pull/548
return pendulum.from_format(s, 'ddd MMM DD HH:mm:ss YYYY', locale='en', tz=None) # type: ignore[arg-type]
def parse_chia_plots_create_command_line(
command_line: typing.List[str],
) -> "ParsedChiaPlotsCreateCommand":
command_line = list(command_line)
# Parse command line args
if 'chia_plot' == os.path.basename(command_line[0].lower()): # Madmax plotter
# TODO: use the configured executable
all_command_arguments = command_line[1:]
command = madmax._cli_c8121b9
else:
if 'python' in command_line[0].lower(): # Stock Chia plotter
command_line = command_line[1:]
assert len(command_line) >= 3
# TODO: use the configured executable
assert 'chia' in command_line[0]
assert 'plots' == command_line[1]
assert 'create' == command_line[2]
all_command_arguments = command_line[3:]
# TODO: We could at some point do chia version detection and pick the
# associated command. For now we'll just use the latest one we have
# copied.
command = chia.commands.latest_command()
# nice idea, but this doesn't include -h
# help_option_names = command.get_help_option_names(ctx=context)
help_option_names = {'--help', '-h'}
command_arguments = [
argument
for argument in all_command_arguments
if argument not in help_option_names
]
try:
context = command.make_context(info_name='', args=list(command_arguments))
except click.ClickException as e:
error = e
params = {}
else:
error = None
params = context.params
return ParsedChiaPlotsCreateCommand(
error=error,
help=len(all_command_arguments) > len(command_arguments),
parameters=params,
)
class ParsedChiaPlotsCreateCommand:
def __init__(
self,
error: click.ClickException,
help: bool,
parameters: typing.Dict[str, object],
) -> None:
self.error = error
self.help = help
self.parameters = parameters
@functools.total_ordering

@@ -126,14 +68,17 @@ @attr.frozen(order=False)

def __lt__(self, other: "Phase") -> bool:
return (
(not self.known, self.major, self.minor)
< (not other.known, other.major, other.minor)
return (not self.known, self.major, self.minor) < (
not other.known,
other.major,
other.minor,
)
@classmethod
def from_tuple(cls, t: typing.Tuple[typing.Optional[int], typing.Optional[int]]) -> "Phase":
def from_tuple(
cls, t: typing.Tuple[typing.Optional[int], typing.Optional[int]]
) -> "Phase":
if len(t) != 2:
raise Exception(f'phase must be created from 2-tuple: {t!r}')
raise Exception(f"phase must be created from 2-tuple: {t!r}")
if None in t and not t[0] is t[1]:
raise Exception(f'phase can not be partially known: {t!r}')
raise Exception(f"phase can not be partially known: {t!r}")

@@ -154,24 +99,16 @@ if t[0] is None:

if not self.known:
return '?:?'
return f'{self.major}:{self.minor}'
return "?:?"
return f"{self.major}:{self.minor}"
# TODO: be more principled and explicit about what we cache vs. what we look up
# dynamically from the logfile
class Job:
'Represents a plotter job'
"Represents a plotter job"
logfile: str = ''
jobfile: str = ''
plotter: "plotman.plotters.Plotter"
logfile: str = ""
job_id: int = 0
plot_id: str = '--------'
plotter: str = ''
proc: psutil.Process
k: int
r: int
u: int
b: int
n: int
tmpdir: str
tmp2dir: str
dstdir: str

@@ -184,7 +121,7 @@ @classmethod

) -> typing.List["Job"]:
'''Return a list of running plot jobs. If a cache of preexisting jobs is provided,
reuse those previous jobs without updating their information. Always look for
new jobs not already in the cache.'''
"""Return a list of running plot jobs. If a cache of preexisting jobs is provided,
reuse those previous jobs without updating their information. Always look for
new jobs not already in the cache."""
jobs: typing.List[Job] = []
cached_jobs_by_pid = { j.proc.pid: j for j in cached_jobs }
cached_jobs_by_pid = {j.proc.pid: j for j in cached_jobs}

@@ -202,3 +139,6 @@ with contextlib.ExitStack() as exit_stack:

exit_stack.enter_context(process.oneshot())
if is_plotting_cmdline(process.cmdline()):
# TODO: handle import loop
import plotman.plotters
if plotman.plotters.is_plotting_command_line(process.cmdline()):
ppids.add(process.ppid())

@@ -218,5 +158,3 @@ pids.add(process.pid)

wanted_processes = [
process
for process in processes
if process.pid in wanted_pids
process for process in processes if process.pid in wanted_pids
]

@@ -234,14 +172,32 @@

continue
parsed_command = parse_chia_plots_create_command_line(
command_line=command_line,
# TODO: handle import loop
import plotman.plotters
plotter_type = (
plotman.plotters.get_plotter_from_command_line(
command_line=command_line,
)
)
if parsed_command.error is not None:
plotter = plotter_type()
plotter.parse_command_line(
command_line=command_line, cwd=proc.cwd()
)
if plotter.parsed_command_line is None:
continue
if plotter.parsed_command_line.error is not None:
continue
if plotter.parsed_command_line.help:
continue
job = cls(
proc=proc,
parsed_command=parsed_command,
# parsed_command=plotter.parsed_command_line,
plotter=plotter,
logroot=logroot,
)
if job.help:
continue
# TODO: stop reloading every time...
with open(job.logfile, "rb") as f:
r = f.read()
job.plotter.update(chunk=r)
jobs.append(job)

@@ -251,63 +207,13 @@

def __init__(
self,
proc: psutil.Process,
parsed_command: ParsedChiaPlotsCreateCommand,
plotter: "plotman.plotters.Plotter",
# parsed_command: ParsedChiaPlotsCreateCommand,
logroot: str,
) -> None:
'''Initialize from an existing psutil.Process object. must know logroot in order to understand open files'''
"""Initialize from an existing psutil.Process object. must know logroot in order to understand open files"""
self.proc = proc
# These are dynamic, cached, and need to be udpated periodically
self.phase = Phase(known=False)
self.plotter = plotter
self.help = parsed_command.help
self.args = parsed_command.parameters
# an example as of 1.0.5
# {
# 'size': 32,
# 'num_threads': 4,
# 'buckets': 128,
# 'buffer': 6000,
# 'tmp_dir': '/farm/yards/901',
# 'final_dir': '/farm/wagons/801',
# 'override_k': False,
# 'num': 1,
# 'alt_fingerprint': None,
# 'pool_contract_address': None,
# 'farmer_public_key': None,
# 'pool_public_key': None,
# 'tmp2_dir': None,
# 'plotid': None,
# 'memo': None,
# 'nobitfield': False,
# 'exclude_final_dir': False,
# }
# TODO: use the configured executable
if proc.name().startswith("chia_plot"): # MADMAX
self.k = 32
self.r = self.args['threads'] # type: ignore[assignment]
self.u = self.args['buckets'] # type: ignore[assignment]
self.b = 0
self.n = self.args['count'] # type: ignore[assignment]
self.tmpdir = self.args['tmpdir'] # type: ignore[assignment]
self.tmp2dir = self.args['tmpdir2'] # type: ignore[assignment]
self.dstdir = self.args['finaldir'] # type: ignore[assignment]
else: # CHIA
self.k = self.args['size'] # type: ignore[assignment]
self.r = self.args['num_threads'] # type: ignore[assignment]
self.u = self.args['buckets'] # type: ignore[assignment]
self.b = self.args['buffer'] # type: ignore[assignment]
self.n = self.args['num'] # type: ignore[assignment]
self.tmpdir = self.args['tmp_dir'] # type: ignore[assignment]
self.tmp2dir = self.args['tmp2_dir'] # type: ignore[assignment]
self.dstdir = self.args['final_dir'] # type: ignore[assignment]
plot_cwd: str = self.proc.cwd()
self.tmpdir = os.path.join(plot_cwd, self.tmpdir)
if self.tmp2dir is not None:
self.tmp2dir = os.path.join(plot_cwd, self.tmp2dir)
self.dstdir = os.path.join(plot_cwd, self.dstdir)
# Find logfile (whatever file is open under the log root). The

@@ -323,180 +229,41 @@ # file may be open more than once, e.g. for STDOUT and STDERR.

if self.logfile:
# Initialize data that needs to be loaded from the logfile
self.init_from_logfile()
# TODO: turn this into logging or somesuch
# else:
# print('Found plotting process PID {pid}, but could not find '
# 'logfile in its open files:'.format(pid = self.proc.pid))
# for f in self.proc.open_files():
# print(f.path)
def init_from_logfile(self) -> None:
'''Read plot ID and job start time from logfile. Return true if we
find all the info as expected, false otherwise'''
assert self.logfile
# Try reading for a while; it can take a while for the job to get started as it scans
# existing plot dirs (especially if they are NFS).
found_id = False
found_log = False
for attempt_number in range(3):
with open(self.logfile, 'r') as f:
with contextlib.suppress(UnicodeDecodeError):
for line in f:
m = re.match('^ID: ([0-9a-f]*)', line)
if m: # CHIA
self.plot_id = m.group(1)
self.plotter = 'chia'
found_id = True
else:
m = re.match(r"^Plot Name: plot-k(\d+)-(\d+)-(\d+)-(\d+)-(\d+)-(\d+)-(\w+)$", line)
if m: # MADMAX
self.plot_id = m.group(7)
self.plotter = 'madmax'
self.start_time = pendulum.from_timestamp(os.path.getctime(self.logfile))
found_id = True
found_log = True
break
m = re.match(r'^Starting phase 1/4:.*\.\.\. (.*)', line)
if m: # CHIA
# Mon Nov 2 08:39:53 2020
self.start_time = parse_chia_plot_time(m.group(1))
found_log = True
break # Stop reading lines in file
if found_id and found_log:
break # Stop trying
else:
time.sleep(1) # Sleep and try again
# If we couldn't find the line in the logfile, the job is probably just getting started
# (and being slow about it). In this case, use the last metadata change as the start time.
# TODO: we never come back to this; e.g. plot_id may remain uninitialized.
# TODO: should we just use the process start time instead?
if not found_log:
self.start_time = pendulum.from_timestamp(os.path.getctime(self.logfile))
# Load things from logfile that are dynamic
self.update_from_logfile()
def update_from_logfile(self) -> None:
self.set_phase_from_logfile()
def set_phase_from_logfile(self) -> None:
assert self.logfile
# Map from phase number to subphase number reached in that phase.
# Phase 1 subphases are <started>, table1, table2, ...
# Phase 2 subphases are <started>, table7, table6, ...
# Phase 3 subphases are <started>, tables1&2, tables2&3, ...
# Phase 4 subphases are <started>
phase_subphases = {}
with open(self.logfile, 'r') as f:
with contextlib.suppress(UnicodeDecodeError):
for line in f:
if self.plotter == "madmax":
# MADMAX reports after completion of phases so increment the reported subphases
# and assume that phase 1 has already started
# MADMAX: "[P1]" or "[P2]" or "[P4]"
m = re.match(r'^\[P(\d)\].*', line)
if m:
phase = int(m.group(1))
phase_subphases[phase] = 1
# MADMAX: "[P1] or [P2] Table 7"
m = re.match(r'^\[P(\d)\] Table (\d).*', line)
if m:
phase = int(m.group(1))
if phase == 1:
phase_subphases[1] = max(phase_subphases[1], (int(m.group(2))+1))
elif phase == 2:
if 'rewrite' in line:
phase_subphases[2] = max(phase_subphases[2], (9 - int(m.group(2))))
else:
phase_subphases[2] = max(phase_subphases[2], (8 - int(m.group(2))))
# MADMAX: Phase 3: "[P3-1] Table 4"
m = re.match(r'^\[P3\-(\d)\] Table (\d).*', line)
if m:
if 3 in phase_subphases:
if int(m.group(1)) == 2:
phase_subphases[3] = max(phase_subphases[3], int(m.group(2)))
else:
phase_subphases[3] = max(phase_subphases[3], int(m.group(2))-1)
else:
phase_subphases[3] = 1
else:
# CHIA: "Starting phase 1/4: Forward Propagation into tmp files... Sat Oct 31 11:27:04 2020"
m = re.match(r'^Starting phase (\d).*', line)
if m:
phase = int(m.group(1))
phase_subphases[phase] = 0
# CHIA: Phase 1: "Computing table 2"
m = re.match(r'^Computing table (\d).*', line)
if m:
phase_subphases[1] = max(phase_subphases[1], int(m.group(1)))
# CHIA: Phase 2: "Backpropagating on table 2"
m = re.match(r'^Backpropagating on table (\d).*', line)
if m:
phase_subphases[2] = max(phase_subphases[2], 7 - int(m.group(1)))
# CHIA: Phase 3: "Compressing tables 4 and 5"
m = re.match(r'^Compressing tables (\d) and (\d).*', line)
if m:
phase_subphases[3] = max(phase_subphases[3], int(m.group(1)))
# TODO also collect timing info:
# "Time for phase 1 = 22796.7 seconds. CPU (98%) Tue Sep 29 17:57:19 2020"
# for phase in ['1', '2', '3', '4']:
# m = re.match(r'^Time for phase ' + phase + ' = (\d+.\d+) seconds..*', line)
# data.setdefault....
# Total time = 49487.1 seconds. CPU (97.26%) Wed Sep 30 01:22:10 2020
# m = re.match(r'^Total time = (\d+.\d+) seconds.*', line)
# if m:
# data.setdefault(key, {}).setdefault('total time', []).append(float(m.group(1)))
if phase_subphases:
phase = max(phase_subphases.keys())
self.phase = Phase(major=phase, minor=phase_subphases[phase])
else:
self.phase = Phase(major=0, minor=0)
def progress(self) -> Phase:
'''Return a 2-tuple with the job phase and subphase (by reading the logfile)'''
return self.phase
"""Return a 2-tuple with the job phase and subphase (by reading the logfile)"""
return self.plotter.common_info().phase
def plot_id_prefix(self) -> str:
return self.plot_id[:8]
plot_id = self.plotter.common_info().plot_id
if plot_id is None:
return "--------"
return plot_id[:8]
# TODO: make this more useful and complete, and/or make it configurable
def status_str_long(self) -> str:
return '{plot_id}\nk={k} r={r} b={b} u={u}\npid:{pid}\ntmp:{tmp}\ntmp2:{tmp2}\ndst:{dst}\nlogfile:{logfile}'.format(
plot_id = self.plot_id,
k = self.k,
r = self.r,
b = self.b,
u = self.u,
pid = self.proc.pid,
tmp = self.tmpdir,
tmp2 = self.tmp2dir,
dst = self.dstdir,
logfile = self.logfile
)
# TODO: get the rest of this filled out
info = self.plotter.common_info()
return "{plot_id}\npid:{pid}\ntmp:{tmp}\ndst:{dst}\nlogfile:{logfile}".format(
plot_id=info.plot_id,
pid=self.proc.pid,
tmp=info.tmpdir,
dst=info.dstdir,
logfile=self.logfile,
)
# return '{plot_id}\nk={k} r={r} b={b} u={u}\npid:{pid}\ntmp:{tmp}\ntmp2:{tmp2}\ndst:{dst}\nlogfile:{logfile}'.format(
# plot_id = info.plot_id,
# # k = self.k,
# # r = self.r,
# # b = self.b,
# # u = self.u,
# pid = self.proc.pid,
# tmp = info.tmpdir,
# # tmp2 = self.tmp2dir,
# dst = info.dstdir,
# logfile = self.logfile
# )
def print_logs(self, follow: bool = False) -> None:
with open(self.logfile, 'r') as f:
with open(self.logfile, "r") as f:
if follow:
line = ''
line = ""
while True:

@@ -507,4 +274,4 @@ tmp = f.readline()

if line.endswith("\n"):
print(line.rstrip('\n'))
line = ''
print(line.rstrip("\n"))
line = ""
else:

@@ -516,8 +283,10 @@ time.sleep(0.1)

def to_dict(self) -> typing.Dict[str, object]:
'''Exports important information as dictionary.'''
"""Exports important information as dictionary."""
info = self.plotter.common_info()
# TODO: get the rest of this filled out
return dict(
plot_id=self.plot_id[:8],
k=self.k,
tmp_dir=self.tmpdir,
dst_dir=self.dstdir,
plot_id=self.plot_id_prefix(),
# k=self.k,
tmp_dir=info.tmpdir,
dst_dir=info.dstdir,
progress=str(self.progress()),

@@ -531,6 +300,5 @@ tmp_usage=self.get_tmp_usage(),

time_sys=self.get_time_sys(),
time_iowait=self.get_time_iowait()
time_iowait=self.get_time_iowait(),
)
def get_mem_usage(self) -> int:

@@ -542,7 +310,8 @@ # Total, inc swapped

total_bytes = 0
info = self.plotter.common_info()
with contextlib.suppress(FileNotFoundError):
# The directory might not exist at this name, or at all, anymore
with os.scandir(self.tmpdir) as it:
with os.scandir(info.tmpdir) as it:
for entry in it:
if self.plot_id in entry.name:
if info.plot_id is not None and info.plot_id in entry.name:
with contextlib.suppress(FileNotFoundError):

@@ -554,12 +323,12 @@ # The file might disappear; this being an estimate we don't care

def get_run_status(self) -> str:
'''Running, suspended, etc.'''
"""Running, suspended, etc."""
status = self.proc.status()
if status == psutil.STATUS_RUNNING:
return 'RUN'
return "RUN"
elif status == psutil.STATUS_SLEEPING:
return 'SLP'
return "SLP"
elif status == psutil.STATUS_DISK_SLEEP:
return 'DSK'
return "DSK"
elif status == psutil.STATUS_STOPPED:
return 'STP'
return "STP"
else:

@@ -580,3 +349,3 @@ return self.proc.status() # type: ignore[no-any-return]

cpu_times = self.proc.cpu_times()
iowait = getattr(cpu_times, 'iowait', None)
iowait = getattr(cpu_times, "iowait", None)
if iowait is None:

@@ -587,3 +356,3 @@ return None

def suspend(self, reason: str = '') -> None:
def suspend(self, reason: str = "") -> None:
self.proc.suspend()

@@ -599,5 +368,8 @@ self.status_note = reason

for dir in [self.tmpdir, self.tmp2dir, self.dstdir]:
info = self.plotter.common_info()
for dir in [info.tmpdir, info.tmp2dir, info.dstdir]:
if dir is not None:
temp_files.update(glob.glob(os.path.join(dir, f"plot-*-{self.plot_id}.tmp")))
temp_files.update(
glob.glob(os.path.join(dir, f"plot-*-{info.plot_id}*.tmp"))
)

@@ -607,3 +379,3 @@ return temp_files

def cancel(self) -> None:
'Cancel an already running job'
"Cancel an already running job"
# We typically suspend the job as the first action in killing it, so it

@@ -610,0 +382,0 @@ # doesn't create more tmp files during death. However, terminate() won't

@@ -16,36 +16,54 @@ import logging

# Plotman libraries
from plotman import \
archive # for get_archdir_freebytes(). TODO: move to avoid import loop
from plotman import (
archive,
) # for get_archdir_freebytes(). TODO: move to avoid import loop
from plotman import job, plot_util
import plotman.configuration
import plotman.plotters.chianetwork
import plotman.plotters.madmax
# Constants
MIN = 60 # Seconds
HR = 3600 # Seconds
MIN = 60 # Seconds
HR = 3600 # Seconds
MAX_AGE = 1000_000_000 # Arbitrary large number of seconds
MAX_AGE = 1000_000_000 # Arbitrary large number of seconds
def dstdirs_to_furthest_phase(all_jobs: typing.List[job.Job]) -> typing.Dict[str, job.Phase]:
'''Return a map from dst dir to a phase tuple for the most progressed job
that is emitting to that dst dir.'''
def dstdirs_to_furthest_phase(
all_jobs: typing.List[job.Job],
) -> typing.Dict[str, job.Phase]:
"""Return a map from dst dir to a phase tuple for the most progressed job
that is emitting to that dst dir."""
result: typing.Dict[str, job.Phase] = {}
for j in all_jobs:
if not j.dstdir in result.keys() or result[j.dstdir] < j.progress():
result[j.dstdir] = j.progress()
dstdir = j.plotter.common_info().dstdir
if not dstdir in result.keys() or result[dstdir] < j.progress():
result[dstdir] = j.progress()
return result
def dstdirs_to_youngest_phase(all_jobs: typing.List[job.Job]) -> typing.Dict[str, job.Phase]:
'''Return a map from dst dir to a phase tuple for the least progressed job
that is emitting to that dst dir.'''
def dstdirs_to_youngest_phase(
all_jobs: typing.List[job.Job],
) -> typing.Dict[str, job.Phase]:
"""Return a map from dst dir to a phase tuple for the least progressed job
that is emitting to that dst dir."""
result: typing.Dict[str, job.Phase] = {}
for j in all_jobs:
if j.dstdir is None:
dstdir = j.plotter.common_info().dstdir
if dstdir is None:
continue
if not j.dstdir in result.keys() or result[j.dstdir] > j.progress():
result[j.dstdir] = j.progress()
if not dstdir in result.keys() or result[dstdir] > j.progress():
result[dstdir] = j.progress()
return result
def phases_permit_new_job(phases: typing.List[job.Phase], d: str, sched_cfg: plotman.configuration.Scheduling, dir_cfg: plotman.configuration.Directories) -> bool:
'''Scheduling logic: return True if it's OK to start a new job on a tmp dir
with existing jobs in the provided phases.'''
def phases_permit_new_job(
phases: typing.List[job.Phase],
d: str,
sched_cfg: plotman.configuration.Scheduling,
dir_cfg: plotman.configuration.Directories,
) -> bool:
"""Scheduling logic: return True if it's OK to start a new job on a tmp dir
with existing jobs in the provided phases."""
# Filter unknown-phase jobs

@@ -62,11 +80,11 @@ phases = [ph for ph in phases if ph.known]

stagger_phase_limit = sched_cfg.tmpdir_stagger_phase_limit
# Limit the total number of jobs per tmp dir. Default to overall max
# jobs configuration, but restrict to any configured overrides.
max_plots = sched_cfg.tmpdir_max_jobs
# Check if any overrides exist for the current job
if sched_cfg.tmp_overrides is not None and d in sched_cfg.tmp_overrides:
curr_overrides = sched_cfg.tmp_overrides[d]
# Check for and assign major & minor phase overrides

@@ -83,5 +101,5 @@ if curr_overrides.tmpdir_stagger_phase_major is not None:

max_plots = curr_overrides.tmpdir_max_jobs
milestone = job.Phase(major,minor)
milestone = job.Phase(major, minor)
# Check if phases pass the criteria

@@ -96,3 +114,9 @@ if len([p for p in phases if p < milestone]) >= stagger_phase_limit:

def maybe_start_new_plot(dir_cfg: plotman.configuration.Directories, sched_cfg: plotman.configuration.Scheduling, plotting_cfg: plotman.configuration.Plotting, log_cfg: plotman.configuration.Logging) -> typing.Tuple[bool, str]:
def maybe_start_new_plot(
dir_cfg: plotman.configuration.Directories,
sched_cfg: plotman.configuration.Scheduling,
plotting_cfg: plotman.configuration.Plotting,
log_cfg: plotman.configuration.Logging,
) -> typing.Tuple[bool, str]:
jobs = job.Job.get_running_jobs(log_cfg.plots)

@@ -102,17 +126,33 @@

youngest_job_age = min(jobs, key=job.Job.get_time_wall).get_time_wall() if jobs else MAX_AGE
youngest_job_age = (
min(jobs, key=job.Job.get_time_wall).get_time_wall() if jobs else MAX_AGE
)
global_stagger = int(sched_cfg.global_stagger_m * MIN)
if (youngest_job_age < global_stagger):
wait_reason = 'stagger (%ds/%ds)' % (youngest_job_age, global_stagger)
if youngest_job_age < global_stagger:
wait_reason = "stagger (%ds/%ds)" % (youngest_job_age, global_stagger)
elif len(jobs) >= sched_cfg.global_max_jobs:
wait_reason = 'max jobs (%d) - (%ds/%ds)' % (sched_cfg.global_max_jobs, youngest_job_age, global_stagger)
wait_reason = "max jobs (%d) - (%ds/%ds)" % (
sched_cfg.global_max_jobs,
youngest_job_age,
global_stagger,
)
else:
tmp_to_all_phases = [(d, job.job_phases_for_tmpdir(d, jobs)) for d in dir_cfg.tmp]
eligible = [ (d, phases) for (d, phases) in tmp_to_all_phases
if phases_permit_new_job(phases, d, sched_cfg, dir_cfg) ]
rankable = [ (d, phases[0]) if phases else (d, job.Phase(known=False))
for (d, phases) in eligible ]
tmp_to_all_phases = [
(d, job.job_phases_for_tmpdir(d, jobs)) for d in dir_cfg.tmp
]
eligible = [
(d, phases)
for (d, phases) in tmp_to_all_phases
if phases_permit_new_job(phases, d, sched_cfg, dir_cfg)
]
rankable = [
(d, phases[0]) if phases else (d, job.Phase(known=False))
for (d, phases) in eligible
]
if not eligible:
wait_reason = 'no eligible tempdirs (%ds/%ds)' % (youngest_job_age, global_stagger)
wait_reason = "no eligible tempdirs (%ds/%ds)" % (
youngest_job_age,
global_stagger,
)
else:

@@ -122,3 +162,3 @@ # Plot to oldest tmpdir.

dst_dirs = dir_cfg.get_dst_directories()
dst_dirs = [d.rstrip("/") for d in dir_cfg.get_dst_directories()]

@@ -134,11 +174,18 @@ dstdir: str

# Select the dst dir least recently selected
dir2ph = { d:ph for (d, ph) in dstdirs_to_youngest_phase(jobs).items()
if d in dst_dirs and ph is not None}
unused_dirs = [d for d in dst_dirs if d not in dir2ph.keys()]
dstdir = ''
dir2ph = {
d.rstrip("/"): ph
for (d, ph) in dstdirs_to_youngest_phase(jobs).items()
if d.rstrip("/") in dst_dirs and ph is not None
}
unused_dirs = [
d.rstrip("/") for d in dst_dirs if d not in dir2ph.keys()
]
dstdir = ""
if unused_dirs:
dstdir = random.choice(unused_dirs)
else:
def key(key: str) -> job.Phase:
return dir2ph[key]
dstdir = max(dir2ph, key=key)

@@ -149,3 +196,17 @@

plot_args: typing.List[str]
if plotting_cfg.type == "madmax":
if plotting_cfg.type == "bladebit":
if plotting_cfg.bladebit is None:
raise Exception(
"bladebit plotter selected but not configured, report this as a plotman bug",
)
plot_args = plotman.plotters.bladebit.create_command_line(
options=plotting_cfg.bladebit,
tmpdir=tmpdir,
tmp2dir=dir_cfg.tmp2,
dstdir=dstdir,
farmer_public_key=plotting_cfg.farmer_pk,
pool_public_key=plotting_cfg.pool_pk,
pool_contract_address=plotting_cfg.pool_contract_address,
)
elif plotting_cfg.type == "madmax":
if plotting_cfg.madmax is None:

@@ -155,12 +216,11 @@ raise Exception(

)
plot_args = [
plotting_cfg.madmax.executable,
'-n', str(1),
'-r', str(plotting_cfg.madmax.n_threads),
'-u', str(plotting_cfg.madmax.n_buckets),
'-t', tmpdir if tmpdir.endswith('/') else (tmpdir + '/'),
'-d', dstdir if dstdir.endswith('/') else (dstdir + '/') ]
if dir_cfg.tmp2 is not None:
plot_args.append('-2')
plot_args.append(dir_cfg.tmp2 if dir_cfg.tmp2.endswith('/') else (dir_cfg.tmp2 + '/'))
plot_args = plotman.plotters.madmax.create_command_line(
options=plotting_cfg.madmax,
tmpdir=tmpdir,
tmp2dir=dir_cfg.tmp2,
dstdir=dstdir,
farmer_public_key=plotting_cfg.farmer_pk,
pool_public_key=plotting_cfg.pool_pk,
pool_contract_address=plotting_cfg.pool_contract_address,
)
else:

@@ -171,33 +231,20 @@ if plotting_cfg.chia is None:

)
plot_args = [plotting_cfg.chia.executable, 'plots', 'create',
'-k', str(plotting_cfg.chia.k),
'-r', str(plotting_cfg.chia.n_threads),
'-u', str(plotting_cfg.chia.n_buckets),
'-b', str(plotting_cfg.chia.job_buffer),
'-t', tmpdir,
'-d', dstdir ]
if plotting_cfg.chia.e:
plot_args.append('-e')
if plotting_cfg.chia.x:
plot_args.append('-x')
if dir_cfg.tmp2 is not None:
plot_args.append('-2')
plot_args.append(dir_cfg.tmp2)
if plotting_cfg.farmer_pk is not None:
plot_args.append('-f')
plot_args.append(plotting_cfg.farmer_pk)
if plotting_cfg.pool_pk is not None:
plot_args.append('-p')
plot_args.append(plotting_cfg.pool_pk)
if plotting_cfg.pool_contract_address is not None:
plot_args.append('-c')
plot_args.append(plotting_cfg.pool_contract_address)
plot_args = plotman.plotters.chianetwork.create_command_line(
options=plotting_cfg.chia,
tmpdir=tmpdir,
tmp2dir=dir_cfg.tmp2,
dstdir=dstdir,
farmer_public_key=plotting_cfg.farmer_pk,
pool_public_key=plotting_cfg.pool_pk,
pool_contract_address=plotting_cfg.pool_contract_address,
)
logmsg = "Starting plot job: %s ; logging to %s" % (
" ".join(plot_args),
log_file_path,
)
logmsg = ('Starting plot job: %s ; logging to %s' % (' '.join(plot_args), log_file_path))
# TODO: CAMPid 09840103109429840981397487498131
try:
open_log_file = open(log_file_path, 'x')
open_log_file = open(log_file_path, "x")
except FileExistsError:

@@ -211,4 +258,4 @@ # The desired log file name already exists. Most likely another

message = (
f'Plot log file already exists, skipping attempt to start a'
f' new plot: {log_file_path!r}'
f"Plot log file already exists, skipping attempt to start a"
f" new plot: {log_file_path!r}"
)

@@ -218,4 +265,4 @@ return (False, logmsg)

message = (
f'Unable to open log file. Verify that the directory exists'
f' and has proper write permissions: {log_file_path!r}'
f"Unable to open log file. Verify that the directory exists"
f" and has proper write permissions: {log_file_path!r}"
)

@@ -230,3 +277,3 @@ raise Exception(message) from e

if sys.platform == 'win32':
if sys.platform == "win32":
creationflags = subprocess.CREATE_NO_WINDOW

@@ -241,7 +288,9 @@ nice = psutil.BELOW_NORMAL_PRIORITY_CLASS

# subprocess.CREATE_NO_WINDOW to make the process independent of this controlling tty and have no console window on Windows.
p = subprocess.Popen(plot_args,
p = subprocess.Popen(
plot_args,
stdout=open_log_file,
stderr=subprocess.STDOUT,
start_new_session=True,
creationflags=creationflags)
creationflags=creationflags,
)

@@ -253,7 +302,13 @@ psutil.Process(p.pid).nice(nice)

def select_jobs_by_partial_id(jobs: typing.List[job.Job], partial_id: str) -> typing.List[job.Job]:
def select_jobs_by_partial_id(
jobs: typing.List[job.Job], partial_id: str
) -> typing.List[job.Job]:
selected = []
for j in jobs:
if j.plot_id.startswith(partial_id):
plot_id = j.plotter.common_info().plot_id
if plot_id is None:
continue
if plot_id.startswith(partial_id):
selected.append(j)
return selected

@@ -12,10 +12,13 @@ import math

def df_b(d: str) -> int:
'Return free space for directory (in bytes)'
"Return free space for directory (in bytes)"
usage = shutil.disk_usage(d)
return usage.free
def get_plotsize(k: int) -> int:
return (int)(_get_plotsize_scaler(k) * k * pow(2, k))
def human_format(num: float, precision: int, powerOfTwo: bool = False) -> str:

@@ -28,31 +31,36 @@ divisor = 1024 if powerOfTwo else 1000

num /= divisor
result = (('%.' + str(precision) + 'f%s') %
(num, ['', 'K', 'M', 'G', 'T', 'P'][magnitude]))
result = ("%." + str(precision) + "f%s") % (
num,
["", "K", "M", "G", "T", "P"][magnitude],
)
if powerOfTwo and magnitude > 0:
result += 'i'
result += "i"
return result
def time_format(sec: typing.Optional[int]) -> str:
if sec is None:
return '-'
return "-"
if sec < 60:
return '%ds' % sec
return "%ds" % sec
else:
return '%d:%02d' % (int(sec / 3600), int((sec % 3600) / 60))
return "%d:%02d" % (int(sec / 3600), int((sec % 3600) / 60))
def split_path_prefix(items: typing.List[str]) -> typing.Tuple[str, typing.List[str]]:
if not items:
return ('', [])
return ("", [])
prefix = os.path.commonpath(items)
if prefix == '/':
return ('', items)
if prefix == "/":
return ("", items)
else:
remainders = [ os.path.relpath(i, prefix) for i in items ]
remainders = [os.path.relpath(i, prefix) for i in items]
return (prefix, remainders)
def list_plots(d: str) -> typing.List[str]:
'List completed plots in a directory (not recursive)'
"List completed plots in a directory (not recursive)"
plots = []

@@ -72,2 +80,3 @@ for plot in os.listdir(d):

def column_wrap(

@@ -78,13 +87,17 @@ items: typing.Sequence[object],

) -> typing.List[typing.List[typing.Optional[object]]]:
'''Take items, distribute among n_cols columns, and return a set
of rows containing the slices of those columns.'''
"""Take items, distribute among n_cols columns, and return a set
of rows containing the slices of those columns."""
rows: typing.List[typing.List[typing.Optional[object]]] = []
n_rows = math.ceil(len(items) / n_cols)
for row in range(n_rows):
row_items = items[row : : n_rows]
row_items = items[row::n_rows]
# Pad and truncate
padded: typing.List[typing.Optional[object]] = [*row_items, *([filler] * n_cols)]
padded: typing.List[typing.Optional[object]] = [
*row_items,
*([filler] * n_cols),
]
rows.append(list(padded[:n_cols]))
return rows
# use k as index to get plotsize_scaler, note that 0 means the value is not calculated yet

@@ -94,6 +107,7 @@ # we can safely assume that k is never going to be greater than 100, due to the exponential nature of plot file size, this avoids using constants from chiapos

def calc_average_size_of_entry(k: int, table_index: int) -> float:
'''
"""
calculate the average size of entries in bytes, given k and table_index
'''
"""
# assumes that chia uses constant park size for each table

@@ -103,6 +117,7 @@ # it is approximately k/8, uses chia's actual park size calculation to get a more accurate estimation

def _get_probability_of_entries_kept(k: int, table_index: int) -> float:
'''
"""
get the probibility of entries in table of table_index that is not dropped
'''
"""
# the formula is derived from https://www.chia.net/assets/proof_of_space.pdf, section Space Required, p5 and pt

@@ -113,4 +128,4 @@

pow_2_k = 2**k
pow_2_k = 2 ** k
if table_index == 5:

@@ -123,6 +138,7 @@ # p5

def _get_plotsize_scaler(k: int) -> float:
'''
"""
get scaler for plot size so that the plot size can be calculated by scaler * k * 2 ** k
'''
"""
result = _plotsize_scaler_cache[k]

@@ -135,6 +151,7 @@ if result > 0:

def _get_plotsize_scaler_impl(k: int) -> float:
'''
"""
get scaler for plot size so that the plot size can be calculated by scaler * k * 2 ** k
'''
"""

@@ -150,2 +167,1 @@ result = 0.0

return result

@@ -18,84 +18,143 @@ import argparse

# Plotman libraries
from plotman import analyzer, archive, configuration, interactive, manager, plot_util, reporting, csv_exporter
from plotman import (
analyzer,
archive,
configuration,
interactive,
manager,
plot_util,
reporting,
csv_exporter,
)
from plotman import resources as plotman_resources
from plotman.job import Job
class PlotmanArgParser:
def add_idprefix_arg(self, subparser: argparse.ArgumentParser) -> None:
subparser.add_argument(
'idprefix',
type=str,
nargs='+',
help='disambiguating prefix of plot ID')
"idprefix", type=str, nargs="+", help="disambiguating prefix of plot ID"
)
def parse_args(self) -> typing.Any:
parser = argparse.ArgumentParser(description='Chia plotting manager.')
sp = parser.add_subparsers(dest='cmd')
parser = argparse.ArgumentParser(description="Chia plotting manager.")
sp = parser.add_subparsers(dest="cmd")
sp.add_parser('version', help='print the version')
sp.add_parser("version", help="print the version")
p_status = sp.add_parser('status', help='show current plotting status')
p_status.add_argument("--json", action="store_true",
help="export status report in json format")
p_status = sp.add_parser("status", help="show current plotting status")
p_status.add_argument(
"--json", action="store_true", help="export status report in json format"
)
sp.add_parser('prometheus', help='show current plotting status in prometheus readable format')
sp.add_parser(
"prometheus",
help="show current plotting status in prometheus readable format",
)
sp.add_parser('dirs', help='show directories info')
sp.add_parser("dirs", help="show directories info")
p_interactive = sp.add_parser('interactive', help='run interactive control/monitoring mode')
p_interactive.add_argument('--autostart-plotting', action='store_true', default=None, dest='autostart_plotting')
p_interactive.add_argument('--no-autostart-plotting', action='store_false', default=None, dest='autostart_plotting')
p_interactive.add_argument('--autostart-archiving', action='store_true', default=None, dest='autostart_archiving')
p_interactive.add_argument('--no-autostart-archiving', action='store_false', default=None, dest='autostart_archiving')
p_interactive = sp.add_parser(
"interactive", help="run interactive control/monitoring mode"
)
p_interactive.add_argument(
"--autostart-plotting",
action="store_true",
default=None,
dest="autostart_plotting",
)
p_interactive.add_argument(
"--no-autostart-plotting",
action="store_false",
default=None,
dest="autostart_plotting",
)
p_interactive.add_argument(
"--autostart-archiving",
action="store_true",
default=None,
dest="autostart_archiving",
)
p_interactive.add_argument(
"--no-autostart-archiving",
action="store_false",
default=None,
dest="autostart_archiving",
)
sp.add_parser('dsched', help='print destination dir schedule')
sp.add_parser("dsched", help="print destination dir schedule")
sp.add_parser('plot', help='run plotting loop')
sp.add_parser("plot", help="run plotting loop")
sp.add_parser('archive', help='move completed plots to farming location')
sp.add_parser("archive", help="move completed plots to farming location")
p_export = sp.add_parser('export', help='exports metadata from the plot logs as CSV')
p_export.add_argument('-o', dest='save_to', default=None, type=str, help='save to file. Optional, prints to stdout by default')
p_export = sp.add_parser(
"export", help="exports metadata from the plot logs as CSV"
)
p_export.add_argument(
"-o",
dest="save_to",
default=None,
type=str,
help="save to file. Optional, prints to stdout by default",
)
p_config = sp.add_parser('config', help='display or generate plotman.yaml configuration')
sp_config = p_config.add_subparsers(dest='config_subcommand')
sp_config.add_parser('generate', help='generate a default plotman.yaml file and print path')
sp_config.add_parser('path', help='show path to current plotman.yaml file')
p_config = sp.add_parser(
"config", help="display or generate plotman.yaml configuration"
)
sp_config = p_config.add_subparsers(dest="config_subcommand")
sp_config.add_parser(
"generate", help="generate a default plotman.yaml file and print path"
)
sp_config.add_parser("path", help="show path to current plotman.yaml file")
p_details = sp.add_parser('details', help='show details for job')
p_details = sp.add_parser("details", help="show details for job")
self.add_idprefix_arg(p_details)
p_logs = sp.add_parser('logs', help='fetch the logs for job')
p_logs = sp.add_parser("logs", help="fetch the logs for job")
p_logs.add_argument('-f', '--follow', action='store_true', help='Follow log output')
p_logs.add_argument(
"-f", "--follow", action="store_true", help="Follow log output"
)
self.add_idprefix_arg(p_logs)
p_files = sp.add_parser('files', help='show temp files associated with job')
p_files = sp.add_parser("files", help="show temp files associated with job")
self.add_idprefix_arg(p_files)
p_kill = sp.add_parser('kill', help='kill job (and cleanup temp files)')
p_kill.add_argument('-f', '--force', action='store_true', default=False, help="Don't ask for confirmation before killing the plot job")
p_kill = sp.add_parser("kill", help="kill job (and cleanup temp files)")
p_kill.add_argument(
"-f",
"--force",
action="store_true",
default=False,
help="Don't ask for confirmation before killing the plot job",
)
self.add_idprefix_arg(p_kill)
p_suspend = sp.add_parser('suspend', help='suspend job')
p_suspend = sp.add_parser("suspend", help="suspend job")
self.add_idprefix_arg(p_suspend)
p_resume = sp.add_parser('resume', help='resume suspended job')
p_resume = sp.add_parser("resume", help="resume suspended job")
self.add_idprefix_arg(p_resume)
p_analyze = sp.add_parser('analyze', help='analyze timing stats of completed jobs')
p_analyze = sp.add_parser(
"analyze", help="analyze timing stats of completed jobs"
)
p_analyze.add_argument('--clipterminals',
action='store_true',
help='Ignore first and last plot in a logfile, useful for '
'focusing on the steady-state in a staggered parallel '
'plotting test (requires plotting with -n>2)')
p_analyze.add_argument('--bytmp',
action='store_true',
help='slice by tmp dirs')
p_analyze.add_argument('--bybitfield',
action='store_true',
help='slice by bitfield/non-bitfield sorting')
p_analyze.add_argument('logfile', type=str, nargs='+',
help='logfile(s) to analyze')
p_analyze.add_argument(
"--clipterminals",
action="store_true",
help="Ignore first and last plot in a logfile, useful for "
"focusing on the steady-state in a staggered parallel "
"plotting test (requires plotting with -n>2)",
)
p_analyze.add_argument("--bytmp", action="store_true", help="slice by tmp dirs")
p_analyze.add_argument(
"--bybitfield",
action="store_true",
help="slice by bitfield/non-bitfield sorting",
)
p_analyze.add_argument(
"logfile", type=str, nargs="+", help="logfile(s) to analyze"
)

@@ -105,5 +164,6 @@ args = parser.parse_args()

def get_term_width() -> int:
try:
(rows_string, columns_string) = os.popen('stty size', 'r').read().split()
(rows_string, columns_string) = os.popen("stty size", "r").read().split()
columns = int(columns_string)

@@ -114,7 +174,11 @@ except:

class Iso8601Formatter(logging.Formatter):
def formatTime(self, record: logging.LogRecord, datefmt: typing.Optional[str] = None) -> str:
time = pendulum.from_timestamp(timestamp=record.created, tz='local')
return time.isoformat(timespec='microseconds')
def formatTime(
self, record: logging.LogRecord, datefmt: typing.Optional[str] = None
) -> str:
time = pendulum.from_timestamp(timestamp=record.created, tz="local")
return time.isoformat(timespec="microseconds")
def main() -> None:

@@ -126,17 +190,20 @@ random.seed()

if args.cmd == 'version':
if args.cmd == "version":
import pkg_resources
print(pkg_resources.get_distribution('plotman'))
print(pkg_resources.get_distribution("plotman"))
return
elif args.cmd == 'config':
elif args.cmd == "config":
config_file_path = configuration.get_path()
if args.config_subcommand == 'path':
if args.config_subcommand == "path":
if os.path.isfile(config_file_path):
print(config_file_path)
return
print(f"No 'plotman.yaml' file exists at expected location: '{config_file_path}'")
print(
f"No 'plotman.yaml' file exists at expected location: '{config_file_path}'"
)
print(f"To generate a default config file, run: 'plotman config generate'")
return
if args.config_subcommand == 'generate':
if args.config_subcommand == "generate":
if os.path.isfile(config_file_path):

@@ -148,4 +215,4 @@ overwrite = None

"\tInput 'y' to overwrite existing file, or 'n' to exit without overwrite."
).lower()
if overwrite == 'n':
).lower()
if overwrite == "n":
print("\nExited without overrwriting file")

@@ -156,3 +223,5 @@ return

# creating the parent plotman file/directory if it does not yet exist
with importlib.resources.path(plotman_resources, "plotman.yaml") as default_config:
with importlib.resources.path(
plotman_resources, "plotman.yaml"
) as default_config:
config_dir = os.path.dirname(config_file_path)

@@ -172,32 +241,54 @@

preset_target_definitions_text = importlib.resources.read_text(
plotman_resources, "target_definitions.yaml",
plotman_resources,
"target_definitions.yaml",
)
cfg = configuration.get_validated_configs(config_text, config_path, preset_target_definitions_text)
cfg = configuration.get_validated_configs(
config_text, config_path, preset_target_definitions_text
)
with cfg.setup():
root_logger = logging.getLogger()
handler = logging.handlers.RotatingFileHandler(
root_handler = logging.handlers.RotatingFileHandler(
backupCount=10,
encoding='utf-8',
encoding="utf-8",
filename=cfg.logging.application,
maxBytes=10_000_000,
)
formatter = Iso8601Formatter(fmt='%(asctime)s: %(message)s')
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_formatter = Iso8601Formatter(fmt="%(asctime)s: %(message)s")
root_handler.setFormatter(root_formatter)
root_logger.addHandler(root_handler)
root_logger.setLevel(logging.INFO)
root_logger.info('abc')
root_logger.info("Start root logger")
disk_space_logger = logging.getLogger("disk_space")
disk_space_logger.propagate = False
disk_space_handler = logging.handlers.RotatingFileHandler(
backupCount=10,
encoding="utf-8",
filename=cfg.logging.disk_spaces,
maxBytes=10_000_000,
)
disk_space_formatter = Iso8601Formatter(fmt="%(asctime)s: %(message)s")
disk_space_handler.setFormatter(disk_space_formatter)
disk_space_logger.addHandler(disk_space_handler)
disk_space_logger.setLevel(logging.INFO)
disk_space_logger.info("Start disk space logger")
#
# Stay alive, spawning plot jobs
#
if args.cmd == 'plot':
print('...starting plot loop')
if args.cmd == "plot":
print("...starting plot loop")
while True:
wait_reason = manager.maybe_start_new_plot(cfg.directories, cfg.scheduling, cfg.plotting, cfg.logging)
(started, msg) = manager.maybe_start_new_plot(
cfg.directories, cfg.scheduling, cfg.plotting, cfg.logging
)
# TODO: report this via a channel that can be polled on demand, so we don't spam the console
if wait_reason:
print('...sleeping %d s: %s' % (cfg.scheduling.polling_time_s, wait_reason))
if started:
print("%s" % (msg))
else:
print("...sleeping %d s: %s" % (cfg.scheduling.polling_time_s, msg))
root_logger.info("[plot] %s", msg)

@@ -209,6 +300,7 @@ time.sleep(cfg.scheduling.polling_time_s)

#
elif args.cmd == 'analyze':
elif args.cmd == "analyze":
analyzer.analyze(args.logfile, args.clipterminals,
args.bytmp, args.bybitfield)
analyzer.analyze(
args.logfile, args.clipterminals, args.bytmp, args.bybitfield
)

@@ -218,8 +310,8 @@ #

#
elif args.cmd == 'export':
logfilenames = glob.glob(os.path.join(cfg.logging.plots, '*.plot.log'))
elif args.cmd == "export":
logfilenames = glob.glob(os.path.join(cfg.logging.plots, "*.plot.log"))
if args.save_to is None:
csv_exporter.generate(logfilenames=logfilenames, file=sys.stdout)
else:
with open(args.save_to, 'w', encoding='utf-8') as file:
with open(args.save_to, "w", encoding="utf-8") as file:
csv_exporter.generate(logfilenames=logfilenames, file=file)

@@ -231,3 +323,3 @@

# Status report
if args.cmd == 'status':
if args.cmd == "status":
if args.json:

@@ -245,10 +337,18 @@ # convert jobs list into json

# Prometheus report
if args.cmd == 'prometheus':
if args.cmd == "prometheus":
print(reporting.prometheus_report(jobs))
# Directories report
elif args.cmd == 'dirs':
print(reporting.dirs_report(jobs, cfg.directories, cfg.archiving, cfg.scheduling, get_term_width()))
elif args.cmd == "dirs":
print(
reporting.dirs_report(
jobs,
cfg.directories,
cfg.archiving,
cfg.scheduling,
get_term_width(),
)
)
elif args.cmd == 'interactive':
elif args.cmd == "interactive":
interactive.run_interactive(

@@ -261,24 +361,38 @@ cfg=cfg,

# Start running archival
elif args.cmd == 'archive':
elif args.cmd == "archive":
if cfg.archiving is None:
print('archiving not configured but is required for this command')
start_msg = (
"archiving not configured but is required for this command"
)
print(start_msg)
root_logger.info("[archive] %s", start_msg)
else:
print('...starting archive loop')
start_msg = "...starting archive loop"
print(start_msg)
root_logger.info("[archive] %s", start_msg)
firstit = True
while True:
if not firstit:
print('Sleeping 60s until next iteration...')
time.sleep(60)
print(
"Sleeping %d s until next iteration..."
% (cfg.scheduling.polling_time_s)
)
time.sleep(cfg.scheduling.polling_time_s)
jobs = Job.get_running_jobs(cfg.logging.plots)
firstit = False
archiving_status, log_messages = archive.spawn_archive_process(cfg.directories, cfg.archiving, cfg.logging, jobs)
for log_message in log_messages:
print(log_message)
archiving_status, log_messages = archive.spawn_archive_process(
cfg.directories, cfg.archiving, cfg.logging, jobs
)
if log_messages:
for log_message in log_messages:
print(log_message)
root_logger.info("[archive] %s", log_message)
else:
root_logger.info("[archive] %s", archiving_status)
# Debugging: show the destination drive usage schedule
elif args.cmd == 'dsched':
elif args.cmd == "dsched":
for (d, ph) in manager.dstdirs_to_furthest_phase(jobs).items():
print(' %s : %s' % (d, str(ph)))
print(" %s : %s" % (d, str(ph)))

@@ -288,3 +402,3 @@ #

#
elif args.cmd in [ 'details', 'logs', 'files', 'kill', 'suspend', 'resume' ]:
elif args.cmd in ["details", "logs", "files", "kill", "suspend", "resume"]:
print(args)

@@ -295,3 +409,3 @@

# TODO: clean up treatment of wildcard
if args.idprefix[0] == 'all':
if args.idprefix[0] == "all":
selected = jobs

@@ -301,45 +415,53 @@ else:

selected = manager.select_jobs_by_partial_id(jobs, args.idprefix[0])
if (len(selected) == 0):
print('Error: %s matched no jobs.' % args.idprefix[0])
if len(selected) == 0:
print("Error: %s matched no jobs." % args.idprefix[0])
elif len(selected) > 1:
print('Error: "%s" matched multiple jobs:' % args.idprefix[0])
for j in selected:
print(' %s' % j.plot_id)
print(" %s" % j.plotter.common_info().plot_id)
selected = []
for job in selected:
if args.cmd == 'details':
if args.cmd == "details":
print(job.status_str_long())
elif args.cmd == 'logs':
elif args.cmd == "logs":
job.print_logs(args.follow)
elif args.cmd == 'files':
elif args.cmd == "files":
temp_files = job.get_temp_files()
for f in temp_files:
print(' %s' % f)
print(" %s" % f)
elif args.cmd == 'kill':
elif args.cmd == "kill":
info = job.plotter.common_info()
# First suspend so job doesn't create new files
print('Pausing PID %d, plot id %s' % (job.proc.pid, job.plot_id))
print(
"Pausing PID %d, plot id %s" % (job.proc.pid, info.plot_id)
)
job.suspend()
temp_files = job.get_temp_files()
print('Will kill pid %d, plot id %s' % (job.proc.pid, job.plot_id))
print('Will delete %d temp files' % len(temp_files))
print(
"Will kill pid %d, plot id %s"
% (job.proc.pid, info.plot_id)
)
print("Will delete %d temp files" % len(temp_files))
if args.force:
conf = 'y'
conf = "y"
else:
conf = input('Are you sure? ("y" to confirm): ')
if (conf != 'y'):
print('Canceled. If you wish to resume the job, do so manually.')
if conf != "y":
print(
"Canceled. If you wish to resume the job, do so manually."
)
else:
print('killing...')
print("killing...")
job.cancel()
print('cleaning up temp files...')
print("cleaning up temp files...")

@@ -349,7 +471,7 @@ for f in temp_files:

elif args.cmd == 'suspend':
print('Suspending ' + job.plot_id)
elif args.cmd == "suspend":
print(f"Suspending {job.plotter.common_info().plot_id}")
job.suspend()
elif args.cmd == 'resume':
print('Resuming ' + job.plot_id)
elif args.cmd == "resume":
print(f"Resuming {job.plotter.common_info().plot_id}")
job.resume()

@@ -19,6 +19,9 @@ import time

def phases_str(phases: typing.List[job.Phase], max_num: typing.Optional[int] = None) -> str:
'''Take a list of phase-subphase pairs and return them as a compact string'''
def phases_str(
phases: typing.List[job.Phase], max_num: typing.Optional[int] = None
) -> str:
"""Take a list of phase-subphase pairs and return them as a compact string"""
if not max_num or len(phases) <= max_num:
return ' '.join([str(pair) for pair in phases])
return " ".join([str(pair) for pair in phases])
else:

@@ -28,10 +31,12 @@ n_first = math.floor(max_num / 2)

n_elided = len(phases) - (n_first + n_last)
first = ' '.join([str(pair) for pair in phases[:n_first]])
first = " ".join([str(pair) for pair in phases[:n_first]])
elided = " [+%d] " % n_elided
last = ' '.join([str(pair) for pair in phases[n_first + n_elided:]])
last = " ".join([str(pair) for pair in phases[n_first + n_elided :]])
return first + elided + last
def n_at_ph(jobs: typing.List[job.Job], ph: job.Phase) -> int:
return sum([1 for j in jobs if j.progress() == ph])
def n_to_char(n: int) -> str:

@@ -41,3 +46,3 @@ n_to_char_map = dict(enumerate(" .:;!"))

if n < 0:
return 'X' # Should never be negative
return "X" # Should never be negative
elif n >= len(n_to_char_map):

@@ -48,24 +53,32 @@ n = len(n_to_char_map) - 1

def job_viz(jobs: typing.List[job.Job]) -> str:
# TODO: Rewrite this in a way that ensures we count every job
# even if the reported phases don't line up with expectations.
result = ''
result += '1'
result = ""
result += "1"
for i in range(0, 8):
result += n_to_char(n_at_ph(jobs, job.Phase(1, i)))
result += '2'
result += "2"
for i in range(0, 8):
result += n_to_char(n_at_ph(jobs, job.Phase(2, i)))
result += '3'
result += "3"
for i in range(0, 7):
result += n_to_char(n_at_ph(jobs, job.Phase(3, i)))
result += '4'
result += "4"
result += n_to_char(n_at_ph(jobs, job.Phase(4, 0)))
return result
# Command: plotman status
# Shows a general overview of all running jobs
def status_report(jobs: typing.List[job.Job], width: int, height: typing.Optional[int] = None, tmp_prefix: str = '', dst_prefix: str = '') -> str:
'''height, if provided, will limit the number of rows in the table,
showing first and last rows, row numbers and an elipsis in the middle.'''
def status_report(
jobs: typing.List[job.Job],
width: int,
height: typing.Optional[int] = None,
tmp_prefix: str = "",
dst_prefix: str = "",
) -> str:
"""height, if provided, will limit the number of rows in the table,
showing first and last rows, row numbers and an elipsis in the middle."""
abbreviate_jobs_list = False

@@ -82,10 +95,24 @@ n_begin_rows = 0

tab = tt.Texttable()
headings = ['plot id', 'plotter', 'k', 'tmp', 'dst', 'wall', 'phase', 'tmp',
'pid', 'stat', 'mem', 'user', 'sys', 'io']
headings = [
"plot id",
"plotter",
"k",
"tmp",
"dst",
"wall",
"phase",
"tmp",
"pid",
"stat",
"mem",
"user",
"sys",
"io",
]
if height:
headings.insert(0, '#')
headings.insert(0, "#")
tab.header(headings)
tab.set_cols_dtype('t' * len(headings))
tab.set_cols_align('r' * len(headings))
tab.set_header_align('r' * len(headings))
tab.set_cols_dtype("t" * len(headings))
tab.set_cols_align("r" * len(headings))
tab.set_header_align("r" * len(headings))

@@ -95,3 +122,3 @@ for i, j in enumerate(sorted(jobs, key=job.Job.get_time_wall)):

if abbreviate_jobs_list and i == n_begin_rows:
row = ['...'] + ([''] * (len(headings) - 1))
row = ["..."] + ([""] * (len(headings) - 1))
# Omitted row

@@ -105,23 +132,29 @@ elif abbreviate_jobs_list and i > n_begin_rows and i < (len(jobs) - n_end_rows):

with j.proc.oneshot():
row = [j.plot_id[:8], # Plot ID
str(j.plotter), # chia or madmax
str(j.k), # k size
abbr_path(j.tmpdir, tmp_prefix), # Temp directory
abbr_path(j.dstdir, dst_prefix), # Destination directory
plot_util.time_format(j.get_time_wall()), # Time wall
str(j.progress()), # Overall progress (major:minor)
plot_util.human_format(j.get_tmp_usage(), 0), # Current temp file size
j.proc.pid, # System pid
j.get_run_status(), # OS status for the job process
plot_util.human_format(j.get_mem_usage(), 1, True), # Memory usage
plot_util.time_format(j.get_time_user()), # user system time
plot_util.time_format(j.get_time_sys()), # system time
plot_util.time_format(j.get_time_iowait()) # io wait
]
info = j.plotter.common_info()
row = [
j.plot_id_prefix(), # Plot ID
info.type, # chia or madmax
str(info.plot_size), # k size
abbr_path(info.tmpdir, tmp_prefix), # Temp directory
abbr_path(info.dstdir, dst_prefix), # Destination directory
plot_util.time_format(j.get_time_wall()), # Time wall
str(j.progress()), # Overall progress (major:minor)
plot_util.human_format(
j.get_tmp_usage(), 0
), # Current temp file size
j.proc.pid, # System pid
j.get_run_status(), # OS status for the job process
plot_util.human_format(
j.get_mem_usage(), 1, True
), # Memory usage
plot_util.time_format(j.get_time_user()), # user system time
plot_util.time_format(j.get_time_sys()), # system time
plot_util.time_format(j.get_time_iowait()), # io wait
]
except (psutil.NoSuchProcess, psutil.AccessDenied):
# In case the job has disappeared
row = [j.plot_id[:8]] + (['--'] * (len(headings) - 2))
row = [j.plot_id_prefix()] + (["--"] * (len(headings) - 2))
if height:
row.insert(0, '%3d' % i)
row.insert(0, "%3d" % i)

@@ -135,66 +168,89 @@ tab.add_row(row)

def to_prometheus_format(metrics: typing.Dict[str, str], prom_stati: typing.Sequence[typing.Tuple[str, typing.Mapping[str, typing.Optional[int]]]]) -> typing.List[str]:
def to_prometheus_format(
metrics: typing.Dict[str, str],
prom_stati: typing.Sequence[
typing.Tuple[str, typing.Mapping[str, typing.Union[int, float, None]]]
],
) -> typing.List[str]:
prom_str_list = []
for metric_name, metric_desc in metrics.items():
prom_str_list.append(f'# HELP {metric_name} {metric_desc}.')
prom_str_list.append(f'# TYPE {metric_name} gauge')
prom_str_list.append(f"# HELP {metric_name} {metric_desc}.")
prom_str_list.append(f"# TYPE {metric_name} gauge")
for label_str, values in prom_stati:
prom_str_list.append('%s{%s} %s' % (metric_name, label_str, values[metric_name]))
prom_str_list.append(
"%s{%s} %s" % (metric_name, label_str, values[metric_name])
)
return prom_str_list
def prometheus_report(jobs: typing.List[job.Job], tmp_prefix: str = '', dst_prefix: str = '') -> str:
def prometheus_report(
jobs: typing.List[job.Job], tmp_prefix: str = "", dst_prefix: str = ""
) -> str:
metrics = {
'plotman_plot_phase_major': 'The phase the plot is currently in',
'plotman_plot_phase_minor': 'The part of the phase the plot is currently in',
'plotman_plot_tmp_usage': 'Tmp dir usage in bytes',
'plotman_plot_mem_usage': 'Memory usage in bytes',
'plotman_plot_user_time': 'Processor time (user) in s',
'plotman_plot_sys_time': 'Processor time (sys) in s',
'plotman_plot_iowait_time': 'Processor time (iowait) in s',
"plotman_plot_phase_major": "The phase the plot is currently in",
"plotman_plot_phase_minor": "The part of the phase the plot is currently in",
"plotman_plot_phase_major_minor": "major and minor",
"plotman_plot_tmp_usage": "Tmp dir usage in bytes",
"plotman_plot_mem_usage": "Memory usage in bytes",
"plotman_plot_user_time": "Processor time (user) in s",
"plotman_plot_sys_time": "Processor time (sys) in s",
"plotman_plot_iowait_time": "Processor time (iowait) in s",
}
prom_stati = []
for j in jobs:
info = j.plotter.common_info()
labels = {
'plot_id': j.plot_id[:8],
'tmp_dir': abbr_path(j.tmpdir, tmp_prefix),
'dst_dir': abbr_path(j.dstdir, dst_prefix),
'run_status': j.get_run_status(),
'phase': str(j.progress()),
"plot_id": j.plot_id_prefix(),
"tmp_dir": abbr_path(info.tmpdir, tmp_prefix),
"dst_dir": abbr_path(info.dstdir, dst_prefix),
"run_status": j.get_run_status(),
"phase": str(j.progress()),
}
label_str = ','.join([f'{k}="{v}"' for k, v in labels.items()])
label_str = ",".join([f'{k}="{v}"' for k, v in labels.items()])
values = {
'plotman_plot_phase_major': j.progress().major,
'plotman_plot_phase_minor': j.progress().minor,
'plotman_plot_tmp_usage': j.get_tmp_usage(),
'plotman_plot_mem_usage': j.get_mem_usage(),
'plotman_plot_user_time': j.get_time_user(),
'plotman_plot_sys_time': j.get_time_sys(),
'plotman_plot_iowait_time': j.get_time_iowait(),
"plotman_plot_phase_major": j.progress().major,
"plotman_plot_phase_minor": j.progress().minor,
"plotman_plot_phase_major_minor": j.progress().major
+ (j.progress().minor / 10),
"plotman_plot_tmp_usage": j.get_tmp_usage(),
"plotman_plot_mem_usage": j.get_mem_usage(),
"plotman_plot_user_time": j.get_time_user(),
"plotman_plot_sys_time": j.get_time_sys(),
"plotman_plot_iowait_time": j.get_time_iowait(),
}
prom_stati += [(label_str, values)]
return '\n'.join(to_prometheus_format(metrics, prom_stati))
return "\n".join(to_prometheus_format(metrics, prom_stati))
def summary(jobs: typing.List[job.Job], tmp_prefix: str = '') -> str:
def summary(jobs: typing.List[job.Job], tmp_prefix: str = "") -> str:
"""Creates a small summary of running jobs"""
summary = [
'Total jobs: {0}'.format(len(jobs))
]
summary = ["Total jobs: {0}".format(len(jobs))]
# Number of jobs in each tmp disk
tmp_dir_paths = sorted([abbr_path(job.tmpdir, tmp_prefix) for job in jobs])
tmp_dir_paths = sorted(
[abbr_path(job.plotter.common_info().tmpdir, tmp_prefix) for job in jobs]
)
for key, group in groupby(tmp_dir_paths, lambda dir: dir):
summary.append(
'Jobs in {0}: {1}'.format(key, len(list(group)))
)
summary.append("Jobs in {0}: {1}".format(key, len(list(group))))
return '\n'.join(summary)
return "\n".join(summary)
def tmp_dir_report(jobs: typing.List[job.Job], dir_cfg: configuration.Directories, sched_cfg: configuration.Scheduling, width: int, start_row: typing.Optional[int] = None, end_row: typing.Optional[int] = None, prefix: str = '') -> str:
'''start_row, end_row let you split the table up if you want'''
def tmp_dir_report(
jobs: typing.List[job.Job],
dir_cfg: configuration.Directories,
sched_cfg: configuration.Scheduling,
width: int,
start_row: typing.Optional[int] = None,
end_row: typing.Optional[int] = None,
prefix: str = "",
) -> str:
"""start_row, end_row let you split the table up if you want"""
tab = tt.Texttable()
headings = ['tmp', 'ready', 'phases']
headings = ["tmp", "ready", "phases"]
tab.header(headings)
tab.set_cols_dtype('t' * len(headings))
tab.set_cols_align('r' * (len(headings) - 1) + 'l')
tab.set_cols_dtype("t" * len(headings))
tab.set_cols_align("r" * (len(headings) - 1) + "l")
for i, d in enumerate(sorted(dir_cfg.tmp)):

@@ -205,17 +261,20 @@ if (start_row and i < start_row) or (end_row and i >= end_row):

ready = manager.phases_permit_new_job(phases, d, sched_cfg, dir_cfg)
row = [abbr_path(d, prefix), 'OK' if ready else '--', phases_str(phases, 5)]
row = [abbr_path(d, prefix), "OK" if ready else "--", phases_str(phases, 5)]
tab.add_row(row)
tab.set_max_width(width)
tab.set_deco(tt.Texttable.BORDER | tt.Texttable.HEADER )
tab.set_deco(tt.Texttable.BORDER | tt.Texttable.HEADER)
tab.set_deco(0) # No borders
return tab.draw() # type: ignore[no-any-return]
def dst_dir_report(jobs: typing.List[job.Job], dstdirs: typing.List[str], width: int, prefix: str='') -> str:
def dst_dir_report(
jobs: typing.List[job.Job], dstdirs: typing.List[str], width: int, prefix: str = ""
) -> str:
tab = tt.Texttable()
dir2oldphase = manager.dstdirs_to_furthest_phase(jobs)
dir2newphase = manager.dstdirs_to_youngest_phase(jobs)
headings = ['dst', 'plots', 'GBfree', 'inbnd phases', 'pri']
headings = ["dst", "plots", "GBfree", "inbnd phases", "pri"]
tab.header(headings)
tab.set_cols_dtype('t' * len(headings))
tab.set_cols_dtype("t" * len(headings))

@@ -232,15 +291,19 @@ for d in sorted(dstdirs):

priority = archive.compute_priority(eldest_ph, gb_free, n_plots)
row = [abbr_path(d, prefix), n_plots, gb_free,
phases_str(phases, 5), priority]
row = [abbr_path(d, prefix), n_plots, gb_free, phases_str(phases, 5), priority]
tab.add_row(row)
tab.set_max_width(width)
tab.set_deco(tt.Texttable.BORDER | tt.Texttable.HEADER )
tab.set_deco(tt.Texttable.BORDER | tt.Texttable.HEADER)
tab.set_deco(0) # No borders
return tab.draw() # type: ignore[no-any-return]
def arch_dir_report(archdir_freebytes: typing.Dict[str, int], width: int, prefix: str = '') -> str:
cells = ['%s:%5dG' % (abbr_path(d, prefix), int(int(space) / plot_util.GB))
for (d, space) in sorted(archdir_freebytes.items())]
def arch_dir_report(
archdir_freebytes: typing.Dict[str, int], width: int, prefix: str = ""
) -> str:
cells = [
"%s:%5dG" % (abbr_path(d, prefix), int(int(space) / plot_util.GB))
for (d, space) in sorted(archdir_freebytes.items())
]
if not cells:
return ''
return ""

@@ -250,10 +313,17 @@ n_columns = int(width / (len(max(cells, key=len)) + 3))

tab.set_max_width(width)
for row in plot_util.column_wrap(cells, n_columns, filler=''):
for row in plot_util.column_wrap(cells, n_columns, filler=""):
tab.add_row(row)
tab.set_cols_align('r' * (n_columns))
tab.set_cols_align("r" * (n_columns))
tab.set_deco(tt.Texttable.VLINES)
return tab.draw() # type: ignore[no-any-return]
# TODO: remove this
def dirs_report(jobs: typing.List[job.Job], dir_cfg: configuration.Directories, arch_cfg: typing.Optional[configuration.Archiving], sched_cfg: configuration.Scheduling, width: int) -> str:
def dirs_report(
jobs: typing.List[job.Job],
dir_cfg: configuration.Directories,
arch_cfg: typing.Optional[configuration.Archiving],
sched_cfg: configuration.Scheduling,
width: int,
) -> str:
dst_dir = dir_cfg.get_dst_directories()

@@ -266,10 +336,13 @@ reports = [

freebytes, archive_log_messages = archive.get_archdir_freebytes(arch_cfg)
reports.extend([
'archive dirs free space:',
arch_dir_report(freebytes, width),
*archive_log_messages,
])
reports.extend(
[
"archive dirs free space:",
arch_dir_report(freebytes, width),
*archive_log_messages,
]
)
return '\n'.join(reports) + '\n'
return "\n".join(reports) + "\n"
def json_report(jobs: typing.List[job.Job]) -> str:

@@ -288,2 +361,1 @@ jobs_dicts = []

return json.dumps(stuff)

@@ -178,1 +178,7 @@ # Default/example plotman.yaml configuration file

n_buckets: 256 # Default is 256
n_buckets3: 256 # Default is 256
n_rmulti2: 1 # Default is 1
bladebit:
# executable: /path/to/bladebit/.bin/release/bladebit
threads: 2
no_numa: false
+21
-1

@@ -29,3 +29,3 @@ [tox]

coverage report --fail-under=35 --ignore-errors --show-missing
diff-cover --fail-under=100 {posargs:--compare-branch=development} coverage.xml
diff-cover {posargs:--compare-branch=development} coverage.xml

@@ -38,1 +38,21 @@ [testenv:check-hints-py{37,38,39}]

mypy --package plotman
[testenv:check-format]
basepython = python3.8
changedir = {toxinidir}
extras =
checks
# TODO: would be nice to install extras but not package...
#skip_install = true
commands =
black --config {toxinidir}/pyproject.toml --check --diff {toxinidir}
[testenv:format]
basepython = python3.8
changedir = {toxinidir}
extras =
checks
# TODO: would be nice to install extras but not package...
#skip_install = true
commands =
black --config {toxinidir}/pyproject.toml {toxinidir}

@@ -1,1 +0,1 @@

0.5.1
0.5.2
import contextlib
import datetime
import locale
import importlib.resources
import os
import pathlib
import typing
import pendulum
import pytest
from plotman import job
from plotman._tests import resources
class FauxJobWithLogfile:
    """Minimal stand-in for plotman.job.Job.

    Job's real ``__init__()`` does too much to use directly in tests, so this
    fake carries only what ``Job.init_from_logfile()`` needs.
    """

    # Populated by Job.init_from_logfile() during the test.
    start_time: pendulum.DateTime

    def __init__(self, logfile_path: str) -> None:
        self.logfile = logfile_path

    def update_from_logfile(self) -> None:
        # Deliberately a no-op for the test double.
        pass
@pytest.fixture(name='logfile_path')
def logfile_fixture(tmp_path: pathlib.Path) -> pathlib.Path:
    """Copy the reference chia log resource into a temp dir and return its path."""
    log_name = '2021-04-04T19_00_47.681088-0400.log'
    destination = tmp_path / log_name
    destination.write_bytes(importlib.resources.read_binary(resources, log_name))
    return destination
@contextlib.contextmanager
def set_locale(name: str) -> typing.Generator[str, None, None]:
    """Temporarily switch the process-wide locale to ``name``.

    Yields the string returned by ``locale.setlocale`` for the new locale and
    restores the previous one on exit.  NOTE: mutates global interpreter
    state, so this is not thread safe.
    """
    saved = locale.setlocale(locale.LC_ALL)
    try:
        yield locale.setlocale(locale.LC_ALL, name)
    finally:
        locale.setlocale(locale.LC_ALL, saved)
# Build the expected timestamp once, under the "C" locale, so strptime's
# English month/weekday names ('Sun', 'Apr') parse regardless of the locale
# the test host happens to run under.
with set_locale('C'):
    log_file_time = datetime.datetime.strptime('Sun Apr 4 19:00:50 2021', '%a %b %d %H:%M:%S %Y')
@pytest.mark.parametrize(
    argnames=['locale_name'],
    argvalues=[['C'], ['en_US.UTF-8'], ['de_DE.UTF-8']],
)
def test_job_parses_time_with_non_english_locale(logfile_path: pathlib.Path, locale_name: str) -> None:
    """The log's timestamp must parse correctly even under non-English locales."""
    faux_job = FauxJobWithLogfile(logfile_path=os.fspath(logfile_path))
    with set_locale(locale_name):
        job.Job.init_from_logfile(self=faux_job)  # type: ignore[arg-type]
    assert faux_job.start_time == log_file_time
@pytest.mark.parametrize(
    argnames=['arguments'],
    argvalues=[
        [['-h']],
        [['--help']],
        [['-k', '32']],
        [['-k32']],
        [['-k', '32', '--help']],
    ],
    ids=str,
)
def test_chia_plots_create_parsing_does_not_fail(arguments: typing.List[str]) -> None:
    """Parsing must not raise for help flags or common ``-k`` spellings."""
    full_command = ['python', 'chia', 'plots', 'create', *arguments]
    job.parse_chia_plots_create_command_line(command_line=full_command)
@pytest.mark.parametrize(
    argnames=['arguments'],
    argvalues=[
        [['-h']],
        [['--help']],
        [['-k', '32', '--help']],
    ],
    ids=str,
)
def test_chia_plots_create_parsing_detects_help(arguments: typing.List[str]) -> None:
    """A help flag anywhere on the line must set ``.help`` on the result."""
    full_command = ['python', 'chia', 'plots', 'create', *arguments]
    result = job.parse_chia_plots_create_command_line(command_line=full_command)
    assert result.help
@pytest.mark.parametrize(
    argnames=['arguments'],
    argvalues=[
        [[]],
        [['-k32']],
        [['-k', '32']],
    ],
    ids=str,
)
def test_chia_plots_create_parsing_detects_not_help(arguments: typing.List[str]) -> None:
    """Ordinary command lines must leave ``.help`` unset on the result."""
    full_command = ['python', 'chia', 'plots', 'create', *arguments]
    result = job.parse_chia_plots_create_command_line(command_line=full_command)
    assert not result.help
@pytest.mark.parametrize(
    argnames=['arguments'],
    argvalues=[
        [[]],
        [['-k32']],
        [['-k', '32']],
        [['--size', '32']],
    ],
    ids=str,
)
def test_chia_plots_create_parsing_handles_argument_forms(arguments: typing.List[str]) -> None:
    """All spellings of the size option (and the default) must yield size 32."""
    full_command = ['python', 'chia', 'plots', 'create', *arguments]
    result = job.parse_chia_plots_create_command_line(command_line=full_command)
    assert result.parameters['size'] == 32
@pytest.mark.parametrize(
    argnames=['arguments'],
    argvalues=[
        [['--size32']],
        [['--not-an-actual-option']],
    ],
    ids=str,
)
def test_chia_plots_create_parsing_identifies_errors(arguments: typing.List[str]) -> None:
    """Malformed options must be reported via ``.error`` rather than raising."""
    full_command = ['python', 'chia', 'plots', 'create', *arguments]
    result = job.parse_chia_plots_create_command_line(command_line=full_command)
    assert result.error is not None
import importlib.resources
from plotman._tests import resources
from plotman.log_parser import PlotLogParser
import plotman.job
import plotman.plotinfo
# Fully-populated PlotInfo expected from parsing the reference chia log in
# plotman/_tests/resources/2021-04-04T19_00_47.681088-0400.log; compared
# against the parser output in test_should_correctly_parse() below.
example_info = plotman.plotinfo.PlotInfo(
    started_at=plotman.job.parse_chia_plot_time(s="Sun Apr 4 19:00:50 2021"),
    plot_id="3eb8a37981de1cc76187a36ed947ab4307943cf92967a7e166841186c7899e24",
    buckets=128,
    threads=4,
    buffer=4000,
    plot_size=32,
    tmp_dir1="/farm/yards/901",
    tmp_dir2="/farm/yards/901",
    phase1_duration_raw=17571.981,
    phase2_duration_raw=6911.621,
    phase3_duration_raw=14537.188,
    phase4_duration_raw=924.288,
    total_time_raw=39945.080,
    copy_time_raw=501.696,
    filename="/farm/wagons/801/plot-k32-2021-04-04-19-00-3eb8a37981de1cc76187a36ed947ab4307943cf92967a7e166841186c7899e24.plot",
)
def test_should_correctly_parse() -> None:
    """Parse the reference chia log; check the PlotInfo and its duration helpers."""
    with importlib.resources.open_text(
        resources,
        "2021-04-04T19_00_47.681088-0400.log",
    ) as file:
        info = PlotLogParser().parse(file)

    assert info == example_info

    # (attribute prefix, whole seconds, minutes, hours) for each derived duration.
    expected_durations = [
        ("phase1_duration", 17572, 293, 4.88),
        ("phase2_duration", 6912, 115, 1.92),
        ("phase3_duration", 14537, 242, 4.04),
        ("phase4_duration", 924, 15, 0.26),
        ("total_time", 39945, 666, 11.10),
        ("copy_time", 502, 8, 0.14),
    ]
    for prefix, seconds, minutes, hours in expected_durations:
        assert getattr(info, prefix) == seconds
        assert getattr(info, f"{prefix}_minutes") == minutes
        assert getattr(info, f"{prefix}_hours") == hours

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

# mypy: allow_untyped_decorators
import functools
import typing
import click
from pathlib import Path
import typing_extensions
class CommandProtocol(typing_extensions.Protocol):
    """Structural interface of the click commands kept in the version registry."""

    def make_context(self, info_name: str, args: typing.List[str]) -> click.Context:
        """Parse ``args`` into a click context."""
        ...

    def __call__(self) -> None:
        """Run the command."""
        ...
class Commands:
    """Registry of click commands keyed by chia version tuple."""

    def __init__(self) -> None:
        self.by_version: typing.Dict[typing.Sequence[int], CommandProtocol] = {}

    def register(self, version: typing.Sequence[int]) -> typing.Callable[[CommandProtocol], None]:
        """Return a decorator that files a command under ``version``.

        The membership test runs first on purpose: a non-tuple version fails
        hashing there before the explicit tuple check below is reached.
        """
        if version in self.by_version:
            raise Exception(f'Version already registered: {version!r}')
        if not isinstance(version, tuple):
            raise Exception(f'Version must be a tuple: {version!r}')
        return functools.partial(self._decorator, version=version)

    def _decorator(self, command: CommandProtocol, *, version: typing.Sequence[int]) -> None:
        # Actual registration step bound by register() via functools.partial.
        self.by_version[version] = command

    def __getitem__(self, item: typing.Sequence[int]) -> typing.Callable[[], None]:
        return self.by_version[item]

    def latest_command(self) -> CommandProtocol:
        """Return the command registered under the highest version tuple."""
        _, newest = max(self.by_version.items())
        return newest
commands = Commands()
# chia 1.1.2's `plots create` options, copied verbatim from upstream (see the
# links below).  The function body is deliberately empty: the command is
# registered so its option declarations can be used to parse command lines.
@commands.register(version=(1, 1, 2))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.2/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.2/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option("--override-k", help="Force size smaller than 32", default=False, show_default=True, is_flag=True)
@click.option("-n", "--num", help="Number of plots or challenges", type=int, default=1, show_default=True)
@click.option("-b", "--buffer", help="Megabytes for sort/plot buffer", type=int, default=4608, show_default=True)
@click.option("-r", "--num_threads", help="Number of threads to use", type=int, default=2, show_default=True)
@click.option("-u", "--buckets", help="Number of buckets", type=int, default=128, show_default=True)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option("-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None)
@click.option("-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-2", "--tmp2_dir", help="Second temporary directory for plotting files", type=click.Path(), default=None)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-i", "--plotid", help="PlotID in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-m", "--memo", help="Memo in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True)
@click.option(
    "-x", "--exclude_final_dir", help="Skips adding [final dir] to harvester for farming", default=False, is_flag=True
)
# end copied code
def _cli_1_1_2() -> None:
    pass
# chia 1.1.3's `plots create` options, copied verbatim from upstream (see the
# links below).  The function body is deliberately empty: the command is
# registered so its option declarations can be used to parse command lines.
@commands.register(version=(1, 1, 3))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.3/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.3/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option("--override-k", help="Force size smaller than 32", default=False, show_default=True, is_flag=True)
@click.option("-n", "--num", help="Number of plots or challenges", type=int, default=1, show_default=True)
@click.option("-b", "--buffer", help="Megabytes for sort/plot buffer", type=int, default=4608, show_default=True)
@click.option("-r", "--num_threads", help="Number of threads to use", type=int, default=2, show_default=True)
@click.option("-u", "--buckets", help="Number of buckets", type=int, default=128, show_default=True)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option("-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None)
@click.option("-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-2", "--tmp2_dir", help="Second temporary directory for plotting files", type=click.Path(), default=None)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-i", "--plotid", help="PlotID in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-m", "--memo", help="Memo in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True)
@click.option(
    "-x", "--exclude_final_dir", help="Skips adding [final dir] to harvester for farming", default=False, is_flag=True
)
# end copied code
def _cli_1_1_3() -> None:
    pass
# chia 1.1.4's `plots create` options, copied verbatim from upstream (see the
# links below; note the -b buffer default changed to 3389 in this release).
# The function body is deliberately empty: the command is registered so its
# option declarations can be used to parse command lines.
@commands.register(version=(1, 1, 4))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.4/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.4/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option("--override-k", help="Force size smaller than 32", default=False, show_default=True, is_flag=True)
@click.option("-n", "--num", help="Number of plots or challenges", type=int, default=1, show_default=True)
@click.option("-b", "--buffer", help="Megabytes for sort/plot buffer", type=int, default=3389, show_default=True)
@click.option("-r", "--num_threads", help="Number of threads to use", type=int, default=2, show_default=True)
@click.option("-u", "--buckets", help="Number of buckets", type=int, default=128, show_default=True)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option("-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None)
@click.option("-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-2", "--tmp2_dir", help="Second temporary directory for plotting files", type=click.Path(), default=None)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-i", "--plotid", help="PlotID in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-m", "--memo", help="Memo in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True)
@click.option(
    "-x", "--exclude_final_dir", help="Skips adding [final dir] to harvester for farming", default=False, is_flag=True
)
# end copied code
def _cli_1_1_4() -> None:
    pass
# chia 1.1.5's `plots create` options, copied verbatim from upstream (see the
# links below).  The function body is deliberately empty: the command is
# registered so its option declarations can be used to parse command lines.
@commands.register(version=(1, 1, 5))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.5/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.5/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option("--override-k", help="Force size smaller than 32", default=False, show_default=True, is_flag=True)
@click.option("-n", "--num", help="Number of plots or challenges", type=int, default=1, show_default=True)
@click.option("-b", "--buffer", help="Megabytes for sort/plot buffer", type=int, default=3389, show_default=True)
@click.option("-r", "--num_threads", help="Number of threads to use", type=int, default=2, show_default=True)
@click.option("-u", "--buckets", help="Number of buckets", type=int, default=128, show_default=True)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option("-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None)
@click.option("-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-2", "--tmp2_dir", help="Second temporary directory for plotting files", type=click.Path(), default=None)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-i", "--plotid", help="PlotID in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-m", "--memo", help="Memo in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True)
@click.option(
    "-x", "--exclude_final_dir", help="Skips adding [final dir] to harvester for farming", default=False, is_flag=True
)
# end copied code
def _cli_1_1_5() -> None:
    pass
@commands.register(version=(1, 1, 6))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.6/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.6/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option("--override-k", help="Force size smaller than 32", default=False, show_default=True, is_flag=True)
@click.option("-n", "--num", help="Number of plots or challenges", type=int, default=1, show_default=True)
@click.option("-b", "--buffer", help="Megabytes for sort/plot buffer", type=int, default=3389, show_default=True)
@click.option("-r", "--num_threads", help="Number of threads to use", type=int, default=2, show_default=True)
@click.option("-u", "--buckets", help="Number of buckets", type=int, default=128, show_default=True)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option("-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None)
@click.option("-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-2", "--tmp2_dir", help="Second temporary directory for plotting files", type=click.Path(), default=None)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-i", "--plotid", help="PlotID in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-m", "--memo", help="Memo in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True)
@click.option(
    "-x", "--exclude_final_dir", help="Skips adding [final dir] to harvester for farming", default=False, is_flag=True
)
# end copied code
def _cli_1_1_6() -> None:
    """Stand-in for chia-blockchain 1.1.6's ``plots create`` CLI.

    The body is intentionally empty: the command is registered with
    ``commands`` under version ``(1, 1, 6)`` purely so its copied
    ``click`` option declarations (between the "start/end copied code"
    markers, which must stay byte-identical to upstream) are available
    for argument parsing.
    """
    pass
@commands.register(version=(1, 1, 7))
@click.command()
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.7/LICENSE
# https://github.com/Chia-Network/chia-blockchain/blob/1.1.7/chia/cmds/plots.py#L39-L83
# start copied code
@click.option("-k", "--size", help="Plot size", type=int, default=32, show_default=True)
@click.option("--override-k", help="Force size smaller than 32", default=False, show_default=True, is_flag=True)
@click.option("-n", "--num", help="Number of plots or challenges", type=int, default=1, show_default=True)
@click.option("-b", "--buffer", help="Megabytes for sort/plot buffer", type=int, default=3389, show_default=True)
@click.option("-r", "--num_threads", help="Number of threads to use", type=int, default=2, show_default=True)
@click.option("-u", "--buckets", help="Number of buckets", type=int, default=128, show_default=True)
@click.option(
    "-a",
    "--alt_fingerprint",
    type=int,
    default=None,
    help="Enter the alternative fingerprint of the key you want to use",
)
@click.option(
    "-c",
    "--pool_contract_address",
    type=str,
    default=None,
    help="Address of where the pool reward will be sent to. Only used if alt_fingerprint and pool public key are None",
)
@click.option("-f", "--farmer_public_key", help="Hex farmer public key", type=str, default=None)
@click.option("-p", "--pool_public_key", help="Hex public key of pool", type=str, default=None)
@click.option(
    "-t",
    "--tmp_dir",
    help="Temporary directory for plotting files",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-2", "--tmp2_dir", help="Second temporary directory for plotting files", type=click.Path(), default=None)
@click.option(
    "-d",
    "--final_dir",
    help="Final directory for plots (relative or absolute)",
    type=click.Path(),
    default=Path("."),
    show_default=True,
)
@click.option("-i", "--plotid", help="PlotID in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-m", "--memo", help="Memo in hex for reproducing plots (debugging only)", type=str, default=None)
@click.option("-e", "--nobitfield", help="Disable bitfield", default=False, is_flag=True)
@click.option(
    "-x", "--exclude_final_dir", help="Skips adding [final dir] to harvester for farming", default=False, is_flag=True
)
# end copied code
def _cli_1_1_7() -> None:
    """Stand-in for chia-blockchain 1.1.7's ``plots create`` CLI.

    The body is intentionally empty: the command is registered with
    ``commands`` under version ``(1, 1, 7)`` purely so its copied
    ``click`` option declarations (between the "start/end copied code"
    markers, which must stay byte-identical to upstream) are available
    for argument parsing.
    """
    pass
import os
import re
import typing
from plotman.plotinfo import PlotInfo
import plotman.job
class PlotLogParser:
    """Parser for a finished plotting job's log.

    Each matcher method inspects one log line; when the line matches,
    the extracted value is recorded on the given :class:`PlotInfo` and
    the matcher returns ``True`` so :meth:`parse` can stop trying
    further matchers for that line.

    Fix over the previous revision: comparisons against ``None`` now use
    ``is``/``is not`` (PEP 8 E711) instead of ``!=``.
    """

    def parse(self, file: typing.TextIO) -> PlotInfo:
        """Parse a single log and return its info.

        :param file: an open text stream over the plotter's log output.
        :return: a :class:`PlotInfo` populated with whatever values were
            found; fields stay at their defaults for lines never seen.
        """
        entry = PlotInfo()
        # ignore_line first so noisy "Bucket ..." lines short-circuit cheaply.
        matchers = [
            self.ignore_line,
            self.plot_id,
            self.plot_start_date,
            self.plot_size,
            self.buffer_size,
            self.buckets,
            self.threads,
            self.plot_dirs,
            self.phase1_duration,
            self.phase2_duration,
            self.phase3_duration,
            self.phase4_duration,
            self.total_time,
            self.copy_time,
            self.filename
        ]
        for line in file:
            for matcher in matchers:
                if matcher(line, entry):
                    break
        return entry

    # ID: 3eb8a37981de1cc76187a36ed947ab4307943cf92967a7e166841186c7899e24
    def plot_id(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r'^ID: (.+)$', line)
        if m:
            entry.plot_id = m.group(1)
        return m is not None

    # Renamed final file from "/farm/wagons/801/abc.plot.2.tmp" to "/farm/wagons/801/abc.plot"
    def filename(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r'^Renamed final file from ".+" to "(.+)"', line)
        if m:
            entry.filename = m.group(1)
        return m is not None

    # Time for phase 1 = 17571.981 seconds. CPU (178.600%) Sun Apr  4 23:53:42 2021
    def phase1_duration(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Time for phase 1 = (\d+\.\d+) seconds", line)
        if m:
            entry.phase1_duration_raw = float(m.group(1))
        return m is not None

    # Time for phase 2 = 6911.621 seconds. CPU (71.780%) Mon Apr  5 01:48:54 2021
    def phase2_duration(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Time for phase 2 = (\d+\.\d+) seconds", line)
        if m:
            entry.phase2_duration_raw = float(m.group(1))
        return m is not None

    # Time for phase 3 = 14537.188 seconds. CPU (82.730%) Mon Apr  5 05:51:11 2021
    def phase3_duration(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Time for phase 3 = (\d+\.\d+) seconds", line)
        if m:
            entry.phase3_duration_raw = float(m.group(1))
        return m is not None

    # Time for phase 4 = 924.288 seconds. CPU (86.810%) Mon Apr  5 06:06:35 2021
    def phase4_duration(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Time for phase 4 = (\d+\.\d+) seconds", line)
        if m:
            entry.phase4_duration_raw = float(m.group(1))
        return m is not None

    # Total time = 39945.080 seconds. CPU (123.100%) Mon Apr  5 06:06:35 2021
    def total_time(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Total time = (\d+\.\d+) seconds", line)
        if m:
            entry.total_time_raw = float(m.group(1))
        return m is not None

    # Copy time = 501.696 seconds. CPU (23.860%) Sun May  9 22:52:41 2021
    def copy_time(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Copy time = (\d+\.\d+) seconds", line)
        if m:
            entry.copy_time_raw = float(m.group(1))
        return m is not None

    # Starting plotting progress into temporary dirs: /farm/yards/901 and /farm/yards/901
    def plot_dirs(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Starting plotting progress into temporary dirs: (.+) and (.+)$", line)
        if m:
            entry.tmp_dir1 = m.group(1)
            entry.tmp_dir2 = m.group(2)
        return m is not None

    # Using 4 threads of stripe size 65536
    def threads(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Using (\d+) threads of stripe size (\d+)", line)
        if m:
            entry.threads = int(m.group(1))
        return m is not None

    # "^Using (\\d+) buckets"
    def buckets(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Using (\d+) buckets", line)
        if m:
            entry.buckets = int(m.group(1))
        return m is not None

    # Buffer size is: 4000MiB
    def buffer_size(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r"^Buffer size is: (\d+)MiB", line)
        if m:
            entry.buffer = int(m.group(1))
        return m is not None

    # Plot size is: 32
    def plot_size(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r'^Plot size is: (\d+)', line)
        if m:
            entry.plot_size = int(m.group(1))
        return m is not None

    # Starting phase 1/4: Forward Propagation into tmp files... Sun May  9 17:36:12 2021
    def plot_start_date(self, line: str, entry: PlotInfo) -> bool:
        m = re.search(r'^Starting phase 1/4: Forward Propagation into tmp files\.\.\. (.+)', line)
        if m:
            entry.started_at = plotman.job.parse_chia_plot_time(s=m.group(1))
        return m is not None

    # Ignore lines starting with Bucket
    # 	Bucket 0 uniform sort. Ram: 3.250GiB, u_sort min: 0.563GiB, qs min: 0.281GiB.
    def ignore_line(self, line: str, _: PlotInfo) -> bool:
        m = re.search(r'^\tBucket', line)
        return m is not None
# mypy: allow_untyped_decorators
#
# Madmax is written in C++. Below is a mapping of its CLI options to Python.
# See: https://github.com/madMAx43v3r/chia-plotter/tree/master/src
# Note: versions are git commit refs, not semantic versioning
#
import functools
import typing
import click
from pathlib import Path
import typing_extensions
class CommandProtocol(typing_extensions.Protocol):
    """Structural type of a registered ``click`` command.

    Anything stored in a command registry must be callable and able to
    build a :class:`click.Context` from raw argument strings.
    """

    def make_context(self, info_name: str, args: typing.List[str]) -> click.Context:
        """Parse ``args`` into a :class:`click.Context` labeled ``info_name``."""
        ...

    def __call__(self) -> None:
        """Invoke the command body."""
        ...
class Commands:
    """Registry mapping a version tuple to its registered command."""

    def __init__(self) -> None:
        # Maps a version sequence (e.g. ``(1, 1, 5)``) to its command.
        self.by_version: typing.Dict[typing.Sequence[int], CommandProtocol] = {}

    def register(self, version: typing.Sequence[int]) -> typing.Callable[[CommandProtocol], None]:
        """Return a decorator that files a command under ``version``.

        Raises if ``version`` was already registered.
        """
        if version in self.by_version:
            raise Exception(f'Version already registered: {version!r}')

        def decorate(command: CommandProtocol) -> None:
            self._decorator(command, version=version)

        return decorate

    def _decorator(self, command: CommandProtocol, *, version: typing.Sequence[int]) -> None:
        # Actual registration step shared by every decorator returned above.
        self.by_version[version] = command

    def __getitem__(self, item: typing.Sequence[int]) -> typing.Callable[[], None]:
        """Look up the command registered for exactly ``item``."""
        return self.by_version[item]

    def latest_command(self) -> CommandProtocol:
        """Return the command registered under the highest version."""
        newest = max(self.by_version)
        return self.by_version[newest]
# Module-level registry, populated via the ``@commands.register(...)``
# decorators applied to the version-specific CLI stand-ins.
commands = Commands()
# Madmax Git on 2021-06-19 -> https://github.com/madMAx43v3r/chia-plotter/commit/c8121b987186c42c895b49818e6c13acecc51332
# TODO: make Commands able to handle this.  maybe configure with a list defining order?
#       for now we can just access directly.
# @commands.register(version=("c8121b9"))
@click.command()
# https://github.com/madMAx43v3r/chia-plotter/blob/master/LICENSE
# https://github.com/madMAx43v3r/chia-plotter/blob/master/src/chia_plot.cpp#L180
@click.option("-n", "--count", help="Number of plots to create (default = 1, -1 = infinite)",
              type=int, default=1, show_default=True)
@click.option("-r", "--threads", help="Number of threads (default = 4)",
              type=int, default=4, show_default=True)
@click.option("-u", "--buckets", help="Number of buckets (default = 256)",
              type=int, default=256, show_default=True)
@click.option("-v", "--buckets3", help="Number of buckets for phase 3+4 (default = buckets)",
              type=int, default=256)
@click.option("-t", "--tmpdir", help="Temporary directory, needs ~220 GiB (default = $PWD)",
              type=click.Path(), default=Path("."), show_default=True)
@click.option("-2", "--tmpdir2", help="Temporary directory 2, needs ~110 GiB [RAM] (default = <tmpdir>)",
              type=click.Path(), default=None)
@click.option("-d", "--finaldir", help="Final directory (default = <tmpdir>)",
              type=click.Path(), default=Path("."), show_default=True)
@click.option("-p", "--poolkey", help="Pool Public Key (48 bytes)",
              type=str, default=None)
@click.option("-f", "--farmerkey", help="Farmer Public Key (48 bytes)",
              type=str, default=None)
@click.option("-c", "--contract", help="Pool Contract Address (64 chars)",
              type=str, default=None)
@click.option("-G", "--tmptoggle", help="Alternate tmpdir/tmpdir2",
              type=str, default=None)
def _cli_c8121b9() -> None:
    """Stand-in for the madmax ``chia_plot`` CLI at commit ``c8121b9``.

    The body is intentionally empty: the ``click`` options above are a
    Python mapping of the C++ plotter's CLI (see the chia_plot.cpp link)
    kept only so those arguments can be parsed.  Versions here are git
    commit refs, not semantic versions, which is why registration with
    ``commands`` is commented out (see the TODO).
    """
    pass
import typing
import attr
import pendulum
@attr.mutable
class PlotInfo:
    """Represents the results of a finished plot job.

    The ``*_raw`` fields hold durations in seconds exactly as parsed
    from the plotter log; the matching properties expose them rounded
    to whole seconds, whole minutes, and hours (two decimals).
    """

    # Timestamp parsed from the log's phase-1 start line, if seen.
    started_at: typing.Optional[pendulum.DateTime] = None
    # Plot ID hex string.
    plot_id: str = ""
    # Number of sort buckets.
    buckets: int = 0
    # Number of plotter threads.
    threads: int = 0
    # Sort/plot buffer size (MiB).
    buffer: int = 0
    # Plot k-size.
    plot_size: int = 0
    # First and second temporary directories.
    tmp_dir1: str = ""
    tmp_dir2: str = ""
    # Per-phase durations in seconds, straight from the log.
    phase1_duration_raw: float = 0
    phase2_duration_raw: float = 0
    phase3_duration_raw: float = 0
    phase4_duration_raw: float = 0
    # Total plotting time in seconds.
    total_time_raw: float = 0
    # Time spent copying the finished plot, in seconds.
    copy_time_raw: float = 0
    # Final plot file name.
    filename: str = ""

    def in_progress(self) -> bool:
        "The plot is in progress if no total time has been reported."
        return self.total_time == 0

    # Phase 1 duration
    @property
    def phase1_duration(self) -> int:
        """Phase 1 duration rounded to whole seconds."""
        return round(self.phase1_duration_raw)

    @property
    def phase1_duration_minutes(self) -> int:
        """Phase 1 duration rounded to whole minutes."""
        return self.duration_to_minutes(self.phase1_duration_raw)

    @property
    def phase1_duration_hours(self) -> float:
        """Phase 1 duration in hours, two decimals."""
        return self.duration_to_hours(self.phase1_duration_raw)

    # Phase 2 duration
    @property
    def phase2_duration(self) -> int:
        """Phase 2 duration rounded to whole seconds."""
        return round(self.phase2_duration_raw)

    @property
    def phase2_duration_minutes(self) -> int:
        """Phase 2 duration rounded to whole minutes."""
        return self.duration_to_minutes(self.phase2_duration_raw)

    @property
    def phase2_duration_hours(self) -> float:
        """Phase 2 duration in hours, two decimals."""
        return self.duration_to_hours(self.phase2_duration_raw)

    # Phase 3 duration
    @property
    def phase3_duration(self) -> int:
        """Phase 3 duration rounded to whole seconds."""
        return round(self.phase3_duration_raw)

    @property
    def phase3_duration_minutes(self) -> int:
        """Phase 3 duration rounded to whole minutes."""
        return self.duration_to_minutes(self.phase3_duration_raw)

    @property
    def phase3_duration_hours(self) -> float:
        """Phase 3 duration in hours, two decimals."""
        return self.duration_to_hours(self.phase3_duration_raw)

    # Phase 4 duration
    @property
    def phase4_duration(self) -> int:
        """Phase 4 duration rounded to whole seconds."""
        return round(self.phase4_duration_raw)

    @property
    def phase4_duration_minutes(self) -> int:
        """Phase 4 duration rounded to whole minutes."""
        return self.duration_to_minutes(self.phase4_duration_raw)

    @property
    def phase4_duration_hours(self) -> float:
        """Phase 4 duration in hours, two decimals."""
        return self.duration_to_hours(self.phase4_duration_raw)

    # Total time
    @property
    def total_time(self) -> int:
        """Total plotting time rounded to whole seconds."""
        return round(self.total_time_raw)

    @property
    def total_time_minutes(self) -> int:
        """Total plotting time rounded to whole minutes."""
        return self.duration_to_minutes(self.total_time_raw)

    @property
    def total_time_hours(self) -> float:
        """Total plotting time in hours, two decimals."""
        return self.duration_to_hours(self.total_time_raw)

    # Copy time
    @property
    def copy_time(self) -> int:
        """Copy time rounded to whole seconds."""
        return round(self.copy_time_raw)

    @property
    def copy_time_minutes(self) -> int:
        """Copy time rounded to whole minutes."""
        return self.duration_to_minutes(self.copy_time_raw)

    @property
    def copy_time_hours(self) -> float:
        """Copy time in hours, two decimals."""
        return self.duration_to_hours(self.copy_time_raw)

    def duration_to_minutes(self, duration: float) -> int:
        """Convert ``duration`` seconds to whole minutes (rounded)."""
        return round(duration / 60)

    def duration_to_hours(self, duration: float) -> float:
        """Convert ``duration`` seconds to hours, rounded to two decimals."""
        return round(duration / 60 / 60, 2)