Rename nMigen to Amaranth HDL.
This commit is contained in:
parent
0b28a97ca0
commit
909a3b8be7
200 changed files with 14493 additions and 14451 deletions
|
|
@ -1,3 +1,6 @@
|
|||
from .dsl import *
|
||||
from .res import ResourceError
|
||||
from .plat import *
|
||||
from amaranth.build import *
|
||||
|
||||
|
||||
# Backwards-compatibility shim: the nmigen.build package was renamed to
# amaranth.build, so importing this module emits a deprecation warning that
# points callers (stacklevel=2) at the new name.
import warnings
warnings.warn("instead of nmigen.build, use amaranth.build",
              DeprecationWarning, stacklevel=2)
|
||||
|
|
|
|||
|
|
@ -1,256 +1,7 @@
|
|||
from collections import OrderedDict
|
||||
from amaranth.build.dsl import *
|
||||
from amaranth.build.dsl import __all__
|
||||
|
||||
|
||||
__all__ = ["Pins", "PinsN", "DiffPairs", "DiffPairsN",
|
||||
"Attrs", "Clock", "Subsignal", "Resource", "Connector"]
|
||||
|
||||
|
||||
class Pins:
    """An ordered collection of single-ended pin names.

    Carries a data direction (``"i"``, ``"o"``, ``"oe"``, or ``"io"``), an
    optional polarity inversion flag, and optionally prefixes every name with
    a connector (name, number) pair.
    """

    def __init__(self, names, *, dir="io", invert=False, conn=None, assert_width=None):
        if not isinstance(names, str):
            raise TypeError("Names must be a whitespace-separated string, not {!r}"
                            .format(names))
        split_names = names.split()

        if conn is not None:
            conn_name, conn_number = conn
            conn_ok = isinstance(conn_name, str) and isinstance(conn_number, (int, str))
            if not conn_ok:
                raise TypeError("Connector must be None or a pair of string (connector name) and "
                                "integer/string (connector number), not {!r}"
                                .format(conn))
            # Qualify every pin name with the connector it belongs to.
            split_names = ["{}_{}:{}".format(conn_name, conn_number, pin)
                           for pin in split_names]

        if dir not in ("i", "o", "io", "oe"):
            raise TypeError("Direction must be one of \"i\", \"o\", \"oe\", or \"io\", not {!r}"
                            .format(dir))

        if assert_width is not None and len(split_names) != assert_width:
            raise AssertionError("{} names are specified ({}), but {} names are expected"
                                 .format(len(split_names), " ".join(split_names), assert_width))

        self.names = split_names
        self.dir = dir
        self.invert = bool(invert)

    def __len__(self):
        return len(self.names)

    def __iter__(self):
        yield from self.names

    def map_names(self, mapping, resource):
        """Resolve connector-qualified names (those containing ``:``) through
        ``mapping``, chasing chains of connectors until a bare platform pin
        remains; ``resource`` is only used for the error message."""
        resolved = []
        for pin_name in self.names:
            while ":" in pin_name:
                if pin_name not in mapping:
                    raise NameError("Resource {!r} refers to nonexistent connector pin {}"
                                    .format(resource, pin_name))
                pin_name = mapping[pin_name]
            resolved.append(pin_name)
        return resolved

    def __repr__(self):
        invert_tag = "-n" if self.invert else ""
        return "(pins{} {} {})".format(invert_tag, self.dir, " ".join(self.names))
|
||||
|
||||
|
||||
def PinsN(*args, **kwargs):
    """Shorthand for :class:`Pins` with polarity inversion enabled."""
    return Pins(*args, **kwargs, invert=True)
|
||||
|
||||
|
||||
class DiffPairs:
    """A collection of differential pin pairs.

    The positive and negative halves are stored as two parallel :class:`Pins`
    collections, which must have equal width.
    """

    def __init__(self, p, n, *, dir="io", invert=False, conn=None, assert_width=None):
        self.p = Pins(p, dir=dir, conn=conn, assert_width=assert_width)
        self.n = Pins(n, dir=dir, conn=conn, assert_width=assert_width)

        if len(self.p.names) != len(self.n.names):
            raise TypeError("Positive and negative pins must have the same width, but {!r} "
                            "and {!r} do not"
                            .format(self.p, self.n))

        self.dir = dir
        self.invert = bool(invert)

    def __len__(self):
        return len(self.p.names)

    def __iter__(self):
        # Yield (positive, negative) name pairs in declaration order.
        yield from zip(self.p.names, self.n.names)

    def __repr__(self):
        invert_tag = "-n" if self.invert else ""
        return "(diffpairs{} {} (p {}) (n {}))".format(
            invert_tag, self.dir, " ".join(self.p.names), " ".join(self.n.names))
|
||||
|
||||
|
||||
def DiffPairsN(*args, **kwargs):
    """Shorthand for :class:`DiffPairs` with polarity inversion enabled."""
    return DiffPairs(*args, **kwargs, invert=True)
|
||||
|
||||
|
||||
class Attrs(OrderedDict):
    """An ordered mapping of platform attribute names to values.

    A value may be ``None`` (render the attribute as explicitly disabled),
    a string or integer, or a callable that is resolved later against the
    platform.
    """

    def __init__(self, **attrs):
        for key, value in attrs.items():
            acceptable = (value is None or isinstance(value, (str, int))
                          or hasattr(value, "__call__"))
            if not acceptable:
                raise TypeError("Value of attribute {} must be None, int, str, or callable, "
                                "not {!r}"
                                .format(key, value))

        super().__init__(**attrs)

    def __repr__(self):
        rendered = []
        for key, value in self.items():
            # None renders as a "!"-prefixed (disabled) attribute.
            rendered.append("!" + key if value is None else key + "=" + repr(value))
        return "(attrs {})".format(" ".join(rendered))
|
||||
|
||||
|
||||
class Clock:
    """A clock constraint: stores the clock frequency, in hertz, as a float."""

    def __init__(self, frequency):
        if not isinstance(frequency, (float, int)):
            raise TypeError("Clock frequency must be a number")
        self.frequency = float(frequency)

    @property
    def period(self):
        # Clock period in seconds: the reciprocal of the frequency.
        return 1 / self.frequency

    def __repr__(self):
        return "(clock {})".format(self.frequency)
|
||||
|
||||
|
||||
class Subsignal:
    """A named group of I/O constraints.

    ``ios`` holds either exactly one :class:`Pins`/:class:`DiffPairs` location
    (optionally with one :class:`Clock` constraint), or any number of nested
    :class:`Subsignal` objects; :class:`Attrs` may appear anywhere in ``args``
    and accumulate into ``attrs``.
    """

    def __init__(self, name, *args):
        self.name = name
        self.ios = []
        self.attrs = Attrs()
        self.clock = None

        if not args:
            raise ValueError("Missing I/O constraints")
        for arg in args:
            if isinstance(arg, (Pins, DiffPairs)):
                # A physical location must be the only entry in ios.
                if not self.ios:
                    self.ios.append(arg)
                else:
                    raise TypeError("Pins and DiffPairs are incompatible with other location or "
                                    "subsignal constraints, but {!r} appears after {!r}"
                                    .format(arg, self.ios[-1]))
            elif isinstance(arg, Subsignal):
                # Nested subsignals may only follow other subsignals.
                if not self.ios or isinstance(self.ios[-1], Subsignal):
                    self.ios.append(arg)
                else:
                    raise TypeError("Subsignal is incompatible with location constraints, but "
                                    "{!r} appears after {!r}"
                                    .format(arg, self.ios[-1]))
            elif isinstance(arg, Attrs):
                self.attrs.update(arg)
            elif isinstance(arg, Clock):
                # A clock constraint applies to the single preceding location.
                if self.ios and isinstance(self.ios[-1], (Pins, DiffPairs)):
                    if self.clock is None:
                        self.clock = arg
                    else:
                        raise ValueError("Clock constraint can be applied only once")
                else:
                    # Bug fix: previously this read `self.ios[-1]` unconditionally,
                    # raising IndexError instead of the intended TypeError when the
                    # clock was the first constraint (empty `self.ios`).
                    raise TypeError("Clock constraint can only be applied to Pins or DiffPairs, "
                                    "not {!r}"
                                    .format(self.ios[-1] if self.ios else arg))
            else:
                raise TypeError("Constraint must be one of Pins, DiffPairs, Subsignal, Attrs, "
                                "or Clock, not {!r}"
                                .format(arg))

    def _content_repr(self):
        # Space-separated reprs of the constraints; shared with subclasses
        # (e.g. Resource.__repr__) so their formats stay consistent.
        parts = []
        for io in self.ios:
            parts.append(repr(io))
        if self.clock is not None:
            parts.append(repr(self.clock))
        if self.attrs:
            parts.append(repr(self.attrs))
        return " ".join(parts)

    def __repr__(self):
        return "(subsignal {} {})".format(self.name, self._content_repr())
|
||||
|
||||
|
||||
class Resource(Subsignal):
    """A top-level, numbered group of I/O constraints on a platform."""

    @classmethod
    def family(cls, name_or_number, number=None, *, ios, default_name, name_suffix=""):
        # This constructor accepts two different forms:
        # 1. Number-only form:
        #      Resource.family(0, default_name="name", ios=[Pins("A0 A1")])
        # 2. Name-and-number (name override) form:
        #      Resource.family("override", 0, default_name="name", ios=...)
        # This makes it easier to build abstractions for resources, e.g. an
        # SPIResource abstraction could simply delegate to
        # `Resource.family(*args, default_name="spi", ios=ios)`.
        # The name_suffix argument is meant to support creating resources with
        # similar names, such as spi_flash, spi_flash_2x, etc.
        suffix = ("_" + name_suffix) if name_suffix else ""
        if number is None:
            # Number-only form: the first argument is the resource number.
            return cls(default_name + suffix, name_or_number, *ios)
        # Name-override form: the first argument is the resource name.
        return cls(name_or_number + suffix, number, *ios)

    def __init__(self, name, number, *args):
        super().__init__(name, *args)
        self.number = number

    def __repr__(self):
        return "(resource {} {} {})".format(self.name, self.number, self._content_repr())
|
||||
|
||||
|
||||
class Connector:
    """A named, numbered connector mapping connector pins onto platform pins."""

    def __init__(self, name, number, io, *, conn=None):
        self.name = name
        self.number = number
        pin_map = OrderedDict()

        if isinstance(io, dict):
            # Explicit mapping form: both sides must be strings.
            for conn_pin, plat_pin in io.items():
                if not isinstance(conn_pin, str):
                    raise TypeError("Connector pin name must be a string, not {!r}"
                                    .format(conn_pin))
                if not isinstance(plat_pin, str):
                    raise TypeError("Platform pin name must be a string, not {!r}"
                                    .format(plat_pin))
                pin_map[conn_pin] = plat_pin
        elif isinstance(io, str):
            # Whitespace-separated list form: connector pins are numbered from 1
            # and "-" marks a position with no platform pin attached.
            for position, plat_pin in enumerate(io.split(), start=1):
                if plat_pin != "-":
                    pin_map[str(position)] = plat_pin
        else:
            raise TypeError("Connector I/Os must be a dictionary or a string, not {!r}"
                            .format(io))

        if conn is not None:
            conn_name, conn_number = conn
            conn_ok = isinstance(conn_name, str) and isinstance(conn_number, (int, str))
            if not conn_ok:
                raise TypeError("Connector must be None or a pair of string (connector name) and "
                                "integer/string (connector number), not {!r}"
                                .format(conn))
            # This connector plugs into a parent connector: qualify every
            # platform pin with the parent's name and number.
            for conn_pin, plat_pin in pin_map.items():
                pin_map[conn_pin] = "{}_{}:{}".format(conn_name, conn_number, plat_pin)

        self.mapping = pin_map

    def __repr__(self):
        pairs = ("{}=>{}".format(conn_pin, plat_pin)
                 for conn_pin, plat_pin in self.mapping.items())
        return "(connector {} {} {})".format(self.name, self.number, " ".join(pairs))

    def __len__(self):
        return len(self.mapping)

    def __iter__(self):
        # Yield (qualified connector pin, platform pin) pairs.
        for conn_pin, plat_pin in self.mapping.items():
            yield "{}_{}:{}".format(self.name, self.number, conn_pin), plat_pin
|
||||
# Backwards-compatibility shim: the nmigen.build.dsl module was renamed to
# amaranth.build.dsl, so importing this module emits a deprecation warning.
import warnings
warnings.warn("instead of nmigen.build.dsl, use amaranth.build.dsl",
              DeprecationWarning, stacklevel=2)
|
||||
|
|
|
|||
|
|
@ -1,444 +1,7 @@
|
|||
from collections import OrderedDict
|
||||
from abc import ABCMeta, abstractmethod, abstractproperty
|
||||
import os
|
||||
import textwrap
|
||||
import re
|
||||
import jinja2
|
||||
from amaranth.build.plat import *
|
||||
from amaranth.build.plat import __all__
|
||||
|
||||
from .. import __version__
|
||||
from .._toolchain import *
|
||||
from ..hdl import *
|
||||
from ..hdl.xfrm import SampleLowerer, DomainLowerer
|
||||
from ..lib.cdc import ResetSynchronizer
|
||||
from ..back import rtlil, verilog
|
||||
from .res import *
|
||||
from .run import *
|
||||
|
||||
|
||||
__all__ = ["Platform", "TemplatedPlatform"]
|
||||
|
||||
|
||||
class Platform(ResourceManager, metaclass=ABCMeta):
    """Abstract target platform.

    Combines resource management (inherited from ResourceManager) with an
    abstract build flow; concrete platforms supply the resource/connector
    lists, the required tool list, and the ``toolchain_prepare``/
    ``toolchain_program`` entry points.
    """

    # Must be overridden by concrete platforms.
    resources = abstractproperty()
    connectors = abstractproperty()
    # Names of the board's default clock/reset resources, if any.
    default_clk = None
    default_rst = None
    # Executable names the toolchain requires to be available.
    required_tools = abstractproperty()

    def __init__(self):
        super().__init__(self.resources, self.connectors)

        # filename -> str/bytes content; added via add_file() and emitted
        # verbatim into the build plan.
        self.extra_files = OrderedDict()

        # prepare() may be called at most once per platform instance.
        self._prepared = False

    @property
    def default_clk_constraint(self):
        # Clock constraint of the default clock resource; raises AttributeError
        # if the platform declares no default clock at all.
        if self.default_clk is None:
            raise AttributeError("Platform '{}' does not define a default clock"
                                 .format(type(self).__name__))
        return self.lookup(self.default_clk).clock

    @property
    def default_clk_frequency(self):
        # Frequency, in Hz, of the default clock constraint; raises
        # AttributeError if the default clock resource has no Clock constraint.
        constraint = self.default_clk_constraint
        if constraint is None:
            raise AttributeError("Platform '{}' does not constrain its default clock"
                                 .format(type(self).__name__))
        return constraint.frequency

    def add_file(self, filename, content):
        """Add an extra file to be included in the build plan.

        ``content`` may be str, bytes, or a file-like object (read eagerly
        here). Re-adding the same filename with identical content is a no-op;
        differing content raises ValueError.
        """
        if not isinstance(filename, str):
            raise TypeError("File name must be a string, not {!r}"
                            .format(filename))
        if hasattr(content, "read"):
            content = content.read()
        elif not isinstance(content, (str, bytes)):
            raise TypeError("File contents must be str, bytes, or a file-like object, not {!r}"
                            .format(content))
        if filename in self.extra_files:
            if self.extra_files[filename] != content:
                raise ValueError("File {!r} already exists"
                                 .format(filename))
        else:
            self.extra_files[filename] = content

    def iter_files(self, *suffixes):
        # Yield names of extra files ending with any of the given suffixes.
        for filename in self.extra_files:
            if filename.endswith(suffixes):
                yield filename

    @property
    def _toolchain_env_var(self):
        # Environment variable naming a toolchain environment script; when it
        # is set, build() skips the upfront tool-presence checks.
        return f"NMIGEN_ENV_{self.toolchain}"

    def build(self, elaboratable, name="top",
              build_dir="build", do_build=True,
              program_opts=None, do_program=False,
              **kwargs):
        """Elaborate, optionally build, and optionally program ``elaboratable``.

        Returns the build plan if ``do_build`` is false, the build products if
        ``do_program`` is false, and None after programming otherwise. Extra
        keyword arguments are forwarded to ``prepare()``.
        """
        # The following code performs a best-effort check for presence of required tools upfront,
        # before performing any build actions, to provide a better diagnostic. It does not handle
        # several corner cases:
        #  1. `require_tool` does not source toolchain environment scripts, so if such a script
        #     is used, the check is skipped, and `execute_local()` may fail;
        #  2. if the design is not built (do_build=False), most of the tools are not required and
        #     in fact might not be available if the design will be built manually with a different
        #     environment script specified, or on a different machine; however, Yosys is required
        #     by virtually every platform anyway, to provide debug Verilog output, and `prepare()`
        #     may fail.
        # This is OK because even if `require_tool` succeeds, the toolchain might be broken anyway.
        # The check only serves to catch common errors earlier.
        if do_build and self._toolchain_env_var not in os.environ:
            for tool in self.required_tools:
                require_tool(tool)

        plan = self.prepare(elaboratable, name, **kwargs)
        if not do_build:
            return plan

        products = plan.execute_local(build_dir)
        if not do_program:
            return products

        self.toolchain_program(products, name, **(program_opts or {}))

    def has_required_tools(self):
        # A toolchain environment script overrides tool discovery entirely.
        if self._toolchain_env_var in os.environ:
            return True
        return all(has_tool(name) for name in self.required_tools)

    def create_missing_domain(self, name):
        # Simple instantiation of a clock domain driven directly by the board clock and reset.
        # This implementation uses a single ResetSynchronizer to ensure that:
        #   * an external reset is definitely synchronized to the system clock;
        #   * release of power-on reset, which is inherently asynchronous, is synchronized to
        #     the system clock.
        # Many device families provide advanced primitives for tackling reset. If these exist,
        # they should be used instead.
        # Returns None (implicitly) for any domain other than "sync", or when no
        # default clock is declared.
        if name == "sync" and self.default_clk is not None:
            clk_i = self.request(self.default_clk).i
            if self.default_rst is not None:
                rst_i = self.request(self.default_rst).i
            else:
                rst_i = Const(0)

            m = Module()
            m.domains += ClockDomain("sync")
            m.d.comb += ClockSignal("sync").eq(clk_i)
            m.submodules.reset_sync = ResetSynchronizer(rst_i, domain="sync")
            return m

    def prepare(self, elaboratable, name="top", **kwargs):
        """Lower ``elaboratable`` to a fragment, instantiate I/O buffers for
        every requested pin, and hand the result to ``toolchain_prepare``.

        One-shot: asserts if called a second time on the same platform.
        """
        assert not self._prepared
        self._prepared = True

        fragment = Fragment.get(elaboratable, self)
        fragment = SampleLowerer()(fragment)
        fragment._propagate_domains(self.create_missing_domain, platform=self)
        fragment = DomainLowerer()(fragment)

        def add_pin_fragment(pin, pin_fragment):
            # Attach one I/O buffer fragment per pin; buffers that are not bare
            # Instances are flattened into the parent fragment.
            pin_fragment = Fragment.get(pin_fragment, self)
            if not isinstance(pin_fragment, Instance):
                pin_fragment.flatten = True
            fragment.add_subfragment(pin_fragment, name="pin_{}".format(pin.name))

        # Instantiate the direction-appropriate buffer for each single-ended pin.
        for pin, port, attrs, invert in self.iter_single_ended_pins():
            if pin.dir == "i":
                add_pin_fragment(pin, self.get_input(pin, port, attrs, invert))
            if pin.dir == "o":
                add_pin_fragment(pin, self.get_output(pin, port, attrs, invert))
            if pin.dir == "oe":
                add_pin_fragment(pin, self.get_tristate(pin, port, attrs, invert))
            if pin.dir == "io":
                add_pin_fragment(pin, self.get_input_output(pin, port, attrs, invert))

        # Same for differential pins, using the vendor-provided diff buffers.
        for pin, port, attrs, invert in self.iter_differential_pins():
            if pin.dir == "i":
                add_pin_fragment(pin, self.get_diff_input(pin, port, attrs, invert))
            if pin.dir == "o":
                add_pin_fragment(pin, self.get_diff_output(pin, port, attrs, invert))
            if pin.dir == "oe":
                add_pin_fragment(pin, self.get_diff_tristate(pin, port, attrs, invert))
            if pin.dir == "io":
                add_pin_fragment(pin, self.get_diff_input_output(pin, port, attrs, invert))

        fragment._propagate_ports(ports=self.iter_ports(), all_undef_as_ports=False)
        return self.toolchain_prepare(fragment, name, **kwargs)

    @abstractmethod
    def toolchain_prepare(self, fragment, name, **kwargs):
        """
        Convert the ``fragment`` and constraints recorded in this :class:`Platform` into
        a :class:`BuildPlan`.
        """
        raise NotImplementedError # :nocov:

    def toolchain_program(self, products, name, **kwargs):
        """
        Extract bitstream for fragment ``name`` from ``products`` and download it to a target.
        """
        raise NotImplementedError("Platform '{}' does not support programming"
                                  .format(type(self).__name__))

    def _check_feature(self, feature, pin, attrs, valid_xdrs, valid_attrs):
        # Shared validation for the get_* buffer methods: reject unsupported
        # features, unsupported XDR values, and (optionally) any attributes.
        if len(valid_xdrs) == 0:
            raise NotImplementedError("Platform '{}' does not support {}"
                                      .format(type(self).__name__, feature))
        elif pin.xdr not in valid_xdrs:
            raise NotImplementedError("Platform '{}' does not support {} for XDR {}"
                                      .format(type(self).__name__, feature, pin.xdr))

        if not valid_attrs and attrs:
            raise NotImplementedError("Platform '{}' does not support attributes for {}"
                                      .format(type(self).__name__, feature))

    @staticmethod
    def _invert_if(invert, value):
        # Conditionally invert a value, used to honor PinsN/DiffPairsN polarity.
        if invert:
            return ~value
        else:
            return value

    def get_input(self, pin, port, attrs, invert):
        # Generic single-ended input buffer: plain (possibly inverted) wiring.
        self._check_feature("single-ended input", pin, attrs,
                            valid_xdrs=(0,), valid_attrs=None)

        m = Module()
        m.d.comb += pin.i.eq(self._invert_if(invert, port))
        return m

    def get_output(self, pin, port, attrs, invert):
        # Generic single-ended output buffer: plain (possibly inverted) wiring.
        self._check_feature("single-ended output", pin, attrs,
                            valid_xdrs=(0,), valid_attrs=None)

        m = Module()
        m.d.comb += port.eq(self._invert_if(invert, pin.o))
        return m

    def get_tristate(self, pin, port, attrs, invert):
        # Generic tristate buffer built on the Yosys $tribuf cell.
        self._check_feature("single-ended tristate", pin, attrs,
                            valid_xdrs=(0,), valid_attrs=None)

        m = Module()
        m.submodules += Instance("$tribuf",
            p_WIDTH=pin.width,
            i_EN=pin.oe,
            i_A=self._invert_if(invert, pin.o),
            o_Y=port,
        )
        return m

    def get_input_output(self, pin, port, attrs, invert):
        # Generic bidirectional buffer: $tribuf for the output path plus plain
        # wiring for the input path.
        self._check_feature("single-ended input/output", pin, attrs,
                            valid_xdrs=(0,), valid_attrs=None)

        m = Module()
        m.submodules += Instance("$tribuf",
            p_WIDTH=pin.width,
            i_EN=pin.oe,
            i_A=self._invert_if(invert, pin.o),
            o_Y=port,
        )
        m.d.comb += pin.i.eq(self._invert_if(invert, port))
        return m

    # The generic platform has no differential buffers: _check_feature is
    # called with an empty valid_xdrs tuple, so each of the following always
    # raises NotImplementedError unless a vendor platform overrides it.

    def get_diff_input(self, pin, port, attrs, invert):
        self._check_feature("differential input", pin, attrs,
                            valid_xdrs=(), valid_attrs=None)

    def get_diff_output(self, pin, port, attrs, invert):
        self._check_feature("differential output", pin, attrs,
                            valid_xdrs=(), valid_attrs=None)

    def get_diff_tristate(self, pin, port, attrs, invert):
        self._check_feature("differential tristate", pin, attrs,
                            valid_xdrs=(), valid_attrs=None)

    def get_diff_input_output(self, pin, port, attrs, invert):
        self._check_feature("differential input/output", pin, attrs,
                            valid_xdrs=(), valid_attrs=None)
|
||||
|
||||
|
||||
class TemplatedPlatform(Platform):
    """Platform whose toolchain flow is described by Jinja2 templates.

    ``file_templates`` render into files of the build plan; ``command_templates``
    render into the commands of the generated build script.
    """

    toolchain = abstractproperty()
    file_templates = abstractproperty()
    command_templates = abstractproperty()

    # Wrapper scripts for POSIX shell and Windows batch; both source the
    # toolchain environment script (if configured) before running the commands.
    build_script_templates = {
        "build_{{name}}.sh": """
            # {{autogenerated}}
            set -e{{verbose("x")}}
            [ -n "${{platform._toolchain_env_var}}" ] && . "${{platform._toolchain_env_var}}"
            {{emit_commands("sh")}}
        """,
        "build_{{name}}.bat": """
            @rem {{autogenerated}}
            {{quiet("@echo off")}}
            if defined {{platform._toolchain_env_var}} call %{{platform._toolchain_env_var}}%
            {{emit_commands("bat")}}
        """,
    }

    def iter_clock_constraints(self):
        for net_signal, port_signal, frequency in super().iter_clock_constraints():
            # Skip any clock constraints placed on signals that are never used in the design.
            # Otherwise, it will cause a crash in the vendor platform if it supports clock
            # constraints on non-port nets.
            if net_signal not in self._name_map:
                continue
            yield net_signal, port_signal, frequency

    def toolchain_prepare(self, fragment, name, **kwargs):
        """Render every file and command template against ``fragment`` and
        assemble the results into a :class:`BuildPlan`."""
        # Restrict the name of the design to a strict alphanumeric character set. Platforms will
        # interpolate the name of the design in many different contexts: filesystem paths, Python
        # scripts, Tcl scripts, ad-hoc constraint files, and so on. It is not practical to add
        # escaping code that handles every one of their edge cases, so make sure we never hit them
        # in the first place.
        invalid_char = re.match(r"[^A-Za-z0-9_]", name)
        if invalid_char:
            raise ValueError("Design name {!r} contains invalid character {!r}; only alphanumeric "
                             "characters are valid in design names"
                             .format(name, invalid_char.group(0)))

        # This notice serves a dual purpose: to explain that the file is autogenerated,
        # and to incorporate the nMigen version into generated code.
        autogenerated = "Automatically generated by nMigen {}. Do not edit.".format(__version__)

        # Convert once to RTLIL; the Verilog emitters below reuse this text.
        rtlil_text, self._name_map = rtlil.convert_fragment(fragment, name=name)

        def emit_rtlil():
            return rtlil_text

        def emit_verilog(opts=()):
            return verilog._convert_rtlil_text(rtlil_text,
                strip_internal_attrs=True, write_verilog_opts=opts)

        def emit_debug_verilog(opts=()):
            # Debug output keeps internal attributes for easier signal tracing.
            return verilog._convert_rtlil_text(rtlil_text,
                strip_internal_attrs=False, write_verilog_opts=opts)

        def emit_commands(syntax):
            # Emit tool-override boilerplate followed by the rendered command
            # templates, joined into one script body for the given syntax.
            commands = []

            # NOTE(review): this loop variable shadows the outer `name`
            # (design name); harmless here since `name` is not read afterwards
            # inside this function.
            for name in self.required_tools:
                env_var = tool_env_var(name)
                if syntax == "sh":
                    template = ": ${{{env_var}:={name}}}"
                elif syntax == "bat":
                    template = \
                        "if [%{env_var}%] equ [\"\"] set {env_var}=\n" \
                        "if [%{env_var}%] equ [] set {env_var}={name}"
                else:
                    assert False
                commands.append(template.format(env_var=env_var, name=name))

            for index, command_tpl in enumerate(self.command_templates):
                command = render(command_tpl, origin="<command#{}>".format(index + 1),
                                 syntax=syntax)
                # Collapse whitespace so each command renders on a single line.
                command = re.sub(r"\s+", " ", command)
                if syntax == "sh":
                    commands.append(command)
                elif syntax == "bat":
                    commands.append(command + " || exit /b")
                else:
                    assert False

            return "\n".join(commands)

        def get_override(var):
            # Override precedence: NMIGEN_<var> environment variable, then
            # keyword argument, then Jinja2 Undefined (renders as empty).
            var_env = "NMIGEN_{}".format(var)
            if var_env in os.environ:
                # On Windows, there is no way to define an "empty but set" variable; it is tempting
                # to use a quoted empty string, but it doesn't do what one would expect. Recognize
                # this as a useful pattern anyway, and treat `set VAR=""` on Windows the same way
                # `export VAR=` is treated on Linux.
                return re.sub(r'^\"\"$', "", os.environ[var_env])
            elif var in kwargs:
                if isinstance(kwargs[var], str):
                    return textwrap.dedent(kwargs[var]).strip()
                else:
                    return kwargs[var]
            else:
                return jinja2.Undefined(name=var)

        @jinja2.contextfunction
        def invoke_tool(context, name):
            # Reference a tool via its override variable, in the syntax of the
            # script currently being rendered.
            env_var = tool_env_var(name)
            if context.parent["syntax"] == "sh":
                return "\"${}\"".format(env_var)
            elif context.parent["syntax"] == "bat":
                return "%{}%".format(env_var)
            else:
                assert False

        def options(opts):
            # Jinja2 filter: accept either a string or an iterable of strings.
            if isinstance(opts, str):
                return opts
            else:
                return " ".join(opts)

        def hierarchy(signal, separator):
            # Jinja2 filter: hierarchical name of a signal, without the top level.
            return separator.join(self._name_map[signal][1:])

        def ascii_escape(string):
            # Jinja2 filter: replace each non-alphanumeric character with a
            # _xx_ hex escape.
            def escape_one(match):
                if match.group(1) is None:
                    return match.group(2)
                else:
                    return "_{:02x}_".format(ord(match.group(1)[0]))
            return "".join(escape_one(m) for m in re.finditer(r"([^A-Za-z0-9_])|(.)", string))

        def tcl_escape(string):
            # Jinja2 filter: brace-quote a string for Tcl, escaping braces and
            # backslashes.
            return "{" + re.sub(r"([{}\\])", r"\\\1", string) + "}"

        def tcl_quote(string):
            # Jinja2 filter: double-quote a string for Tcl, escaping $, [, and \.
            return '"' + re.sub(r"([$[\\])", r"\\\1", string) + '"'

        def verbose(arg):
            # Render `arg` only when the "verbose" override is set.
            if get_override("verbose"):
                return arg
            else:
                return jinja2.Undefined(name="quiet")

        def quiet(arg):
            # Render `arg` only when the "verbose" override is NOT set.
            if get_override("verbose"):
                return jinja2.Undefined(name="quiet")
            else:
                return arg

        def render(source, origin, syntax=None):
            # Compile and render one template, installing the custom filters;
            # template syntax errors are re-raised with the origin attached.
            try:
                source = textwrap.dedent(source).strip()
                compiled = jinja2.Template(source,
                    trim_blocks=True, lstrip_blocks=True, undefined=jinja2.StrictUndefined)
                compiled.environment.filters["options"] = options
                compiled.environment.filters["hierarchy"] = hierarchy
                compiled.environment.filters["ascii_escape"] = ascii_escape
                compiled.environment.filters["tcl_escape"] = tcl_escape
                compiled.environment.filters["tcl_quote"] = tcl_quote
            except jinja2.TemplateSyntaxError as e:
                e.args = ("{} (at {}:{})".format(e.message, origin, e.lineno),)
                raise
            return compiled.render({
                "name": name,
                "platform": self,
                "emit_rtlil": emit_rtlil,
                "emit_verilog": emit_verilog,
                "emit_debug_verilog": emit_debug_verilog,
                "emit_commands": emit_commands,
                "syntax": syntax,
                "invoke_tool": invoke_tool,
                "get_override": get_override,
                "verbose": verbose,
                "quiet": quiet,
                "autogenerated": autogenerated,
            })

        # Assemble the plan: rendered file templates first, then extra files.
        plan = BuildPlan(script="build_{}".format(name))
        for filename_tpl, content_tpl in self.file_templates.items():
            plan.add_file(render(filename_tpl, origin=filename_tpl),
                          render(content_tpl, origin=content_tpl))
        for filename, content in self.extra_files.items():
            plan.add_file(filename, content)
        return plan
|
||||
# Backwards-compatibility shim: the nmigen.build.plat module was renamed to
# amaranth.build.plat, so importing this module emits a deprecation warning.
import warnings
warnings.warn("instead of nmigen.build.plat, use amaranth.build.plat",
              DeprecationWarning, stacklevel=2)
|
||||
|
|
|
|||
|
|
@ -1,256 +1,7 @@
|
|||
from collections import OrderedDict
|
||||
|
||||
from ..hdl.ast import *
|
||||
from ..hdl.rec import *
|
||||
from ..lib.io import *
|
||||
|
||||
from .dsl import *
|
||||
from amaranth.build.res import *
|
||||
from amaranth.build.res import __all__
|
||||
|
||||
|
||||
__all__ = ["ResourceError", "ResourceManager"]
|
||||
|
||||
|
||||
class ResourceError(Exception):
    """Raised when a platform resource cannot be looked up or requested."""
    pass
|
||||
|
||||
|
||||
class ResourceManager:
|
||||
def __init__(self, resources, connectors):
|
||||
self.resources = OrderedDict()
|
||||
self._requested = OrderedDict()
|
||||
self._phys_reqd = OrderedDict()
|
||||
|
||||
self.connectors = OrderedDict()
|
||||
self._conn_pins = OrderedDict()
|
||||
|
||||
# Constraint lists
|
||||
self._ports = []
|
||||
self._clocks = SignalDict()
|
||||
|
||||
self.add_resources(resources)
|
||||
self.add_connectors(connectors)
|
||||
|
||||
def add_resources(self, resources):
|
||||
for res in resources:
|
||||
if not isinstance(res, Resource):
|
||||
raise TypeError("Object {!r} is not a Resource".format(res))
|
||||
if (res.name, res.number) in self.resources:
|
||||
raise NameError("Trying to add {!r}, but {!r} has the same name and number"
|
||||
.format(res, self.resources[res.name, res.number]))
|
||||
self.resources[res.name, res.number] = res
|
||||
|
||||
def add_connectors(self, connectors):
|
||||
for conn in connectors:
|
||||
if not isinstance(conn, Connector):
|
||||
raise TypeError("Object {!r} is not a Connector".format(conn))
|
||||
if (conn.name, conn.number) in self.connectors:
|
||||
raise NameError("Trying to add {!r}, but {!r} has the same name and number"
|
||||
.format(conn, self.connectors[conn.name, conn.number]))
|
||||
self.connectors[conn.name, conn.number] = conn
|
||||
|
||||
for conn_pin, plat_pin in conn:
|
||||
assert conn_pin not in self._conn_pins
|
||||
self._conn_pins[conn_pin] = plat_pin
|
||||
|
||||
def lookup(self, name, number=0):
|
||||
if (name, number) not in self.resources:
|
||||
raise ResourceError("Resource {}#{} does not exist"
|
||||
.format(name, number))
|
||||
return self.resources[name, number]
|
||||
|
||||
def request(self, name, number=0, *, dir=None, xdr=None):
|
||||
resource = self.lookup(name, number)
|
||||
if (resource.name, resource.number) in self._requested:
|
||||
raise ResourceError("Resource {}#{} has already been requested"
|
||||
.format(name, number))
|
||||
|
||||
def merge_options(subsignal, dir, xdr):
|
||||
if isinstance(subsignal.ios[0], Subsignal):
|
||||
if dir is None:
|
||||
dir = dict()
|
||||
if xdr is None:
|
||||
xdr = dict()
|
||||
if not isinstance(dir, dict):
|
||||
raise TypeError("Directions must be a dict, not {!r}, because {!r} "
|
||||
"has subsignals"
|
||||
.format(dir, subsignal))
|
||||
if not isinstance(xdr, dict):
|
||||
raise TypeError("Data rate must be a dict, not {!r}, because {!r} "
|
||||
"has subsignals"
|
||||
.format(xdr, subsignal))
|
||||
for sub in subsignal.ios:
|
||||
sub_dir = dir.get(sub.name, None)
|
||||
sub_xdr = xdr.get(sub.name, None)
|
||||
dir[sub.name], xdr[sub.name] = merge_options(sub, sub_dir, sub_xdr)
|
||||
else:
|
||||
if dir is None:
|
||||
dir = subsignal.ios[0].dir
|
||||
if xdr is None:
|
||||
xdr = 0
|
||||
if dir not in ("i", "o", "oe", "io", "-"):
|
||||
raise TypeError("Direction must be one of \"i\", \"o\", \"oe\", \"io\", "
|
||||
"or \"-\", not {!r}"
|
||||
.format(dir))
|
||||
if dir != subsignal.ios[0].dir and \
|
||||
not (subsignal.ios[0].dir == "io" or dir == "-"):
|
||||
raise ValueError("Direction of {!r} cannot be changed from \"{}\" to \"{}\"; "
|
||||
"direction can be changed from \"io\" to \"i\", \"o\", or "
|
||||
"\"oe\", or from anything to \"-\""
|
||||
.format(subsignal.ios[0], subsignal.ios[0].dir, dir))
|
||||
if not isinstance(xdr, int) or xdr < 0:
|
||||
raise ValueError("Data rate of {!r} must be a non-negative integer, not {!r}"
|
||||
.format(subsignal.ios[0], xdr))
|
||||
return dir, xdr
|
||||
|
||||
def resolve(resource, dir, xdr, name, attrs):
|
||||
for attr_key, attr_value in attrs.items():
|
||||
if hasattr(attr_value, "__call__"):
|
||||
attr_value = attr_value(self)
|
||||
assert attr_value is None or isinstance(attr_value, str)
|
||||
if attr_value is None:
|
||||
del attrs[attr_key]
|
||||
else:
|
||||
attrs[attr_key] = attr_value
|
||||
|
||||
if isinstance(resource.ios[0], Subsignal):
|
||||
fields = OrderedDict()
|
||||
for sub in resource.ios:
|
||||
fields[sub.name] = resolve(sub, dir[sub.name], xdr[sub.name],
|
||||
name="{}__{}".format(name, sub.name),
|
||||
attrs={**attrs, **sub.attrs})
|
||||
return Record([
|
||||
(f_name, f.layout) for (f_name, f) in fields.items()
|
||||
], fields=fields, name=name)
|
||||
|
||||
elif isinstance(resource.ios[0], (Pins, DiffPairs)):
|
||||
phys = resource.ios[0]
|
||||
if isinstance(phys, Pins):
|
||||
phys_names = phys.names
|
||||
port = Record([("io", len(phys))], name=name)
|
||||
if isinstance(phys, DiffPairs):
|
||||
phys_names = []
|
||||
record_fields = []
|
||||
if not self.should_skip_port_component(None, attrs, "p"):
|
||||
phys_names += phys.p.names
|
||||
record_fields.append(("p", len(phys)))
|
||||
if not self.should_skip_port_component(None, attrs, "n"):
|
||||
phys_names += phys.n.names
|
||||
record_fields.append(("n", len(phys)))
|
||||
port = Record(record_fields, name=name)
|
||||
if dir == "-":
|
||||
pin = None
|
||||
else:
|
||||
pin = Pin(len(phys), dir, xdr=xdr, name=name)
|
||||
|
||||
for phys_name in phys_names:
|
||||
if phys_name in self._phys_reqd:
|
||||
raise ResourceError("Resource component {} uses physical pin {}, but it "
|
||||
"is already used by resource component {} that was "
|
||||
"requested earlier"
|
||||
.format(name, phys_name, self._phys_reqd[phys_name]))
|
||||
self._phys_reqd[phys_name] = name
|
||||
|
||||
self._ports.append((resource, pin, port, attrs))
|
||||
|
||||
if pin is not None and resource.clock is not None:
|
||||
self.add_clock_constraint(pin.i, resource.clock.frequency)
|
||||
|
||||
return pin if pin is not None else port
|
||||
|
||||
else:
|
||||
assert False # :nocov:
|
||||
|
||||
value = resolve(resource,
|
||||
*merge_options(resource, dir, xdr),
|
||||
name="{}_{}".format(resource.name, resource.number),
|
||||
attrs=resource.attrs)
|
||||
self._requested[resource.name, resource.number] = value
|
||||
return value
|
||||
|
||||
def iter_single_ended_pins(self):
|
||||
for res, pin, port, attrs in self._ports:
|
||||
if pin is None:
|
||||
continue
|
||||
if isinstance(res.ios[0], Pins):
|
||||
yield pin, port, attrs, res.ios[0].invert
|
||||
|
||||
def iter_differential_pins(self):
|
||||
for res, pin, port, attrs in self._ports:
|
||||
if pin is None:
|
||||
continue
|
||||
if isinstance(res.ios[0], DiffPairs):
|
||||
yield pin, port, attrs, res.ios[0].invert
|
||||
|
||||
def should_skip_port_component(self, port, attrs, component):
|
||||
return False
|
||||
|
||||
def iter_ports(self):
|
||||
for res, pin, port, attrs in self._ports:
|
||||
if isinstance(res.ios[0], Pins):
|
||||
if not self.should_skip_port_component(port, attrs, "io"):
|
||||
yield port.io
|
||||
elif isinstance(res.ios[0], DiffPairs):
|
||||
if not self.should_skip_port_component(port, attrs, "p"):
|
||||
yield port.p
|
||||
if not self.should_skip_port_component(port, attrs, "n"):
|
||||
yield port.n
|
||||
else:
|
||||
assert False
|
||||
|
||||
def iter_port_constraints(self):
|
||||
for res, pin, port, attrs in self._ports:
|
||||
if isinstance(res.ios[0], Pins):
|
||||
if not self.should_skip_port_component(port, attrs, "io"):
|
||||
yield port.io.name, res.ios[0].map_names(self._conn_pins, res), attrs
|
||||
elif isinstance(res.ios[0], DiffPairs):
|
||||
if not self.should_skip_port_component(port, attrs, "p"):
|
||||
yield port.p.name, res.ios[0].p.map_names(self._conn_pins, res), attrs
|
||||
if not self.should_skip_port_component(port, attrs, "n"):
|
||||
yield port.n.name, res.ios[0].n.map_names(self._conn_pins, res), attrs
|
||||
else:
|
||||
assert False
|
||||
|
||||
def iter_port_constraints_bits(self):
|
||||
for port_name, pin_names, attrs in self.iter_port_constraints():
|
||||
if len(pin_names) == 1:
|
||||
yield port_name, pin_names[0], attrs
|
||||
else:
|
||||
for bit, pin_name in enumerate(pin_names):
|
||||
yield "{}[{}]".format(port_name, bit), pin_name, attrs
|
||||
|
||||
def add_clock_constraint(self, clock, frequency):
|
||||
if not isinstance(clock, Signal):
|
||||
raise TypeError("Object {!r} is not a Signal".format(clock))
|
||||
if not isinstance(frequency, (int, float)):
|
||||
raise TypeError("Frequency must be a number, not {!r}".format(frequency))
|
||||
|
||||
if clock in self._clocks:
|
||||
raise ValueError("Cannot add clock constraint on {!r}, which is already constrained "
|
||||
"to {} Hz"
|
||||
.format(clock, self._clocks[clock]))
|
||||
else:
|
||||
self._clocks[clock] = float(frequency)
|
||||
|
||||
def iter_clock_constraints(self):
|
||||
# Back-propagate constraints through the input buffer. For clock constraints on pins
|
||||
# (the majority of cases), toolchains work better if the constraint is defined on the pin
|
||||
# and not on the buffered internal net; and if the toolchain is advanced enough that
|
||||
# it considers clock phase and delay of the input buffer, it is *necessary* to define
|
||||
# the constraint on the pin to match the designer's expectation of phase being referenced
|
||||
# to the pin.
|
||||
#
|
||||
# Constraints on nets with no corresponding input pin (e.g. PLL or SERDES outputs) are not
|
||||
# affected.
|
||||
pin_i_to_port = SignalDict()
|
||||
for res, pin, port, attrs in self._ports:
|
||||
if hasattr(pin, "i"):
|
||||
if isinstance(res.ios[0], Pins):
|
||||
pin_i_to_port[pin.i] = port.io
|
||||
elif isinstance(res.ios[0], DiffPairs):
|
||||
pin_i_to_port[pin.i] = port.p
|
||||
else:
|
||||
assert False
|
||||
|
||||
for net_signal, frequency in self._clocks.items():
|
||||
port_signal = pin_i_to_port.get(net_signal)
|
||||
yield net_signal, port_signal, frequency
|
||||
# Deprecated alias module: importing nmigen.build.res warns and forwards to
# amaranth.build.res (see the re-export imports at the top of the file).
import warnings
warnings.warn("instead of nmigen.build.res, use amaranth.build.res",
              DeprecationWarning, stacklevel=2)
|
||||
|
|
|
|||
|
|
@ -1,268 +1,7 @@
|
|||
from collections import OrderedDict
|
||||
from contextlib import contextmanager
|
||||
from abc import ABCMeta, abstractmethod
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import tempfile
|
||||
import zipfile
|
||||
import hashlib
|
||||
import pathlib
|
||||
from amaranth.build.run import *
|
||||
from amaranth.build.run import __all__
|
||||
|
||||
|
||||
__all__ = ["BuildPlan", "BuildProducts", "LocalBuildProducts", "RemoteSSHBuildProducts"]
|
||||
|
||||
|
||||
|
||||
class BuildPlan:
    """A set of files plus a script that, when executed, builds a design.

    The plan can be executed locally (:meth:`execute_local`) or on a remote
    machine over SSH (:meth:`execute_remote_ssh`); both return
    a :class:`BuildProducts` giving access to the build outputs.
    """

    def __init__(self, script):
        """A build plan.

        Parameters
        ----------
        script : str
            The base name (without extension) of the script that will be executed.
        """
        self.script = script
        # Insertion-ordered so that file iteration order is stable.
        self.files  = OrderedDict()

    def add_file(self, filename, content):
        """
        Add ``content``, which can be a :class:`str`` or :class:`bytes`, to the build plan
        as ``filename``. The file name can be a relative path with directories separated by
        forward slashes (``/``).
        """
        assert isinstance(filename, str) and filename not in self.files
        self.files[filename] = content

    def digest(self, size=64):
        """
        Compute a `digest`, a short byte sequence deterministically and uniquely identifying
        this build plan.
        """
        hasher = hashlib.blake2b(digest_size=size)
        # Hash files in sorted name order so the digest does not depend on
        # the order in which files were added.
        for filename in sorted(self.files):
            hasher.update(filename.encode("utf-8"))
            content = self.files[filename]
            if isinstance(content, str):
                content = content.encode("utf-8")
            hasher.update(content)
        hasher.update(self.script.encode("utf-8"))
        return hasher.digest()

    def archive(self, file):
        """
        Archive files from the build plan into ``file``, which can be either a filename, or
        a file-like object. The produced archive is deterministic: exact same files will
        always produce exact same archive.
        """
        with zipfile.ZipFile(file, "w") as archive:
            # Write archive members in deterministic order and with deterministic timestamp.
            for filename in sorted(self.files):
                archive.writestr(zipfile.ZipInfo(filename), self.files[filename])

    def execute_local(self, root="build", *, run_script=True):
        """
        Execute build plan using the local strategy. Files from the build plan are placed in
        the build root directory ``root``, and, if ``run_script`` is ``True``, the script
        appropriate for the platform (``{script}.bat`` on Windows, ``{script}.sh`` elsewhere) is
        executed in the build root.

        Returns :class:`LocalBuildProducts`.
        """
        os.makedirs(root, exist_ok=True)
        cwd = os.getcwd()
        try:
            os.chdir(root)

            for filename, content in self.files.items():
                filename = pathlib.Path(filename)
                # Forbid parent directory components completely to avoid the possibility
                # of writing outside the build root.
                assert ".." not in filename.parts
                dirname = os.path.dirname(filename)
                if dirname:
                    os.makedirs(dirname, exist_ok=True)

                mode = "wt" if isinstance(content, str) else "wb"
                with open(filename, mode) as f:
                    f.write(content)

            if run_script:
                if sys.platform.startswith("win32"):
                    # Without "call", "cmd /c {}.bat" will return 0.
                    # See https://stackoverflow.com/a/30736987 for a detailed explanation of why.
                    # Running the script manually from a command prompt is unaffected.
                    subprocess.check_call(["cmd", "/c", "call {}.bat".format(self.script)])
                else:
                    subprocess.check_call(["sh", "{}.sh".format(self.script)])

            return LocalBuildProducts(os.getcwd())

        finally:
            os.chdir(cwd)

    def execute_remote_ssh(self, *, connect_to=None, root, run_script=True):
        """
        Execute build plan using the remote SSH strategy. Files from the build
        plan are transferred via SFTP to the directory ``root`` on a remote
        server. If ``run_script`` is ``True``, the ``paramiko`` SSH client will
        then run ``{script}.sh``. ``root`` can either be an absolute or
        relative (to the login directory) path.

        ``connect_to`` is a dictionary that holds all input arguments to
        ``paramiko``'s ``SSHClient.connect``
        (`documentation <http://docs.paramiko.org/en/stable/api/client.html#paramiko.client.SSHClient.connect>`_).
        At a minimum, the ``hostname`` input argument must be supplied in this
        dictionary as the remote server.

        Returns :class:`RemoteSSHBuildProducts`.
        """
        # Avoid a mutable default argument: this dict is stored inside the
        # returned RemoteSSHBuildProducts, so a shared `{}` default could
        # leak state between calls. `None` sentinel is backward-compatible.
        if connect_to is None:
            connect_to = {}

        # Imported locally so paramiko is only required for remote builds.
        from paramiko import SSHClient

        with SSHClient() as client:
            client.load_system_host_keys()
            client.connect(**connect_to)

            with client.open_sftp() as sftp:
                def mkdir_exist_ok(path):
                    try:
                        sftp.mkdir(str(path))
                    except IOError as e:
                        # mkdir fails if directory exists. This is fine in nmigen.build.
                        # Reraise errors containing e.errno info.
                        if e.errno:
                            raise e

                def mkdirs(path):
                    # Iteratively create parent directories of a file by iterating over all
                    # parents except for the root ("."). Slicing the parents results in
                    # TypeError, so skip over the root ("."); this also handles files
                    # already in the root directory.
                    for parent in reversed(path.parents):
                        if parent == pathlib.PurePosixPath("."):
                            continue
                        else:
                            mkdir_exist_ok(parent)

                mkdir_exist_ok(root)

                sftp.chdir(root)
                for filename, content in self.files.items():
                    filename = pathlib.PurePosixPath(filename)
                    # Same path-escape protection as in execute_local().
                    assert ".." not in filename.parts

                    mkdirs(filename)

                    mode = "wt" if isinstance(content, str) else "wb"
                    with sftp.file(str(filename), mode) as f:
                        # "b/t" modifier ignored in SFTP.
                        if mode == "wt":
                            f.write(content.encode("utf-8"))
                        else:
                            f.write(content)

            if run_script:
                transport = client.get_transport()
                channel = transport.open_session()
                channel.set_combine_stderr(True)

                cmd = "if [ -f ~/.profile ]; then . ~/.profile; fi && cd {} && sh {}.sh".format(root, self.script)
                channel.exec_command(cmd)

                # Show the output from the server while products are built.
                buf = channel.recv(1024)
                while buf:
                    print(buf.decode("utf-8"), end="")
                    buf = channel.recv(1024)

        return RemoteSSHBuildProducts(connect_to, root)

    def execute(self):
        """
        Execute build plan using the default strategy. Use one of the ``execute_*`` methods
        explicitly to have more control over the strategy.
        """
        return self.execute_local()
|
||||
|
||||
class BuildProducts(metaclass=ABCMeta):
    """Abstract access to the files produced by executing a :class:`BuildPlan`.

    Concrete subclasses define where the products live (local directory,
    remote server, ...); retrieval goes through :meth:`get`.
    """

    @abstractmethod
    def get(self, filename, mode="b"):
        """
        Extract ``filename`` from build products, and return it as a :class:`bytes` (if ``mode``
        is ``"b"``) or a :class:`str` (if ``mode`` is ``"t"``).
        """
        # Subclasses are expected to call super().get() to share this
        # argument check before doing the actual retrieval.
        assert mode in ("b", "t")

    @contextmanager
    def extract(self, *filenames):
        """
        Extract ``filenames`` from build products, place them in an OS-specific temporary file
        location, with the extension preserved, and delete them afterwards. This method is used
        as a context manager, e.g.: ::

            with products.extract("bitstream.bin", "programmer.cfg") \
                    as bitstream_filename, config_filename:
                subprocess.check_call(["program", "-c", config_filename, bitstream_filename])
        """
        files = []
        try:
            for filename in filenames:
                # On Windows, a named temporary file (as created by Python) is not accessible to
                # others if it's still open within the Python process, so we close it and delete
                # it manually.
                file = tempfile.NamedTemporaryFile(
                    prefix="nmigen_", suffix="_" + os.path.basename(filename),
                    delete=False)
                files.append(file)
                file.write(self.get(filename))
                file.close()

            # Yield a shape matching the number of requested files: nothing,
            # a single name, or a list of names.
            if len(files) == 0:
                return (yield)
            elif len(files) == 1:
                return (yield files[0].name)
            else:
                return (yield [file.name for file in files])
        finally:
            # Temporary files are removed even if the with-body raises.
            for file in files:
                os.unlink(file.name)
|
||||
|
||||
class LocalBuildProducts(BuildProducts):
    """Build products stored in a directory on the local filesystem."""

    def __init__(self, root):
        # We provide no guarantees that files will be available on the local filesystem (i.e. in
        # any way other than through `products.get()`) in general, so downstream code must never
        # rely on this, even when we happen to use a local build most of the time.
        self.__root = root

    def get(self, filename, mode="b"):
        """Read ``filename`` from the build directory as bytes (mode "b") or str (mode "t")."""
        # Validate `mode` via the abstract base implementation.
        super().get(filename, mode)
        path = os.path.join(self.__root, filename)
        with open(path, "r" + mode) as stream:
            return stream.read()
|
||||
|
||||
class RemoteSSHBuildProducts(BuildProducts):
    """Build products stored in directory ``root`` on a remote SSH server.

    ``connect_to`` holds keyword arguments for ``paramiko``'s
    ``SSHClient.connect``; see :meth:`BuildPlan.execute_remote_ssh`.
    A fresh SSH/SFTP connection is opened for every :meth:`get` call.
    """

    def __init__(self, connect_to, root):
        self.__connect_to = connect_to
        self.__root = root

    def get(self, filename, mode="b"):
        # Validate `mode` via the abstract base implementation.
        super().get(filename, mode)

        # Imported locally so paramiko is only required when remote build
        # products are actually accessed.
        from paramiko import SSHClient

        with SSHClient() as client:
            client.load_system_host_keys()
            client.connect(**self.__connect_to)

            with client.open_sftp() as sftp:
                sftp.chdir(self.__root)

                with sftp.file(filename, "r" + mode) as f:
                    # "b/t" modifier ignored in SFTP.
                    if mode == "t":
                        return f.read().decode("utf-8")
                    else:
                        return f.read()
||||
# Deprecated alias module: importing nmigen.build.run warns and forwards to
# amaranth.build.run (see the re-export imports at the top of the file).
import warnings
warnings.warn("instead of nmigen.build.run, use amaranth.build.run",
              DeprecationWarning, stacklevel=2)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue