forge package

Submodules

forge.cli module

Forge CLI.

forge.cli.call_main()[source]
forge.cli.match(name, pattern)[source]
forge.cli.primary_version(resources)[source]
forge.cli.unfurl(repos)[source]

forge.config module

class forge.config.Config(search_path=None, registry=None, docker_repo=None, user=None, password=None, workdir=None, profiles=None, concurrency=None)[source]

Bases: object

class forge.config.ECRRegistry(type, account=None, region=None, aws_access_key_id=None, aws_secret_access_key=None)[source]

Bases: object

class forge.config.GCRRegistry(type, url, project, key=None)[source]

Bases: object

class forge.config.LocalRegistry(type)[source]

Bases: object

class forge.config.Profile(search_path=None, registry=None)[source]

Bases: object

class forge.config.Registry(type, url, verify, user, password, namespace=None)[source]

Bases: object

forge.config.load(*args, **kwargs)[source]

forge.core module

class forge.core.Forge(verbose=0, config=None, profile=None, branch=None, scan_base=True)[source]

Bases: object

bake
build
clean
deploy
execute(goal)[source]
load_config()[source]
load_services()[source]
manifest
metadata
prompt(msg, default=None, loader=None, echo=True, optional=False)[source]
pull
push
scan
setup
summary
template(svc)[source]
forge.core.file_contents(path)[source]
forge.core.get_docker(registry)[source]

forge.dispatcher module

class forge.dispatcher.Dispatcher[source]

Bases: object

dispatch()[source]
schedule(fun, *args)[source]
work()[source]

forge.docker module

class forge.docker.Builder(docker, cid, changes=())[source]

Bases: object

commit(name, version)[source]
cp(source, target)[source]
kill()[source]
run(*args)[source]
class forge.docker.Docker(registry, namespace, user, password, verify=True)[source]

Bases: forge.docker.DockerBase

image
registry_get
remote_exists
repo_get
class forge.docker.DockerBase[source]

Bases: object

build
builder
builder_hash(dockerfile, args)[source]
builder_prefix(name)[source]
clean
exists
find_builders(name)[source]
get_changes(dockerfile)[source]
local_exists
needs_push
pull
push
run
tag
validate
class forge.docker.DockerImageBuilder[source]

Bases: object

DOCKER = 'docker'
IMAGEBUILDER = 'imagebuilder'
classmethod get_cmd_from_name(str)[source]
exception forge.docker.DockerImageBuilderError[source]

Bases: forge.tasks.TaskError

report_traceback = False
class forge.docker.ECRDocker(account=None, region=None, aws_access_key_id=None, aws_secret_access_key=None)[source]

Bases: forge.docker.DockerBase

image
namespace
registry
remote_exists
class forge.docker.GCRDocker(url, project, key)[source]

Bases: forge.docker.Docker

class forge.docker.LocalDocker[source]

Bases: forge.docker.DockerBase

image(name, version)[source]
needs_push(name, version)[source]
remote_exists(name, version)[source]
forge.docker.image(registry, namespace, name, version)[source]

forge.executor module

exception forge.executor.ChildError(parent, *children)[source]

Bases: exceptions.Exception

Used to indicate that a background task has had an error. The details are reported at the source of the error, so this error message is intentionally sparse.

forge.executor.PENDING = PENDING

A sentinel value used to indicate that the task is not yet complete.

class forge.executor.Result(executor, parent)[source]

Bases: object

errors
exception
get()[source]
get_traceback()[source]
is_leaf_error()[source]
is_signal((filename, lineno, funcname, text))[source]
leaf_errors
recover()[source]
report(autocolor=True)[source]
result
terminal
traversal
wait()[source]
class forge.executor.executor(name=None, async=False)[source]

Bases: object

An executor provides some useful utilities for safely running and coordinating code:

# an executor can run stuff safely:
exe = executor("my-executor")
result = exe.run(lambda x: x/0, 1)

# a result can be an error or a value
if result.value is ERROR:
    print result.exception
else:
    print result.value

# you can retrieve the result just as if you had run the
# function
try:
    x = result.get()
    print x
except ZeroDivisionError, e:
    print e

An executor can also be used to run asynchronous tasks:

exe = executor("my-async-executor", async=True)
result = exe.run(lambda x: x/0, 1)
# the result is pending
if result.value is PENDING:
   print "still waiting..."

# block until the result is available
result.wait()

if result.value is ERROR:
    print result.exception
else:
    print result.value

When executors are nested, any errors occurring in asynchronous tasks are tracked:

def my_code():
    exe = executor("sub-executor", async=True)
    # let's launch a background task and ignore the result
    exe.run(lambda: 1/0)

exe = executor("root-executor")
result = exe.run(my_code)

The executor tracks all background tasks and should any errors occur, the executor constructs a full stack trace that includes not only the line of code in the background thread, but the stack for the code that launched the background thread:

print result.report() -->

    root-executor: 1 child task(s) errored
      sub-executor: unexpected error
        
        Traceback (most recent call last):
          File "<stdin>", line 1, in <module>
          File "<stdin>", line 4, in my_code
          File "<stdin>", line 4, in <lambda>
        ZeroDivisionError: integer division or modulo by zero
ALLOCATED = {}
COLORS = [u'', u'', u'', u'', u'', u'', u'', u'', u'', u'', u'', u'', u'']
CURRENT = <eventlet.corolocal.local object>
MUXER = <forge.executor._Muxer object>
classmethod allocate_color(name)[source]
classmethod current()[source]
classmethod current_result()[source]
do_run(result, fun, args, kwargs)[source]
echo(text=u'', prefix=u'\u2551 ', newline=True)[source]
error(text)[source]
info(text)[source]
n = 'bold_white_on_magenta'
report()[source]
classmethod resize(size)[source]
run(fun, *args, **kwargs)[source]
classmethod setup()[source]
wait()[source]
warn(text)[source]
forge.executor.output = <module 'forge.output' from '/opt/buildhome/python2.7/local/lib/python2.7/site-packages/forge/output.pyc'>

A sentinel value used to indicate that the task is not yet complete.

forge.github module

class forge.github.Github(token)[source]

Bases: object

clone(url, directory)[source]
exists(url)[source]
get(api)[source]
list(organization, filter='*')[source]
paginate(api)[source]
pull(url, directory)[source]
remote(directory)[source]
forge.github.inject_token(url, token)[source]
forge.github.next_page(response)[source]

forge.istio module

forge.istio.istio[source]

forge.jinja2 module

class forge.jinja2.WarnUndefined(hint=None, obj=missing, name=None, exc=<class 'jinja2.exceptions.UndefinedError'>)[source]

Bases: jinja2.runtime.Undefined

warn()[source]
forge.jinja2.render[source]

Renders a file or directory as a jinja template using the supplied variables.

The source is a path pointing to either an individual file or a directory. The target is a path pointing to the desired location of the output.

If the source points to a file, then the target is created/overwritten as a file.

If the source points to a directory, the target is created as a directory. If the target already exists, it is removed and recreated prior to rendering the template.

forge.jinja2.renders[source]

Renders a string as a jinja template. The name is used where filename would normally appear in error messages.

forge.kubernetes module

class forge.kubernetes.Kubernetes(namespace=None, context=None, dry_run=False)[source]

Bases: object

annotate
apply
delete
label
list
resources
forge.kubernetes.is_yaml_empty(dir)[source]
forge.kubernetes.is_yaml_file(name)[source]
forge.kubernetes.selector(labels)[source]

forge.match module

class forge.match.Begin(cls)[source]

Bases: forge.match.Marker

class forge.match.End[source]

Bases: forge.match.Marker

class forge.match.Fragment(start, extend, doc)[source]

Bases: object

class forge.match.Marker[source]

Bases: object

exception forge.match.MatchError[source]

Bases: exceptions.Exception

class forge.match.State[source]
apply(*args, **kwargs)[source]
compile()[source]
edge(*args)[source]
edges
epsilon_closure
force()[source]
match(*args, **kwargs)[source]
nodes
sequence = 216
transitions
forge.match.cat(patterns)[source]
forge.match.choice(*patterns)[source]
forge.match.compile(fragment)[source]
forge.match.deduplicate(items)[source]
class forge.match.delay(thunk)[source]

Bases: object

force()[source]
forge.match.flatten(values)[source]
class forge.match.lazy(name)[source]

Bases: forge.match.delay

force()[source]
forge.match.many(*pattern, **kwargs)[source]
forge.match.match(*pattern)[source]
forge.match.ntuple(pattern, **kwargs)[source]
forge.match.one(*pattern)[source]
forge.match.opt(*pattern)[source]
forge.match.ppargs(args, dedup=False)[source]
forge.match.ppfun(fun)[source]
forge.match.projections(value, match_value=True)[source]
class forge.match.trait(value)[source]

Bases: object

forge.match.when(pattern, action)[source]

forge.output module

class forge.output.Drawer[source]

Bases: object

draw(lines, trim=True)[source]
class forge.output.Terminal(*args, **kwargs)[source]

Bases: blessed.terminal.Terminal

wrap(text)[source]
wrap_line(text)[source]

forge.schema module

Load & validate json/yaml files with quality error messages.

The schema module contains a library for defining schemas that can perform loading and validation of json or yaml documents with high quality error messages.

Motivation

If you’re wondering why this is written as part of forge as opposed to using an external library, there are a number of reasons:

  1. An important goal for forge is to provide high quality error messages for schema violations. For example, including the filename and line number of the cause.
  2. Similarly, it is important to be able to quickly extend configuration input to meet new requirements while maintaining backwards compatibility.
  3. Finally, it is also very important to be able to generate high quality documentation from the schema.

So far I have been unable to find existing tooling that meets these requirements in combination. My first attempt was to use the jsonschema package combined with docson, but jsonschema doesn’t validate files, it validates data structures, and so it doesn’t have access to the filename and line number where the validation occurred.

Secondly, while json schema in general is quite flexible which aids in (2), it is actually too flexible. The json schema union construct allows for situations where it is difficult/impossible to tell which alternative schema the input was intended to match, and so this makes providing useful error messages quite difficult.

This library defines a more restricted form of union which is flexible enough for extending schemas while retaining backwards compatibility, but still maintains the property of being able to unambiguously classify input as being intended to match just one of the options.

If you know of a good quality third party library that meets these requirements, please let me know!

Tutorial

A schema is represented as a tree of python objects, for example you can construct a schema for a map as follows:

# this schema will load an open ended map
>>> scm = Map(Any())

Any schema object knows how to load from either a string or a file:

# load from a file
>>> scm.load("data.yaml")  

# load from a string
>>> scm.load("string-data", "{foo: bar, baz: moo}")
OrderedDict([(u'foo', u'bar'), (u'baz', u'moo')])

# the name is used in error messages
>>> scm.load("string-data", "asdf")
Traceback (most recent call last):
  ...
SchemaError: expecting map[any], got string
  in "string-data", line 1, column 1

You can use Any() and Scalar() to construct polymorphic types, but you can also define monomorphic schemas:

# a list of strings
>>> scm = Sequence(String())
>>> scm.load("strings", "[a, b, c]")
[u'a', u'b', u'c']
>>> scm.load("strings", "[1, 2, 3]")
Traceback (most recent call last):
  ...
SchemaError: expecting string, got integer
  in "strings", line 1, column 2

You can define structured types as well using the Class schema. The Class schema requires a type name and a documentation string as the first two arguments:

# define a structured type
>>> scm = Class("book", "A yaml type identifying a book",
...             Field("title", String(), docs="The title of the book"),
...             Field("isbn", String(), docs="The isbn number of the book"),
...             Field("author", String(), docs="The author of the book"))

>>> scm.load('potter.yaml', '''{title: "The Philosopher's Stone", isbn: "1234", author: "J.K. Rowling"}''')
OrderedDict([('author', u'J.K. Rowling'), ('isbn', u'1234'), ('title', u"The Philosopher's Stone")])

By default, all map types will produce OrderedDicts, but if you want a custom class, you can pass a constructor to the Class schema when you construct it:

>>> class Book(object):
...     def __init__(self, title, isbn, author):
...         self.title = title
...         self.isbn = isbn
...         self.author = author
...     def __repr__(self):
...         return "Book(%r, %r, %r)" % (self.title, self.isbn, self.author)

# structured type with custom class
>>> scm = Class("book", "A yaml type identifying a book",
...             Book,
...             Field("title", String(), docs="The title of the book"),
...             Field("isbn", String(), docs="The isbn number of the book"),
...             Field("author", String(), docs="The author of the book"))
>>> scm.load('potter.yaml', '''{title: "The Philosopher's Stone", isbn: "1234", author: "J.K. Rowling"}''')
Book(u"The Philosopher's Stone", u'1234', u'J.K. Rowling')

The constructor doesn’t need to be a class, you can use any callable. It will be invoked with the field values supplied as keyword arguments:

>>> scm = Class("coord", "A yaml type identifying a coordinate",
...             lambda **kw: (kw["x"], kw["y"]),
...             Field("x", Float(), docs="The X coordinate."),
...             Field("y", Float(), docs="The Y coordinate."))
>>> scm.load("data", "{x: 1.0, y: 2.0}")
(1.0, 2.0)

If the yaml value happens to contain dashes or conflict with a python keyword, you can specify an alias for a Field to be used in python code:

>>> scm = Class("coord", "A yaml type identifying a coordinate",
...             lambda x_coord, y_coord: (x_coord, y_coord),
...             Field("x-coord", Float(), alias="x_coord", docs="The X coordinate."),
...             Field("y-coord", Float(), alias="y_coord", docs="The Y coordinate."))
>>> scm.load("data", "{x-coord: 1.0, y-coord: 2.0}")
(1.0, 2.0)
class forge.schema.Any[source]

Bases: forge.schema.Schema

load
ScalarNode:
Load any scalar node as a python object.
MappingNode:
Load any mapping node as a python OrderedDict.
SequenceNode:
Load any sequence node as a python list.
Node:
Default data loader. Reports an exception.
basestring:
Load data from a json or yaml file.
basestring, basestring:
Load data from json or yaml input. The supplied name will appear as the filename in error messages.
name = 'any'
traversal
class forge.schema.Base64(*tags)[source]

Bases: forge.schema.Scalar

decode

ScalarNode: None

ScalarNode: None

default_tags = ('string',)
name = 'base64'
render()[source]
class forge.schema.Boolean(*tags)[source]

Bases: forge.schema.Scalar

decode

ScalarNode: None

ScalarNode: None

default_tags = ('bool',)
name = 'boolean'
render()[source]
class forge.schema.Class[source]

Bases: forge.schema.Schema

docname
load

MappingNode: None

Node:
Default data loader. Reports an exception.
basestring:
Load data from a json or yaml file.
basestring, basestring:
Load data from json or yaml input. The supplied name will appear as the filename in error messages.
render()[source]
render_all()[source]
traversal
class forge.schema.Collection[source]

Bases: forge.schema.Schema

class forge.schema.Constant(value, type=None)[source]

Bases: forge.schema.Scalar

decode

ScalarNode: None

ScalarNode: None

name
render()[source]
traversal
class forge.schema.Field[source]

Bases: object

required
class forge.schema.Float(*tags)[source]

Bases: forge.schema.Scalar

decode

ScalarNode: None

ScalarNode: None

default_tags = ('float', 'integer')
name = 'float'
render()[source]
class forge.schema.Integer(*tags)[source]

Bases: forge.schema.Scalar

decode

ScalarNode: None

ScalarNode: None

default_tags = ('integer',)
name = 'integer'
render()[source]
class forge.schema.Map[source]

Bases: forge.schema.Collection

docname
load

MappingNode: None

Node:
Default data loader. Reports an exception.
basestring:
Load data from a json or yaml file.
basestring, basestring:
Load data from json or yaml input. The supplied name will appear as the filename in error messages.
name
traversal
class forge.schema.Scalar(*tags)[source]

Bases: forge.schema.Schema

decode

ScalarNode: None

default_tags = ('string', 'integer', 'float')
load
ScalarNode:
Load data from a yaml node.
Node:
Default data loader. Reports an exception.
basestring:
Load data from a json or yaml file.
basestring, basestring:
Load data from json or yaml input. The supplied name will appear as the filename in error messages.
name = 'scalar'
traversal
class forge.schema.Schema[source]

Bases: object

docname
load
Node:
Default data loader. Reports an exception.
basestring:
Load data from a json or yaml file.
basestring, basestring:
Load data from json or yaml input. The supplied name will appear as the filename in error messages.
exception forge.schema.SchemaError[source]

Bases: exceptions.Exception

class forge.schema.Sequence[source]

Bases: forge.schema.Collection

docname
load

SequenceNode: None

Node:
Default data loader. Reports an exception.
basestring:
Load data from a json or yaml file.
basestring, basestring:
Load data from json or yaml input. The supplied name will appear as the filename in error messages.
name
traversal
class forge.schema.String(*tags)[source]

Bases: forge.schema.Scalar

decode

ScalarNode: None

ScalarNode: None

default_tags = ('string',)
name = 'string'
render()[source]
class forge.schema.Union[source]

Bases: forge.schema.Schema

Unions must be able to discriminate between their schemas. The means to discriminate can be somewhat flexible. A discriminator is computed according to the following algorithm:

Logically the discriminator consists of the following components:

  1. The type. This is sufficient for scalar values and sequences, but we need more to discriminate maps into distinct types.
  2. For maps, a further discriminator is computed based on a signature composed of all required fields of type Constant.
docname
load

Node: None

Node:
Default data loader. Reports an exception.
basestring:
Load data from a json or yaml file.
basestring, basestring:
Load data from json or yaml input. The supplied name will appear as the filename in error messages.
name
traversal

forge.sentinel module

class forge.sentinel.Sentinel(name)[source]

Bases: object

A convenience class that can be used for creating constant values that str/repr using their constant name.

forge.service module

class forge.service.Container(service, dockerfile, context=None, args=None, rebuild=None, name=None, index=None, builder=None)[source]

Bases: object

abs_context
abs_dockerfile
build
image
rebuild
version
class forge.service.Discovery(forge)[source]

Bases: object

dependencies
resolve(svc, dep)[source]
search
class forge.service.Service(forge, descriptor, shallow=False)[source]

Bases: object

containers
deployment()[source]
docker
forge_profile
image(container)[source]
info()[source]
json()[source]
manifest_dir
manifest_target_dir
metadata()[source]
name
profile
pull
rel_descriptor
repo
requires
root
search_path
version
forge.service.get_ancestors(path, stop='/')[source]
forge.service.get_ignores(directory)[source]
forge.service.get_search_path(forge, svc)[source]
forge.service.get_version(path, dirty)[source]
forge.service.is_git(path)[source]
forge.service.is_service_descriptor(path)[source]
forge.service.load_service_yaml(path, **vars)[source]
forge.service.load_service_yamls[source]
forge.service.shafiles(root, files)[source]

forge.service_info module

forge.service_info.load(*args, **kwargs)[source]

forge.tasks module

class forge.tasks.Elidable(*parts)[source]

Bases: object

elide()[source]
class forge.tasks.SHResult(command, code, output)[source]

Bases: object

class forge.tasks.Secret[source]

Bases: str

exception forge.tasks.TaskError[source]

Bases: exceptions.Exception

report_traceback = False

Used to signal that an anticipated error has occurred. A task error will be rendered without its stack trace, so it should include enough information in the error message to diagnose the issue.

class forge.tasks.TaskFilter(name='')[source]

Bases: logging.Filter

This logging filter augments log records with useful context when log statements are made within a task. It also captures the log messages made within a task and records them in the execution object for a task invocation.

filter(record)[source]
forge.tasks.cull(task, sequence)[source]
class forge.tasks.decorator(task, object=_UNBOUND)[source]

Bases: object

go(*args, **kwargs)[source]
run(*args, **kwargs)[source]
forge.tasks.elapsed(delta)[source]

Return a pretty representation of an elapsed time.

forge.tasks.elide(t)[source]
class forge.tasks.execution[source]

Bases: object

info(*args, **kwargs)[source]
log(*args, **kwargs)[source]
forge.tasks.gather(sequence)[source]

Resolve a sequence of asynchronously executed tasks.

forge.tasks.get[source]
forge.tasks.json_patch(response, parser)[source]
forge.tasks.project(task, sequence)[source]
forge.tasks.setup(logfile=None)[source]

Setup the task system. This will perform eventlet monkey patching as well as set up logging.

forge.tasks.sh[source]
class forge.tasks.task(name=None, context=None)[source]

Bases: object

A decorator used to mark a given function or method as a task.

A task can really be any python code, however it is expected that tasks will perform scripting, coordination, integration, and general glue-like activities that are used to automate tasks on behalf of humans.

This kind of code generally suffers from a number of problems:

  • There is rarely good user feedback for what is happening at any given moment.
  • When integration assumptions are violated (e.g. remote system barfs) the errors are often swallowed/opaque.
  • Because of the way it is incrementally built via growing convenience scripts it is often opaque and difficult to debug.
  • When parallel workflows are needed, they are difficult to code in a way that preserves clear user feedback on progress and errors.

Using the task decorator provides a number of conveniences useful for this kind of code.

  • Task arguments/results are automatically captured for easy debugging.
  • Convenience APIs for executing tasks in parallel.
  • Convenience for safely executing shell and http requests with good error reporting and user feedback.

Any python function can be marked as a task and invoked in the normal way you would invoke any function, e.g.:

@task()
def normpath(path):
    parts = [p for p in path.split("/") if p]
    normalized = "/".join(parts)
    if path.startswith("/"):
      return "/" + normalized
    else:
      return normalized

print normpath("/foo//bar/baz") -> "/foo/bar/baz"

The decorator however provides several other convenient ways you can invoke a task:

# using normpath.go, I can launch subtasks in parallel
normalized = normpath.go("asdf"), normpath.go("fdsa"), normpath.go("bleh")
# now I can fetch the result of an individual subtask:
result = normalized[0].get()
# or sync on any outstanding sub tasks:
task.sync()

You can also run a task. This will render progress indicators, status, and errors to the screen as the task and any subtasks proceed:

normpath.run("/foo//bar/baz")
static context(*args, **kwds)[source]
static echo(*args, **kwargs)[source]
static error(*args, **kwargs)[source]
generate_id()[source]
static info(*args, **kwargs)[source]
static sync()[source]

Wait until all child tasks have terminated.

static terminal()[source]
static verbose(*args, **kwds)[source]
static warn(*args, **kwargs)[source]
forge.tasks.watch[source]

forge.util module

forge.util.dict_constructor(loader, node)[source]
forge.util.dict_representer(dumper, data)[source]
forge.util.search_parents(name, start=None, root=False)[source]
forge.util.setup()[source]
forge.util.setup_logging()[source]
forge.util.setup_yaml()[source]
forge.util.unicode_representer(dumper, uni)[source]