Checkpoint commit
M .assembly/commit/on_commit-00_quality_check +1 -1
@@ 1,4 1,4 @@ 
 #!/usr/bin/env bash
 
-rake quality_check
+rake quality_checks
 

          
M .pryrc +1 -0
@@ 4,6 4,7 @@ 
 
 begin
 	require 'assemblage'
+	Assemblage.load_config unless Assemblage.config
 rescue Exception => e
 	$stderr.puts "Ack! Libraries failed to load: #{e.message}\n\t" +
 		e.backtrace.join( "\n\t" )

          
M Cookbook.md +39 -13
@@ 1,29 1,55 @@ 
 # Assemblage Cookbook
 
+Assembly script search path:
+
+- «assembly run dir»/scripts/__all__
+- «assembly run dir»/scripts/«trait»/__all__
+- «assembly run dir»/scripts/«trait»/«event»
+- «repo clone»/.assembly/«event»
+
+
+
 ### Run Tests On Every Commit in a Single Repo
 
-    <repo>/.assemblies/commit/00_run_tests.sh
+    «repo»/.assemblies/commit/00_run_tests.sh
 
 
 ### Mail a Notification On Every Release
 
-    /usr/local/assemblies/__global___/tag/00_mail_release_notice.sh
+    /usr/local/assemblies/__any__/tag/00_mail_release_notice.sh
 
 
 ### Build gem for Every Release for Every Ruby Repo
 
-    /usr/local/assemblies/<trait>/tag/00_run_tests.sh
-
-
+    /usr/local/assemblies/«trait»/tag/03_build_gem.sh
 
 
-<repo>/.assemblies/config.yml
+Worker patterns, given workers osx10.14, osx10.15, freebsd11, and linux19:
+
+    repo:                      --» all workers
+    repo: osx10.14 freebsd11   --» only the listed workers
+    repo: -osx10.15 -linux19   --» any worker except the listed ones
+
+
+## Result Hash
 
----
-traits:
-  - ruby
+    {
+      id: «uuid»,
+      version: «Assemblage version»,
+      log: «log of setup/teardown»,
+      stages: [
+        {
+          name: «stage name»,
+          status: ('in-progress'|'succeeded'|'failed'),
+          times: {
+              utime:  «ms»,
+              stime:  «ms»,
+              cutime: «ms»,
+              cstime: «ms»
+          },
+          log: «stdout + stderr of the assembly, error message on failure»,
+          artifacts: «tarball»
+        },
+      ]
+    }
 
-requirements:
-  imagemagick: 6
-  ruby: 2.6
-
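
The worker-pattern rules added to the Cookbook above (no pattern selects every
worker, bare names select only those workers, and a leading `-` excludes a
worker) can be summed up in a few lines of Ruby. This is a minimal sketch under
those assumptions; the helper name and the use of shell-style globs are
illustrative, not code from this commit:

    # Hypothetical helper: which workers do a repo's patterns select?
    def workers_for( patterns, workers )
        includes = patterns.reject {|pat| pat.start_with?('-') }
        excludes = patterns.select {|pat| pat.start_with?('-') }.
            map {|pat| pat.delete_prefix('-') }

        selected = if includes.empty?
            workers
        else
            workers.select {|w| includes.any? {|pat| File.fnmatch?(pat, w) } }
        end

        selected.reject {|w| excludes.any? {|pat| File.fnmatch?(pat, w) } }
    end

    workers = %w[osx10.14 osx10.15 freebsd11 linux19]
    workers_for( [], workers )                      # => all four workers
    workers_for( %w[osx10.14 freebsd11], workers )  # => ["osx10.14", "freebsd11"]
    workers_for( %w[-osx10.15 -linux19], workers )  # => ["osx10.14", "freebsd11"]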

          
M README.md +8 -8
@@ 82,14 82,14 @@ course, run all of this on a single host
 Now copy the Mercurial hook to the repository (or another directory) and hook it into the .hg/hgrc:
 
     [hooks]
-    incoming.assemblage = assemblage-hook.rb
-    changegroup.assemblage = assemblage-hook.rb
-    commit.assemblage = assemblage-hook.rb
-    incoming.assemblage = assemblage-hook.rb
-    txnclose-bookmark.assemblage = assemblage-hook.rb
-    txnclose-phase.assemblage = assemblage-hook.rb
-    pushkey.assemblage = assemblage-hook.rb
-    tag.assemblage = assemblage-hook.rb
+    incoming.assemblage = mercurial-hook.rb
+    changegroup.assemblage = mercurial-hook.rb
+    commit.assemblage = mercurial-hook.rb
+    incoming.assemblage = mercurial-hook.rb
+    txnclose-bookmark.assemblage = mercurial-hook.rb
+    txnclose-phase.assemblage = mercurial-hook.rb
+    pushkey.assemblage = mercurial-hook.rb
+    tag.assemblage = mercurial-hook.rb
 
 And finally, we'll combine all the parts into an assembly called
 `project1-freebsd-tests` that will run on a worker with the `freebsd`, `ruby`,

          
A => data/assemblage/ASSEMBLAGE_SCHEMA.sql +45 -0
@@ 0,0 1,45 @@ 
+--
+-- Sets up schema for Assemblage database
+--
+-- If running in a local dev environment, you should load DEV_DB_SETUP.sql instead:
+--
+--   psql -qU postgres -f DEV_DB_SETUP.sql
+--
+-- If there's already an `assemblage` database, you can run this file directly like so:
+--
+--   psql -qU postgres -f ASSEMBLAGE_SCHEMA.sql assemblage
+--
+
+--
+-- Extensions
+--
+
+CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
+COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language';
+
+CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;
+COMMENT ON EXTENSION citext IS 'data type for case-insensitive character strings';
+
+CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public;
+COMMENT ON EXTENSION pg_stat_statements IS 'track execution statistics of all SQL statements executed';
+
+CREATE EXTENSION IF NOT EXISTS "uuid-ossp" WITH SCHEMA public;
+COMMENT ON EXTENSION "uuid-ossp" IS 'generate universally unique identifiers (UUIDs)';
+
+
+--
+-- System/non-model tables
+--
+
+-- The table that tracks which migrations have already been run for the current
+-- database
+SET search_path TO public, pg_catalog;
+CREATE TABLE IF NOT EXISTS schema_migrations (
+    name text NOT NULL,
+    model_class text NOT NULL
+);
+ALTER TABLE public.schema_migrations OWNER TO migrator;
+
+-- Migrator should be able to do anything to any public table.
+GRANT ALL ON ALL TABLES IN SCHEMA public TO migrator WITH GRANT OPTION;
+

          
A => data/assemblage/DEV_DB_SETUP.sql +43 -0
@@ 0,0 1,43 @@ 
+--
+-- Assemblage PostgreSQL DDL
+--
+
+SET statement_timeout = 0;
+SET lock_timeout = 0;
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = on;
+SET check_function_bodies = false;
+SET client_min_messages = warning;
+
+--
+-- Roles
+--
+
+CREATE ROLE admin WITH SUPERUSER;
+CREATE ROLE migrator;
+CREATE ROLE application;
+
+--
+-- Users
+--
+
+CREATE USER service IN ROLE application;
+
+-- Production database
+DROP DATABASE IF EXISTS assemblage;
+CREATE DATABASE assemblage WITH OWNER postgres ENCODING = 'utf-8';
+GRANT ALL ON DATABASE assemblage TO migrator WITH GRANT OPTION;
+
+-- Unit test database
+DROP DATABASE IF EXISTS assemblage_test;
+CREATE DATABASE assemblage_test WITH OWNER postgres ENCODING = 'utf-8';
+GRANT ALL ON DATABASE assemblage_test TO PUBLIC;
+
+
+-- Install the rest into each new database
+\c assemblage
+\ir ASSEMBLAGE_SCHEMA.sql
+
+\c assemblage_test
+\ir ASSEMBLAGE_SCHEMA.sql
+

          
R data/assemblage/migrations/20180314_initial.rb =>  +0 -91
@@ 1,91 0,0 @@ 
-# -*- ruby -*-
-# frozen_string_literal: true
-
-Sequel.migration do
-	up do
-
-		create_table( :repositories ) do
-			primary_key :id
-
-			text :name, null: false, unique: true
-			text :type, null: false
-			text :url, null: false, unique: true
-
-			jsonb :requirements, default: Sequel.pg_jsonb({})
-			jsonb :traits, default: Sequel.pg_jsonb([])
-
-			timestamptz :created_at, default: Sequel.function(:now)
-			timestamptz :updated_at
-			timestamptz :removed_at
-		end
-
-
-		create_table( :connections ) do
-			primary_key :id
-
-			text :type, null: false
-			text :name, null: false
-
-			timestamptz :created_at, default: Sequel.function(:now)
-			timestamptz :updated_at
-			timestamptz :removed_at
-
-			unique [:type, :name]
-			constraint( :valid_connection_type, type: %w[worker publisher] )
-		end
-
-
-		create_table( :events ) do
-			primary_key :id
-
-			text :type, null: false
-			jsonb :data, default: Sequel.pg_jsonb([])
-
-			timestamptz :created_at, default: Sequel.function(:now)
-			timestamptz :updated_at
-			timestamptz :removed_at
-
-			foreign_key :repository_id, :repositories, null: false,
-				on_delete: :cascade
-			foreign_key :via_connection_id, :connections, null: false
-		end
-
-
-		create_table( :assemblies ) do
-			primary_key :id
-
-			text :name
-
-			timestamptz :created_at, default: Sequel.function(:now)
-			timestamptz :updated_at
-			timestamptz :finished_at
-
-			foreign_key :via_connection_id, :connections, null: false
-			foreign_key :triggering_event_id, :events, null: false,
-				on_delete: :cascade
-		end
-
-
-		create_table( :assembly_results ) do
-			primary_key :id
-
-			bytea :data
-
-			timestamptz :created_at, default: Sequel.function(:now)
-			timestamptz :updated_at
-
-			foreign_key :assembly_id, :assemblies, null: false
-		end
-
-	end
-
-
-	down do
-		drop_table( :assembly_results, cascade: true )
-		drop_table( :assemblies, cascade: true )
-		drop_table( :events, cascade: true )
-		drop_table( :connections, cascade: true )
-		drop_table( :repositories, cascade: true )
-	end
-end
-

          
A => data/assemblage/sqlunet-post-import.sql +250 -0
@@ 0,0 1,250 @@ 
+-- Additional imports for the wordnet31 database converted from MySQL:
+-- * Foreign key constraints -- pgloader doesn't import them correctly; maybe
+--   because my source is a MyISAM db?
+-- * pgloader doesn't copy views
+
+--
+-- Work around error (have to space it out like this or pgloader tries to run it even 
+-- though it's in a comment):
+--   C R E A T E  I N D E X k_sumoformulas_formula ON sumoformulas (formula)
+--   ERROR:  index row size 6232 exceeds maximum 2712 for index "k_sumoformulas_formula"
+--   HINT:  Values larger than 1/3 of a buffer page cannot be indexed.
+--   Consider a function index of an MD5 hash of the value, or use full text indexing.
+--
+CREATE INDEX k_sumoformulas_formula ON sumoformulas USING GIN (to_tsvector('english', formula));
+CREATE INDEX k_sumoformulas_formula_hash ON sumoformulas (md5(formula));
+
+-- bnc-constrain
+ALTER TABLE bncs ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE bncconvtasks ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE bncimaginfs ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE bncspwrs ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+
+-- fn-constrain
+ALTER TABLE fnannosets ADD FOREIGN KEY (cxnid) REFERENCES fncxns (cxnid);
+ALTER TABLE fnannosets ADD FOREIGN KEY (frameid) REFERENCES fnframes (frameid);
+ALTER TABLE fnannosets ADD FOREIGN KEY (luid) REFERENCES fnlexunits (luid) NOT VALID;
+ALTER TABLE fnannosets ADD FOREIGN KEY (sentenceid) REFERENCES fnsentences (sentenceid);
+ALTER TABLE fncorpuses ADD FOREIGN KEY (luid) REFERENCES fnlexunits (luid) NOT VALID;
+ALTER TABLE fndocuments ADD FOREIGN KEY (corpusid) REFERENCES fncorpuses (corpusid);
+ALTER TABLE fnfegrouprealizations ADD FOREIGN KEY (luid) REFERENCES fnlexunits (luid) NOT VALID;
+ALTER TABLE fnfegrouprealizations_fes ADD FOREIGN KEY (fegrid) REFERENCES fnfegrouprealizations (fegrid);
+ALTER TABLE fnfegrouprealizations_fes ADD FOREIGN KEY (feid) REFERENCES fnfes (feid);
+ALTER TABLE fnfegrouprealizations_fes ADD FOREIGN KEY (fetypeid) REFERENCES fnfetypes (fetypeid);
+ALTER TABLE fnferealizations ADD FOREIGN KEY (fetypeid) REFERENCES fnfetypes (fetypeid);
+ALTER TABLE fnferealizations ADD FOREIGN KEY (luid) REFERENCES fnlexunits (luid);
+ALTER TABLE fnfes ADD FOREIGN KEY (coretypeid) REFERENCES fncoretypes (coretypeid);
+ALTER TABLE fnfes ADD FOREIGN KEY (fetypeid) REFERENCES fnfetypes (fetypeid);
+ALTER TABLE fnfes ADD FOREIGN KEY (frameid) REFERENCES fnframes (frameid);
+ALTER TABLE fnfes_excluded ADD FOREIGN KEY (fe2id) REFERENCES fnfes (feid);
+ALTER TABLE fnfes_excluded ADD FOREIGN KEY (feid) REFERENCES fnfes (feid);
+ALTER TABLE fnfes_required ADD FOREIGN KEY (fe2id) REFERENCES fnfes (feid);
+ALTER TABLE fnfes_required ADD FOREIGN KEY (feid) REFERENCES fnfes (feid);
+ALTER TABLE fnfes_semtypes ADD FOREIGN KEY (feid) REFERENCES fnfes (feid);
+ALTER TABLE fnfes_semtypes ADD FOREIGN KEY (semtypeid) REFERENCES fnsemtypes (semtypeid);
+ALTER TABLE fnframes_related ADD FOREIGN KEY (frame2id) REFERENCES fnframes (frameid);
+ALTER TABLE fnframes_related ADD FOREIGN KEY (frameid) REFERENCES fnframes (frameid);
+ALTER TABLE fnframes_related ADD FOREIGN KEY (relationid) REFERENCES fnframerelations (relationid);
+ALTER TABLE fnframes_semtypes ADD FOREIGN KEY (frameid) REFERENCES fnframes (frameid);
+ALTER TABLE fnframes_semtypes ADD FOREIGN KEY (semtypeid) REFERENCES fnsemtypes (semtypeid);
+ALTER TABLE fngovernors ADD FOREIGN KEY (fnwordid) REFERENCES fnwords (fnwordid);
+ALTER TABLE fngovernors_annosets ADD FOREIGN KEY (annosetid) REFERENCES fnannosets (annosetid);
+ALTER TABLE fngovernors_annosets ADD FOREIGN KEY (governorid) REFERENCES fngovernors (governorid);
+ALTER TABLE fnlabels ADD FOREIGN KEY (feid) REFERENCES fnfes (feid);
+ALTER TABLE fnlabels ADD FOREIGN KEY (labelitypeid) REFERENCES fnlabelitypes (labelitypeid);
+ALTER TABLE fnlabels ADD FOREIGN KEY (labeltypeid) REFERENCES fnlabeltypes (labeltypeid);
+ALTER TABLE fnlabels ADD FOREIGN KEY (layerid) REFERENCES fnlayers (layerid);
+ALTER TABLE fnlayers ADD FOREIGN KEY (annosetid) REFERENCES fnannosets (annosetid);
+ALTER TABLE fnlayers ADD FOREIGN KEY (layertypeid) REFERENCES fnlayertypes (layertypeid);
+ALTER TABLE fnlexemes ADD FOREIGN KEY (fnwordid) REFERENCES fnwords (fnwordid);
+ALTER TABLE fnlexemes ADD FOREIGN KEY (luid) REFERENCES fnlexunits (luid);
+ALTER TABLE fnlexemes ADD FOREIGN KEY (posid) REFERENCES fnposes (posid);
+ALTER TABLE fnlexunits ADD FOREIGN KEY (frameid) REFERENCES fnframes (frameid);
+ALTER TABLE fnlexunits ADD FOREIGN KEY (incorporatedfeid) REFERENCES fnfes (feid);
+ALTER TABLE fnlexunits ADD FOREIGN KEY (posid) REFERENCES fnposes (posid);
+ALTER TABLE fnlexunits_governors ADD FOREIGN KEY (governorid) REFERENCES fngovernors (governorid);
+ALTER TABLE fnlexunits_governors ADD FOREIGN KEY (luid) REFERENCES fnlexunits (luid);
+ALTER TABLE fnlexunits_semtypes ADD FOREIGN KEY (luid) REFERENCES fnlexunits (luid);
+ALTER TABLE fnlexunits_semtypes ADD FOREIGN KEY (semtypeid) REFERENCES fnsemtypes (semtypeid);
+ALTER TABLE fnpatterns ADD FOREIGN KEY (fegrid) REFERENCES fnfegrouprealizations (fegrid);
+ALTER TABLE fnpatterns_annosets ADD FOREIGN KEY (annosetid) REFERENCES fnannosets (annosetid);
+ALTER TABLE fnpatterns_annosets ADD FOREIGN KEY (patternid) REFERENCES fnpatterns (patternid);
+ALTER TABLE fnpatterns_valenceunits ADD FOREIGN KEY (feid) REFERENCES fnfes (feid);
+ALTER TABLE fnpatterns_valenceunits ADD FOREIGN KEY (fetypeid) REFERENCES fnfetypes (fetypeid);
+ALTER TABLE fnpatterns_valenceunits ADD FOREIGN KEY (patternid) REFERENCES fnpatterns (patternid);
+ALTER TABLE fnpatterns_valenceunits ADD FOREIGN KEY (vuid) REFERENCES fnvalenceunits (vuid);
+ALTER TABLE fnsemtypes_supers ADD FOREIGN KEY (semtypeid) REFERENCES fnsemtypes (semtypeid);
+ALTER TABLE fnsemtypes_supers ADD FOREIGN KEY (supersemtypeid) REFERENCES fnsemtypes (semtypeid);
+ALTER TABLE fnsentences ADD FOREIGN KEY (corpusid) REFERENCES fncorpuses (corpusid);
+ALTER TABLE fnsentences ADD FOREIGN KEY (documentid) REFERENCES fndocuments (documentid);
+ALTER TABLE fnsubcorpuses ADD FOREIGN KEY (luid) REFERENCES fnlexunits (luid);
+ALTER TABLE fnsubcorpuses_sentences ADD FOREIGN KEY (sentenceid) REFERENCES fnsentences (sentenceid);
+ALTER TABLE fnsubcorpuses_sentences ADD FOREIGN KEY (subcorpusid) REFERENCES fnsubcorpuses (subcorpusid);
+ALTER TABLE fnvalenceunits ADD FOREIGN KEY (ferid) REFERENCES fnferealizations (ferid);
+ALTER TABLE fnvalenceunits ADD FOREIGN KEY (gfid) REFERENCES fngftypes (gfid);
+ALTER TABLE fnvalenceunits ADD FOREIGN KEY (ptid) REFERENCES fnpttypes (ptid);
+ALTER TABLE fnvalenceunits_annosets ADD FOREIGN KEY (annosetid) REFERENCES fnannosets (annosetid);
+ALTER TABLE fnvalenceunits_annosets ADD FOREIGN KEY (vuid) REFERENCES fnvalenceunits (vuid);
+ALTER TABLE fnwords ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+
+-- glf-constrain
+ALTER TABLE glfs ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE glfs ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+
+-- ilfwn-constrain
+ALTER TABLE ilfs ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE ilfs ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+
+-- legacy-constrain
+ALTER TABLE sensemaps3031 ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE sensemaps3031 ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE senses20 ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE senses21 ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE senses30 ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE sensemaps2021 ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE sensemaps2130 ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE synsetmaps2031 ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE synsetmaps2131 ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE synsetmaps3031 ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+
+-- pb-constrain
+ALTER TABLE pbargs ADD FOREIGN KEY (exampleid) REFERENCES pbexamples (exampleid);
+ALTER TABLE pbargs ADD FOREIGN KEY (func) REFERENCES pbfuncs (func);
+ALTER TABLE pbargs ADD FOREIGN KEY (narg) REFERENCES pbargns (narg);
+ALTER TABLE pbexamples ADD FOREIGN KEY (aspect) REFERENCES pbaspects (aspect);
+ALTER TABLE pbexamples ADD FOREIGN KEY (form) REFERENCES pbforms (form);
+ALTER TABLE pbexamples ADD FOREIGN KEY (person) REFERENCES pbpersons (person);
+ALTER TABLE pbexamples ADD FOREIGN KEY (rolesetid) REFERENCES pbrolesets (rolesetid);
+ALTER TABLE pbexamples ADD FOREIGN KEY (tense) REFERENCES pbtenses (tense);
+ALTER TABLE pbexamples ADD FOREIGN KEY (voice) REFERENCES pbvoices (voice);
+ALTER TABLE pbrels ADD FOREIGN KEY (exampleid) REFERENCES pbexamples (exampleid);
+ALTER TABLE pbroles ADD FOREIGN KEY (func) REFERENCES pbfuncs (func);
+ALTER TABLE pbroles ADD FOREIGN KEY (rolesetid) REFERENCES pbrolesets (rolesetid);
+ALTER TABLE pbrolesets ADD FOREIGN KEY (pbwordid) REFERENCES pbwords (pbwordid);
+ALTER TABLE pbvnmaps ADD FOREIGN KEY (roleid) REFERENCES pbroles (roleid);
+ALTER TABLE pbvnmaps ADD FOREIGN KEY (rolesetid) REFERENCES pbrolesets (rolesetid);
+ALTER TABLE pbvnmaps ADD FOREIGN KEY (vnclassid) REFERENCES vnclasses (classid);
+ALTER TABLE pbvnmaps ADD FOREIGN KEY (vnroleid) REFERENCES vnroles (roleid);
+ALTER TABLE pbwords ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+
+-- pm-constrain
+ALTER TABLE predicatematrix ADD FOREIGN KEY (fnfeid) REFERENCES fnfes (feid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (fnframeid) REFERENCES fnframes (frameid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (fnluid) REFERENCES fnlexunits (luid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (fnwordid) REFERENCES fnwords (fnwordid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (pbrolesetid) REFERENCES pbrolesets (rolesetid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (pbwordid) REFERENCES pbwords (pbwordid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (sumoid) REFERENCES sumoterms (sumoid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (vnclassid) REFERENCES vnclasses (classid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (vnroleid) REFERENCES vnroles (roleid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (vnwordid) REFERENCES vnwords (vnwordid);
+ALTER TABLE predicatematrix ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+
+-- sumo-constrain
+ALTER TABLE sumoformulas ADD FOREIGN KEY (sumofileid) REFERENCES sumofiles (sumofileid);
+ALTER TABLE sumomaps ADD FOREIGN KEY (sumoid) REFERENCES sumoterms (sumoid);
+ALTER TABLE sumomaps ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE sumoparsemaps ADD FOREIGN KEY (formulaid) REFERENCES sumoformulas (formulaid);
+ALTER TABLE sumoparsemaps ADD FOREIGN KEY (sumoid) REFERENCES sumoterms (sumoid);
+ALTER TABLE sumotermattrs ADD FOREIGN KEY (sumoid) REFERENCES sumoterms (sumoid);
+
+-- vn-constrain
+ALTER TABLE vnclassmembers ADD FOREIGN KEY (classid) REFERENCES vnclasses (classid);
+ALTER TABLE vnclassmembers ADD FOREIGN KEY (vnwordid) REFERENCES vnwords (vnwordid);
+ALTER TABLE vnclassmembersenses ADD FOREIGN KEY (classid) REFERENCES vnclasses (classid);
+ALTER TABLE vnclassmembersenses ADD FOREIGN KEY (vnwordid) REFERENCES vnwords (vnwordid);
+ALTER TABLE vnexamplemaps ADD FOREIGN KEY (exampleid) REFERENCES vnexamples (exampleid);
+ALTER TABLE vnexamplemaps ADD FOREIGN KEY (frameid) REFERENCES vnframes (frameid);
+ALTER TABLE vnframes ADD FOREIGN KEY (nameid) REFERENCES vnframenames (nameid);
+ALTER TABLE vnframes ADD FOREIGN KEY (semanticsid) REFERENCES vnsemantics (semanticsid);
+ALTER TABLE vnframes ADD FOREIGN KEY (subnameid) REFERENCES vnframesubnames (subnameid);
+ALTER TABLE vnframes ADD FOREIGN KEY (syntaxid) REFERENCES vnsyntaxes (syntaxid);
+ALTER TABLE vnframemaps ADD FOREIGN KEY (classid) REFERENCES vnclasses (classid);
+ALTER TABLE vnframemaps ADD FOREIGN KEY (frameid) REFERENCES vnframes (frameid);
+ALTER TABLE vngroupingmaps ADD FOREIGN KEY (classid) REFERENCES vnclasses (classid);
+ALTER TABLE vngroupingmaps ADD FOREIGN KEY (groupingid) REFERENCES vngroupings (groupingid);
+ALTER TABLE vngroupingmaps ADD FOREIGN KEY (vnwordid) REFERENCES vnwords (vnwordid);
+ALTER TABLE vnpredicatemaps ADD FOREIGN KEY (predid) REFERENCES vnpredicates (predid);
+ALTER TABLE vnpredicatemaps ADD FOREIGN KEY (semanticsid) REFERENCES vnsemantics (semanticsid);
+ALTER TABLE vnroles ADD FOREIGN KEY (restrsid) REFERENCES vnrestrs (restrsid);
+ALTER TABLE vnroles ADD FOREIGN KEY (roletypeid) REFERENCES vnroletypes (roletypeid);
+ALTER TABLE vnrolemaps ADD FOREIGN KEY (classid) REFERENCES vnclasses (classid);
+ALTER TABLE vnrolemaps ADD FOREIGN KEY (roleid) REFERENCES vnroles (roleid);
+ALTER TABLE vnwords ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+
+-- wn-constrain
+ALTER TABLE adjpositions ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE adjpositions ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE casedwords ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE lexlinks ADD FOREIGN KEY (linkid) REFERENCES linktypes (linkid);
+ALTER TABLE lexlinks ADD FOREIGN KEY (synset1id) REFERENCES synsets (synsetid);
+ALTER TABLE lexlinks ADD FOREIGN KEY (synset2id) REFERENCES synsets (synsetid);
+ALTER TABLE lexlinks ADD FOREIGN KEY (word1id) REFERENCES words (wordid);
+ALTER TABLE lexlinks ADD FOREIGN KEY (word2id) REFERENCES words (wordid);
+ALTER TABLE morphmaps ADD FOREIGN KEY (morphid) REFERENCES morphs (morphid);
+ALTER TABLE morphmaps ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE samples ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE semlinks ADD FOREIGN KEY (linkid) REFERENCES linktypes (linkid);
+ALTER TABLE semlinks ADD FOREIGN KEY (synset1id) REFERENCES synsets (synsetid);
+ALTER TABLE semlinks ADD FOREIGN KEY (synset2id) REFERENCES synsets (synsetid);
+ALTER TABLE senses ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE senses ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE synsets ADD FOREIGN KEY (lexdomainid) REFERENCES lexdomains (lexdomainid);
+ALTER TABLE vframemaps ADD FOREIGN KEY (frameid) REFERENCES vframes (frameid);
+ALTER TABLE vframemaps ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE vframemaps ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+ALTER TABLE vframesentencemaps ADD FOREIGN KEY (sentenceid) REFERENCES vframesentences (sentenceid);
+ALTER TABLE vframesentencemaps ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE vframesentencemaps ADD FOREIGN KEY (wordid) REFERENCES words (wordid);
+
+-- xwn-constrain
+ALTER TABLE xwnparselfts ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+ALTER TABLE xwnwsds ADD FOREIGN KEY (synsetid) REFERENCES synsets (synsetid);
+
+-- legacy-views
+CREATE OR REPLACE VIEW legacy20
+  AS SELECT s.wordid, lemma, m.quality, s.sensekey AS ssensekey, d.sensekey AS dsensekey, m.srcsynsetid AS ssynsetid, m.synsetid AS dsynsetid, d.senseid AS dsenseid, d.sensenum, d.lexid, d.tagcount, definition FROM senses20 AS s INNER JOIN sensemaps2021 m ON s.wordid = m.wordid AND s.synsetid = m.srcsynsetid INNER JOIN senses AS d ON m.wordid = d.wordid AND m.synsetid = d.synsetid LEFT JOIN synsets AS y ON d.synsetid = y.synsetid LEFT JOIN words AS w ON d.wordid = w.wordid;
+CREATE OR REPLACE VIEW legacy21
+  AS SELECT s.wordid, lemma, m.quality, s.sensekey AS ssensekey, d.sensekey AS dsensekey, m.srcsynsetid AS ssynsetid, m.synsetid AS dsynsetid, d.senseid AS dsenseid, d.sensenum, d.lexid, d.tagcount, definition FROM senses21 AS s INNER JOIN sensemaps2130 m ON s.wordid = m.wordid AND s.synsetid = m.srcsynsetid INNER JOIN senses AS d ON m.wordid = d.wordid AND m.synsetid = d.synsetid LEFT JOIN synsets AS y ON d.synsetid = y.synsetid LEFT JOIN words AS w ON d.wordid = w.wordid;
+CREATE OR REPLACE VIEW sensemaps2030
+  AS SELECT m1.wordid, m1.srcsynsetid, m2.synsetid, (m1.quality*m2.quality) AS quality FROM sensemaps2021 m1 INNER JOIN sensemaps2130 m2 ON m1.wordid = m2.wordid AND m1.synsetid = m2.srcsynsetid;
+CREATE OR REPLACE VIEW synsets20
+  AS SELECT DISTINCT srcsynsetid FROM sensemaps2021;
+CREATE OR REPLACE VIEW synsets21
+  AS SELECT DISTINCT srcsynsetid FROM sensemaps2130;
+
+-- sumo-views
+CREATE OR REPLACE VIEW sumoarg0maps AS SELECT mapid,sumoid,formulaid FROM sumoparsemaps WHERE sumoparsetype = 'a' AND argnum = 0;
+CREATE OR REPLACE VIEW sumoarg1maps AS SELECT mapid,sumoid,formulaid FROM sumoparsemaps WHERE sumoparsetype = 'a' AND argnum = 1;
+CREATE OR REPLACE VIEW sumoarg2maps AS SELECT mapid,sumoid,formulaid FROM sumoparsemaps WHERE sumoparsetype = 'a' AND argnum = 2;
+CREATE OR REPLACE VIEW sumoarg3maps AS SELECT mapid,sumoid,formulaid FROM sumoparsemaps WHERE sumoparsetype = 'a' AND argnum = 3;
+CREATE OR REPLACE VIEW sumodisjointformulas AS SELECT formulaid FROM sumoarg0maps WHERE sumoid = 4;
+CREATE OR REPLACE VIEW sumodomainformulas AS SELECT formulaid FROM sumoarg0maps WHERE sumoid = 5;
+CREATE OR REPLACE VIEW sumoinstanceformulas AS SELECT formulaid FROM sumoarg0maps WHERE sumoid = 3;
+CREATE OR REPLACE VIEW sumopartitionformulas AS SELECT formulaid FROM sumoarg0maps WHERE sumoid = 6;
+CREATE OR REPLACE VIEW sumorules AS SELECT formulaid,formula,sumofileid FROM sumoformulas INNER JOIN sumoparsemaps USING (formulaid) WHERE argnum = 0 AND (sumoid = 13 OR sumoid = 14);
+CREATE OR REPLACE VIEW sumosubclassformulas AS SELECT formulaid FROM sumoarg0maps WHERE sumoid = 1;
+CREATE OR REPLACE VIEW sumosubrelationformulas AS SELECT formulaid FROM sumoarg0maps WHERE sumoid = 2;
+CREATE OR REPLACE VIEW sumodisjoints AS SELECT a1.sumoid AS sumodisjoint1id, a2.sumoid AS sumodisjoint2id,formulaid FROM sumodisjointformulas AS a0 INNER JOIN sumoarg1maps AS a1 USING (formulaid) INNER JOIN sumoarg2maps AS a2 USING (formulaid);
+CREATE OR REPLACE VIEW sumoinstances AS SELECT a1.sumoid AS sumoinstanceid, a2.sumoid AS sumoclassid,formulaid FROM sumoinstanceformulas AS a0 INNER JOIN sumoarg1maps AS a1 USING (formulaid) INNER JOIN sumoarg2maps AS a2 USING (formulaid);
+CREATE OR REPLACE VIEW sumorelations AS SELECT sumoid FROM sumoarg0maps INNER JOIN sumoterms USING (sumoid) GROUP BY sumoid;
+CREATE OR REPLACE VIEW sumosubclasses AS SELECT a1.sumoid AS sumoclassid, a2.sumoid AS sumosuperclassid,formulaid FROM sumosubclassformulas AS a0 INNER JOIN sumoarg1maps AS a1 USING (formulaid) INNER JOIN sumoarg2maps AS a2 USING (formulaid);
+CREATE OR REPLACE VIEW sumosubrelations AS SELECT a1.sumoid AS sumorelationid, a2.sumoid AS sumosuperrelationid,formulaid FROM sumosubrelationformulas AS a0 INNER JOIN sumoarg1maps AS a1 USING (formulaid) INNER JOIN sumoarg2maps AS a2 USING (formulaid);
+
+-- wn-views
+CREATE OR REPLACE VIEW samplesets AS
+  SELECT synsetid, string_agg(DISTINCT sample, '|') AS sampleset
+  FROM samples
+  GROUP BY synsetid;
+CREATE OR REPLACE VIEW sensesXsynsets AS SELECT * FROM senses INNER JOIN synsets USING (synsetid);
+CREATE OR REPLACE VIEW wordsXsenses AS SELECT * FROM words INNER JOIN senses USING (wordid);
+CREATE OR REPLACE VIEW wordsXsensesXsynsets AS SELECT * FROM words INNER JOIN senses USING (wordid) INNER JOIN synsets USING (synsetid);
+CREATE OR REPLACE VIEW adjectiveswithpositions AS SELECT * FROM senses INNER JOIN adjpositions USING (wordid,synsetid) LEFT JOIN words USING (wordid) LEFT JOIN synsets USING (synsetid);
+CREATE OR REPLACE VIEW dict AS SELECT * FROM words LEFT JOIN senses s USING (wordid) LEFT JOIN casedwords USING (wordid,casedwordid) LEFT JOIN synsets USING (synsetid) LEFT JOIN samplesets USING (synsetid);
+CREATE OR REPLACE VIEW morphology AS SELECT * FROM words INNER JOIN morphmaps USING (wordid) INNER JOIN morphs USING (morphid);
+CREATE OR REPLACE VIEW sensesXlexlinksXsenses AS SELECT linkid,s.synsetid AS ssynsetid,s.wordid AS swordid,s.senseid AS ssenseid,s.casedwordid AS scasedwordid,s.sensenum AS ssensenum,s.lexid AS slexid,s.tagcount AS stagcount,s.sensekey AS ssensekey,s.pos AS spos,s.lexdomainid AS slexdomainid,s.definition AS sdefinition,d.synsetid AS dsynsetid,d.wordid AS dwordid,d.senseid AS dsenseid,d.casedwordid AS dcasedwordid,d.sensenum AS dsensenum,d.lexid AS dlexid,d.tagcount AS dtagcount,d.sensekey AS dsensekey,d.pos AS dpos,d.lexdomainid AS dlexdomainid,d.definition AS ddefinition FROM sensesXsynsets AS s INNER JOIN lexlinks AS l ON s.synsetid = l.synset1id AND s.wordid = l.word1id INNER JOIN sensesXsynsets AS d ON l.synset2id = d.synsetid AND l.word2id = d.wordid;
+CREATE OR REPLACE VIEW sensesXsemlinksXsenses AS SELECT linkid,s.synsetid AS ssynsetid,s.wordid AS swordid,s.senseid AS ssenseid,s.casedwordid AS scasedwordid,s.sensenum AS ssensenum,s.lexid AS slexid,s.tagcount AS stagcount,s.sensekey AS ssensekey,s.pos AS spos,s.lexdomainid AS slexdomainid,s.definition AS sdefinition,d.synsetid AS dsynsetid,d.wordid AS dwordid,d.senseid AS dsenseid,d.casedwordid AS dcasedwordid,d.sensenum AS dsensenum,d.lexid AS dlexid,d.tagcount AS dtagcount,d.sensekey AS dsensekey,d.pos AS dpos,d.lexdomainid AS dlexdomainid,d.definition AS ddefinition FROM sensesXsynsets AS s INNER JOIN semlinks AS l ON s.synsetid = l.synset1id INNER JOIN sensesXsynsets AS d ON l.synset2id = d.synsetid;
+CREATE OR REPLACE VIEW synsetsXsemlinksXsynsets AS SELECT linkid,s.synsetid AS ssynsetid,s.definition AS sdefinition,d.synsetid AS dsynsetid,d.definition AS ddefinition FROM synsets AS s INNER JOIN semlinks AS l ON s.synsetid = l.synset1id INNER JOIN synsets AS d ON l.synset2id = d.synsetid;
+CREATE OR REPLACE VIEW verbswithframes AS SELECT * FROM senses INNER JOIN vframemaps USING (wordid,synsetid) INNER JOIN vframes USING (frameid) LEFT JOIN words USING (wordid) LEFT JOIN synsets USING (synsetid);
+
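
A note on the index workaround at the top of this file: an expression index on
md5(formula) is only used when a query compares the same expression. A hedged
Sequel example, assuming DB is a Sequel::Database connected to this database
and with an invented formula value:

    require 'digest'

    formula_text = '(instance Foo Bar)'  # invented example value
    DB[ :sumoformulas ].
        where( Sequel.function(:md5, :formula) => Digest::MD5.hexdigest(formula_text) ).
        first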

          
M gem.deps.rb +2 -0
@@ 11,9 11,11 @@ gem 'pg', "~> 1.1"
 gem 'pluggability', "~> 0.6"
 gem 'rb-readline', "~> 0.5"
 gem 'sequel', "~> 5.6"
+gem 'sequel-inline_schema', "~> 0.0"
 gem 'state_machines', "~> 0.5"
 gem 'tty-prompt', "~> 0.18"
 gem 'tty-table', "~> 0.10"
+gem 'tty-command', "~> 0.9"
 gem 'uuid', "~> 2.3"
 
 group :development do

          
M lib/assemblage.rb +11 -5
@@ 46,16 46,11 @@ module Assemblage
 
 	# Autoload subordinate modules
 	autoload :AssemblyBuilder, 'assemblage/assembly_builder'
-	autoload :AssemblyResult, 'assemblage/assembly_result'
 	autoload :Auth, 'assemblage/auth'
 	autoload :CLI, 'assemblage/cli'
 	autoload :Client, 'assemblage/client'
-	autoload :Connection, 'assemblage/connection'
-	autoload :DbObject, 'assemblage/db_object'
-	autoload :Event, 'assemblage/event'
 	autoload :Protocol, 'assemblage/protocol'
 	autoload :Publisher, 'assemblage/publisher'
-	autoload :Repository, 'assemblage/repository'
 	autoload :Server, 'assemblage/server'
 	autoload :VCSStrategy, 'assemblage/vcs_strategy'
 	autoload :Worker, 'assemblage/worker'

          
@@ 63,6 58,15 @@ module Assemblage
 	require 'assemblage/mixins'
 	extend Assemblage::MethodUtilities
 
+
+	require 'assemblage/db_object'
+	Assemblage::DbObject.register_model( 'assemblage/assembly' )
+	Assemblage::DbObject.register_model( 'assemblage/assembly_result' )
+	Assemblage::DbObject.register_model( 'assemblage/connection' )
+	Assemblage::DbObject.register_model( 'assemblage/event' )
+	Assemblage::DbObject.register_model( 'assemblage/repository' )
+
+
 	##
 	# An Array of callbacks to be run after the config is loaded
 	singleton_attr_reader :after_configure_hooks

          
@@ 165,6 169,8 @@ module Assemblage
 	def self::uuidgen
 		return @uuidgen ||= UUID.new
 	end
+	singleton_method_alias :uuid, :uuidgen
+
 
 end # module Assemblage
 

          
A => lib/assemblage/assembly.rb +101 -0
@@ 0,0 1,101 @@ 
+# -*- ruby -*-
+# frozen_string_literal: true
+
+require 'assemblage' unless defined?( Assemblage )
+require 'assemblage/db_object'
+
+
+
+class Assemblage::Assembly < Assemblage::DbObject( :assemblies )
+
+
+	# The valid #status values
+	VALID_STATUSES = %w[new in_progress succeeded failed]
+
+
+	# Maintain the timestamp fields automatically
+	plugin :timestamps
+
+
+	set_schema do
+		primary_key :id
+
+		uuid :uuid, unique: true
+		text :assemblage_version, null: false
+		assembly_status :status, null: false
+		text :log
+
+		timestamptz :created_at, default: Sequel.function(:now)
+		timestamptz :updated_at
+		timestamptz :finished_at
+
+		foreign_key :creating_worker_id, :connections, null: false
+		foreign_key :triggering_event_id, :events, null: false,
+			on_delete: :cascade
+	end
+
+
+	##
+	# The connection of the worker that created this Assembly
+	many_to_one :creating_worker, class: 'Assemblage::Connection' do |ds|
+		ds.workers
+	end
+
+	##
+	# The event which triggered the building of this Assembly
+	many_to_one :triggering_event, class: 'Assemblage::Event'
+
+
+	dataset_module do
+
+		### Limit the results to assemblies which have finished building.
+		def finished
+			return self.exclude( finished_at: nil )
+		end
+
+
+		### Limit the results to assemblies which have not finished building.
+		def unfinished
+			return self.where( finished_at: nil )
+		end
+
+	end
+
+
+	### Create the status enum before the table is created.
+	def self::before_create_table( * )
+		self.db.create_enum( :assembly_status, VALID_STATUSES )
+	end
+
+
+	### Set some defaults for new objects.
+	def initialize( * )
+		super
+
+		self[ :status ] ||= 'new'
+		self[ :uuid ]   ||= Assemblage.uuid.generate
+	end
+
+
+
+	### Ensure the model object's values fall within its constraints.
+	def validate
+		self.validate_uuid
+		self.validate_status
+	end
+
+
+	### Ensure that the assembly's UUID is valid.
+	def validate_uuid
+		self.validates_presence( :uuid )
+		self.validates_format( /\A\h{8}(-\h{4}){3}-\h{12}\z/, :uuid )
+	end
+
+
+	### Ensure the assembly's status is valid.
+	def validate_status
+		self.validates_presence( :status )
+		self.validates_includes( VALID_STATUSES, :status )
+	end
+
+end # class Assemblage::Assembly
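
For orientation, a brief usage sketch of the model defined above. The
foreign-key values are invented; the defaults and validations come straight
from the #initialize and #validate methods in this hunk:

    assembly = Assemblage::Assembly.new(
        assemblage_version:  Assemblage::VERSION,
        creating_worker_id:  1,   # invented FK values, for illustration only
        triggering_event_id: 1
    )

    assembly.status  # => "new" (set by #initialize)
    assembly.uuid    # => a freshly generated UUID string
    assembly.valid?  # => runs validate_uuid and validate_status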

          
M lib/assemblage/assembly_builder.rb +195 -39
@@ 1,8 1,12 @@ 
 # -*- ruby -*-
 # frozen_string_literal: true
 
+require 'fiber'
 require 'tmpdir'
+require 'stringio'
 require 'configurability'
+require 'loggability'
+require 'tty-command'
 
 require 'assemblage' unless defined?( Assemblage )
 

          
@@ 10,21 14,37 @@ require 'assemblage' unless defined?( As
 
 # An object that provides the logic for how to build an assembly.
 class Assemblage::AssemblyBuilder
-	extend Configurability
+	extend Configurability,
+		Loggability
 
 
 	# The stages of the assembly run by the builder.
 	ASSEMBLY_STAGES = %i[
 		clone_repo
-		prep_assembly
-		execute_assembly
-		wait_for_completion
+		make_build_plan
+		execute_assembly_scripts
 		gather_results
 		clean_up
 	]
 
+	# An Array of valid statuses as Symbols
+	VALID_STATUSES = %i[ in_progress succeeded failed ]
 
-	configurability( 'assemblage/builder' ) do
+	# A list of glob patterns to use when searching for assembly script directories
+	SCRIPT_PATH = [
+		"%<rundir>s/scripts/__all__",
+		"%<rundir>s/scripts/%<trait>s/__all__",
+		"%<rundir>s/scripts/%<trait>s/%<event>s",
+		"%<repo>s/.assembly/%<event>s",
+	]
+
+
+	# Loggability API
+	log_to :assemblage
+
+
+	# Configurability API
+	configurability( 'assemblage.builder' ) do
 
 		##
 		# The directory the build does its work in

          
@@ 33,15 53,25 @@ class Assemblage::AssemblyBuilder
 	end
 
 
-	### Create a new AssemblyBuilder from the specified +assembly_info+, which
-	### should be a Hash of assembly infomation such as that created by
-	### Assembly#to_hash.
-	def initialize( assembly_info )
-		@id         = assembly_info[:id]
-		@name       = assembly_info[:name]
-		@repository = assembly_info[:repository]
+	### Create a new AssemblyBuilder that will use assembly scripts from the specified
+	### +directory+ to build the assembly with the specified +name+, +repository+,
+	### and +revision+.
+	def initialize( directory, assembly_id:, name:, repository:, revision: )
+		@id               = Assemblage.uuidgen.generate
 
-		@fiber      = nil
+		@assembly_id      = assembly_id
+		@name             = name
+		@revision         = revision
+		@repository       = repository
+
+		@fiber            = nil
+		@current_stage    = nil
+		@build_log        = StringIO.new( String.new('', encoding: 'utf-8'), 'w' )
+		@build_log_reader = StringIO.new( @build_log.string, 'r' )
+		@plan             = {}
+		@script_results   = []
+
+		@work_dir         = nil
 	end
 
 

          
@@ 50,11 80,15 @@ class Assemblage::AssemblyBuilder
 	######
 
 	##
-	# The ID of the assembly the builder is working on
+	# The unique ID of the builder instance
 	attr_reader :id
 
 	##
-	# The name of the assembly the builder is working on
+	# The ID of the assembly the builder is working on
+	attr_reader :assembly_id
+
+	##
+	# The name of the event that triggered the assembly the builder is working on
 	attr_reader :name
 
 	##

          
@@ 63,16 97,51 @@ class Assemblage::AssemblyBuilder
 	attr_reader :repository
 
 	##
+	# The revision that should be built
+	attr_reader :revision
+
+	##
 	# The Fiber which is running the build
 	attr_reader :fiber
 
+	##
+	# The IO writer for the setup/teardown log of the builder
+	attr_reader :build_log
+
+	##
+	# The IO reader for the setup/teardown log of the builder
+	attr_reader :build_log_reader
+
+	##
+	# The execution plan as an ordered list of scripts that will be run
+	attr_reader :plan
+
+	##
+	# The Array of script result Hashes accumulated so far
+	attr_reader :script_results
+
+	##
+	# The stage the builder is currently working on
+	attr_accessor :current_stage
+
+	##
+	# The working directory for this build
+	attr_reader :work_dir
+
 
 	### Start building the assembly.
 	def start
+		@work_dir = Dir.mktmpdir( [self.name, "assemblies"], self.class.work_dir )
 		@fiber = Fiber.new( &self.method(:build) )
 	end
 
 
+	### Returns +true+ if the builder has been started and is still running.
+	def running?
+		return self.fiber&.alive?
+	end
+
+
 	### Resume building the assembly.
 	def resume
 		return self.fiber.resume

          
@@ 82,67 151,154 @@ class Assemblage::AssemblyBuilder
 	### Build the assembly, yielding control to the worker on long-running
 	### stages.
 	def build
-		return ASSEMBLY_STAGES.reduce( {status: nil} ) do |result, stage_name|
-			self.fiber.yield( nil )
-			self.build_stage( stage_name, result )
+		ASSEMBLY_STAGES.each do |stage_name|
+			Fiber.yield
+			self.build_stage( stage_name )
 		end
 	end
 
 
+	### Return an incremental result hash to record the progress of the builder.
+	def in_progress_result
+		return {
+			id: self.assembly_id,
+			builder_id: self.id,
+			version: Assemblage::VERSION,
+			log: self.build_log_reader.read,
+			current_stage: self.current_stage
+		}
+	end
+
+
+	### Return a complete result hash to record the results of the builder.
+	def final_result
+		self.build_log_reader.rewind
+
+		return {
+			id: self.assembly_id,
+			builder_id: self.id,
+			version: Assemblage::VERSION,
+			log: self.build_log_reader.read,
+			status: self.script_status,
+			scripts: self.script_results
+		}
+	end
+
+
+	### Derive the status of the build by examining the status of the script results.
+	def script_status
+		return :no_op if self.script_results.empty?
+		return self.script_results.last[:status]
+	end
+
+
 	### Run the specified +stage_name+ of the build and yield control of the fiber
 	### according to its success or failure.
-	def build_stage( stage_name, result )
+	def build_stage( stage_name )
+		self.current_stage = stage_name
 		build_method = self.method( stage_name )
-		return build_method.call( result )
+
+		build_method.call
 	rescue => err
-		self.log.error "%p while running stage %p: %s" % [ err.class, stage_name, err.message ]
-		return result.merge({
-			status: 'failed',
-			stage: stage_name,
-			description: err.message
-		})
+		logmsg = "%p while building the %s stage: %s" % [
+			err.class,
+			self.current_stage,
+			err.full_message( order: :bottom )
+		]
+		self.add_build_log( logmsg, level: :error )
+	end
+
+
+	### Add +message+ to the build log and output it to the system log too.
+	def add_build_log( message, level: :info )
+		self.build_log.puts( message )
+		self.log.public_send( level, message )
+	end
+
+
+	### Make a script result hash for the script of the given +name+ and return it.
+	def make_script_result( name )
+		return {
+			name: name,
+			status: 'in-progress',
+			times: nil,
+			log: nil,
+			artifacts: nil
+		}
 	end
 
 
 	### Builder stage: Clone the assembly's repo.
-	def clone_repo( result )
+	def clone_repo
 		repo_url  = self.repository[ :url ] or raise "No repository URL for this assembly?!"
 		repo_type = self.repository[ :type ] or raise "No repository type set for this assembly."
-		work_dir  = Dir.mktmpdir( [self.name, "assemblies"], self.class.work_dir )
+		rev       = self.revision
 
-		vcs = Assemblage::VCSStrategy.get_subclass( repo_type )
-		vcs.clone( repo_url, work_dir )
-
-		return result.merge( work_dir: work_dir )
+		vcs = Assemblage::VCSStrategy.create( repo_type )
+		self.add_build_log "Cloning %p @ %s into %s using %p" % [ repo_url, rev, work_dir, vcs ]
+		vcs.clone( repo_url, work_dir, rev )
+		self.add_build_log "Clone successful."
 	end
 
 
-	### Builder stage: Set up the working copy of the repo to build the assembly.
-	def prep_assembly( result )
+	### Builder stage: Search the script discovery path for scripts to execute and
+	### add them to the plan.
+	def make_build_plan
+		vars = {
+			rundir: Dir.pwd,
+			event: self.name,
+			repo: self.work_dir,
+		}
+		vars[:trait] = "{%s}" % [ self.repository[:traits].join(',') ]
 
+		scripts = SCRIPT_PATH.each_with_object( {} ) do |pathspec, hash|
+			self.log.debug "  looking for assembly scripts in %s" % [ pathspec ]
+			base = pathspec % vars
+			self.log.debug "  expanded to: %s" % [ base ]
+			pattern = Pathname( base ) + '*'
+			Pathname.glob( pattern ).each do |file|
+				hash[ file.basename ] = file
+			end
+		end
+
+		self.plan.merge!( scripts )
+		self.log.debug "Plan is: %p" % [ self.plan ]
 	end
 
 
-	### Builder stage: Execute the assembly script.
-	def execute_assembly( result )
+	### Builder stage: Execute the assembly scripts in the #plan.
+	def execute_assembly_scripts
+		self.plan.keys.sort.each do |basename|
+			script = self.plan[ basename ]
 
+			unless script.executable?
+				self.add_build_log "  skipping: not executable."
+				next
+			end
+
+			self.add_build_log "Executing %s" % [ script ]
+			cmd = TTY::Command.new
+			cmd.run( script )
+		end
 	end
 
 
 	### Builder stage: Wait for the assembly script to be finished.
-	def wait_for_completion( result )
+	def wait_for_completion
 
 	end
 
 
 	### Builder stage: Gather the results generated by the assembly script.
-	def gather_results( result )
+	def gather_results
 
 	end
 
 
 	### Builder stage: Clean up the repo.
-	def clean_up( result )
+	### TODO: make this optional via the runner, i.e., leave the build around for
+	### troubleshooting?
+	def clean_up
 
 	end
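
The rewritten execute_assembly_scripts stage above runs each script but does not
yet fill in the per-script result hashes that make_script_result builds. A
sketch of what that bookkeeping might look like: TTY::Command#run! and its
result object (out, err, success?, runtime) are real API, but the helper name,
the :null printer choice, and the simplified :times hash are assumptions; the
Cookbook's utime/stime breakdown would need Process.times or similar instead.

    # Hypothetical helper: run one assembly script and record its outcome.
    def run_script( script )
        result = self.make_script_result( script.basename.to_s )

        cmd = TTY::Command.new( printer: :null )  # capture output instead of echoing it
        outcome = cmd.run!( script.to_s )         # run! does not raise on failure

        result[:status] = outcome.success? ? 'succeeded' : 'failed'
        result[:log]    = outcome.out + outcome.err
        result[:times]  = { runtime: outcome.runtime }  # wall-clock seconds only

        self.script_results << result
        return result
    end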
 

          
M lib/assemblage/assembly_result.rb +15 -0
@@ 10,6 10,21 @@ class Assemblage::AssemblyResult < Assem
 	# Maintain the timestamp fields automatically
 	plugin :timestamps
 
+
+	set_schema do
+		primary_key :id
+
+		text :stage, null: false
+		text :log, null: false
+		bytea :data
+
+		timestamptz :created_at, default: Sequel.function(:now)
+		timestamptz :updated_at
+
+		foreign_key :assembly_id, :assemblies, null: false
+	end
+
+
 	#
 	# Associations
 	#

          
M lib/assemblage/auth.rb +5 -7
@@ 27,13 27,13 @@ module Assemblage::Auth
 	CONNECTION_TYPE_KEY = 'type'
 
 	# Regexp for testing connection names for validity
-	CLIENT_NAME_PATTERN = /\A[a-z][\w\-]+\z/i
+	CLIENT_NAME_PATTERN = /\A[a-z](\w+)(-\w+)*\z/
 
 	# The minimum number of characters for a connection name
 	NAME_MIN_LENGTH = 3
 
 	# The maximum number of characters for a connection name
-	NAME_MAX_LENGTH = 35
+	NAME_MAX_LENGTH = 64
 
 
 	# Loggability API -- log to the Assemblage logger

          
@@ 92,10 92,7 @@ module Assemblage::Auth
 				[ self.local_cert_path, cert.public_key ]
 			return cert
 		else
-			cert = self.make_local_cert
-			self.log.warn "No cert_store_dir set: using ephemeral local cert (%s)." %
-				[ cert.public_key ]
-			return cert
+			raise "Can't make a local cert: no local cert directory!"
 		end
 	end
 

          
@@ 109,7 106,8 @@ module Assemblage::Auth
 
 	### Returns +true+ if a local cert has been generated.
 	def self::has_local_cert?
-		return self.local_cert_path && self.local_cert_path.exist?
+		return self.local_cert_path &&
+			self.local_cert_path.exist?
 	end
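
For reference, here is what the tightened CLIENT_NAME_PATTERN now accepts and
rejects: unlike the old pattern it is case-sensitive and only allows single
dashes between word-character segments. The sample names are invented:

    pattern = /\A[a-z](\w+)(-\w+)*\z/

    %w[worker1 freebsd11-builder osx10_14].all? {|name| name.match?(pattern) }  # => true
    %w[1worker Worker-1 worker- a].any? {|name| name.match?(pattern) }          # => false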
 
 

          
M lib/assemblage/cli.rb +1 -1
@@ 209,7 209,7 @@ module Assemblage::CLI
 	### Load the config file using either assemblage-base's config-loader if available, or
 	### fall back to DEFAULT_CONFIG_FILE
 	def self::load_config( global={} )
-		Assemblage.load_config( global[:c] ) if global[:c]
+		Assemblage.load_config( global[:c] ) # if global[:c]
 
 		# Set up the logging formatter
 		Loggability.format_with( :color ) if $stdout.tty?

          
M lib/assemblage/command/client.rb +21 -16
@@ 1,6 1,8 @@ 
 # -*- ruby -*-
 # frozen_string_literal: true
 
+require 'pry'
+
 require 'assemblage/cli' unless defined?( Assemblage::CLI )
 require 'assemblage/client'
 

          
@@ 11,36 13,39 @@ module Assemblage::CLI::ClientCommand
 
 	desc "Start a client shell connected "
 
-	arg :SERVER_URL
-	arg :NAME
 	arg :DIRECTORY, :optional
 
 	command :client do |client|
 
 		client.action do |globals, options, args|
-			url = args.shift or help_now!( "No server URL specified.", 64 )
-			name = args.shift or help_now!( "No name specified.", 64 )
 			directory = Pathname( args.shift || '.' ).expand_path
+			Assemblage.use_run_directory( directory, reload_config: true )
 
 			self.log.debug "Creating a client..."
 			cert = Assemblage::Auth.local_cert
 			server_cert ||= Assemblage::Auth.remote_cert( 'server' )
+			endpoint = server_cert['endpoint'] or
+				raise "Server cert doesn't have an endpoint set."
 
-			client = Assemblage::Client.new( url.to_s, cert, server_cert ) do |type, data, header|
-				prompt.say ""
-			end
-			client.on_message( :worker_registered ) do |data, **headers|
-				self.log.debug "Client successfully registered."
-				client.stop
+			client = Assemblage::Client.new( endpoint, cert, server_cert )
+			@event_thread = Thread.new do
+				client.start( &self.method(:handle_message) )
 			end
-			client.on_message( :error ) do |data, **header|
-				client.handle_error_message( data, **header )
-				client.stop
-			end
-			client.send_message( :register_worker, [name, cert.public_key] )
-			client.start
+
+			Pry.config.prompt_name = "assemblage %s> " % [ endpoint ]
+			Pry.pry( client )
 		end
 	end
 
+
+	###############
+	module_function
+	###############
+
+	### Default callback for the client.
+	def handle_message( type, data, header )
+		$stderr.puts "[%s]: (%p) %p" % [ type, header, data ]
+	end
+
 end # module Assemblage::CLI::ClientCommand
 

          
A => lib/assemblage/command/migrate.rb +201 -0
@@ 0,0 1,201 @@ 
+# -*- ruby -*-
+# frozen_string_literal: true
+
+require 'assemblage'
+require 'assemblage/cli'
+
+
+# Migration command
+module Assemblage::CLI::MigrateCommand
+	extend Assemblage::CLI::Subcommand
+
+
+	desc "Database migration commands."
+	long_desc %{
+		These are commands related to database migration.
+	}
+	command :migrate do |migrate|
+
+		migrate.desc "Override the database connection uri"
+		migrate.flag [:D, :database], type: String, default: Assemblage::DbObject.uri
+
+		migrate.default_command :schema
+
+
+		# schema
+		migrate.desc 'Install any missing tables and run any pending migrations'
+		migrate.command :schema do |cmd|
+			cmd.desc "Migrate to the specified migration target (its name)"
+			cmd.flag [:t, 'target'], type: String
+
+			cmd.action do |globals, options, args|
+				set_database( options[:D] ) if options[:D]
+
+				do_schema( globals, options )
+			end
+		end
+
+
+		migrate.desc 'Show any pending migrations'
+		migrate.command :show do |cmd|
+
+			cmd.desc "Show already-applied migrations too."
+			cmd.switch [:a, 'all']
+
+			cmd.action do |globals, options, args|
+				set_database( options[:D] ) if options[:D]
+
+				migrator = Assemblage::DbObject.migrator
+				self.display_missing_model_classes
+
+				applied, pending = migrator.get_partitioned_migrations
+
+				if options[:a]
+					prompt.say '---' unless applied.empty?
+					self.display_migrations( applied, "\u2713", :green )
+				end
+
+				prompt.say '---' unless pending.empty?
+				self.display_migrations( pending, " " )
+			end
+		end
+
+
+		migrate.desc 'Show any pending migrations, and exit with 0 if there are none or 1 if there are any.'
+		migrate.command :check do |cmd|
+
+			cmd.action do |globals, options, args|
+				set_database( options[:D] ) if options[:D]
+
+				migrator = Assemblage::DbObject.migrator
+
+				missing_model_classes = self.display_missing_model_classes
+				pending = migrator.get_partitioned_migrations[1]
+				self.display_migrations( pending, " " )
+
+				exit_now!( exit_code: 1 ) unless pending.empty? && missing_model_classes.empty?
+
+				puts "%p: No pending migrations." % [ Assemblage::DbObject ]
+			end
+		end
+
+
+		# destroy
+		migrate.desc 'Drop the selected schemas in the database.'
+		migrate.command :destroy do |cmd|
+
+			cmd.action do |globals, options, args|
+				set_database( options[:D] ) if options[:D]
+
+				do_destroy( globals )
+			end
+
+		end
+
+	end
+
+
+	#
+	# Utility methods
+	#
+
+	### Install schemas in the current database, applying options from +globals+ and
+	### +options+.
+	def self::do_schema( globals, options )
+		txn_opts = {}
+		txn_opts[:rollback] = :always if globals[:n]
+		log_level = $VERBOSE || $DEBUG || globals[:d] ? :debug : :fatal
+
+		prompt.say "Installing %p schemas... " % [ Assemblage::DbObject ]
+		Loggability.with_level( log_level ) do
+			Assemblage::DbObject.db.transaction( txn_opts ) do |conn|
+				Assemblage::DbObject.create_all_schemas
+				Assemblage::DbObject.migrate( options[:t] )
+			end
+		end
+		puts "done."
+	end
+
+
+	### Destroy all model database schemas after prompting, applying options from
+	### +global+.
+	def self::do_destroy( globals )
+		txn_opts = {}
+
+		schemas = []
+		if globals[:r]
+			schemas += globals[:r].map {|domain| domain.sub(/^assemblage-/, '') }
+		end
+
+		txn_opts[:rollback] = :always if globals[:n]
+
+		if schemas.empty?
+			prompt.say( "WARNING", color: :red )
+			prompt.say "This will destroy all data the current database."
+
+			if prompt.no?( "Are you sure? " )
+				prompt.say '', "Aborted."
+				exit
+			end
+
+			prompt.say ''
+			prompt.say "Dropping all schemas... "
+			Assemblage::DbObject.db.transaction( txn_opts ) do
+				Assemblage::DbObject.drop_all_schemas
+			end
+			prompt.say "done."
+		else
+			schemas.each do |schema|
+				prompt.say "Dropping any schemas named %p..." % [ schema ]
+				Assemblage::DbObject.db.transaction( txn_opts ) do
+					Assemblage::DbObject.drop_schema( schema, if_exists: true, cascade: true )
+				end
+				prompt.say "done."
+			end
+		end
+	end
+
+
+	### Set the database to the specified +dburi+.
+	def self::set_database( dburi )
+		Assemblage::DbObject.db = Sequel.connect( dburi )
+		prompt.say( "Connected to %s", color: :bold )
+	end
+
+
+	### Output a list of any model classes that are missing from the database.
+	def self::display_missing_model_classes
+		missing_classes = Loggability.with_level( :fatal ) do
+			Assemblage::DbObject.uninstalled_tables
+		end
+
+		if missing_classes.empty?
+			prompt.say "All tables installed."
+		else
+			prompt.say "Missing tables:"
+			missing_classes.sort_by( &:table_name ).each do |klass|
+				prompt.say "- %s (%p)" % [ klass.table_name, klass ]
+			end
+		end
+
+		return missing_classes
+	end
+
+
+	### Show the specified +migrations+ as a bulleted list with the given +sigil+
+	### and +attributes+.
+	def self::display_migrations( migrations, sigil, *attributes )
+		bullet = hl.decorate( sigil, *attributes )
+		migrations.each do |migration|
+			item = "[%s]: {%p} %s (%s)" % [
+				bullet,
+				migration.model_class,
+				migration.description,
+				migration.name
+			]
+			prompt.say( item )
+		end
+	end
+
+end # module Assemblage::CLI::MigrateCommand
+

          
M lib/assemblage/command/publisher.rb +21 -21
@@ 118,27 118,27 @@ module Assemblage::CLI::PublisherCommand
 		end
 
 
-		publisher.desc "Publish an event from a repo."
-		publisher.long_desc <<~END_DESC
-		Publish the specified EVENT
-		END_DESC
-		publisher.arg :URL
-		publisher.arg :PUBLIC_KEY
-		publisher.arg :PUBLISHER_DIRECTORY, :optional
-		publisher.command 'add-server' do |add|
-
-			add.action do |globals, options, args|
-				url = args.shift or help_now!( "Missing the server url." )
-				public_key = args.shift or help_now!( "Missing the server public key." )
-
-				Assemblage.use_run_directory( args.shift )
-
-				prompt.say "Registering with %s..." % [ server_url ]
-				Assemblage::Publisher.register_with_server( name, server_url, server_key )
-
-				prompt.say "done."
-			end
-		end
+		# publisher.desc "Publish an event from a repo."
+		# publisher.long_desc <<~END_DESC
+		# Publish the specified EVENT
+		# END_DESC
+		# publisher.arg :URL
+		# publisher.arg :PUBLIC_KEY
+		# publisher.arg :PUBLISHER_DIRECTORY, :optional
+		# publisher.command 'publish' do |add|
+		#
+		# 	add.action do |globals, options, args|
+		# 		url = args.shift or help_now!( "Missing the server url." )
+		# 		public_key = args.shift or help_now!( "Missing the server public key." )
+		#
+		# 		Assemblage.use_run_directory( args.shift )
+		#
+		# 		prompt.say "Registering with %s..." % [ server_url ]
+		# 		Assemblage::Publisher.register_with_server( name, server_url, server_key )
+		#
+		# 		prompt.say "done."
+		# 	end
+		# end
 
 	end
 

          
M lib/assemblage/command/server.rb +14 -6
@@ 66,9 66,9 @@ module Assemblage::CLI::ServerCommand
 				prompt.say "Generating a server key..."
 				Assemblage::Server.generate_cert
 
-				prompt.say "Creating the assemblies database (%s)..." %
-					[ Assemblage.config.assemblage.db.uri ]
-				Assemblage::Server.create_database
+				# prompt.say "Creating the assemblies database (%s)..." %
+				# 	[ Assemblage.config.assemblage.db.uri ]
+				# Assemblage::Server.create_database
 
 				msg = CREATE_ADVICE % {
 					public_key: Assemblage::Server.public_key,

          
@@ 162,7 162,11 @@ module Assemblage::CLI::ServerCommand
 		Add a new repository named NAME and type TYPE to the server config in SERVER_DIRECTORY.
 		Workers will check out source from the specified URL in response to events
 		published from it.
-
+
+		The -w flag specifies a list of worker name patterns. Workers that match one or more
+		of these patterns will act on events from this repository. Prefix a pattern with `-`
+		to exclude matching workers instead.
+
 		Valid types are:
 		  #{Assemblage::VCSStrategy.available.map(&:to_s).join(', ')}
 		END_DESC

          
@@ 172,10 176,14 @@ module Assemblage::CLI::ServerCommand
 		server.arg :SERVER_DIRECTORY, :optional
 		server.command 'add-repo' do |add|
 
+			add.flag [:w, :workers],
+				type: Array,
+				desc: "Build assemblies on the given workers."
 			add.action do |globals, options, args|
 				name = args.shift or help_now!( "Missing the repo name." )
 				type = args.shift or help_now!( "Missing the repo type." )
 				url = args.shift or help_now!( "Missing the repo URL." )
+				worker_patterns = options[:w] || []
 
 				Assemblage.use_run_directory( args.shift )
 

          
@@ 186,7 194,7 @@ module Assemblage::CLI::ServerCommand
 
 
 				prompt.say "Approving connections from %s..." % [ name ]
-				Assemblage::Server.add_repo( name, type, url )
+				Assemblage::Server.add_repo( name, type, url, *worker_patterns )
 				prompt.say "done."
 			end
 

          
@@ 212,5 220,5 @@ module Assemblage::CLI::ServerCommand
 
 	end
 
-end # module Assemblage::CLI::CreateServer
+end # module Assemblage::CLI::ServerCommand
 

          
A => lib/assemblage/command/simulate.rb +72 -0
@@ 0,0 1,72 @@ 
+# -*- ruby -*-
+# frozen_string_literal: true
+
+require 'socket'
+require 'assemblage/cli' unless defined?( Assemblage::CLI )
+require 'assemblage/assembly_builder'
+
+
+# Assemblage simulation commands
+module Assemblage::CLI::SimulateCommand
+	extend Assemblage::CLI::Subcommand
+
+
+	desc "Simulation commands"
+	command :simulate do |simulate|
+
+		simulate.default_command :event
+
+		simulate.desc 'Simulate an assembly for a particular event'
+		simulate.long_desc <<~END_DESC
+		END_DESC
+		simulate.arg :EVENT_NAME
+		simulate.command :event do |event|
+
+			event.desc "Specify the directory that contains global assembly scripts"
+			event.flag [:d, :directory], type: String,
+				default: Dir.tmpdir
+
+			event.action do |globals, options, args|
+				event_name = args.shift or help_now!( "Requires an EVENT_NAME." )
+				directory = options[:d]
+
+				prompt.say "Simulating a %s event." % [ hl.green(event_name) ]
+				assembly = {
+					assembly_id: Process.clock_gettime( Process::CLOCK_MONOTONIC ).to_i,
+					name: event_name,
+					repository: {
+						url: Pathname.pwd.expand_path,
+						type: 'mercurial', # :TODO: Allow modification; auto-detect?
+						traits: %w[ruby macos]
+					},
+					revision: 'tip', # :TODO: Allow specification; auto-determine based on repo type?
+				}
+
+				builder = Assemblage::AssemblyBuilder.new( directory, assembly )
+				builder.start
+
+				stage = nil
+				while builder.running?
+					res = builder.in_progress_result
+					if stage != res[:current_stage]
+						stage = res[:current_stage]
+						prompt.say ' '
+						prompt.say( headline_string "-- %s ----------" % [ stage ] )
+					end
+					prompt.say '.', newline: false
+					builder.resume
+				end
+
+				res = builder.final_result
+				log = res.delete( :log )
+
+				pp res
+				prompt.say( log )
+			end
+		end
+
+	end
+
+
+end # module Assemblage::CLI::SimulateCommand
+

          
M lib/assemblage/command/worker.rb +1 -1
@@ 182,5 182,5 @@ module Assemblage::CLI::WorkerCommand
 		return traits
 	end
 
-end # module Assemblage::CLI::AddServer
+end # module Assemblage::CLI::WorkerCommand
 

          
M lib/assemblage/connection.rb +28 -1
@@ 8,14 8,41 @@ require 'assemblage/db_object'
 require 'assemblage/protocol'
 
 
-class Assemblage::Connection < Assemblage::DbObject( Sequel[:connections] )
+class Assemblage::Connection < Assemblage::DbObject( :connections )
 
 	# Maintain the timestamp fields automatically
 	plugin :timestamps
 
 
+	set_schema do
+		primary_key :id
+
+		text :type, null: false
+		text :name, null: false
+
+		timestamptz :created_at, default: Sequel.function(:now)
+		timestamptz :updated_at
+		timestamptz :removed_at
+
+		unique [:type, :name]
+		constraint( :valid_connection_type, type: %w[worker publisher] )
+	end
+
+
 	dataset_module do
 
+		### Limit the results to worker connections.
+		def workers
+			return self.where( type: 'worker' )
+		end
+
+
+		### Limit the results to publisher connections.
+		def publishers
+			return self.where( type: 'publisher' )
+		end
+
+
 		### Limit the dataset to connections which have been soft-deleted.
 		def deleted
 			return self.exclude( removed_at: nil )

          
M lib/assemblage/db_object.rb +181 -45
@@ 39,9 39,12 @@ class Assemblage::DbObject
 
 
 	plugin :dirty
+	plugin :force_encoding, 'UTF-8'
+	plugin :inline_schema
+	plugin :inline_migrations
 	plugin :subclasses
-	plugin :force_encoding, 'UTF-8'
 	plugin :validation_helpers
+	# plugin :auto_validations
 
 
 	##

          
@@ 51,19 54,50 @@ class Assemblage::DbObject
 	@registered_models = Set.new
 
 
-	# Configurability API
-	configurability( 'assemblage.db' ) do
+	### Sequel callback -- called after each connection in the pool is established.
+	def self::after_connect( conn, server )
+		self.log.info "Prepping new connection to the %s server" % [ server ]
+
+		# set some connection defaults if it's a PG::Connection
+		if conn.respond_to?( :execute )
+			# You can't use placeholders for a SET apparently
+			conn.execute "SET TIMEZONE TO '#{self.timezone}'"
+		end
+
+	end
+
+
+	### Configurability interface -- Configure the Sequel connection
+	def self::configure( config=nil )
+		config = super
+
+		if self.uri
+			options = self.unmangle_driver_options( config&.to_hash || {} )
+			Loggability[ Assemblage::DbObject ].debug "Connecting to %s with options: %p" %
+				[ self.uri, options ]
 
-		##
-		# :singleton-method:
-		# The URI of the database to connect to
-		setting :uri, default: nil
+			options[:after_connect] = self.method( :after_connect )
+			self.db = Sequel.connect( self.uri, options )
+		end
+
+	end
+
 
-		##
-		# :singleton-method:
-		# A Hash of options to use when creating the database connections
-		setting :options, default: { log_warn_duration: 0.02 }
+	### Split Postgres driver options out from regular Sequel options in the
+	### specified +opthash+ and return them in the form Sequel expects.
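+	### E.g. (with illustrative values):
+	###   unmangle_driver_options( host: 'localhost', max_connections: 4 )
+	###   # => { max_connections: 4, driver_options: { host: 'localhost' } }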
+	def self::unmangle_driver_options( opthash )
+		driver_opts, conn_opts = opthash.partition do |key, _|
+			PG::Connection.conndefaults_hash.key?( key )
+		end
 
+		options = Hash[ *(conn_opts.flatten) ]
+		options[:driver_options] = Hash[ *(driver_opts.flatten) ]
+
+		options.delete( :uri )
+		options.delete( :admin_role )
+		options.delete( :slow_query_seconds )
+
+		return options
 	end
 
 

          
@@ 72,8 106,12 @@ class Assemblage::DbObject
 	def self::db=( newdb )
 		newdb.sql_log_level = :debug
 		newdb.logger = Loggability[ Assemblage::DbObject ]
+		newdb.log_warn_duration = self.slow_query_seconds
 
 		newdb.extension( :pg_json )
+		newdb.extension( :pg_enum )
+		newdb.extension( :pg_streaming )
+		newdb.extension( :pagination )
 
 		super
 

          
@@ 110,38 148,141 @@ class Assemblage::DbObject
 	end
 
 
-	### Returns +true+ if the database for the model classes exist.
-	def self::database_is_current?
-		return Loggability.with_level( :fatal ) do
-			Sequel::Migrator.is_current?( self.db, self.migrations_dir.to_s )
-		end
-	rescue Sequel::Migrator::Error => err
-		self.log.debug "Got a %p while checking to see if the database was current: %s" %
-			[ err.class, err.message ]
-		return false
+	# Configurability API
+	configurability( 'assemblage.db' ) do
+
+		##
+		# :singleton-method:
+		# The URI of the database to connect to
+		setting :uri, default: nil
+
+		##
+		# :singleton-method:
+		# The name of the ROLE to use for admin operations
+		setting :admin_role, default: 'admin'
+
+		##
+		# :singleton-method:
+		# The number of (Float) seconds that should be considered "slow" for a
+		# single query; queries that take longer than this amount of time will be logged
+		# at `warn` level.
+		setting :slow_query_seconds, default: 0.1
+
+		##
+		# :singleton-method:
+		# What level of server messages are sent to the client.
+		# Ref: https://www.postgresql.org/docs/current/static/runtime-config-logging.html#RUNTIME-CONFIG-LOGGING-WHEN
+		setting :client_min_messages, default: 'warning'
+
+		##
+		# :singleton-method:
+		# The maximum number of connections to use in the Sequel pool
+		# Ref: http://sequel.jeremyevans.net/rdoc/files/doc/opening_databases_rdoc.html#label-General+connection+options
+		setting :max_connections, default: 4
+
+		##
+		# :singleton-method:
+		# The timezone to use for database connections.
+		setting :timezone, default: 'America/Los_Angeles'
+
+	end
+
+
+	### Create the schema this class's table belongs to (if it doesn't already exist).
+	def self::create_schema
+		self.db.create_schema( self.schema_name, if_not_exists: true )
 	end
 
 
-	### Set up the metastore database and migrate to the latest version.
-	def self::setup_database
-		raise "Can't migrate a mock database" if self.db.adapter_scheme == :mock
-		unless self.database_is_current?
-			self.log.info "Installing database schema in %s..." % [ self.db ]
-			Sequel::Migrator.apply( self.db, self.migrations_dir.to_s )
+	### Re-create this class's schema, dropping any previous schema by the same name.
+	def self::create_schema!
+		self.drop_schema!
+		self.create_schema
+	end
+
+
+	### Drop this class's schema (if it exists and is empty).
+	def self::drop_schema
+		self.db.drop_schema( self.schema_name, if_exists: true )
+	end
+
+
+	### Drop this class's schema and all of the objects it contains.
+	def self::drop_schema!
+		self.db.drop_schema( self.schema_name, if_exists: true, cascade: true )
+	end
+
+
+	### Returns +true+ if a schema named +name+ exists.
+	def self::schema_exists?( name=self.schema_name )
+		ds = self.db[ :pg_catalog__pg_namespace ].
+			filter( :nspname => name.to_s ).
+			select( :nspname )
+
+		return ds.first ? true : false
+	end
+
+
+	### Return the name of the schema the receiving class is in.
+	def self::schema_name
+		schemaname, tablename = self.db.send( :schema_and_table, self.table_name )
+		return schemaname
+	end
+
+
+	### Return the Array of the schemas used by all currently-loaded model classes.
+	def self::all_loaded_schemas
+		return Assemblage::DbObject.descendents.map( &:schema_name ).uniq.compact
+	end
+
+
+	### Create the schemas for all currently-loaded model classes, using the
+	### migration role.
+	def self::create_all_schemas
+		self.with_role( :migrator ) do
+			self.all_loaded_schemas.each do |schema|
+				self.db.create_schema( schema, if_not_exists: true )
+			end
 		end
 	end
 
 
-	### Tear down the configured metastore database.
-	def self::teardown_database
-		self.log.info "Tearing down database schema..."
-		Sequel::Migrator.apply( self.db, self.migrations_dir.to_s, 0 )
+	### Drop the schemas for all currently-loaded model classes.
+	def self::drop_all_schemas
+		self.with_admin_role do
+			self.all_loaded_schemas.each do |schema|
+				next if ['public', 'tiger', 'tiger_data', 'topology'].include?( schema )
+				self.db.drop_schema( schema, if_exists: true, cascade: true )
+			end
+		end
+	end
+
+
+	### Migration hook -- set up permissions on all the tables.
+	def self::after_migration
+		super
+		self.grant_model_permissions_to( 'application' )
 	end
 
 
-	### Return the current database migrations directory as a Pathname
-	def self::migrations_dir
-		return Assemblage::DATADIR + 'migrations'
+	### Clear the table that tracks migrations which have already been applied. Probably
+	### only useful for a testing environment.
+	def self::clear_migrations_table
+		migration_table = self.db[ Sequel.qualify(:public, :schema_migrations) ]
+		migration_table.truncate
+	end
+
+
+	### Grant permissions on the schemas, tables, and sequences for all loaded
+	### domain classes to +user+.
+	def self::grant_model_permissions_to( user )
+		self.all_loaded_schemas.each do |schema|
+			self.db << "GRANT USAGE ON SCHEMA %s TO %s" % [ schema, user ]
+			self.db << "GRANT SELECT,INSERT,UPDATE,DELETE ON ALL TABLES IN SCHEMA %s TO %s" %
+				[ schema, user ]
+			self.db << "GRANT USAGE,SELECT ON ALL SEQUENCES IN SCHEMA %s TO %s" %
+				[ schema, user ]
+		end
 	end
 
 

          
@@ 156,21 297,16 @@ class Assemblage::DbObject
 	end
 
 
-	### Configurability interface -- Configure the Sequel connection
-	def self::configure( config=nil )
-		super
-
-		if self.uri
-			Loggability[ Assemblage::DbObject ].info "Connecting to %s" % [ self.uri ]
-			self.db = Sequel.connect( self.uri, self.options )
-			Assemblage::DbObject.setup_database
-			Assemblage::DbObject.require_models
-		end
-
+	### Wrap the block in the default ADMIN role.
+	def self::with_admin_role( &block )
+		self.with_role( self.admin_role, &block )
 	end
 
 
-	Assemblage::DbObject.register_model( 'assemblage/connection' )
+	# Load models after the system is configured
+	Configurability.after_configure do
+		Assemblage::DbObject.require_models
+	end
 
 end # class Assemblage::DbObject
 

          
M lib/assemblage/event.rb +23 -1
@@ 6,7 6,7 @@ require 'assemblage/db_object'
 require 'assemblage/protocol'
 
 
-class Assemblage::Event < Assemblage::DbObject( Sequel[:events] )
+class Assemblage::Event < Assemblage::DbObject( :events )
 
 	# Maintain the timestamp fields automatically
 	plugin :timestamps

          
@@ 14,10 14,32 @@ class Assemblage::Event < Assemblage::Db
 	# Serialize the `data` column
 	plugin :serialization, :json, :data
 
+
+	set_schema do
+		primary_key :id
+
+		text :type, null: false
+		jsonb :data, default: Sequel.pg_jsonb([])
+
+		timestamptz :created_at, default: Sequel.function(:now)
+		timestamptz :updated_at
+		timestamptz :removed_at
+
+		foreign_key :repository_id, :repositories, null: false,
+			on_delete: :cascade
+		foreign_key :via_publisher_id, :connections, null: false
+	end
+
+
 	##
 	# The repository the event occurred in
 	many_to_one :repository, class: 'Assemblage::Repository'
 
+	##
+	# The publisher connection the event was sent by
+	many_to_one :via_publisher, class: 'Assemblage::Connection' do |ds|
+		ds.publishers
+	end
 
 end # class Assemblage::Event
 

          
M lib/assemblage/repository.rb +44 -3
@@ 11,8 11,21 @@ class Assemblage::Repository < Assemblag
 	# Maintain the timestamp fields automatically
 	plugin :timestamps
 
-	# Serialize the requirements and traits columns
-	plugin :serialization, :json, :requirements, :traits
+
+	set_schema do
+		primary_key :id
+
+		text :name, null: false, unique: true
+		text :type, null: false
+		text :url, null: false, unique: true
+
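+		# Worker name patterns; a leading '-' excludes matching workers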
+		jsonb :workers, null: false, default: Sequel.pg_jsonb([])
+
+		timestamptz :created_at, default: Sequel.function(:now)
+		timestamptz :updated_at
+		timestamptz :removed_at
+	end
+
 
 	#
 	# Associations

          
@@ 24,7 37,35 @@ class Assemblage::Repository < Assemblag
 
 	##
 	# Assemblies for this repository
-	one_to_many :events, class: 'Assemblage::Event'
+	one_to_many :events, class: 'Assemblage::Event',
+		after_add: :create_event_assemblies
+
+
+
+	### Overridden to set up a few default values.
+	def initialize( * )
+		super
+
+		self[ :workers ] ||= Sequel.pg_jsonb( [] )
+	end
+
+
+	### Create assemblies for the given +event+ for workers which match this repository.
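+	### For example (hypothetical names), a +workers+ value of ['osx-worker', '-linux-worker']
+	### creates assemblies for worker connections matching 'osx-worker' and skips any
+	### matching 'linux-worker'.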
+	def create_event_assemblies( event )
+		# Entries prefixed with '-' are antipatterns; Array#partition returns
+		# the elements the block is true for first.
+		antipatterns, patterns = self.workers.partition do |pattern|
+			pattern.start_with?( '-' )
+		end
+		antipatterns = antipatterns.map {|pattern| pattern.delete_prefix('-') }
+
+		matching_workers = Assemblage::Connection.workers
+		matching_workers = matching_workers.where( name: Regexp.union(*patterns) ) unless patterns.empty?
+		matching_workers = matching_workers.exclude( name: Regexp.union(*antipatterns) ) unless antipatterns.empty?
+
+		matching_workers.all.each do |worker|
+			Assemblage::Assembly.create( creating_worker: worker, triggering_event: event )
+		end
+	end
 
 end # class Assemblage::Repository
 

          
M lib/assemblage/server.rb +6 -9
@@ 39,6 39,7 @@ class Assemblage::Server
 		status
 		status_report
 		event
+		fetch_assemblies
 	]
 
 	# The list of valid commands for unauthenticated connections

          
@@ 121,12 122,6 @@ class Assemblage::Server
 	end
 
 
-	### Create the database the assembly information is tracked in.
-	def self::create_database
-		Assemblage::DbObject.setup_database
-	end
-
-
 	### Approve connections from the client with the specified +name+ and
 	### +public_key+ to the server configured in the current run directory.
 	def self::approve_connections_from( name, type=:worker )

          
@@ 163,7 158,7 @@ class Assemblage::Server
 
 
 	### Add a new repository to the server configured in the current run directory.
-	def self::add_repo( name, type, url )
+	def self::add_repo( name, type, url, *patterns )
 		unless Assemblage::VCSStrategy.available.include?( type )
 			raise "Invalid repo type %p; supported types are: %s" %
 				[ type, Assemblage::VCSStrategy.available.join(', ') ]

          
@@ 173,6 168,7 @@ class Assemblage::Server
 			self.log.info "Adding %s repo %s at %s" % [ type, name, url ]
 			repo.type = type
 			repo.url = url
+			repo.workers = Sequel.pg_jsonb( patterns )
 		end
 
 		return repo

          
@@ 319,7 315,8 @@ class Assemblage::Server
 
 		repo = Assemblage::Repository[ name: repo_name ] or
 			raise "No such repository %p" % [ repo_name ]
-		event = repo.add_event( type: event_type, data: event_data, via_connection_id: connection.id )
+		event = repo.
+			add_event( type: event_type, data: event_data, via_publisher_id: connection.id )
 
 		msg = connection.response( :event_added, [event.id] )
 		self.queue_output_message( msg )

          
@@ 341,7 338,7 @@ class Assemblage::Server
 
 
 	### Handle a connection asking for any assemblies pending for it.
-	def handle_fetch_assemblies_command( connection, criteria )
+	def handle_fetch_assemblies_command( connection, criteria, * )
 		self.log.debug "Connection %s fetched assemblies: %p" % [ connection, criteria ]
 		msg = connection.response( :assemblies, [] )
 		self.queue_output_message( msg )

          
M lib/assemblage/vcs_strategy.rb +1 -1
@@ 27,7 27,7 @@ class Assemblage::VCSStrategy
 
 	### Clone the repository at the given +url+ into the specified +directory+ at
 	### the specified +revision+.
-	def self::clone( url, directory, revision )
+	def clone( url, directory, revision )
 		raise NotImplementedError, "%p doesn't implement %s" % [ self.class, __method__ ]
 	end
 

          
M lib/assemblage/worker.rb +33 -25
@@ 29,9 29,6 @@ class Assemblage::Worker
 	# The name given to workers by default
 	DEFAULT_NAME = "#{Socket.gethostname.gsub('.', '-').downcase}-worker"
 
-	# A Regexp for matching valid worker names
-	VALID_NAME = /\A[a-z](\w+)(-\w+)*\z/
-
 	# A Regexp for matching valid traits
 	VALID_TRAIT = /\A\p{Alnum}+(-\p{Alnum}+)*:\s*\p{Print}+\z/
 

          
@@ 145,7 142,7 @@ class Assemblage::Worker
 			raise "Invalid trait %p" % [ trait ] unless trait.match( VALID_TRAIT )
 		end
 
-		self.log.debug "Attempting to set up %s as a run directory." % [ directory ]
+		self.log.debug "Attempting to set up %s as a worker run directory." % [ directory ]
 		directory.mkpath
 		directory.chmod( 0755 )
 

          
@@ 358,21 355,44 @@ class Assemblage::Worker
 	### Start the next assembly if there is one and the worker is idle.
 	def work_on_assemblies( * )
 		builder = self.current_assembly_builder or return # No builders queued
-		result = builder.resume
+
+		builder.resume
 
-		if result
+		if builder.running?
+			self.log.debug "Builder still working."
+			self.send_assembly_progress( builder )
+		else
 			self.log.info "Builder finished; queueing result."
-			self.on_assembly_finished( builder, result )
-		else
-			self.log.debug "Building still working."
+			self.send_assembly_result( builder )
+			self.assembly_builders.shift
 		end
 	end
 
 
-	### Notify the worker that the assembly being built by the specified +builder+
-	### has finished with the given +result+.
-	def on_assembly_finished( builder, result )
-		self.send_result( builder.assembly_id, result )
+	### Handle a `new_assembly` message from the server.
+	def on_new_assembly_message( assembly, * )
+		self.log.info "Creating a new builder for: %p" % [ assembly ]
+		builder = Assemblage::AssemblyBuilder.new( Dir.pwd, assembly )
+		builder.start
+
+		self.builder_queue << builder
+
+		super
+	end
+
+
+	### Notify the server that the assembly being built by the specified +builder+
+	### is still building.
+	def send_assembly_progress( builder )
+		self.send_result( builder.assembly_id, builder.in_progress_result )
+		super
+	end
+
+
+	### Notify the server that the assembly being built by the specified +builder+
+	### has finished.
+	def send_assembly_result( builder )
+		self.send_result( builder.assembly_id, builder.final_result )
 		super
 	end
 

          
@@ 415,17 435,5 @@ class Assemblage::Worker
 		self.client.send_message( :post_result, [assembly_id, result] )
 	end
 
-
-	### Handle a `new_assembly` message from the server.
-	def on_new_assembly_message( assembly, * )
-		self.log.info "Creating a new builder for: %p" % [ assembly ]
-		builder = Assemblage::AssemblyBuilder.new( assembly )
-		builder.start
-
-		self.builder_queue << builder
-
-		super
-	end
-
 end # class Assemblage::Worker
 

          
A => spec/assemblage/assembly_builder_spec.rb +14 -0
@@ 0,0 1,14 @@ 
+# -*- ruby -*-
+# frozen_string_literal: true
+
+require_relative '../spec_helper'
+
+require 'assemblage/assembly_builder'
+
+
+RSpec.describe Assemblage::AssemblyBuilder do
+
+	it "can be built with an assembly description hash" 
+
+end
+

          
A => spec/assemblage/assembly_spec.rb +124 -0
@@ 0,0 1,124 @@ 
+#!/usr/bin/env rspec -cfd
+
+require_relative '../spec_helper'
+
+require 'assemblage'
+
+
+RSpec.describe 'Assemblage::Assembly', :db do
+
+	let( :described_class ) { Assemblage::Assembly }
+
+	let( :worker_connection ) do
+		return Assemblage::Connection.create( type: 'worker', name: 'worker1' )
+	end
+	let( :publisher_connection ) do
+		return Assemblage::Connection.create( type: 'publisher', name: 'pub1' )
+	end
+	let( :repository ) do
+		return Assemblage::Repository.create(
+			name: 'testing',
+			type: 'mercurial',
+			url: 'https://example.com/hg/testing'
+		)
+	end
+	let( :triggering_event ) do
+		return Assemblage::Event.create(
+			type: 'commit',
+			repository: repository,
+			via_publisher: publisher_connection
+		)
+	end
+
+	let( :valid_fields ) {{
+		creating_worker: worker_connection,
+		triggering_event: triggering_event,
+		assemblage_version: Assemblage::VERSION,
+	}}
+
+
+	it "generates its own UUID if necessary" do
+		instance = described_class.new
+		expect( instance.uuid ).to be_a_uuid
+	end
+
+
+	it "sets its status to `new` on creation" do
+		instance = described_class.new
+		expect( instance.status ).to eq( 'new' )
+	end
+
+
+	it "is valid if created with its required fields set" do
+		instance = described_class.new( valid_fields )
+		expect( instance ).to be_valid
+	end
+
+
+	it "is invalid if it doesn't have a UUID" do
+		instance = described_class.new( valid_fields )
+
+		instance.uuid = nil
+
+		expect( instance ).to_not be_valid
+		expect( instance.errors.keys ).to contain_exactly( :uuid )
+		expect( instance.errors.on(:uuid) ).to include( a_string_matching /is not present/i )
+	end
+
+
+	it "is invalid if it has an invalid UUID" do
+		instance = described_class.new( valid_fields )
+
+		instance.uuid = 'a hand-made clown painting'
+
+		expect( instance ).to_not be_valid
+		expect( instance.errors.keys ).to contain_exactly( :uuid )
+		expect( instance.errors.on(:uuid) ).to include( a_string_matching /is invalid/i )
+	end
+
+
+	it "is invalid if it doesn't have the assemblage version" do
+		instance = described_class.new( valid_fields )
+
+		instance.assemblage_version = nil
+
+		expect( instance ).to_not be_valid
+		expect( instance.errors.keys ).to contain_exactly( :assemblage_version )
+		expect( instance.errors.on(:assemblage_version) ).to include( a_string_matching /is not present/i )
+	end
+
+end
+

          
A => spec/assemblage/repository_spec.rb +31 -0
@@ 0,0 1,31 @@ 
+#!/usr/bin/env rspec -cfd
+
+require_relative '../spec_helper'
+
+
+RSpec.describe 'Assemblage::Repository', :db do
+
+	let( :described_class ) { Assemblage::Repository }
+
+
+	context "with no worker specification" do
+
+		it "creates an assembly for each registered worker for each event" do
+			worker1 = Assemblage::Connection.create( type: 'worker', name: 'worker1' )
+			worker2 = Assemblage::Connection.create( type: 'worker', name: 'worker2' )
+			repo = Assemblage::Repository.create(
+				name: 'testing',
+				type: 'mercurial',
+				url: 'https://example.com/hg/testing'
+			)
+
+			
+
+		end
+
+
+	end
+
+end
+

          
M spec/assemblage/server_spec.rb +1 -1
@@ 18,7 18,7 @@ describe Assemblage::Server, db: true do
 
 	after( :all ) do
 		Assemblage::Auth.cert_store_dir = @original_store_dir
-		FileUtils.rm_rf( @test_data_dir )
+		FileUtils.rm_rf( @test_data_dir ) if @test_data_dir&.exist?
 	end
 
 	before( :each ) do

          
M spec/assemblage/worker_spec.rb +1 -1
@@ 16,7 16,7 @@ describe Assemblage::Worker do
 
 	after( :all ) do
 		Assemblage::Auth.cert_store_dir = @original_store_dir
-		FileUtils.rm_rf( @test_data_dir )
+		FileUtils.rm_rf( @test_data_dir ) if @test_data_dir&.exist?
 	end
 
 

          
M spec/spec_helper.rb +51 -49
@@ 15,7 15,7 @@ require 'assemblage'
 #
 #    # in spec/spec_helper.rb
 #    RSpec.configure do |c|
-#      c.include( AssemblageSpecHelpers )
+#      c.include( Assemblage::SpecHelpers )
 #    end
 #
 #    # in my_class_spec.rb; mark an example as needing database setup

          
@@ 25,7 25,7 @@ require 'assemblage'
 #
 #    end
 #
-module AssemblageSpecHelpers
+module Assemblage::SpecHelpers
 
 	# The path to the local config file.
 	LOCAL_TEST_CONFIG = Pathname( '~/.assemblage-test.yml' ).expand_path

          
@@ 43,11 43,11 @@ module AssemblageSpecHelpers
 	def self::included( context )
 
 		context.before( :all ) do
-			if Assemblage.config || AssemblageSpecHelpers.load_test_config
-				if Assemblage::DbObject.model_superclasses.empty? || Assemblage::DbObject.db == ::DB
+			if Assemblage.config || Assemblage::SpecHelpers.load_test_config
+				if Assemblage::DbObject.db
+					Assemblage::SpecHelpers.setup_database
+				else
 					warn "No database configured for testing!"
-				else
-					AssemblageSpecHelpers.setup_test_databases
 				end
 			else
 				warn "No testing config found!"

          
@@ 55,16 55,28 @@ module AssemblageSpecHelpers
 		end
 
 		context.after( :all ) do
-			cleanup_test_databases() if Assemblage::DbObject.model_superclasses.any?
+			Assemblage::SpecHelpers.teardown_database if Assemblage::DbObject.db
 		end
 
 		context.around( :each ) do |example|
 			setting = example.metadata[:db]
-			if setting && setting != :no_transaction && setting != :without_transaction
-				AssemblageSpecHelpers.wrap_example_in_transactions( example )
+
+			if ((db = Assemblage::DbObject.db))
+				if setting == :no_transaction || setting == :without_transaction
+					Loggability[ Assemblage ].debug "  running without a transaction"
+					example.run
+				else
+					Loggability[ Assemblage ].debug "  running with a transaction"
+					db.transaction( rollback: :always ) do
+						example.run
+					end
+				end
+
+			elsif setting.to_s == 'pending'
+				example.metadata[:pending] ||=
+					"a configured database connection in #{test_config}"
 			else
-				Loggability[ Assemblage::DbObject ].debug "DB: Running without a transaction"
-				example.run
+				fail "No database connection configured! Create a #{test_config} file."
 			end
 		end
 

          
@@ 82,47 94,27 @@ module AssemblageSpecHelpers
 	end
 
 
-	### Run the specified +example+ in the context of a transaction for each loaded
-	### model superclass. Raises if any of the loaded superclasses aren't
-	### configured.
-	def self::wrap_example_in_transactions( example )
-		txn_classes = Assemblage::DbObject.model_superclasses
-		Loggability[ Assemblage::DbObject ].debug "Wrapping example for model superclasses: %p" %
-			[ txn_classes ]
-
-		wrapped_proc = txn_classes.inject( example.method(:run) ) do |callback, txn_class|
-			if ((db = txn_class.db))
-				Loggability[ Assemblage ].debug "DB: Running with an outer transaction"
-				Proc.new { db.transaction(auto_savepoint: :only, rollback: :always, &callback) }
-			else
-				fail "No database connection for %p configured! Add a %s section to the test config." %
-					[ txn_class, txn_class.config_key ]
-			end
-		end
-
-		wrapped_proc.call
-	end
-
 
 	###############
 	module_function
 	###############
 
-	### Set up the database connection using the SDK config
-	def setup_test_databases
-		Assemblage::DbObject.log.info "Setting up the test database."
-		Assemblage::DbObject.with_role( 'migrator' ) do
-			Assemblage::DbObject.setup_database
+	### Set up the test database and migrate it to the latest version.
+	def setup_database
+		raise "Can't migrate a mock database" if Assemblage::DbObject.db.adapter_scheme == :mock
+		Loggability.with_level( :fatal ) do
+			Assemblage::DbObject.clear_migrations_table
+			Assemblage::DbObject.create_all_schemas
+			Assemblage::DbObject.migrate
+			# Assemblage::DbObject.preload_tables
 		end
 	end
 
 
-	### Clean up the database connection
-	def cleanup_test_databases
-		Assemblage::DbObject.log.info "Tearing down the test database."
-		Assemblage::DbObject.with_role( 'migrator' ) do
-			Assemblage::DbObject.teardown_database
-		end
+	### Tear down the configured test database.
+	def teardown_database
+		Assemblage.logger.info "Tearing down the test database."
+		Assemblage::DbObject.drop_all_schemas
 	end
 
 

          
@@ 153,13 145,13 @@ module AssemblageSpecHelpers
 	end
 
 
-	### Load and install a FM config from the +configfile+ specified. If no
+	### Load and install an Assemblage config from the +configfile+ specified. If no
 	### configfile is specified, it will first look for a file at the
 	### LOCAL_TEST_CONFIG. If it doesn't exist, the DEFAULT_TEST_CONFIG will be
 	### checked. If that too doesn't exist, then +nil+ is passed to
-	### FM.load_config, which will use non-test-specific config files.
+	### Assemblage.load_config, which will use non-test-specific config files.
 	def load_test_config( configfile=nil )
-		configfile ||= ENV[ FM::CONFIG_ENV ]
+		configfile ||= ENV[ Assemblage::CONFIG_ENV ]
 		configfile ||= LOCAL_TEST_CONFIG if LOCAL_TEST_CONFIG.exist?
 		configfile ||= DEFAULT_TEST_CONFIG if DEFAULT_TEST_CONFIG.exist?
 

          
@@ 167,9 159,11 @@ module AssemblageSpecHelpers
 
 		$stderr.puts "Loading config from: %s" % [ configfile || 'defaults' ]
 
-		unless FM.config && FM.config.path == configfile
-			FM.load_config( configfile )
+		unless Assemblage.config && Assemblage.config.path == configfile
+			Assemblage.load_config( configfile )
 		end
+
+		return Assemblage.config
 	end
 
 

          
@@ 184,7 178,14 @@ module AssemblageSpecHelpers
 		end
 	end
 
-end # module AssemblageSpecHelpers
+
+	RSpec::Matchers.define( :be_a_uuid ) do
+		match do |string|
+			string.to_s =~ /\A\h{8}(-\h{4}){3}-\h{12}\z/
+		end
+	end
+
+end # module Assemblage::SpecHelpers
 
 
 ### Mock with RSpec

          
@@ 201,6 202,7 @@ RSpec.configure do |config|
 	config.wait_timeout = 3
 
 	config.include( Loggability::SpecHelpers )
+	config.include( Assemblage::SpecHelpers )
 end