M Pure-Data.py +10 -5
@@ 35,7 35,11 @@ cmake_minimum_required(VERSION 3.10 FATA
project(${package})
-include_directories("${PUREDATA_DIR}/src" "${common_assets}")
+if (PUREDATA_INCLUDE_DIR)
+ include_directories("${PUREDATA_INCLUDE_DIR}")
+endif()
+
+include_directories("${common_assets}")
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_SOURCE_DIR}/artifacts")
@@ 79,21 83,22 @@ if(CMAKE_CL_64)
add_definitions(-D__x86_64__=1)
endif()
-find_library(PD_LIB pd PATHS "${PUREDATA_DIR}/bin")
-
-message(STATUS "Pd in ${PUREDATA_DIR}, stub library ${PD_LIB}")
+find_library(PD_LIB pd PATHS "${PUREDATA_LIBRARY_DIR}")
if(${PD_LIB} STREQUAL "PD_LIB-NOTFOUND")
target_link_libraries(kronos_pd "-Wl,-undefined -Wl,dynamic_lookup")
+ message(STATUS "Linking Pd dynamically")
else()
+ message(STATUS "Linking with Pd stub library ${PD_LIB}")
target_link_libraries(kronos_pd ${PD_LIB})
endif()
""")
strip_extern_re = re.compile("^:Extern:(.*)$")
-def build(syscfg, job):
+def build(syscfg, job):
cfg = syscfg.copy()
+ job["package"] = job["package"].lower()
cfg.update(job)
externs = ""
M Pure-Data/pd.cpp +18 -9
@@ 1,11 1,11 @@
-#include "m_pd.h"
#include "kreflect.h"
+#include "m_pd.h"
#include <unordered_map>
#include <memory>
#include <tuple>
-#ifndef NDEBUG
+#ifndef NDEBUG
#define CONSOLE(...) post(__VA_ARGS__)
#else
#define CONSOLE(...)
@@ 129,6 129,8 @@ class Class : public IConfigurator {
void Dispatch(t_wrapper* w, Param& p, int argc, t_atom* argv) {
if (!p.ksym->size) return;
+ CONSOLE("[%s] %s", name.c_str(), p.ksym->sym);
+
auto scratch = (const char* const)alloca(p.ksym->size);
auto writePtr = scratch;
PdToBlob(p.ksym->type_descriptor, writePtr, argc, argv);
@@ 186,9 188,11 @@ class Class : public IConfigurator {
public:
Class(std::string name, krt_class* k, t_class *pd):k(k),name(name),pd(pd) {
+ CONSOLE("Wrapping [%s]", name.c_str());
KReflect::Configure(*k, *this);
KReflect::Setup(*k, *this);
std::sort(parameters.begin(), parameters.end());
+ CONSOLE("Metadata parsed...");
if (signalSlots.size()) {
CLASS_MAINSIGNALIN(pd, t_wrapper, reserved);
@@ 205,6 209,7 @@ public:
class_addmethod(pd, (t_method)DspProc, gensym("dsp"), A_CANT, 0);
}
+ CONSOLE("Methods added... done!");
outScratch.reset(new char[k->result_type_size]);
}
@@ 213,7 218,7 @@ public:
memset(space.get(), 0, k->get_size());
KReflect::Configure(*k, *this);
k->construct(space.get(), nullptr);
- return space;
+ return std::move(space);
}
void AddConnections(t_wrapper* wrap) {
@@ 238,21 243,24 @@ public:
}
}
- t_wrapper* Construct(t_symbol* s, int argc, t_atom* argv) {
+ void* Construct(t_symbol* s, int argc, t_atom* argv) {
+ CONSOLE("Constructing %s (%i args)", name.c_str(), argc);
t_wrapper* wrap = (t_wrapper*)pd_new(Pd());
- wrap->reserved = 0;
wrap->cls = this;
+ new (&wrap->inst) Instance();
+ wrap->inst.kstate = ConstructInstance();
AddConnections(wrap);
- new (&wrap->inst) Instance;
- wrap->inst.kstate = std::move(ConstructInstance());
// constructor args
for (auto& p : parameters) {
if (argc-- < 1) break;
+ CONSOLE("Dispatch %s", p.name);
Dispatch(wrap, p, 1, argv++);
}
- return wrap;
+ CONSOLE("Done, returning %p", (void*)wrap);
+
+ return (void*)wrap;
}
void Perform(t_wrapper* wrap, t_signal** sig) {
@@ 309,7 317,7 @@ public:
};
void Destructor(t_wrapper* wrap) {
- wrap->inst.~Instance();
+ wrap->inst.~Instance();
}
t_class* Declare(std::string name, krt_class* k, t_newmethod constructor) {
@@ 343,6 351,7 @@ extern "C" {
__declspec(dllexport)
#endif
void PACKAGE_SETUP() {
+ CONSOLE( "package_setup()");
#define F(CLASS) CLASS ## _Declare();
DSP_CLASSES
#undef F
A => build.py +101 -0
@@ 0,0 1,101 @@
+from __future__ import print_function
+
+import os, platform, io, sys, shutil, tempfile, zipfile, traceback
+import datetime, time
+import base64, glob, re, json
+import config
+import cmake
+
+from config import cfg, script_dir
+
+def bundle_artifacts(zip_path, from_dir):
+ with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zip_handle:
+ for root, dirs, files in os.walk(from_dir + "/artifacts"):
+ for file in files:
+ zip_handle.write(os.path.join(root, file), arcname = file)
+
+def build(builder, job_id, job, progress):
+ job["status"] = "building"
+
+ tmpdir = tempfile.mkdtemp(prefix = "kronos." + builder.__name__ + ".")
+
+ artifact_dir = tmpdir + "/artifacts"
+ build_dir = tmpdir + "/build"
+
+ os.makedirs(artifact_dir)
+ os.makedirs(build_dir)
+
+ os.chdir(tmpdir)
+
+ with open("source.k","w") as write_source:
+ write_source.write(job.pop("source", ""))
+
+ progress(job)
+
+ zip_path = None
+
+ try:
+ job["package"] = cmake.sanitize(job["title"].split()[0])
+
+ print("* %s [%s] (%s) : %s" % (job["title"], builder.__name__, job_id, tmpdir))
+
+ for mode in ["_RELEASE", "_DEBUG", ""]:
+ for bt in ["RUNTIME", "LIBRARY"]:
+ cfg["cmake"].update({ "CMAKE_" + bt + "_OUTPUT_DIRECTORY" + mode: artifact_dir })
+
+ builder.build(cfg, job)
+
+ zip_path = job["package"] + ".zip"
+
+ bundle_artifacts(zip_path, tmpdir)
+
+ job["status"] = "success"
+ job["url"] = "job/" + job_id + "/" + zip_path
+
+ except ValueError as ve:
+ job["status"] = "failure"
+ job["log"] = str(ve)
+ except:
+ job["status"] = "failure"
+ job["log"] = traceback.format_exc()
+
+ progress(job)
+
+ return zip_path
+
+# build directly from command line argument
+if __name__ == "__main__":
+ import argparse
+ import importlib
+
+ user_dir = os.getcwd()
+
+ parser = argparse.ArgumentParser(description="Compile a Veneer build manifest")
+ parser.add_argument("-a", "--architecture", metavar="<arch>", required=True,
+ help = "Which architecture to build for")
+ parser.add_argument("-c", "--configuration", metavar="<path>", type=str, default="system-config.json",
+ help = "path to build system configuration file (.json)")
+ parser.add_argument("jobs", metavar='<job.json>', type=str, nargs ="+", help = "path to manifest")
+
+ args = vars(parser.parse_args())
+ config.initialize(args["configuration"])
+
+ arch = importlib.import_module(args["architecture"])
+ cfg["builder_assets"] = script_dir + "/" + args["architecture"]
+ cfg["common_assets"] = script_dir + "/Common"
+
+ def progress(job):
+ print("- %s" % job["status"])
+
+ for job in args["jobs"]:
+ job = json.load(open(job,"r"))
+ print("\n[%s]" % job.get("title", "job"))
+
+ artifact = build(arch, "cli", job, progress)
+
+ if job["status"] != "success":
+ print(job["log"])
+
+ if artifact:
+ shutil.move(artifact, user_dir)
+ print("- Created %s" % artifact)
R builder.py => +0 -273
@@ 1,273 0,0 @@
-from __future__ import print_function
-
-import subprocess, os, platform, io, sys, shutil, tempfile, zipfile, traceback
-import httplib, urllib, importlib
-import datetime, time
-import base64, glob, re, json
-
-import cmake
-
-cfg = {}
-couchdb = None
-
-authorization_header = None
-
-def merge_config(cfg, filename):
- to_merge = json.load(open(filename))
- if to_merge:
- cfg.update(to_merge)
-
-def request(method, selector = "/", body = None, pheaders = {}):
- couchdb.connect()
-
- headers = {
- "Referer": "http://127.0.0.1",
- "Content-Type": "application/json",
- "Accept": "application/json"
- }
-
- headers.update(pheaders)
-
- if not "Authorization" in headers:
- headers["Authorization"] = authorization_header
-
- if headers["Content-Type"] == "application/json":
- body = json.dumps(body)
-
- couchdb.request(method, selector, body, headers)
-
- resp = couchdb.getresponse()
- status = resp.status
- body = resp.read()
-
- if re.search("^application/json", resp.getheader("Content-Type", "")):
- body = json.loads(body)
-
- if "error" in body:
- print("%i: %s %s: %s" % (status, method, selector, body))
- raise RuntimeError(body)
-
- return body
- else:
- return { "error": resp, "status": resp.status }
-
-
-def GET(selector = "/", headers = {}):
- page = "/" + cfg["database"] + selector
- return request("GET", page, None, headers)
-
-def PUT(selector, body, headers = {}):
- return request("PUT", "/" + cfg["database"] + selector, body, headers)
-
-def POST(selector, body, headers = {}):
- return request("POST", "/" + cfg["database"] + selector, body, headers)
-
-def range_query(start, end = None):
- if end is None:
- end = start + [{}]
-
- return "startkey=%s&endkey=%s" % (
- urllib.quote_plus(json.dumps(start)),
- urllib.quote_plus(json.dumps(end)))
-
-
-cfg["platform"] = [platform.system() + " " + platform.release(), platform.machine(), platform.version()]
-cfg["python"] = sys.executable.replace("\\","/")
-cfg["common_assets"] = os.path.dirname(os.path.realpath(__file__)).replace("\\","/") + "/Common"
-
-merge_config(cfg, "system-defaults.json")
-merge_config(cfg, "system-config.json")
-
-if "password" in cfg:
- authorization_header = "Basic " + base64.b64encode(cfg.pop("username","") + ":" + cfg.pop("password", ""))
-
-if cfg["scheme"] == "https":
- print("Buildmaster: https://" + cfg["buildmaster"])
- couchdb = httplib.HTTPSConnection(cfg["buildmaster"])
-else:
- print("Buildmaster: http://" + cfg["buildmaster"])
- couchdb = httplib.HTTPConnection(cfg["buildmaster"])
-
-print(platform.platform(terse=1))
-print("Building %s externs for %s" % (cfg["architectures"], cfg["platform"]))
-
-builders = {}
-
-for arch in cfg["architectures"]:
- builders[arch] = importlib.import_module(arch)
- builders[arch].banner()
-
-script_dir = os.path.dirname(os.path.realpath(__file__)).replace("\\","/")
-print("script dir %s" % script_dir)
-
-def db_view(name, params):
- query = urllib.urlencode(params)
- if query:
- query = "?" + query
- return "/_design/d/_view/" + name + query;
-
-def current_seq():
- return GET("/_changes?" + urllib.urlencode({
- "limit": 0,
- "since": "now"
- })).get("last_seq", None)
-
-def heartbeat(platform, archs):
- while True:
- status = {}
-
- try:
- status = GET("/heartbeat")
- except:
- time.sleep(15)
-
- time_now = time.time() * 1000.0
-
- for arch in archs:
- idkey = json.dumps([arch] + platform)
-
- if idkey not in status or status[idkey] < time_now:
- status.update({idkey: time_now})
-
- try:
- if "rev" in status:
- status["_rev"] = status.pop("rev")
-
- PUT("/heartbeat/", status)
- return
- except:
- time.sleep(5)
-
-db_seq = "now"
-
-while True:
- for arch in cfg["architectures"]:
- try:
- id = [arch] + cfg["platform"]
-
- startkey = ["pending", cfg["platform"], arch]
-
- db_seq = current_seq()
- print("[%s] Checking for jobs..." % arch)
-
- jobs = GET(db_view("status", {
- "startkey": json.dumps(startkey),
- "endkey": json.dumps(startkey + [{}])
- }))
-
- for row in jobs["rows"]:
- job_id = row["id"]
-
- job = GET("/" + job_id)
-
- if "error" in job:
- print("Could not retrieve job")
- continue
-
- job["status"] = "building"
-
- tmpdir = tempfile.mkdtemp(prefix = "kronos." + arch + ".")
-
- artifact_dir = tmpdir + "/artifacts"
- build_dir = tmpdir + "/build"
-
- os.makedirs(artifact_dir)
- os.makedirs(build_dir)
-
- os.chdir(tmpdir)
-
- with open("source.k","w") as write_source:
- write_source.write(job.pop("source", ""))
-
- claim = PUT("/" + urllib.quote_plus(job_id) + "/", job)
-
- if "error" in claim:
- print("Could not claim job.")
- continue
-
- try:
-
- job.pop("_id")
- job.pop("_rev")
-
- job["package"] = cmake.sanitize(job["title"].split()[0])
-
-
- print("* %s [%s] (%s) : %s" % (job["title"], arch, job_id, tmpdir))
-
- for mode in ["_RELEASE", "_DEBUG", ""]:
- for bt in ["RUNTIME", "LIBRARY"]:
- cfg["cmake"].update({ "CMAKE_" + bt + "_OUTPUT_DIRECTORY" + mode: artifact_dir })
-
- cfg["builder_assets"] = script_dir + "/" + arch
- builders[arch].build(cfg, job)
-
- zip_path = job["package"] + ".zip"
-
- with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zip_handle:
- for root, dirs, files in os.walk(tmpdir + "/artifacts"):
- for file in files:
- zip_handle.write(os.path.join(root, file), arcname = file)
-
- job["status"] = "success"
- job["url"] = "job/" + job_id + "/" + zip_path
-
- with open(zip_path, "rb") as zipped:
- job["_attachments"] = {
- zip_path: {
- "content_type": "application/octet-stream",
- "data": base64.b64encode(zipped.read())
- }
- }
-
-# shutil.rmtree(tmpdir)
-
- except ValueError as ve:
- job["status"] = "failure"
- job["log"] = ve.message
- except:
- job["status"] = "failure"
- job["log"] = traceback.format_exc()
- print(json.dumps(job))
-
-
- job["_rev"] = claim["rev"]
- PUT("/" + job_id + "/", job)
-
-
-
- except:
- print(sys.exc_info())
- print(traceback.format_exc())
-
- while True:
- heartbeat(cfg["platform"], cfg["architectures"])
-
- changes = POST("/_changes?" + urllib.urlencode({
- "limit": 1,
- "since": db_seq,
- "feed": "longpoll",
- "filter": "_selector"
- }), {
- "selector": {
- "_id": { "$ne": "heartbeat" }
- }
- })
-
- if "error" in changes:
- print(changes["error"])
-
- else:
- db_seq = changes.get("last_seq", "now")
-
- if changes.get("results", None):
- break
-
-
-
-
-
-
-
-
-
M cmake.py +9 -3
@@ 25,8 25,15 @@ def bracket(s):
def dict_to_vars(d):
config = "# Generated configuration #\n"
for key in d:
- config = config + "SET(" + key + " " + encode_str(d[key]) + \
- " CACHE STRING \"generated\")\n"
+
+ config = config + "SET(" + key + " "
+
+ value = d[key]
+ if isinstance(value, list):
+ config = config + " ".join([encode_str(x) for x in value])
+ else:
+ config = config + encode_str(value)
+ config = config + " CACHE STRING \"generated\")\n"
return config + "# Generated configuration #\n\n"
@@ 35,7 42,6 @@ def sanitize(s):
def subproc(args, cwd = None):
- print(args)
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr = subprocess.STDOUT, universal_newlines = True, cwd = cwd)
log = ""
A => config.py +18 -0
@@ 0,0 1,18 @@
+import json, platform, sys, os
+
+cfg = {}
+
+script_dir = os.path.dirname(os.path.realpath(__file__)).replace("\\","/")
+
+def merge_config(filename):
+ to_merge = json.load(open(filename))
+ if to_merge:
+ cfg.update(to_merge)
+
+def initialize(config_file):
+ cfg["platform"] = [platform.system() + " " + platform.release(), platform.machine(), platform.version()]
+ cfg["python"] = sys.executable.replace("\\","/")
+ cfg["common_assets"] = os.path.dirname(os.path.realpath(__file__)).replace("\\","/") + "/Common"
+
+ merge_config("system-defaults.json")
+ merge_config(config_file)
No newline at end of file
A => service.py +218 -0
@@ 0,0 1,218 @@
+from __future__ import print_function
+
+import subprocess, os, platform, io, sys, shutil, tempfile, zipfile, traceback
+import httplib, urllib, importlib
+import datetime, time
+import base64, glob, re, json, pprint
+
+import cmake
+import build
+import argparse
+
+import config
+from config import cfg, script_dir
+
+parser = argparse.ArgumentParser(description="Run a Veneer builder service")
+parser.add_argument("-c", "--configuration", metavar="<path>", type=str, default="system-config.json",
+ help = "path to build system configuration file (.json)")
+parser.add_argument("-s", "--show", default=False, action="store_true", help="Print the merged configuration to stdout")
+parser.add_argument("-D", "--define", metavar="<string>", nargs=2, action="append",
+ help = "override a key-value pair in the configuration, such as '-D buildmaster localhost'")
+
+args = vars(parser.parse_args())
+couchdb = None
+authorization_header = None
+
+print(args)
+
+def request(method, selector = "/", body = None, pheaders = {}):
+ couchdb.connect()
+
+ headers = {
+ "Referer": "http://127.0.0.1",
+ "Content-Type": "application/json",
+ "Accept": "application/json"
+ }
+
+ headers.update(pheaders)
+
+ if not "Authorization" in headers:
+ headers["Authorization"] = authorization_header
+
+ if headers["Content-Type"] == "application/json":
+ body = json.dumps(body)
+
+ couchdb.request(method, selector, body, headers)
+
+ resp = couchdb.getresponse()
+ status = resp.status
+ body = resp.read()
+
+ if re.search("^application/json", resp.getheader("Content-Type", "")):
+ body = json.loads(body)
+
+ if "error" in body:
+ print("%i: %s %s: %s" % (status, method, selector, body))
+ raise RuntimeError(body)
+
+ return body
+ else:
+ return { "error": resp, "status": resp.status }
+
+
+def GET(selector = "/", headers = {}):
+ page = "/" + cfg["database"] + selector
+ return request("GET", page, None, headers)
+
+def PUT(selector, body, headers = {}):
+ return request("PUT", "/" + cfg["database"] + selector, body, headers)
+
+def POST(selector, body, headers = {}):
+ return request("POST", "/" + cfg["database"] + selector, body, headers)
+
+def range_query(start, end = None):
+ if end is None:
+ end = start + [{}]
+
+ return "startkey=%s&endkey=%s" % (
+ urllib.quote_plus(json.dumps(start)),
+ urllib.quote_plus(json.dumps(end)))
+
+config.initialize(args["configuration"])
+
+if args["define"] is not None:
+ for item in args["define"]:
+ cfg[item[0]] = item[1]
+
+if (args["show"]):
+ cshow = dict(cfg)
+ if "password" in cshow:
+ cshow["password"] = "*" * len(cshow["password"])
+ pprint.pprint(cshow)
+
+
+if "password" in cfg:
+ authorization_header = "Basic " + base64.b64encode(cfg.pop("username","") + ":" + cfg.pop("password", ""))
+
+if cfg["scheme"] == "https":
+ print("Buildmaster: https://" + cfg["buildmaster"])
+ couchdb = httplib.HTTPSConnection(cfg["buildmaster"])
+else:
+ print("Buildmaster: http://" + cfg["buildmaster"])
+ couchdb = httplib.HTTPConnection(cfg["buildmaster"])
+
+print(platform.platform(terse=1))
+print("Building %s externs for %s" % (cfg["architectures"], cfg["platform"]))
+
+builders = {}
+
+for arch in cfg["architectures"]:
+ builders[arch] = importlib.import_module(arch)
+ builders[arch].banner()
+
+def db_view(name, params):
+ query = urllib.urlencode(params)
+ if query:
+ query = "?" + query
+ return "/_design/d/_view/" + name + query;
+
+def current_seq():
+ return GET("/_changes?" + urllib.urlencode({
+ "limit": 0,
+ "since": "now"
+ })).get("last_seq", None)
+
+def heartbeat(platform, archs):
+ while True:
+ status = {}
+
+ try:
+ status = GET("/heartbeat")
+ except:
+ time.sleep(15)
+
+ time_now = time.time() * 1000.0
+
+ for arch in archs:
+ idkey = json.dumps([arch] + platform)
+
+ if idkey not in status or status[idkey] < time_now:
+ status.update({idkey: time_now})
+
+ try:
+ if "rev" in status:
+ status["_rev"] = status.pop("rev")
+
+ PUT("/heartbeat/", status)
+ return
+ except:
+ time.sleep(5)
+
+db_seq = "now"
+
+while True:
+ for arch in cfg["architectures"]:
+ id = [arch] + cfg["platform"]
+ cfg["builder_assets"] = script_dir + "/" + arch
+
+ startkey = ["pending", cfg["platform"], arch]
+
+ db_seq = current_seq()
+ print("[%s] Checking for jobs..." % arch)
+
+ jobs = GET(db_view("status", {
+ "startkey": json.dumps(startkey),
+ "endkey": json.dumps(startkey + [{}])
+ }))
+
+ for row in jobs["rows"]:
+ job_id = row["id"]
+ job = GET("/" + job_id)
+
+ if "error" in job:
+ print("Could not retrieve job")
+ continue
+
+ def progress(job):
+ update = PUT("/" + urllib.quote_plus(job_id) + "/", job)
+
+ if "error" in update:
+ raise ValueError("'%s' while updating job %s status" % (update["error"], job_id))
+
+ job["_rev"] = update["rev"]
+
+ artifact = build.build(builders[arch], job_id, job, progress)
+
+ if artifact:
+ with open(artifact, "rb") as zipped:
+ job["_attachments"] = {
+ artifact: {
+ "content_type": "application/octet-stream",
+ "data": base64.b64encode(zipped.read())
+ }
+ }
+
+ progress(job)
+
+ while True:
+ heartbeat(cfg["platform"], cfg["architectures"])
+
+ changes = POST("/_changes?" + urllib.urlencode({
+ "limit": 1,
+ "since": db_seq,
+ "feed": "longpoll",
+ "filter": "_selector"
+ }), {
+ "selector": {
+ "_id": { "$ne": "heartbeat" }
+ }
+ })
+
+ if "error" in changes:
+ print(changes["error"])
+
+ else:
+ db_seq = changes.get("last_seq", "now")
+
+ if changes.get("results", None):
+ break
No newline at end of file
M system-defaults.json +2 -1
@@ 1,5 1,6 @@
{
"scheme": "https",
"buildmaster": "db.kronoslang.io",
- "database": "builder"
+ "database": "builder",
+ "cmake": { }
}
No newline at end of file