author    | Elizabeth Hunt <me@liz.coffee> | 2025-07-20 17:21:38 -0700
committer | Elizabeth Hunt <me@liz.coffee> | 2025-07-20 18:31:57 -0700
commit    | bfdef4064b4a172a2027f3813ab88f38728d61c0 (patch)
tree      | 94babba33cd28aaacad39b49936b803486784a73
parent    | 9e220eca4545982df83ffcaa66a9b050a3d6f24e (diff)
download  | ci-bfdef4064b4a172a2027f3813ab88f38728d61c0.tar.gz, ci-bfdef4064b4a172a2027f3813ab88f38728d61c0.zip
Fixes
-rwxr-xr-x | .ci/ci.cjs                           | 992
-rw-r--r-- | model/job/jobs.ts                    |   1
-rw-r--r-- | u/trace/itrace.ts                    |   4
-rw-r--r-- | worker/fs.ts                         |  29
-rw-r--r-- | worker/index.ts                      |   1
-rwxr-xr-x | worker/jobs/ci_pipeline.run          |  10
-rwxr-xr-x | worker/scripts/build_docker_image.ts |  24
-rwxr-xr-x | worker/scripts/checkout_ci.ts        |  48
8 files changed, 557 insertions, 552 deletions
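
For orientation before the raw diff: most of the churn in `.ci/ci.cjs` is a quote-style and indentation re-format of the bundled pipeline synthesizer, not a behaviour change. The synthesizer writes a pipeline to stdout as JSON; a hand-written sketch of that shape for a hypothetical push to `release` (the `remote`/`rev` values below are invented, and later stages are abbreviated in comments) looks roughly like:

```typescript
// Sketch only — reconstructed from getPipeline() in the bundle below, not output captured from it.
const examplePipeline = {
    serialJobs: [
        // Stage 1 (added by the DefaultGitHookPipelineBuilder constructor): clone the pushed revision.
        {
            parallelJobs: [
                {
                    type: 'fetch_code',
                    arguments: { remoteUrl: 'ssh://src.liz.coffee:2222/ci', checkout: '<rev>', path: 'ci' },
                },
            ],
        },
        // Stage 2: build the shared base image.
        {
            parallelJobs: [
                {
                    type: 'build_docker_image.js',
                    arguments: {
                        registry: 'oci.liz.coffee',
                        namespace: 'emprespresso',
                        imageTag: 'release',
                        context: 'ci',
                        repository: 'ci_base',
                        buildTarget: 'ci_base',
                        dockerfile: 'Dockerfile',
                    },
                },
            ],
        },
        // Stage 3: the 'worker' and 'hooks' images, built in parallel with the same common arguments.
        // Stages 4 and 5 (release branch only): fetch_code for the infra repo, then ansible_playbook.js
        // against playbooks/ci.yml.
    ],
};
```

Each `serialJobs` entry runs after the previous one; the jobs inside `parallelJobs` run concurrently, which is how the worker and hooks images build side by side.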
@@ -1,605 +1,621 @@ #!/usr/bin/env node -"use strict"; +'use strict'; // ../u/leftpadesque/debug.ts var _hasEnv = true; -var _env = _hasEnv && (process.env.ENVIRONMENT ?? "").toLowerCase().includes("prod") ? "production" : "development"; -var isProd = () => _env === "production"; -var _debug = !isProd() || _hasEnv && ["y", "t"].some((process.env.DEBUG ?? "").toLowerCase().startsWith); +var _env = _hasEnv && (process.env.ENVIRONMENT ?? '').toLowerCase().includes('prod') ? 'production' : 'development'; +var isProd = () => _env === 'production'; +var _debug = !isProd() || (_hasEnv && ['y', 't'].some((process.env.DEBUG ?? '').toLowerCase().startsWith)); var isDebug = () => _debug; // ../u/leftpadesque/memoize.ts var memoize = (fn) => { - const cache = /* @__PURE__ */ new Map(); - return (...args) => { - const key = JSON.stringify(args); - if (cache.has(key)) { - return cache.get(key); - } - const res = fn.apply(args); - cache.set(key, res); - return res; - }; + const cache = /* @__PURE__ */ new Map(); + return (...args) => { + const key = JSON.stringify(args); + if (cache.has(key)) { + return cache.get(key); + } + const res = fn.apply(args); + cache.set(key, res); + return res; + }; }; // ../u/trace/itrace.ts var TraceableImpl = class _TraceableImpl { - constructor(item, trace) { - this.item = item; - this.trace = trace; - } - map(mapper) { - const result = mapper(this); - return new _TraceableImpl(result, this.trace); - } - coExtend(mapper) { - const results = mapper(this); - return Array.from(results).map((result) => this.move(result)); - } - flatMap(mapper) { - return mapper(this); - } - flatMapAsync(mapper) { - return new _TraceableImpl( - mapper(this).then((t) => t.get()), - this.trace - ); - } - traceScope(mapper) { - return new _TraceableImpl(this.get(), this.trace.traceScope(mapper(this))); - } - peek(peek) { - peek(this); - return this; - } - move(t) { - return this.map(() => t); - } - bimap(mapper) { - const { item, trace: _trace } = mapper(this); - return this.move(item).traceScope(() => _trace); - } - get() { - return this.item; - } + constructor(item, trace) { + this.item = item; + this.trace = trace; + } + map(mapper) { + const result = mapper(this); + return new _TraceableImpl(result, this.trace); + } + coExtend(mapper) { + const results = mapper(this); + return Array.from(results).map((result) => this.move(result)); + } + flatMap(mapper) { + return mapper(this); + } + flatMapAsync(mapper) { + return new _TraceableImpl( + mapper(this).then((t) => t.get()), + this.trace, + ); + } + traceScope(mapper) { + return new _TraceableImpl(this.get(), this.trace.traceScope(mapper(this))); + } + peek(peek) { + peek(this); + return this; + } + move(t) { + return this.map(() => t); + } + bimap(mapper) { + const { item, trace: _trace } = mapper(this); + return this.move(item).traceScope(() => _trace); + } + get() { + return this.item; + } }; // ../u/trace/metric/emittable.ts var EmittableMetric = class { - constructor(name, unit) { - this.name = name; - this.unit = unit; - } - withValue(value) { - return { - name: this.name, - unit: this.unit, - emissionTimestamp: Date.now(), - value, - _tag: MetricValueTag - }; - } + constructor(name, unit) { + this.name = name; + this.unit = unit; + } + withValue(value) { + return { + name: this.name, + unit: this.unit, + emissionTimestamp: Date.now(), + value, + _tag: MetricValueTag, + }; + } }; // ../u/trace/metric/metric.ts var _Tagged = class { - constructor(_tag = IMetricTag) { - this._tag = _tag; - } + constructor(_tag = IMetricTag) { + this._tag = 
_tag; + } }; var Metric = class _Metric extends _Tagged { - constructor(name, parent = void 0, count = new EmittableMetric(_Metric.join(name, "count"), "COUNT" /* COUNT */), time = new EmittableMetric(_Metric.join(name, "time"), "MILLISECONDS" /* MILLISECONDS */)) { - super(); - this.name = name; - this.parent = parent; - this.count = count; - this.time = time; - } - static DELIM = "."; - child(_name) { - const childName = _Metric.join(this.name, _name); - return new _Metric(childName, this); - } - asResult() { - return ResultMetric.from(this); - } - static join(...name) { - return name.join(_Metric.DELIM); - } - static fromName(name) { - return new _Metric(name); - } + constructor( + name, + parent = void 0, + count = new EmittableMetric(_Metric.join(name, 'count'), 'COUNT' /* COUNT */), + time = new EmittableMetric(_Metric.join(name, 'time'), 'MILLISECONDS' /* MILLISECONDS */), + ) { + super(); + this.name = name; + this.parent = parent; + this.count = count; + this.time = time; + } + static DELIM = '.'; + child(_name) { + const childName = _Metric.join(this.name, _name); + return new _Metric(childName, this); + } + asResult() { + return ResultMetric.from(this); + } + static join(...name) { + return name.join(_Metric.DELIM); + } + static fromName(name) { + return new _Metric(name); + } }; var ResultMetric = class _ResultMetric extends Metric { - constructor(name, parent = void 0, failure, success, warn) { - super(name, parent); - this.name = name; - this.parent = parent; - this.failure = failure; - this.success = success; - this.warn = warn; - } - static from(metric) { - const failure = metric.child("failure"); - const success = metric.child("success"); - const warn = metric.child("warn"); - return new _ResultMetric(metric.name, metric.parent, failure, success, warn); - } + constructor(name, parent = void 0, failure, success, warn) { + super(name, parent); + this.name = name; + this.parent = parent; + this.failure = failure; + this.success = success; + this.warn = warn; + } + static from(metric) { + const failure = metric.child('failure'); + const success = metric.child('success'); + const warn = metric.child('warn'); + return new _ResultMetric(metric.name, metric.parent, failure, success, warn); + } }; // ../u/trace/metric/trace.ts var MetricsTrace = class _MetricsTrace { - constructor(metricConsumer, activeTraces = /* @__PURE__ */ new Map(), completedTraces = /* @__PURE__ */ new Set()) { - this.metricConsumer = metricConsumer; - this.activeTraces = activeTraces; - this.completedTraces = completedTraces; - } - traceScope(trace) { - const now = Date.now(); - const metricsToTrace = (Array.isArray(trace) ? trace : [trace]).filter(isIMetric); - const initialTraces = new Map(metricsToTrace.map((metric) => [metric, now])); - return new _MetricsTrace(this.metricConsumer, initialTraces); - } - trace(metrics) { - if (!metrics || typeof metrics === "string") { - return this; - } - const now = Date.now(); - const allMetrics = Array.isArray(metrics) ? 
metrics : [metrics]; - const valuesToEmit = allMetrics.filter(isMetricValue); - const traceableMetrics = allMetrics.filter(isIMetric); - const metricsToStart = traceableMetrics.filter((m) => !this.activeTraces.has(m)); - const metricsToEnd = traceableMetrics.filter((m) => this.activeTraces.has(m) && !this.completedTraces.has(m)); - const endedMetricValues = metricsToEnd.flatMap((metric) => [ - metric.count.withValue(1), - metric.time.withValue(now - this.activeTraces.get(metric)) - ]); - const allMetricsToEmit = [...valuesToEmit, ...endedMetricValues]; - if (allMetricsToEmit.length > 0) { - this.metricConsumer(allMetricsToEmit); - } - const nextActiveTraces = new Map([ - ...this.activeTraces, - ...metricsToStart.map((m) => [m, now]) - ]); - const nextCompletedTraces = /* @__PURE__ */ new Set([...this.completedTraces, ...metricsToEnd]); - return new _MetricsTrace(this.metricConsumer, nextActiveTraces, nextCompletedTraces); - } + constructor(metricConsumer, activeTraces = /* @__PURE__ */ new Map(), completedTraces = /* @__PURE__ */ new Set()) { + this.metricConsumer = metricConsumer; + this.activeTraces = activeTraces; + this.completedTraces = completedTraces; + } + traceScope(trace) { + const now = Date.now(); + const metricsToTrace = (Array.isArray(trace) ? trace : [trace]).filter(isIMetric); + const initialTraces = new Map(metricsToTrace.map((metric) => [metric, now])); + return new _MetricsTrace(this.metricConsumer, initialTraces); + } + trace(metrics) { + if (!metrics || typeof metrics === 'string') { + return this; + } + const now = Date.now(); + const allMetrics = Array.isArray(metrics) ? metrics : [metrics]; + const valuesToEmit = allMetrics.filter(isMetricValue); + const traceableMetrics = allMetrics.filter(isIMetric); + const metricsToStart = traceableMetrics.filter((m) => !this.activeTraces.has(m)); + const metricsToEnd = traceableMetrics.filter((m) => this.activeTraces.has(m) && !this.completedTraces.has(m)); + const endedMetricValues = metricsToEnd.flatMap((metric) => [ + metric.count.withValue(1), + metric.time.withValue(now - this.activeTraces.get(metric)), + ]); + const allMetricsToEmit = [...valuesToEmit, ...endedMetricValues]; + if (allMetricsToEmit.length > 0) { + this.metricConsumer(allMetricsToEmit); + } + const nextActiveTraces = new Map([...this.activeTraces, ...metricsToStart.map((m) => [m, now])]); + const nextCompletedTraces = /* @__PURE__ */ new Set([...this.completedTraces, ...metricsToEnd]); + return new _MetricsTrace(this.metricConsumer, nextActiveTraces, nextCompletedTraces); + } }; // ../u/trace/metric/index.ts -var MetricValueTag = "MetricValue"; +var MetricValueTag = 'MetricValue'; var isMetricValue = (t) => isTagged(t, MetricValueTag); -var IMetricTag = "IMetric"; +var IMetricTag = 'IMetric'; var isIMetric = (t) => isTagged(t, IMetricTag); // ../u/trace/log/ansi.ts var ANSI = { - RESET: "\x1B[0m", - BOLD: "\x1B[1m", - DIM: "\x1B[2m", - RED: "\x1B[31m", - GREEN: "\x1B[32m", - YELLOW: "\x1B[33m", - BLUE: "\x1B[34m", - MAGENTA: "\x1B[35m", - CYAN: "\x1B[36m", - WHITE: "\x1B[37m", - BRIGHT_RED: "\x1B[91m", - BRIGHT_YELLOW: "\x1B[93m", - GRAY: "\x1B[90m" + RESET: '\x1B[0m', + BOLD: '\x1B[1m', + DIM: '\x1B[2m', + RED: '\x1B[31m', + GREEN: '\x1B[32m', + YELLOW: '\x1B[33m', + BLUE: '\x1B[34m', + MAGENTA: '\x1B[35m', + CYAN: '\x1B[36m', + WHITE: '\x1B[37m', + BRIGHT_RED: '\x1B[91m', + BRIGHT_YELLOW: '\x1B[93m', + GRAY: '\x1B[90m', }; // ../u/trace/log/level.ts -var logLevelOrder = [ - "DEBUG" /* DEBUG */, - "INFO" /* INFO */, - "WARN" /* WARN */, - "ERROR" /* ERROR 
*/, - "SYS" /* SYS */ -]; -var isLogLevel = (l) => typeof l === "string" && logLevelOrder.some((level) => level === l); +var logLevelOrder = ['DEBUG' /* DEBUG */, 'INFO' /* INFO */, 'WARN' /* WARN */, 'ERROR' /* ERROR */, 'SYS' /* SYS */]; +var isLogLevel = (l) => typeof l === 'string' && logLevelOrder.some((level) => level === l); // ../u/trace/log/pretty_json_console.ts var PrettyJsonConsoleLogger = class { - log(level, ...trace) { - const message = JSON.stringify( - { - level, - trace - }, - null, - 4 - ); - const styled = `${this.getStyle(level)}${message}${ANSI.RESET} + log(level, ...trace) { + const message = JSON.stringify( + { + level, + trace, + }, + null, + 4, + ); + const styled = `${this.getStyle(level)}${message}${ANSI.RESET} `; - this.getStream(level)(styled); - } - getStream(level) { - if (level === "ERROR" /* ERROR */) { - return console.error; - } - return console.log; - } - getStyle(level) { - switch (level) { - case "UNKNOWN" /* UNKNOWN */: - case "INFO" /* INFO */: - return `${ANSI.MAGENTA}`; - case "DEBUG" /* DEBUG */: - return `${ANSI.CYAN}`; - case "WARN" /* WARN */: - return `${ANSI.BRIGHT_YELLOW}`; - case "ERROR" /* ERROR */: - return `${ANSI.BRIGHT_RED}`; - case "SYS" /* SYS */: - return `${ANSI.DIM}${ANSI.BLUE}`; - } - } + this.getStream(level)(styled); + } + getStream(level) { + if (level === 'ERROR' /* ERROR */) { + return console.error; + } + return console.log; + } + getStyle(level) { + switch (level) { + case 'UNKNOWN' /* UNKNOWN */: + case 'INFO' /* INFO */: + return `${ANSI.MAGENTA}`; + case 'DEBUG' /* DEBUG */: + return `${ANSI.CYAN}`; + case 'WARN' /* WARN */: + return `${ANSI.BRIGHT_YELLOW}`; + case 'ERROR' /* ERROR */: + return `${ANSI.BRIGHT_RED}`; + case 'SYS' /* SYS */: + return `${ANSI.DIM}${ANSI.BLUE}`; + } + } }; // ../u/trace/log/trace.ts var LogTrace = class _LogTrace { - constructor(logger = new PrettyJsonConsoleLogger(), traces = [defaultTrace], defaultLevel = "INFO" /* INFO */, allowedLevels = defaultAllowedLevelsSupplier) { - this.logger = logger; - this.traces = traces; - this.defaultLevel = defaultLevel; - this.allowedLevels = allowedLevels; - } - traceScope(trace) { - return new _LogTrace(this.logger, this.traces.concat(trace), this.defaultLevel, this.allowedLevels); - } - trace(trace) { - const { traces, level: _level } = this.foldTraces(this.traces.concat(trace)); - if (!this.allowedLevels().has(_level)) return; - const level = _level === "UNKNOWN" /* UNKNOWN */ ? this.defaultLevel : _level; - this.logger.log(level, ...traces); - } - foldTraces(_traces) { - const _logTraces = _traces.map((trace) => typeof trace === "function" ? trace() : trace); - const _level = _logTraces.filter((trace) => isLogLevel(trace)).reduce((acc, level2) => Math.max(logLevelOrder.indexOf(level2), acc), -1); - const level = logLevelOrder[_level] ?? 
"UNKNOWN" /* UNKNOWN */; - const traces = _logTraces.filter((trace) => !isLogLevel(trace)).map((trace) => { - if (typeof trace === "object") { - return `TracedException.Name = ${trace.name}, TracedException.Message = ${trace.message}, TracedException.Stack = ${trace.stack}`; - } - return trace; - }); - return { - level, - traces - }; - } + constructor( + logger = new PrettyJsonConsoleLogger(), + traces = [defaultTrace], + defaultLevel = 'INFO' /* INFO */, + allowedLevels = defaultAllowedLevelsSupplier, + ) { + this.logger = logger; + this.traces = traces; + this.defaultLevel = defaultLevel; + this.allowedLevels = allowedLevels; + } + traceScope(trace) { + return new _LogTrace(this.logger, this.traces.concat(trace), this.defaultLevel, this.allowedLevels); + } + trace(trace) { + const { traces, level: _level } = this.foldTraces(this.traces.concat(trace)); + if (!this.allowedLevels().has(_level)) return; + const level = _level === 'UNKNOWN' /* UNKNOWN */ ? this.defaultLevel : _level; + this.logger.log(level, ...traces); + } + foldTraces(_traces) { + const _logTraces = _traces.map((trace) => (typeof trace === 'function' ? trace() : trace)); + const _level = _logTraces + .filter((trace) => isLogLevel(trace)) + .reduce((acc, level2) => Math.max(logLevelOrder.indexOf(level2), acc), -1); + const level = logLevelOrder[_level] ?? 'UNKNOWN'; /* UNKNOWN */ + const traces = _logTraces + .filter((trace) => !isLogLevel(trace)) + .map((trace) => { + if (typeof trace === 'object') { + return `TracedException.Name = ${trace.name}, TracedException.Message = ${trace.message}, TracedException.Stack = ${trace.stack}`; + } + return trace; + }); + return { + level, + traces, + }; + } }; -var defaultTrace = () => `TimeStamp = ${(/* @__PURE__ */ new Date()).toISOString()}`; +var defaultTrace = () => `TimeStamp = ${/* @__PURE__ */ new Date().toISOString()}`; var defaultAllowedLevels = memoize( - (isDebug2) => /* @__PURE__ */ new Set([ - "UNKNOWN" /* UNKNOWN */, - ...isDebug2 ? ["DEBUG" /* DEBUG */] : [], - "INFO" /* INFO */, - "WARN" /* WARN */, - "ERROR" /* ERROR */, - "SYS" /* SYS */ - ]) + (isDebug2) => + /* @__PURE__ */ new Set([ + 'UNKNOWN' /* UNKNOWN */, + ...(isDebug2 ? 
['DEBUG' /* DEBUG */] : []), + 'INFO' /* INFO */, + 'WARN' /* WARN */, + 'ERROR' /* ERROR */, + 'SYS' /* SYS */, + ]), ); var defaultAllowedLevelsSupplier = () => defaultAllowedLevels(isDebug()); // ../u/trace/trace.ts var LogTraceable = class _LogTraceable extends TraceableImpl { - static LogTrace = new LogTrace(); - static of(t) { - return new _LogTraceable(t, _LogTraceable.LogTrace); - } + static LogTrace = new LogTrace(); + static of(t) { + return new _LogTraceable(t, _LogTraceable.LogTrace); + } }; var getEmbeddedMetricConsumer = (logTrace) => (metrics) => { - if (metrics.length === 0) return; - logTrace.traceScope("SYS" /* SYS */).trace(`Metrics = <metrics>${JSON.stringify(metrics)}</metrics>`); + if (metrics.length === 0) return; + logTrace.traceScope('SYS' /* SYS */).trace(`Metrics = <metrics>${JSON.stringify(metrics)}</metrics>`); }; var EmbeddedMetricsTraceable = class _EmbeddedMetricsTraceable extends TraceableImpl { - static MetricsTrace = new MetricsTrace(getEmbeddedMetricConsumer(LogTraceable.LogTrace)); - static of(t, metricsTrace = _EmbeddedMetricsTraceable.MetricsTrace) { - return new _EmbeddedMetricsTraceable(t, metricsTrace); - } + static MetricsTrace = new MetricsTrace(getEmbeddedMetricConsumer(LogTraceable.LogTrace)); + static of(t, metricsTrace = _EmbeddedMetricsTraceable.MetricsTrace) { + return new _EmbeddedMetricsTraceable(t, metricsTrace); + } }; // ../u/process/exec.ts -var import_node_util = require("node:util"); -var import_node_child_process = require("node:child_process"); +var import_node_util = require('node:util'); +var import_node_child_process = require('node:child_process'); var exec = (0, import_node_util.promisify)(import_node_child_process.exec); -var CmdMetric = Metric.fromName("Exec").asResult(); +var CmdMetric = Metric.fromName('Exec').asResult(); // ../u/process/signals.ts -var SigIntMetric = Metric.fromName("SigInt").asResult(); -var SigTermMetric = Metric.fromName("SigTerm").asResult(); +var SigIntMetric = Metric.fromName('SigInt').asResult(); +var SigTermMetric = Metric.fromName('SigTerm').asResult(); // ../u/server/response/pengueno.ts var ResponseCodeMetrics = [0, 1, 2, 3, 4, 5].map((x) => Metric.fromName(`response.${x}xx`).asResult()); // ../u/server/activity/health.ts -var healthCheckMetric = Metric.fromName("Health").asResult(); +var healthCheckMetric = Metric.fromName('Health').asResult(); // ../u/server/filter/json.ts -var ParseJsonMetric = Metric.fromName("JsonParse").asResult(); +var ParseJsonMetric = Metric.fromName('JsonParse').asResult(); // ../u/server/filter/index.ts var ErrorSource = ((ErrorSource2) => { - ErrorSource2[ErrorSource2["USER"] = "WARN" /* WARN */] = "USER"; - ErrorSource2[ErrorSource2["SYSTEM"] = "ERROR" /* ERROR */] = "SYSTEM"; - return ErrorSource2; + ErrorSource2[(ErrorSource2['USER'] = 'WARN') /* WARN */] = 'USER'; + ErrorSource2[(ErrorSource2['SYSTEM'] = 'ERROR') /* ERROR */] = 'SYSTEM'; + return ErrorSource2; })(ErrorSource || {}); // ../u/types/object.ts -var isObject = (o) => typeof o === "object" && !Array.isArray(o) && !!o; +var isObject = (o) => typeof o === 'object' && !Array.isArray(o) && !!o; // ../u/types/tagged.ts -var isTagged = (o, tag) => !!(isObject(o) && "_tag" in o && o._tag === tag); +var isTagged = (o, tag) => !!(isObject(o) && '_tag' in o && o._tag === tag); // ../u/types/fn/either.ts -var IEitherTag = "IEither"; -var ELeftTag = "E.Left"; +var IEitherTag = 'IEither'; +var ELeftTag = 'E.Left'; var isLeft = (o) => isTagged(o, ELeftTag); -var ERightTag = "E.Right"; +var ERightTag = 'E.Right'; 
var isRight = (o) => isTagged(o, ERightTag); var _Tagged2 = class { - constructor(_tag = IEitherTag) { - this._tag = _tag; - } + constructor(_tag = IEitherTag) { + this._tag = _tag; + } }; var Either = class _Either extends _Tagged2 { - constructor(self) { - super(); - this.self = self; - } - moveRight(t) { - return this.mapRight(() => t); - } - mapBoth(errBranch, okBranch) { - if (isLeft(this.self)) return _Either.left(errBranch(this.self.err)); - return _Either.right(okBranch(this.self.ok)); - } - mapRight(mapper) { - if (isRight(this.self)) return _Either.right(mapper(this.self.ok)); - return _Either.left(this.self.err); - } - mapLeft(mapper) { - if (isLeft(this.self)) return _Either.left(mapper(this.self.err)); - return _Either.right(this.self.ok); - } - flatMap(mapper) { - if (isRight(this.self)) return mapper(this.self.ok); - return _Either.left(this.self.err); - } - filter(mapper) { - if (isLeft(this.self)) return _Either.left(this.self.err); - return _Either.fromFailable(() => this.right().filter(mapper).get()); - } - async flatMapAsync(mapper) { - if (isLeft(this.self)) return Promise.resolve(_Either.left(this.self.err)); - return await mapper(this.self.ok).catch((err) => _Either.left(err)); - } - fold(leftFolder, rightFolder) { - if (isLeft(this.self)) return leftFolder(this.self.err); - return rightFolder(this.self.ok); - } - left() { - if (isLeft(this.self)) return Optional.from(this.self.err); - return Optional.none(); - } - right() { - if (isRight(this.self)) return Optional.from(this.self.ok); - return Optional.none(); - } - joinRight(other, mapper) { - return this.flatMap((t) => other.mapRight((o) => mapper(o, t))); - } - joinRightAsync(other, mapper) { - return this.flatMapAsync(async (t) => { - const o = typeof other === "function" ? other() : other; - return o.then((other2) => other2.mapRight((o2) => mapper(o2, t))); - }); - } - static left(e) { - return new _Either({ err: e, _tag: ELeftTag }); - } - static right(t) { - return new _Either({ ok: t, _tag: ERightTag }); - } - static fromFailable(s) { - try { - return _Either.right(s()); - } catch (e) { - return _Either.left(e); - } - } - static async fromFailableAsync(s) { - return await (typeof s === "function" ? 
s() : s).then((t) => _Either.right(t)).catch((e) => _Either.left(e)); - } + constructor(self) { + super(); + this.self = self; + } + moveRight(t) { + return this.mapRight(() => t); + } + mapBoth(errBranch, okBranch) { + if (isLeft(this.self)) return _Either.left(errBranch(this.self.err)); + return _Either.right(okBranch(this.self.ok)); + } + mapRight(mapper) { + if (isRight(this.self)) return _Either.right(mapper(this.self.ok)); + return _Either.left(this.self.err); + } + mapLeft(mapper) { + if (isLeft(this.self)) return _Either.left(mapper(this.self.err)); + return _Either.right(this.self.ok); + } + flatMap(mapper) { + if (isRight(this.self)) return mapper(this.self.ok); + return _Either.left(this.self.err); + } + filter(mapper) { + if (isLeft(this.self)) return _Either.left(this.self.err); + return _Either.fromFailable(() => this.right().filter(mapper).get()); + } + async flatMapAsync(mapper) { + if (isLeft(this.self)) return Promise.resolve(_Either.left(this.self.err)); + return await mapper(this.self.ok).catch((err) => _Either.left(err)); + } + fold(leftFolder, rightFolder) { + if (isLeft(this.self)) return leftFolder(this.self.err); + return rightFolder(this.self.ok); + } + left() { + if (isLeft(this.self)) return Optional.from(this.self.err); + return Optional.none(); + } + right() { + if (isRight(this.self)) return Optional.from(this.self.ok); + return Optional.none(); + } + joinRight(other, mapper) { + return this.flatMap((t) => other.mapRight((o) => mapper(o, t))); + } + joinRightAsync(other, mapper) { + return this.flatMapAsync(async (t) => { + const o = typeof other === 'function' ? other() : other; + return o.then((other2) => other2.mapRight((o2) => mapper(o2, t))); + }); + } + static left(e) { + return new _Either({ err: e, _tag: ELeftTag }); + } + static right(t) { + return new _Either({ ok: t, _tag: ERightTag }); + } + static fromFailable(s) { + try { + return _Either.right(s()); + } catch (e) { + return _Either.left(e); + } + } + static async fromFailableAsync(s) { + return await (typeof s === 'function' ? 
s() : s).then((t) => _Either.right(t)).catch((e) => _Either.left(e)); + } }; // ../u/types/fn/optional.ts -var IOptionalTag = "IOptional"; -var IOptionalEmptyError = class extends Error { -}; -var OSomeTag = "O.Some"; -var ONoneTag = "O.None"; +var IOptionalTag = 'IOptional'; +var IOptionalEmptyError = class extends Error {}; +var OSomeTag = 'O.Some'; +var ONoneTag = 'O.None'; var isNone = (o) => isTagged(o, ONoneTag); var isSome = (o) => isTagged(o, OSomeTag); var _Tagged3 = class { - constructor(_tag = IOptionalTag) { - this._tag = _tag; - } + constructor(_tag = IOptionalTag) { + this._tag = _tag; + } }; var Optional = class _Optional extends _Tagged3 { - constructor(self) { - super(); - this.self = self; - } - move(t) { - return this.map(() => t); - } - orSome(supplier) { - if (isNone(this.self)) return _Optional.from(supplier()); - return this; - } - get() { - if (isNone(this.self)) throw new IOptionalEmptyError("called get() on None optional"); - return this.self.value; - } - filter(mapper) { - if (isNone(this.self) || !mapper(this.self.value)) return _Optional.none(); - return _Optional.some(this.self.value); - } - map(mapper) { - if (isNone(this.self)) return _Optional.none(); - return _Optional.from(mapper(this.self.value)); - } - flatMap(mapper) { - if (isNone(this.self)) return _Optional.none(); - return _Optional.from(mapper(this.self.value)).orSome(() => _Optional.none()).get(); - } - present() { - return isSome(this.self); - } - *[Symbol.iterator]() { - if (isSome(this.self)) yield this.self.value; - } - static some(value) { - return new _Optional({ value, _tag: OSomeTag }); - } - static _none = new _Optional({ _tag: ONoneTag }); - static none() { - return this._none; - } - static from(value) { - if (value === null || value === void 0) return _Optional.none(); - return _Optional.some(value); - } + constructor(self) { + super(); + this.self = self; + } + move(t) { + return this.map(() => t); + } + orSome(supplier) { + if (isNone(this.self)) return _Optional.from(supplier()); + return this; + } + get() { + if (isNone(this.self)) throw new IOptionalEmptyError('called get() on None optional'); + return this.self.value; + } + filter(mapper) { + if (isNone(this.self) || !mapper(this.self.value)) return _Optional.none(); + return _Optional.some(this.self.value); + } + map(mapper) { + if (isNone(this.self)) return _Optional.none(); + return _Optional.from(mapper(this.self.value)); + } + flatMap(mapper) { + if (isNone(this.self)) return _Optional.none(); + return _Optional + .from(mapper(this.self.value)) + .orSome(() => _Optional.none()) + .get(); + } + present() { + return isSome(this.self); + } + *[Symbol.iterator]() { + if (isSome(this.self)) yield this.self.value; + } + static some(value) { + return new _Optional({ value, _tag: OSomeTag }); + } + static _none = new _Optional({ _tag: ONoneTag }); + static none() { + return this._none; + } + static from(value) { + if (value === null || value === void 0) return _Optional.none(); + return _Optional.some(value); + } }; // ../model/job/index.ts -var isJob = (j) => !!(isObject(j) && "arguments" in j && isObject(j.arguments) && "type" in j && typeof j.type === "string" && j); +var isJob = (j) => + !!(isObject(j) && 'arguments' in j && isObject(j.arguments) && 'type' in j && typeof j.type === 'string' && j); // ../model/pipeline/builder.ts var BasePipelineBuilder = class { - stages = []; - addStage(stage) { - this.stages.push(stage); - return this; - } - build() { - return new PipelineImpl(this.stages); - } + stages = []; + addStage(stage) 
{ + this.stages.push(stage); + return this; + } + build() { + return new PipelineImpl(this.stages); + } }; var DefaultGitHookPipelineBuilder = class extends BasePipelineBuilder { - constructor(remoteUrl = process.env.remote, rev = process.env.rev, refname = process.env.refname) { - super(); - this.remoteUrl = remoteUrl; - this.refname = refname; - this.addStage({ - parallelJobs: [ - { - type: "fetch_code", - arguments: { - remoteUrl, - checkout: rev, - path: this.getSourceDestination() - } - } - ] - }); - } - getSourceDestination() { - return this.remoteUrl.split("/").at(-1) ?? "src"; - } - getBranch() { - const branchRefPrefix = "refs/heads/"; - return this.refname.split(branchRefPrefix).at(1); - } + constructor(remoteUrl = process.env.remote, rev = process.env.rev, refname = process.env.refname) { + super(); + this.remoteUrl = remoteUrl; + this.refname = refname; + this.addStage({ + parallelJobs: [ + { + type: 'fetch_code', + arguments: { + remoteUrl, + checkout: rev, + path: this.getSourceDestination(), + }, + }, + ], + }); + } + getSourceDestination() { + return this.remoteUrl.split('/').at(-1) ?? 'src'; + } + getBranch() { + const branchRefPrefix = 'refs/heads/'; + return this.refname.split(branchRefPrefix).at(1); + } }; // ../model/pipeline/impl.ts var PipelineImpl = class _PipelineImpl { - constructor(serialJobs) { - this.serialJobs = serialJobs; - } - serialize() { - return JSON.stringify({ serialJobs: this.serialJobs }); - } - static from(s) { - return Either.fromFailable(() => JSON.parse(s)).flatMap( - (eitherPipelineJson) => isPipeline(eitherPipelineJson) ? Either.right(eitherPipelineJson) : Either.left(new Error("oh noes D: its a bad pipewine :((")) - ).mapRight((pipeline) => new _PipelineImpl(pipeline.serialJobs)); - } + constructor(serialJobs) { + this.serialJobs = serialJobs; + } + serialize() { + return JSON.stringify({ serialJobs: this.serialJobs }); + } + static from(s) { + return Either.fromFailable(() => JSON.parse(s)) + .flatMap((eitherPipelineJson) => + isPipeline(eitherPipelineJson) + ? 
Either.right(eitherPipelineJson) + : Either.left(new Error('oh noes D: its a bad pipewine :((')), + ) + .mapRight((pipeline) => new _PipelineImpl(pipeline.serialJobs)); + } }; // ../model/pipeline/index.ts -var isPipelineStage = (t) => isObject(t) && "parallelJobs" in t && Array.isArray(t.parallelJobs) && t.parallelJobs.every((j) => isJob(j)); -var isPipeline = (t) => isObject(t) && "serialJobs" in t && Array.isArray(t.serialJobs) && t.serialJobs.every((p) => isPipelineStage(p)); +var isPipelineStage = (t) => + isObject(t) && 'parallelJobs' in t && Array.isArray(t.parallelJobs) && t.parallelJobs.every((j) => isJob(j)); +var isPipeline = (t) => + isObject(t) && 'serialJobs' in t && Array.isArray(t.serialJobs) && t.serialJobs.every((p) => isPipelineStage(p)); // dist/ci.js -var REGISTRY = "oci.liz.coffee"; -var NAMESPACE = "emprespresso"; -var IMG = "ci"; -var REMOTE = "ssh://src.liz.coffee:2222"; +var REGISTRY = 'oci.liz.coffee'; +var NAMESPACE = 'emprespresso'; +var IMG = 'ci'; +var REMOTE = 'ssh://src.liz.coffee:2222'; var getPipeline = () => { - const gitHookPipeline = new DefaultGitHookPipelineBuilder(); - const branch = gitHookPipeline.getBranch(); - if (!branch) - return gitHookPipeline.build(); - const commonBuildArgs = { - registry: REGISTRY, - namespace: NAMESPACE, - imageTag: branch - }; - const baseCiPackageBuild = { - type: "build_docker_image.js", - arguments: { - ...commonBuildArgs, - context: gitHookPipeline.getSourceDestination(), - repository: IMG + "_base", - buildTarget: IMG + "_base", - dockerfile: "Dockerfile" - } - }; - gitHookPipeline.addStage({ - parallelJobs: [baseCiPackageBuild] - }); - const subPackages = ["worker", "hooks"].map((_package) => ({ - type: "build_docker_image.js", - arguments: { - ...commonBuildArgs, - repository: `${IMG}_${_package}`, - buildTarget: _package, - dockerfile: `${_package}/Dockerfile` - } - })); - gitHookPipeline.addStage({ - parallelJobs: subPackages - }); - const isRelease = branch === "release"; - if (!isRelease) { + const gitHookPipeline = new DefaultGitHookPipelineBuilder(); + const branch = gitHookPipeline.getBranch(); + if (!branch) return gitHookPipeline.build(); + const commonBuildArgs = { + registry: REGISTRY, + namespace: NAMESPACE, + imageTag: branch, + }; + const baseCiPackageBuild = { + type: 'build_docker_image.js', + arguments: { + ...commonBuildArgs, + context: gitHookPipeline.getSourceDestination(), + repository: IMG + '_base', + buildTarget: IMG + '_base', + dockerfile: 'Dockerfile', + }, + }; + gitHookPipeline.addStage({ + parallelJobs: [baseCiPackageBuild], + }); + const subPackages = ['worker', 'hooks'].map((_package) => ({ + type: 'build_docker_image.js', + arguments: { + ...commonBuildArgs, + repository: `${IMG}_${_package}`, + buildTarget: _package, + dockerfile: `${_package}/Dockerfile`, + }, + })); + gitHookPipeline.addStage({ + parallelJobs: subPackages, + }); + const isRelease = branch === 'release'; + if (!isRelease) { + return gitHookPipeline.build(); + } + const fetchAnsibleCode = { + type: 'fetch_code', + arguments: { + remoteUrl: `${REMOTE}/infra`, + checkout: 'main', + path: 'infra', + }, + }; + const thenDeploy = { + type: 'ansible_playbook.js', + arguments: { + path: 'infra', + playbooks: 'playbooks/ci.yml', + }, + }; + [fetchAnsibleCode, thenDeploy].forEach((deploymentStage) => + gitHookPipeline.addStage({ parallelJobs: [deploymentStage] }), + ); return gitHookPipeline.build(); - } - const fetchAnsibleCode = { - type: "fetch_code", - arguments: { - remoteUrl: `${REMOTE}/infra`, - checkout: "main", - 
path: "infra" - } - }; - const thenDeploy = { - type: "ansible_playbook.js", - arguments: { - path: "infra", - playbooks: "playbooks/ci.yml" - } - }; - [fetchAnsibleCode, thenDeploy].forEach((deploymentStage) => gitHookPipeline.addStage({ parallelJobs: [deploymentStage] })); - return gitHookPipeline.build(); }; var main = () => { - const data = getPipeline().serialize(); - process.stdout.write(data); + const data = getPipeline().serialize(); + process.stdout.write(data); }; main(); diff --git a/model/job/jobs.ts b/model/job/jobs.ts index eb22afd..dc23070 100644 --- a/model/job/jobs.ts +++ b/model/job/jobs.ts @@ -43,7 +43,6 @@ export interface CheckoutCiJobProps extends JobArgT { readonly rev: string; readonly run: string; - readonly executorLaminarPath: string; readonly returnPath: string; } diff --git a/u/trace/itrace.ts b/u/trace/itrace.ts index 57c4419..e2019fa 100644 --- a/u/trace/itrace.ts +++ b/u/trace/itrace.ts @@ -27,9 +27,7 @@ export interface ITraceable<T, Trace = BaseTraceWith> { readonly move: <_T>(t: _T) => ITraceable<_T, Trace>; readonly map: <_T>(mapper: ITraceableMapper<T, _T, Trace>) => ITraceable<_T, Trace>; readonly bimap: <_T>(mapper: ITraceableMapper<T, ITraceableTuple<_T, Trace>, Trace>) => ITraceable<_T, Trace>; - readonly coExtend: <_T>( - mapper: ITraceableMapper<T, Array<_T>, Trace>, - ) => Array<ITraceable<_T, Trace>>; + readonly coExtend: <_T>(mapper: ITraceableMapper<T, Array<_T>, Trace>) => Array<ITraceable<_T, Trace>>; readonly peek: (peek: ITraceableMapper<T, void, Trace>) => ITraceable<T, Trace>; readonly traceScope: (mapper: ITraceableMapper<T, Trace, Trace>) => ITraceable<T, Trace>; diff --git a/worker/fs.ts b/worker/fs.ts new file mode 100644 index 0000000..2c02b34 --- /dev/null +++ b/worker/fs.ts @@ -0,0 +1,29 @@ +import { Either, getStdout, IEither, LogMetricTraceable } from '@emprespresso/pengueno'; +import { readFileSync } from 'node:fs'; + +// the container which runs the pipeline synthesizer (A) might be spawned by another container +// (B) ((which should be the one running this job)) by talking to the host's docker daemon +// (mounting /var/run/docker.sock) and executing the {@link getPipelineGenerationCommand}. +// +// so mounting {@link getSrcDirectoryForCiJob} has no meaning as it doesn't exist on the host; +// here we replace the path in (B) with the actual volume source on the host, where the src +// exists. 
+export async function getPathOnHost(path: string, home = '/var/lib/laminar'): Promise<IEither<Error, string>> { + const container = await Either.fromFailable<Error, string>(() => + readFileSync('/etc/hostname', 'utf-8'), + ).flatMapAsync((hostname) => + LogMetricTraceable.of(`docker inspect ${hostname}`.split(' ')) + .map((tCmd) => getStdout(tCmd)) + .get(), + ); + if (container.left().present()) { + return Either.right(path); + } + return container + .flatMap((inspect) => Either.fromFailable(() => JSON.parse(inspect))) + .mapRight( + ([container]) => + container.Mounts.find(({ Destination }: { Destination: string }) => Destination === home).Source, + ) + .mapRight((source) => path.replace(home, source)); +} diff --git a/worker/index.ts b/worker/index.ts index 9ad32b9..9224e5d 100644 --- a/worker/index.ts +++ b/worker/index.ts @@ -1,2 +1,3 @@ +export * from './fs.js'; export * from './secret.js'; export * from './executor.js'; diff --git a/worker/jobs/ci_pipeline.run b/worker/jobs/ci_pipeline.run index 6bee929..ff1efa7 100755 --- a/worker/jobs/ci_pipeline.run +++ b/worker/jobs/ci_pipeline.run @@ -3,14 +3,4 @@ # add scripts executed by the pipeline export PATH=$PATH:$PIPELINE_PATH -containerid=$(cat /etc/hostname) -isindocker=$(docker ps -q -f "id=$containerid") -if [ -n "$isindocker" ]; then - executorLaminarPath=$(docker inspect "$containerid" | jq -r '.[0].Mounts[] | select(.Destination == "/var/lib/laminar") | .Source') -else - executorLaminarPath=$(pwd) -fi - -export executorLaminarPath - checkout_ci.js diff --git a/worker/scripts/build_docker_image.ts b/worker/scripts/build_docker_image.ts index f29bd61..31bd260 100755 --- a/worker/scripts/build_docker_image.ts +++ b/worker/scripts/build_docker_image.ts @@ -10,7 +10,7 @@ import { Command, } from '@emprespresso/pengueno'; import type { BuildDockerImageJob, BuildDockerImageJobProps } from '@emprespresso/ci_model'; -import { Bitwarden, type LoginItem } from '@emprespresso/ci_worker'; +import { Bitwarden, getPathOnHost, type LoginItem } from '@emprespresso/ci_worker'; import path from 'path'; const job = getRequiredEnvVars([ @@ -78,9 +78,14 @@ await LogMetricTraceable.ofLogTraceable(_logJob) .map(async (tEitherWithAuthdRegistryBuildJob) => { const eitherWithAuthdRegistryBuildJob = await tEitherWithAuthdRegistryBuildJob.get(); tEitherWithAuthdRegistryBuildJob.trace.trace('finally building the image~ (◕ᴗ◕✿)'); - const eitherBuiltImage = await eitherWithAuthdRegistryBuildJob.flatMapAsync((job) => + const context = await eitherWithAuthdRegistryBuildJob.flatMapAsync((j) => getPathOnHost(j.arguments.context)); + const buildJobArgs = eitherWithAuthdRegistryBuildJob.joinRight(context, (context, job) => ({ + ...job.arguments, + context, + })); + const eitherBuiltImage = await buildJobArgs.flatMapAsync((buildJobArgs) => tEitherWithAuthdRegistryBuildJob - .move(getBuildCommand(job.arguments)) + .move(getBuildCommand(buildJobArgs)) .map((tBuildCmd) => getStdout(tBuildCmd, { env: {}, @@ -123,18 +128,7 @@ function getDockerLoginCommand(username: string, registry: string): Command { } function getBuildCommand({ buildTarget, imageTag, dockerfile, context }: BuildDockerImageJobProps): Command { - return [ - 'cat', - path.join(context, dockerfile), - '|', - 'docker', - 'build', - '--target', - buildTarget, - '-t', - imageTag, - '-', - ]; + return ['docker', 'build', '--target', buildTarget, '-t', imageTag, '-f', dockerfile, context]; } function getPushCommand(tag: string): Command { diff --git a/worker/scripts/checkout_ci.ts 
b/worker/scripts/checkout_ci.ts index 65cbc2e..c4006e6 100755 --- a/worker/scripts/checkout_ci.ts +++ b/worker/scripts/checkout_ci.ts @@ -14,9 +14,9 @@ import { IEither, } from '@emprespresso/pengueno'; import { mkdir, readFile, rm } from 'fs/promises'; -import { join } from 'path'; +import path, { join } from 'path'; import { type CheckoutCiJob, type FetchCodeJob, Pipeline, PipelineImpl } from '@emprespresso/ci_model'; -import { executeJob, executePipeline } from '@emprespresso/ci_worker'; +import { executeJob, executePipeline, getPathOnHost } from '@emprespresso/ci_worker'; interface CiWorkflow { workflow: string; @@ -42,7 +42,7 @@ const READONLY_CREDENTIALS = { username: 'readonly', password: 'readonly' }; // -> checkout_ci (1000.uuidB) const run = `${Date.now()}.${crypto.randomUUID().replaceAll('-', '')}`; -const eitherJob = getRequiredEnvVars(['remote', 'refname', 'rev', 'executorLaminarPath']).mapRight( +const eitherJob = getRequiredEnvVars(['remote', 'refname', 'rev']).mapRight( (baseArgs) => <CheckoutCiJob>{ type: 'checkout_ci.js', @@ -76,19 +76,13 @@ await LogMetricTraceable.ofLogTraceable(logTraceableJob) mkdir(wd, { recursive: true }) .then(() => process.chdir(wd)) .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get()) - .then((e) => - e.fold( - (err) => { - throw err; - }, - () => ciJob, - ), - ), + .then(() => ciJob), ); }), ) .map(async (tEitherCiJob) => { const eitherCiJob = await tEitherCiJob.get(); + const jobSrcOnHost = await eitherCiJob.flatMapAsync((job) => getPathOnHost(getSrcDirectoryForCiJob(job))); const repoCiFileContents = await eitherCiJob.flatMapAsync((ciJob) => Either.fromFailableAsync<Error, string>(() => readFile(join(getSrcDirectoryForCiJob(ciJob), CI_WORKFLOW_FILE), 'utf-8'), @@ -98,9 +92,13 @@ await LogMetricTraceable.ofLogTraceable(logTraceableJob) .flatMap((fileText) => Either.fromFailable<Error, CiWorkflow>(() => JSON.parse(fileText)).filter((json) => isCiWorkflow(json)), ) - .joinRight(eitherCiJob, (job: CheckoutCiJob, { workflow }) => ({ + .joinRight(eitherCiJob, (job, { workflow }) => ({ job, - commands: getPipelineGenerationCommand(job, workflow), + workflow, + })) + .joinRight(jobSrcOnHost, (src, { job, workflow }) => ({ + job, + commands: getPipelineGenerationCommand(job, workflow, src), })); }) .map( @@ -150,16 +148,6 @@ await LogMetricTraceable.ofLogTraceable(logTraceableJob) () => afterJob, ); }) - .map( - TraceUtil.promiseify((e) => - e.get().fold( - (err) => { - throw err; - }, - (ok) => ok, - ), - ), - ) .get(); function getWorkingDirectoryForCiJob(job: CheckoutCiJob) { @@ -170,20 +158,10 @@ function getSrcDirectoryForCiJob(job: CheckoutCiJob) { return `${getWorkingDirectoryForCiJob(job)}/src`; } -function getSrcDirectoryForChildContainer(job: CheckoutCiJob) { - // the container which runs the pipeline synthesizer (A) might be spawned by another container - // (B) ((which should be the one running this job)) by talking to the host's docker daemon - // (mounting /var/run/docker.sock) and executing the {@link getPipelineGenerationCommand}. - // - // so mounting {@link getSrcDirectoryForCiJob} has no meaning as it doesn't exist on the host; - // here we replace the path in (B) with the actual volume source on the host, where the src - // exists. 
- return getSrcDirectoryForCiJob(job).replace('/var/lib/laminar', job.arguments.executorLaminarPath); -} - function getPipelineGenerationCommand( job: CheckoutCiJob, pipelineGeneratorPath: string, + srcMount: string, credentials = READONLY_CREDENTIALS, registry = OCI_REGISTRY, image = PIPELINE_IMAGE, @@ -201,7 +179,7 @@ function getPipelineGenerationCommand( '-e', ), '-v', - `${getSrcDirectoryForChildContainer(job)}:/src`, + `${srcMount}:/src`, image, `/src/${pipelineGeneratorPath}`, ], |
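
The thread tying the worker-side changes together is the new `getPathOnHost` helper in `worker/fs.ts`: it replaces both the `docker inspect | jq` block deleted from `ci_pipeline.run` and the `executorLaminarPath` job argument, translating a path inside the worker container into the host-side source of the `/var/lib/laminar` bind mount so the host's docker daemon can mount it into a sibling container. A minimal usage sketch under assumed paths (the run directory and the `/srv/laminar` host mount are invented; when `docker inspect` of the current hostname fails, i.e. the worker is not itself containerized, the function returns its input unchanged):

```typescript
import { getPathOnHost } from '@emprespresso/ci_worker';

// Path as seen from inside the worker container (hypothetical run id).
const srcInWorker = '/var/lib/laminar/run/ci_pipeline/42/src';

// If /var/lib/laminar is bind-mounted from, say, /srv/laminar on the host,
// this resolves to '/srv/laminar/run/ci_pipeline/42/src'.
const eitherSrcOnHost = await getPathOnHost(srcInWorker);

eitherSrcOnHost.fold(
    (err) => {
        throw err;
    },
    (srcOnHost) => {
        // Only the host-side path means anything to `docker run -v ${srcOnHost}:/src ...`,
        // because the daemon resolves volume sources against the host filesystem.
        console.log(`would mount ${srcOnHost} as /src`);
    },
);
```

This is the same substitution `checkout_ci.ts` now performs before composing `getPipelineGenerationCommand`, and `build_docker_image.ts` applies it to the build context for the same reason.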