Diffstat (limited to 'worker')
-rw-r--r--  worker/deno.json                      |   4
-rw-r--r--  worker/executor.ts                    | 161
-rw-r--r--  worker/index.ts                       |   2
-rw-r--r--  worker/mod.ts                         |   2
-rw-r--r--  worker/package.json                   |  28
-rwxr-xr-x  worker/scripts/ansible_playbook.ts    | 193
-rwxr-xr-x  worker/scripts/build_docker_image.ts  | 259
-rwxr-xr-x  worker/scripts/checkout_ci.ts         | 294
-rw-r--r--  worker/secret.ts                      | 286
-rw-r--r--  worker/tsconfig.json                  |  15
10 files changed, 578 insertions, 666 deletions
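
The diffs below replace the worker's Deno module layout (deno.json, mod.ts, Deno.* APIs) with a Node/TypeScript package build (package.json, index.ts, tsconfig.json, fs/promises). A minimal consumer-side sketch, assuming the workspace links these packages and the worker has been built to dist/:

    // consumer.ts -- hypothetical example; the bare import resolves through the
    // "exports" map added in worker/package.json to ./dist/index.js, which
    // re-exports secret.js and executor.js.
    import { Bitwarden, executeJob, executePipeline } from '@emprespresso/ci_worker';
    import type { IVault, SecretItem } from '@emprespresso/ci_worker';
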
diff --git a/worker/deno.json b/worker/deno.json deleted file mode 100644 index c908330..0000000 --- a/worker/deno.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name": "@emprespresso/ci_worker", - "exports": "./mod.ts" -} diff --git a/worker/executor.ts b/worker/executor.ts index ea79995..f4b7906 100644 --- a/worker/executor.ts +++ b/worker/executor.ts @@ -1,101 +1,86 @@ import { - getStdout, - type ITraceable, - LogLevel, - type LogMetricTraceSupplier, - memoize, - Metric, - TraceUtil, - validateExecutionEntries, - Either, - type IEither, -} from "@emprespresso/pengueno"; -import type { Job, JobArgT, Pipeline } from "@emprespresso/ci_model"; + getStdout, + type ITraceable, + LogLevel, + type LogMetricTraceSupplier, + memoize, + Metric, + TraceUtil, + validateExecutionEntries, + Either, + type IEither, +} from '@emprespresso/pengueno'; +import type { Job, JobArgT, Pipeline } from '@emprespresso/ci_model'; // -- <job.exectuor> -- const jobTypeMetric = memoize((type: string) => Metric.fromName(`run.${type}`)); export const executeJob = (tJob: ITraceable<Job, LogMetricTraceSupplier>) => - tJob - .bimap(TraceUtil.withMetricTrace(jobTypeMetric(tJob.get().type))) - .peek((tJob) => - tJob.trace.trace(`let's do this little job ok!! ${tJob.get()}`), - ) - .map((tJob) => - validateExecutionEntries(tJob.get().arguments) - .mapLeft((badEntries) => { - tJob.trace.addTrace(LogLevel.ERROR).trace(badEntries.toString()); - return new Error("invalid job arguments"); - }) - .flatMapAsync((args) => - getStdout(tJob.move(tJob.get().type), { env: args }), - ), - ) - .peek( - TraceUtil.promiseify((q) => - q.trace.trace( - q - .get() - .fold( - ({ isLeft }) => - jobTypeMetric(tJob.get().type)[isLeft ? "failure" : "success"], + tJob + .bimap(TraceUtil.withMetricTrace(jobTypeMetric(tJob.get().type))) + .peek((tJob) => tJob.trace.trace(`let's do this little job ok!! ${tJob.get()}`)) + .map((tJob) => + validateExecutionEntries(tJob.get().arguments) + .mapLeft((badEntries) => { + tJob.trace.addTrace(LogLevel.ERROR).trace(badEntries.toString()); + return new Error('invalid job arguments'); + }) + .flatMapAsync((args) => getStdout(tJob.move(tJob.get().type), { env: args })), + ) + .peek( + TraceUtil.promiseify((q) => + q.trace.trace( + q.get().fold(({ isLeft }) => jobTypeMetric(tJob.get().type)[isLeft ? 'failure' : 'success']), + ), ), - ), - ), - ) - .get(); + ) + .get(); // -- </job.exectuor> -- // -- <pipeline.executor> -- -const pipelinesMetric = Metric.fromName("pipelines"); +const pipelinesMetric = Metric.fromName('pipelines'); export const executePipeline = ( - tPipeline: ITraceable<Pipeline, LogMetricTraceSupplier>, - baseEnv?: JobArgT, + tPipeline: ITraceable<Pipeline, LogMetricTraceSupplier>, + baseEnv?: JobArgT, ): Promise<IEither<Error, void>> => - tPipeline - .bimap(TraceUtil.withFunctionTrace(executePipeline)) - .bimap(TraceUtil.withMetricTrace(pipelinesMetric)) - .map(async (tJobs): Promise<IEither<Error, void>> => { - for (const [i, serialStage] of tJobs.get().serialJobs.entries()) { - tJobs.trace.trace( - `executing stage ${i}. 
do your best little stage :>\n${serialStage}`, - ); - const jobResults = await Promise.all( - serialStage.parallelJobs.map((job) => - tJobs - .bimap((_) => [job, `stage ${i}`]) - .map( - (tJob) => - <Job>{ - ...tJob.get(), - arguments: { - ...baseEnv, - ...tJob.get().arguments, - }, - }, - ) - .map(executeJob) - .peek( - TraceUtil.promiseify((tEitherJobOutput) => - tEitherJobOutput - .get() - .mapRight((stdout) => - tEitherJobOutput.trace.addTrace("STDOUT").trace(stdout), + tPipeline + .bimap(TraceUtil.withFunctionTrace(executePipeline)) + .bimap(TraceUtil.withMetricTrace(pipelinesMetric)) + .map(async (tJobs): Promise<IEither<Error, void>> => { + for (const [i, serialStage] of tJobs.get().serialJobs.entries()) { + tJobs.trace.trace(`executing stage ${i}. do your best little stage :>\n${serialStage}`); + const jobResults = await Promise.all( + serialStage.parallelJobs.map((job) => + tJobs + .bimap((_) => [job, `stage ${i}`]) + .map( + (tJob) => + <Job>{ + ...tJob.get(), + arguments: { + ...baseEnv, + ...tJob.get().arguments, + }, + }, + ) + .map(executeJob) + .peek( + TraceUtil.promiseify((tEitherJobOutput) => + tEitherJobOutput + .get() + .mapRight((stdout) => tEitherJobOutput.trace.addTrace('STDOUT').trace(stdout)), + ), + ) + .get(), ), - ), - ) - .get(), - ), - ); - const failures = jobResults.filter((e) => - e.fold(({ isLeft }) => isLeft), - ); - if (failures.length > 0) { - tJobs.trace.trace(pipelinesMetric.failure); - return Either.left(new Error(failures.toString())); - } - } - tJobs.trace.trace(pipelinesMetric.success); - return Either.right(undefined); - }) - .get(); + ); + const failures = jobResults.filter((e) => e.fold(({ isLeft }) => isLeft)); + if (failures.length > 0) { + tJobs.trace.trace(pipelinesMetric.failure); + return Either.left(new Error(failures.toString())); + } + } + tJobs.trace.trace(pipelinesMetric.success); + return Either.right(undefined); + }) + .get(); // -- </pipeline.executor> -- diff --git a/worker/index.ts b/worker/index.ts new file mode 100644 index 0000000..9ad32b9 --- /dev/null +++ b/worker/index.ts @@ -0,0 +1,2 @@ +export * from './secret.js'; +export * from './executor.js'; diff --git a/worker/mod.ts b/worker/mod.ts deleted file mode 100644 index 97980a8..0000000 --- a/worker/mod.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from "./secret.ts"; -export * from "./executor.ts"; diff --git a/worker/package.json b/worker/package.json new file mode 100644 index 0000000..c49dfaf --- /dev/null +++ b/worker/package.json @@ -0,0 +1,28 @@ +{ + "name": "@emprespresso/ci_worker", + "version": "0.1.0", + "type": "module", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.js" + } + }, + "scripts": { + "build": "tsc", + "dev": "tsc --watch", + "clean": "rm -rf dist", + "type-check": "tsc --noEmit" + }, + "dependencies": { + "@emprespresso/pengueno": "*", + "@emprespresso/ci_model": "*" + }, + "files": [ + "dist/**/*", + "package.json", + "README.md" + ] +} diff --git a/worker/scripts/ansible_playbook.ts b/worker/scripts/ansible_playbook.ts index 0879dc5..c6d8f2c 100755 --- a/worker/scripts/ansible_playbook.ts +++ b/worker/scripts/ansible_playbook.ts @@ -1,113 +1,100 @@ -#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run --allow-read --allow-write +#!/usr/bin/env node import { - Either, - getRequiredEnvVars, - getStdout, - type IEither, - LogTraceable, - LogMetricTraceable, - Metric, - prependWith, - TraceUtil, -} from "@emprespresso/pengueno"; -import type { 
AnsiblePlaybookJob } from "@emprespresso/ci_model"; -import { Bitwarden, type SecureNote } from "@emprespresso/ci_worker"; + Either, + getRequiredEnvVars, + getStdout, + type IEither, + LogTraceable, + LogMetricTraceable, + Metric, + prependWith, + TraceUtil, +} from '@emprespresso/pengueno'; +import type { AnsiblePlaybookJob } from '@emprespresso/ci_model'; +import { Bitwarden, type SecureNote } from '@emprespresso/ci_worker'; +import { writeFile, mkdtemp } from 'fs/promises'; +import { join } from 'path'; +import { tmpdir } from 'os'; -const eitherJob = getRequiredEnvVars([ - "path", - "playbooks", -]) - .mapRight((baseArgs) => ( - <AnsiblePlaybookJob> { - type: "ansible_playbook.ts", - arguments: baseArgs, - } - )); +const eitherJob = getRequiredEnvVars(['path', 'playbooks']).mapRight( + (baseArgs) => + <AnsiblePlaybookJob>{ + type: 'ansible_playbook.ts', + arguments: baseArgs, + }, +); -const eitherVault = Bitwarden.getConfigFromEnvironment() - .mapRight((config) => new Bitwarden(config)); +const eitherVault = Bitwarden.getConfigFromEnvironment().mapRight((config) => new Bitwarden(config)); -const playbookMetric = Metric.fromName("ansiblePlaybook.playbook"); -const _logJob = LogTraceable.of(eitherJob).bimap(TraceUtil.withTrace("ansible_playbook")); -await LogMetricTraceable.ofLogTraceable(_logJob).bimap(TraceUtil.withMetricTrace(playbookMetric)) - .peek((tEitherJob) => - tEitherJob.trace.trace("starting ansible playbook job! (⑅˘꒳˘)") - ) - .map((tEitherJob) => - tEitherJob.get().flatMapAsync((job) => - eitherVault.flatMapAsync(async (vault) => { - const eitherKey = await vault.unlock(tEitherJob); - return eitherKey.mapRight((key) => ({ job, key, vault })); - }) +const playbookMetric = Metric.fromName('ansiblePlaybook.playbook'); +const _logJob = LogTraceable.of(eitherJob).bimap(TraceUtil.withTrace('ansible_playbook')); +await LogMetricTraceable.ofLogTraceable(_logJob) + .bimap(TraceUtil.withMetricTrace(playbookMetric)) + .peek((tEitherJob) => tEitherJob.trace.trace('starting ansible playbook job! 
(⑅˘꒳˘)')) + .map((tEitherJob) => + tEitherJob.get().flatMapAsync((job) => + eitherVault.flatMapAsync(async (vault) => { + const eitherKey = await vault.unlock(tEitherJob); + return eitherKey.mapRight((key) => ({ job, key, vault })); + }), + ), ) - ) - .map(async (tEitherJobVault) => { - tEitherJobVault.trace.trace( - "getting ansible secwets uwu~", - ); - const eitherJobVault = await tEitherJobVault.get(); - - const eitherSshKey = await eitherJobVault - .flatMapAsync(({ key, vault }) => - vault.fetchSecret<SecureNote>(tEitherJobVault, key, "ssh_key") - ); - const eitherSshKeyFile = await eitherSshKey.mapRight(({ notes }) => notes) - .flatMapAsync(saveToTempFile); - const eitherAnsibleSecrets = await eitherJobVault - .flatMapAsync(({ key, vault }) => - vault.fetchSecret<SecureNote>(tEitherJobVault, key, "ansible_playbooks") - ); - const eitherAnsibleSecretsFile = await eitherAnsibleSecrets.mapRight(( - { notes }, - ) => notes).flatMapAsync(saveToTempFile); + .map(async (tEitherJobVault) => { + tEitherJobVault.trace.trace('getting ansible secwets uwu~'); + const eitherJobVault = await tEitherJobVault.get(); - return eitherJobVault.flatMapAsync(async ({ job, vault, key }) => { - const eitherLocked = await vault.lock(tEitherJobVault, key); - return eitherLocked.flatMap((_locked) => - eitherSshKeyFile.flatMap((sshKeyFile) => - eitherAnsibleSecretsFile.mapRight((secretsFile) => ({ - job, - sshKeyFile, - secretsFile, - })) - ) - ); - }); - }) - .map(async (tEitherJobAndSecrets) => { - const eitherJobAndSecrets = await tEitherJobAndSecrets.get(); - return eitherJobAndSecrets.flatMapAsync( - ({ job, sshKeyFile, secretsFile }) => { - const volumes = [ - `${job.arguments.path}:/ansible`, - `${sshKeyFile}:/root/id_rsa`, - `${secretsFile}:/ansible/secrets.yml`, - ]; - const playbookCmd = - `ansible-playbook -e @secrets.yml ${job.arguments.playbooks}`; - const deployCmd = [ - "docker", - "run", - ...prependWith(volumes, "-v"), - "willhallonline/ansible:latest", - ...playbookCmd.split(" "), - ]; - tEitherJobAndSecrets.trace.trace( - `running ansible magic~ (◕ᴗ◕✿) ${deployCmd}`, + const eitherSshKey = await eitherJobVault.flatMapAsync(({ key, vault }) => + vault.fetchSecret<SecureNote>(tEitherJobVault, key, 'ssh_key'), ); - return tEitherJobAndSecrets.move(deployCmd).map(getStdout).get(); - }, - ); - }) - .get(); + const eitherSshKeyFile = await eitherSshKey.mapRight(({ notes }) => notes).flatMapAsync(saveToTempFile); + const eitherAnsibleSecrets = await eitherJobVault.flatMapAsync(({ key, vault }) => + vault.fetchSecret<SecureNote>(tEitherJobVault, key, 'ansible_playbooks'), + ); + const eitherAnsibleSecretsFile = await eitherAnsibleSecrets + .mapRight(({ notes }) => notes) + .flatMapAsync(saveToTempFile); + + return eitherJobVault.flatMapAsync(async ({ job, vault, key }) => { + const eitherLocked = await vault.lock(tEitherJobVault, key); + return eitherLocked.flatMap((_locked) => + eitherSshKeyFile.flatMap((sshKeyFile) => + eitherAnsibleSecretsFile.mapRight((secretsFile) => ({ + job, + sshKeyFile, + secretsFile, + })), + ), + ); + }); + }) + .map(async (tEitherJobAndSecrets) => { + const eitherJobAndSecrets = await tEitherJobAndSecrets.get(); + return eitherJobAndSecrets.flatMapAsync(({ job, sshKeyFile, secretsFile }) => { + const volumes = [ + `${job.arguments.path}:/ansible`, + `${sshKeyFile}:/root/id_rsa`, + `${secretsFile}:/ansible/secrets.yml`, + ]; + const playbookCmd = `ansible-playbook -e @secrets.yml ${job.arguments.playbooks}`; + const deployCmd = [ + 'docker', + 'run', + 
...prependWith(volumes, '-v'), + 'willhallonline/ansible:latest', + ...playbookCmd.split(' '), + ]; + tEitherJobAndSecrets.trace.trace(`running ansible magic~ (◕ᴗ◕✿) ${deployCmd}`); + return tEitherJobAndSecrets.move(deployCmd).map(getStdout).get(); + }); + }) + .get(); const saveToTempFile = (text: string): Promise<IEither<Error, string>> => - Either.fromFailableAsync( - () => Deno.makeTempDir({ dir: Deno.cwd() }) - .then((dir) => Deno.makeTempFile({ dir })) - .then(async (f) => { - await Deno.writeTextFile(f, text); - return f; - }), - ); + Either.fromFailableAsync(() => + mkdtemp(join(tmpdir(), 'ci-')).then(async (dir) => { + const filePath = join(dir, 'temp-file'); + await writeFile(filePath, text); + return filePath; + }), + ); diff --git a/worker/scripts/build_docker_image.ts b/worker/scripts/build_docker_image.ts index 49abe41..228dfcc 100755 --- a/worker/scripts/build_docker_image.ts +++ b/worker/scripts/build_docker_image.ts @@ -1,162 +1,131 @@ -#!/usr/bin/env -S deno run --allow-env --allow-net --allow-run +#!/usr/bin/env node import { - getRequiredEnvVars, - getStdout, - LogLevel, - LogTraceable, - LogMetricTraceable, - Metric, - TraceUtil, -} from "@emprespresso/pengueno"; -import type { - BuildDockerImageJob, - BuildDockerImageJobProps, -} from "@emprespresso/ci_model"; -import { Bitwarden, type LoginItem } from "@emprespresso/ci_worker"; + getRequiredEnvVars, + getStdout, + LogLevel, + LogTraceable, + LogMetricTraceable, + Metric, + TraceUtil, +} from '@emprespresso/pengueno'; +import type { BuildDockerImageJob, BuildDockerImageJobProps } from '@emprespresso/ci_model'; +import { Bitwarden, type LoginItem } from '@emprespresso/ci_worker'; const eitherJob = getRequiredEnvVars([ - "registry", - "namespace", - "repository", - "imageTag", - "context", - "dockerfile", - "buildTarget", -]) - .mapRight((baseArgs) => ( - <BuildDockerImageJob> { - type: "build_docker_image.ts", - arguments: baseArgs, - } - )); -const eitherVault = Bitwarden.getConfigFromEnvironment() - .mapRight((config) => new Bitwarden(config)); + 'registry', + 'namespace', + 'repository', + 'imageTag', + 'context', + 'dockerfile', + 'buildTarget', +]).mapRight( + (baseArgs) => + <BuildDockerImageJob>{ + type: 'build_docker_image.ts', + arguments: baseArgs, + }, +); +const eitherVault = Bitwarden.getConfigFromEnvironment().mapRight((config) => new Bitwarden(config)); -const buildImageMetric = Metric.fromName("dockerImage.build"); -const loginMetric = Metric.fromName("dockerRegistry.login"); -const _logJob = LogTraceable.of(eitherJob).bimap( - (tEitherJob) => { - const trace = "build_docker_image." + - tEitherJob.get().fold(({ isRight, value }) => isRight ? value.arguments.buildTarget : ""); - return [tEitherJob.get(), trace]; - }, - ); +const buildImageMetric = Metric.fromName('dockerImage.build'); +const loginMetric = Metric.fromName('dockerRegistry.login'); +const _logJob = LogTraceable.of(eitherJob).bimap((tEitherJob) => { + const trace = + 'build_docker_image.' + + tEitherJob.get().fold(({ isRight, value }) => (isRight ? value.arguments.buildTarget : '')); + return [tEitherJob.get(), trace]; +}); await LogMetricTraceable.ofLogTraceable(_logJob) - .bimap(TraceUtil.withMetricTrace(buildImageMetric)) - .bimap(TraceUtil.withMetricTrace(loginMetric)) - .peek((tEitherJob) => - tEitherJob.trace.trace("starting docker image build job! 
(⑅˘꒳˘)") - ) - .map((tEitherJob) => - tEitherJob.get() - .flatMapAsync((job) => - eitherVault.flatMapAsync(async (vault) => { - const eitherKey = await vault.unlock(tEitherJob); - return eitherKey.mapRight((key) => ({ job, key, vault })); - }) - ) - ) - .map(async (tEitherJobVault) => { - tEitherJobVault.trace.trace("logging into the wegistwy uwu~"); - const eitherJobVault = await tEitherJobVault.get(); - const eitherDockerRegistryLoginItem = await eitherJobVault.flatMapAsync(( - { job, key, vault }, - ) => - vault.fetchSecret<LoginItem>(tEitherJobVault, key, job.arguments.registry) - .finally(() => vault.lock(tEitherJobVault, key)) - ); - return eitherDockerRegistryLoginItem.flatMapAsync(({ login }) => - eitherJobVault.flatMapAsync(async ({ job }) => { - const loginCommand = getDockerLoginCommand( - login.username, - job.arguments.registry, + .bimap(TraceUtil.withMetricTrace(buildImageMetric)) + .bimap(TraceUtil.withMetricTrace(loginMetric)) + .peek((tEitherJob) => tEitherJob.trace.trace('starting docker image build job! (⑅˘꒳˘)')) + .map((tEitherJob) => + tEitherJob.get().flatMapAsync((job) => + eitherVault.flatMapAsync(async (vault) => { + const eitherKey = await vault.unlock(tEitherJob); + return eitherKey.mapRight((key) => ({ job, key, vault })); + }), + ), + ) + .map(async (tEitherJobVault) => { + tEitherJobVault.trace.trace('logging into the wegistwy uwu~'); + const eitherJobVault = await tEitherJobVault.get(); + const eitherDockerRegistryLoginItem = await eitherJobVault.flatMapAsync(({ job, key, vault }) => + vault + .fetchSecret<LoginItem>(tEitherJobVault, key, job.arguments.registry) + .finally(() => vault.lock(tEitherJobVault, key)), ); - const eitherLoggedIn = await tEitherJobVault.move(loginCommand).map(( - tLoginCmd, - ) => - getStdout(tLoginCmd, { env: { REGISTRY_PASSWORD: login.password } }) - ).get(); - return eitherLoggedIn.moveRight(job); - }) - ); - }) - .peek(async (tEitherWithAuthdRegistry) => { - const eitherWithAuthdRegistry = await tEitherWithAuthdRegistry.get(); - return tEitherWithAuthdRegistry.trace.trace( - eitherWithAuthdRegistry.fold(({ isLeft}) => - loginMetric[isLeft ? "failure" : "success"] - ), - ); - }) - .map(async (tEitherWithAuthdRegistryBuildJob) => { - const eitherWithAuthdRegistryBuildJob = - await tEitherWithAuthdRegistryBuildJob.get(); - tEitherWithAuthdRegistryBuildJob.trace.trace( - "finally building the image~ (◕ᴗ◕✿)", - ); - const eitherBuiltImage = await eitherWithAuthdRegistryBuildJob.flatMapAsync( - (job) => - tEitherWithAuthdRegistryBuildJob - .move(getBuildCommand(job.arguments)) - .map((tBuildCmd) => - getStdout(tBuildCmd, { - env: {}, - clearEnv: true, - }) - ) - .get(), - ); - return eitherBuiltImage.flatMap((buildOutput) => - eitherWithAuthdRegistryBuildJob.mapRight((job) => ({ buildOutput, job })) - ); - }) - .peek(async (tEitherWithBuiltImage) => { - const eitherWithBuiltImage = await tEitherWithBuiltImage.get(); - eitherWithBuiltImage.fold(({ isLeft, value}) => { - tEitherWithBuiltImage.trace.trace( - buildImageMetric[isLeft ? 
"failure" : "success"], - ); - if (isLeft) { - tEitherWithBuiltImage.trace.addTrace(LogLevel.ERROR).trace( - `oh nyoo we couldn't buiwd the img :(( ${value}`, + return eitherDockerRegistryLoginItem.flatMapAsync(({ login }) => + eitherJobVault.flatMapAsync(async ({ job }) => { + const loginCommand = getDockerLoginCommand(login.username, job.arguments.registry); + const eitherLoggedIn = await tEitherJobVault + .move(loginCommand) + .map((tLoginCmd) => getStdout(tLoginCmd, { env: { REGISTRY_PASSWORD: login.password } })) + .get(); + return eitherLoggedIn.moveRight(job); + }), ); - return; - } - tEitherWithBuiltImage.trace.addTrace("buildOutput").trace(value.buildOutput); - }); - }) - .map(async (tEitherWithBuiltImage) => { - const eitherWithBuiltImage = await tEitherWithBuiltImage.get(); - return eitherWithBuiltImage - .mapRight(({ job }) => - tEitherWithBuiltImage.move(getPushCommand(job.arguments.imageTag)) - ) - .flatMapAsync((tPushCommand) => getStdout(tPushCommand)); - }) - .get(); + }) + .peek(async (tEitherWithAuthdRegistry) => { + const eitherWithAuthdRegistry = await tEitherWithAuthdRegistry.get(); + return tEitherWithAuthdRegistry.trace.trace( + eitherWithAuthdRegistry.fold(({ isLeft }) => loginMetric[isLeft ? 'failure' : 'success']), + ); + }) + .map(async (tEitherWithAuthdRegistryBuildJob) => { + const eitherWithAuthdRegistryBuildJob = await tEitherWithAuthdRegistryBuildJob.get(); + tEitherWithAuthdRegistryBuildJob.trace.trace('finally building the image~ (◕ᴗ◕✿)'); + const eitherBuiltImage = await eitherWithAuthdRegistryBuildJob.flatMapAsync((job) => + tEitherWithAuthdRegistryBuildJob + .move(getBuildCommand(job.arguments)) + .map((tBuildCmd) => + getStdout(tBuildCmd, { + env: {}, + clearEnv: true, + }), + ) + .get(), + ); + return eitherBuiltImage.flatMap((buildOutput) => + eitherWithAuthdRegistryBuildJob.mapRight((job) => ({ buildOutput, job })), + ); + }) + .peek(async (tEitherWithBuiltImage) => { + const eitherWithBuiltImage = await tEitherWithBuiltImage.get(); + eitherWithBuiltImage.fold(({ isLeft, value }) => { + tEitherWithBuiltImage.trace.trace(buildImageMetric[isLeft ? 
'failure' : 'success']); + if (isLeft) { + tEitherWithBuiltImage.trace + .addTrace(LogLevel.ERROR) + .trace(`oh nyoo we couldn't buiwd the img :(( ${value}`); + return; + } + tEitherWithBuiltImage.trace.addTrace('buildOutput').trace(value.buildOutput); + }); + }) + .map(async (tEitherWithBuiltImage) => { + const eitherWithBuiltImage = await tEitherWithBuiltImage.get(); + return eitherWithBuiltImage + .mapRight(({ job }) => tEitherWithBuiltImage.move(getPushCommand(job.arguments.imageTag))) + .flatMapAsync((tPushCommand) => getStdout(tPushCommand)); + }) + .get(); const getDockerLoginCommand = (username: string, registry: string) => - `docker login --username ${username} --password $REGISTRY_PASSWORD ${registry}` - .split(" "); + `docker login --username ${username} --password $REGISTRY_PASSWORD ${registry}`.split(' '); -const getBuildCommand = ( - { +const getBuildCommand = ({ buildTarget, imageTag, dockerfile, context }: BuildDockerImageJobProps) => [ + 'docker', + 'build', + '--target', buildTarget, + '-t', imageTag, + '-f', dockerfile, context, - }: BuildDockerImageJobProps, -) => [ - "docker", - "build", - "--target", - buildTarget, - "-t", - imageTag, - "-f", - dockerfile, - context, ]; -const getPushCommand = (tag: string) => ["docker", "push", tag]; +const getPushCommand = (tag: string) => ['docker', 'push', tag]; diff --git a/worker/scripts/checkout_ci.ts b/worker/scripts/checkout_ci.ts index efe74fb..8e4dcca 100755 --- a/worker/scripts/checkout_ci.ts +++ b/worker/scripts/checkout_ci.ts @@ -1,182 +1,152 @@ -#!/usr/bin/env -S deno run --allow-all +#!/usr/bin/env node import { - type Command, - Either, - LogTraceable, - getRequiredEnvVars, - getStdout, - isObject, - LogMetricTraceable, - Metric, - prependWith, - TraceUtil, -} from "@emprespresso/pengueno"; -import { - type CheckoutCiJob, - type FetchCodeJob, - PipelineImpl, -} from "@emprespresso/ci_model"; -import { executeJob, executePipeline } from "@emprespresso/ci_worker"; + type Command, + Either, + LogTraceable, + getRequiredEnvVars, + getStdout, + isObject, + LogMetricTraceable, + Metric, + prependWith, + TraceUtil, +} from '@emprespresso/pengueno'; +import { mkdir, readFile, rm } from 'fs/promises'; +import { join } from 'path'; +import { type CheckoutCiJob, type FetchCodeJob, PipelineImpl } from '@emprespresso/ci_model'; +import { executeJob, executePipeline } from '@emprespresso/ci_worker'; const run = Date.now().toString(); -const eitherJob = getRequiredEnvVars(["remote", "refname", "rev"]).mapRight( - (baseArgs) => - <CheckoutCiJob>{ - type: "checkout_ci.ts", - arguments: { - ...baseArgs, - run, - returnPath: Deno.cwd(), - }, - }, +const eitherJob = getRequiredEnvVars(['remote', 'refname', 'rev']).mapRight( + (baseArgs) => + <CheckoutCiJob>{ + type: 'checkout_ci.ts', + arguments: { + ...baseArgs, + run, + returnPath: process.cwd(), + }, + }, ); -const ciRunMetric = Metric.fromName("checkout_ci.run"); -const _logJob = LogTraceable.of(eitherJob).bimap( - TraceUtil.withTrace(`checkout_ci.${run}`), -); +const ciRunMetric = Metric.fromName('checkout_ci.run'); +const _logJob = LogTraceable.of(eitherJob).bimap(TraceUtil.withTrace(`checkout_ci.${run}`)); await LogMetricTraceable.ofLogTraceable(_logJob) - .bimap(TraceUtil.withMetricTrace(ciRunMetric)) - .map((tEitherJob) => - tEitherJob.get().flatMapAsync((ciJob) => { - const wd = getWorkingDirectoryForCiJob(ciJob); - const fetchPackageJob = <FetchCodeJob>{ - type: "fetch_code.ts", - arguments: { - remoteUrl: ciJob.arguments.remote, - checkout: ciJob.arguments.rev, - path: 
getSrcDirectoryForCiJob(ciJob), - }, - }; - return Either.fromFailableAsync<Error, CheckoutCiJob>(() => - Deno.mkdir(wd) - .then(() => Deno.chdir(wd)) - .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get()) - .then(() => ciJob), - ); - }), - ) - .map((tEitherCiJob) => - tEitherCiJob.get().then((eitherCiJob) => - eitherCiJob.flatMapAsync<{ cmd: Command; job: CheckoutCiJob }>((ciJob) => - Either.fromFailableAsync<Error, string>(() => - Deno.readTextFile( - `${getSrcDirectoryForCiJob(ciJob)}/${CI_WORKFLOW_FILE}`, - ), - ).then((eitherWorkflowJson) => - eitherWorkflowJson - .flatMap((json) => - Either.fromFailable<Error, unknown>(JSON.parse(json)), - ) - .flatMap((eitherWorkflowParse) => { - if (isCiWorkflow(eitherWorkflowParse)) { - return Either.right({ - cmd: getPipelineGenerationCommand( - ciJob, - eitherWorkflowParse.workflow, - ), - job: ciJob, - }); - } - return Either.left( - new Error( - "couldn't find any valid ci configuration (。•́︿•̀。), that's okay~", + .bimap(TraceUtil.withMetricTrace(ciRunMetric)) + .map((tEitherJob) => + tEitherJob.get().flatMapAsync((ciJob) => { + const wd = getWorkingDirectoryForCiJob(ciJob); + const fetchPackageJob = <FetchCodeJob>{ + type: 'fetch_code.ts', + arguments: { + remoteUrl: ciJob.arguments.remote, + checkout: ciJob.arguments.rev, + path: getSrcDirectoryForCiJob(ciJob), + }, + }; + return Either.fromFailableAsync<Error, CheckoutCiJob>(() => + mkdir(wd, { recursive: true }) + .then(() => process.chdir(wd)) + .then(() => tEitherJob.move(fetchPackageJob).map(executeJob).get()) + .then(() => ciJob), + ); + }), + ) + .map((tEitherCiJob) => + tEitherCiJob.get().then((eitherCiJob) => + eitherCiJob.flatMapAsync<{ cmd: Command; job: CheckoutCiJob }>((ciJob) => + Either.fromFailableAsync<Error, string>(() => + readFile(join(getSrcDirectoryForCiJob(ciJob), CI_WORKFLOW_FILE), 'utf-8'), + ).then((eitherWorkflowJson) => + eitherWorkflowJson + .flatMap((json) => Either.fromFailable<Error, unknown>(JSON.parse(json))) + .flatMap((eitherWorkflowParse) => { + if (isCiWorkflow(eitherWorkflowParse)) { + return Either.right({ + cmd: getPipelineGenerationCommand(ciJob, eitherWorkflowParse.workflow), + job: ciJob, + }); + } + return Either.left( + new Error("couldn't find any valid ci configuration (。•́︿•̀。), that's okay~"), + ); + }), ), - ); - }), + ), ), - ), - ), - ) - .map(async (tEitherPipelineGenerationCommand) => { - const eitherJobCommand = await tEitherPipelineGenerationCommand.get(); - const eitherPipeline = await eitherJobCommand.flatMapAsync((jobCommand) => - tEitherPipelineGenerationCommand - .move(jobCommand.cmd) - .map(getStdout) - .get(), - ); - return eitherPipeline - .flatMap(PipelineImpl.from) - .flatMap((pipeline) => - eitherJobCommand.mapRight(({ job }) => ({ job, pipeline })), - ); - }) - .peek( - TraceUtil.promiseify((tEitherPipeline) => - tEitherPipeline - .get() - .mapRight((val) => val.pipeline.serialize()) - .mapRight( - (pipeline) => - `built the pipeline~ (◕ᴗ◕✿) let's make something amazing! 
${pipeline}`, - ) - .mapRight((msg) => tEitherPipeline.trace.trace(msg)), - ), - ) - .map(async (tEitherPipeline) => { - const eitherPipeline = await tEitherPipeline.get(); - return eitherPipeline.flatMapAsync(({ pipeline, job }) => - tEitherPipeline - .move(pipeline) - .map((p) => - executePipeline(p, { - HOME: getWorkingDirectoryForCiJob(job), - }), - ) - .get(), + ) + .map(async (tEitherPipelineGenerationCommand) => { + const eitherJobCommand = await tEitherPipelineGenerationCommand.get(); + const eitherPipeline = await eitherJobCommand.flatMapAsync((jobCommand) => + tEitherPipelineGenerationCommand.move(jobCommand.cmd).map(getStdout).get(), + ); + return eitherPipeline + .flatMap(PipelineImpl.from) + .flatMap((pipeline) => eitherJobCommand.mapRight(({ job }) => ({ job, pipeline }))); + }) + .peek( + TraceUtil.promiseify((tEitherPipeline) => + tEitherPipeline + .get() + .mapRight((val) => val.pipeline.serialize()) + .mapRight((pipeline) => `built the pipeline~ (◕ᴗ◕✿) let's make something amazing! ${pipeline}`) + .mapRight((msg) => tEitherPipeline.trace.trace(msg)), + ), + ) + .map(async (tEitherPipeline) => { + const eitherPipeline = await tEitherPipeline.get(); + return eitherPipeline.flatMapAsync(({ pipeline, job }) => + tEitherPipeline + .move(pipeline) + .map((p) => + executePipeline(p, { + HOME: getWorkingDirectoryForCiJob(job), + }), + ) + .get(), + ); + }) + .get() + .then((e) => + e + .flatMap(() => eitherJob) + .fold(({ isLeft, isRight, value }) => { + if (isLeft || !isRight) throw value; + return rm(getWorkingDirectoryForCiJob(value), { + recursive: true, + }); + }), ); - }) - .get() - .then((e) => - e - .flatMap(() => eitherJob) - .fold(({ isLeft, isRight, value }) => { - if (isLeft || !isRight) throw value; - return Deno.remove(getWorkingDirectoryForCiJob(value), { - recursive: true, - }); - }), - ); -const getWorkingDirectoryForCiJob = (job: CheckoutCiJob) => - `${job.arguments.returnPath}/${job.arguments.run}`; +const getWorkingDirectoryForCiJob = (job: CheckoutCiJob) => `${job.arguments.returnPath}/${job.arguments.run}`; -const getSrcDirectoryForCiJob = (job: CheckoutCiJob) => - `${job.arguments.returnPath}/${job.arguments.run}/src`; +const getSrcDirectoryForCiJob = (job: CheckoutCiJob) => `${job.arguments.returnPath}/${job.arguments.run}/src`; -const _runFlags = ( - "--rm --network none --cap-drop ALL" + "--security-opt no-new-privileges" -).split(" "); -const _image = "oci.liz.coffee/img/ci-worker:release"; +const _runFlags = ('--rm --network none --cap-drop ALL' + '--security-opt no-new-privileges').split(' '); +const _image = 'oci.liz.coffee/img/ci-worker:release'; const getPipelineGenerationCommand = ( - job: CheckoutCiJob, - pipelineGeneratorPath: string, - image = _image, - runFlags = _runFlags, + job: CheckoutCiJob, + pipelineGeneratorPath: string, + image = _image, + runFlags = _runFlags, ) => [ - "docker", - "run", - ...runFlags, - ...prependWith( - Object.entries(job.arguments).map(([key, val]) => `"${key}"="${val}"`), - "-e", - ), - "-v", - `${getSrcDirectoryForCiJob( - job, - )}/${pipelineGeneratorPath}:/pipeline_generator`, - image, - "/pipeline_generator", + 'docker', + 'run', + ...runFlags, + ...prependWith( + Object.entries(job.arguments).map(([key, val]) => `"${key}"="${val}"`), + '-e', + ), + '-v', + `${getSrcDirectoryForCiJob(job)}/${pipelineGeneratorPath}:/pipeline_generator`, + image, + '/pipeline_generator', ]; export interface CiWorkflow { - workflow: string; + workflow: string; } export const isCiWorkflow = (t: unknown): t is CiWorkflow => - 
isObject(t) && - "workflow" in t && - typeof t.workflow === "string" && - !t.workflow.includes(".."); -const CI_WORKFLOW_FILE = ".ci/ci.json"; + isObject(t) && 'workflow' in t && typeof t.workflow === 'string' && !t.workflow.includes('..'); +const CI_WORKFLOW_FILE = '.ci/ci.json'; diff --git a/worker/secret.ts b/worker/secret.ts index 951c539..e3edb2d 100644 --- a/worker/secret.ts +++ b/worker/secret.ts @@ -1,36 +1,32 @@ import { - Either, - getRequiredEnvVars, - getStdout, - type IEither, - type ITraceable, - type LogMetricTraceSupplier, - Metric, - TraceUtil, -} from "@emprespresso/pengueno"; + Either, + getRequiredEnvVars, + getStdout, + type IEither, + type ITraceable, + type LogMetricTraceSupplier, + Metric, + TraceUtil, +} from '@emprespresso/pengueno'; // -- <ISecret> -- export interface LoginItem { - login: { - username: string; - password: string; - }; + login: { + username: string; + password: string; + }; } export interface SecureNote { - notes: string; + notes: string; } export type SecretItem = LoginItem | SecureNote; export interface IVault<TClient, TKey, TItemId> { - unlock: (client: TClient) => Promise<IEither<Error, TKey>>; - lock: (client: TClient, key: TKey) => Promise<IEither<Error, TKey>>; + unlock: (client: TClient) => Promise<IEither<Error, TKey>>; + lock: (client: TClient, key: TKey) => Promise<IEither<Error, TKey>>; - fetchSecret: <T extends SecretItem>( - client: TClient, - key: TKey, - item: TItemId, - ) => Promise<IEither<Error, T>>; + fetchSecret: <T extends SecretItem>(client: TClient, key: TKey, item: TItemId) => Promise<IEither<Error, T>>; } // -- </ISecret> -- @@ -39,156 +35,122 @@ type TClient = ITraceable<unknown, LogMetricTraceSupplier>; type TKey = string; type TItemId = string; export class Bitwarden implements IVault<TClient, TKey, TItemId> { - constructor(private readonly config: BitwardenConfig) {} + constructor(private readonly config: BitwardenConfig) {} - public unlock(client: TClient) { - return client - .move(this.config) - .bimap(TraceUtil.withMetricTrace(Bitwarden.loginMetric)) - .flatMap((tConfig) => - tConfig.move(`bw config server ${tConfig.get().server}`).map(getStdout), - ) - .map(async (tEitherWithConfig) => { - const eitherWithConfig = await tEitherWithConfig.get(); - tEitherWithConfig.trace.trace("logging in~ ^.^"); - return eitherWithConfig.flatMapAsync((_) => - tEitherWithConfig - .move("bw login --apikey --quiet") - .map(getStdout) - .get(), - ); - }) - .peek(async (tEitherWithAuthd) => { - const eitherWithAuthd = await tEitherWithAuthd.get(); - return tEitherWithAuthd.trace.trace( - eitherWithAuthd.fold( - ({ isLeft }) => - Bitwarden.loginMetric[isLeft ? "failure" : "success"], - ), - ); - }) - .map(async (tEitherWithAuthd) => { - const eitherWithAuthd = await tEitherWithAuthd.get(); - tEitherWithAuthd.trace.trace("unlocking the secret vault~ (◕ᴗ◕✿)"); - return eitherWithAuthd.flatMapAsync((_) => - tEitherWithAuthd - .move("bw unlock --passwordenv BW_PASSWORD --raw") - .map(getStdout) - .get(), - ); - }) - .peek(async (tEitherWithSession) => { - const eitherWithAuthd = await tEitherWithSession.get(); - return tEitherWithSession.trace.trace( - eitherWithAuthd.fold( - ({ isLeft }) => - Bitwarden.unlockVaultMetric[isLeft ? 
"failure" : "success"], - ), - ); - }) - .get(); - } + public unlock(client: TClient) { + return client + .move(this.config) + .bimap(TraceUtil.withMetricTrace(Bitwarden.loginMetric)) + .flatMap((tConfig) => tConfig.move(`bw config server ${tConfig.get().server}`).map(getStdout)) + .map(async (tEitherWithConfig) => { + const eitherWithConfig = await tEitherWithConfig.get(); + tEitherWithConfig.trace.trace('logging in~ ^.^'); + return eitherWithConfig.flatMapAsync((_) => + tEitherWithConfig.move('bw login --apikey --quiet').map(getStdout).get(), + ); + }) + .peek(async (tEitherWithAuthd) => { + const eitherWithAuthd = await tEitherWithAuthd.get(); + return tEitherWithAuthd.trace.trace( + eitherWithAuthd.fold(({ isLeft }) => Bitwarden.loginMetric[isLeft ? 'failure' : 'success']), + ); + }) + .map(async (tEitherWithAuthd) => { + const eitherWithAuthd = await tEitherWithAuthd.get(); + tEitherWithAuthd.trace.trace('unlocking the secret vault~ (◕ᴗ◕✿)'); + return eitherWithAuthd.flatMapAsync((_) => + tEitherWithAuthd.move('bw unlock --passwordenv BW_PASSWORD --raw').map(getStdout).get(), + ); + }) + .peek(async (tEitherWithSession) => { + const eitherWithAuthd = await tEitherWithSession.get(); + return tEitherWithSession.trace.trace( + eitherWithAuthd.fold(({ isLeft }) => Bitwarden.unlockVaultMetric[isLeft ? 'failure' : 'success']), + ); + }) + .get(); + } - public fetchSecret<T extends SecretItem>( - client: TClient, - key: string, - item: string, - ): Promise<IEither<Error, T>> { - return client - .move(key) - .bimap(TraceUtil.withMetricTrace(Bitwarden.fetchSecretMetric)) - .peek((tSession) => - tSession.trace.trace(`looking for your secret ${item} (⑅˘꒳˘)`), - ) - .flatMap((tSession) => - tSession.move("bw list items").map((listCmd) => - getStdout(listCmd, { - env: { BW_SESSION: tSession.get() }, - }), - ), - ) - .map( - TraceUtil.promiseify((tEitherItemsJson) => - tEitherItemsJson - .get() - .flatMap( - (itemsJson): IEither<Error, Array<T & { name: string }>> => - Either.fromFailable(() => JSON.parse(itemsJson)), + public fetchSecret<T extends SecretItem>(client: TClient, key: string, item: string): Promise<IEither<Error, T>> { + return client + .move(key) + .bimap(TraceUtil.withMetricTrace(Bitwarden.fetchSecretMetric)) + .peek((tSession) => tSession.trace.trace(`looking for your secret ${item} (⑅˘꒳˘)`)) + .flatMap((tSession) => + tSession.move('bw list items').map((listCmd) => + getStdout(listCmd, { + env: { BW_SESSION: tSession.get() }, + }), + ), + ) + .map( + TraceUtil.promiseify((tEitherItemsJson) => + tEitherItemsJson + .get() + .flatMap( + (itemsJson): IEither<Error, Array<T & { name: string }>> => + Either.fromFailable(() => JSON.parse(itemsJson)), + ) + .flatMap((itemsList): IEither<Error, T> => { + const secret = itemsList.find(({ name }) => name === item); + if (!secret) { + return Either.left(new Error(`couldn't find the item ${item} (。•́︿•̀。)`)); + } + return Either.right(secret); + }), + ), ) - .flatMap((itemsList): IEither<Error, T> => { - const secret = itemsList.find(({ name }) => name === item); - if (!secret) { - return Either.left( - new Error(`couldn't find the item ${item} (。•́︿•̀。)`), + .peek(async (tEitherWithSecret) => { + const eitherWithSecret = await tEitherWithSecret.get(); + return tEitherWithSecret.trace.trace( + eitherWithSecret.fold(({ isLeft }) => Bitwarden.fetchSecretMetric[isLeft ? 
'failure' : 'success']), ); - } - return Either.right(secret); - }), - ), - ) - .peek(async (tEitherWithSecret) => { - const eitherWithSecret = await tEitherWithSecret.get(); - return tEitherWithSecret.trace.trace( - eitherWithSecret.fold( - ({ isLeft }) => - Bitwarden.fetchSecretMetric[isLeft ? "failure" : "success"], - ), - ); - }) - .get(); - } + }) + .get(); + } - public lock(client: TClient, key: TKey) { - return client - .move(key) - .bimap(TraceUtil.withMetricTrace(Bitwarden.lockVaultMetric)) - .peek((tSession) => - tSession.trace.trace(`taking care of locking the vault :3`), - ) - .flatMap((tSession) => - tSession.move("bw lock").map((lockCmd) => - getStdout(lockCmd, { - env: { BW_SESSION: tSession.get() }, - }), - ), - ) - .peek(async (tEitherWithLocked) => { - const eitherWithLocked = await tEitherWithLocked.get(); - return eitherWithLocked.fold(({ isLeft }) => { - tEitherWithLocked.trace.trace( - Bitwarden.lockVaultMetric[isLeft ? "failure" : "success"], - ); - if (isLeft) return; - tEitherWithLocked.trace.trace( - "all locked up and secure now~ (。•̀ᴗ-)✧", - ); - }); - }) - .get(); - } + public lock(client: TClient, key: TKey) { + return client + .move(key) + .bimap(TraceUtil.withMetricTrace(Bitwarden.lockVaultMetric)) + .peek((tSession) => tSession.trace.trace(`taking care of locking the vault :3`)) + .flatMap((tSession) => + tSession.move('bw lock').map((lockCmd) => + getStdout(lockCmd, { + env: { BW_SESSION: tSession.get() }, + }), + ), + ) + .peek(async (tEitherWithLocked) => { + const eitherWithLocked = await tEitherWithLocked.get(); + return eitherWithLocked.fold(({ isLeft }) => { + tEitherWithLocked.trace.trace(Bitwarden.lockVaultMetric[isLeft ? 'failure' : 'success']); + if (isLeft) return; + tEitherWithLocked.trace.trace('all locked up and secure now~ (。•̀ᴗ-)✧'); + }); + }) + .get(); + } - public static getConfigFromEnvironment(): IEither<Error, BitwardenConfig> { - return getRequiredEnvVars([ - "BW_SERVER", - "BW_CLIENTSECRET", - "BW_CLIENTID", - "BW_PASSWORD", - ]).mapRight(({ BW_SERVER, BW_CLIENTSECRET, BW_CLIENTID }) => ({ - clientId: BW_CLIENTID, - secret: BW_CLIENTSECRET, - server: BW_SERVER, - })); - } + public static getConfigFromEnvironment(): IEither<Error, BitwardenConfig> { + return getRequiredEnvVars(['BW_SERVER', 'BW_CLIENTSECRET', 'BW_CLIENTID', 'BW_PASSWORD']).mapRight( + ({ BW_SERVER, BW_CLIENTSECRET, BW_CLIENTID }) => ({ + clientId: BW_CLIENTID, + secret: BW_CLIENTSECRET, + server: BW_SERVER, + }), + ); + } - private static loginMetric = Metric.fromName("Bitwarden.login"); - private static unlockVaultMetric = Metric.fromName("Bitwarden.unlockVault"); - private static fetchSecretMetric = Metric.fromName("Bitwarden.fetchSecret"); - private static lockVaultMetric = Metric.fromName("Bitwarden.lock"); + private static loginMetric = Metric.fromName('Bitwarden.login'); + private static unlockVaultMetric = Metric.fromName('Bitwarden.unlockVault'); + private static fetchSecretMetric = Metric.fromName('Bitwarden.fetchSecret'); + private static lockVaultMetric = Metric.fromName('Bitwarden.lock'); } export interface BitwardenConfig { - server: string; - secret: string; - clientId: string; + server: string; + secret: string; + clientId: string; } // -- </IVault> -- diff --git a/worker/tsconfig.json b/worker/tsconfig.json new file mode 100644 index 0000000..58e9147 --- /dev/null +++ b/worker/tsconfig.json @@ -0,0 +1,15 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "outDir": "./dist", + "rootDir": "./", + "composite": true, + "declaration": true, + 
"declarationMap": true, + "sourceMap": true, + "noEmit": false + }, + "include": ["**/*.ts"], + "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"], + "references": [{ "path": "../u" }, { "path": "../model" }] +} |