Merge pull request #1347 from openmeterio/feat/migrations-ci
Add CI checks in separate PR
GAlexIHU authored Aug 21, 2024
2 parents 61894e7 + 429ade5 commit c1b8bb9
Showing 9 changed files with 286 additions and 40 deletions.
36 changes: 36 additions & 0 deletions .github/workflows/ci.yaml
@@ -72,6 +36,42 @@ jobs:
engine.stderr.log
retention-days: 14

migrations:
name: Migration Checks
runs-on: depot-ubuntu-latest-8

steps:
# Required as a workaround for Dagger to properly detect Git metadata
- name: Checkout repository
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
fetch-depth: 0 # Needed to compare against base branch

- name: Run pipeline
uses: dagger/dagger-for-github@29a88e72255e732147ba18a670978b90bcc59efd # v6.4.0
with:
verb: call
module: github.com/${{ github.repository }}@${{ github.ref }}
args: --ref ${{ github.ref }} migrate check
cloud-token: ${{ secrets.DAGGER_CLOUD_TOKEN }}
version: ${{ env.DAGGER_VERSION }}

- name: Export Dagger Engine logs
id: export-dagger-engine-logs
run: docker logs $(docker container list --all --filter 'name=^dagger-engine-*' --format '{{.Names}}') > engine.stdout.log 2> engine.stderr.log
if: always()
continue-on-error: true

- name: Upload Dagger Engine logs as artifact
uses: actions/upload-artifact@89ef406dd8d7e03cfd12d9e0a4a378f454709029 # v4.3.5
if: always() && steps.export-dagger-engine-logs.outcome == 'success'
with:
name: "[${{ github.job }}] Dagger Engine logs"
path: |
engine.stdout.log
engine.stderr.log
retention-days: 14

lint:
name: Lint
runs-on: depot-ubuntu-latest-8
5 changes: 5 additions & 0 deletions Makefile
@@ -18,6 +18,11 @@ gen-api: ## Generate API and SDKs
dagger call --source .:default generate web-sdk -o api/client/web
dagger call --source .:default generate python-sdk -o api/client/python

.PHONY: migrate-check
migrate-check: ## Validate migrations
$(call print-target)
dagger call --source .:default migrate check

.PHONY: generate
generate: ## Generate code
$(call print-target)
50 changes: 38 additions & 12 deletions atlas.hcl
@@ -1,12 +1,9 @@
env "local" {
// Declare where the schema definition resides.
src = "ent://internal/ent/schema"
src = "${local.schema_src}"

migration {
// Define the directory where the migrations are stored.
dir = "file://tools/migrate/migrations"
// We use golang-migrate
format = golang-migrate
dir = "${local.migrations_dir}"
format = "${local.migrations_format}"
}

format {
@@ -15,8 +12,7 @@ env "local" {
}
}

// Define the URL of the database which is managed in this environment.
url = "postgres://postgres:postgres@localhost:5432/postgres?search_path=public&sslmode=disable"
url = "${local.local_url}"

// Define the URL of the Dev Database for this environment
// See: https://atlasgo.io/concepts/dev-database
@@ -28,18 +24,34 @@ env "local" {
}
}

env "ci" {
src = "${local.schema_src}"

migration {
dir = "${local.migrations_dir}"
format = "${local.migrations_format}"
}

format {
migrate {
diff = "{{ sql . \" \" }}"
}
}

dev = "${local.ci_url}"
}

// CAN be used for all remote deployments
env "remote" {
// Declare where the schema definition resides.
src = "ent://internal/ent/schema"
src = "${local.schema_src}"

migration {
// Define the directory where the migrations are stored.
dir = "file://tools/migrate/migrations"
// We use golang-migrate
format = golang-migrate
format = "${local.migrations_format}"
// Remote deployments already had auto deploy present
baseline = "20240807123504"
baseline = "${local.init_migration_ts}"
}

format {
@@ -53,6 +65,20 @@ env "remote" {
dev = "docker://postgres/15/dev?search_path=public"
}

locals {
// Define the directory where the schema definition resides.
schema_src = "ent://internal/ent/schema"
// Define the initial migration timestamp
init_migration_ts = "20240807123504"
// Define the directory where the migrations are stored.
migrations_dir = "file://tools/migrate/migrations"
// We use golang-migrate
migrations_format = "golang-migrate"
// Define common connection URLs
local_url = "postgres://postgres:postgres@localhost:5432/postgres?search_path=public&sslmode=disable"
ci_url = "postgres://postgres:postgres@postgres:5432/postgres?search_path=public&sslmode=disable"
}

lint {
non_linear {
error = true
14 changes: 11 additions & 3 deletions ci/e2e.go
@@ -70,12 +70,20 @@ func redis() *dagger.Service {
AsService()
}

func postgres() *dagger.Service {
func pg() *dagger.Container {
return dag.Container().
From(fmt.Sprintf("postgres:%s", postgresVersion)).
WithEnvVariable("POSTGRES_USER", "postgres").
WithEnvVariable("POSTGRES_PASSWORD", "postgres").
WithEnvVariable("POSTGRES_DB", "postgres").
WithExposedPort(5432).
AsService()
WithExposedPort(5432)
}

func postgres() *dagger.Service {
return pg().AsService()
}

// Creates a postgres service that is unique per name
func postgresNamed(name string) *dagger.Service {
return pg().WithLabel("uniq-name", name).AsService()
}
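
In the e2e helpers above, the postgres setup is split so that pg() returns the base container, postgres() keeps the original default service, and postgresNamed() stamps the container with a uniq-name label. The label presumably keeps Dagger from reusing one cached postgres service across the parallel migration checks: identically configured services would otherwise be indistinguishable. A minimal sketch of how such a named binding is consumed, with the helper name and check name invented purely for illustration:

// Sketch only (not part of this diff): bind a uniquely named postgres service
// so this check runs against its own database; another check using a different
// name gets a separate instance even though the container is otherwise identical.
func withOwnDatabase(atlas *dagger.Container, checkName string) *dagger.Container {
	return atlas.
		WithServiceBinding("postgres", postgresNamed(checkName)).
		WithExec([]string{"atlas", "migrate", "--env", "ci", "validate"})
}

The migration checks added in ci/migrate.go below bind three such services (no-diff, last-10, validate), so the diff, lint, and validate steps never share database state.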
98 changes: 98 additions & 0 deletions ci/migrate.go
@@ -0,0 +1,98 @@
package main

import (
"context"
"fmt"

"github.com/openmeterio/openmeter/ci/internal/dagger"
"github.com/sourcegraph/conc/pool"
)

func (m *Ci) Migrate() *Migrate {
return &Migrate{
Source: m.Source,
}
}

type Migrate struct {
Source *dagger.Directory
}

func (m *Migrate) Check(
ctx context.Context,
) error {
app := goModuleCross("").
WithSource(m.Source).
Container().
WithEnvVariable("GOFLAGS", "-tags=musl")

bin := dag.Container(dagger.ContainerOpts{
Platform: "linux/amd64",
}).From("arigaio/atlas:0.25.0").File("atlas")

atlas := app.
WithFile("/bin/atlas", bin).
WithDirectory("internal/ent", m.Source.Directory("internal/ent")).
WithDirectory("tools/migrate/migrations", m.Source.Directory("tools/migrate/migrations")).
WithFile("atlas.hcl", m.Source.File("atlas.hcl"))

p := pool.New().WithErrors().WithContext(ctx)

// Always validate schema is generated
p.Go(func(ctx context.Context) error {
result := app.
WithExec([]string{"go", "generate", "-x", "-tags=musl", "-ldflags", "linkmode=external", "./internal/ent/..."}).
Directory("internal/ent")

source := m.Source.Directory("internal/ent")

err := diff(ctx, source, result)
if err != nil {
return fmt.Errorf("schema is not in sync with generated code")
}
return nil
})

// Always validate migrations are in sync with schema
p.Go(func(ctx context.Context) error {
result := atlas.
WithServiceBinding("postgres", postgresNamed("no-diff")).
WithExec([]string{"atlas", "migrate", "--env", "ci", "diff", "test"}).
Directory("tools/migrate/migrations")

source := m.Source.Directory("tools/migrate/migrations")
err := diff(ctx, source, result)
if err != nil {
return fmt.Errorf("migrations are not in sync with schema")
}

return nil
})

// Always lint last 10 migrations
p.Go(syncFunc(
atlas.
WithServiceBinding("postgres", postgresNamed("last-10")).
WithExec([]string{"atlas", "migrate", "--env", "ci", "lint", "--latest", "10"}),
))

// Validate checksum is intact
p.Go(syncFunc(
atlas.
WithServiceBinding("postgres", postgresNamed("validate")).
WithExec([]string{"atlas", "migrate", "--env", "ci", "validate"}),
))

return p.Wait()
}

func diff(ctx context.Context, d1, d2 *dagger.Directory) error {
_, err := dag.Container(dagger.ContainerOpts{Platform: ""}).
From(alpineBaseImage).
WithExec([]string{"apk", "add", "--update", "--no-cache", "ca-certificates", "tzdata", "bash"}).
WithDirectory("src", d1).
WithDirectory("res", d2).
WithExec([]string{"diff", "-u", "-r", "src", "res"}).
Sync(ctx)
return err
}
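
Two details of this file merit a note. The syncFunc helper used for the lint and validate steps is not part of this diff; a plausible shape, offered only as an assumption about the rest of the ci module, is a thin adapter that lets the conc pool execute a container's pipeline and collect its error:

// Assumed shape of syncFunc (defined elsewhere in the ci module, not shown in
// this diff): run the container's pipeline and surface any execution error to
// the pool.
func syncFunc(c *dagger.Container) func(context.Context) error {
	return func(ctx context.Context) error {
		_, err := c.Sync(ctx)
		return err
	}
}

The diff helper above needs no extra failure handling of its own: diff -u -r exits non-zero when the two directories differ, so Sync returns an error and the corresponding check fails with the wrapped message.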
12 changes: 8 additions & 4 deletions dagger.json
@@ -10,10 +10,6 @@
"go.work.sum"
],
"dependencies": [
{
"name": "kafka",
"source": "github.com/sagikazarmark/daggerverse/kafka@c964ee26f982c4db0282523cd06f75ecb7e1102f"
},
{
"name": "archivist",
"source": "github.com/sagikazarmark/daggerverse/archivist@8f444e2c2b8e8162cea76d702086034ed3edc4f1"
@@ -42,6 +38,14 @@
"name": "helm-docs",
"source": "github.com/sagikazarmark/daggerverse/helm-docs@8f444e2c2b8e8162cea76d702086034ed3edc4f1"
},
{
"name": "kafka",
"source": "github.com/sagikazarmark/daggerverse/kafka@c964ee26f982c4db0282523cd06f75ecb7e1102f"
},
{
"name": "nix",
"source": "github.com/tsirysndr/daggerverse/nix@631932b459d218e641dec8047085f0cc87cf5f1c"
},
{
"name": "python",
"source": "github.com/sagikazarmark/daggerverse/python@8f444e2c2b8e8162cea76d702086034ed3edc4f1"
69 changes: 50 additions & 19 deletions flake.nix
@@ -13,16 +13,17 @@
inputs.devenv.flakeModule
];

systems = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" ];
systems = [ "x86_64-linux" "x86_64-darwin" "aarch64-darwin" "aarch64-linux" ];

perSystem = { config, self', inputs', pkgs, system, ... }: rec {
perSystem = { config, self', inputs', pkgs, lib, system, ... }: rec {
_module.args.pkgs = import inputs.nixpkgs {
inherit system;

overlays = [
(final: prev: {
dagger = inputs'.dagger.packages.dagger;
licensei = self'.packages.licensei;
atlasx = self'.packages.atlasx;
})
];
};
@@ -91,7 +92,7 @@
# python
poetry

self'.packages.atlasx
atlasx

just
semver-tool
@@ -117,6 +118,19 @@
};

ci = devenv.shells.default;

# Lightweight target to use inside dagger
dagger = {
languages = {
go = devenv.shells.default.languages.go;
};
packages = with pkgs; [
gnumake
git
atlasx
];
containers = devenv.shells.default.containers;
};
};

packages = {
@@ -142,25 +156,42 @@
];
};

atlasx = pkgs.stdenv.mkDerivation rec {
pname = "atlasx";
version = "0.25.0";
src = pkgs.fetchurl {
# License: https://ariga.io/legal/atlas/eula/eula-20240804.pdf
url = "https://release.ariga.io/atlas/atlas-darwin-arm64-v${version}";
hash = "sha256-bYJtNDE13UhJWL4ALLKI0sHMZrDS//kFWzguGX63EAo=";
};
atlasx =
let
systemMappings = {
x86_64-linux = "linux-amd64";
x86_64-darwin = "darwin-amd64";
aarch64-darwin = "darwin-arm64";
aarch64-linux = "linux-arm64";
};
hashMappings = {
x86_64-linux = "sha256-6270kOQ0uqiv/ljHtAi41uCzb+bkf+99rnmsc87/n6w=";
x86_64-darwin = "sha256-KeOp6LeHIY59Y2DJVAhMcr9xyb3KItFqEs6y9+uA7rM=";
aarch64-darwin = "sha256-bYJtNDE13UhJWL4ALLKI0sHMZrDS//kFWzguGX63EAo=";
aarch64-linux = "sha256-pRLZo7bwFJ1Xxlw2Afi/tAT6HSEvQ/B83ZzHGzCKXT8=";
};
in
pkgs.stdenv.mkDerivation rec {
pname = "atlasx";
version = "0.25.0";

src = pkgs.fetchurl {
# License: https://ariga.io/legal/atlas/eula/eula-20240804.pdf
url = "https://release.ariga.io/atlas/atlas-${systemMappings."${system}"}-v${version}";
hash = hashMappings."${system}";
};

unpackPhase = ''
cp $src atlas
'';
unpackPhase = ''
cp $src atlas
'';

installPhase = ''
mkdir -p $out/bin
cp atlas $out/bin/atlas
'';
installPhase = ''
mkdir -p $out/bin
cp atlas $out/bin/atlas
chmod +x $out/bin/atlas
'';

};
};
};
};
};