name: CI

on:
  pull_request:
  push:
    branches:
      - master
      - main
    tags-ignore:
      - v*

concurrency:
  # Only run once for latest commit per ref and cancel other (previous) runs.
  group: ci-${{ github.ref }}
  cancel-in-progress: true

jobs:
  style-compile-mima:
    name: Compile, Code Style, Binary Compatibility
    runs-on: ubuntu-20.04
    env:
      JAVA_OPTS: -Xms2G -Xmx2G -Xss2M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        # temporarily do full checkout
        # with: # https://github.com/olafurpg/setup-scala#faster-checkout-of-big-repos
        #   fetch-depth: 100
      #- name: Fetch tags
      #  run: git fetch --depth=100 origin +refs/tags/*:refs/tags/*
      - name: Setup Java 8
        uses: actions/setup-java@v3
        with:
          distribution: temurin
          java-version: 8
      - name: Cache Coursier cache
        uses: coursier/cache-action@v6
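        # Restores and saves the Coursier artifact cache between CI runs (default
        # cache-action configuration assumed here).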
      # temporarily remove mima checks
      - name: "Code style, compile tests, MiMa. Run locally with: sbt +~2.13 \"javafmtCheckAll; Test/compile; mimaReportBinaryIssues\""
        run: sbt "javafmtCheckAll; +Test/compile"

  documentation:
    name: ScalaDoc, Documentation with Paradox
    runs-on: ubuntu-20.04
    env:
      JAVA_OPTS: -Xms2G -Xmx2G -Xss2M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        # temporarily do full checkout
        # with: # https://github.com/olafurpg/setup-scala#faster-checkout-of-big-repos
        #   fetch-depth: 100
      #- name: Fetch tags
      #  run: git fetch --depth=100 origin +refs/tags/*:refs/tags/*
      - name: Setup Java 11
        uses: actions/setup-java@v3
        with:
          distribution: temurin
          java-version: 11
      - name: Cache Coursier cache
        uses: coursier/cache-action@v6
- name: "Create all API docs and create site with Paradox"
run: sbt docs/makeSite
# TODO: Fix after documentation updates
# - name: Run Link Validator
# run: cs launch net.runne::site-link-validator:0.2.2 -- scripts/link-validator.conf
connectors:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
include:
- { connector: amqp, pre_cmd: 'docker-compose up -d amqp' }
- { connector: avroparquet }
- { connector: awslambda }
- { connector: aws-event-bridge, pre_cmd: 'docker-compose up -d amazoneventbridge' }
- { connector: azure-storage-queue }
- { connector: cassandra, pre_cmd: 'docker-compose up -d cassandra' }
- { connector: couchbase, pre_cmd: 'docker-compose up -d couchbase_prep' }
- { connector: csv }
- { connector: dynamodb, pre_cmd: 'docker-compose up -d dynamodb' }
- { connector: elasticsearch, pre_cmd: 'docker-compose up -d elasticsearch6 elasticsearch7 opensearch1' }
- { connector: file }
- { connector: ftp, pre_cmd: './scripts/ftp-servers.sh' }
- { connector: geode, pre_cmd: 'docker-compose up -d geode' }
- { connector: google-cloud-bigquery }
- { connector: google-cloud-bigquery-storage }
- { connector: google-cloud-pub-sub, pre_cmd: 'docker-compose up -d gcloud-pubsub-emulator_prep' }
- { connector: google-cloud-pub-sub-grpc, pre_cmd: 'docker-compose up -d gcloud-pubsub-emulator_prep' }
- { connector: google-cloud-storage }
- { connector: google-common }
- { connector: google-fcm }
# hbase disabled until we resolve why new docker image fails our build: https://github.com/akka/alpakka/issues/2185
# - { connector: hbase, pre_cmd: 'docker-compose up -d hbase' }
- { connector: hdfs, pre_cmd: 'file ${HOME}/.cache/coursier/v1/https/repo1.maven.org/maven2/org/typelevel/cats-kernel_2.13/2.0.0/cats-kernel_2.13-2.0.0.jar' }
- { connector: huawei-push-kit }
- { connector: influxdb, pre_cmd: 'docker-compose up -d influxdb' }
- { connector: ironmq, pre_cmd: 'docker-compose up -d ironauth ironmq' }
- { connector: jms, pre_cmd: 'docker-compose up -d ibmmq' }
- { connector: json-streaming }
- { connector: kinesis }
- { connector: kudu, pre_cmd: 'docker-compose up -d kudu-master-data kudu-tserver-data kudu-master kudu-tserver' }
- { connector: mongodb, pre_cmd: 'docker-compose up -d mongo' }
- { connector: mqtt, pre_cmd: 'docker-compose up -d mqtt' }
- { connector: mqtt-streaming, pre_cmd: 'docker-compose up -d mqtt' }
- { connector: orientdb, pre_cmd: 'docker-compose up -d orientdb' }
- { connector: pravega, pre_cmd: 'docker-compose up -d pravega'}
- { connector: reference }
- { connector: s3 }
- { connector: spring-web }
- { connector: simple-codecs }
- { connector: slick }
- { connector: sns, pre_cmd: 'docker-compose up -d amazonsns' }
- { connector: solr }
- { connector: sqs, pre_cmd: 'docker-compose up -d elasticmq' }
- { connector: sse }
- { connector: text }
- { connector: udp }
- { connector: unix-domain-socket }
- { connector: xml }
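          # Each include entry above becomes its own matrix job; the optional pre_cmd
          # starts the Docker services that connector's tests expect before sbt runs.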
    env:
      JAVA_OPTS: -Xms2G -Xmx2G -Xss2M -XX:ReservedCodeCacheSize=256M -Dfile.encoding=UTF-8
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        # temporarily do full checkout
        # with: # gh-detect-changes.sh compares with the target branch
        #   fetch-depth: 0
      - name: Setup Java 8
        uses: actions/setup-java@v3
        with:
          distribution: temurin
          java-version: 8
      - name: Cache Coursier cache
        uses: coursier/cache-action@v6
      - name: ${{ matrix.connector }}
        env:
          CONNECTOR: ${{ matrix.connector }}
          PRE_CMD: ${{ matrix.pre_cmd }}
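        # PRE_CMD expands to an empty string for connectors without a pre_cmd entry,
        # making the first script line a no-op; the leading '+' runs the connector's
        # tests for every cross-built Scala version.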
        run: |-
          $PRE_CMD
          sbt "+${CONNECTOR}/test"
      - name: Print logs on failure
        if: failure()
        run: find . -name "*.log" -exec ./scripts/cat-log.sh {} \;
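
# To reproduce a single matrix entry locally (a sketch, assuming Docker, docker-compose
# and sbt are available and the commands are run from the repository root; mqtt is just
# an example connector):
#   docker-compose up -d mqtt
#   sbt "+mqtt/test"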