Skip to content

Commit

Permalink
feat: add files for saving service api logs into files (#169)
Browse files Browse the repository at this point in the history
* feat: add files for saving service api logs into files

issue 269 on jira

* feat: add missing values for init files

* feat: add fluentbit deployment, logging at console only

* feat: fix openshift deployment file paths

* feat: file mapping with wrong values
  • Loading branch information
Ricardo Campos authored Jan 9, 2024
1 parent c012826 commit 28545fd
Show file tree
Hide file tree
Showing 8 changed files with 499 additions and 7 deletions.
34 changes: 30 additions & 4 deletions .github/workflows/merge.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,11 @@ jobs:
overwrite: false
parameters:
-p ZONE=test -p NAME=${{ github.event.repository.name }}
-p AWS_KINESIS_STREAM='${{ secrets.AWS_KINESIS_STREAM }}'
-p AWS_KINESIS_ROLE_ARN='${{ secrets.AWS_KINESIS_ROLE_ARN }}'
-p AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }}
-p AWS_ACCESS_KEY_SECRET='${{ secrets.AWS_ACCESS_KEY_SECRET }}'
triggers: ('common/' 'database/' 'backend/' 'frontend/')

deploys-test:
name: TEST Deployments
Expand All @@ -59,19 +64,27 @@ jobs:
issues: write
strategy:
matrix:
name: [database, backend, frontend]
name: [database, backend, frontend, fluentbit]
include:
- name: database
overwrite: false
file: database/openshift.deploy.yml
- name: backend
verification_path: actuator/health
file: backend/openshift.deploy.yml
parameters:
-p RESULTS_ENV_OPENSEARCH=test
- name: frontend
file: frontend/openshift.deploy.yml
parameters:
-p VITE_USER_POOLS_WEB_CLIENT_ID=${{ vars.VITE_USER_POOLS_WEB_CLIENT_ID }}
- name: fluentbit
file: common/openshift.fluentbit.yml
overwrite: true
steps:
- uses: bcgov-nr/[email protected]
with:
file: ${{ matrix.name }}/openshift.deploy.yml
file: ${{ matrix.file }}
oc_namespace: ${{ vars.OC_NAMESPACE }}
oc_server: ${{ vars.OC_SERVER }}
oc_token: ${{ secrets.OC_TOKEN }}
Expand All @@ -98,6 +111,11 @@ jobs:
overwrite: false
parameters:
-p ZONE=prod -p NAME=${{ github.event.repository.name }}
-p AWS_KINESIS_STREAM='${{ secrets.AWS_KINESIS_STREAM }}'
-p AWS_KINESIS_ROLE_ARN='${{ secrets.AWS_KINESIS_ROLE_ARN }}'
-p AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }}
-p AWS_ACCESS_KEY_SECRET='${{ secrets.AWS_ACCESS_KEY_SECRET }}'
triggers: ('common/' 'database/' 'backend/' 'frontend/')

image-promotions:
name: Promote images to PROD
Expand All @@ -123,19 +141,27 @@ jobs:
runs-on: ubuntu-22.04
strategy:
matrix:
name: [database, backend, frontend]
name: [database, backend, frontend, fluentbit]
include:
- name: database
overwrite: false
file: database/openshift.deploy.yml
- name: backend
verification_path: actuator/health
file: backend/openshift.deploy.yml
parameters:
-p RESULTS_ENV_OPENSEARCH=production
- name: frontend
file: frontend/openshift.deploy.yml
parameters:
-p VITE_USER_POOLS_WEB_CLIENT_ID=${{ vars.VITE_USER_POOLS_WEB_CLIENT_ID }}
- name: fluentbit
file: common/openshift.fluentbit.yml
overwrite: true
steps:
- uses: bcgov-nr/[email protected]
with:
file: ${{ matrix.name }}/openshift.deploy.yml
file: ${{ matrix.file }}
oc_namespace: ${{ vars.OC_NAMESPACE }}
oc_server: ${{ vars.OC_SERVER }}
oc_token: ${{ secrets.OC_TOKEN }}
Expand Down
12 changes: 10 additions & 2 deletions .github/workflows/pr-open.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,10 @@ jobs:
parameters:
-p ZONE=${{ steps.tag.outputs.tag }}
-p NAME=${{ github.event.repository.name }}
-p AWS_KINESIS_STREAM='${{ secrets.AWS_KINESIS_STREAM }}'
-p AWS_KINESIS_ROLE_ARN='${{ secrets.AWS_KINESIS_ROLE_ARN }}'
-p AWS_ACCESS_KEY_ID=${{ secrets.AWS_ACCESS_KEY_ID }}
-p AWS_ACCESS_KEY_SECRET='${{ secrets.AWS_ACCESS_KEY_SECRET }}'
triggers: ('common/' 'database/' 'backend/' 'frontend/')

builds:
Expand Down Expand Up @@ -90,7 +94,7 @@ jobs:
runs-on: ubuntu-22.04
strategy:
matrix:
name: [database, backend, frontend]
name: [database, backend, frontend, fluentbit]
include:
- name: database
file: database/openshift.deploy.yml
Expand All @@ -112,10 +116,14 @@ jobs:
-p VITE_USER_POOLS_WEB_CLIENT_ID=${{ vars.VITE_USER_POOLS_WEB_CLIENT_ID }}
-p MIN_REPLICAS=1
-p MAX_REPLICAS=2
- name: fluentbit
file: common/openshift.fluentbit.yml
overwrite: true
triggers: ('common/' 'database/' 'backend/' 'frontend/')
steps:
- uses: bcgov-nr/[email protected]
with:
file: ${{ matrix.name }}/openshift.deploy.yml
file: ${{ matrix.file }}
oc_namespace: ${{ vars.OC_NAMESPACE }}
oc_server: ${{ vars.OC_SERVER }}
oc_token: ${{ secrets.OC_TOKEN }}
Expand Down
13 changes: 13 additions & 0 deletions backend/openshift.deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,10 @@ parameters:
- name: DB_POOL_MAX_LIFETIME
description: Maximum lifetime of a connection in the pool.
value: "1800000"
- name: RESULTS_ENV_OPENSEARCH
description: Environment name for OpenSearch. # One of: development, test, production
required: true
value: development
objects:
- apiVersion: v1
kind: ImageStream
Expand Down Expand Up @@ -93,6 +97,9 @@ objects:
- image: "${NAME}-${ZONE}-${COMPONENT}:${IMAGE_TAG}"
imagePullPolicy: Always
name: "${NAME}"
volumeMounts:
- name: ${NAME}-${ZONE}-fluentbit-logs
mountPath: /logs
env:
- name: POSTGRES_HOST
value: ${NAME}-${ZONE}-database
Expand All @@ -117,6 +124,8 @@ objects:
value: ${DB_POOL_IDLE_TIMEOUT}
- name: DB_POOL_MAX_LIFETIME
value: ${DB_POOL_MAX_LIFETIME}
- name: RESULTS_ENV_OPENSEARCH
value: ${RESULTS_ENV_OPENSEARCH}
ports:
- containerPort: 8080
protocol: TCP
Expand Down Expand Up @@ -147,6 +156,10 @@ objects:
initialDelaySeconds: 60
periodSeconds: 30
timeoutSeconds: 5
volumes:
- name: ${NAME}-${ZONE}-fluentbit-logs
persistentVolumeClaim:
claimName: ${NAME}-${ZONE}-fluentbit-logs
- apiVersion: v1
kind: Service
metadata:
Expand Down
3 changes: 3 additions & 0 deletions backend/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -212,6 +212,9 @@
<artifactId>lombok</artifactId>
</exclude>
</excludes>
<profiles>
<profile>dev</profile>
</profiles>
</configuration>
</plugin>

Expand Down
7 changes: 6 additions & 1 deletion backend/src/main/resources/application.properties
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# General application
logging.level.ca.bc.gov.restapi.results = ${LOGGING_LEVEL:INFO}
spring.application.name = results-backend-api
spring.application.name = results-api
server.error.include-message=always
server.port = ${SERVER_PORT:8080}

Expand Down Expand Up @@ -31,3 +31,8 @@ spring.jpa.show-sql = true
spring.jpa.hibernate.ddl-auto = update
spring.jpa.defer-datasource-initialization=true
spring.sql.init.mode=always

# OpenSearch settings
nr-results-ecs-version = 8.9
nr-results-backend-env-opensearch = ${RESULTS_ENV_OPENSEARCH:development}
nr-results-team-email-address = [email protected]
70 changes: 70 additions & 0 deletions backend/src/main/resources/logback-spring.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
  <!-- Base directory for file logging; matches the /logs volumeMount in openshift.deploy.yml -->
  <property name="LOGS" value="/logs" />
  <!-- Values sourced from application.properties so the ECS-formatted log line carries deployment metadata -->
  <springProperty scope="context" name="serviceEnv" source="nr-results-backend-env-opensearch" />
  <springProperty scope="context" name="applicationName" source="spring.application.name" />
  <springProperty scope="context" name="teamEmail" source="nr-results-team-email-address" />
  <springProperty scope="context" name="ecsVersion" source="nr-results-ecs-version" />

  <appender name="Console" class="ch.qos.logback.core.ConsoleAppender">
    <!-- https://logback.qos.ch/manual/layouts.html -->
    <!-- %d = 2006-10-20 14:06:49,812 or we can use like %date{yyyy-MM-dd HH:mm:ss.SSS}, or %d{ISO8601} -->
    <!-- %p = level -->
    <!-- %C{length} = fully qualified class name of the caller -->
    <!-- %t = thread name -->
    <!-- %m = message -->
    <!-- %n = new line -->
    <layout class="ch.qos.logback.classic.PatternLayout">
      <Pattern>
        %date{yyyy-MM-dd HH:mm:ss.SSS} %highlight(%-5level) [%blue(%t)] %yellow(%c): %msg%n%throwable
      </Pattern>
    </layout>
  </appender>

  <!-- JSON (ECS-shaped) lines written to /logs for Fluent Bit to ship; %nopex suppresses the
       appended stack trace and %replace escapes double quotes so each line stays valid JSON -->
  <appender name="RollingFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <file>${LOGS}/results-api.log</file>
    <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
      <Pattern>{%nopex"labels.project":"${applicationName}","service.environment":"${serviceEnv}","@timestamp":"%date{yyyy-MM-dd HH:mm:ss.SSS}","log.level":"%p","log.logger":"%logger{36}","message":"%replace(%msg){'\"','\\"'}","ecs.version":"${ecsVersion}","event.category":"web","event.dataset":"application.log.utc","event.ingested":"diagnostic","event.kind":"event","organization.id":"${teamEmail}","organization.name":"TeamSILVA"}%n</Pattern>
    </encoder>
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
      <!-- rollover daily and when the file reaches 10 MegaBytes -->
      <fileNamePattern>${LOGS}/archived/results-api-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
      <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
        <maxFileSize>10MB</maxFileSize>
      </timeBasedFileNamingAndTriggeringPolicy>
    </rollingPolicy>
  </appender>

  <springProfile name="dev,all-tests,integration-test,test">
    <!-- LOG everything at INFO level -->
    <root level="info">
      <appender-ref ref="Console" />
    </root>
    <!-- LOG "ca.bc.gov.restapi.results*" at TRACE level -->
    <logger name="ca.bc.gov.restapi.results" level="trace" additivity="false">
      <appender-ref ref="Console" />
    </logger>
  </springProfile>

  <springProfile name="default,prod,native">
    <!-- LOG everything at INFO level (info implies warn and error).
         NOTE: there must be exactly ONE <root> element; declaring several
         (info/warn/error) makes the last one win, leaving the effective root
         level at ERROR and silently dropping info/warn output. -->
    <root level="info">
      <appender-ref ref="RollingFile" />
      <appender-ref ref="Console" />
    </root>
    <!-- LOG "ca.bc.gov.restapi.results*" at TRACE level -->
    <logger name="ca.bc.gov.restapi.results" level="trace" additivity="false">
      <appender-ref ref="RollingFile" />
      <appender-ref ref="Console" />
    </logger>
  </springProfile>

</configuration>
Loading

0 comments on commit 28545fd

Please sign in to comment.