Speeding up software delivery using Kólga

TL;DR

Writing and deploying an application with Kólga

Prerequisites

About the applications

Application dependencies

Uvicorn

FastAPI

AIO Pika

DotEnv
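
Taken together, these four dependencies cover the whole flow of the demo services: DotEnv loads configuration from a .env file, FastAPI defines the HTTP endpoints, Uvicorn serves them, and AIO Pika talks to the message broker. As a rough sketch of how they fit together (the endpoint and defaults below are illustrative placeholders, not the article's code):

# Illustrative wiring of the four dependencies, not code from the demo repository
import os

import uvicorn
from aio_pika import connect
from dotenv import load_dotenv
from fastapi import FastAPI

load_dotenv()  # read settings such as BROKER_URL from a .env file

BROKER_URL = os.environ.get("BROKER_URL", "amqp://guest:guest@localhost/")
app = FastAPI()


@app.get("/healthz")
async def healthz() -> dict:
    # Report healthy only if the AMQP broker is reachable
    connection = await connect(BROKER_URL)
    await connection.close()
    return {"status": "ok"}


if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000)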

About the code

.
├── README.md
├── docker-compose.yml
├── poster
│   ├── Dockerfile
│   ├── README.md
│   ├── docker-entrypoint.sh
│   ├── poetry.lock
│   ├── poster
│   │   ├── __init__.py
│   │   └── main.py
│   └── pyproject.toml
├── reporter
│   ├── Dockerfile
│   ├── README.md
│   ├── docker-entrypoint.sh
│   ├── poetry.lock
│   ├── pyproject.toml
│   └── reporter
│       ├── __init__.py
│       └── main.py
└── scores
    ├── Dockerfile
    ├── README.md
    ├── alembic
    │   ├── README
    │   ├── env.py
    │   ├── script.py.mako
    │   └── versions
    │       └── 229236b6dde7_score_table.py
    ├── alembic.ini
    ├── docker-entrypoint.sh
    ├── poetry.lock
    ├── pyproject.toml
    └── scores
        ├── __init__.py
        ├── database.py
        ├── main.py
        ├── models.py
        └── schemas.py

Scores API

#  scores/scores/main.py
#  <...redacted imports...>
#  <...redacted environment and Database/AMQP connection setup...>
#  <...redacted health check endpoints...>

async def send_message(loop, score: schemas.Score):
    message = json.dumps(score.dict()).encode()
    connection = await connect(BROKER_URL, loop=loop)

    # Creating a channel
    channel = await connection.channel()
    scores_exchange = await channel.declare_exchange("scores", ExchangeType.FANOUT)

    # Sending the message
    await scores_exchange.publish(
        Message(message, delivery_mode=DeliveryMode.PERSISTENT), routing_key="kolga"
    )
    print(f" [x] Sent '{message}'")

    await connection.close()


@app.post("/scores")
async def create_scores(
    score: schemas.Score, db: Session = Depends(get_db)
) -> schemas.Score:
    score_record = models.Score(**score.dict())
    db.add(score_record)
    db.commit()
    loop = asyncio.get_event_loop()
    loop.create_task(send_message(loop, score))
    return score_record


@app.get("/scores")
def get_scores(db: Session = Depends(get_db)) -> List[models.Score]:
    return [entry for entry in db.query(models.Score).all()]


if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000, log_level="info")
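
With the service running, creating a score and reading it back is a two-request affair. A quick smoke test, assuming the API listens on localhost:8000 and using the requests library purely for illustration:

# Hypothetical smoke test against a locally running scores service
import requests

BASE_URL = "http://localhost:8000"  # adjust to wherever the service is exposed

# POST a score; the payload must match the pydantic Score schema (user, score)
response = requests.post(f"{BASE_URL}/scores", json={"user": "frank", "score": 42})
response.raise_for_status()

# GET all stored scores back
print(requests.get(f"{BASE_URL}/scores").json())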

REST API and database structure

#  scores/scores/models.py
from sqlalchemy import Column, Integer, String

from .database import Base


class Score(Base):
    __tablename__ = "Score"

    id: int = Column(Integer, primary_key=True, index=True)
    user: str = Column(String(255), index=True)
    score: int = Column(Integer(), default=1)


#  scores/scores/schemas.py
from pydantic.main import BaseModel


class Score(BaseModel):
    user: str
    score: int

    class Config:
        orm_mode = True
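
The alembic/versions/229236b6dde7_score_table.py file listed in the tree earlier holds the migration that creates this table. The article does not show its contents, but an autogenerated migration for the Score model would look roughly like the following (illustrative reconstruction, not the actual file):

# scores/alembic/versions/229236b6dde7_score_table.py (sketch)
import sqlalchemy as sa
from alembic import op

revision = "229236b6dde7"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # Create the Score table with the columns defined in models.py
    op.create_table(
        "Score",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user", sa.String(length=255), nullable=True),
        sa.Column("score", sa.Integer(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_index(op.f("ix_Score_id"), "Score", ["id"], unique=False)
    op.create_index(op.f("ix_Score_user"), "Score", ["user"], unique=False)


def downgrade():
    op.drop_index(op.f("ix_Score_user"), table_name="Score")
    op.drop_index(op.f("ix_Score_id"), table_name="Score")
    op.drop_table("Score")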

AMQP
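
The scores service publishes every new score to a fanout exchange called "scores", and the reporter and poster services each receive their own copy. Their actual consumer setup is redacted in the snippets below, but with AIO Pika it generally takes the following shape; this is a minimal generic sketch, not the redacted code:

# Minimal AIO Pika fanout consumer sketch (illustrative)
import asyncio

from aio_pika import ExchangeType, IncomingMessage, connect


async def on_message(message: IncomingMessage) -> None:
    # Each service plugs in its own handler here
    print(" [x] Received", message.body.decode())


async def consume(broker_url: str) -> None:
    connection = await connect(broker_url)
    channel = await connection.channel()
    # A fanout exchange copies every message to every bound queue,
    # so each consumer service gets its own copy of each score
    exchange = await channel.declare_exchange("scores", ExchangeType.FANOUT)
    queue = await channel.declare_queue(exclusive=True)
    await queue.bind(exchange)
    await queue.consume(on_message, no_ack=True)


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.create_task(consume("amqp://guest:guest@localhost/"))
    loop.run_forever()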

Reporter Frontend

#  /reporter/reporter/main.py
#  <...redacted imports...>
#  <...redacted environment and Database/AMQP connection setup...>
#  <...redacted health check endpoints...>
#  <...redacted ConnectionManager code...>

html = (
    # <...redacted HTML and JavaScript...>
)

# WebSocket connection manager
manager = ConnectionManager()


@app.get("/")
async def get():
    return HTMLResponse(html)


@app.websocket("/ws/{client_id}")
async def websocket_endpoint(websocket: WebSocket, client_id: int):
    await manager.connect(websocket)
    async with aiohttp.request("GET", f"{SCORES_API}/scores") as object_names_response:
        if object_names_response.status != 200:
            raise HTTPException(status_code=500, detail="Could not get scores")
        scores: List[str] = await object_names_response.json()
        for score in scores:
            await manager.send_personal_message(json.dumps(score), websocket)


async def on_message(message: IncomingMessage):
    """
    on_message doesn't necessarily have to be defined as async.
    Here it is to show that it's possible.
    """
    print(" [x] Received message %r" % message)
    decoded_message = message.body.decode()
    try:
        print(f"Message body is: {json.loads(decoded_message)}")
    except Exception:
        print("Not a JSON message, ignoring")
    print("Broadcasting?")
    await manager.broadcast(decoded_message)


# <...redacted AMQP consumer and connection setup...>

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000, log_level="trace")
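
The ConnectionManager used above is redacted. A minimal implementation in the spirit of the FastAPI WebSockets tutorial, matching the connect, send_personal_message and broadcast calls in the endpoint, could look like this (the article's actual class may differ):

# Illustrative ConnectionManager, based on the FastAPI WebSockets tutorial pattern
from typing import List

from fastapi import WebSocket


class ConnectionManager:
    def __init__(self) -> None:
        # All currently connected WebSocket clients
        self.active_connections: List[WebSocket] = []

    async def connect(self, websocket: WebSocket) -> None:
        await websocket.accept()
        self.active_connections.append(websocket)

    def disconnect(self, websocket: WebSocket) -> None:
        self.active_connections.remove(websocket)

    async def send_personal_message(self, message: str, websocket: WebSocket) -> None:
        await websocket.send_text(message)

    async def broadcast(self, message: str) -> None:
        # Push the message to every connected client
        for connection in self.active_connections:
            await connection.send_text(message)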

Slack Poster

#  /poster/poster/main.py
#  <...redacted imports...>
#  <...redacted environment setup for Slack connection...>
#  <...redacted health check endpoints...>

async def on_message(message: IncomingMessage):
    """
    on_message doesn't necessarily have to be defined as async.
    Here it is to show that it's possible.
    """
    print(" [x] Received message %r" % message)
    decoded_message = message.body.decode()
    try:
        json_message = json.loads(decoded_message)
    except Exception:
        print("Not a JSON message, ignoring")
        return None
    await slack_client.chat_postMessage(
        channel=SLACK_CHANNEL,
        text=f":tada: {json_message['user']} just scored {json_message['score']} points",
    )


# <...redacted AMQP consumer and connection setup...>

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000, log_level="info")
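
The Slack setup is redacted above, but the chat_postMessage call implies an asynchronous Slack client. One plausible shape for that setup, assuming the slack_sdk package and a made-up SLACK_API_TOKEN environment variable:

# Illustrative Slack client setup; package choice and token variable are assumptions
import os

from dotenv import load_dotenv
from slack_sdk.web.async_client import AsyncWebClient

load_dotenv()

SLACK_CHANNEL = os.environ.get("SLACK_CHANNEL", "#kolga-demo")
slack_client = AsyncWebClient(token=os.environ["SLACK_API_TOKEN"])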

Docker

Dockerfile

# General Dockerfile for all of the services

# ===================================================
FROM andersinnovations/python:3.9-slim AS build-base
# ===================================================

EXPOSE 8000/tcp

# ===================================
FROM build-base AS poetry
# ===================================

# <...redacted poetry installation ...>

# Note that you do not need to use Poetry for this to work, one could
# just as well use `pip` for installing Python dependencies here. The process
# would be almost the same.

# ===================================
FROM build-base AS base
# ===================================

COPY --from=poetry --chown=appuser:appuser /app/requirements.txt /app/requirements.txt
COPY --from=poetry --chown=appuser:appuser /app/requirements-dev.txt /app/requirements-dev.txt

# ==============================
FROM base AS development
# ==============================

# <...redacted development stage ...>

# ==============================
FROM base AS production
# ==============================

# Install production dependencies
RUN apt-install.sh build-essential libpq-dev libssl-dev \
    && pip install --no-cache-dir -r requirements.txt \
    && apt-cleanup.sh build-essential

# Copy code to image
COPY --chown=appuser:appuser . /app

# Set app user
USER appuser

# Set up start command
ENTRYPOINT ["./docker-entrypoint.sh"]
#!/bin/bash

set -e

# Check if the database is available
if [ -z "$SKIP_DATABASE_CHECK" -o "$SKIP_DATABASE_CHECK" = "0" ]; then
    wait-for-it.sh "${DATABASE_HOST}:${DATABASE_PORT-5432}"
fi

# Check if the broker is available
if [ -z "$SKIP_BROKER_CHECK" -o "$SKIP_BROKER_CHECK" = "0" ]; then
    wait-for-it.sh -t 20 "${BROKER_HOST}:${BROKER_PORT-5672}"
fi

# Start server
if [[ ! -z "$@" ]]; then
    echo "Command is $@"
    "$@"
elif [[ "$DEV_SERVER" = "1" ]]; then
    uvicorn scores.main:app --host 0.0.0.0 --reload
else
    gunicorn scores.main:app --bind 0.0.0.0:8000 -k uvicorn.workers.UvicornWorker
fi

CI / CD and Kólga

GitLab and GitHub

Setting up the base

# .gitlab-ci.yml
include:
  - remote: 'https://raw.githubusercontent.com/<YOUR REPO CONTAINING KOLGA>/v3/.gitlab-ci-base-template.yml'

Deployment stage

Application deployments

# poster/.gitlab-ci.yml
build-poster:
  extends: .build
  variables:
    DOCKER_BUILD_CONTEXT: poster
    DOCKER_BUILD_SOURCE: poster/Dockerfile
    DOCKER_IMAGE_NAME: poster

review-poster:
  extends: .review-no-env
  environment:
    name: qa/r/${CI_COMMIT_REF_SLUG}
  variables:
    DOCKER_IMAGE_NAME: poster
    DOCKER_BUILD_SOURCE: poster/Dockerfile
    PROJECT_NAME: poster
    K8S_INGRESS_DISABLED: 1
# reporter/.gitlab-ci.yml
build-reporter:
  extends: .build
  variables:
    DOCKER_BUILD_CONTEXT: reporter
    DOCKER_BUILD_SOURCE: reporter/Dockerfile
    DOCKER_IMAGE_NAME: reporter

review-reporter:
  extends: .review
  environment:
    url: https://$CI_PROJECT_PATH_SLUG-$CI_ENVIRONMENT_SLUG-reporter.$KUBE_INGRESS_BASE_DOMAIN
  variables:
    DOCKER_IMAGE_NAME: reporter
    DOCKER_BUILD_SOURCE: reporter/Dockerfile
    PROJECT_NAME: reporter
    K8S_SECRET_SCORES_API: https://$CI_PROJECT_PATH_SLUG-$CI_ENVIRONMENT_SLUG-scores.$KUBE_INGRESS_BASE_DOMAIN
    K8S_SECRET_REPORTER_URL: https://$CI_PROJECT_PATH_SLUG-$CI_ENVIRONMENT_SLUG-reporter.$KUBE_INGRESS_BASE_DOMAIN
# scores/.gitlab-ci.yml
build-scores:
  extends: .build
  variables:
    DOCKER_BUILD_CONTEXT: scores
    DOCKER_BUILD_SOURCE: scores/Dockerfile
    DOCKER_IMAGE_NAME: scores

review-scores:
  extends: .review
  environment:
    url: https://$CI_PROJECT_PATH_SLUG-$CI_ENVIRONMENT_SLUG-scores.$KUBE_INGRESS_BASE_DOMAIN
  variables:
    DOCKER_IMAGE_NAME: scores
    DOCKER_BUILD_SOURCE: scores/Dockerfile
    PROJECT_NAME: scores
include:
  - remote: 'https://raw.githubusercontent.com/<YOUR REPO CONTAINING KOLGA>/v3/.gitlab-ci-base-template.yml'
  - local: poster/.gitlab-ci.yml
  - local: reporter/.gitlab-ci.yml
  - local: scores/.gitlab-ci.yml

cleanup_review:
  extends: .cleanup_review

stop_review:
  extends: .stop_review

Dependency applications

include:
  - remote: 'https://raw.githubusercontent.com/<YOUR REPO CONTAINING KOLGA>/v3/.gitlab-ci-base-template.yml'
  - local: poster/.gitlab-ci.yml
  - local: reporter/.gitlab-ci.yml
  - local: scores/.gitlab-ci.yml

service-postgres:
  extends: .review-service
  variables:
    POSTGRES_IMAGE: "docker.io/bitnami/postgresql:12.5.0"
  script:
    - devops deploy_service --track review --service postgresql --env-var DATABASE_URL --projects poster reporter scores

service-rabbitmq:
  extends: .review-service
  script:
    - devops deploy_service --track review --service rabbitmq --env-var BROKER_URL --projects poster reporter scores

cleanup_review:
  extends: .cleanup_review

stop_review:
  extends: .stop_review

Staging and Production

staging:
  extends: .staging
  environment:
    url: http://poster.$KUBE_INGRESS_BASE_DOMAIN
  variables:
    DOCKER_BUILD_CONTEXT: reporter
    DOCKER_BUILD_SOURCE: reporter/Dockerfile
    DOCKER_IMAGE_NAME: reporter

production:
  extends: .production
  environment:
    url: http://poster.$KUBE_INGRESS_BASE_DOMAIN
  variables:
    DOCKER_BUILD_CONTEXT: reporter
    DOCKER_BUILD_SOURCE: reporter/Dockerfile
    DOCKER_IMAGE_NAME: reporter

That's it

Closing notes
