init commit
This commit is contained in:
68
.gitea/workflows/build-and-publish.yaml
Normal file
68
.gitea/workflows/build-and-publish.yaml
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
name: Build and Publish
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
env:
|
||||||
|
CHART_NAME: ${{ github.event.repository.name }}
|
||||||
|
IMAGE_NAME: ${{ github.event.repository.name }}
|
||||||
|
jobs:
|
||||||
|
build-release:
|
||||||
|
runs-on: nix
|
||||||
|
permissions:
|
||||||
|
id-token: write
|
||||||
|
contents: read
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Build Docker Image via Nix Flake
|
||||||
|
run: |
|
||||||
|
nix build .#dockerImage --print-build-logs
|
||||||
|
docker load < result
|
||||||
|
|
||||||
|
- name: Log in to Gitea Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ github.server_url }}
|
||||||
|
username: ${{ secrets.CI_USER }}
|
||||||
|
password: ${{ secrets.CI_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Tag and Push Docker Image
|
||||||
|
run: |
|
||||||
|
VERSION=${{ github.run_number }}
|
||||||
|
|
||||||
|
# Strip https from server URL
|
||||||
|
REGISTRY=${GITHUB_SERVER_URL#https://}
|
||||||
|
|
||||||
|
TARGET_IMAGE=$REGISTRY/${{ github.repository_owner }}/${{ env.IMAGE_NAME }}
|
||||||
|
|
||||||
|
# Auto-detect the built image name (better version)
|
||||||
|
SOURCE_IMAGE=$(docker load < result | awk '{print $3}')
|
||||||
|
|
||||||
|
docker tag $SOURCE_IMAGE $TARGET_IMAGE:$VERSION
|
||||||
|
docker tag $SOURCE_IMAGE $TARGET_IMAGE:latest
|
||||||
|
docker push $TARGET_IMAGE:$VERSION
|
||||||
|
docker push $TARGET_IMAGE:latest
|
||||||
|
|
||||||
|
- name: Setup Helm
|
||||||
|
uses: azure/setup-helm@v4
|
||||||
|
with:
|
||||||
|
version: v3.14.0
|
||||||
|
|
||||||
|
- name: Package Helm Chart
|
||||||
|
run: |
|
||||||
|
VERSION=${{ github.run_number }}
|
||||||
|
helm repo add bjw-s https://bjw-s-labs.github.io/helm-charts
|
||||||
|
helm dependency build ops/chart
|
||||||
|
helm package ops/chart --version $VERSION --app-version $VERSION
|
||||||
|
|
||||||
|
- name: Push Helm Chart to Gitea Registry
|
||||||
|
run: |
|
||||||
|
VERSION=${{ github.run_number }}
|
||||||
|
CHART_FILE=${{ env.CHART_NAME }}-${VERSION}.tgz
|
||||||
|
|
||||||
|
curl -f --user "${{ secrets.CI_USER }}:${{ secrets.CI_PASSWORD }}" \
|
||||||
|
-X POST \
|
||||||
|
--upload-file ./$CHART_FILE \
|
||||||
|
"${{ github.server_url }}/api/packages/${{ github.repository_owner }}/helm/api/charts"
|
||||||
46
config/config.exs
Normal file
46
config/config.exs
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import Config
|
||||||
|
|
||||||
|
config :workload_service,
|
||||||
|
ecto_repos: [WorkloadService.Repo],
|
||||||
|
event_stores: [WorkloadService.EventStore]
|
||||||
|
|
||||||
|
config :workload_service, WorkloadServiceWeb.Endpoint,
|
||||||
|
url: [host: "localhost"],
|
||||||
|
adapter: Bandit.PhoenixAdapter,
|
||||||
|
render_errors: [
|
||||||
|
formats: [json: WorkloadServiceWeb.ErrorJSON],
|
||||||
|
layout: false
|
||||||
|
],
|
||||||
|
pubsub_server: WorkloadService.PubSub,
|
||||||
|
live_view: [signing_salt: "workload_secret"]
|
||||||
|
|
||||||
|
config :logger, :console,
|
||||||
|
format: "$time $metadata[$level] $message\n",
|
||||||
|
metadata: [:request_id]
|
||||||
|
|
||||||
|
config :phoenix, :json_library, Jason
|
||||||
|
|
||||||
|
config :workload_service, WorkloadService.CommandedApp,
|
||||||
|
event_store: [
|
||||||
|
adapter: Commanded.EventStore.Adapters.EventStore,
|
||||||
|
event_store: WorkloadService.EventStore
|
||||||
|
],
|
||||||
|
pub_sub: :local,
|
||||||
|
registry: :local
|
||||||
|
|
||||||
|
config :commanded,
|
||||||
|
event_store_adapter: Commanded.EventStore.Adapters.EventStore
|
||||||
|
|
||||||
|
config :commanded_ecto_projections,
|
||||||
|
repo: WorkloadService.Repo
|
||||||
|
|
||||||
|
config :flop, repo: WorkloadService.Repo
|
||||||
|
|
||||||
|
config :workload_service, WorkloadService.Repo,
|
||||||
|
database: "workload_service_dev",
|
||||||
|
username: "postgres",
|
||||||
|
password: "postgres",
|
||||||
|
host: "localhost",
|
||||||
|
pool_size: 10
|
||||||
|
|
||||||
|
import_config "#{config_env()}.exs"
|
||||||
42
config/dev.exs
Normal file
42
config/dev.exs
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
import Config
|
||||||
|
|
||||||
|
config :workload_service, :amqp_url, "amqp://guest:guest@localhost:5672"
|
||||||
|
|
||||||
|
config :workload_service, WorkloadService.EventStore,
|
||||||
|
serializer: Commanded.Serialization.JsonSerializer,
|
||||||
|
username: "postgres",
|
||||||
|
password: "postgres",
|
||||||
|
database: "workload_service_eventstore_dev",
|
||||||
|
hostname: "localhost",
|
||||||
|
pool_size: 10
|
||||||
|
|
||||||
|
config :workload_service, WorkloadService.Repo,
|
||||||
|
username: "postgres",
|
||||||
|
password: "postgres",
|
||||||
|
hostname: "localhost",
|
||||||
|
database: "workload_service_dev",
|
||||||
|
stacktrace: true,
|
||||||
|
show_sensitive_data_on_connection_error: true,
|
||||||
|
pool_size: 10
|
||||||
|
|
||||||
|
config :workload_service, WorkloadServiceWeb.Endpoint,
|
||||||
|
http: [ip: {127, 0, 0, 1}],
|
||||||
|
check_origin: false,
|
||||||
|
code_reloader: true,
|
||||||
|
debug_errors: true,
|
||||||
|
secret_key_base: "rSPVNB6DCC2RMMlmk9QkCVGAzasUD6AWh5ussctvNuUxgZL9DRnFXTo6jcIz6JpB",
|
||||||
|
watchers: []
|
||||||
|
|
||||||
|
config :workload_service, dev_routes: true
|
||||||
|
|
||||||
|
config :logger, :default_formatter, format: "[$level] $message\n"
|
||||||
|
|
||||||
|
config :phoenix, :stacktrace_depth, 20
|
||||||
|
|
||||||
|
config :phoenix, :plug_init_mode, :runtime
|
||||||
|
|
||||||
|
config :open_api_spex, :cache_adapter, OpenApiSpex.Plug.NoneCache
|
||||||
|
|
||||||
|
config :workload_service,
|
||||||
|
provider_service_url: "http://localhost:4002",
|
||||||
|
solicitation_service_url: "http://localhost:8081"
|
||||||
6
config/prod.exs
Normal file
6
config/prod.exs
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import Config
|
||||||
|
|
||||||
|
config :logger, level: :debug
|
||||||
|
|
||||||
|
# Runtime production configuration, including reading
|
||||||
|
# of environment variables, is done on config/runtime.exs.
|
||||||
94
config/runtime.exs
Normal file
94
config/runtime.exs
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
import Config
|
||||||
|
|
||||||
|
# config/runtime.exs is executed for all environments, including
|
||||||
|
# during releases. It is executed after compilation and before the
|
||||||
|
# system starts, so it is typically used to load production configuration
|
||||||
|
# and secrets from environment variables or elsewhere. Do not define
|
||||||
|
# any compile-time configuration in here, as it won't be applied.
|
||||||
|
# The block below contains prod specific runtime configuration.
|
||||||
|
|
||||||
|
logger_level =
|
||||||
|
case System.get_env("LOG_LEVEL", "info") do
|
||||||
|
"debug" -> :debug
|
||||||
|
"info" -> :info
|
||||||
|
"warn" -> :warning
|
||||||
|
"error" -> :error
|
||||||
|
val when val in ["warning", "error"] -> :error
|
||||||
|
_ -> :info
|
||||||
|
end
|
||||||
|
|
||||||
|
config :logger, level: logger_level
|
||||||
|
|
||||||
|
config :logger, :console, format: {Logger.Formatter, :format}
|
||||||
|
|
||||||
|
rabbitmq_host = System.get_env("RABBITMQ_HOST", "localhost")
|
||||||
|
rabbitmq_vhost = System.get_env("RABBITMQ_VHOST", "/")
|
||||||
|
rabbitmq_username = System.get_env("RABBITMQ_USERNAME")
|
||||||
|
rabbitmq_password = System.get_env("RABBITMQ_PASSWORD")
|
||||||
|
|
||||||
|
amqp_url =
|
||||||
|
if rabbitmq_username && rabbitmq_password do
|
||||||
|
"amqp://#{rabbitmq_username}:#{rabbitmq_password}@#{rabbitmq_host}/#{rabbitmq_vhost}"
|
||||||
|
end
|
||||||
|
|
||||||
|
if amqp_url do
|
||||||
|
config :workload_service, :amqp_url, amqp_url
|
||||||
|
end
|
||||||
|
|
||||||
|
# ## Using releases
|
||||||
|
#
|
||||||
|
# If you use `mix release`, you need to explicitly enable the server
|
||||||
|
# by passing the PHX_SERVER=true when you start it:
|
||||||
|
#
|
||||||
|
# PHX_SERVER=true bin/workload_service start
|
||||||
|
#
|
||||||
|
# Alternatively, you can use `mix phx.gen.release` to generate a `bin/server`
|
||||||
|
# script that automatically sets the env var above.
|
||||||
|
if System.get_env("PHX_SERVER") do
|
||||||
|
config :workload_service, WorkloadServiceWeb.Endpoint, server: true
|
||||||
|
end
|
||||||
|
|
||||||
|
if cookie = System.get_env("RELEASE_COOKIE") do
|
||||||
|
config :elixir, :cookie, cookie
|
||||||
|
end
|
||||||
|
|
||||||
|
if config_env() == :prod do
|
||||||
|
database_url =
|
||||||
|
System.get_env("DATABASE_URL") ||
|
||||||
|
raise """
|
||||||
|
environment variable DATABASE_URL is missing.
|
||||||
|
For example: ecto://USER:PASS@HOST/DATABASE
|
||||||
|
"""
|
||||||
|
|
||||||
|
maybe_ipv6 = if System.get_env("ECTO_IPV6") in ~w(true 1), do: [:inet6], else: []
|
||||||
|
|
||||||
|
config :workload_service, WorkloadService.Repo,
|
||||||
|
url: database_url,
|
||||||
|
pool_size: String.to_integer(System.get_env("DATABASE_POOL_SIZE") || "1"),
|
||||||
|
socket_options: maybe_ipv6
|
||||||
|
|
||||||
|
config :workload_service, WorkloadService.EventStore,
|
||||||
|
serializer: Commanded.Serialization.JsonSerializer,
|
||||||
|
url: database_url,
|
||||||
|
schema: "eventstore",
|
||||||
|
pool_size: String.to_integer(System.get_env("EVENTSTORE_POOL_SIZE") || "1")
|
||||||
|
|
||||||
|
secret_key_base =
|
||||||
|
System.get_env("SECRET_KEY_BASE") ||
|
||||||
|
raise """
|
||||||
|
environment variable SECRET_KEY_BASE is missing.
|
||||||
|
You can generate one by calling: mix phx.gen.secret
|
||||||
|
"""
|
||||||
|
|
||||||
|
host = System.get_env("PHX_HOST") || "example.com"
|
||||||
|
|
||||||
|
config :workload_service, :dns_cluster_query, System.get_env("DNS_CLUSTER_QUERY")
|
||||||
|
|
||||||
|
config :workload_service, WorkloadServiceWeb.Endpoint,
|
||||||
|
url: [host: host, port: String.to_integer(System.get_env("PORT", "4000")), scheme: "http"],
|
||||||
|
http: [
|
||||||
|
ip: {0, 0, 0, 0, 0, 0, 0, 0},
|
||||||
|
port: String.to_integer(System.get_env("PORT", "4000"))
|
||||||
|
],
|
||||||
|
secret_key_base: secret_key_base
|
||||||
|
end
|
||||||
24
config/test.exs
Normal file
24
config/test.exs
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
import Config
|
||||||
|
|
||||||
|
config :workload_service, WorkloadService.Repo,
|
||||||
|
username: "postgres",
|
||||||
|
password: "postgres",
|
||||||
|
hostname: "localhost",
|
||||||
|
database: "workload_service_test#{System.get_env("MIX_TEST_PARTITION")}",
|
||||||
|
pool: Ecto.Adapters.SQL.Sandbox,
|
||||||
|
pool_size: System.schedulers_online() * 2
|
||||||
|
|
||||||
|
config :workload_service, WorkloadServiceWeb.Endpoint,
|
||||||
|
http: [ip: {127, 0, 0, 1}, port: 4002],
|
||||||
|
secret_key_base: "workload_test_secret_key_base_at_least_64_bytes_long_for_testing",
|
||||||
|
server: false
|
||||||
|
|
||||||
|
config :logger, level: :warning
|
||||||
|
|
||||||
|
config :phoenix, :plug_init_mode, :runtime
|
||||||
|
|
||||||
|
config :workload_service, WorkloadService.Application,
|
||||||
|
event_store: [
|
||||||
|
adapter: Commanded.EventStore.Adapters.InMemory,
|
||||||
|
serializer: Commanded.Serialization.JsonSerializer
|
||||||
|
]
|
||||||
61
flake.lock
generated
Normal file
61
flake.lock
generated
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
{
|
||||||
|
"nodes": {
|
||||||
|
"flake-utils": {
|
||||||
|
"inputs": {
|
||||||
|
"systems": "systems"
|
||||||
|
},
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1731533236,
|
||||||
|
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "numtide",
|
||||||
|
"repo": "flake-utils",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nixpkgs": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1775710090,
|
||||||
|
"narHash": "sha256-ar3rofg+awPB8QXDaFJhJ2jJhu+KqN/PRCXeyuXR76E=",
|
||||||
|
"owner": "NixOS",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"rev": "4c1018dae018162ec878d42fec712642d214fdfa",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "NixOS",
|
||||||
|
"ref": "nixos-unstable",
|
||||||
|
"repo": "nixpkgs",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
"inputs": {
|
||||||
|
"flake-utils": "flake-utils",
|
||||||
|
"nixpkgs": "nixpkgs"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"systems": {
|
||||||
|
"locked": {
|
||||||
|
"lastModified": 1681028828,
|
||||||
|
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||||
|
"type": "github"
|
||||||
|
},
|
||||||
|
"original": {
|
||||||
|
"owner": "nix-systems",
|
||||||
|
"repo": "default",
|
||||||
|
"type": "github"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"root": "root",
|
||||||
|
"version": 7
|
||||||
|
}
|
||||||
55
flake.nix
Normal file
55
flake.nix
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
{
|
||||||
|
description = "Workload Service - Elixir/Commanded CQRS/ES service";
|
||||||
|
|
||||||
|
inputs = {
|
||||||
|
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||||
|
flake-utils.url = "github:numtide/flake-utils";
|
||||||
|
};
|
||||||
|
|
||||||
|
outputs = {
|
||||||
|
self,
|
||||||
|
nixpkgs,
|
||||||
|
flake-utils,
|
||||||
|
}:
|
||||||
|
flake-utils.lib.eachDefaultSystem (
|
||||||
|
system:
|
||||||
|
let
|
||||||
|
pkgs = nixpkgs.legacyPackages.${system};
|
||||||
|
beamPackages = pkgs.beamPackages;
|
||||||
|
pname = "workload_service";
|
||||||
|
version = "1.0.0";
|
||||||
|
mixFodDeps = beamPackages.fetchMixDeps {
|
||||||
|
inherit pname version;
|
||||||
|
src = ./.;
|
||||||
|
sha256 = "sha256-YkX+Bti+GyU6RQOjVTwsPIWKuqJox0lz+qK2V8Zg1XI=";
|
||||||
|
};
|
||||||
|
package = beamPackages.mixRelease {
|
||||||
|
inherit pname version mixFodDeps;
|
||||||
|
src = ./.;
|
||||||
|
meta = {
|
||||||
|
mainProgram = "workload_service";
|
||||||
|
};
|
||||||
|
removeCookie = false;
|
||||||
|
};
|
||||||
|
dockerImage = pkgs.dockerTools.buildLayeredImage {
|
||||||
|
name = "workload_service";
|
||||||
|
contents = [ package pkgs.bashInteractive pkgs.busybox pkgs.shadow ];
|
||||||
|
config = {
|
||||||
|
Cmd = [ "${package}/bin/workload_service" "start" ];
|
||||||
|
};
|
||||||
|
};
|
||||||
|
in
|
||||||
|
{
|
||||||
|
packages.default = package;
|
||||||
|
packages.dockerImage = dockerImage;
|
||||||
|
devShells.default = pkgs.mkShell {
|
||||||
|
buildInputs = with pkgs; [
|
||||||
|
elixir
|
||||||
|
elixir-ls
|
||||||
|
kubernetes-helm
|
||||||
|
git
|
||||||
|
];
|
||||||
|
};
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
9
lib/workload_service/aggregates/quote_task.ex
Normal file
9
lib/workload_service/aggregates/quote_task.ex
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
defmodule WorkloadService.Aggregates.QuoteTask do
|
||||||
|
use WorkloadService.Aggregates.Task,
|
||||||
|
task_type: "quote",
|
||||||
|
commands: WorkloadService.Commands.QuoteTask
|
||||||
|
|
||||||
|
def validate_submission(_) do
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
||||||
9
lib/workload_service/aggregates/solicitation_task.ex
Normal file
9
lib/workload_service/aggregates/solicitation_task.ex
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
defmodule WorkloadService.Aggregates.SolicitationTask do
|
||||||
|
use WorkloadService.Aggregates.Task,
|
||||||
|
task_type: "solicitation",
|
||||||
|
commands: WorkloadService.Commands.SolicitationTask
|
||||||
|
|
||||||
|
def validate_submission(_) do
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
||||||
145
lib/workload_service/aggregates/task.ex
Normal file
145
lib/workload_service/aggregates/task.ex
Normal file
@@ -0,0 +1,145 @@
|
|||||||
|
defmodule WorkloadService.Aggregates.Task do
|
||||||
|
@moduledoc """
|
||||||
|
Behaviour and __using__ macro for task aggregates.
|
||||||
|
Each task type can override validation and add specific behavior.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
defmodule WorkloadService.Aggregates.QuoteTask do
|
||||||
|
use WorkloadService.Aggregates.Task,
|
||||||
|
task_type: "quote"
|
||||||
|
end
|
||||||
|
"""
|
||||||
|
|
||||||
|
@callback validate_submission(map()) :: :ok | {:error, term()}
|
||||||
|
|
||||||
|
defmacro __using__(opts) do
|
||||||
|
task_type = Keyword.fetch!(opts, :task_type)
|
||||||
|
commands_module = Keyword.get(opts, :commands, WorkloadService.Commands.Task)
|
||||||
|
|
||||||
|
quote do
|
||||||
|
@behaviour Commanded.Aggregates.Aggregate
|
||||||
|
@task_type unquote(task_type)
|
||||||
|
|
||||||
|
alias unquote(commands_module).CreateTask
|
||||||
|
alias unquote(commands_module).SubmitResponse
|
||||||
|
alias unquote(commands_module).ApproveSubmission
|
||||||
|
alias unquote(commands_module).CompleteTask
|
||||||
|
|
||||||
|
alias Commanded.Aggregates.Aggregate
|
||||||
|
|
||||||
|
defstruct [
|
||||||
|
:id,
|
||||||
|
:application_id,
|
||||||
|
:provider_id,
|
||||||
|
:provider_name,
|
||||||
|
:task_info,
|
||||||
|
:submission,
|
||||||
|
:attachments,
|
||||||
|
:status,
|
||||||
|
:version
|
||||||
|
]
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def execute(%__MODULE__{status: nil}, %CreateTask{} = cmd) do
|
||||||
|
%WorkloadService.Events.TaskCreated{
|
||||||
|
id: cmd.id,
|
||||||
|
application_id: cmd.application_id,
|
||||||
|
provider_id: cmd.provider_id,
|
||||||
|
provider_name: cmd.provider_name,
|
||||||
|
task_info: cmd.task_info || %{},
|
||||||
|
attachments: cmd.attachments || []
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def execute(%__MODULE__{}, %CreateTask{}) do
|
||||||
|
{:error, :task_already_exists}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def execute(%__MODULE__{status: status}, %SubmitResponse{} = cmd)
|
||||||
|
when status in [nil, "created", "draft", "approved"] do
|
||||||
|
with :ok <- validate_submission(cmd.submission) do
|
||||||
|
new_status = if status == "approved", do: "draft", else: "draft"
|
||||||
|
|
||||||
|
%WorkloadService.Events.SubmissionUpdated{
|
||||||
|
id: cmd.id,
|
||||||
|
submission: cmd.submission,
|
||||||
|
attachments: cmd.attachments
|
||||||
|
}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def execute(%__MODULE__{status: "draft"}, %ApproveSubmission{}) do
|
||||||
|
%WorkloadService.Events.SubmissionApproved{
|
||||||
|
id: nil
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def execute(%__MODULE__{status: status}, %ApproveSubmission{}) do
|
||||||
|
{:error, {:invalid_state, "cannot approve in state: #{status}"}}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def execute(%__MODULE__{status: "approved"}, %CompleteTask{} = cmd) do
|
||||||
|
%WorkloadService.Events.TaskCompleted{
|
||||||
|
id: cmd.id,
|
||||||
|
completed_by: cmd.completed_by
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def execute(%__MODULE__{status: status}, %CompleteTask{}) do
|
||||||
|
{:error, {:invalid_state, "cannot complete in state: #{status}"}}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def apply(%__MODULE__{} = agg, %WorkloadService.Events.TaskCreated{} = e) do
|
||||||
|
%{
|
||||||
|
agg
|
||||||
|
| id: e.id,
|
||||||
|
application_id: e.application_id,
|
||||||
|
provider_id: e.provider_id,
|
||||||
|
provider_name: e.provider_name,
|
||||||
|
task_info: e.task_info,
|
||||||
|
attachments: e.attachments,
|
||||||
|
status: "created",
|
||||||
|
version: agg.version + 1
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def apply(%__MODULE__{} = agg, %WorkloadService.Events.SubmissionUpdated{} = e) do
|
||||||
|
%{
|
||||||
|
agg
|
||||||
|
| submission: e.submission,
|
||||||
|
attachments: e.attachments || [],
|
||||||
|
status: "draft",
|
||||||
|
version: agg.version + 1
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def apply(%__MODULE__{} = agg, %WorkloadService.Events.SubmissionApproved{}) do
|
||||||
|
%{
|
||||||
|
agg
|
||||||
|
| status: "approved",
|
||||||
|
version: agg.version + 1
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl Aggregate
|
||||||
|
def apply(%__MODULE__{} = agg, %WorkloadService.Events.TaskCompleted{}) do
|
||||||
|
%{
|
||||||
|
agg
|
||||||
|
| status: "completed",
|
||||||
|
version: agg.version + 1
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
defoverridable execute: 2, apply: 2
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
48
lib/workload_service/aggregates/task_id.ex
Normal file
48
lib/workload_service/aggregates/task_id.ex
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
defmodule WorkloadService.Aggregates.TaskId do
|
||||||
|
@moduledoc """
|
||||||
|
Task identifier with org_id, type and task_id.
|
||||||
|
ID format: "org_id:type:task_id" (e.g., "test:quote:uuid")
|
||||||
|
"""
|
||||||
|
|
||||||
|
@derive Jason.Encoder
|
||||||
|
defstruct [:org_id, :type, :task_id]
|
||||||
|
|
||||||
|
def new(org_id, type, task_id) when type in ["quote", "solicitation"] do
|
||||||
|
%__MODULE__{
|
||||||
|
org_id: org_id,
|
||||||
|
type: type,
|
||||||
|
task_id: task_id
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
def parse(string) when is_binary(string) do
|
||||||
|
case String.split(string, ":", parts: 3) do
|
||||||
|
[org_id, type, task_id] when type in ["quote", "solicitation"] ->
|
||||||
|
{:ok, new(org_id, type, task_id)}
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
{:error, :invalid_task_id}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def parse!(string) do
|
||||||
|
case parse(string) do
|
||||||
|
{:ok, id} -> id
|
||||||
|
{:error, reason} -> raise ArgumentError, "invalid task id #{inspect(string)}: #{reason}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defimpl String.Chars do
|
||||||
|
def to_string(%WorkloadService.Aggregates.TaskId{
|
||||||
|
org_id: org_id,
|
||||||
|
type: type,
|
||||||
|
task_id: task_id
|
||||||
|
}) do
|
||||||
|
"#{org_id}:#{type}:#{task_id}"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defimpl Commanded.Serialization.JsonDecoder do
|
||||||
|
def decode(id), do: id
|
||||||
|
end
|
||||||
|
end
|
||||||
29
lib/workload_service/application.ex
Normal file
29
lib/workload_service/application.ex
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
defmodule WorkloadService.Application do
|
||||||
|
@moduledoc false
|
||||||
|
|
||||||
|
use Application
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def start(_type, _args) do
|
||||||
|
children = [
|
||||||
|
WorkloadService.CommandedApp,
|
||||||
|
WorkloadService.Consumers.QuoteRequestedConsumer,
|
||||||
|
WorkloadService.Consumers.SolicitationRequestedConsumer,
|
||||||
|
WorkloadService.Projectors.TaskProjector,
|
||||||
|
WorkloadService.Repo,
|
||||||
|
WorkloadServiceWeb.Telemetry,
|
||||||
|
{DNSCluster, query: Application.get_env(:workload_service, :dns_cluster_query) || :ignore},
|
||||||
|
{Phoenix.PubSub, name: WorkloadService.PubSub},
|
||||||
|
WorkloadServiceWeb.Endpoint
|
||||||
|
]
|
||||||
|
|
||||||
|
opts = [strategy: :one_for_one, name: WorkloadService.Supervisor]
|
||||||
|
Supervisor.start_link(children, opts)
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def config_change(changed, _new, removed) do
|
||||||
|
WorkloadServiceWeb.Endpoint.config_change(changed, removed)
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
||||||
32
lib/workload_service/commanded_app.ex
Normal file
32
lib/workload_service/commanded_app.ex
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
defmodule WorkloadService.Router do
|
||||||
|
use Commanded.Commands.Router
|
||||||
|
|
||||||
|
dispatch(
|
||||||
|
[
|
||||||
|
WorkloadService.Commands.QuoteTask.CreateTask,
|
||||||
|
WorkloadService.Commands.QuoteTask.SubmitResponse,
|
||||||
|
WorkloadService.Commands.QuoteTask.ApproveSubmission,
|
||||||
|
WorkloadService.Commands.QuoteTask.CompleteTask
|
||||||
|
],
|
||||||
|
to: WorkloadService.Aggregates.QuoteTask,
|
||||||
|
identity: :id
|
||||||
|
)
|
||||||
|
|
||||||
|
dispatch(
|
||||||
|
[
|
||||||
|
WorkloadService.Commands.SolicitationTask.CreateTask,
|
||||||
|
WorkloadService.Commands.SolicitationTask.SubmitResponse,
|
||||||
|
WorkloadService.Commands.SolicitationTask.ApproveSubmission,
|
||||||
|
WorkloadService.Commands.SolicitationTask.CompleteTask
|
||||||
|
],
|
||||||
|
to: WorkloadService.Aggregates.SolicitationTask,
|
||||||
|
identity: :id
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule WorkloadService.CommandedApp do
|
||||||
|
use Commanded.Application,
|
||||||
|
otp_app: :workload_service
|
||||||
|
|
||||||
|
router(WorkloadService.Router)
|
||||||
|
end
|
||||||
53
lib/workload_service/commands/quote_task.ex
Normal file
53
lib/workload_service/commands/quote_task.ex
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
defmodule WorkloadService.Commands.QuoteTask do
|
||||||
|
@moduledoc """
|
||||||
|
Quote task commands.
|
||||||
|
"""
|
||||||
|
|
||||||
|
defmodule CreateTask do
|
||||||
|
@moduledoc """
|
||||||
|
Command to create a new quote task.
|
||||||
|
"""
|
||||||
|
@derive Jason.Encoder
|
||||||
|
defstruct [:id, :application_id, :provider_id, :provider_name, :task_info, :attachments]
|
||||||
|
|
||||||
|
def new(attrs) do
|
||||||
|
struct(__MODULE__, attrs)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule SubmitResponse do
|
||||||
|
@moduledoc """
|
||||||
|
Command to submit response for a quote task.
|
||||||
|
"""
|
||||||
|
@derive Jason.Encoder
|
||||||
|
defstruct [:id, :submission, :attachments]
|
||||||
|
|
||||||
|
def new(attrs) do
|
||||||
|
struct(__MODULE__, attrs)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule ApproveSubmission do
|
||||||
|
@moduledoc """
|
||||||
|
Command to approve submission for a quote task.
|
||||||
|
"""
|
||||||
|
@derive Jason.Encoder
|
||||||
|
defstruct [:id]
|
||||||
|
|
||||||
|
def new(attrs) do
|
||||||
|
struct(__MODULE__, attrs)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule CompleteTask do
|
||||||
|
@moduledoc """
|
||||||
|
Command to complete a quote task.
|
||||||
|
"""
|
||||||
|
@derive Jason.Encoder
|
||||||
|
defstruct [:id, :completed_by]
|
||||||
|
|
||||||
|
def new(attrs) do
|
||||||
|
struct(__MODULE__, attrs)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
53
lib/workload_service/commands/solicitation_task.ex
Normal file
53
lib/workload_service/commands/solicitation_task.ex
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
defmodule WorkloadService.Commands.SolicitationTask do
|
||||||
|
@moduledoc """
|
||||||
|
Solicitation task commands.
|
||||||
|
"""
|
||||||
|
|
||||||
|
defmodule CreateTask do
|
||||||
|
@moduledoc """
|
||||||
|
Command to create a new solicitation task.
|
||||||
|
"""
|
||||||
|
@derive Jason.Encoder
|
||||||
|
defstruct [:id, :application_id, :provider_id, :provider_name, :task_info, :attachments]
|
||||||
|
|
||||||
|
def new(attrs) do
|
||||||
|
struct(__MODULE__, attrs)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule SubmitResponse do
|
||||||
|
@moduledoc """
|
||||||
|
Command to submit response for a solicitation task.
|
||||||
|
"""
|
||||||
|
@derive Jason.Encoder
|
||||||
|
defstruct [:id, :submission, :attachments]
|
||||||
|
|
||||||
|
def new(attrs) do
|
||||||
|
struct(__MODULE__, attrs)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule ApproveSubmission do
|
||||||
|
@moduledoc """
|
||||||
|
Command to approve submission for a solicitation task.
|
||||||
|
"""
|
||||||
|
@derive Jason.Encoder
|
||||||
|
defstruct [:id]
|
||||||
|
|
||||||
|
def new(attrs) do
|
||||||
|
struct(__MODULE__, attrs)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule CompleteTask do
|
||||||
|
@moduledoc """
|
||||||
|
Command to complete a solicitation task.
|
||||||
|
"""
|
||||||
|
@derive Jason.Encoder
|
||||||
|
defstruct [:id, :completed_by]
|
||||||
|
|
||||||
|
def new(attrs) do
|
||||||
|
struct(__MODULE__, attrs)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
93
lib/workload_service/consumers/quote_requested_consumer.ex
Normal file
93
lib/workload_service/consumers/quote_requested_consumer.ex
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
defmodule WorkloadService.Consumers.QuoteRequestedConsumer do
|
||||||
|
use GenServer
|
||||||
|
require Logger
|
||||||
|
|
||||||
|
alias WorkloadService.CommandedApp
|
||||||
|
alias WorkloadService.Commands.QuoteTask
|
||||||
|
|
||||||
|
@exchange "workload_service.events.quote_requested"
|
||||||
|
@queue "workload_service.quote_requested"
|
||||||
|
@routing_key "quote.requested"
|
||||||
|
|
||||||
|
def start_link(opts \\ []) do
|
||||||
|
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
|
||||||
|
end
|
||||||
|
|
||||||
|
def init(_opts) do
|
||||||
|
{:ok, conn} = AMQP.Connection.open(amqp_url())
|
||||||
|
{:ok, channel} = AMQP.Channel.open(conn)
|
||||||
|
|
||||||
|
AMQP.Queue.declare(channel, @queue, durable: true)
|
||||||
|
AMQP.Queue.bind(channel, @queue, @exchange, routing_key: @routing_key)
|
||||||
|
{:ok, _tag} = AMQP.Basic.consume(channel, @queue)
|
||||||
|
|
||||||
|
Logger.info("QuoteRequestedConsumer started, listening on #{@queue}")
|
||||||
|
|
||||||
|
{:ok, %{channel: channel}}
|
||||||
|
end
|
||||||
|
|
||||||
|
def handle_info({:basic_consume_ok, _}, state), do: {:noreply, state}
|
||||||
|
def handle_info({:basic_cancel, _}, state), do: {:stop, :normal, state}
|
||||||
|
def handle_info({:basic_cancel_ok, _}, state), do: {:noreply, state}
|
||||||
|
|
||||||
|
def handle_info({:basic_deliver, payload, meta}, state) do
|
||||||
|
case process(payload) do
|
||||||
|
:ok ->
|
||||||
|
AMQP.Basic.ack(state.channel, meta.delivery_tag)
|
||||||
|
|
||||||
|
{:error, reason} ->
|
||||||
|
Logger.error("QuoteRequestedConsumer: failed to process: #{inspect(reason)}")
|
||||||
|
AMQP.Basic.reject(state.channel, meta.delivery_tag, requeue: false)
|
||||||
|
end
|
||||||
|
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp process(payload) do
|
||||||
|
with {:ok, event} <- Jason.decode(payload),
|
||||||
|
:ok <- handle_event(event) do
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp handle_event(%{
|
||||||
|
"application_id" => application_id,
|
||||||
|
"org_id" => org_id,
|
||||||
|
"provider_id" => provider_id,
|
||||||
|
"policy_type" => policy_type
|
||||||
|
} = event) do
|
||||||
|
task_id = WorkloadService.Aggregates.TaskId.new(org_id, "quote", Ecto.UUID.generate())
|
||||||
|
|
||||||
|
command = %QuoteTask.CreateTask{
|
||||||
|
id: task_id,
|
||||||
|
application_id: application_id,
|
||||||
|
provider_id: provider_id,
|
||||||
|
provider_name: Map.get(event, "provider_name", ""),
|
||||||
|
task_info: %{
|
||||||
|
"policy_type" => policy_type,
|
||||||
|
"provider_email" => Map.get(event, "provider_email"),
|
||||||
|
"applicant_info" => Map.get(event, "applicant_info", %{}),
|
||||||
|
"car_details" => Map.get(event, "car_details", %{}),
|
||||||
|
"building_details" => Map.get(event, "building_details", %{}),
|
||||||
|
"life_details" => Map.get(event, "life_details", %{})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case CommandedApp.dispatch(command) do
|
||||||
|
:ok ->
|
||||||
|
Logger.info("QuoteRequestedConsumer: created task #{task_id}")
|
||||||
|
:ok
|
||||||
|
|
||||||
|
{:error, reason} ->
|
||||||
|
{:error, reason}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp handle_event(event) do
|
||||||
|
{:error, {:invalid_event, event}}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp amqp_url do
|
||||||
|
Application.get_env(:workload_service, :amqp_url, "amqp://guest:guest@localhost:5672")
|
||||||
|
end
|
||||||
|
end
|
||||||
@@ -0,0 +1,138 @@
|
|||||||
|
defmodule WorkloadService.Consumers.SolicitationRequestedConsumer do
  @moduledoc """
  AMQP consumer for `solicitation.requested` events.

  Pipeline per message: decode JSON -> fetch the provider from
  provider-service -> pick the provider's template for the policy type ->
  ask solicitation-service to generate the document -> dispatch a
  `SolicitationTask.CreateTask` command.

  Failures are logged and the message is rejected without requeue.
  """

  use GenServer
  require Logger

  alias WorkloadService.CommandedApp
  alias WorkloadService.Commands.SolicitationTask

  @exchange "workload_service.events.solicitation_requested"
  @queue "workload_service.solicitation_requested"
  @routing_key "solicitation.requested"

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @impl true
  def init(_opts) do
    {:ok, conn} = AMQP.Connection.open(amqp_url())
    {:ok, channel} = AMQP.Channel.open(conn)

    # NOTE(review): @exchange is assumed to already be declared by the
    # publisher; binding raises a channel error on a fresh broker if it is
    # missing — confirm who declares it.
    AMQP.Queue.declare(channel, @queue, durable: true)
    AMQP.Queue.bind(channel, @queue, @exchange, routing_key: @routing_key)
    {:ok, _tag} = AMQP.Basic.consume(channel, @queue)

    Logger.info("SolicitationRequestedConsumer started, listening on #{@queue}")

    {:ok, %{channel: channel}}
  end

  @impl true
  def handle_info({:basic_consume_ok, _}, state), do: {:noreply, state}
  def handle_info({:basic_cancel, _}, state), do: {:stop, :normal, state}
  def handle_info({:basic_cancel_ok, _}, state), do: {:noreply, state}

  def handle_info({:basic_deliver, payload, meta}, state) do
    case process(payload) do
      :ok ->
        AMQP.Basic.ack(state.channel, meta.delivery_tag)

      {:error, reason} ->
        Logger.error("SolicitationRequestedConsumer: failed to process: #{inspect(reason)}")
        # requeue: false — a poison message would otherwise loop forever.
        AMQP.Basic.reject(state.channel, meta.delivery_tag, requeue: false)
    end

    {:noreply, state}
  end

  # Full happy-path pipeline; the first failing step's {:error, _} is
  # returned unchanged by the `with`.
  defp process(payload) do
    with {:ok, event} <- Jason.decode(payload),
         {:ok, provider} <- get_provider(event["provider_id"]),
         {:ok, template} <- get_active_template(provider, event["policy_type"]),
         {:ok, result} <- generate_solicitation(event, template),
         :ok <- create_task(event, result) do
      :ok
    end
  end

  defp get_provider(provider_id) do
    url = "#{provider_service_url()}/api/v1/providers/#{provider_id}"

    case Req.get(url) do
      {:ok, %{status: 200, body: %{"data" => provider}}} -> {:ok, provider}
      {:ok, %{status: 404}} -> {:error, :provider_not_found}
      {:error, reason} -> {:error, reason}
    end
  end

  # Prefer the provider's configured default template for the policy type;
  # otherwise fall back to the first template flagged active.
  defp get_active_template(provider, policy_type) do
    templates = get_in(provider, ["templates", policy_type]) || []
    default_id = get_in(provider, ["default_templates", policy_type])

    template =
      if default_id do
        Enum.find(templates, &(&1["template_id"] == default_id))
      else
        Enum.find(templates, &(&1["active"] == true))
      end

    case template do
      nil -> {:error, :no_active_template}
      t -> {:ok, t}
    end
  end

  defp generate_solicitation(event, template) do
    url = "#{solicitation_service_url()}/api/solicitations/generate"

    body = %{
      org_id: event["org_id"],
      id: event["id"],
      template_document_url: template["document_url"],
      fields: event["fields"] || %{}
    }

    case Req.post(url, json: body) do
      {:ok, %{status: 200, body: result}} ->
        {:ok, result}

      {:ok, %{status: status, body: body}} ->
        {:error, "solicitation_service returned #{status}: #{inspect(body)}"}

      {:error, reason} ->
        {:error, reason}
    end
  end

  defp create_task(event, result) do
    org_id = event["org_id"]
    task_id = WorkloadService.Aggregates.TaskId.new(org_id, "solicitation", Ecto.UUID.generate())

    command = %SolicitationTask.CreateTask{
      id: task_id,
      application_id: event["id"],
      provider_id: event["provider_id"],
      provider_name: event["provider_name"] || "",
      task_info: %{
        "quote_id" => event["quote_id"],
        "plan_id" => event["plan_id"],
        "document_url" => result["document_url"]
      },
      # Drop nil so attachments never contains a missing URL.
      attachments: [result["document_url"]] |> Enum.filter(& &1)
    }

    case CommandedApp.dispatch(command) do
      :ok ->
        Logger.info("SolicitationRequestedConsumer: created task #{task_id}")
        :ok

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Service endpoints are now configurable (mirroring amqp_url/0); the
  # defaults preserve the previous hard-coded local-dev URLs, so existing
  # deployments behave identically.
  defp provider_service_url do
    Application.get_env(:workload_service, :provider_service_url, "http://localhost:4002")
  end

  defp solicitation_service_url do
    Application.get_env(:workload_service, :solicitation_service_url, "http://localhost:8081")
  end

  defp amqp_url do
    Application.get_env(:workload_service, :amqp_url, "amqp://guest:guest@localhost:5672")
  end
end
|
||||||
3
lib/workload_service/event_store.ex
Normal file
3
lib/workload_service/event_store.ex
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
defmodule WorkloadService.EventStore do
  @moduledoc """
  EventStore instance backing the Commanded application; configured under
  the `:workload_service` OTP app.
  """

  use EventStore, otp_app: :workload_service
end
|
||||||
47
lib/workload_service/events/task.ex
Normal file
47
lib/workload_service/events/task.ex
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
defmodule WorkloadService.Events do
  @moduledoc """
  Domain events emitted by the task aggregates (quote and solicitation).
  Every event derives `Jason.Encoder` so it can be serialized for the
  event store and the message bus.
  """

  defmodule TaskCreated do
    @moduledoc """
    A new task (quote or solicitation) was created.
    `id` is a TaskId rendered as "org_id:type:task_id" (e.g. "test:quote:uuid").
    """
    @derive Jason.Encoder
    defstruct [:id, :application_id, :provider_id, :provider_name, :task_info, :attachments]
  end

  defmodule SubmissionUpdated do
    @moduledoc "A response was recorded for the task (user-provided data)."
    @derive Jason.Encoder
    defstruct [:id, :submission, :attachments]
  end

  defmodule SubmissionApproved do
    @moduledoc "The recorded submission was approved and is ready to send."
    @derive Jason.Encoder
    defstruct [:id]
  end

  defmodule TaskCompleted do
    @moduledoc """
    The task finished and was sent to policy-service; this event triggers
    the RabbitMQ publish.
    """
    @derive Jason.Encoder
    defstruct [:id, :completed_by]
  end

  defmodule QuoteAccepted do
    @moduledoc "A quote was accepted, which kicks off solicitation generation."
    @derive Jason.Encoder
    defstruct [:id, :provider_id, :quote_id, :plan_id]
  end
end
|
||||||
86
lib/workload_service/message_bus.ex
Normal file
86
lib/workload_service/message_bus.ex
Normal file
@@ -0,0 +1,86 @@
|
|||||||
|
defmodule WorkloadService.MessageBus do
  @moduledoc """
  Singleton publisher of JSON events to a durable direct RabbitMQ exchange.

  Connects asynchronously on start, reconnects every #{5_000}ms after a
  failure, and monitors the connection process. Publishing is
  fire-and-forget (`cast`); messages sent while disconnected are dropped
  with a warning.
  """

  use GenServer
  require Logger

  alias AMQP.{Connection, Channel, Exchange}

  @reconnect_interval 5_000

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @doc """
  Publishes `payload` under `routing_key`. Binaries are sent as-is; any
  other term is JSON-encoded first (raises if not encodable).
  """
  def publish(routing_key, payload) when is_binary(payload) do
    GenServer.cast(__MODULE__, {:publish, routing_key, payload})
  end

  def publish(routing_key, payload) do
    GenServer.cast(__MODULE__, {:publish, routing_key, Jason.encode!(payload)})
  end

  @impl true
  def init(_opts) do
    # Connect out-of-band so a down broker does not block supervisor start.
    send(self(), :connect)
    {:ok, %{channel: nil, connection: nil}}
  end

  @impl true
  def handle_info(:connect, _state) do
    case connect() do
      {:ok, connection, channel} ->
        {:noreply, %{connection: connection, channel: channel}}

      {:error, reason} ->
        Logger.error("Failed to connect to RabbitMQ: #{inspect(reason)}")
        schedule_reconnect()
        {:noreply, %{channel: nil, connection: nil}}
    end
  end

  @impl true
  def handle_info({:DOWN, _, :process, _pid, reason}, _state) do
    # The monitored connection process died: drop stale refs and retry.
    Logger.error("RabbitMQ connection lost: #{inspect(reason)}")
    schedule_reconnect()
    {:noreply, %{channel: nil, connection: nil}}
  end

  @impl true
  def handle_cast({:publish, _routing_key, _payload}, %{channel: nil} = state) do
    Logger.warning("RabbitMQ not connected, message dropped")
    {:noreply, state}
  end

  @impl true
  def handle_cast({:publish, routing_key, payload}, %{channel: channel} = state) do
    # The exchange is declared once in connect/0; the previous per-publish
    # Exchange.direct/3 re-declaration added a synchronous broker
    # round-trip to every message for no benefit, so it was removed.
    AMQP.Basic.publish(channel, exchange_name(), routing_key, payload,
      content_type: "application/json",
      persistent: true
    )

    {:noreply, state}
  end

  defp connect do
    config = Application.get_env(:workload_service, WorkloadService.MessageBus, [])

    with {:ok, connection} <- Connection.open(config),
         {:ok, channel} <- Channel.open(connection) do
      Exchange.declare(channel, exchange_name(), :direct, durable: true)
      # Monitor so we receive :DOWN and can schedule a reconnect.
      Process.monitor(connection.pid)
      {:ok, connection, channel}
    end
  end

  defp exchange_name do
    Application.get_env(:workload_service, WorkloadService.MessageBus, [])
    |> Keyword.get(:exchange, "workload_service.events")
  end

  defp schedule_reconnect do
    Process.send_after(self(), :connect, @reconnect_interval)
  end
end
|
||||||
61
lib/workload_service/projections/task.ex
Normal file
61
lib/workload_service/projections/task.ex
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
defmodule WorkloadService.Projections.Task do
  @moduledoc """
  Read-model schema for tasks. The primary key is the aggregate id string
  "org_id:type:task_id" (e.g. "test:quote:uuid").
  """

  use Ecto.Schema
  import Ecto.Changeset

  # Flop drives list filtering/sorting in the query layer.
  @derive {Flop.Schema,
           filterable: [:status, :org_id, :application_id, :provider_id],
           sortable: [:inserted_at, :updated_at, :status]}

  @primary_key {:id, :string, []}
  schema "tasks" do
    field :org_id, :string
    field :application_id, :string
    field :provider_id, :string
    field :provider_name, :string
    field :task_info, :map
    field :submission, :map
    field :attachments, {:array, :string}
    field :status, :string
    field :version, :integer, default: 1

    timestamps(type: :utc_datetime)
  end

  # Lifecycle states, in order.
  @statuses ["created", "draft", "approved", "completed"]

  @doc "Changeset for status transitions driven by projected events."
  def status_changeset(task, attrs) do
    task
    |> cast(attrs, [:status, :task_info, :submission, :attachments, :updated_at])
    |> validate_inclusion(:status, @statuses)
  end

  @doc "Changeset for inserting a freshly created task row."
  def create_changeset(task, attrs) do
    castable = [
      :id,
      :org_id,
      :application_id,
      :provider_id,
      :provider_name,
      :task_info,
      :submission,
      :attachments,
      :status,
      :version
    ]

    task
    |> cast(attrs, castable)
    |> validate_required([:id, :org_id, :application_id, :provider_id, :provider_name, :status])
    |> validate_inclusion(:status, @statuses)
  end
end
|
||||||
61
lib/workload_service/projectors/task_projector.ex
Normal file
61
lib/workload_service/projectors/task_projector.ex
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
defmodule WorkloadService.Projectors.TaskProjector do
  @moduledoc """
  Projects task domain events into the `tasks` read-model table.
  """

  use Commanded.Projections.Ecto,
    application: WorkloadService.CommandedApp,
    repo: WorkloadService.Repo,
    name: "TaskProjector"

  alias WorkloadService.Aggregates.TaskId
  alias WorkloadService.Events
  alias WorkloadService.Projections.Task

  # (The previous `import Ecto.Query` was unused in this module and has
  # been removed.)

  project(%Events.TaskCreated{} = e, _meta, fn multi ->
    # The aggregate id encodes the org: "org_id:type:task_id".
    %{org_id: org_id} = TaskId.parse!(to_string(e.id))

    Ecto.Multi.insert(multi, :task, %Task{
      id: to_string(e.id),
      org_id: org_id,
      application_id: e.application_id,
      provider_id: e.provider_id,
      provider_name: e.provider_name,
      task_info: e.task_info,
      attachments: e.attachments || [],
      status: "created"
    })
  end)

  project(%Events.SubmissionUpdated{} = e, _meta, fn multi ->
    update_task(multi, e.id, %{
      status: "draft",
      submission: e.submission,
      attachments: e.attachments || []
    })
  end)

  project(%Events.SubmissionApproved{} = e, _meta, fn multi ->
    update_task(multi, e.id, %{status: "approved"})
  end)

  project(%Events.TaskCompleted{} = e, _meta, fn multi ->
    update_task(multi, e.id, %{status: "completed"})
  end)

  # Shared fetch-then-update flow for status transitions (was inlined three
  # times).
  # NOTE(review): like the original inline version, this raises if the row
  # is missing (repo.get -> nil reaches Ecto.Changeset.change/2); confirm
  # event ordering guarantees before hardening.
  defp update_task(multi, id, changes) do
    multi
    |> Ecto.Multi.run(:fetch, fn repo, _ -> {:ok, repo.get(Task, to_string(id))} end)
    |> Ecto.Multi.update(:task, fn %{fetch: task} ->
      Ecto.Changeset.change(task, changes)
    end)
  end
end
|
||||||
37
lib/workload_service/release.ex
Normal file
37
lib/workload_service/release.ex
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
defmodule WorkloadService.Release do
  @moduledoc """
  DB release tasks (event-store init, Ecto migrations) runnable in
  production where Mix is not installed, e.g.
  `bin/workload_service eval "WorkloadService.Release.migrate()"`.
  """

  @app :workload_service

  @doc "Initializes the event store, then runs all pending migrations."
  def migrate do
    load_app()
    init_event_store()

    Enum.each(repos(), fn repo ->
      {:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :up, all: true))
    end)
  end

  @doc "Rolls the given repo back down to `version`."
  def rollback(repo, version) do
    load_app()
    {:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :down, to: version))
  end

  @doc "Creates the event-store schema/tables if they do not yet exist."
  def init_event_store do
    :ok =
      WorkloadService.EventStore.config()
      |> EventStore.Tasks.Init.exec([])
  end

  defp repos, do: Application.fetch_env!(@app, :ecto_repos)

  # Load just enough (ssl, postgrex, app config) to run migrations without
  # starting the full supervision tree.
  defp load_app do
    Application.ensure_all_started(:ssl)
    Application.ensure_all_started(:postgrex)
    Application.ensure_loaded(@app)
  end
end
|
||||||
7
lib/workload_service/repo.ex
Normal file
7
lib/workload_service/repo.ex
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
defmodule WorkloadService.Repo do
  # Primary Postgres repo backing the read model.
  @moduledoc false

  use Ecto.Repo,
    otp_app: :workload_service,
    adapter: Ecto.Adapters.Postgres
end
|
||||||
38
lib/workload_service/workload/queries.ex
Normal file
38
lib/workload_service/workload/queries.ex
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
defmodule WorkloadService.Workload.Queries do
  @moduledoc """
  Read-model queries for tasks. List endpoints paginate/filter via Flop.
  """

  import Ecto.Query

  alias WorkloadService.Projections.Task
  alias WorkloadService.Repo

  @doc "Lists tasks with Flop pagination/filter params."
  def list_tasks(params \\ %{}) do
    Flop.validate_and_run(base_query(), params, for: Task)
  end

  @doc "Lists tasks scoped to a single organisation."
  def list_tasks_by_org(org_id, params \\ %{}) do
    base_query()
    |> where(org_id: ^org_id)
    |> Flop.validate_and_run(params, for: Task)
  end

  @doc "Fetches a task by its primary key."
  def get_task_by_id(id) do
    Task |> Repo.get(id) |> wrap()
  end

  @doc "Fetches the task for an org/application pair."
  def get_task_by_org_and_application(org_id, application_id) do
    Task
    |> Repo.get_by(org_id: org_id, application_id: application_id)
    |> wrap()
  end

  # Normalize a nilable lookup result into ok/error tuples.
  defp wrap(nil), do: {:error, :not_found}
  defp wrap(task), do: {:ok, task}

  defp base_query, do: from(t in Task, as: :task)
end
|
||||||
46
lib/workload_service_web.ex
Normal file
46
lib/workload_service_web.ex
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
defmodule WorkloadServiceWeb do
  @moduledoc """
  The entrypoint for defining the web interface (controllers, router).

  Use in your application as:

      use WorkloadServiceWeb, :controller
      use WorkloadServiceWeb, :router

  (The previous moduledoc advertised `:html`, but no `html/0` block is
  defined here — using it would raise.)

  The quoted blocks below run for every controller/router, so keep them
  short: imports, uses and aliases only. Define real functions in
  dedicated modules and import them here instead.
  """

  def static_paths, do: ~w(assets fonts images favicon.ico robots.txt)

  def router do
    quote do
      use Phoenix.Router, helpers: false

      import Plug.Conn
      import Phoenix.Controller
    end
  end

  def controller do
    quote do
      use Phoenix.Controller, formats: [:json]

      import Plug.Conn

      alias WorkloadServiceWeb.Router.Helpers, as: Routes
    end
  end

  @doc """
  When used, dispatches to the matching quoted block above.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
|
||||||
20
lib/workload_service_web/api_spec.ex
Normal file
20
lib/workload_service_web/api_spec.ex
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
defmodule WorkloadServiceWeb.ApiSpec do
  @moduledoc "OpenAPI specification for the workload service, built from the router."

  alias OpenApiSpex.{Info, OpenApi, Server}
  alias WorkloadServiceWeb.Endpoint

  @behaviour OpenApi

  @impl OpenApi
  def spec do
    spec = %OpenApi{
      servers: [Server.from_endpoint(Endpoint)],
      info: %Info{title: "Workload Service", version: "1.0"},
      paths: OpenApiSpex.Paths.from_router(WorkloadServiceWeb.Router)
    }

    # Inline controller-referenced schema modules into components.
    OpenApiSpex.resolve_schema_modules(spec)
  end
end
|
||||||
3
lib/workload_service_web/controllers.ex
Normal file
3
lib/workload_service_web/controllers.ex
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
defmodule WorkloadServiceWeb.Controllers do
  # Namespace placeholder; individual controllers live in controllers/.
  @moduledoc false
end
|
||||||
28
lib/workload_service_web/controllers/fallback_controller.ex
Normal file
28
lib/workload_service_web/controllers/fallback_controller.ex
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
defmodule WorkloadServiceWeb.FallbackController do
  @moduledoc """
  Translates `{:error, _}` action results into JSON error responses.
  Clause order matters: not-found, then atoms, then error maps, then a
  catch-all that inspects the reason.
  """

  use WorkloadServiceWeb, :controller

  def call(conn, {:error, :not_found}) do
    conn
    |> put_status(:not_found)
    |> put_view(json: WorkloadServiceWeb.ErrorJSON)
    |> render(:"404")
  end

  def call(conn, {:error, reason}) when is_atom(reason) do
    respond(conn, %{error: Atom.to_string(reason)})
  end

  # Changeset-shaped errors: surface the errors map directly.
  def call(conn, {:error, %{errors: errors}}) do
    respond(conn, %{errors: errors})
  end

  def call(conn, {:error, reason}) do
    respond(conn, %{error: inspect(reason)})
  end

  # All non-404 failures render as 422 with a JSON body.
  defp respond(conn, body) do
    conn
    |> put_status(:unprocessable_entity)
    |> json(body)
  end
end
|
||||||
15
lib/workload_service_web/controllers/health_controller.ex
Normal file
15
lib/workload_service_web/controllers/health_controller.ex
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
defmodule WorkloadServiceWeb.HealthController do
  @moduledoc "Liveness (`/health`) and readiness (`/health/ready`) probes."

  use WorkloadServiceWeb, :controller

  def health(conn, _params) do
    conn |> put_status(:ok) |> json(%{status: "ok"})
  end

  def ready(conn, _params) do
    conn |> put_status(:ok) |> json(%{status: "ready"})
  end
end
|
||||||
249
lib/workload_service_web/controllers/task_controller.ex
Normal file
249
lib/workload_service_web/controllers/task_controller.ex
Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
defmodule WorkloadServiceWeb.TaskController do
  @moduledoc """
  Read and state-transition endpoints for the task read model.

  Both `respond_to_quote/2` and `confirm_delivery/2` advance a task through
  the same state machine (created -> draft -> approved -> completed); the
  command dispatched depends on the task's current status. The previously
  six-times-duplicated dispatch/refetch/render flow now lives in
  `advance/3`, and the status->command mappings in `quote_command/3` and
  `solicitation_command/3`.
  """

  use WorkloadServiceWeb, :controller
  use OpenApiSpex.ControllerSpecs

  alias WorkloadService.CommandedApp
  alias WorkloadService.Workload.Queries
  alias WorkloadServiceWeb.Schemas.Task, as: S

  tags(["Tasks"])

  operation(:list,
    summary: "List tasks",
    parameters: [
      page: [in: :query, type: :integer, required: false, example: 1],
      page_size: [in: :query, type: :integer, required: false, example: 20],
      status: [in: :query, type: :string, required: false],
      policy_type: [in: :query, type: :string, required: false],
      org_id: [in: :query, type: :string, required: false],
      application_id: [in: :query, type: :string, required: false],
      provider_id: [in: :query, type: :string, required: false]
    ],
    responses: [
      ok: {"Task list", "application/json", S.TaskListResponse},
      bad_request: {"Invalid params", "application/json", S.ErrorResponse}
    ]
  )

  def list(conn, params) do
    case Queries.list_tasks(params) do
      {:ok, {tasks, meta}} ->
        conn
        |> put_status(:ok)
        |> json(%{
          data: Enum.map(tasks, &task_summary/1),
          meta: meta_json(meta)
        })

      {:error, _} ->
        conn |> put_status(:bad_request) |> json(%{error: "invalid parameters"})
    end
  end

  operation(:show,
    summary: "Get task by ID",
    parameters: [
      id: [in: :path, type: :string, required: true]
    ],
    responses: [
      ok: {"Task detail", "application/json", S.TaskDetailResponse},
      not_found: {"Not found", "application/json", S.ErrorResponse}
    ]
  )

  def show(conn, %{"id" => id}) do
    case Queries.get_task_by_id(id) do
      {:error, :not_found} ->
        conn |> put_status(:not_found) |> json(%{error: "task not found"})

      {:ok, task} ->
        conn |> put_status(:ok) |> json(%{data: task_detail(task)})
    end
  end

  operation(:respond_to_quote,
    summary: "Record quote response",
    parameters: [
      id: [in: :path, type: :string, required: true]
    ],
    request_body: {"Quote response", "application/json", S.QuoteResponseRequest, required: true},
    responses: [
      ok: {"Quote response recorded", "application/json", S.TaskDetailResponse},
      not_found: {"Not found", "application/json", S.ErrorResponse},
      unprocessable_entity: {"Error", "application/json", S.ErrorResponse}
    ]
  )

  def respond_to_quote(conn, %{"id" => id} = params) do
    case Queries.get_task_by_id(id) do
      {:error, :not_found} ->
        conn |> put_status(:not_found) |> json(%{error: "task not found"})

      {:ok, task} ->
        advance(conn, id, quote_command(task.status, id, params))
    end
  end

  operation(:confirm_delivery,
    summary: "Confirm solicitation delivery",
    parameters: [
      id: [in: :path, type: :string, required: true]
    ],
    request_body:
      {"Delivery confirmation", "application/json", S.ConfirmDeliveryRequest, required: false},
    responses: [
      ok: {"Delivery confirmed", "application/json", S.TaskDetailResponse},
      not_found: {"Not found", "application/json", S.ErrorResponse},
      unprocessable_entity: {"Error", "application/json", S.ErrorResponse}
    ]
  )

  def confirm_delivery(conn, %{"id" => id} = params) do
    case Queries.get_task_by_id(id) do
      {:error, :not_found} ->
        conn |> put_status(:not_found) |> json(%{error: "task not found"})

      {:ok, task} ->
        advance(conn, id, solicitation_command(task.status, id, params))
    end
  end

  # nil command means the task's current status allows no transition.
  defp advance(conn, _id, nil) do
    conn |> put_status(:unprocessable_entity) |> json(%{error: "invalid state"})
  end

  # Dispatch the command, then re-read the projection so the response
  # reflects the post-transition state.
  defp advance(conn, id, command) do
    case CommandedApp.dispatch(command) do
      :ok ->
        {:ok, task} = Queries.get_task_by_id(id)
        conn |> put_status(:ok) |> json(%{data: task_detail(task)})

      {:error, reason} ->
        conn |> put_status(:unprocessable_entity) |> json(%{error: inspect(reason)})
    end
  end

  # Status -> command for the quote flow.
  defp quote_command("created", id, params) do
    %WorkloadService.Commands.QuoteTask.SubmitResponse{
      id: id,
      submission: %{
        "quote_id" => params["quote_id"],
        "plans" => params["plans"],
        "valid_until" => params["valid_until"],
        "responded_by" => params["responded_by"],
        "document_data" => params["document_data"]
      },
      attachments: [params["document_url"]] |> Enum.filter(& &1)
    }
  end

  defp quote_command("draft", id, _params) do
    %WorkloadService.Commands.QuoteTask.ApproveSubmission{id: id}
  end

  defp quote_command("approved", id, params) do
    %WorkloadService.Commands.QuoteTask.CompleteTask{
      id: id,
      completed_by: params["completed_by"] || "system"
    }
  end

  defp quote_command(_status, _id, _params), do: nil

  # Status -> command for the solicitation flow.
  defp solicitation_command("created", id, params) do
    %WorkloadService.Commands.SolicitationTask.SubmitResponse{
      id: id,
      submission: %{
        "delivery_confirmed_by" => params["delivery_confirmed_by"] || "system"
      },
      attachments: []
    }
  end

  defp solicitation_command("draft", id, _params) do
    %WorkloadService.Commands.SolicitationTask.ApproveSubmission{id: id}
  end

  defp solicitation_command("approved", id, params) do
    %WorkloadService.Commands.SolicitationTask.CompleteTask{
      id: id,
      completed_by: params["completed_by"] || "system"
    }
  end

  defp solicitation_command(_status, _id, _params), do: nil

  defp task_summary(t) do
    %{
      id: t.id,
      org_id: t.org_id,
      application_id: t.application_id,
      provider_id: t.provider_id,
      provider_name: t.provider_name,
      task_info: t.task_info,
      status: t.status,
      created_at: t.inserted_at
    }
  end

  defp task_detail(t) do
    %{
      id: t.id,
      org_id: t.org_id,
      application_id: t.application_id,
      provider_id: t.provider_id,
      provider_name: t.provider_name,
      task_info: t.task_info,
      submission: t.submission,
      attachments: t.attachments,
      status: t.status,
      version: t.version,
      created_at: t.inserted_at,
      updated_at: t.updated_at
    }
  end

  defp meta_json(meta) do
    %{
      total_count: meta.total_count,
      total_pages: meta.total_pages,
      current_page: meta.current_page,
      page_size: meta.page_size,
      has_next: meta.has_next_page?,
      has_prev: meta.has_previous_page?
    }
  end
end
|
||||||
33
lib/workload_service_web/endpoint.ex
Normal file
33
lib/workload_service_web/endpoint.ex
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
defmodule WorkloadServiceWeb.Endpoint do
  @moduledoc "HTTP endpoint: static files, parsing, session, then the router."

  use Phoenix.Endpoint, otp_app: :workload_service

  # Cookie session config consumed by Plug.Session below.
  @session_options [
    store: :cookie,
    key: "_workload_service_key",
    signing_salt: "workload_salt",
    same_site: "Lax"
  ]

  # Serve static assets from priv/static (plug order matters throughout).
  plug Plug.Static,
    at: "/",
    from: :workload_service,
    gzip: false,
    only: WorkloadServiceWeb.static_paths()

  if code_reloading? do
    plug Phoenix.CodeReloader
  end

  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  plug WorkloadServiceWeb.Router
end
|
||||||
5
lib/workload_service_web/error_json.ex
Normal file
5
lib/workload_service_web/error_json.ex
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
defmodule WorkloadServiceWeb.ErrorJSON do
|
||||||
|
def render(template, _assigns) do
|
||||||
|
%{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
|
||||||
|
end
|
||||||
|
end
|
||||||
32
lib/workload_service_web/router.ex
Normal file
32
lib/workload_service_web/router.ex
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
defmodule WorkloadServiceWeb.Router do
|
||||||
|
use WorkloadServiceWeb, :router
|
||||||
|
|
||||||
|
alias WorkloadServiceWeb.TaskController
|
||||||
|
alias WorkloadServiceWeb.HealthController
|
||||||
|
|
||||||
|
pipeline :api do
|
||||||
|
plug OpenApiSpex.Plug.PutApiSpec, module: WorkloadServiceWeb.ApiSpec
|
||||||
|
end
|
||||||
|
|
||||||
|
scope "/api" do
|
||||||
|
pipe_through [:api]
|
||||||
|
|
||||||
|
get "/health", HealthController, :health
|
||||||
|
get "/health/ready", HealthController, :ready
|
||||||
|
|
||||||
|
get "/openapi", OpenApiSpex.Plug.RenderSpec, []
|
||||||
|
|
||||||
|
scope "/v1" do
|
||||||
|
get "/tasks", TaskController, :list
|
||||||
|
get "/tasks/:id", TaskController, :show
|
||||||
|
post "/tasks/:id/respond-quote", TaskController, :respond_to_quote
|
||||||
|
post "/tasks/:id/confirm-delivery", TaskController, :confirm_delivery
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
if Mix.env() == :dev do
|
||||||
|
scope "/swaggerui" do
|
||||||
|
get "/", OpenApiSpex.Plug.SwaggerUI, path: "/api/openapi"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
146
lib/workload_service_web/schemas/task.ex
Normal file
146
lib/workload_service_web/schemas/task.ex
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
defmodule WorkloadServiceWeb.Schemas.Task do
|
||||||
|
alias OpenApiSpex.Schema
|
||||||
|
|
||||||
|
defmodule PaginationMeta do
|
||||||
|
require OpenApiSpex
|
||||||
|
|
||||||
|
OpenApiSpex.schema(%{
|
||||||
|
title: "PaginationMeta",
|
||||||
|
type: :object,
|
||||||
|
properties: %{
|
||||||
|
total_count: %Schema{type: :integer},
|
||||||
|
total_pages: %Schema{type: :integer},
|
||||||
|
current_page: %Schema{type: :integer},
|
||||||
|
page_size: %Schema{type: :integer},
|
||||||
|
has_next: %Schema{type: :boolean},
|
||||||
|
has_prev: %Schema{type: :boolean}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule Plan do
|
||||||
|
require OpenApiSpex
|
||||||
|
|
||||||
|
OpenApiSpex.schema(%{
|
||||||
|
title: "Plan",
|
||||||
|
type: :object,
|
||||||
|
properties: %{
|
||||||
|
plan_id: %Schema{type: :string},
|
||||||
|
plan_name: %Schema{type: :string},
|
||||||
|
premium: %Schema{type: :number},
|
||||||
|
coverage_details: %Schema{type: :string},
|
||||||
|
deductible: %Schema{type: :number, nullable: true},
|
||||||
|
coverage_limit: %Schema{type: :number, nullable: true}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule TaskSummary do
|
||||||
|
require OpenApiSpex
|
||||||
|
|
||||||
|
OpenApiSpex.schema(%{
|
||||||
|
title: "TaskSummary",
|
||||||
|
type: :object,
|
||||||
|
properties: %{
|
||||||
|
id: %Schema{type: :string},
|
||||||
|
org_id: %Schema{type: :string},
|
||||||
|
application_id: %Schema{type: :string},
|
||||||
|
provider_id: %Schema{type: :string},
|
||||||
|
provider_name: %Schema{type: :string},
|
||||||
|
task_info: %Schema{type: :object},
|
||||||
|
status: %Schema{type: :string, enum: ["created", "draft", "approved", "completed"]},
|
||||||
|
created_at: %Schema{type: :string, format: :"date-time"}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule TaskDetail do
|
||||||
|
require OpenApiSpex
|
||||||
|
|
||||||
|
OpenApiSpex.schema(%{
|
||||||
|
title: "TaskDetail",
|
||||||
|
type: :object,
|
||||||
|
properties: %{
|
||||||
|
id: %Schema{type: :string},
|
||||||
|
org_id: %Schema{type: :string},
|
||||||
|
application_id: %Schema{type: :string},
|
||||||
|
provider_id: %Schema{type: :string},
|
||||||
|
provider_name: %Schema{type: :string},
|
||||||
|
task_info: %Schema{type: :object},
|
||||||
|
submission: %Schema{type: :object, nullable: true},
|
||||||
|
attachments: %Schema{type: :array, items: %Schema{type: :string}},
|
||||||
|
status: %Schema{type: :string, enum: ["created", "draft", "approved", "completed"]},
|
||||||
|
version: %Schema{type: :integer},
|
||||||
|
created_at: %Schema{type: :string, format: :"date-time"},
|
||||||
|
updated_at: %Schema{type: :string, format: :"date-time"}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule QuoteResponseRequest do
|
||||||
|
require OpenApiSpex
|
||||||
|
|
||||||
|
OpenApiSpex.schema(%{
|
||||||
|
title: "QuoteResponseRequest",
|
||||||
|
type: :object,
|
||||||
|
required: [:quote_id, :document_url],
|
||||||
|
properties: %{
|
||||||
|
quote_id: %Schema{type: :string},
|
||||||
|
plans: %Schema{type: :array, items: Plan},
|
||||||
|
valid_until: %Schema{type: :string, format: :date},
|
||||||
|
responded_by: %Schema{type: :string},
|
||||||
|
document_url: %Schema{type: :string},
|
||||||
|
document_data: %Schema{type: :object, additionalProperties: true}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule ConfirmDeliveryRequest do
|
||||||
|
require OpenApiSpex
|
||||||
|
|
||||||
|
OpenApiSpex.schema(%{
|
||||||
|
title: "ConfirmDeliveryRequest",
|
||||||
|
type: :object,
|
||||||
|
properties: %{
|
||||||
|
delivery_confirmed_by: %Schema{type: :string}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule TaskListResponse do
|
||||||
|
require OpenApiSpex
|
||||||
|
|
||||||
|
OpenApiSpex.schema(%{
|
||||||
|
title: "TaskListResponse",
|
||||||
|
type: :object,
|
||||||
|
properties: %{
|
||||||
|
data: %Schema{type: :array, items: TaskSummary},
|
||||||
|
meta: PaginationMeta
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule TaskDetailResponse do
|
||||||
|
require OpenApiSpex
|
||||||
|
|
||||||
|
OpenApiSpex.schema(%{
|
||||||
|
title: "TaskDetailResponse",
|
||||||
|
type: :object,
|
||||||
|
properties: %{
|
||||||
|
data: TaskDetail
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
|
defmodule ErrorResponse do
|
||||||
|
require OpenApiSpex
|
||||||
|
|
||||||
|
OpenApiSpex.schema(%{
|
||||||
|
title: "ErrorResponse",
|
||||||
|
type: :object,
|
||||||
|
properties: %{
|
||||||
|
error: %Schema{type: :string}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
end
|
||||||
|
end
|
||||||
51
lib/workload_service_web/telemetry.ex
Normal file
51
lib/workload_service_web/telemetry.ex
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
defmodule WorkloadServiceWeb.Telemetry do
|
||||||
|
use Supervisor
|
||||||
|
import Telemetry.Metrics
|
||||||
|
|
||||||
|
def start_link(arg) do
|
||||||
|
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def init(_arg) do
|
||||||
|
children = [
|
||||||
|
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
|
||||||
|
]
|
||||||
|
|
||||||
|
Supervisor.init(children, strategy: :one_for_one)
|
||||||
|
end
|
||||||
|
|
||||||
|
def metrics do
|
||||||
|
[
|
||||||
|
summary("phoenix.endpoint.start.system_time",
|
||||||
|
unit: {:native, :millisecond}
|
||||||
|
),
|
||||||
|
summary("phoenix.endpoint.stop.duration",
|
||||||
|
unit: {:native, :millisecond}
|
||||||
|
),
|
||||||
|
summary("phoenix.router_dispatch.start.system_time",
|
||||||
|
tags: [:route],
|
||||||
|
unit: {:native, :millisecond}
|
||||||
|
),
|
||||||
|
summary("phoenix.router_dispatch.exception.duration",
|
||||||
|
tags: [:route],
|
||||||
|
unit: {:native, :millisecond}
|
||||||
|
),
|
||||||
|
summary("phoenix.router_dispatch.stop.duration",
|
||||||
|
tags: [:route],
|
||||||
|
unit: {:native, :millisecond}
|
||||||
|
),
|
||||||
|
summary("workload_service.repo.query.total_time",
|
||||||
|
unit: {:native, :millisecond}
|
||||||
|
),
|
||||||
|
summary("vm.memory.total", unit: {:byte, :kilobyte}),
|
||||||
|
summary("vm.total_run_queue_lengths.total"),
|
||||||
|
summary("vm.total_run_queue_lengths.cpu"),
|
||||||
|
summary("vm.total_run_queue_lengths.io")
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
defp periodic_measurements do
|
||||||
|
[]
|
||||||
|
end
|
||||||
|
end
|
||||||
59
mix.exs
Normal file
59
mix.exs
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
defmodule WorkloadService.MixProject do
|
||||||
|
use Mix.Project
|
||||||
|
|
||||||
|
def project do
|
||||||
|
[
|
||||||
|
app: :workload_service,
|
||||||
|
version: "1.0.0",
|
||||||
|
elixir: "~> 1.16",
|
||||||
|
elixirc_paths: elixirc_paths(Mix.env()),
|
||||||
|
start_permanent: Mix.env() == :prod,
|
||||||
|
aliases: aliases(),
|
||||||
|
deps: deps()
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
def application do
|
||||||
|
[
|
||||||
|
mod: {WorkloadService.Application, []},
|
||||||
|
extra_applications: [:logger, :runtime_tools]
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
defp elixirc_paths(:test), do: ["lib", "test/support"]
|
||||||
|
defp elixirc_paths(_), do: ["lib"]
|
||||||
|
|
||||||
|
defp deps do
|
||||||
|
[
|
||||||
|
{:phoenix, "~> 1.7"},
|
||||||
|
{:phoenix_ecto, "~> 4.4"},
|
||||||
|
{:ecto_sql, "~> 3.11"},
|
||||||
|
{:postgrex, ">= 0.0.0"},
|
||||||
|
{:phoenix_html, "~> 4.0"},
|
||||||
|
{:phoenix_live_reload, "~> 1.2", only: :dev},
|
||||||
|
{:telemetry_metrics, "~> 1.0"},
|
||||||
|
{:telemetry_poller, "~> 1.0"},
|
||||||
|
{:jason, "~> 1.4"},
|
||||||
|
{:dns_cluster, "~> 0.1.1"},
|
||||||
|
{:bandit, "~> 1.5"},
|
||||||
|
{:commanded, "~> 1.4"},
|
||||||
|
{:commanded_eventstore_adapter, "~> 1.4"},
|
||||||
|
{:commanded_ecto_projections, "~> 1.4"},
|
||||||
|
{:eventstore, "~> 1.4"},
|
||||||
|
{:open_api_spex, "~> 3.20"},
|
||||||
|
{:flop, "~> 0.20"},
|
||||||
|
{:amqp, "~> 4.1"},
|
||||||
|
{:uuid, "~> 1.1"},
|
||||||
|
{:req, "~> 0.5"}
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
defp aliases do
|
||||||
|
[
|
||||||
|
setup: ["deps.get", "ecto.setup"],
|
||||||
|
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
|
||||||
|
"ecto.reset": ["ecto.drop", "ecto.setup"],
|
||||||
|
test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
|
||||||
|
]
|
||||||
|
end
|
||||||
|
end
|
||||||
50
mix.lock
Normal file
50
mix.lock
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
%{
|
||||||
|
"amqp": {:hex, :amqp, "4.1.0", "ab993d7a7adb41bc52fc084224441bad34ba04a6ff97fcd4a17f8281ed010ed1", [:mix], [{:amqp_client, "~> 4.0", [hex: :amqp_client, repo: "hexpm", optional: false]}], "hexpm", "360559cb5a4c4a920806a8c281d0418db3a625ce730008c8460749dd9b1cf838"},
|
||||||
|
"amqp_client": {:hex, :amqp_client, "4.2.1", "cff0cc13186e57457dc5745f1b3a4127c6857717cb8f5920dc457c84d0ad00a2", [:make, :rebar3], [{:credentials_obfuscation, "3.5.0", [hex: :credentials_obfuscation, repo: "hexpm", optional: false]}, {:rabbit_common, "4.2.1", [hex: :rabbit_common, repo: "hexpm", optional: false]}], "hexpm", "8ae00b055a58500e0557f73d9c0ffe257487131e603f7f84fe72cbfaaf03838a"},
|
||||||
|
"backoff": {:hex, :backoff, "1.1.6", "83b72ed2108ba1ee8f7d1c22e0b4a00cfe3593a67dbc792799e8cce9f42f796b", [:rebar3], [], "hexpm", "cf0cfff8995fb20562f822e5cc47d8ccf664c5ecdc26a684cbe85c225f9d7c39"},
|
||||||
|
"bandit": {:hex, :bandit, "1.10.4", "02b9734c67c5916a008e7eb7e2ba68aaea6f8177094a5f8d95f1fb99069aac17", [:mix], [{:hpax, "~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}, {:plug, "~> 1.18", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:thousand_island, "~> 1.0", [hex: :thousand_island, repo: "hexpm", optional: false]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "a5faf501042ac1f31d736d9d4a813b3db4ef812e634583b6a457b0928798a51d"},
|
||||||
|
"commanded": {:hex, :commanded, "1.4.9", "289bc371943cf082f1161b1560563f5451ca176c967670cccd63fc3988fcd225", [:mix], [{:backoff, "~> 1.1", [hex: :backoff, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_registry, "~> 0.2", [hex: :telemetry_registry, repo: "hexpm", optional: false]}], "hexpm", "a4f49c23041a23687aa10e99f3db7ee3b8ae470bb615b73b9f887b86437263e7"},
|
||||||
|
"commanded_ecto_projections": {:hex, :commanded_ecto_projections, "1.4.0", "a1b220577577d5e0aee4c92b2d9bc6de221f9c1ac2ab36932cba15881761332f", [:mix], [{:commanded, "~> 1.4", [hex: :commanded, repo: "hexpm", optional: false]}, {:ecto, "~> 3.11", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.11", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "8919a6173cd8f30fe2f948c2967f9289c7f5fe4eeca7abc67966bfca31f4aa9f"},
|
||||||
|
"commanded_eventstore_adapter": {:hex, :commanded_eventstore_adapter, "1.4.2", "4f2d9d9bd8ef7807a5a4c278b4344adddbbbb4d9c86c693872bc85b944be1fe8", [:mix], [{:commanded, "~> 1.4", [hex: :commanded, repo: "hexpm", optional: false]}, {:eventstore, "~> 1.4", [hex: :eventstore, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "26eaa68515e3e73834d769b73bddfea76c3fdcaff085d735c22b82a66ba19b10"},
|
||||||
|
"credentials_obfuscation": {:hex, :credentials_obfuscation, "3.5.0", "61e282adfb4439486b3994faaec69543c7ee6cc7e70c6340e8853fd9deaf8219", [:rebar3], [], "hexpm", "843adbe3246861ce0f1a0fa3222f384834eb31defd8d6b9cba7afd2977c957bc"},
|
||||||
|
"db_connection": {:hex, :db_connection, "2.9.0", "a6a97c5c958a2d7091a58a9be40caf41ab496b0701d21e1d1abff3fa27a7f371", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "17d502eacaf61829db98facf6f20808ed33da6ccf495354a41e64fe42f9c509c"},
|
||||||
|
"decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"},
|
||||||
|
"dns_cluster": {:hex, :dns_cluster, "0.1.3", "0bc20a2c88ed6cc494f2964075c359f8c2d00e1bf25518a6a6c7fd277c9b0c66", [:mix], [], "hexpm", "46cb7c4a1b3e52c7ad4cbe33ca5079fbde4840dedeafca2baf77996c2da1bc33"},
|
||||||
|
"ecto": {:hex, :ecto, "3.13.5", "9d4a69700183f33bf97208294768e561f5c7f1ecf417e0fa1006e4a91713a834", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "df9efebf70cf94142739ba357499661ef5dbb559ef902b68ea1f3c1fabce36de"},
|
||||||
|
"ecto_sql": {:hex, :ecto_sql, "3.13.5", "2f8282b2ad97bf0f0d3217ea0a6fff320ead9e2f8770f810141189d182dc304e", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.13.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.7", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.19 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "aa36751f4e6a2b56ae79efb0e088042e010ff4935fc8684e74c23b1f49e25fdc"},
|
||||||
|
"eventstore": {:hex, :eventstore, "1.4.8", "26778c991cfb078f3906a4267060efc7bb5e5943f69ddb8ae6fb60f07042a66e", [:mix], [{:fsm, "~> 0.3", [hex: :fsm, repo: "hexpm", optional: false]}, {:gen_stage, "~> 1.2", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: true]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.17", [hex: :postgrex, repo: "hexpm", optional: false]}], "hexpm", "30c914602fdea8db5992a90ecb1f84068531e764cf0c066be71ff0eec4e3bcb9"},
|
||||||
|
"file_system": {:hex, :file_system, "1.1.1", "31864f4685b0148f25bd3fbef2b1228457c0c89024ad67f7a81a3ffbc0bbad3a", [:mix], [], "hexpm", "7a15ff97dfe526aeefb090a7a9d3d03aa907e100e262a0f8f7746b78f8f87a5d"},
|
||||||
|
"finch": {:hex, :finch, "0.21.0", "b1c3b2d48af02d0c66d2a9ebfb5622be5c5ecd62937cf79a88a7f98d48a8290c", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "87dc6e169794cb2570f75841a19da99cfde834249568f2a5b121b809588a4377"},
|
||||||
|
"flop": {:hex, :flop, "0.26.3", "9bc700b34f96a57e56aaa89b850926356311372556eacd5a1abe0fdd0ea40bf2", [:mix], [{:ecto, "~> 3.11", [hex: :ecto, repo: "hexpm", optional: false]}, {:nimble_options, "~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}], "hexpm", "cd77588229778ac55560c90dfbe15ab6486773f067d6e52db9fa703b8c9a9d2d"},
|
||||||
|
"fsm": {:hex, :fsm, "0.3.1", "087aa9b02779a84320dc7a2d8464452b5308e29877921b2bde81cdba32a12390", [:mix], [], "hexpm", "fbf0d53f89e9082b326b0b5828b94b4c549ff9d1452bbfd00b4d1ac082208e96"},
|
||||||
|
"gen_stage": {:hex, :gen_stage, "1.3.2", "7c77e5d1e97de2c6c2f78f306f463bca64bf2f4c3cdd606affc0100b89743b7b", [:mix], [], "hexpm", "0ffae547fa777b3ed889a6b9e1e64566217413d018cabd825f786e843ffe63e7"},
|
||||||
|
"hpax": {:hex, :hpax, "1.0.3", "ed67ef51ad4df91e75cc6a1494f851850c0bd98ebc0be6e81b026e765ee535aa", [:mix], [], "hexpm", "8eab6e1cfa8d5918c2ce4ba43588e894af35dbd8e91e6e55c817bca5847df34a"},
|
||||||
|
"jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
|
||||||
|
"mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"},
|
||||||
|
"mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"},
|
||||||
|
"nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
|
||||||
|
"nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
|
||||||
|
"open_api_spex": {:hex, :open_api_spex, "3.22.2", "0b3c4f572ee69cb6c936abf426b9d84d8eebd34960871fd77aead746f0d69cb0", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0 or ~> 6.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "0a4fc08472d75e9cfe96e0748c6b1565b3b4398f97bf43fcce41b41b6fd3fb33"},
|
||||||
|
"phoenix": {:hex, :phoenix, "1.8.5", "919db335247e6d4891764dc3063415b0d2457641c5f9b3751b5df03d8e20bbcf", [:mix], [{:bandit, "~> 1.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "83b2bb125127e02e9f475c8e3e92736325b5b01b0b9b05407bcb4083b7a32485"},
|
||||||
|
"phoenix_ecto": {:hex, :phoenix_ecto, "4.7.0", "75c4b9dfb3efdc42aec2bd5f8bccd978aca0651dbcbc7a3f362ea5d9d43153c6", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "1d75011e4254cb4ddf823e81823a9629559a1be93b4321a6a5f11a5306fbf4cc"},
|
||||||
|
"phoenix_html": {:hex, :phoenix_html, "4.3.0", "d3577a5df4b6954cd7890c84d955c470b5310bb49647f0a114a6eeecc850f7ad", [:mix], [], "hexpm", "3eaa290a78bab0f075f791a46a981bbe769d94bc776869f4f3063a14f30497ad"},
|
||||||
|
"phoenix_live_reload": {:hex, :phoenix_live_reload, "1.6.2", "b18b0773a1ba77f28c52decbb0f10fd1ac4d3ae5b8632399bbf6986e3b665f62", [:mix], [{:file_system, "~> 0.2.10 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "d1f89c18114c50d394721365ffb428cce24f1c13de0467ffa773e2ff4a30d5b9"},
|
||||||
|
"phoenix_pubsub": {:hex, :phoenix_pubsub, "2.2.0", "ff3a5616e1bed6804de7773b92cbccfc0b0f473faf1f63d7daf1206c7aeaaa6f", [:mix], [], "hexpm", "adc313a5bf7136039f63cfd9668fde73bba0765e0614cba80c06ac9460ff3e96"},
|
||||||
|
"phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"},
|
||||||
|
"plug": {:hex, :plug, "1.19.1", "09bac17ae7a001a68ae393658aa23c7e38782be5c5c00c80be82901262c394c0", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "560a0017a8f6d5d30146916862aaf9300b7280063651dd7e532b8be168511e62"},
|
||||||
|
"plug_crypto": {:hex, :plug_crypto, "2.1.1", "19bda8184399cb24afa10be734f84a16ea0a2bc65054e23a62bb10f06bc89491", [:mix], [], "hexpm", "6470bce6ffe41c8bd497612ffde1a7e4af67f36a15eea5f921af71cf3e11247c"},
|
||||||
|
"postgrex": {:hex, :postgrex, "0.22.0", "fb027b58b6eab1f6de5396a2abcdaaeb168f9ed4eccbb594e6ac393b02078cbd", [:mix], [{:db_connection, "~> 2.9", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "a68c4261e299597909e03e6f8ff5a13876f5caadaddd0d23af0d0a61afcc5d84"},
|
||||||
|
"rabbit_common": {:hex, :rabbit_common, "4.2.1", "1d64e391e12116b76b1425eb96b7552de51f0301093eba669b5334f4759cc1e8", [:make, :rebar3], [{:credentials_obfuscation, "3.5.0", [hex: :credentials_obfuscation, repo: "hexpm", optional: false]}, {:ranch, "2.2.0", [hex: :ranch, repo: "hexpm", optional: false]}, {:recon, "2.5.6", [hex: :recon, repo: "hexpm", optional: false]}, {:thoas, "1.2.1", [hex: :thoas, repo: "hexpm", optional: false]}], "hexpm", "ff509b07e639b1784898c28031e5204fea14260172e4fc339f94405586037e40"},
|
||||||
|
"ranch": {:hex, :ranch, "2.2.0", "25528f82bc8d7c6152c57666ca99ec716510fe0925cb188172f41ce93117b1b0", [:make, :rebar3], [], "hexpm", "fa0b99a1780c80218a4197a59ea8d3bdae32fbff7e88527d7d8a4787eff4f8e7"},
|
||||||
|
"recon": {:hex, :recon, "2.5.6", "9052588e83bfedfd9b72e1034532aee2a5369d9d9343b61aeb7fbce761010741", [:mix, :rebar3], [], "hexpm", "96c6799792d735cc0f0fd0f86267e9d351e63339cbe03df9d162010cefc26bb0"},
|
||||||
|
"req": {:hex, :req, "0.5.17", "0096ddd5b0ed6f576a03dde4b158a0c727215b15d2795e59e0916c6971066ede", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "0b8bc6ffdfebbc07968e59d3ff96d52f2202d0536f10fef4dc11dc02a2a43e39"},
|
||||||
|
"telemetry": {:hex, :telemetry, "1.4.1", "ab6de178e2b29b58e8256b92b382ea3f590a47152ca3651ea857a6cae05ac423", [:rebar3], [], "hexpm", "2172e05a27531d3d31dd9782841065c50dd5c3c7699d95266b2edd54c2dafa1c"},
|
||||||
|
"telemetry_metrics": {:hex, :telemetry_metrics, "1.1.0", "5bd5f3b5637e0abea0426b947e3ce5dd304f8b3bc6617039e2b5a008adc02f8f", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e7b79e8ddfde70adb6db8a6623d1778ec66401f366e9a8f5dd0955c56bc8ce67"},
|
||||||
|
"telemetry_poller": {:hex, :telemetry_poller, "1.3.0", "d5c46420126b5ac2d72bc6580fb4f537d35e851cc0f8dbd571acf6d6e10f5ec7", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "51f18bed7128544a50f75897db9974436ea9bfba560420b646af27a9a9b35211"},
|
||||||
|
"telemetry_registry": {:hex, :telemetry_registry, "0.3.2", "701576890320be6428189bff963e865e8f23e0ff3615eade8f78662be0fc003c", [:mix, :rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e7ed191eb1d115a3034af8e1e35e4e63d5348851d556646d46ca3d1b4e16bab9"},
|
||||||
|
"thoas": {:hex, :thoas, "1.2.1", "19a25f31177a17e74004d4840f66d791d4298c5738790fa2cc73731eb911f195", [:rebar3], [], "hexpm", "e38697edffd6e91bd12cea41b155115282630075c2a727e7a6b2947f5408b86a"},
|
||||||
|
"thousand_island": {:hex, :thousand_island, "1.4.3", "2158209580f633be38d43ec4e3ce0a01079592b9657afff9080d5d8ca149a3af", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6e4ce09b0fd761a58594d02814d40f77daff460c48a7354a15ab353bb998ea0b"},
|
||||||
|
"uuid": {:hex, :uuid, "1.1.8", "e22fc04499de0de3ed1116b770c7737779f226ceefa0badb3592e64d5cfb4eb9", [:mix], [], "hexpm", "c790593b4c3b601f5dc2378baae7efaf5b3d73c4c6456ba85759905be792f2ac"},
|
||||||
|
"websock": {:hex, :websock, "0.5.3", "2f69a6ebe810328555b6fe5c831a851f485e303a7c8ce6c5f675abeb20ebdadc", [:mix], [], "hexpm", "6105453d7fac22c712ad66fab1d45abdf049868f253cf719b625151460b8b453"},
|
||||||
|
"websock_adapter": {:hex, :websock_adapter, "0.5.9", "43dc3ba6d89ef5dec5b1d0a39698436a1e856d000d84bf31a3149862b01a287f", [:mix], [{:bandit, ">= 0.6.0", [hex: :bandit, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:websock, "~> 0.5", [hex: :websock, repo: "hexpm", optional: false]}], "hexpm", "5534d5c9adad3c18a0f58a9371220d75a803bf0b9a3d87e6fe072faaeed76a08"},
|
||||||
|
}
|
||||||
14
ops/chart/Chart.yaml
Normal file
14
ops/chart/Chart.yaml
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
apiVersion: v2
|
||||||
|
name: workload-service
|
||||||
|
description: Workload service for quote and solicitation tasks
|
||||||
|
type: application
|
||||||
|
version: 0.1.0
|
||||||
|
appVersion: "1.0.0"
|
||||||
|
keywords:
|
||||||
|
- elixir
|
||||||
|
- commanded
|
||||||
|
- cqrs
|
||||||
|
dependencies:
|
||||||
|
- name: common
|
||||||
|
version: "4.6.2"
|
||||||
|
repository: https://bjw-s-labs.github.io/helm-charts/
|
||||||
4
ops/chart/templates/common.tpl
Normal file
4
ops/chart/templates/common.tpl
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
{{/*
|
||||||
|
Render all resources provided by the common library
|
||||||
|
*/}}
|
||||||
|
{{- include "bjw-s.common.loader.all" . -}}
|
||||||
247
ops/chart/values.yaml
Normal file
247
ops/chart/values.yaml
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
controllers:
|
||||||
|
main:
|
||||||
|
enabled: true
|
||||||
|
type: deployment
|
||||||
|
replicas: 1
|
||||||
|
initContainers:
|
||||||
|
migrate:
|
||||||
|
image:
|
||||||
|
repository: gitea.corredorconect.com/software-engineering/workload-service
|
||||||
|
tag: '{{ $.Chart.AppVersion }}'
|
||||||
|
command:
|
||||||
|
- "/bin/workload_service"
|
||||||
|
args:
|
||||||
|
- "eval"
|
||||||
|
- "WorkloadService.Release.migrate"
|
||||||
|
env:
|
||||||
|
MIX_ENV: prod
|
||||||
|
SECRET_KEY_BASE:
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-secrets'
|
||||||
|
key: secretKeyBase
|
||||||
|
RELEASE_COOKIE:
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-secrets'
|
||||||
|
key: cookie
|
||||||
|
DATABASE_URL:
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-cluster-pg-app'
|
||||||
|
key: uri
|
||||||
|
containers:
|
||||||
|
main:
|
||||||
|
image:
|
||||||
|
repository: gitea.corredorconect.com/software-engineering/workload-service
|
||||||
|
tag: '{{ $.Chart.AppVersion }}'
|
||||||
|
env:
|
||||||
|
LOG_LEVEL: debug
|
||||||
|
MIX_ENV: prod
|
||||||
|
PORT: "8080"
|
||||||
|
PHX_HOST: "0.0.0.0"
|
||||||
|
PHX_SERVER: "true"
|
||||||
|
RABBITMQ_HOST:
|
||||||
|
value: "rabbitmq.rabbitmq.svc.cluster.local"
|
||||||
|
RABBITMQ_VHOST:
|
||||||
|
value: "application"
|
||||||
|
RABBITMQ_USERNAME:
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-rabbitmq-user-user-credentials'
|
||||||
|
key: username
|
||||||
|
RABBITMQ_PASSWORD:
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-rabbitmq-user-user-credentials'
|
||||||
|
key: password
|
||||||
|
RELEASE_COOKIE:
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-secrets'
|
||||||
|
key: cookie
|
||||||
|
SECRET_KEY_BASE:
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-secrets'
|
||||||
|
key: secretKeyBase
|
||||||
|
DATABASE_URL:
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-cluster-pg-app'
|
||||||
|
key: uri
|
||||||
|
probes:
|
||||||
|
liveness:
|
||||||
|
enabled: true
|
||||||
|
custom: true
|
||||||
|
spec:
|
||||||
|
httpGet:
|
||||||
|
path: /health
|
||||||
|
port: 8080
|
||||||
|
initialDelaySeconds: 30
|
||||||
|
periodSeconds: 10
|
||||||
|
readiness:
|
||||||
|
enabled: true
|
||||||
|
custom: true
|
||||||
|
spec:
|
||||||
|
httpGet:
|
||||||
|
path: /health/ready
|
||||||
|
port: 8080
|
||||||
|
initialDelaySeconds: 5
|
||||||
|
periodSeconds: 5
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
cpu: 100m
|
||||||
|
memory: 256Mi
|
||||||
|
limits:
|
||||||
|
cpu: 500m
|
||||||
|
memory: 512Mi
|
||||||
|
|
||||||
|
service:
|
||||||
|
main:
|
||||||
|
controller: main
|
||||||
|
type: ClusterIP
|
||||||
|
ports:
|
||||||
|
http:
|
||||||
|
port: 8080
|
||||||
|
protocol: HTTP
|
||||||
|
|
||||||
|
rawResources:
|
||||||
|
rabbitmq-user:
|
||||||
|
enabled: true
|
||||||
|
apiVersion: rabbitmq.com/v1beta1
|
||||||
|
kind: User
|
||||||
|
sufix: rabbitmq-user
|
||||||
|
spec:
|
||||||
|
spec:
|
||||||
|
rabbitmqClusterReference:
|
||||||
|
name: rabbitmq
|
||||||
|
namespace: rabbitmq
|
||||||
|
tags:
|
||||||
|
- administrator
|
||||||
|
|
||||||
|
rabbitmq-user-permission:
|
||||||
|
enabled: true
|
||||||
|
apiVersion: rabbitmq.com/v1beta1
|
||||||
|
kind: Permission
|
||||||
|
sufix: rabbitmq-user-permission
|
||||||
|
spec:
|
||||||
|
spec:
|
||||||
|
rabbitmqClusterReference:
|
||||||
|
name: rabbitmq
|
||||||
|
namespace: rabbitmq
|
||||||
|
vhost: "application"
|
||||||
|
userReference:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-rabbitmq-user'
|
||||||
|
permissions:
|
||||||
|
write: ".*"
|
||||||
|
configure: ".*"
|
||||||
|
read: ".*"
|
||||||
|
|
||||||
|
exchange-quote-requested:
|
||||||
|
enabled: true
|
||||||
|
apiVersion: rabbitmq.com/v1beta1
|
||||||
|
kind: Exchange
|
||||||
|
suffix: exchange-quote-requested
|
||||||
|
spec:
|
||||||
|
spec:
|
||||||
|
name: workload_service.events.quote_requested
|
||||||
|
type: topic
|
||||||
|
durable: true
|
||||||
|
rabbitmqClusterReference:
|
||||||
|
name: rabbitmq
|
||||||
|
namespace: rabbitmq
|
||||||
|
|
||||||
|
exchange-solicitation-requested:
|
||||||
|
enabled: true
|
||||||
|
apiVersion: rabbitmq.com/v1beta1
|
||||||
|
kind: Exchange
|
||||||
|
suffix: exchange-solicitation-requested
|
||||||
|
spec:
|
||||||
|
spec:
|
||||||
|
name: workload_service.events.solicitation_requested
|
||||||
|
type: topic
|
||||||
|
durable: true
|
||||||
|
rabbitmqClusterReference:
|
||||||
|
name: rabbitmq
|
||||||
|
namespace: rabbitmq
|
||||||
|
|
||||||
|
exchange-task-completed:
|
||||||
|
enabled: true
|
||||||
|
apiVersion: rabbitmq.com/v1beta1
|
||||||
|
kind: Exchange
|
||||||
|
suffix: exchange-task-completed
|
||||||
|
spec:
|
||||||
|
spec:
|
||||||
|
name: workload_service.events.task_completed
|
||||||
|
type: topic
|
||||||
|
durable: true
|
||||||
|
rabbitmqClusterReference:
|
||||||
|
name: rabbitmq
|
||||||
|
namespace: rabbitmq
|
||||||
|
|
||||||
|
password-generator:
|
||||||
|
enabled: true
|
||||||
|
apiVersion: generators.external-secrets.io/v1alpha1
|
||||||
|
kind: Password
|
||||||
|
suffix: password-generator
|
||||||
|
spec:
|
||||||
|
spec:
|
||||||
|
length: 32
|
||||||
|
noUpper: false
|
||||||
|
allowRepeat: true
|
||||||
|
secretKeys:
|
||||||
|
- cookie
|
||||||
|
- secretKeyBase
|
||||||
|
|
||||||
|
external-secret:
|
||||||
|
enabled: true
|
||||||
|
apiVersion: external-secrets.io/v1
|
||||||
|
kind: ExternalSecret
|
||||||
|
suffix: secrets
|
||||||
|
spec:
|
||||||
|
spec:
|
||||||
|
refreshInterval: 0s
|
||||||
|
secretStoreRef:
|
||||||
|
name: cluster-secrets-store
|
||||||
|
kind: ClusterSecretStore
|
||||||
|
target:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-secrets'
|
||||||
|
creationPolicy: Owner
|
||||||
|
dataFrom:
|
||||||
|
- sourceRef:
|
||||||
|
generatorRef:
|
||||||
|
apiVersion: generators.external-secrets.io/v1alpha1
|
||||||
|
kind: Password
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-password-generator'
|
||||||
|
|
||||||
|
cluster:
|
||||||
|
enabled: true
|
||||||
|
apiVersion: postgresql.cnpg.io/v1
|
||||||
|
kind: Cluster
|
||||||
|
suffix: pg
|
||||||
|
spec:
|
||||||
|
spec:
|
||||||
|
description: "PostgreSQL cluster for workload-service"
|
||||||
|
instances: 1
|
||||||
|
bootstrap:
|
||||||
|
initdb:
|
||||||
|
database: workload_service
|
||||||
|
owner: workload_service
|
||||||
|
storage:
|
||||||
|
size: 5Gi
|
||||||
|
|
||||||
|
database:
|
||||||
|
enabled: true
|
||||||
|
apiVersion: postgresql.cnpg.io/v1
|
||||||
|
kind: Database
|
||||||
|
suffix: database
|
||||||
|
spec:
|
||||||
|
spec:
|
||||||
|
name: workload_service
|
||||||
|
owner: workload_service
|
||||||
|
cluster:
|
||||||
|
name: '{{ include "bjw-s.common.lib.chart.names.fullname" $ }}-cluster-pg'
|
||||||
|
schemas:
|
||||||
|
- name: eventstore
|
||||||
|
owner: workload_service
|
||||||
25
priv/repo/migrations/20240101000001_create_tasks.exs
Normal file
25
priv/repo/migrations/20240101000001_create_tasks.exs
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
defmodule WorkloadService.Repo.Migrations.CreateTasks do
|
||||||
|
use Ecto.Migration
|
||||||
|
|
||||||
|
def change do
|
||||||
|
create table(:tasks, primary_key: false) do
|
||||||
|
add :id, :string, primary_key: true
|
||||||
|
add :org_id, :string, null: false
|
||||||
|
add :application_id, :string, null: false
|
||||||
|
add :provider_id, :string, null: false
|
||||||
|
add :provider_name, :string
|
||||||
|
add :task_info, :map, default: %{}
|
||||||
|
add :submission, :map
|
||||||
|
add :attachments, {:array, :string}, default: []
|
||||||
|
add :status, :string, null: false, default: "created"
|
||||||
|
add :version, :integer, default: 1
|
||||||
|
|
||||||
|
timestamps type: :utc_datetime
|
||||||
|
end
|
||||||
|
|
||||||
|
create index(:tasks, [:application_id])
|
||||||
|
create index(:tasks, [:org_id])
|
||||||
|
create index(:tasks, [:provider_id])
|
||||||
|
create index(:tasks, [:status])
|
||||||
|
end
|
||||||
|
end
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
defmodule WorkloadService.Repo.Migrations.CreateProjectionVersions do
|
||||||
|
use Ecto.Migration
|
||||||
|
|
||||||
|
def change do
|
||||||
|
create table(:projection_versions, primary_key: false) do
|
||||||
|
add(:projection_name, :text, primary_key: true)
|
||||||
|
add(:last_seen_event_number, :bigint)
|
||||||
|
|
||||||
|
timestamps(type: :naive_datetime_usec)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
11
priv/repo/seeds.exs
Normal file
11
priv/repo/seeds.exs
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
defmodule WorkloadService.Repo.Seeds do
|
||||||
|
@moduledoc """
|
||||||
|
Seeds for development and testing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
alias WorkloadService.Repo
|
||||||
|
|
||||||
|
def run do
|
||||||
|
IO.puts("No seeds defined for workload_service")
|
||||||
|
end
|
||||||
|
end
|
||||||
5
rel/overlays/bin/migrate
Normal file
5
rel/overlays/bin/migrate
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
cd -P -- "$(dirname -- "$0")"
|
||||||
|
exec ./workload_service eval WorkloadService.Release.migrate
|
||||||
1
rel/overlays/bin/migrate.bat
Normal file
1
rel/overlays/bin/migrate.bat
Normal file
@@ -0,0 +1 @@
|
|||||||
|
call "%~dp0\workload_service" eval WorkloadService.Release.migrate
|
||||||
5
rel/overlays/bin/server
Normal file
5
rel/overlays/bin/server
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
set -eu
|
||||||
|
|
||||||
|
cd -P -- "$(dirname -- "$0")"
|
||||||
|
PHX_SERVER=true exec ./workload_service start
|
||||||
2
rel/overlays/bin/server.bat
Normal file
2
rel/overlays/bin/server.bat
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
set PHX_SERVER=true
|
||||||
|
call "%~dp0\workload_service" start
|
||||||
38
rel/vm.args.eex
Normal file
38
rel/vm.args.eex
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
## --- memory optimisation (embedded/low-RAM targets) ---
|
||||||
|
|
||||||
|
## disable carrier utilization limit
|
||||||
|
+MBacul 0
|
||||||
|
+MHacul 0
|
||||||
|
|
||||||
|
## smaller carrier sizes
|
||||||
|
+MBsmbcs 64
|
||||||
|
+MBlmbcs 128
|
||||||
|
+MHsmbcs 64
|
||||||
|
+MHlmbcs 128
|
||||||
|
|
||||||
|
## smaller main carrier
|
||||||
|
+MMscs 20
|
||||||
|
|
||||||
|
## --- scheduler tuning ---
|
||||||
|
|
||||||
|
+S 1:1
|
||||||
|
+SDcpu 1:1
|
||||||
|
+SDio 1
|
||||||
|
|
||||||
|
## --- resource limits ---
|
||||||
|
|
||||||
|
+t 100000
|
||||||
|
+P 50000
|
||||||
|
+Q 8192
|
||||||
|
|
||||||
|
## --- general ---
|
||||||
|
|
||||||
|
+c false
|
||||||
|
+sbwt none
|
||||||
|
+sbwtdcpu none
|
||||||
|
+sbwtdio none
|
||||||
|
+swt very_low
|
||||||
|
+swtdcpu very_low
|
||||||
|
+swtdio very_low
|
||||||
|
+secio false
|
||||||
|
+K true
|
||||||
Reference in New Issue
Block a user