This commit is contained in:
Haim Kortovich
2026-03-05 11:35:01 -05:00
commit 072dbf6e66
43 changed files with 1400 additions and 0 deletions

View File

@@ -0,0 +1,4 @@
# Formatter configuration consumed by `mix format`.
[
# Import formatter rules exported by the :ecto_sql dependency so its
# DSL macros (e.g. migration `create`/`add`) keep their conventional layout.
import_deps: [:ecto_sql],
# NOTE(review): only top-level *.exs scripts are formatted here; lib/, test/
# and priv/ sources are not covered — confirm that is intentional (this may
# be one of several .formatter.exs files in the project).
inputs: ["*.exs"]
]

View File

@@ -0,0 +1,19 @@
defmodule CustomerService.Repo.Migrations.AddCustomerTable do
  @moduledoc false
  use Ecto.Migration

  # Creates the customers table. The default integer primary key is
  # disabled in favor of a UUID :id column supplied by the application.
  def change do
    create(
      table(:customers, primary_key: false) do
        add(:id, :uuid, primary_key: true)
        add(:first_name, :string)
        add(:last_name, :string)
        add(:birth_date, :date)
        add(:gender, :string)
        add(:email, :string)
        add(:phone, :string)

        timestamps()
      end
    )

    # Non-unique lookup index on email; uniqueness is not enforced here.
    create(index(:customers, [:email]))
  end
end

View File

@@ -0,0 +1,12 @@
defmodule CreateProjectionVersions do
  @moduledoc false
  use Ecto.Migration

  # Creates the projection_versions table, keyed by projection name.
  # NOTE(review): presumably a read-model/projector checkpoint table that
  # records the last event number each projection has processed — confirm
  # against the projector code that reads it.
  def change do
    create table(:projection_versions, primary_key: false) do
      add :projection_name, :text, primary_key: true
      add :last_seen_event_number, :bigint

      # Timestamps stored with microsecond precision.
      timestamps type: :naive_datetime_usec
    end
  end
end

11
priv/repo/seeds.exs Normal file
View File

@@ -0,0 +1,11 @@
# Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# CustomerService.Repo.insert!(%CustomerService.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.

BIN
priv/static/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 152 B

5
priv/static/robots.txt Normal file
View File

@@ -0,0 +1,5 @@
# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-agent: *
# Disallow: /