diff --git a/grai-server/app/connections/adapters/base.py b/grai-server/app/connections/adapters/base.py
index fe80aac98..8def3bb8e 100644
--- a/grai-server/app/connections/adapters/base.py
+++ b/grai-server/app/connections/adapters/base.py
@@ -133,6 +133,7 @@ def get_nodes_and_edges(self):
     def run_update(self):
         nodes, edges = self.integration.get_nodes_and_edges()
         capture_quarantined_errors(self.integration, self.run)
+        update(self.run.workspace, self.run.source, nodes)
         update(self.run.workspace, self.run.source, edges)
 
 
diff --git a/grai-server/app/connections/migrations/0031_alter_connector_slug_alter_run_status.py b/grai-server/app/connections/migrations/0031_alter_connector_slug_alter_run_status.py
new file mode 100644
index 000000000..d9eaf3f89
--- /dev/null
+++ b/grai-server/app/connections/migrations/0031_alter_connector_slug_alter_run_status.py
@@ -0,0 +1,48 @@
+# Generated by Django 4.2.11 on 2024-05-01 00:12
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("connections", "0030_alter_connector_options_connector_priority_and_more"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="connector",
+            name="slug",
+            field=models.CharField(
+                blank=True,
+                choices=[
+                    ("postgres", "Postgres"),
+                    ("snowflake", "Snowflake"),
+                    ("dbt", "dbt"),
+                    ("dbt_cloud", "dbt Cloud"),
+                    ("yaml_file", "YAML"),
+                    ("mssql", "Microsoft SQL Server"),
+                    ("bigquery", "Google BigQuery"),
+                    ("fivetran", "Fivetran"),
+                    ("mysql", "MySQL"),
+                    ("redshift", "Amazon Redshift"),
+                    ("metabase", "Metabase"),
+                    ("looker", "Looker"),
+                    ("openlineage", "OpenLineage"),
+                    ("flatfile", "Flat File"),
+                    ("cube", "Cube"),
+                ],
+                max_length=255,
+                null=True,
+            ),
+        ),
+        migrations.AlterField(
+            model_name="run",
+            name="status",
+            field=models.CharField(
+                choices=[("pending", "Pending"), ("running", "Running"), ("success", "Success"), ("error", "Error")],
+                default="pending",
+                max_length=255,
+            ),
+        ),
+    ]
diff --git a/grai-server/app/connections/models.py b/grai-server/app/connections/models.py
index 2f420e7d9..b0150b361 100755
--- a/grai-server/app/connections/models.py
+++ b/grai-server/app/connections/models.py
@@ -5,6 +5,7 @@
 from django.dispatch import receiver
 from django.db.models.signals import pre_save
 from django.utils import timezone
+from enum import Enum
 
 
 class ConnectorSlugs(models.TextChoices):
@@ -172,6 +173,13 @@ def save(self, *args, **kwargs):
             task.delete()
 
 
+class RunStatus(models.TextChoices):
+    PENDING = "pending", "Pending"
+    RUNNING = "running", "Running"
+    SUCCESS = "success", "Success"
+    ERROR = "error", "Error"
+
+
 class Run(TenantModel):
     TESTS = "tests"
     UPDATE = "update"
@@ -202,7 +210,7 @@ class Run(TenantModel):
         blank=True,
         null=True,
     )
-    status = models.CharField(max_length=255)
+    status = models.CharField(max_length=255, choices=RunStatus.choices, default=RunStatus.PENDING)
     metadata = models.JSONField(default=dict)
     workspace = models.ForeignKey(
         "workspaces.Workspace",
diff --git a/grai-server/app/connections/task_helpers.py b/grai-server/app/connections/task_helpers.py
index e042458db..8c7f3141d 100644
--- a/grai-server/app/connections/task_helpers.py
+++ b/grai-server/app/connections/task_helpers.py
@@ -6,6 +6,7 @@
 from typing import (
     Any,
     Dict,
+    Iterable,
     List,
     Optional,
     Protocol,
@@ -16,7 +17,7 @@
     Union,
 )
 from uuid import UUID
-
+from time import sleep
 from django.contrib.postgres.aggregates import ArrayAgg
 from django.db import models
 from django.db.models import Q, Value
@@ -38,6 +39,8 @@
 from .adapters.schemas import model_to_schema, schema_to_model
 from itertools import islice
 from pympler import asizeof
+from functools import reduce
+from django.db.models import Subquery
 
 
 class NameNamespace(Protocol):
@@ -62,10 +65,17 @@ class SpecNameNamespace(Protocol):
 
 
 def to_dict(instance):
+    """
+    Shallow conversion of a model instance to a dictionary.
+    This is useful for merging model instances but should not be relied on for serialization.
+    """
     opts = instance._meta
-    data = {}
-    for f in chain(opts.concrete_fields, opts.private_fields):
-        data[f.name] = f.value_from_object(instance)
+    data = {
+        f.name: getattr(instance, f.name) if hasattr(instance, f.name) else f.value_from_object(instance)
+        for f in chain(opts.concrete_fields, opts.private_fields)
+    }
     for f in opts.many_to_many:
         data[f.name] = [i.id for i in f.value_from_object(instance)]
     return data
@@ -89,7 +99,7 @@ def merge_node_dict(a: models.Model, b: Dict) -> models.Model:
 @merge.register
 def merge_node_node(a: models.Model, b: models.Model) -> models.Model:
     assert isinstance(a, type(b))
-    return type(a)(merge(to_dict(a), to_dict(b)))
+    return type(a)(**merge(to_dict(a), to_dict(b)))
 
 
 def get_node(workspace: Workspace, grai_type: NameNamespaceDict) -> NodeModel:
@@ -296,6 +306,37 @@ def create_batches(data: list, threshold_size=500 * 1024 * 1024) -> list:
         yield batch
 
 
+def create_dict_batches(data: list, threshold_size=500 * 1024 * 1024) -> Dict:
+    batch = {}
+    current_batch_size = 0
+    for item in data:
+        item_size = asizeof.asizeof(item)
+        if current_batch_size + item_size > threshold_size and batch:
+            yield batch
+            batch = {}
+            current_batch_size = 0
+        batch[(item.name, item.namespace)] = item
+        current_batch_size += item_size
+    if batch:
+        yield batch
+
+
+def valid_items(items: List[NodeModel | EdgeModel], workspace: Workspace) -> Iterable[NodeModel | EdgeModel]:
+    seen_keys = set()
+    for item in items:
+        if item.workspace != workspace:
+            raise ValueError(
+                f"Items in the batch must all belong to the same workspace.",
+                f"Expected workspace id {workspace.id}, got {item.workspace.id}",
+            )
+        key = (item.name, item.namespace)
+        if key in seen_keys:
+            warnings.warn(f"Multiple {type(item)} items with the same (name, namespace): {key} detected in batch.")
+        else:
+            seen_keys.add(key)
+        yield item
+
+
 def update(
     workspace: Workspace,
     source: Source,
@@ -310,27 +351,57 @@
     is_node = items[0].type in ["Node", "SourceNode"]
     Model = NodeModel if is_node else EdgeModel
     relationship = source.nodes if is_node else source.edges
+    through_label = "node_id" if is_node else "edge_id"
+    threshold_bytes = 200 * 1024 * 1024
+
+    items = (schema_to_model(item, workspace) for item in items)
+    found_items = []
+    for batch in create_dict_batches(valid_items(items, workspace), threshold_bytes):
+        # Update existing items
+        updated_item_keys = set()
+        existing_item_filter = reduce(
+            lambda q, key: q | Q(name=key[0], namespace=key[1], workspace=workspace), batch.keys(), Q()
+        )
+        updated_items = [
+            merge(item, batch[(item.name, item.namespace)])
+            for item in Model.objects.filter(existing_item_filter).iterator()
+        ]
+        del existing_item_filter
+
+        Model.objects.bulk_update(updated_items, ["metadata", "display_name"])
+        for item in updated_items:
+            batch[(item.name, item.namespace)] = item
+            updated_item_keys.add((item.name, item.namespace))
+
+        # Create new items
+        new_items = (item for item in batch.values() if (item.name, item.namespace) not in updated_item_keys)
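+        # Note: bulk_create returns the created instances, and on PostgreSQL their
+        # primary keys are populated on return, so the source-relation pass below
+        # can reference item.id for newly created rows.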
+        for item in Model.objects.bulk_create(new_items):
+            batch[(item.name, item.namespace)] = item
+
+        # Create foreign keys to source
+        through_items = (
+            relationship.through(source_id=source.id, **{through_label: item.id}) for item in batch.values()
+        )
+        relationship.through.objects.bulk_create(through_items, ignore_conflicts=True)
 
-    new_items, deactivated_items, updated_items = process_updates(workspace, source, items, active_items)
-
-    # relationship creationcan be improved with a switch to a bulk_create on the through entity
-    # https://stackoverflow.com/questions/68422898/efficiently-bulk-updating-many-manytomany-fields
-    for batch in create_batches(new_items):
-        Model.objects.bulk_create(batch)
-    for batch in create_batches(new_items):
-        Model.objects.bulk_update(batch, ["metadata"])
+        found_items.extend([item.id for item in batch.values()])
+        del batch
 
-    with transaction.atomic():
-        relationship.add(*new_items, *updated_items)
+    # Remove old source relations.
+    num_deleted, _ = (
+        relationship.through.objects.filter(source_id=source.id)
+        .exclude(**{f"{through_label}__in": found_items})
+        .delete()
+    )
 
-        if len(deactivated_items) > 0:
-            relationship.remove(*deactivated_items)
+    if num_deleted > 0:
         empty_source_query = Q(workspace=workspace, data_sources=None)
-
         deletable_nodes = NodeModel.objects.filter(empty_source_query)
-        deleted_edge_query = Q(source__in=deletable_nodes) | Q(destination__in=deletable_nodes) | empty_source_query
+        deletable_nodes_subquery = Subquery(deletable_nodes.values("id"))
+        EdgeModel.objects.filter(
+            Q(source__in=deletable_nodes_subquery) | Q(destination__in=deletable_nodes_subquery) | empty_source_query
+        ).delete()
 
-        EdgeModel.objects.filter(deleted_edge_query).delete()
         deletable_nodes.delete()
diff --git a/grai-server/app/connections/tasks.py b/grai-server/app/connections/tasks.py
index e2f4c0deb..34277e1bc 100644
--- a/grai-server/app/connections/tasks.py
+++ b/grai-server/app/connections/tasks.py
@@ -148,6 +148,7 @@ def execute_run(run: Run):
         )
 
         run.status = "success"
+        run.finished_at = timezone.now()
         run.save()
diff --git a/grai-server/app/connections/tests/test_tasks.py b/grai-server/app/connections/tests/test_tasks.py
index 6d822376a..72cc96eae 100644
--- a/grai-server/app/connections/tests/test_tasks.py
+++ b/grai-server/app/connections/tests/test_tasks.py
@@ -1,7 +1,7 @@
 import os
 import uuid
 from datetime import date
-
+from time import sleep
 import pytest
 from decouple import config
 from django.conf import settings
@@ -218,10 +218,10 @@ def test_run_update_server_postgres(self, test_workspace, test_postgres_connecto
             },
             secrets={"password": "grai"},
         )
+        run = Run.objects.create(connection=connection, workspace=test_workspace, source=test_source)
 
         process_run(str(run.id))
-
         run.refresh_from_db()
 
         assert run.status == "success"
diff --git a/grai-server/app/poetry.lock b/grai-server/app/poetry.lock
index 28e639a29..960b58dac 100644
--- a/grai-server/app/poetry.lock
+++ b/grai-server/app/poetry.lock
@@ -263,17 +263,17 @@ uvloop = ["uvloop (>=0.15.2)"]
 
 [[package]]
 name = "boto3"
-version = "1.34.94"
+version = "1.34.95"
 description = "The AWS SDK for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "boto3-1.34.94-py3-none-any.whl", hash = "sha256:bbb87d641c73462e53b1777083b55c8f13921618ad08757478a8122985c56c13"},
-    {file = "boto3-1.34.94.tar.gz", hash = "sha256:22f65b3c9b7a419f8f39c2dddc421e14fab8cbb3bd8a9d467e874237d39f59b1"},
+    {file = "boto3-1.34.95-py3-none-any.whl", hash = "sha256:e836b71d79671270fccac0a4d4c8ec239a6b82ea47c399b64675aa597d0ee63b"},
"sha256:e836b71d79671270fccac0a4d4c8ec239a6b82ea47c399b64675aa597d0ee63b"}, + {file = "boto3-1.34.95.tar.gz", hash = "sha256:decf52f8d5d8a1b10c9ff2a0e96ee207ed79e33d2e53fdf0880a5cbef70785e0"}, ] [package.dependencies] -botocore = ">=1.34.94,<1.35.0" +botocore = ">=1.34.95,<1.35.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -282,13 +282,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.94" +version = "1.34.95" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.94-py3-none-any.whl", hash = "sha256:f00a79002e0cb9d6895ecd0919c506402850177d7b6c4d2634fa2da362d95bcb"}, - {file = "botocore-1.34.94.tar.gz", hash = "sha256:99b11be9a28f9051af4c96fa121e9c3f22a86d499abd773c9e868b2a38961bae"}, + {file = "botocore-1.34.95-py3-none-any.whl", hash = "sha256:ead5823e0dd6751ece5498cb979fd9abf190e691c8833bcac6876fd6ca261fa7"}, + {file = "botocore-1.34.95.tar.gz", hash = "sha256:6bd76a2eadb42b91fa3528392e981ad5b4dfdee3968fa5b904278acf6cbf15ff"}, ] [package.dependencies] @@ -1005,17 +1005,17 @@ files = [ [[package]] name = "django" -version = "4.2.11" +version = "5.0.4" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" files = [ - {file = "Django-4.2.11-py3-none-any.whl", hash = "sha256:ddc24a0a8280a0430baa37aff11f28574720af05888c62b7cfe71d219f4599d3"}, - {file = "Django-4.2.11.tar.gz", hash = "sha256:6e6ff3db2d8dd0c986b4eec8554c8e4f919b5c1ff62a5b4390c17aff2ed6e5c4"}, + {file = "Django-5.0.4-py3-none-any.whl", hash = "sha256:916423499d75d62da7aa038d19aef23d23498d8df229775eb0a6309ee1013775"}, + {file = "Django-5.0.4.tar.gz", hash = "sha256:4bd01a8c830bb77a8a3b0e7d8b25b887e536ad17a81ba2dce5476135c73312bd"}, ] [package.dependencies] -asgiref = ">=3.6.0,<4" +asgiref = ">=3.7.0,<4" sqlparse = ">=0.3.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} @@ -1213,24 +1213,24 @@ phonenumberslite = ["phonenumberslite (>=7.0.2)"] [[package]] name = "django-postgres-extra" -version = "2.0.8" +version = "2.0.9rc10" description = "Bringing all of PostgreSQL's awesomeness to Django." 
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "django-postgres-extra-2.0.8.tar.gz", hash = "sha256:9efa08c6f18ed34460af41c6f679bb375b93d12544b1105aa348b787a30b46eb"},
-    {file = "django_postgres_extra-2.0.8-py3-none-any.whl", hash = "sha256:447d5a971759943ee63a9d4cef9c6c1fa290e518611ea521a38b6732681d2f3a"},
+    {file = "django-postgres-extra-2.0.9rc10.tar.gz", hash = "sha256:a97ae77154fe0cc5a13b3858327d8c4565508508b09d84198db84fbf23674257"},
+    {file = "django_postgres_extra-2.0.9rc10-py3-none-any.whl", hash = "sha256:2370c90cde6551d32b5c8cfc8d635bb64929c72a568ffd4a945a97c8443122b5"},
 ]
 
 [package.dependencies]
-Django = ">=2.0,<5.0"
+Django = ">=2.0,<6.0"
 python-dateutil = ">=2.8.0,<=3.0.0"
 
 [package.extras]
-analysis = ["autoflake (==1.4)", "autopep8 (==1.6.0)", "black (==22.3.0)", "docformatter (==1.4)", "flake8 (==4.0.1)", "isort (==5.10.0)"]
+analysis = ["autoflake (==1.4)", "autopep8 (==1.6.0)", "black (==22.3.0)", "django-stubs (==1.16.0)", "django-stubs (==1.9.0)", "docformatter (==1.4)", "flake8 (==4.0.1)", "isort (==5.10.0)", "mypy (==0.971)", "mypy (==1.2.0)", "types-dj-database-url (==1.3.0.0)", "types-psycopg2 (==2.9.21.9)", "types-python-dateutil (==2.8.19.12)", "typing-extensions (==4.1.0)", "typing-extensions (==4.5.0)"]
 docs = ["Sphinx (==2.2.0)", "docutils (<0.18)", "sphinx-rtd-theme (==0.4.3)"]
 publish = ["build (==0.7.0)", "twine (==3.7.1)"]
-test = ["coveralls (==3.3.0)", "dj-database-url (==0.5.0)", "freezegun (==1.1.0)", "psycopg2 (>=2.8.4,<3.0.0)", "pytest (==6.2.5)", "pytest-benchmark (==3.4.1)", "pytest-cov (==3.0.0)", "pytest-django (==4.4.0)", "snapshottest (==0.6.0)", "tox (==3.24.4)"]
+test = ["coveralls (==3.3.0)", "dj-database-url (==0.5.0)", "freezegun (==1.1.0)", "psycopg2 (>=2.8.4,<3.0.0)", "pytest (==6.2.5)", "pytest-benchmark (==3.4.1)", "pytest-cov (==3.0.0)", "pytest-django (==4.4.0)", "pytest-freezegun (==0.4.2)", "pytest-lazy-fixture (==0.6.3)", "snapshottest (==0.6.0)", "tox (==3.24.4)"]
 
 [[package]]
 name = "django-query-chunk"
@@ -1512,13 +1512,13 @@ python-dateutil = ">=2.4"
 
 [[package]]
 name = "fastcore"
-version = "1.5.32"
+version = "1.5.33"
 description = "Python supercharged for fastai development"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "fastcore-1.5.32-py3-none-any.whl", hash = "sha256:734e1b3fb98d9037a4555eec40b0394995954c12a1fe6ea2a484517e88ac25d7"},
-    {file = "fastcore-1.5.32.tar.gz", hash = "sha256:149164d9341372bad662e20e028c93343e94ff6c5b4a5a7d2a15921a9867d740"},
+    {file = "fastcore-1.5.33-py3-none-any.whl", hash = "sha256:566c29f7531c9eef6c9b291dcfeacf2f2de29d4bcfdbc54f2f81dbb36d21cede"},
+    {file = "fastcore-1.5.33.tar.gz", hash = "sha256:2fca2ca0fe6a0d6530cb31dd42a126c0a58ccca67b75cbd2efacea42a772459d"},
 ]
 
 [package.dependencies]
@@ -1590,13 +1590,13 @@ dev = ["jsonref", "matplotlib"]
 
 [[package]]
 name = "google-api-core"
-version = "2.18.0"
+version = "2.19.0"
 description = "Google API client core library"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9"},
-    {file = "google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6"},
+    {file = "google-api-core-2.19.0.tar.gz", hash = "sha256:cf1b7c2694047886d2af1128a03ae99e391108a08804f87cfd35970e49c9cd10"},
+    {file = "google_api_core-2.19.0-py3-none-any.whl", hash = "sha256:8661eec4078c35428fd3f69a2c7ee29e342896b70f01d1a1cbcb334372dd6251"},
 ]
 
 [package.dependencies]
@@ -2534,13 +2534,13 @@ files = [
 
 [[package]]
 name = "jsonschema"
-version = "4.21.1"
+version = "4.22.0"
 description = "An implementation of JSON Schema validation for Python"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "jsonschema-4.21.1-py3-none-any.whl", hash = "sha256:7996507afae316306f9e2290407761157c6f78002dcf7419acb99822143d1c6f"},
-    {file = "jsonschema-4.21.1.tar.gz", hash = "sha256:85727c00279f5fa6bedbe6238d2aa6403bedd8b4864ab11207d07df3cc1b2ee5"},
+    {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"},
+    {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"},
 ]
 
 [package.dependencies]
@@ -3063,13 +3063,13 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
 
 [[package]]
 name = "openai"
-version = "1.24.0"
+version = "1.25.0"
 description = "The official Python library for the openai API"
 optional = false
 python-versions = ">=3.7.1"
 files = [
-    {file = "openai-1.24.0-py3-none-any.whl", hash = "sha256:81eed0d850a35a170797d15d94144eb993459411ba56325891ab488708f1b612"},
-    {file = "openai-1.24.0.tar.gz", hash = "sha256:9cc6ac92bac9d57c8d4bc101e9e4c7201304c88efb89320d0f0348625fa8647d"},
+    {file = "openai-1.25.0-py3-none-any.whl", hash = "sha256:d0cfdf6afb31a5dabf3b95966cb31f3c757a0edaf3228715409cb404b9933de0"},
+    {file = "openai-1.25.0.tar.gz", hash = "sha256:22c35b26b8281cd2759b1a4c05ac99e2f2b26a9df71f90a0b4ddb75aa27adc81"},
 ]
 
 [package.dependencies]
@@ -5342,4 +5342,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "9dc205ba393fc5f2d4c2b51af971b380bc462a7def1b704615a3405a7becd8cb"
+content-hash = "452c224975d114527b5e8f4fda8463feaef4d34a319a9d354504bd0bfb9b9f22"
diff --git a/grai-server/app/pyproject.toml b/grai-server/app/pyproject.toml
index 1ee10210c..151b65266 100644
--- a/grai-server/app/pyproject.toml
+++ b/grai-server/app/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "the_guide"
-version = "0.1.69"
+version = "0.1.71"
 description = ""
 authors = ["Grai "]
 license = "Elastic-2.0"
@@ -11,7 +11,7 @@ documentation = "https://docs.grai.io/"
 
 [tool.poetry.dependencies]
 python = "^3.11"
-Django = "^4.1"
+Django = "^5.0"
 djangorestframework = "^3.12.4"
 djangorestframework-simplejwt = {extras = ["crypto"], version = "^5.2.0"}
 drf-spectacular = "^0.27.1"
diff --git a/grai-server/app/the_guide/__init__.py b/grai-server/app/the_guide/__init__.py
index 9becb6754..020e5c816 100755
--- a/grai-server/app/the_guide/__init__.py
+++ b/grai-server/app/the_guide/__init__.py
@@ -2,5 +2,5 @@
 # Django starts so that shared_task will use this app.
 from .celery import app as celery_app
 
-__version__ = "0.1.69"
+__version__ = "0.1.71"
 __all__ = ("celery_app", "__version__")
diff --git a/grai-server/docker-compose.yml b/grai-server/docker-compose.yml
index af371826a..da7cc54ef 100755
--- a/grai-server/docker-compose.yml
+++ b/grai-server/docker-compose.yml
@@ -1,5 +1,3 @@
-version: "3.7"
-
 services:
   the_guide:
     build:
@@ -19,6 +17,7 @@ services:
       - OPENAI_API_KEY=${OPENAI_API_KEY}
       - OPENAI_ORG_ID=${OPENAI_ORG_ID}
       - OPENAI_PREFERRED_MODEL=${OPENAI_PREFERRED_MODEL}
+      # - CREATE_SAMPLE_DATA=false
     depends_on:
       - redis
       - db
@@ -42,6 +41,7 @@ services:
       - OPENAI_API_KEY=${OPENAI_API_KEY}
       - OPENAI_ORG_ID=${OPENAI_ORG_ID}
       - OPENAI_PREFERRED_MODEL=${OPENAI_PREFERRED_MODEL}
+      # - CREATE_SAMPLE_DATA=false
     depends_on:
       - redis
       - db
@@ -60,6 +60,7 @@ services:
       - OPENAI_API_KEY=${OPENAI_API_KEY}
       - OPENAI_ORG_ID=${OPENAI_ORG_ID}
       - OPENAI_PREFERRED_MODEL=${OPENAI_PREFERRED_MODEL}
+      # - CREATE_SAMPLE_DATA=false
     depends_on:
       - redis
      - db
diff --git a/grai-server/examples/docker-compose/docker-compose.yml b/grai-server/examples/docker-compose/docker-compose.yml
deleted file mode 100755
index 2477483f4..000000000
--- a/grai-server/examples/docker-compose/docker-compose.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-version: "3.7"
-
-services:
-  the_guide:
-    # image: ghcr.io/grai-io/grai-core/grai-server:latest
-    build: ./app
-    volumes:
-      - ./app/:/usr/src/app/
-    ports:
-      - 8000:8000
-    environment:
-      - DB_HOST=db
-      - DB_PORT=5432
-      - DB_NAME=grai
-      - DB_USER=grai
-      - DB_PASSWORD=grai
-      - DJANGO_SUPERUSER_USERNAME=null@grai.io
-      - DJANGO_SUPERUSER_PASSWORD=super_secret
-      - DJANGO_SUPERUSER_WORKSPACE=Workspace1
-      - SERVER_HOST=localhost
-      - FRONTEND_HOST=localhost
-      - FRONTEND_URL=http://localhost:3000
-    healthcheck:
-      test: "curl -f -v -H 'Accept: application/json' http://localhost:8000/health/"
-      interval: 2s
-      timeout: 5s
-      retries: 10
-      start_period: 20s
-  db:
-    image: ankane/pgvector
-    environment:
-      - POSTGRES_USER=grai
-      - POSTGRES_PASSWORD=grai
-      - POSTGRES_DB=grai
-    ports:
-      - 5432:5432
-    healthcheck:
-      test: "pg_isready -U grai"
-      interval: 2s
-      timeout: 2s
-      retries: 15
-  frontend:
-    build: ../grai-frontend
-    ports:
-      - 3000:80
-    environment:
-      - REACT_APP_API_URL=http://localhost:8000